From 2224e6c48b2c74eb3c3f11125de4fc8f9fde8720 Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Wed, 25 Sep 2024 12:53:51 +0100 Subject: [PATCH 01/84] feat(exex): finalize ExEx WAL on new finalized block header (#11174) --- Cargo.lock | 2 + crates/exex/exex/Cargo.toml | 1 + crates/exex/exex/src/lib.rs | 1 + crates/exex/exex/src/manager.rs | 165 +++++++++++++++++++++-- crates/exex/exex/src/wal/cache.rs | 2 +- crates/exex/exex/src/wal/mod.rs | 34 +++-- crates/exex/exex/src/wal/storage.rs | 12 +- crates/node/builder/Cargo.toml | 3 +- crates/node/builder/src/launch/engine.rs | 2 +- crates/node/builder/src/launch/exex.rs | 25 +++- crates/node/builder/src/launch/mod.rs | 2 +- crates/node/core/src/dirs.rs | 5 + 12 files changed, 215 insertions(+), 39 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index da895a2b6f7c6..cd34b08f58209 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7377,6 +7377,7 @@ dependencies = [ "futures", "metrics", "reth-blockchain-tree", + "reth-chain-state", "reth-chainspec", "reth-config", "reth-db-api", @@ -7755,6 +7756,7 @@ dependencies = [ "reth-auto-seal-consensus", "reth-beacon-consensus", "reth-blockchain-tree", + "reth-chain-state", "reth-chainspec", "reth-cli-util", "reth-config", diff --git a/crates/exex/exex/Cargo.toml b/crates/exex/exex/Cargo.toml index 9b6146d220c8a..74f62904a67e9 100644 --- a/crates/exex/exex/Cargo.toml +++ b/crates/exex/exex/Cargo.toml @@ -13,6 +13,7 @@ workspace = true [dependencies] ## reth +reth-chain-state.workspace = true reth-chainspec.workspace = true reth-config.workspace = true reth-evm.workspace = true diff --git a/crates/exex/exex/src/lib.rs b/crates/exex/exex/src/lib.rs index d54bc3d9f3cfe..4a819767a7121 100644 --- a/crates/exex/exex/src/lib.rs +++ b/crates/exex/exex/src/lib.rs @@ -47,6 +47,7 @@ mod manager; pub use manager::*; mod wal; +pub use wal::*; // Re-export exex types #[doc(inline)] diff --git a/crates/exex/exex/src/manager.rs b/crates/exex/exex/src/manager.rs index f4c1687be6ae6..a775765c0278c 100644 --- a/crates/exex/exex/src/manager.rs +++ b/crates/exex/exex/src/manager.rs @@ -1,14 +1,17 @@ use crate::{ - BackfillJobFactory, ExExEvent, ExExNotification, FinishedExExHeight, StreamBackfillJob, + wal::Wal, BackfillJobFactory, ExExEvent, ExExNotification, FinishedExExHeight, + StreamBackfillJob, }; use alloy_primitives::{BlockNumber, U256}; use eyre::OptionExt; use futures::{Stream, StreamExt}; use metrics::Gauge; +use reth_chain_state::ForkChoiceStream; use reth_chainspec::Head; use reth_evm::execute::BlockExecutorProvider; use reth_exex_types::ExExHead; use reth_metrics::{metrics::Counter, Metrics}; +use reth_primitives::SealedHeader; use reth_provider::{BlockReader, Chain, HeaderProvider, StateProviderFactory}; use reth_tracing::tracing::debug; use std::{ @@ -530,6 +533,11 @@ pub struct ExExManager { /// The finished height of all `ExEx`'s. finished_height: watch::Sender, + /// Write-Ahead Log for the [`ExExNotification`]s. + wal: Wal, + /// A stream of finalized headers. + finalized_header_stream: ForkChoiceStream, + /// A handle to the `ExEx` manager. handle: ExExManagerHandle, /// Metrics for the `ExEx` manager. @@ -544,7 +552,12 @@ impl ExExManager { /// /// When the capacity is exceeded (which can happen if an `ExEx` is slow) no one can send /// notifications over [`ExExManagerHandle`]s until there is capacity again. 
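// Editor's illustrative sketch, not a hunk of this patch: the change just below grows
// `ExExManager::new` from (handles, capacity) to four arguments, adding the WAL and a
// `ForkChoiceStream<SealedHeader>` of finalized headers. Wired up in the spirit of this
// patch's `launch/exex.rs` changes and its tests, with `wal_dir`, `provider`,
// `exex_handles` and `executor` assumed to exist in the caller's scope:
let wal = Wal::new(wal_dir)?;                              // e.g. `<datadir>/exex/wal`
let finalized_headers = provider.finalized_block_stream(); // ForkChoiceStream<SealedHeader>
let exex_manager = ExExManager::new(exex_handles, 1024, wal, finalized_headers);
let exex_manager_handle = exex_manager.handle();
executor.spawn_critical("exex manager", async move {
    exex_manager.await.expect("exex manager crashed");
});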
- pub fn new(handles: Vec, max_capacity: usize) -> Self { + pub fn new( + handles: Vec, + max_capacity: usize, + wal: Wal, + finalized_header_stream: ForkChoiceStream, + ) -> Self { let num_exexs = handles.len(); let (handle_tx, handle_rx) = mpsc::unbounded_channel(); @@ -575,6 +588,9 @@ impl ExExManager { is_ready: is_ready_tx, finished_height: finished_height_tx, + wal, + finalized_header_stream, + handle: ExExManagerHandle { exex_tx: handle_tx, num_exexs, @@ -618,6 +634,16 @@ impl Future for ExExManager { type Output = eyre::Result<()>; fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { + // Drain the finalized header stream and grab the last finalized header + let mut last_finalized_header = None; + while let Poll::Ready(finalized_header) = self.finalized_header_stream.poll_next_unpin(cx) { + last_finalized_header = finalized_header; + } + // If there is a finalized header, finalize the WAL with it + if let Some(header) = last_finalized_header { + self.wal.finalize((header.number, header.hash()).into())?; + } + // drain handle notifications while self.buffer.len() < self.max_capacity { if let Poll::Ready(Some(notification)) = self.handle_rx.poll_recv(cx) { @@ -820,6 +846,13 @@ mod tests { }; use reth_testing_utils::generators::{self, random_block, BlockParams}; + fn empty_finalized_header_stream() -> ForkChoiceStream { + let (tx, rx) = watch::channel(None); + // Do not drop the sender, otherwise the receiver will always return an error + std::mem::forget(tx); + ForkChoiceStream::new(rx) + } + #[tokio::test] async fn test_delivers_events() { let (mut exex_handle, event_tx, mut _notification_rx) = @@ -833,30 +866,66 @@ mod tests { #[tokio::test] async fn test_has_exexs() { + let temp_dir = tempfile::tempdir().unwrap(); let (exex_handle_1, _, _) = ExExHandle::new("test_exex_1".to_string(), Head::default(), (), ()); - assert!(!ExExManager::new(vec![], 0).handle.has_exexs()); - - assert!(ExExManager::new(vec![exex_handle_1], 0).handle.has_exexs()); + assert!(!ExExManager::new( + vec![], + 0, + Wal::new(temp_dir.path()).unwrap(), + empty_finalized_header_stream() + ) + .handle + .has_exexs()); + + assert!(ExExManager::new( + vec![exex_handle_1], + 0, + Wal::new(temp_dir.path()).unwrap(), + empty_finalized_header_stream() + ) + .handle + .has_exexs()); } #[tokio::test] async fn test_has_capacity() { + let temp_dir = tempfile::tempdir().unwrap(); let (exex_handle_1, _, _) = ExExHandle::new("test_exex_1".to_string(), Head::default(), (), ()); - assert!(!ExExManager::new(vec![], 0).handle.has_capacity()); - - assert!(ExExManager::new(vec![exex_handle_1], 10).handle.has_capacity()); + assert!(!ExExManager::new( + vec![], + 0, + Wal::new(temp_dir.path()).unwrap(), + empty_finalized_header_stream() + ) + .handle + .has_capacity()); + + assert!(ExExManager::new( + vec![exex_handle_1], + 10, + Wal::new(temp_dir.path()).unwrap(), + empty_finalized_header_stream() + ) + .handle + .has_capacity()); } #[test] fn test_push_notification() { + let temp_dir = tempfile::tempdir().unwrap(); let (exex_handle, _, _) = ExExHandle::new("test_exex".to_string(), Head::default(), (), ()); // Create a mock ExExManager and add the exex_handle to it - let mut exex_manager = ExExManager::new(vec![exex_handle], 10); + let mut exex_manager = ExExManager::new( + vec![exex_handle], + 10, + Wal::new(temp_dir.path()).unwrap(), + empty_finalized_header_stream(), + ); // Define the notification for testing let mut block1 = SealedBlockWithSenders::default(); @@ -898,11 +967,17 @@ mod tests { #[test] fn 
test_update_capacity() { + let temp_dir = tempfile::tempdir().unwrap(); let (exex_handle, _, _) = ExExHandle::new("test_exex".to_string(), Head::default(), (), ()); // Create a mock ExExManager and add the exex_handle to it let max_capacity = 5; - let mut exex_manager = ExExManager::new(vec![exex_handle], max_capacity); + let mut exex_manager = ExExManager::new( + vec![exex_handle], + max_capacity, + Wal::new(temp_dir.path()).unwrap(), + empty_finalized_header_stream(), + ); // Push some notifications to fill part of the buffer let mut block1 = SealedBlockWithSenders::default(); @@ -932,6 +1007,7 @@ mod tests { #[tokio::test] async fn test_updates_block_height() { + let temp_dir = tempfile::tempdir().unwrap(); let (exex_handle, event_tx, mut _notification_rx) = ExExHandle::new("test_exex".to_string(), Head::default(), (), ()); @@ -942,7 +1018,12 @@ mod tests { event_tx.send(ExExEvent::FinishedHeight(42)).unwrap(); // Create a mock ExExManager and add the exex_handle to it - let exex_manager = ExExManager::new(vec![exex_handle], 10); + let exex_manager = ExExManager::new( + vec![exex_handle], + 10, + Wal::new(temp_dir.path()).unwrap(), + empty_finalized_header_stream(), + ); let mut cx = Context::from_waker(futures::task::noop_waker_ref()); @@ -969,6 +1050,7 @@ mod tests { #[tokio::test] async fn test_updates_block_height_lower() { + let temp_dir = tempfile::tempdir().unwrap(); // Create two `ExExHandle` instances let (exex_handle1, event_tx1, _) = ExExHandle::new("test_exex1".to_string(), Head::default(), (), ()); @@ -979,7 +1061,12 @@ mod tests { event_tx1.send(ExExEvent::FinishedHeight(42)).unwrap(); event_tx2.send(ExExEvent::FinishedHeight(10)).unwrap(); - let exex_manager = ExExManager::new(vec![exex_handle1, exex_handle2], 10); + let exex_manager = ExExManager::new( + vec![exex_handle1, exex_handle2], + 10, + Wal::new(temp_dir.path()).unwrap(), + empty_finalized_header_stream(), + ); let mut cx = Context::from_waker(futures::task::noop_waker_ref()); @@ -1002,6 +1089,7 @@ mod tests { #[tokio::test] async fn test_updates_block_height_greater() { + let temp_dir = tempfile::tempdir().unwrap(); // Create two `ExExHandle` instances let (exex_handle1, event_tx1, _) = ExExHandle::new("test_exex1".to_string(), Head::default(), (), ()); @@ -1015,7 +1103,12 @@ mod tests { event_tx1.send(ExExEvent::FinishedHeight(42)).unwrap(); event_tx2.send(ExExEvent::FinishedHeight(100)).unwrap(); - let exex_manager = ExExManager::new(vec![exex_handle1, exex_handle2], 10); + let exex_manager = ExExManager::new( + vec![exex_handle1, exex_handle2], + 10, + Wal::new(temp_dir.path()).unwrap(), + empty_finalized_header_stream(), + ); let mut cx = Context::from_waker(futures::task::noop_waker_ref()); @@ -1042,12 +1135,18 @@ mod tests { #[tokio::test] async fn test_exex_manager_capacity() { + let temp_dir = tempfile::tempdir().unwrap(); let (exex_handle_1, _, _) = ExExHandle::new("test_exex_1".to_string(), Head::default(), (), ()); // Create an ExExManager with a small max capacity let max_capacity = 2; - let mut exex_manager = ExExManager::new(vec![exex_handle_1], max_capacity); + let mut exex_manager = ExExManager::new( + vec![exex_handle_1], + max_capacity, + Wal::new(temp_dir.path()).unwrap(), + empty_finalized_header_stream(), + ); let mut cx = Context::from_waker(futures::task::noop_waker_ref()); @@ -1223,6 +1322,44 @@ mod tests { assert_eq!(exex_handle.next_notification_id, 23); } + #[tokio::test] + async fn test_exex_wal_finalize() -> eyre::Result<()> { + reth_tracing::init_test_tracing(); + + let temp_dir = 
tempfile::tempdir().unwrap(); + let mut wal = Wal::new(temp_dir.path()).unwrap(); + let block = random_block(&mut generators::rng(), 0, Default::default()) + .seal_with_senders() + .ok_or_eyre("failed to recover senders")?; + let notification = ExExNotification::ChainCommitted { + new: Arc::new(Chain::new(vec![block.clone()], Default::default(), None)), + }; + wal.commit(¬ification)?; + + let (tx, rx) = watch::channel(None); + let finalized_header_stream = ForkChoiceStream::new(rx); + + let (exex_handle, _, _) = ExExHandle::new("test_exex".to_string(), Head::default(), (), ()); + + let mut exex_manager = + std::pin::pin!(ExExManager::new(vec![exex_handle], 1, wal, finalized_header_stream)); + + let mut cx = Context::from_waker(futures::task::noop_waker_ref()); + + assert!(exex_manager.as_mut().poll(&mut cx).is_pending()); + assert_eq!( + exex_manager.wal.iter_notifications()?.collect::>>()?, + [notification] + ); + + tx.send(Some(block.header.clone()))?; + + assert!(exex_manager.as_mut().poll(&mut cx).is_pending()); + assert!(exex_manager.wal.iter_notifications()?.next().is_none()); + + Ok(()) + } + #[tokio::test] async fn exex_notifications_behind_head_canonical() -> eyre::Result<()> { let mut rng = generators::rng(); diff --git a/crates/exex/exex/src/wal/cache.rs b/crates/exex/exex/src/wal/cache.rs index 3e26fdcf4ca2a..25719d11bf938 100644 --- a/crates/exex/exex/src/wal/cache.rs +++ b/crates/exex/exex/src/wal/cache.rs @@ -12,7 +12,7 @@ use reth_primitives::BlockNumHash; /// This cache is needed to avoid walking the WAL directory every time we want to find a /// notification corresponding to a block. #[derive(Debug)] -pub(super) struct BlockCache(BTreeMap>); +pub struct BlockCache(BTreeMap>); impl BlockCache { /// Creates a new instance of [`BlockCache`]. diff --git a/crates/exex/exex/src/wal/mod.rs b/crates/exex/exex/src/wal/mod.rs index a5e0188ca593a..0b699883ead32 100644 --- a/crates/exex/exex/src/wal/mod.rs +++ b/crates/exex/exex/src/wal/mod.rs @@ -1,17 +1,17 @@ #![allow(dead_code)] mod cache; +pub use cache::BlockCache; mod storage; +pub use storage::Storage; use std::path::Path; -use cache::BlockCache; use reth_exex_types::ExExNotification; use reth_primitives::BlockNumHash; use reth_tracing::tracing::{debug, instrument}; -use storage::Storage; -/// WAL is a write-ahead log (WAL) that stores the notifications sent to a particular ExEx. +/// WAL is a write-ahead log (WAL) that stores the notifications sent to ExExes. /// /// WAL is backed by a directory of binary files represented by [`Storage`] and a block cache /// represented by [`BlockCache`]. The role of the block cache is to avoid walking the WAL directory @@ -26,7 +26,7 @@ use storage::Storage; /// 3. When the chain is finalized, call [`Wal::finalize`] to prevent the infinite growth of the /// WAL. #[derive(Debug)] -pub(crate) struct Wal { +pub struct Wal { /// The underlying WAL storage backed by a file. storage: Storage, /// WAL block cache. See [`cache::BlockCache`] docs for more details. @@ -35,7 +35,7 @@ pub(crate) struct Wal { impl Wal { /// Creates a new instance of [`Wal`]. 
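// Editor's illustrative sketch, not a hunk of this patch: with the visibility changes in
// this hunk the WAL is driven through three now-public operations — `commit` for every
// notification delivered to the ExExes, `rollback` when a fork is abandoned, and
// `finalize` once blocks become finalized (as the manager now does). `wal_dir`,
// `notification`, `fork_block` and `finalized_block` are placeholders assumed in scope:
let mut wal = Wal::new(wal_dir)?;         // opens the directory and fills the block cache
wal.commit(&notification)?;               // appends the notification as a new WAL file
let _unwound = wal.rollback(fork_block)?; // unwinds back to `fork_block`, returning what was removed
wal.finalize(finalized_block)?;           // prunes fully finalized notifications from the front of the WAL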
- pub(crate) fn new(directory: impl AsRef) -> eyre::Result { + pub fn new(directory: impl AsRef) -> eyre::Result { let mut wal = Self { storage: Storage::new(directory)?, block_cache: BlockCache::new() }; wal.fill_block_cache()?; Ok(wal) @@ -71,8 +71,7 @@ impl Wal { reverted_block_range = ?notification.reverted_chain().as_ref().map(|chain| chain.range()), committed_block_range = ?notification.committed_chain().as_ref().map(|chain| chain.range()) ))] - pub(crate) fn commit(&mut self, notification: &ExExNotification) -> eyre::Result<()> { - debug!("Writing notification to WAL"); + pub fn commit(&mut self, notification: &ExExNotification) -> eyre::Result<()> { let file_id = self.block_cache.back().map_or(0, |block| block.0 + 1); self.storage.write_notification(file_id, notification)?; @@ -94,7 +93,7 @@ impl Wal { /// 1. The block number and hash of the lowest removed block. /// 2. The notifications that were removed. #[instrument(target = "exex::wal", skip(self))] - pub(crate) fn rollback( + pub fn rollback( &mut self, to_block: BlockNumHash, ) -> eyre::Result)>> { @@ -162,9 +161,9 @@ impl Wal { /// 2. Removes the notifications from the beginning of WAL until the found notification. If this /// notification includes both finalized and non-finalized blocks, it will not be removed. #[instrument(target = "exex::wal", skip(self))] - pub(crate) fn finalize(&mut self, to_block: BlockNumHash) -> eyre::Result<()> { + pub fn finalize(&mut self, to_block: BlockNumHash) -> eyre::Result<()> { // First, walk cache to find the file ID of the notification with the finalized block and - // save the file ID with the last unfinalized block. Do not remove any notifications + // save the file ID with the first unfinalized block. Do not remove any notifications // yet. let mut unfinalized_from_file_id = None; { @@ -177,7 +176,9 @@ impl Wal { { let notification = self.storage.read_notification(file_id)?; if notification.committed_chain().unwrap().blocks().len() == 1 { - unfinalized_from_file_id = block_cache.peek().map(|(file_id, _)| *file_id); + unfinalized_from_file_id = Some( + block_cache.peek().map(|(file_id, _)| *file_id).unwrap_or(u64::MAX), + ); } else { unfinalized_from_file_id = Some(file_id); } @@ -226,6 +227,17 @@ impl Wal { Ok(()) } + + /// Returns an iterator over all notifications in the WAL. + pub(crate) fn iter_notifications( + &self, + ) -> eyre::Result> + '_>> { + let Some(range) = self.storage.files_range()? else { + return Ok(Box::new(std::iter::empty())) + }; + + Ok(Box::new(self.storage.iter_notifications(range).map(|entry| Ok(entry?.1)))) + } } #[cfg(test)] diff --git a/crates/exex/exex/src/wal/storage.rs b/crates/exex/exex/src/wal/storage.rs index 283b303a346f0..766d70b072749 100644 --- a/crates/exex/exex/src/wal/storage.rs +++ b/crates/exex/exex/src/wal/storage.rs @@ -15,7 +15,7 @@ use tracing::instrument; /// Each notification is represented by a single file that contains a MessagePack-encoded /// notification. #[derive(Debug)] -pub(super) struct Storage { +pub struct Storage { /// The path to the WAL file. path: PathBuf, } @@ -107,23 +107,25 @@ impl Storage { } /// Reads the notification from the file with the given id. 
+ #[instrument(target = "exex::wal::storage", skip(self))] pub(super) fn read_notification(&self, file_id: u64) -> eyre::Result { - debug!(?file_id, "Reading notification from WAL"); - let file_path = self.file_path(file_id); + debug!(?file_path, "Reading notification from WAL"); + let mut file = File::open(&file_path)?; read_notification(&mut file) } /// Writes the notification to the file with the given id. + #[instrument(target = "exex::wal::storage", skip(self, notification))] pub(super) fn write_notification( &self, file_id: u64, notification: &ExExNotification, ) -> eyre::Result<()> { - debug!(?file_id, "Writing notification to WAL"); - let file_path = self.file_path(file_id); + debug!(?file_path, "Writing notification to WAL"); + let mut file = File::create_new(&file_path)?; write_notification(&mut file, notification)?; diff --git a/crates/node/builder/Cargo.toml b/crates/node/builder/Cargo.toml index 30aebfb763533..1bf2ba2337398 100644 --- a/crates/node/builder/Cargo.toml +++ b/crates/node/builder/Cargo.toml @@ -16,6 +16,7 @@ workspace = true reth-auto-seal-consensus.workspace = true reth-beacon-consensus.workspace = true reth-blockchain-tree.workspace = true +reth-chain-state.workspace = true reth-chainspec.workspace = true reth-cli-util.workspace = true reth-config.workspace = true @@ -45,12 +46,12 @@ reth-payload-validator.workspace = true reth-primitives.workspace = true reth-provider.workspace = true reth-prune.workspace = true +reth-rpc = { workspace = true, features = ["js-tracer"] } reth-rpc-api.workspace = true reth-rpc-builder.workspace = true reth-rpc-engine-api.workspace = true reth-rpc-eth-types.workspace = true reth-rpc-layer.workspace = true -reth-rpc = { workspace = true, features = ["js-tracer"] } reth-stages.workspace = true reth-static-file.workspace = true reth-tasks.workspace = true diff --git a/crates/node/builder/src/launch/engine.rs b/crates/node/builder/src/launch/engine.rs index 0f68772c45365..708d791a0e844 100644 --- a/crates/node/builder/src/launch/engine.rs +++ b/crates/node/builder/src/launch/engine.rs @@ -146,7 +146,7 @@ where ctx.configs().clone(), ) .launch() - .await; + .await?; // create pipeline let network_client = ctx.components().network().fetch_client().await?; diff --git a/crates/node/builder/src/launch/exex.rs b/crates/node/builder/src/launch/exex.rs index 8624e379575d4..d037200869c44 100644 --- a/crates/node/builder/src/launch/exex.rs +++ b/crates/node/builder/src/launch/exex.rs @@ -3,7 +3,9 @@ use std::{fmt, fmt::Debug}; use futures::future; -use reth_exex::{ExExContext, ExExHandle, ExExManager, ExExManagerHandle}; +use reth_chain_state::ForkChoiceSubscriptions; +use reth_chainspec::EthChainSpec; +use reth_exex::{ExExContext, ExExHandle, ExExManager, ExExManagerHandle, Wal}; use reth_node_api::{FullNodeComponents, NodeTypes}; use reth_primitives::Head; use reth_provider::CanonStateSubscriptions; @@ -35,12 +37,12 @@ impl ExExLauncher { /// /// Spawns all extensions and returns the handle to the exex manager if any extensions are /// installed. 
- pub async fn launch(self) -> Option { + pub async fn launch(self) -> eyre::Result> { let Self { head, extensions, components, config_container } = self; if extensions.is_empty() { // nothing to launch - return None + return Ok(None) } let mut exex_handles = Vec::with_capacity(extensions.len()); @@ -94,7 +96,20 @@ impl ExExLauncher { // spawn exex manager debug!(target: "reth::cli", "spawning exex manager"); // todo(onbjerg): rm magic number - let exex_manager = ExExManager::new(exex_handles, 1024); + let exex_wal = Wal::new( + config_container + .config + .datadir + .clone() + .resolve_datadir(config_container.config.chain.chain()) + .exex_wal(), + )?; + let exex_manager = ExExManager::new( + exex_handles, + 1024, + exex_wal, + components.provider().finalized_block_stream(), + ); let exex_manager_handle = exex_manager.handle(); components.task_executor().spawn_critical("exex manager", async move { exex_manager.await.expect("exex manager crashed"); @@ -117,7 +132,7 @@ impl ExExLauncher { info!(target: "reth::cli", "ExEx Manager started"); - Some(exex_manager_handle) + Ok(Some(exex_manager_handle)) } } diff --git a/crates/node/builder/src/launch/mod.rs b/crates/node/builder/src/launch/mod.rs index 78697056d290f..9c7d562d19d2e 100644 --- a/crates/node/builder/src/launch/mod.rs +++ b/crates/node/builder/src/launch/mod.rs @@ -184,7 +184,7 @@ where ctx.configs().clone(), ) .launch() - .await; + .await?; // create pipeline let network_client = ctx.components().network().fetch_client().await?; diff --git a/crates/node/core/src/dirs.rs b/crates/node/core/src/dirs.rs index c788f35da1d67..4f8507c4e6876 100644 --- a/crates/node/core/src/dirs.rs +++ b/crates/node/core/src/dirs.rs @@ -350,6 +350,11 @@ impl ChainPath { pub fn invalid_block_hooks(&self) -> PathBuf { self.data_dir().join("invalid_block_hooks") } + + /// Returns the path to the ExEx WAL directory for this chain. 
+ pub fn exex_wal(&self) -> PathBuf { + self.data_dir().join("exex/wal") + } } impl AsRef for ChainPath { From 2022dd3de55045a72954c36efbc9e4bba741543f Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Wed, 25 Sep 2024 14:13:48 +0200 Subject: [PATCH 02/84] primitives: rm `alloy_consensus::transaction` reexports (#11191) --- Cargo.lock | 12 ++++++++++++ bin/reth/Cargo.toml | 1 + bin/reth/src/commands/debug_cmd/build_block.rs | 3 ++- crates/blockchain-tree/Cargo.toml | 1 + crates/blockchain-tree/src/blockchain_tree.rs | 3 ++- crates/chain-state/Cargo.toml | 3 +++ crates/chain-state/src/test_utils.rs | 3 ++- crates/consensus/common/Cargo.toml | 2 ++ crates/consensus/common/src/validation.rs | 3 ++- crates/ethereum/evm/Cargo.toml | 1 + crates/ethereum/evm/src/execute.rs | 3 ++- crates/exex/exex/Cargo.toml | 1 + crates/exex/exex/src/backfill/test_utils.rs | 3 ++- crates/net/eth-wire-types/Cargo.toml | 1 + crates/net/eth-wire-types/src/blocks.rs | 3 ++- crates/net/eth-wire-types/src/transactions.rs | 3 ++- crates/net/network/Cargo.toml | 1 + crates/net/network/tests/it/requests.rs | 3 ++- crates/net/network/tests/it/txgossip.rs | 3 ++- crates/optimism/evm/Cargo.toml | 1 + crates/optimism/evm/src/execute.rs | 4 ++-- crates/primitives/benches/validate_blob_tx.rs | 3 ++- crates/primitives/src/alloy_compat.rs | 4 ++-- crates/primitives/src/lib.rs | 6 +++--- crates/primitives/src/transaction/mod.rs | 4 +--- crates/primitives/src/transaction/pooled.rs | 8 +++++--- crates/primitives/src/transaction/sidecar.rs | 6 ++---- crates/primitives/src/transaction/tx_type.rs | 14 +++++++------- crates/rpc/rpc/Cargo.toml | 2 ++ crates/rpc/rpc/src/eth/core.rs | 2 +- .../codecs/src/alloy/transaction/eip1559.rs | 2 +- .../codecs/src/alloy/transaction/eip2930.rs | 4 ++-- .../codecs/src/alloy/transaction/eip4844.rs | 4 ++-- .../codecs/src/alloy/transaction/eip7702.rs | 4 ++-- crates/storage/provider/Cargo.toml | 4 ++++ crates/storage/provider/src/test_utils/blocks.rs | 3 ++- crates/transaction-pool/Cargo.toml | 4 +++- crates/transaction-pool/src/test_utils/gen.rs | 2 +- crates/transaction-pool/src/test_utils/mock.rs | 6 +++--- crates/transaction-pool/src/traits.rs | 6 ++---- testing/testing-utils/Cargo.toml | 1 + testing/testing-utils/src/generators.rs | 6 ++++-- 42 files changed, 98 insertions(+), 55 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index cd34b08f58209..35301d3224836 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6173,6 +6173,7 @@ dependencies = [ name = "reth" version = "1.0.7" dependencies = [ + "alloy-consensus", "alloy-rlp", "alloy-rpc-types", "aquamarine", @@ -6379,6 +6380,7 @@ dependencies = [ name = "reth-blockchain-tree" version = "1.0.7" dependencies = [ + "alloy-consensus", "alloy-genesis", "alloy-primitives", "aquamarine", @@ -6427,6 +6429,7 @@ dependencies = [ name = "reth-chain-state" version = "1.0.7" dependencies = [ + "alloy-consensus", "alloy-eips", "alloy-primitives", "alloy-signer", @@ -6632,6 +6635,7 @@ dependencies = [ name = "reth-consensus-common" version = "1.0.7" dependencies = [ + "alloy-consensus", "alloy-primitives", "mockall", "rand 0.8.5", @@ -7157,6 +7161,7 @@ name = "reth-eth-wire-types" version = "1.0.7" dependencies = [ "alloy-chains", + "alloy-consensus", "alloy-eips", "alloy-genesis", "alloy-primitives", @@ -7297,6 +7302,7 @@ dependencies = [ name = "reth-evm-ethereum" version = "1.0.7" dependencies = [ + "alloy-consensus", "alloy-eips", "alloy-genesis", "alloy-primitives", @@ -7319,6 +7325,7 @@ dependencies = [ name = 
"reth-evm-optimism" version = "1.0.7" dependencies = [ + "alloy-consensus", "alloy-genesis", "alloy-primitives", "reth-chainspec", @@ -7371,6 +7378,7 @@ dependencies = [ name = "reth-exex" version = "1.0.7" dependencies = [ + "alloy-consensus", "alloy-genesis", "alloy-primitives", "eyre", @@ -7580,6 +7588,7 @@ dependencies = [ name = "reth-network" version = "1.0.7" dependencies = [ + "alloy-consensus", "alloy-eips", "alloy-node-bindings", "alloy-primitives", @@ -8291,6 +8300,7 @@ dependencies = [ name = "reth-provider" version = "1.0.7" dependencies = [ + "alloy-consensus", "alloy-eips", "alloy-primitives", "alloy-rpc-types-engine", @@ -8918,6 +8928,7 @@ dependencies = [ name = "reth-testing-utils" version = "1.0.7" dependencies = [ + "alloy-consensus", "alloy-eips", "alloy-genesis", "alloy-primitives", @@ -8953,6 +8964,7 @@ dependencies = [ name = "reth-transaction-pool" version = "1.0.7" dependencies = [ + "alloy-consensus", "alloy-eips", "alloy-primitives", "alloy-rlp", diff --git a/bin/reth/Cargo.toml b/bin/reth/Cargo.toml index a78f0f9586955..19d7d18f1c086 100644 --- a/bin/reth/Cargo.toml +++ b/bin/reth/Cargo.toml @@ -68,6 +68,7 @@ reth-prune.workspace = true # crypto alloy-rlp.workspace = true alloy-rpc-types = { workspace = true, features = ["engine"] } +alloy-consensus.workspace = true # tracing tracing.workspace = true diff --git a/bin/reth/src/commands/debug_cmd/build_block.rs b/bin/reth/src/commands/debug_cmd/build_block.rs index ab613dad6a713..a8589a74ec8d4 100644 --- a/bin/reth/src/commands/debug_cmd/build_block.rs +++ b/bin/reth/src/commands/debug_cmd/build_block.rs @@ -1,4 +1,5 @@ //! Command for debugging block building. +use alloy_consensus::TxEip4844; use alloy_rlp::Decodable; use alloy_rpc_types::engine::{BlobsBundleV1, PayloadAttributes}; use clap::Parser; @@ -25,7 +26,7 @@ use reth_payload_builder::database::CachedReads; use reth_primitives::{ revm_primitives::KzgSettings, Address, BlobTransaction, BlobTransactionSidecar, Bytes, PooledTransactionsElement, SealedBlock, SealedBlockWithSenders, Transaction, TransactionSigned, - TxEip4844, B256, U256, + B256, U256, }; use reth_provider::{ providers::BlockchainProvider, BlockHashReader, BlockReader, BlockWriter, ChainSpecProvider, diff --git a/crates/blockchain-tree/Cargo.toml b/crates/blockchain-tree/Cargo.toml index 6cf22f1c8c93e..bc7b340baaba4 100644 --- a/crates/blockchain-tree/Cargo.toml +++ b/crates/blockchain-tree/Cargo.toml @@ -59,6 +59,7 @@ reth-evm-ethereum.workspace = true parking_lot.workspace = true assert_matches.workspace = true alloy-genesis.workspace = true +alloy-consensus.workspace = true [features] test-utils = [] diff --git a/crates/blockchain-tree/src/blockchain_tree.rs b/crates/blockchain-tree/src/blockchain_tree.rs index ed702bff1a0f4..4efbc740d6824 100644 --- a/crates/blockchain-tree/src/blockchain_tree.rs +++ b/crates/blockchain-tree/src/blockchain_tree.rs @@ -1375,6 +1375,7 @@ where #[cfg(test)] mod tests { use super::*; + use alloy_consensus::TxEip1559; use alloy_genesis::{Genesis, GenesisAccount}; use alloy_primitives::{keccak256, Address, Sealable, B256}; use assert_matches::assert_matches; @@ -1390,7 +1391,7 @@ mod tests { proofs::{calculate_receipt_root, calculate_transaction_root}, revm_primitives::AccountInfo, Account, BlockBody, Header, Signature, Transaction, TransactionSigned, - TransactionSignedEcRecovered, TxEip1559, Withdrawals, + TransactionSignedEcRecovered, Withdrawals, }; use reth_provider::{ test_utils::{ diff --git a/crates/chain-state/Cargo.toml b/crates/chain-state/Cargo.toml 
index 91f68b761514e..63016918c5cba 100644 --- a/crates/chain-state/Cargo.toml +++ b/crates/chain-state/Cargo.toml @@ -42,12 +42,14 @@ pin-project.workspace = true # optional deps for test-utils alloy-signer = { workspace = true, optional = true } alloy-signer-local = { workspace = true, optional = true } +alloy-consensus = { workspace = true, optional = true } rand = { workspace = true, optional = true } revm = { workspace = true, optional = true } [dev-dependencies] alloy-signer.workspace = true alloy-signer-local.workspace = true +alloy-consensus.workspace = true rand.workspace = true revm.workspace = true @@ -55,6 +57,7 @@ revm.workspace = true test-utils = [ "alloy-signer", "alloy-signer-local", + "alloy-consensus", "rand", "revm" ] diff --git a/crates/chain-state/src/test_utils.rs b/crates/chain-state/src/test_utils.rs index 6fa072f829403..f2446eb151fce 100644 --- a/crates/chain-state/src/test_utils.rs +++ b/crates/chain-state/src/test_utils.rs @@ -2,6 +2,7 @@ use crate::{ in_memory::ExecutedBlock, CanonStateNotification, CanonStateNotifications, CanonStateSubscriptions, }; +use alloy_consensus::TxEip1559; use alloy_primitives::{Address, BlockNumber, B256, U256}; use alloy_signer::SignerSync; use alloy_signer_local::PrivateKeySigner; @@ -13,7 +14,7 @@ use reth_primitives::{ constants::{EIP1559_INITIAL_BASE_FEE, EMPTY_ROOT_HASH}, proofs::{calculate_receipt_root, calculate_transaction_root, calculate_withdrawals_root}, BlockBody, Header, Receipt, Receipts, Requests, SealedBlock, SealedBlockWithSenders, - SealedHeader, Transaction, TransactionSigned, TransactionSignedEcRecovered, TxEip1559, + SealedHeader, Transaction, TransactionSigned, TransactionSignedEcRecovered, }; use reth_trie::{root::state_root_unhashed, updates::TrieUpdates, HashedPostState}; use revm::{db::BundleState, primitives::AccountInfo}; diff --git a/crates/consensus/common/Cargo.toml b/crates/consensus/common/Cargo.toml index c9fea9789b86f..df037fa323a4d 100644 --- a/crates/consensus/common/Cargo.toml +++ b/crates/consensus/common/Cargo.toml @@ -24,3 +24,5 @@ revm-primitives.workspace = true reth-storage-api.workspace = true rand.workspace = true mockall = "0.12" + +alloy-consensus.workspace = true diff --git a/crates/consensus/common/src/validation.rs b/crates/consensus/common/src/validation.rs index 66a953a508708..da2fe94069e40 100644 --- a/crates/consensus/common/src/validation.rs +++ b/crates/consensus/common/src/validation.rs @@ -302,6 +302,7 @@ pub fn validate_against_parent_4844( #[cfg(test)] mod tests { use super::*; + use alloy_consensus::TxEip4844; use alloy_primitives::{ hex_literal::hex, Address, BlockHash, BlockNumber, Bytes, Parity, Sealable, U256, }; @@ -310,7 +311,7 @@ mod tests { use reth_chainspec::ChainSpecBuilder; use reth_primitives::{ proofs, Account, BlockBody, BlockHashOrNumber, Signature, Transaction, TransactionSigned, - TxEip4844, Withdrawal, Withdrawals, + Withdrawal, Withdrawals, }; use reth_storage_api::{ errors::provider::ProviderResult, AccountReader, HeaderProvider, WithdrawalsProvider, diff --git a/crates/ethereum/evm/Cargo.toml b/crates/ethereum/evm/Cargo.toml index 8a8e23a4cd116..61ce0a23b904b 100644 --- a/crates/ethereum/evm/Cargo.toml +++ b/crates/ethereum/evm/Cargo.toml @@ -36,6 +36,7 @@ reth-primitives = { workspace = true, features = ["secp256k1"] } secp256k1.workspace = true serde_json.workspace = true alloy-genesis.workspace = true +alloy-consensus.workspace = true [features] default = ["std"] diff --git a/crates/ethereum/evm/src/execute.rs b/crates/ethereum/evm/src/execute.rs 
index 3e574147f2723..7af3f2e1d4812 100644 --- a/crates/ethereum/evm/src/execute.rs +++ b/crates/ethereum/evm/src/execute.rs @@ -472,6 +472,7 @@ where #[cfg(test)] mod tests { use super::*; + use alloy_consensus::TxLegacy; use alloy_eips::{ eip2935::{HISTORY_STORAGE_ADDRESS, HISTORY_STORAGE_CODE}, eip4788::{BEACON_ROOTS_ADDRESS, BEACON_ROOTS_CODE, SYSTEM_ADDRESS}, @@ -481,7 +482,7 @@ mod tests { use reth_chainspec::{ChainSpecBuilder, ForkCondition}; use reth_primitives::{ constants::{EMPTY_ROOT_HASH, ETH_TO_WEI}, - public_key_to_address, Account, Block, BlockBody, Transaction, TxLegacy, + public_key_to_address, Account, Block, BlockBody, Transaction, }; use reth_revm::{ database::StateProviderDatabase, test_utils::StateProviderTest, TransitionState, diff --git a/crates/exex/exex/Cargo.toml b/crates/exex/exex/Cargo.toml index 74f62904a67e9..cbb2214192b4f 100644 --- a/crates/exex/exex/Cargo.toml +++ b/crates/exex/exex/Cargo.toml @@ -57,6 +57,7 @@ reth-provider = { workspace = true, features = ["test-utils"] } reth-testing-utils.workspace = true alloy-genesis.workspace = true +alloy-consensus.workspace = true secp256k1.workspace = true tempfile.workspace = true diff --git a/crates/exex/exex/src/backfill/test_utils.rs b/crates/exex/exex/src/backfill/test_utils.rs index 8fd6071e8fe58..5969261a42a4b 100644 --- a/crates/exex/exex/src/backfill/test_utils.rs +++ b/crates/exex/exex/src/backfill/test_utils.rs @@ -1,5 +1,6 @@ use std::sync::Arc; +use alloy_consensus::TxEip2930; use alloy_genesis::{Genesis, GenesisAccount}; use alloy_primitives::{b256, Address, TxKind, U256}; use eyre::OptionExt; @@ -10,7 +11,7 @@ use reth_evm::execute::{ use reth_evm_ethereum::execute::EthExecutorProvider; use reth_primitives::{ constants::ETH_TO_WEI, Block, BlockBody, BlockWithSenders, Header, Receipt, Requests, - SealedBlockWithSenders, Transaction, TxEip2930, + SealedBlockWithSenders, Transaction, }; use reth_provider::{ providers::ProviderNodeTypes, BlockWriter as _, ExecutionOutcome, LatestStateProviderRef, diff --git a/crates/net/eth-wire-types/Cargo.toml b/crates/net/eth-wire-types/Cargo.toml index 1452a683b4780..6ce51786f282a 100644 --- a/crates/net/eth-wire-types/Cargo.toml +++ b/crates/net/eth-wire-types/Cargo.toml @@ -41,6 +41,7 @@ arbitrary = { workspace = true, features = ["derive"] } proptest.workspace = true proptest-arbitrary-interop.workspace = true rand.workspace = true +alloy-consensus.workspace = true [features] arbitrary = [ diff --git a/crates/net/eth-wire-types/src/blocks.rs b/crates/net/eth-wire-types/src/blocks.rs index 2eb33852207e4..7cea39a276d21 100644 --- a/crates/net/eth-wire-types/src/blocks.rs +++ b/crates/net/eth-wire-types/src/blocks.rs @@ -112,11 +112,12 @@ mod tests { message::RequestPair, BlockBodies, BlockHeaders, GetBlockBodies, GetBlockHeaders, HeadersDirection, }; + use alloy_consensus::TxLegacy; use alloy_primitives::{hex, TxKind, U256}; use alloy_rlp::{Decodable, Encodable}; use reth_primitives::{ alloy_primitives::Parity, BlockHashOrNumber, Header, Signature, Transaction, - TransactionSigned, TxLegacy, + TransactionSigned, }; use std::str::FromStr; diff --git a/crates/net/eth-wire-types/src/transactions.rs b/crates/net/eth-wire-types/src/transactions.rs index d750931ce75d6..ae3975d1b799f 100644 --- a/crates/net/eth-wire-types/src/transactions.rs +++ b/crates/net/eth-wire-types/src/transactions.rs @@ -77,12 +77,13 @@ impl FromIterator for PooledTransactions { #[cfg(test)] mod tests { use crate::{message::RequestPair, GetPooledTransactions, PooledTransactions}; + use 
alloy_consensus::{TxEip1559, TxLegacy}; use alloy_primitives::{hex, TxKind, U256}; use alloy_rlp::{Decodable, Encodable}; use reth_chainspec::MIN_TRANSACTION_GAS; use reth_primitives::{ alloy_primitives::Parity, PooledTransactionsElement, Signature, Transaction, - TransactionSigned, TxEip1559, TxLegacy, + TransactionSigned, }; use std::str::FromStr; diff --git a/crates/net/network/Cargo.toml b/crates/net/network/Cargo.toml index 23407678dc9e9..1d3af517af3f7 100644 --- a/crates/net/network/Cargo.toml +++ b/crates/net/network/Cargo.toml @@ -87,6 +87,7 @@ reth-transaction-pool = { workspace = true, features = ["test-utils"] } # alloy deps for testing against nodes alloy-node-bindings.workspace = true alloy-provider= { workspace = true, features = ["admin-api"] } +alloy-consensus.workspace = true # misc serial_test.workspace = true diff --git a/crates/net/network/tests/it/requests.rs b/crates/net/network/tests/it/requests.rs index 5c32ce46fa837..42802046daa4a 100644 --- a/crates/net/network/tests/it/requests.rs +++ b/crates/net/network/tests/it/requests.rs @@ -3,6 +3,7 @@ use std::sync::Arc; +use alloy_consensus::TxEip2930; use alloy_primitives::{Bytes, TxKind, U256}; use rand::Rng; use reth_eth_wire::HeadersDirection; @@ -16,7 +17,7 @@ use reth_network_p2p::{ headers::client::{HeadersClient, HeadersRequest}, }; use reth_primitives::{ - alloy_primitives::Parity, Block, Header, Signature, Transaction, TransactionSigned, TxEip2930, + alloy_primitives::Parity, Block, Header, Signature, Transaction, TransactionSigned, }; use reth_provider::test_utils::MockEthProvider; diff --git a/crates/net/network/tests/it/txgossip.rs b/crates/net/network/tests/it/txgossip.rs index 56b71e45a2046..70ac67bb5bf8a 100644 --- a/crates/net/network/tests/it/txgossip.rs +++ b/crates/net/network/tests/it/txgossip.rs @@ -2,12 +2,13 @@ use std::sync::Arc; +use alloy_consensus::TxLegacy; use alloy_primitives::U256; use futures::StreamExt; use rand::thread_rng; use reth_network::{test_utils::Testnet, NetworkEvent, NetworkEventListenerProvider}; use reth_network_api::PeersInfo; -use reth_primitives::{Signature, TransactionSigned, TxLegacy}; +use reth_primitives::{Signature, TransactionSigned}; use reth_provider::test_utils::{ExtendedAccount, MockEthProvider}; use reth_transaction_pool::{test_utils::TransactionGenerator, PoolTransaction, TransactionPool}; diff --git a/crates/optimism/evm/Cargo.toml b/crates/optimism/evm/Cargo.toml index 567e8c0020dd6..b0b66cd4235e7 100644 --- a/crates/optimism/evm/Cargo.toml +++ b/crates/optimism/evm/Cargo.toml @@ -41,6 +41,7 @@ tracing.workspace = true reth-revm = { workspace = true, features = ["test-utils"] } reth-optimism-chainspec.workspace = true alloy-genesis.workspace = true +alloy-consensus.workspace = true [features] optimism = [ diff --git a/crates/optimism/evm/src/execute.rs b/crates/optimism/evm/src/execute.rs index ceae0314fc6ea..d502abe6abf60 100644 --- a/crates/optimism/evm/src/execute.rs +++ b/crates/optimism/evm/src/execute.rs @@ -464,12 +464,12 @@ where mod tests { use super::*; use crate::OpChainSpec; + use alloy_consensus::TxEip1559; use alloy_primitives::{b256, Address, StorageKey, StorageValue}; use reth_chainspec::{ChainSpecBuilder, MIN_TRANSACTION_GAS}; use reth_optimism_chainspec::optimism_deposit_tx_signature; use reth_primitives::{ - Account, Block, BlockBody, Signature, Transaction, TransactionSigned, TxEip1559, - BASE_MAINNET, + Account, Block, BlockBody, Signature, Transaction, TransactionSigned, BASE_MAINNET, }; use reth_revm::{ database::StateProviderDatabase, 
test_utils::StateProviderTest, L1_BLOCK_CONTRACT, diff --git a/crates/primitives/benches/validate_blob_tx.rs b/crates/primitives/benches/validate_blob_tx.rs index 622168bb35f84..61fe161f2f74f 100644 --- a/crates/primitives/benches/validate_blob_tx.rs +++ b/crates/primitives/benches/validate_blob_tx.rs @@ -1,5 +1,6 @@ #![allow(missing_docs)] +use alloy_consensus::TxEip4844; use alloy_eips::eip4844::env_settings::EnvKzgSettings; use alloy_primitives::hex; use criterion::{ @@ -11,7 +12,7 @@ use proptest::{ test_runner::{RngAlgorithm, TestRng, TestRunner}, }; use proptest_arbitrary_interop::arb; -use reth_primitives::{BlobTransactionSidecar, TxEip4844}; +use reth_primitives::BlobTransactionSidecar; use revm_primitives::MAX_BLOB_NUMBER_PER_BLOCK; // constant seed to use for the rng diff --git a/crates/primitives/src/alloy_compat.rs b/crates/primitives/src/alloy_compat.rs index 8f8ec1b397ca7..867aceecef0b1 100644 --- a/crates/primitives/src/alloy_compat.rs +++ b/crates/primitives/src/alloy_compat.rs @@ -2,10 +2,10 @@ use crate::{ constants::EMPTY_TRANSACTIONS, transaction::extract_chain_id, Block, BlockBody, Signature, - Transaction, TransactionSigned, TransactionSignedEcRecovered, TransactionSignedNoHash, - TxEip1559, TxEip2930, TxEip4844, TxLegacy, TxType, + Transaction, TransactionSigned, TransactionSignedEcRecovered, TransactionSignedNoHash, TxType, }; use alloc::{string::ToString, vec::Vec}; +use alloy_consensus::{TxEip1559, TxEip2930, TxEip4844, TxLegacy}; use alloy_primitives::{Parity, TxKind}; use alloy_rlp::Error as RlpError; use alloy_serde::WithOtherFields; diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index 6aadae2082deb..cdcf642c05ae4 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -63,9 +63,9 @@ pub use transaction::BlobTransactionValidationError; pub use transaction::{ util::secp256k1::{public_key_to_address, recover_signer_unchecked, sign_message}, IntoRecoveredTransaction, InvalidTransactionError, Signature, Transaction, TransactionMeta, - TransactionSigned, TransactionSignedEcRecovered, TransactionSignedNoHash, TxEip1559, TxEip2930, - TxEip4844, TxEip7702, TxHashOrNumber, TxLegacy, TxType, EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, - EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID, LEGACY_TX_TYPE_ID, + TransactionSigned, TransactionSignedEcRecovered, TransactionSignedNoHash, TxHashOrNumber, + TxType, EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID, + LEGACY_TX_TYPE_ID, }; // Re-exports diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index 01c290a0f5b37..f1dae00fece23 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -4,7 +4,7 @@ use crate::{keccak256, Address, BlockHashOrNumber, Bytes, TxHash, B256, U256}; use alloy_eips::eip7702::SignedAuthorization; use alloy_primitives::TxKind; -use alloy_consensus::SignableTransaction; +use alloy_consensus::{SignableTransaction, TxEip1559, TxEip2930, TxEip4844, TxEip7702, TxLegacy}; use alloy_eips::eip2930::AccessList; use alloy_primitives::Parity; use alloy_rlp::{ @@ -18,8 +18,6 @@ use rayon::prelude::{IntoParallelIterator, ParallelIterator}; use serde::{Deserialize, Serialize}; use signature::{decode_with_eip155_chain_id, with_eip155_parity}; -pub use alloy_consensus::{TxEip1559, TxEip2930, TxEip4844, TxEip7702, TxLegacy}; - pub use error::{ InvalidTransactionError, TransactionConversionError, TryFromRecoveredTransactionError, }; diff --git 
a/crates/primitives/src/transaction/pooled.rs b/crates/primitives/src/transaction/pooled.rs index 0a068d8c49615..78ad756923c04 100644 --- a/crates/primitives/src/transaction/pooled.rs +++ b/crates/primitives/src/transaction/pooled.rs @@ -8,11 +8,13 @@ use super::{ }; use crate::{ Address, BlobTransaction, BlobTransactionSidecar, Bytes, Signature, Transaction, - TransactionSigned, TransactionSignedEcRecovered, TxEip1559, TxEip2930, TxEip4844, TxHash, - TxLegacy, B256, EIP4844_TX_TYPE_ID, + TransactionSigned, TransactionSignedEcRecovered, TxHash, B256, EIP4844_TX_TYPE_ID, }; use alloc::vec::Vec; -use alloy_consensus::{SignableTransaction, TxEip4844WithSidecar}; +use alloy_consensus::{ + transaction::{TxEip1559, TxEip2930, TxEip4844, TxLegacy}, + SignableTransaction, TxEip4844WithSidecar, +}; use alloy_rlp::{Decodable, Encodable, Error as RlpError, Header, EMPTY_LIST_CODE}; use bytes::Buf; use derive_more::{AsRef, Deref}; diff --git a/crates/primitives/src/transaction/sidecar.rs b/crates/primitives/src/transaction/sidecar.rs index c0242acf97479..e486fa670b6d4 100644 --- a/crates/primitives/src/transaction/sidecar.rs +++ b/crates/primitives/src/transaction/sidecar.rs @@ -1,9 +1,7 @@ #![cfg_attr(docsrs, doc(cfg(feature = "c-kzg")))] -use crate::{ - keccak256, Signature, Transaction, TransactionSigned, TxEip4844, TxHash, EIP4844_TX_TYPE_ID, -}; -use alloy_consensus::TxEip4844WithSidecar; +use crate::{keccak256, Signature, Transaction, TransactionSigned, TxHash, EIP4844_TX_TYPE_ID}; +use alloy_consensus::{transaction::TxEip4844, TxEip4844WithSidecar}; use alloy_rlp::{Decodable, Error as RlpError, Header}; use serde::{Deserialize, Serialize}; diff --git a/crates/primitives/src/transaction/tx_type.rs b/crates/primitives/src/transaction/tx_type.rs index c4ddbb41cac4c..7331ba1ed93f8 100644 --- a/crates/primitives/src/transaction/tx_type.rs +++ b/crates/primitives/src/transaction/tx_type.rs @@ -23,23 +23,23 @@ pub(crate) const COMPACT_IDENTIFIER_EIP1559: usize = 2; #[cfg(any(test, feature = "reth-codec"))] pub(crate) const COMPACT_EXTENDED_IDENTIFIER_FLAG: usize = 3; -/// Identifier for legacy transaction, however [`TxLegacy`](crate::TxLegacy) this is technically not -/// typed. +/// Identifier for legacy transaction, however [`TxLegacy`](alloy_consensus::TxLegacy) this is +/// technically not typed. pub const LEGACY_TX_TYPE_ID: u8 = 0; -/// Identifier for [`TxEip2930`](crate::TxEip2930) transaction. +/// Identifier for [`TxEip2930`](alloy_consensus::TxEip2930) transaction. pub const EIP2930_TX_TYPE_ID: u8 = 1; -/// Identifier for [`TxEip1559`](crate::TxEip1559) transaction. +/// Identifier for [`TxEip1559`](alloy_consensus::TxEip1559) transaction. pub const EIP1559_TX_TYPE_ID: u8 = 2; -/// Identifier for [`TxEip4844`](crate::TxEip4844) transaction. +/// Identifier for [`TxEip4844`](alloy_consensus::TxEip4844) transaction. pub const EIP4844_TX_TYPE_ID: u8 = 3; -/// Identifier for [`TxEip7702`](crate::TxEip7702) transaction. +/// Identifier for [`TxEip7702`](alloy_consensus::TxEip7702) transaction. pub const EIP7702_TX_TYPE_ID: u8 = 4; -/// Identifier for [`TxDeposit`](crate::TxDeposit) transaction. +/// Identifier for [`TxDeposit`](op_alloy_consensus::TxDeposit) transaction. 
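// Editor's illustrative sketch, not a hunk of this patch: the doc links above now point at
// `alloy_consensus` because PATCH 02/84 stops re-exporting the concrete transaction structs
// from `reth_primitives`. Downstream code imports them directly, for example:
use alloy_consensus::{TxEip1559, TxEip2930, TxEip4844, TxEip7702, TxLegacy};
use reth_primitives::{Transaction, TransactionSigned, TxType};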
#[cfg(feature = "optimism")] pub const DEPOSIT_TX_TYPE_ID: u8 = 126; diff --git a/crates/rpc/rpc/Cargo.toml b/crates/rpc/rpc/Cargo.toml index 49a1e512ed2f7..9593efd4b8a33 100644 --- a/crates/rpc/rpc/Cargo.toml +++ b/crates/rpc/rpc/Cargo.toml @@ -90,6 +90,8 @@ reth-testing-utils.workspace = true reth-transaction-pool = { workspace = true, features = ["test-utils"] } reth-provider = { workspace = true, features = ["test-utils"] } +alloy-consensus.workspace = true + jsonrpsee-types.workspace = true jsonrpsee = { workspace = true, features = ["client"] } diff --git a/crates/rpc/rpc/src/eth/core.rs b/crates/rpc/rpc/src/eth/core.rs index 6b258cac44678..47d58c7b0ed65 100644 --- a/crates/rpc/rpc/src/eth/core.rs +++ b/crates/rpc/rpc/src/eth/core.rs @@ -471,7 +471,7 @@ mod tests { if let Some(base_fee_per_gas) = header.base_fee_per_gas { let transaction = TransactionSigned { transaction: reth_primitives::Transaction::Eip1559( - reth_primitives::TxEip1559 { + alloy_consensus::TxEip1559 { max_priority_fee_per_gas: random_fee, max_fee_per_gas: random_fee + base_fee_per_gas, ..Default::default() diff --git a/crates/storage/codecs/src/alloy/transaction/eip1559.rs b/crates/storage/codecs/src/alloy/transaction/eip1559.rs index a0889492589d1..d2113a736ed24 100644 --- a/crates/storage/codecs/src/alloy/transaction/eip1559.rs +++ b/crates/storage/codecs/src/alloy/transaction/eip1559.rs @@ -11,7 +11,7 @@ use serde::{Deserialize, Serialize}; /// By deriving `Compact` here, any future changes or enhancements to the `Compact` derive /// will automatically apply to this type. /// -/// Notice: Make sure this struct is 1:1 with [`alloy_consensus::transaction::TxEip1559`] +/// Notice: Make sure this struct is 1:1 with [`alloy_consensus::TxEip1559`] #[derive(Debug, Clone, PartialEq, Eq, Hash, Compact, Default, Serialize, Deserialize)] #[cfg_attr(test, derive(arbitrary::Arbitrary))] #[cfg_attr(test, crate::add_arbitrary_tests(compact))] diff --git a/crates/storage/codecs/src/alloy/transaction/eip2930.rs b/crates/storage/codecs/src/alloy/transaction/eip2930.rs index 33b58dfff739c..b8f24db747e7b 100644 --- a/crates/storage/codecs/src/alloy/transaction/eip2930.rs +++ b/crates/storage/codecs/src/alloy/transaction/eip2930.rs @@ -1,5 +1,5 @@ use crate::Compact; -use alloy_consensus::transaction::TxEip2930 as AlloyTxEip2930; +use alloy_consensus::TxEip2930 as AlloyTxEip2930; use alloy_eips::eip2930::AccessList; use alloy_primitives::{Bytes, ChainId, TxKind, U256}; use reth_codecs_derive::add_arbitrary_tests; @@ -12,7 +12,7 @@ use serde::{Deserialize, Serialize}; /// By deriving `Compact` here, any future changes or enhancements to the `Compact` derive /// will automatically apply to this type. 
/// -/// Notice: Make sure this struct is 1:1 with [`alloy_consensus::transaction::TxEip2930`] +/// Notice: Make sure this struct is 1:1 with [`alloy_consensus::TxEip2930`] #[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Serialize, Deserialize, Compact)] #[cfg_attr(test, derive(arbitrary::Arbitrary))] #[add_arbitrary_tests(compact)] diff --git a/crates/storage/codecs/src/alloy/transaction/eip4844.rs b/crates/storage/codecs/src/alloy/transaction/eip4844.rs index 15a5f443c46d1..e82b9affff476 100644 --- a/crates/storage/codecs/src/alloy/transaction/eip4844.rs +++ b/crates/storage/codecs/src/alloy/transaction/eip4844.rs @@ -1,6 +1,6 @@ use crate::{Compact, CompactPlaceholder}; use alloc::vec::Vec; -use alloy_consensus::transaction::TxEip4844 as AlloyTxEip4844; +use alloy_consensus::TxEip4844 as AlloyTxEip4844; use alloy_eips::eip2930::AccessList; use alloy_primitives::{Address, Bytes, ChainId, B256, U256}; use reth_codecs_derive::add_arbitrary_tests; @@ -13,7 +13,7 @@ use serde::{Deserialize, Serialize}; /// By deriving `Compact` here, any future changes or enhancements to the `Compact` derive /// will automatically apply to this type. /// -/// Notice: Make sure this struct is 1:1 with [`alloy_consensus::transaction::TxEip4844`] +/// Notice: Make sure this struct is 1:1 with [`alloy_consensus::TxEip4844`] #[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Serialize, Deserialize, Compact)] #[cfg_attr(test, derive(arbitrary::Arbitrary))] #[add_arbitrary_tests(compact)] diff --git a/crates/storage/codecs/src/alloy/transaction/eip7702.rs b/crates/storage/codecs/src/alloy/transaction/eip7702.rs index a44e97ee1d52c..5f34ac1c253e2 100644 --- a/crates/storage/codecs/src/alloy/transaction/eip7702.rs +++ b/crates/storage/codecs/src/alloy/transaction/eip7702.rs @@ -1,6 +1,6 @@ use crate::Compact; use alloc::vec::Vec; -use alloy_consensus::transaction::TxEip7702 as AlloyTxEip7702; +use alloy_consensus::TxEip7702 as AlloyTxEip7702; use alloy_eips::{eip2930::AccessList, eip7702::SignedAuthorization}; use alloy_primitives::{Address, Bytes, ChainId, U256}; use reth_codecs_derive::add_arbitrary_tests; @@ -13,7 +13,7 @@ use serde::{Deserialize, Serialize}; /// By deriving `Compact` here, any future changes or enhancements to the `Compact` derive /// will automatically apply to this type. 
/// -/// Notice: Make sure this struct is 1:1 with [`alloy_consensus::transaction::TxEip7702`] +/// Notice: Make sure this struct is 1:1 with [`alloy_consensus::TxEip7702`] #[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Serialize, Deserialize, Compact)] #[cfg_attr(test, derive(arbitrary::Arbitrary))] #[add_arbitrary_tests(compact)] diff --git a/crates/storage/provider/Cargo.toml b/crates/storage/provider/Cargo.toml index 7524fde6a2d79..048353452c0cf 100644 --- a/crates/storage/provider/Cargo.toml +++ b/crates/storage/provider/Cargo.toml @@ -64,6 +64,7 @@ strum.workspace = true # test-utils once_cell = { workspace = true, optional = true } reth-ethereum-engine-primitives = { workspace = true, optional = true } +alloy-consensus = { workspace = true, optional = true } # parallel utils rayon.workspace = true @@ -81,6 +82,8 @@ rand.workspace = true once_cell.workspace = true eyre.workspace = true +alloy-consensus.workspace = true + [features] optimism = ["reth-primitives/optimism", "reth-execution-types/optimism", "reth-optimism-primitives"] serde = ["reth-execution-types/serde"] @@ -91,4 +94,5 @@ test-utils = [ "reth-chain-state/test-utils", "once_cell", "reth-ethereum-engine-primitives", + "alloy-consensus", ] diff --git a/crates/storage/provider/src/test_utils/blocks.rs b/crates/storage/provider/src/test_utils/blocks.rs index 237fc8e3487e8..352f5314af9fc 100644 --- a/crates/storage/provider/src/test_utils/blocks.rs +++ b/crates/storage/provider/src/test_utils/blocks.rs @@ -1,5 +1,6 @@ //! Dummy blocks and data for tests use crate::{DatabaseProviderRW, ExecutionOutcome}; +use alloy_consensus::TxLegacy; use alloy_primitives::{Log, Parity, Sealable, TxKind}; use once_cell::sync::Lazy; use reth_db::tables; @@ -7,7 +8,7 @@ use reth_db_api::{database::Database, models::StoredBlockBodyIndices}; use reth_primitives::{ alloy_primitives, b256, hex_literal::hex, Account, Address, BlockBody, BlockNumber, Bytes, Header, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, Signature, Transaction, - TransactionSigned, TxLegacy, TxType, Withdrawal, Withdrawals, B256, U256, + TransactionSigned, TxType, Withdrawal, Withdrawals, B256, U256, }; use reth_trie::root::{state_root_unhashed, storage_root_unhashed}; use revm::{ diff --git a/crates/transaction-pool/Cargo.toml b/crates/transaction-pool/Cargo.toml index a0720c037ace3..41abbb4b6b7e1 100644 --- a/crates/transaction-pool/Cargo.toml +++ b/crates/transaction-pool/Cargo.toml @@ -54,6 +54,7 @@ rand = { workspace = true, optional = true } paste = { workspace = true, optional = true } proptest = { workspace = true, optional = true } proptest-arbitrary-interop = { workspace = true, optional = true } +alloy-consensus = { workspace = true, optional = true } [dev-dependencies] reth-primitives = { workspace = true, features = ["arbitrary"] } @@ -68,11 +69,12 @@ pprof = { workspace = true, features = ["criterion", "flamegraph"] } assert_matches.workspace = true tempfile.workspace = true serde_json.workspace = true +alloy-consensus.workspace = true [features] default = ["serde"] serde = ["dep:serde"] -test-utils = ["rand", "paste", "serde"] +test-utils = ["rand", "paste", "serde", "alloy-consensus"] arbitrary = ["proptest", "reth-primitives/arbitrary", "proptest-arbitrary-interop"] [[bench]] diff --git a/crates/transaction-pool/src/test_utils/gen.rs b/crates/transaction-pool/src/test_utils/gen.rs index f87083f16f369..e5fceb9150ed7 100644 --- a/crates/transaction-pool/src/test_utils/gen.rs +++ b/crates/transaction-pool/src/test_utils/gen.rs @@ -1,11 +1,11 
@@ use crate::EthPooledTransaction; +use alloy_consensus::{TxEip1559, TxEip4844, TxLegacy}; use alloy_eips::eip2930::AccessList; use alloy_primitives::{Address, TxKind, B256, U256}; use rand::Rng; use reth_chainspec::MAINNET; use reth_primitives::{ constants::MIN_PROTOCOL_BASE_FEE, sign_message, Bytes, Transaction, TransactionSigned, - TxEip1559, TxEip4844, TxLegacy, }; /// A generator for transactions for testing purposes. diff --git a/crates/transaction-pool/src/test_utils/mock.rs b/crates/transaction-pool/src/test_utils/mock.rs index 604f4ffefbfcc..eb05a5272fe05 100644 --- a/crates/transaction-pool/src/test_utils/mock.rs +++ b/crates/transaction-pool/src/test_utils/mock.rs @@ -7,6 +7,7 @@ use crate::{ CoinbaseTipOrdering, EthBlobTransactionSidecar, EthPoolTransaction, PoolTransaction, ValidPoolTransaction, }; +use alloy_consensus::{TxEip1559, TxEip2930, TxEip4844, TxLegacy}; use alloy_eips::eip2930::AccessList; use alloy_primitives::{Address, Bytes, ChainId, TxHash, TxKind, B256, U256}; use paste::paste; @@ -18,9 +19,8 @@ use reth_primitives::{ constants::{eip4844::DATA_GAS_PER_BLOB, MIN_PROTOCOL_BASE_FEE}, transaction::TryFromRecoveredTransactionError, BlobTransactionSidecar, BlobTransactionValidationError, PooledTransactionsElementEcRecovered, - Signature, Transaction, TransactionSigned, TransactionSignedEcRecovered, TxEip1559, TxEip2930, - TxEip4844, TxLegacy, TxType, EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, - LEGACY_TX_TYPE_ID, + Signature, Transaction, TransactionSigned, TransactionSignedEcRecovered, TxType, + EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, LEGACY_TX_TYPE_ID, }; use std::{ops::Range, sync::Arc, time::Instant, vec::IntoIter}; diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index cef776aab3205..05bc6140b1c0b 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -1392,10 +1392,8 @@ impl Stream for NewSubpoolTransactionStream { #[cfg(test)] mod tests { use super::*; - use reth_primitives::{ - constants::eip4844::DATA_GAS_PER_BLOB, Signature, TransactionSigned, TxEip1559, TxEip2930, - TxEip4844, TxEip7702, TxLegacy, - }; + use alloy_consensus::{TxEip1559, TxEip2930, TxEip4844, TxEip7702, TxLegacy}; + use reth_primitives::{constants::eip4844::DATA_GAS_PER_BLOB, Signature, TransactionSigned}; #[test] fn test_pool_size_invariants() { diff --git a/testing/testing-utils/Cargo.toml b/testing/testing-utils/Cargo.toml index af592e294c847..49a59ecf6ae34 100644 --- a/testing/testing-utils/Cargo.toml +++ b/testing/testing-utils/Cargo.toml @@ -17,6 +17,7 @@ reth-primitives = { workspace = true, features = ["secp256k1"] } alloy-eips.workspace = true alloy-genesis.workspace = true alloy-primitives.workspace = true +alloy-consensus.workspace = true rand.workspace = true secp256k1 = { workspace = true, features = ["rand"] } diff --git a/testing/testing-utils/src/generators.rs b/testing/testing-utils/src/generators.rs index 46930a5dd9ef4..c1c23c9986706 100644 --- a/testing/testing-utils/src/generators.rs +++ b/testing/testing-utils/src/generators.rs @@ -1,5 +1,6 @@ //! Generators for different data structures like block headers, block bodies and ranges of those. 
+use alloy_consensus::TxLegacy; use alloy_eips::{ eip6110::DepositRequest, eip7002::WithdrawalRequest, eip7251::ConsolidationRequest, }; @@ -10,7 +11,7 @@ use rand::{ }; use reth_primitives::{ proofs, sign_message, Account, BlockBody, Header, Log, Receipt, Request, Requests, SealedBlock, - SealedHeader, StorageEntry, Transaction, TransactionSigned, TxLegacy, Withdrawal, Withdrawals, + SealedHeader, StorageEntry, Transaction, TransactionSigned, Withdrawal, Withdrawals, }; use secp256k1::{Keypair, Secp256k1}; use std::{ @@ -497,9 +498,10 @@ pub fn random_request(rng: &mut R) -> Request { #[cfg(test)] mod tests { use super::*; + use alloy_consensus::TxEip1559; use alloy_eips::eip2930::AccessList; use alloy_primitives::Parity; - use reth_primitives::{hex, public_key_to_address, Signature, TxEip1559}; + use reth_primitives::{hex, public_key_to_address, Signature}; use std::str::FromStr; #[test] From 1e0a35e744633e605cf6ddd24f4b1c9d73b198c2 Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Wed, 25 Sep 2024 14:34:54 +0200 Subject: [PATCH 03/84] perf(engine): migrate to AsyncStateRoot (#10927) Co-authored-by: Roman Krasiuk --- crates/blockchain-tree-api/src/error.rs | 4 ++++ crates/engine/tree/src/tree/mod.rs | 20 +++++++++--------- crates/trie/parallel/benches/root.rs | 2 +- crates/trie/parallel/src/async_root.rs | 27 ++++++++++--------------- 4 files changed, 27 insertions(+), 26 deletions(-) diff --git a/crates/blockchain-tree-api/src/error.rs b/crates/blockchain-tree-api/src/error.rs index 867580e3c4099..155e57c5eca05 100644 --- a/crates/blockchain-tree-api/src/error.rs +++ b/crates/blockchain-tree-api/src/error.rs @@ -333,6 +333,9 @@ pub enum InsertBlockErrorKindTwo { /// Provider error. #[error(transparent)] Provider(#[from] ProviderError), + /// Other errors. 
+ #[error(transparent)] + Other(#[from] Box), } impl InsertBlockErrorKindTwo { @@ -365,6 +368,7 @@ impl InsertBlockErrorKindTwo { } } Self::Provider(err) => Err(InsertBlockFatalError::Provider(err)), + Self::Other(err) => Err(InternalBlockExecutionError::Other(err).into()), } } } diff --git a/crates/engine/tree/src/tree/mod.rs b/crates/engine/tree/src/tree/mod.rs index 3b7cf8ae2b84b..e3d6c7356efcc 100644 --- a/crates/engine/tree/src/tree/mod.rs +++ b/crates/engine/tree/src/tree/mod.rs @@ -43,7 +43,7 @@ use reth_rpc_types::{ }; use reth_stages_api::ControlFlow; use reth_trie::{updates::TrieUpdates, HashedPostState, TrieInput}; -use reth_trie_parallel::parallel_root::ParallelStateRoot; +use reth_trie_parallel::async_root::{AsyncStateRoot, AsyncStateRootError}; use std::{ cmp::Ordering, collections::{btree_map, hash_map, BTreeMap, HashMap, HashSet, VecDeque}, @@ -549,6 +549,7 @@ where config: TreeConfig, ) -> Self { let (incoming_tx, incoming) = std::sync::mpsc::channel(); + Self { provider, executor_provider, @@ -2193,14 +2194,14 @@ where let persistence_in_progress = self.persistence_state.in_progress(); if !persistence_in_progress { state_root_result = match self - .compute_state_root_in_parallel(block.parent_hash, &hashed_state) + .compute_state_root_async(block.parent_hash, &hashed_state) { Ok((state_root, trie_output)) => Some((state_root, trie_output)), - Err(ProviderError::ConsistentView(error)) => { - debug!(target: "engine::tree", %error, "Parallel state root computation failed consistency check, falling back"); + Err(AsyncStateRootError::Provider(ProviderError::ConsistentView(error))) => { + debug!(target: "engine", %error, "Async state root computation failed consistency check, falling back"); None } - Err(error) => return Err(error.into()), + Err(error) => return Err(InsertBlockErrorKindTwo::Other(Box::new(error))), }; } @@ -2263,19 +2264,20 @@ where Ok(InsertPayloadOk2::Inserted(BlockStatus2::Valid)) } - /// Compute state root for the given hashed post state in parallel. + /// Compute state root for the given hashed post state asynchronously. /// /// # Returns /// /// Returns `Ok(_)` if computed successfully. /// Returns `Err(_)` if error was encountered during computation. /// `Err(ProviderError::ConsistentView(_))` can be safely ignored and fallback computation + /// should be used instead. - fn compute_state_root_in_parallel( + fn compute_state_root_async( &self, parent_hash: B256, hashed_state: &HashedPostState, - ) -> ProviderResult<(B256, TrieUpdates)> { + ) -> Result<(B256, TrieUpdates), AsyncStateRootError> { let consistent_view = ConsistentDbView::new_with_latest_tip(self.provider.clone())?; let mut input = TrieInput::default(); @@ -2297,7 +2299,7 @@ where // Extend with block we are validating root for. input.append_ref(hashed_state); - Ok(ParallelStateRoot::new(consistent_view, input).incremental_root_with_updates()?) + AsyncStateRoot::new(consistent_view, input).incremental_root_with_updates() } /// Handles an error that occurred while inserting a block. 
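For readers skimming the hunk above, the fallback flow it introduces can be read in one piece as the sketch below. This is an illustrative consolidation of the diff, not the exact reth code: the surrounding function and the `consistent_view` and `input` bindings are assumed to be set up as shown earlier in the same hunk.

    // Consolidated sketch of the async state root fallback (illustrative only).
    // Only a failed consistent-view check is treated as recoverable; any other
    // error aborts block insertion via `InsertBlockErrorKindTwo::Other`.
    let state_root_result = match AsyncStateRoot::new(consistent_view, input)
        .incremental_root_with_updates()
    {
        Ok((state_root, trie_output)) => Some((state_root, trie_output)),
        Err(AsyncStateRootError::Provider(ProviderError::ConsistentView(error))) => {
            // The database view went stale; fall back to the regular computation.
            debug!(target: "engine", %error, "Async state root computation failed consistency check, falling back");
            None
        }
        Err(error) => return Err(InsertBlockErrorKindTwo::Other(Box::new(error))),
    };
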
diff --git a/crates/trie/parallel/benches/root.rs b/crates/trie/parallel/benches/root.rs index 470222e3e1dad..e09fb93413890 100644 --- a/crates/trie/parallel/benches/root.rs +++ b/crates/trie/parallel/benches/root.rs @@ -73,7 +73,7 @@ pub fn calculate_state_root(c: &mut Criterion) { // async root group.bench_function(BenchmarkId::new("async root", size), |b| { - b.to_async(&runtime).iter_with_setup( + b.iter_with_setup( || AsyncStateRoot::new(view.clone(), TrieInput::from_state(updated_state.clone())), |calculator| calculator.incremental_root(), ); diff --git a/crates/trie/parallel/src/async_root.rs b/crates/trie/parallel/src/async_root.rs index 74481f09e9f8e..b6b57725cb703 100644 --- a/crates/trie/parallel/src/async_root.rs +++ b/crates/trie/parallel/src/async_root.rs @@ -19,7 +19,6 @@ use reth_trie::{ use reth_trie_db::{DatabaseHashedCursorFactory, DatabaseTrieCursorFactory}; use std::{collections::HashMap, sync::Arc}; use thiserror::Error; -use tokio::sync::oneshot; use tracing::*; /// Async state root calculator. @@ -63,21 +62,16 @@ where Factory: DatabaseProviderFactory + Clone + Send + Sync + 'static, { /// Calculate incremental state root asynchronously. - pub async fn incremental_root(self) -> Result { - self.calculate(false).await.map(|(root, _)| root) + pub fn incremental_root(self) -> Result { + self.calculate(false).map(|(root, _)| root) } /// Calculate incremental state root with updates asynchronously. - pub async fn incremental_root_with_updates( - self, - ) -> Result<(B256, TrieUpdates), AsyncStateRootError> { - self.calculate(true).await + pub fn incremental_root_with_updates(self) -> Result<(B256, TrieUpdates), AsyncStateRootError> { + self.calculate(true) } - async fn calculate( - self, - retain_updates: bool, - ) -> Result<(B256, TrieUpdates), AsyncStateRootError> { + fn calculate(self, retain_updates: bool) -> Result<(B256, TrieUpdates), AsyncStateRootError> { let mut tracker = ParallelTrieTracker::default(); let trie_nodes_sorted = Arc::new(self.input.nodes.into_sorted()); let hashed_state_sorted = Arc::new(self.input.state.into_sorted()); @@ -100,7 +94,7 @@ where #[cfg(feature = "metrics")] let metrics = self.metrics.storage_trie.clone(); - let (tx, rx) = oneshot::channel(); + let (tx, rx) = std::sync::mpsc::sync_channel(1); rayon::spawn_fifo(move || { let result = (|| -> Result<_, AsyncStateRootError> { @@ -160,7 +154,7 @@ where } TrieElement::Leaf(hashed_address, account) => { let (storage_root, _, updates) = match storage_roots.remove(&hashed_address) { - Some(rx) => rx.await.map_err(|_| { + Some(rx) => rx.recv().map_err(|_| { AsyncStateRootError::StorageRootChannelClosed { hashed_address } })??, // Since we do not store all intermediate nodes in the database, there might @@ -227,6 +221,9 @@ pub enum AsyncStateRootError { /// The hashed address for which channel was closed. hashed_address: B256, }, + /// Receive error + #[error(transparent)] + Receive(#[from] std::sync::mpsc::RecvError), /// Error while calculating storage root. 
#[error(transparent)] StorageRoot(#[from] StorageRootError), @@ -292,7 +289,6 @@ mod tests { assert_eq!( AsyncStateRoot::new(consistent_view.clone(), Default::default(),) .incremental_root() - .await .unwrap(), test_utils::state_root(state.clone()) ); @@ -323,9 +319,8 @@ mod tests { } assert_eq!( - AsyncStateRoot::new(consistent_view.clone(), TrieInput::from_state(hashed_state)) + AsyncStateRoot::new(consistent_view, TrieInput::from_state(hashed_state)) .incremental_root() - .await .unwrap(), test_utils::state_root(state) ); From 3d034519af0d81c73db7452df323e345b3fcf812 Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Wed, 25 Sep 2024 14:13:28 +0100 Subject: [PATCH 04/84] feat(exex): `ExExNotification::into_inverted` (#11205) --- crates/exex/types/src/notification.rs | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/crates/exex/types/src/notification.rs b/crates/exex/types/src/notification.rs index 390d9dc665a70..2a55957879b76 100644 --- a/crates/exex/types/src/notification.rs +++ b/crates/exex/types/src/notification.rs @@ -43,6 +43,20 @@ impl ExExNotification { Self::ChainCommitted { .. } => None, } } + + /// Converts the notification into a notification that is the inverse of the original one. + /// + /// - For [`Self::ChainCommitted`], it's [`Self::ChainReverted`]. + /// - For [`Self::ChainReverted`], it's [`Self::ChainCommitted`]. + /// - For [`Self::ChainReorged`], it's [`Self::ChainReorged`] with the new chain as the old + /// chain and the old chain as the new chain. + pub fn into_inverted(self) -> Self { + match self { + Self::ChainCommitted { new } => Self::ChainReverted { old: new }, + Self::ChainReverted { old } => Self::ChainCommitted { new: old }, + Self::ChainReorged { old, new } => Self::ChainReorged { old: new, new: old }, + } + } } impl From for ExExNotification { From c00516c3f964c1435396e29e5ae71070ef43ccdc Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Wed, 25 Sep 2024 16:49:50 +0200 Subject: [PATCH 05/84] rpc-types: rm `alloy-rpc-types-engine` reexport (#11206) --- Cargo.lock | 25 +++++++++++-------- crates/consensus/auto-seal/Cargo.toml | 2 +- crates/consensus/auto-seal/src/task.rs | 2 +- crates/consensus/beacon/Cargo.toml | 2 +- crates/consensus/beacon/src/engine/error.rs | 2 +- crates/consensus/beacon/src/engine/event.rs | 2 +- .../consensus/beacon/src/engine/forkchoice.rs | 2 +- crates/consensus/beacon/src/engine/handle.rs | 6 ++--- crates/consensus/beacon/src/engine/message.rs | 8 +++--- crates/consensus/beacon/src/engine/mod.rs | 12 ++++----- .../consensus/beacon/src/engine/test_utils.rs | 6 ++--- crates/consensus/debug-client/Cargo.toml | 2 +- crates/consensus/debug-client/src/client.rs | 9 +++---- crates/engine/local/Cargo.toml | 1 + crates/engine/local/src/service.rs | 4 +-- crates/engine/tree/Cargo.toml | 2 +- crates/engine/tree/src/tree/mod.rs | 11 +++----- crates/engine/util/Cargo.toml | 2 +- crates/engine/util/src/engine_store.rs | 5 +--- crates/engine/util/src/reorg.rs | 7 +++--- crates/engine/util/src/skip_new_payload.rs | 2 +- crates/ethereum/engine-primitives/Cargo.toml | 2 +- crates/ethereum/engine-primitives/src/lib.rs | 11 +++----- .../ethereum/engine-primitives/src/payload.rs | 8 +++--- crates/node/core/src/utils.rs | 2 +- crates/node/core/src/version.rs | 2 +- crates/optimism/node/Cargo.toml | 1 + crates/optimism/node/src/engine.rs | 2 +- crates/optimism/node/tests/e2e/p2p.rs | 2 +- crates/optimism/payload/Cargo.toml | 2 +- crates/optimism/payload/src/payload.rs | 2 +- 
crates/rpc/rpc-api/Cargo.toml | 2 +- crates/rpc/rpc-api/src/engine.rs | 10 ++++---- crates/rpc/rpc-builder/Cargo.toml | 1 + crates/rpc/rpc-builder/tests/it/auth.rs | 2 +- crates/rpc/rpc-builder/tests/it/utils.rs | 2 +- crates/rpc/rpc-engine-api/Cargo.toml | 1 + crates/rpc/rpc-engine-api/src/engine_api.rs | 14 +++++------ crates/rpc/rpc-engine-api/src/error.rs | 2 +- crates/rpc/rpc-engine-api/src/metrics.rs | 18 ++++++------- crates/rpc/rpc-engine-api/tests/it/payload.rs | 6 ++--- crates/rpc/rpc-server-types/Cargo.toml | 2 +- crates/rpc/rpc-server-types/src/result.rs | 2 +- crates/rpc/rpc-types-compat/Cargo.toml | 2 +- .../rpc-types-compat/src/engine/payload.rs | 16 ++++++------ crates/rpc/rpc-types/Cargo.toml | 6 ----- crates/rpc/rpc-types/src/eth/mod.rs | 4 --- crates/rpc/rpc-types/src/lib.rs | 7 ------ 48 files changed, 111 insertions(+), 134 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 35301d3224836..a624c269a8ad1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6244,6 +6244,7 @@ name = "reth-auto-seal-consensus" version = "1.0.7" dependencies = [ "alloy-primitives", + "alloy-rpc-types-engine", "futures-util", "reth-beacon-consensus", "reth-chainspec", @@ -6257,7 +6258,6 @@ dependencies = [ "reth-primitives", "reth-provider", "reth-revm", - "reth-rpc-types", "reth-stages-api", "reth-tokio-util", "reth-transaction-pool", @@ -6297,6 +6297,7 @@ version = "1.0.7" dependencies = [ "alloy-genesis", "alloy-primitives", + "alloy-rpc-types-engine", "assert_matches", "futures", "itertools 0.13.0", @@ -6326,7 +6327,6 @@ dependencies = [ "reth-provider", "reth-prune", "reth-prune-types", - "reth-rpc-types", "reth-rpc-types-compat", "reth-stages", "reth-stages-api", @@ -6654,6 +6654,7 @@ dependencies = [ "alloy-eips", "alloy-provider", "alloy-rpc-types", + "alloy-rpc-types-engine", "auto_impl", "eyre", "futures", @@ -6662,7 +6663,6 @@ dependencies = [ "reth-node-core", "reth-rpc-api", "reth-rpc-builder", - "reth-rpc-types", "reth-tracing", "ringbuffer", "serde", @@ -6968,6 +6968,7 @@ name = "reth-engine-local" version = "1.0.7" dependencies = [ "alloy-primitives", + "alloy-rpc-types-engine", "eyre", "futures-util", "reth-beacon-consensus", @@ -7040,6 +7041,7 @@ dependencies = [ "alloy-eips", "alloy-primitives", "alloy-rlp", + "alloy-rpc-types-engine", "assert_matches", "futures", "metrics", @@ -7066,7 +7068,6 @@ dependencies = [ "reth-prune", "reth-prune-types", "reth-revm", - "reth-rpc-types", "reth-rpc-types-compat", "reth-stages", "reth-stages-api", @@ -7085,6 +7086,7 @@ name = "reth-engine-util" version = "1.0.7" dependencies = [ "alloy-primitives", + "alloy-rpc-types-engine", "eyre", "futures", "itertools 0.13.0", @@ -7099,7 +7101,6 @@ dependencies = [ "reth-primitives", "reth-provider", "reth-revm", - "reth-rpc-types", "reth-rpc-types-compat", "reth-trie", "revm-primitives", @@ -7212,12 +7213,12 @@ dependencies = [ "alloy-eips", "alloy-primitives", "alloy-rlp", + "alloy-rpc-types-engine", "reth-chain-state", "reth-chainspec", "reth-engine-primitives", "reth-payload-primitives", "reth-primitives", - "reth-rpc-types", "reth-rpc-types-compat", "serde", "serde_json", @@ -7960,6 +7961,7 @@ version = "1.0.7" dependencies = [ "alloy-genesis", "alloy-primitives", + "alloy-rpc-types-engine", "async-trait", "clap", "eyre", @@ -8106,6 +8108,7 @@ version = "1.0.7" dependencies = [ "alloy-primitives", "alloy-rlp", + "alloy-rpc-types-engine", "op-alloy-rpc-types-engine", "reth-basic-payload-builder", "reth-chain-state", @@ -8119,7 +8122,6 @@ dependencies = [ "reth-primitives", "reth-provider", 
"reth-revm", - "reth-rpc-types", "reth-rpc-types-compat", "reth-transaction-pool", "reth-trie", @@ -8490,6 +8492,7 @@ dependencies = [ "alloy-rpc-types-anvil", "alloy-rpc-types-beacon", "alloy-rpc-types-debug", + "alloy-rpc-types-engine", "alloy-rpc-types-eth", "alloy-rpc-types-mev", "alloy-rpc-types-trace", @@ -8500,7 +8503,6 @@ dependencies = [ "reth-network-peers", "reth-primitives", "reth-rpc-eth-api", - "reth-rpc-types", "serde_json", ] @@ -8530,6 +8532,7 @@ dependencies = [ "alloy-network", "alloy-primitives", "alloy-rpc-types", + "alloy-rpc-types-engine", "alloy-rpc-types-eth", "alloy-rpc-types-trace", "alloy-serde", @@ -8582,6 +8585,7 @@ dependencies = [ "alloy-eips", "alloy-primitives", "alloy-rlp", + "alloy-rpc-types-engine", "assert_matches", "async-trait", "jsonrpsee-core", @@ -8715,12 +8719,12 @@ name = "reth-rpc-server-types" version = "1.0.7" dependencies = [ "alloy-primitives", + "alloy-rpc-types-engine", "jsonrpsee-core", "jsonrpsee-types", "reth-errors", "reth-network-api", "reth-primitives", - "reth-rpc-types", "serde", "strum", ] @@ -8729,7 +8733,6 @@ dependencies = [ name = "reth-rpc-types" version = "1.0.7" dependencies = [ - "alloy-rpc-types-engine", "jsonrpsee-types", ] @@ -8740,10 +8743,10 @@ dependencies = [ "alloy-primitives", "alloy-rlp", "alloy-rpc-types", + "alloy-rpc-types-engine", "alloy-rpc-types-eth", "alloy-serde", "reth-primitives", - "reth-rpc-types", "reth-trie-common", "serde_json", ] diff --git a/crates/consensus/auto-seal/Cargo.toml b/crates/consensus/auto-seal/Cargo.toml index ca35937a47392..d712071a19279 100644 --- a/crates/consensus/auto-seal/Cargo.toml +++ b/crates/consensus/auto-seal/Cargo.toml @@ -26,7 +26,6 @@ reth-transaction-pool.workspace = true reth-evm.workspace = true reth-engine-primitives.workspace = true reth-consensus.workspace = true -reth-rpc-types.workspace = true reth-network-peers.workspace = true reth-tokio-util.workspace = true reth-trie.workspace = true @@ -34,6 +33,7 @@ reth-trie.workspace = true # ethereum alloy-primitives.workspace = true revm-primitives.workspace = true +alloy-rpc-types-engine.workspace = true # async futures-util.workspace = true diff --git a/crates/consensus/auto-seal/src/task.rs b/crates/consensus/auto-seal/src/task.rs index 16c726c7a1a2b..8979428bed7c2 100644 --- a/crates/consensus/auto-seal/src/task.rs +++ b/crates/consensus/auto-seal/src/task.rs @@ -1,4 +1,5 @@ use crate::{mode::MiningMode, Storage}; +use alloy_rpc_types_engine::ForkchoiceState; use futures_util::{future::BoxFuture, FutureExt}; use reth_beacon_consensus::{BeaconEngineMessage, ForkchoiceStatus}; use reth_chainspec::{EthChainSpec, EthereumHardforks}; @@ -6,7 +7,6 @@ use reth_engine_primitives::EngineTypes; use reth_evm::execute::BlockExecutorProvider; use reth_primitives::IntoRecoveredTransaction; use reth_provider::{CanonChainTracker, StateProviderFactory}; -use reth_rpc_types::engine::ForkchoiceState; use reth_stages_api::PipelineEvent; use reth_tokio_util::EventStream; use reth_transaction_pool::{TransactionPool, ValidPoolTransaction}; diff --git a/crates/consensus/beacon/Cargo.toml b/crates/consensus/beacon/Cargo.toml index 6d3fc798cbeb3..f62c6fbf2a915 100644 --- a/crates/consensus/beacon/Cargo.toml +++ b/crates/consensus/beacon/Cargo.toml @@ -18,7 +18,6 @@ reth-primitives.workspace = true reth-stages-api.workspace = true reth-errors.workspace = true reth-provider.workspace = true -reth-rpc-types.workspace = true reth-tasks.workspace = true reth-payload-builder.workspace = true reth-payload-primitives.workspace = true @@ -32,6 
+31,7 @@ reth-node-types.workspace = true # ethereum alloy-primitives.workspace = true +alloy-rpc-types-engine.workspace = true # async tokio = { workspace = true, features = ["sync"] } diff --git a/crates/consensus/beacon/src/engine/error.rs b/crates/consensus/beacon/src/engine/error.rs index 21c6974737d6b..4f58b7300d358 100644 --- a/crates/consensus/beacon/src/engine/error.rs +++ b/crates/consensus/beacon/src/engine/error.rs @@ -1,6 +1,6 @@ use crate::engine::hooks::EngineHookError; +use alloy_rpc_types_engine::ForkchoiceUpdateError; use reth_errors::{DatabaseError, RethError}; -use reth_rpc_types::engine::ForkchoiceUpdateError; use reth_stages_api::PipelineError; /// Beacon engine result. diff --git a/crates/consensus/beacon/src/engine/event.rs b/crates/consensus/beacon/src/engine/event.rs index d617ee4f23c47..975085a32f350 100644 --- a/crates/consensus/beacon/src/engine/event.rs +++ b/crates/consensus/beacon/src/engine/event.rs @@ -1,7 +1,7 @@ use crate::engine::forkchoice::ForkchoiceStatus; use alloy_primitives::B256; +use alloy_rpc_types_engine::ForkchoiceState; use reth_primitives::{SealedBlock, SealedHeader}; -use reth_rpc_types::engine::ForkchoiceState; use std::{ fmt::{Display, Formatter, Result}, sync::Arc, diff --git a/crates/consensus/beacon/src/engine/forkchoice.rs b/crates/consensus/beacon/src/engine/forkchoice.rs index f02b1200338b6..975c2ee3bc452 100644 --- a/crates/consensus/beacon/src/engine/forkchoice.rs +++ b/crates/consensus/beacon/src/engine/forkchoice.rs @@ -1,5 +1,5 @@ use alloy_primitives::B256; -use reth_rpc_types::engine::{ForkchoiceState, PayloadStatusEnum}; +use alloy_rpc_types_engine::{ForkchoiceState, PayloadStatusEnum}; /// The struct that keeps track of the received forkchoice state and their status. #[derive(Debug, Clone, Default)] diff --git a/crates/consensus/beacon/src/engine/handle.rs b/crates/consensus/beacon/src/engine/handle.rs index aee554f8241a6..65b7c38df9181 100644 --- a/crates/consensus/beacon/src/engine/handle.rs +++ b/crates/consensus/beacon/src/engine/handle.rs @@ -4,12 +4,12 @@ use crate::{ engine::message::OnForkChoiceUpdated, BeaconConsensusEngineEvent, BeaconEngineMessage, BeaconForkChoiceUpdateError, BeaconOnNewPayloadError, }; +use alloy_rpc_types_engine::{ + CancunPayloadFields, ExecutionPayload, ForkchoiceState, ForkchoiceUpdated, PayloadStatus, +}; use futures::TryFutureExt; use reth_engine_primitives::EngineTypes; use reth_errors::RethResult; -use reth_rpc_types::engine::{ - CancunPayloadFields, ExecutionPayload, ForkchoiceState, ForkchoiceUpdated, PayloadStatus, -}; use reth_tokio_util::{EventSender, EventStream}; use tokio::sync::{mpsc::UnboundedSender, oneshot}; diff --git a/crates/consensus/beacon/src/engine/message.rs b/crates/consensus/beacon/src/engine/message.rs index 435f8a313d858..fdaad0cc4b0d4 100644 --- a/crates/consensus/beacon/src/engine/message.rs +++ b/crates/consensus/beacon/src/engine/message.rs @@ -1,12 +1,12 @@ use crate::engine::{error::BeaconOnNewPayloadError, forkchoice::ForkchoiceStatus}; +use alloy_rpc_types_engine::{ + CancunPayloadFields, ExecutionPayload, ForkChoiceUpdateResult, ForkchoiceState, + ForkchoiceUpdateError, ForkchoiceUpdated, PayloadId, PayloadStatus, PayloadStatusEnum, +}; use futures::{future::Either, FutureExt}; use reth_engine_primitives::EngineTypes; use reth_errors::RethResult; use reth_payload_primitives::PayloadBuilderError; -use reth_rpc_types::engine::{ - CancunPayloadFields, ExecutionPayload, ForkChoiceUpdateResult, ForkchoiceState, - ForkchoiceUpdateError, ForkchoiceUpdated, 
PayloadId, PayloadStatus, PayloadStatusEnum, -}; use std::{ fmt::Display, future::Future, diff --git a/crates/consensus/beacon/src/engine/mod.rs b/crates/consensus/beacon/src/engine/mod.rs index 74515f6c37073..3d16dfb65459c 100644 --- a/crates/consensus/beacon/src/engine/mod.rs +++ b/crates/consensus/beacon/src/engine/mod.rs @@ -1,4 +1,8 @@ use alloy_primitives::{BlockNumber, B256}; +use alloy_rpc_types_engine::{ + CancunPayloadFields, ExecutionPayload, ForkchoiceState, PayloadStatus, PayloadStatusEnum, + PayloadValidationError, +}; use futures::{stream::BoxStream, Future, StreamExt}; use itertools::Either; use reth_blockchain_tree_api::{ @@ -22,10 +26,6 @@ use reth_provider::{ providers::ProviderNodeTypes, BlockIdReader, BlockReader, BlockSource, CanonChainTracker, ChainSpecProvider, ProviderError, StageCheckpointReader, }; -use reth_rpc_types::engine::{ - CancunPayloadFields, ExecutionPayload, ForkchoiceState, PayloadStatus, PayloadStatusEnum, - PayloadValidationError, -}; use reth_stages_api::{ControlFlow, Pipeline, PipelineTarget, StageId}; use reth_tasks::TaskSpawner; use reth_tokio_util::EventSender; @@ -1984,10 +1984,10 @@ mod tests { test_utils::{spawn_consensus_engine, TestConsensusEngineBuilder}, BeaconForkChoiceUpdateError, }; + use alloy_rpc_types_engine::{ForkchoiceState, ForkchoiceUpdated, PayloadStatus}; use assert_matches::assert_matches; use reth_chainspec::{ChainSpecBuilder, MAINNET}; use reth_provider::{BlockWriter, ProviderFactory}; - use reth_rpc_types::engine::{ForkchoiceState, ForkchoiceUpdated, PayloadStatus}; use reth_rpc_types_compat::engine::payload::block_to_payload_v1; use reth_stages::{ExecOutput, PipelineError, StageError}; use reth_stages_api::StageCheckpoint; @@ -2180,11 +2180,11 @@ mod tests { mod fork_choice_updated { use super::*; use alloy_primitives::U256; + use alloy_rpc_types_engine::ForkchoiceUpdateError; use generators::BlockParams; use reth_db::{tables, test_utils::create_test_static_files_dir, Database}; use reth_db_api::transaction::DbTxMut; use reth_provider::{providers::StaticFileProvider, test_utils::MockNodeTypesWithDB}; - use reth_rpc_types::engine::ForkchoiceUpdateError; use reth_testing_utils::generators::random_block; #[tokio::test] diff --git a/crates/consensus/beacon/src/engine/test_utils.rs b/crates/consensus/beacon/src/engine/test_utils.rs index 44d1d0f05c1dd..4dfd9c87d3212 100644 --- a/crates/consensus/beacon/src/engine/test_utils.rs +++ b/crates/consensus/beacon/src/engine/test_utils.rs @@ -5,6 +5,9 @@ use crate::{ BeaconOnNewPayloadError, EthBeaconConsensus, MIN_BLOCKS_FOR_PIPELINE_RUN, }; use alloy_primitives::{BlockNumber, Sealable, B256}; +use alloy_rpc_types_engine::{ + CancunPayloadFields, ExecutionPayload, ForkchoiceState, ForkchoiceUpdated, PayloadStatus, +}; use reth_blockchain_tree::{ config::BlockchainTreeConfig, externals::TreeExternals, BlockchainTree, ShareableBlockchainTree, }; @@ -30,9 +33,6 @@ use reth_provider::{ }; use reth_prune::Pruner; use reth_prune_types::PruneModes; -use reth_rpc_types::engine::{ - CancunPayloadFields, ExecutionPayload, ForkchoiceState, ForkchoiceUpdated, PayloadStatus, -}; use reth_stages::{sets::DefaultStages, test_utils::TestStages, ExecOutput, Pipeline, StageError}; use reth_static_file::StaticFileProducer; use reth_tasks::TokioTaskExecutor; diff --git a/crates/consensus/debug-client/Cargo.toml b/crates/consensus/debug-client/Cargo.toml index 8a03c052768dc..74ea61da34514 100644 --- a/crates/consensus/debug-client/Cargo.toml +++ b/crates/consensus/debug-client/Cargo.toml @@ -15,7 +15,6 
@@ workspace = true reth-node-api.workspace = true reth-node-core.workspace = true reth-rpc-api.workspace = true -reth-rpc-types.workspace = true reth-rpc-builder.workspace = true reth-tracing.workspace = true @@ -24,6 +23,7 @@ alloy-consensus = { workspace = true, features = ["serde"] } alloy-eips.workspace = true alloy-provider = { workspace = true, features = ["ws"] } alloy-rpc-types.workspace = true +alloy-rpc-types-engine.workspace = true auto_impl.workspace = true futures.workspace = true diff --git a/crates/consensus/debug-client/src/client.rs b/crates/consensus/debug-client/src/client.rs index eec1b5b99e9ed..1acdd197bfa94 100644 --- a/crates/consensus/debug-client/src/client.rs +++ b/crates/consensus/debug-client/src/client.rs @@ -1,13 +1,10 @@ use alloy_consensus::TxEnvelope; use alloy_eips::eip2718::Encodable2718; use alloy_rpc_types::{Block, BlockTransactions}; +use alloy_rpc_types_engine::{ExecutionPayloadV1, ExecutionPayloadV2, ExecutionPayloadV3}; use reth_node_api::EngineTypes; -use reth_node_core::{ - primitives::B256, - rpc::types::{ExecutionPayloadV2, ExecutionPayloadV3}, -}; +use reth_node_core::primitives::B256; use reth_rpc_builder::auth::AuthServerHandle; -use reth_rpc_types::ExecutionPayloadV1; use reth_tracing::tracing::warn; use ringbuffer::{AllocRingBuffer, RingBuffer}; use std::future::Future; @@ -133,7 +130,7 @@ impl DebugConsensusClient

{ continue; } }; - let state = reth_rpc_types::engine::ForkchoiceState { + let state = alloy_rpc_types_engine::ForkchoiceState { head_block_hash: block_hash, safe_block_hash, finalized_block_hash, diff --git a/crates/engine/local/Cargo.toml b/crates/engine/local/Cargo.toml index c43a97abd8503..286b9f836aa4f 100644 --- a/crates/engine/local/Cargo.toml +++ b/crates/engine/local/Cargo.toml @@ -23,6 +23,7 @@ reth-stages-api.workspace = true # alloy alloy-primitives.workspace = true +alloy-rpc-types-engine.workspace = true # async tokio.workspace = true diff --git a/crates/engine/local/src/service.rs b/crates/engine/local/src/service.rs index f9d2bd2aacb08..d276dc5c1f8a0 100644 --- a/crates/engine/local/src/service.rs +++ b/crates/engine/local/src/service.rs @@ -178,11 +178,11 @@ mod tests { struct TestPayloadAttributesBuilder; impl PayloadAttributesBuilder for TestPayloadAttributesBuilder { - type PayloadAttributes = reth_rpc_types::engine::PayloadAttributes; + type PayloadAttributes = alloy_rpc_types_engine::PayloadAttributes; type Error = Infallible; fn build(&self) -> Result { - Ok(reth_rpc_types::engine::PayloadAttributes { + Ok(alloy_rpc_types_engine::PayloadAttributes { timestamp: 0, prev_randao: Default::default(), suggested_fee_recipient: Default::default(), diff --git a/crates/engine/tree/Cargo.toml b/crates/engine/tree/Cargo.toml index 4697e7fb87a53..91c9cd5422d0b 100644 --- a/crates/engine/tree/Cargo.toml +++ b/crates/engine/tree/Cargo.toml @@ -29,7 +29,6 @@ reth-primitives.workspace = true reth-provider.workspace = true reth-prune.workspace = true reth-revm.workspace = true -reth-rpc-types.workspace = true reth-stages-api.workspace = true reth-tasks.workspace = true reth-trie.workspace = true @@ -38,6 +37,7 @@ reth-trie-parallel.workspace = true # alloy alloy-primitives.workspace = true alloy-eips.workspace = true +alloy-rpc-types-engine.workspace = true # common futures.workspace = true diff --git a/crates/engine/tree/src/tree/mod.rs b/crates/engine/tree/src/tree/mod.rs index e3d6c7356efcc..3dd736cdc10f6 100644 --- a/crates/engine/tree/src/tree/mod.rs +++ b/crates/engine/tree/src/tree/mod.rs @@ -6,6 +6,10 @@ use crate::{ }; use alloy_eips::BlockNumHash; use alloy_primitives::{BlockNumber, B256, U256}; +use alloy_rpc_types_engine::{ + CancunPayloadFields, ExecutionPayload, ForkchoiceState, PayloadStatus, PayloadStatusEnum, + PayloadValidationError, +}; use reth_beacon_consensus::{ BeaconConsensusEngineEvent, BeaconEngineMessage, ForkchoiceStateTracker, InvalidHeaderCache, OnForkChoiceUpdated, MIN_BLOCKS_FOR_PIPELINE_RUN, @@ -34,13 +38,6 @@ use reth_provider::{ TransactionVariant, }; use reth_revm::database::StateProviderDatabase; -use reth_rpc_types::{ - engine::{ - CancunPayloadFields, ForkchoiceState, PayloadStatus, PayloadStatusEnum, - PayloadValidationError, - }, - ExecutionPayload, -}; use reth_stages_api::ControlFlow; use reth_trie::{updates::TrieUpdates, HashedPostState, TrieInput}; use reth_trie_parallel::async_root::{AsyncStateRoot, AsyncStateRootError}; diff --git a/crates/engine/util/Cargo.toml b/crates/engine/util/Cargo.toml index 76817694238e6..20a0acb8d428b 100644 --- a/crates/engine/util/Cargo.toml +++ b/crates/engine/util/Cargo.toml @@ -15,7 +15,6 @@ workspace = true reth-primitives.workspace = true reth-errors.workspace = true reth-fs-util.workspace = true -reth-rpc-types.workspace = true reth-rpc-types-compat.workspace = true reth-engine-primitives.workspace = true reth-beacon-consensus.workspace = true @@ -29,6 +28,7 @@ reth-trie.workspace = true # alloy 
alloy-primitives.workspace = true +alloy-rpc-types-engine.workspace = true # async tokio = { workspace = true, default-features = false } diff --git a/crates/engine/util/src/engine_store.rs b/crates/engine/util/src/engine_store.rs index a897be741ca8b..1f34451996117 100644 --- a/crates/engine/util/src/engine_store.rs +++ b/crates/engine/util/src/engine_store.rs @@ -1,13 +1,10 @@ //! Stores engine API messages to disk for later inspection and replay. +use alloy_rpc_types_engine::{CancunPayloadFields, ExecutionPayload, ForkchoiceState}; use futures::{Stream, StreamExt}; use reth_beacon_consensus::BeaconEngineMessage; use reth_engine_primitives::EngineTypes; use reth_fs_util as fs; -use reth_rpc_types::{ - engine::{CancunPayloadFields, ForkchoiceState}, - ExecutionPayload, -}; use serde::{Deserialize, Serialize}; use std::{ collections::BTreeMap, diff --git a/crates/engine/util/src/reorg.rs b/crates/engine/util/src/reorg.rs index 8b7b2c036277b..b3fc8cbd81856 100644 --- a/crates/engine/util/src/reorg.rs +++ b/crates/engine/util/src/reorg.rs @@ -1,6 +1,9 @@ //! Stream wrapper that simulates reorgs. use alloy_primitives::U256; +use alloy_rpc_types_engine::{ + CancunPayloadFields, ExecutionPayload, ForkchoiceState, PayloadStatus, +}; use futures::{stream::FuturesUnordered, Stream, StreamExt, TryFutureExt}; use itertools::Either; use reth_beacon_consensus::{BeaconEngineMessage, BeaconOnNewPayloadError, OnForkChoiceUpdated}; @@ -17,10 +20,6 @@ use reth_revm::{ state_change::post_block_withdrawals_balance_increments, DatabaseCommit, }; -use reth_rpc_types::{ - engine::{CancunPayloadFields, ForkchoiceState, PayloadStatus}, - ExecutionPayload, -}; use reth_rpc_types_compat::engine::payload::block_to_payload; use reth_trie::HashedPostState; use revm_primitives::{ diff --git a/crates/engine/util/src/skip_new_payload.rs b/crates/engine/util/src/skip_new_payload.rs index f4ed8a543ab45..d2450711ecfcc 100644 --- a/crates/engine/util/src/skip_new_payload.rs +++ b/crates/engine/util/src/skip_new_payload.rs @@ -1,9 +1,9 @@ //! Stream wrapper that skips specified number of new payload messages. 
+use alloy_rpc_types_engine::{PayloadStatus, PayloadStatusEnum}; use futures::{Stream, StreamExt}; use reth_beacon_consensus::BeaconEngineMessage; use reth_engine_primitives::EngineTypes; -use reth_rpc_types::engine::{PayloadStatus, PayloadStatusEnum}; use std::{ pin::Pin, task::{ready, Context, Poll}, diff --git a/crates/ethereum/engine-primitives/Cargo.toml b/crates/ethereum/engine-primitives/Cargo.toml index cfeac285328e6..e9bcd42568656 100644 --- a/crates/ethereum/engine-primitives/Cargo.toml +++ b/crates/ethereum/engine-primitives/Cargo.toml @@ -16,7 +16,6 @@ reth-chainspec.workspace = true reth-primitives.workspace = true reth-engine-primitives.workspace = true reth-payload-primitives.workspace = true -reth-rpc-types.workspace = true reth-rpc-types-compat.workspace = true alloy-rlp.workspace = true reth-chain-state.workspace = true @@ -24,6 +23,7 @@ reth-chain-state.workspace = true # alloy alloy-primitives.workspace = true alloy-eips.workspace = true +alloy-rpc-types-engine.workspace = true # misc serde.workspace = true diff --git a/crates/ethereum/engine-primitives/src/lib.rs b/crates/ethereum/engine-primitives/src/lib.rs index 92243d72976e7..69d73a021747b 100644 --- a/crates/ethereum/engine-primitives/src/lib.rs +++ b/crates/ethereum/engine-primitives/src/lib.rs @@ -11,6 +11,10 @@ mod payload; use std::sync::Arc; +pub use alloy_rpc_types_engine::{ + ExecutionPayloadEnvelopeV2, ExecutionPayloadEnvelopeV3, ExecutionPayloadEnvelopeV4, + ExecutionPayloadV1, PayloadAttributes as EthPayloadAttributes, +}; pub use payload::{EthBuiltPayload, EthPayloadBuilderAttributes}; use reth_chainspec::ChainSpec; use reth_engine_primitives::{EngineTypes, EngineValidator}; @@ -18,13 +22,6 @@ use reth_payload_primitives::{ validate_version_specific_fields, EngineApiMessageVersion, EngineObjectValidationError, PayloadOrAttributes, PayloadTypes, }; -pub use reth_rpc_types::{ - engine::{ - ExecutionPayloadEnvelopeV2, ExecutionPayloadEnvelopeV3, ExecutionPayloadEnvelopeV4, - PayloadAttributes as EthPayloadAttributes, - }, - ExecutionPayloadV1, -}; /// The types used in the default mainnet ethereum beacon consensus engine. #[derive(Debug, Default, Clone, serde::Deserialize, serde::Serialize)] diff --git a/crates/ethereum/engine-primitives/src/payload.rs b/crates/ethereum/engine-primitives/src/payload.rs index e22012b6c3d3f..dd0b7b405e9fc 100644 --- a/crates/ethereum/engine-primitives/src/payload.rs +++ b/crates/ethereum/engine-primitives/src/payload.rs @@ -3,13 +3,13 @@ use alloy_eips::eip4844::BlobTransactionSidecar; use alloy_primitives::{Address, B256, U256}; use alloy_rlp::Encodable; -use reth_chain_state::ExecutedBlock; -use reth_payload_primitives::{BuiltPayload, PayloadBuilderAttributes}; -use reth_primitives::{SealedBlock, Withdrawals}; -use reth_rpc_types::engine::{ +use alloy_rpc_types_engine::{ ExecutionPayloadEnvelopeV2, ExecutionPayloadEnvelopeV3, ExecutionPayloadEnvelopeV4, ExecutionPayloadV1, PayloadAttributes, PayloadId, }; +use reth_chain_state::ExecutedBlock; +use reth_payload_primitives::{BuiltPayload, PayloadBuilderAttributes}; +use reth_primitives::{SealedBlock, Withdrawals}; use reth_rpc_types_compat::engine::payload::{ block_to_payload_v1, block_to_payload_v3, block_to_payload_v4, convert_block_to_payload_field_v2, diff --git a/crates/node/core/src/utils.rs b/crates/node/core/src/utils.rs index ae9b1f021fd6c..a64d12114558c 100644 --- a/crates/node/core/src/utils.rs +++ b/crates/node/core/src/utils.rs @@ -2,6 +2,7 @@ //! blocks from the network. 
use alloy_primitives::Sealable; +use alloy_rpc_types_engine::{JwtError, JwtSecret}; use eyre::Result; use reth_chainspec::ChainSpec; use reth_consensus_common::validation::validate_block_pre_execution; @@ -11,7 +12,6 @@ use reth_network_p2p::{ priority::Priority, }; use reth_primitives::{BlockHashOrNumber, SealedBlock, SealedHeader}; -use reth_rpc_types::engine::{JwtError, JwtSecret}; use std::{ env::VarError, path::{Path, PathBuf}, diff --git a/crates/node/core/src/version.rs b/crates/node/core/src/version.rs index 683c305642650..84fcf3f0f11ef 100644 --- a/crates/node/core/src/version.rs +++ b/crates/node/core/src/version.rs @@ -1,6 +1,6 @@ //! Version information for reth. +use alloy_rpc_types_engine::ClientCode; use reth_db::ClientVersion; -use reth_rpc_types::engine::ClientCode; /// The client code for Reth pub const CLIENT_CODE: ClientCode = ClientCode::RH; diff --git a/crates/optimism/node/Cargo.toml b/crates/optimism/node/Cargo.toml index d4d9ba7af52c1..30e3fde890f2c 100644 --- a/crates/optimism/node/Cargo.toml +++ b/crates/optimism/node/Cargo.toml @@ -45,6 +45,7 @@ reth-optimism-forks.workspace = true # ethereum alloy-primitives.workspace = true op-alloy-rpc-types-engine.workspace = true +alloy-rpc-types-engine.workspace = true # async async-trait.workspace = true diff --git a/crates/optimism/node/src/engine.rs b/crates/optimism/node/src/engine.rs index bc5880034cdfa..06059083ff57f 100644 --- a/crates/optimism/node/src/engine.rs +++ b/crates/optimism/node/src/engine.rs @@ -1,5 +1,6 @@ use std::sync::Arc; +use alloy_rpc_types_engine::{ExecutionPayloadEnvelopeV2, ExecutionPayloadV1}; use op_alloy_rpc_types_engine::{ OptimismExecutionPayloadEnvelopeV3, OptimismExecutionPayloadEnvelopeV4, OptimismPayloadAttributes, @@ -15,7 +16,6 @@ use reth_node_api::{ }; use reth_optimism_forks::OptimismHardfork; use reth_optimism_payload_builder::{OptimismBuiltPayload, OptimismPayloadBuilderAttributes}; -use reth_rpc_types::{engine::ExecutionPayloadEnvelopeV2, ExecutionPayloadV1}; /// The types used in the optimism beacon consensus engine. 
#[derive(Debug, Default, Clone, serde::Deserialize, serde::Serialize)] diff --git a/crates/optimism/node/tests/e2e/p2p.rs b/crates/optimism/node/tests/e2e/p2p.rs index e4993e8f3138c..ebd35cc8a5c81 100644 --- a/crates/optimism/node/tests/e2e/p2p.rs +++ b/crates/optimism/node/tests/e2e/p2p.rs @@ -1,6 +1,6 @@ use crate::utils::{advance_chain, setup}; +use alloy_rpc_types_engine::PayloadStatusEnum; use reth::blockchain_tree::error::BlockchainTreeError; -use reth_rpc_types::engine::PayloadStatusEnum; use std::sync::Arc; use tokio::sync::Mutex; diff --git a/crates/optimism/payload/Cargo.toml b/crates/optimism/payload/Cargo.toml index 8fba06f228b06..e64b72610d99c 100644 --- a/crates/optimism/payload/Cargo.toml +++ b/crates/optimism/payload/Cargo.toml @@ -18,7 +18,6 @@ reth-primitives.workspace = true reth-revm.workspace = true reth-transaction-pool.workspace = true reth-provider.workspace = true -reth-rpc-types.workspace = true reth-rpc-types-compat.workspace = true reth-evm.workspace = true reth-execution-types.workspace = true @@ -38,6 +37,7 @@ alloy-primitives.workspace = true alloy-rlp.workspace = true op-alloy-rpc-types-engine.workspace = true revm-primitives.workspace = true +alloy-rpc-types-engine.workspace = true # misc tracing.workspace = true diff --git a/crates/optimism/payload/src/payload.rs b/crates/optimism/payload/src/payload.rs index 28618026fc3d8..67eddb8e18b77 100644 --- a/crates/optimism/payload/src/payload.rs +++ b/crates/optimism/payload/src/payload.rs @@ -4,6 +4,7 @@ use alloy_primitives::{Address, B256, U256}; use alloy_rlp::Encodable; +use alloy_rpc_types_engine::{ExecutionPayloadEnvelopeV2, ExecutionPayloadV1, PayloadId}; /// Re-export for use in downstream arguments. pub use op_alloy_rpc_types_engine::OptimismPayloadAttributes; use op_alloy_rpc_types_engine::{ @@ -16,7 +17,6 @@ use reth_payload_primitives::{BuiltPayload, PayloadBuilderAttributes}; use reth_primitives::{ transaction::WithEncoded, BlobTransactionSidecar, SealedBlock, TransactionSigned, Withdrawals, }; -use reth_rpc_types::engine::{ExecutionPayloadEnvelopeV2, ExecutionPayloadV1, PayloadId}; use reth_rpc_types_compat::engine::payload::{ block_to_payload_v1, block_to_payload_v3, block_to_payload_v4, convert_block_to_payload_field_v2, diff --git a/crates/rpc/rpc-api/Cargo.toml b/crates/rpc/rpc-api/Cargo.toml index 08a23c8e8a27e..6e9e469ec4438 100644 --- a/crates/rpc/rpc-api/Cargo.toml +++ b/crates/rpc/rpc-api/Cargo.toml @@ -14,7 +14,6 @@ workspace = true [dependencies] # reth reth-primitives.workspace = true -reth-rpc-types.workspace = true reth-rpc-eth-api.workspace = true reth-engine-primitives.workspace = true reth-network-peers.workspace = true @@ -33,6 +32,7 @@ alloy-rpc-types-txpool.workspace = true alloy-rpc-types-admin.workspace = true alloy-serde.workspace = true alloy-rpc-types-beacon.workspace = true +alloy-rpc-types-engine.workspace = true # misc jsonrpsee = { workspace = true, features = ["server", "macros"] } diff --git a/crates/rpc/rpc-api/src/engine.rs b/crates/rpc/rpc-api/src/engine.rs index 5bd54ff02b199..d489d0dd7f643 100644 --- a/crates/rpc/rpc-api/src/engine.rs +++ b/crates/rpc/rpc-api/src/engine.rs @@ -9,15 +9,15 @@ use alloy_primitives::{Address, BlockHash, Bytes, B256, U256, U64}; use alloy_rpc_types::{ state::StateOverride, BlockOverrides, EIP1186AccountProofResponse, Filter, Log, SyncStatus, }; -use alloy_rpc_types_eth::transaction::TransactionRequest; -use alloy_serde::JsonStorageKey; -use jsonrpsee::{core::RpcResult, proc_macros::rpc}; -use reth_engine_primitives::EngineTypes; -use 
reth_rpc_types::engine::{ +use alloy_rpc_types_engine::{ ClientVersionV1, ExecutionPayloadBodiesV1, ExecutionPayloadBodiesV2, ExecutionPayloadInputV2, ExecutionPayloadV1, ExecutionPayloadV3, ExecutionPayloadV4, ForkchoiceState, ForkchoiceUpdated, PayloadId, PayloadStatus, TransitionConfiguration, }; +use alloy_rpc_types_eth::transaction::TransactionRequest; +use alloy_serde::JsonStorageKey; +use jsonrpsee::{core::RpcResult, proc_macros::rpc}; +use reth_engine_primitives::EngineTypes; // NOTE: We can't use associated types in the `EngineApi` trait because of jsonrpsee, so we use a // generic here. It would be nice if the rpc macro would understand which types need to have serde. // By default, if the trait has a generic, the rpc macro will add e.g. `Engine: DeserializeOwned` to diff --git a/crates/rpc/rpc-builder/Cargo.toml b/crates/rpc/rpc-builder/Cargo.toml index 04e1a281e0651..a77e9b61b8592 100644 --- a/crates/rpc/rpc-builder/Cargo.toml +++ b/crates/rpc/rpc-builder/Cargo.toml @@ -72,6 +72,7 @@ reth-rpc-types-compat.workspace = true alloy-primitives.workspace = true alloy-rpc-types-eth.workspace = true alloy-rpc-types-trace.workspace = true +alloy-rpc-types-engine.workspace = true tokio = { workspace = true, features = ["rt", "rt-multi-thread"] } serde_json.workspace = true diff --git a/crates/rpc/rpc-builder/tests/it/auth.rs b/crates/rpc/rpc-builder/tests/it/auth.rs index 79a42d121c0bb..71e8bf39f9ea2 100644 --- a/crates/rpc/rpc-builder/tests/it/auth.rs +++ b/crates/rpc/rpc-builder/tests/it/auth.rs @@ -2,12 +2,12 @@ use crate::utils::launch_auth; use alloy_primitives::U64; +use alloy_rpc_types_engine::{ForkchoiceState, PayloadId, TransitionConfiguration}; use jsonrpsee::core::client::{ClientT, SubscriptionClientT}; use reth_ethereum_engine_primitives::EthEngineTypes; use reth_primitives::Block; use reth_rpc_api::clients::EngineApiClient; use reth_rpc_layer::JwtSecret; -use reth_rpc_types::engine::{ForkchoiceState, PayloadId, TransitionConfiguration}; use reth_rpc_types_compat::engine::payload::{ block_to_payload_v1, convert_block_to_payload_input_v2, }; diff --git a/crates/rpc/rpc-builder/tests/it/utils.rs b/crates/rpc/rpc-builder/tests/it/utils.rs index dbdd2407fbb87..847de99564eff 100644 --- a/crates/rpc/rpc-builder/tests/it/utils.rs +++ b/crates/rpc/rpc-builder/tests/it/utils.rs @@ -1,5 +1,6 @@ use std::net::{Ipv4Addr, SocketAddr, SocketAddrV4}; +use alloy_rpc_types_engine::{ClientCode, ClientVersionV1}; use reth_beacon_consensus::BeaconConsensusEngineHandle; use reth_chainspec::MAINNET; use reth_ethereum_engine_primitives::{EthEngineTypes, EthereumEngineValidator}; @@ -15,7 +16,6 @@ use reth_rpc_builder::{ use reth_rpc_engine_api::{capabilities::EngineCapabilities, EngineApi}; use reth_rpc_layer::JwtSecret; use reth_rpc_server_types::RpcModuleSelection; -use reth_rpc_types::engine::{ClientCode, ClientVersionV1}; use reth_tasks::TokioTaskExecutor; use reth_transaction_pool::{ noop::NoopTransactionPool, diff --git a/crates/rpc/rpc-engine-api/Cargo.toml b/crates/rpc/rpc-engine-api/Cargo.toml index 57f0832b5ff62..2e5ec0af8ce54 100644 --- a/crates/rpc/rpc-engine-api/Cargo.toml +++ b/crates/rpc/rpc-engine-api/Cargo.toml @@ -30,6 +30,7 @@ reth-transaction-pool.workspace = true # ethereum alloy-eips.workspace = true alloy-primitives.workspace = true +alloy-rpc-types-engine = { workspace = true, features = ["jsonrpsee-types"] } # async tokio = { workspace = true, features = ["sync"] } diff --git a/crates/rpc/rpc-engine-api/src/engine_api.rs b/crates/rpc/rpc-engine-api/src/engine_api.rs index 
2b37f9d15f44b..907297de1776b 100644 --- a/crates/rpc/rpc-engine-api/src/engine_api.rs +++ b/crates/rpc/rpc-engine-api/src/engine_api.rs @@ -3,6 +3,12 @@ use crate::{ }; use alloy_eips::eip4844::BlobAndProofV1; use alloy_primitives::{BlockHash, BlockNumber, B256, U64}; +use alloy_rpc_types_engine::{ + CancunPayloadFields, ClientVersionV1, ExecutionPayload, ExecutionPayloadBodiesV1, + ExecutionPayloadBodiesV2, ExecutionPayloadInputV2, ExecutionPayloadV1, ExecutionPayloadV3, + ExecutionPayloadV4, ForkchoiceState, ForkchoiceUpdated, PayloadId, PayloadStatus, + TransitionConfiguration, +}; use async_trait::async_trait; use jsonrpsee_core::RpcResult; use reth_beacon_consensus::BeaconConsensusEngineHandle; @@ -16,12 +22,6 @@ use reth_payload_primitives::{ }; use reth_primitives::{Block, BlockHashOrNumber, EthereumHardfork}; use reth_rpc_api::EngineApiServer; -use reth_rpc_types::engine::{ - CancunPayloadFields, ClientVersionV1, ExecutionPayload, ExecutionPayloadBodiesV1, - ExecutionPayloadBodiesV2, ExecutionPayloadInputV2, ExecutionPayloadV1, ExecutionPayloadV3, - ExecutionPayloadV4, ForkchoiceState, ForkchoiceUpdated, PayloadId, PayloadStatus, - TransitionConfiguration, -}; use reth_rpc_types_compat::engine::payload::{ convert_payload_input_v2_to_payload, convert_to_payload_body_v1, convert_to_payload_body_v2, }; @@ -948,6 +948,7 @@ where #[cfg(test)] mod tests { use super::*; + use alloy_rpc_types_engine::{ClientCode, ClientVersionV1}; use assert_matches::assert_matches; use reth_beacon_consensus::{BeaconConsensusEngineEvent, BeaconEngineMessage}; use reth_chainspec::{ChainSpec, MAINNET}; @@ -955,7 +956,6 @@ mod tests { use reth_payload_builder::test_utils::spawn_test_payload_service; use reth_primitives::SealedBlock; use reth_provider::test_utils::MockEthProvider; - use reth_rpc_types::engine::{ClientCode, ClientVersionV1}; use reth_rpc_types_compat::engine::payload::execution_payload_from_sealed_block; use reth_tasks::TokioTaskExecutor; use reth_testing_utils::generators::random_block; diff --git a/crates/rpc/rpc-engine-api/src/error.rs b/crates/rpc/rpc-engine-api/src/error.rs index b0ba93d6e455a..8e86af4c9c435 100644 --- a/crates/rpc/rpc-engine-api/src/error.rs +++ b/crates/rpc/rpc-engine-api/src/error.rs @@ -195,7 +195,7 @@ impl From for jsonrpsee_types::error::ErrorObject<'static> { #[cfg(test)] mod tests { use super::*; - use reth_rpc_types::engine::ForkchoiceUpdateError; + use alloy_rpc_types_engine::ForkchoiceUpdateError; #[track_caller] fn ensure_engine_rpc_error( diff --git a/crates/rpc/rpc-engine-api/src/metrics.rs b/crates/rpc/rpc-engine-api/src/metrics.rs index 0ae97768b6c08..2c4216664ae27 100644 --- a/crates/rpc/rpc-engine-api/src/metrics.rs +++ b/crates/rpc/rpc-engine-api/src/metrics.rs @@ -1,9 +1,9 @@ use std::time::Duration; use crate::EngineApiError; +use alloy_rpc_types_engine::{ForkchoiceUpdated, PayloadStatus, PayloadStatusEnum}; use metrics::{Counter, Histogram}; use reth_metrics::Metrics; -use reth_rpc_types::engine::{ForkchoiceUpdated, PayloadStatus, PayloadStatusEnum}; /// All beacon consensus engine metrics #[derive(Default)] @@ -61,16 +61,16 @@ pub(crate) struct ForkchoiceUpdatedResponseMetrics { /// The total count of forkchoice updated messages received. pub(crate) forkchoice_updated_messages: Counter, /// The total count of forkchoice updated messages that we responded to with - /// [Invalid](reth_rpc_types::engine::PayloadStatusEnum#Invalid). + /// [`Invalid`](alloy_rpc_types_engine::PayloadStatusEnum#Invalid). 
pub(crate) forkchoice_updated_invalid: Counter, /// The total count of forkchoice updated messages that we responded to with - /// [Valid](reth_rpc_types::engine::PayloadStatusEnum#Valid). + /// [`Valid`](alloy_rpc_types_engine::PayloadStatusEnum#Valid). pub(crate) forkchoice_updated_valid: Counter, /// The total count of forkchoice updated messages that we responded to with - /// [Syncing](reth_rpc_types::engine::PayloadStatusEnum#Syncing). + /// [`Syncing`](alloy_rpc_types_engine::PayloadStatusEnum#Syncing). pub(crate) forkchoice_updated_syncing: Counter, /// The total count of forkchoice updated messages that we responded to with - /// [Accepted](reth_rpc_types::engine::PayloadStatusEnum#Accepted). + /// [`Accepted`](alloy_rpc_types_engine::PayloadStatusEnum#Accepted). pub(crate) forkchoice_updated_accepted: Counter, /// The total count of forkchoice updated messages that were unsuccessful, i.e. we responded /// with an error type that is not a [`PayloadStatusEnum`]. @@ -84,16 +84,16 @@ pub(crate) struct NewPayloadStatusResponseMetrics { /// The total count of new payload messages received. pub(crate) new_payload_messages: Counter, /// The total count of new payload messages that we responded to with - /// [Invalid](reth_rpc_types::engine::PayloadStatusEnum#Invalid). + /// [Invalid](alloy_rpc_types_engine::PayloadStatusEnum#Invalid). pub(crate) new_payload_invalid: Counter, /// The total count of new payload messages that we responded to with - /// [Valid](reth_rpc_types::engine::PayloadStatusEnum#Valid). + /// [Valid](alloy_rpc_types_engine::PayloadStatusEnum#Valid). pub(crate) new_payload_valid: Counter, /// The total count of new payload messages that we responded to with - /// [Syncing](reth_rpc_types::engine::PayloadStatusEnum#Syncing). + /// [Syncing](alloy_rpc_types_engine::PayloadStatusEnum#Syncing). pub(crate) new_payload_syncing: Counter, /// The total count of new payload messages that we responded to with - /// [Accepted](reth_rpc_types::engine::PayloadStatusEnum#Accepted). + /// [Accepted](alloy_rpc_types_engine::PayloadStatusEnum#Accepted). pub(crate) new_payload_accepted: Counter, /// The total count of new payload messages that were unsuccessful, i.e. we responded with an /// error type that is not a [`PayloadStatusEnum`]. 
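The counters above are keyed by the payload status we answer with. A hypothetical helper like the one below shows how a response could map onto them; it is a hedged sketch (the function name is made up and the crate's real recording logic may differ), using only the fields declared in this hunk and the `PayloadStatusEnum` variants from alloy-rpc-types-engine.

    // Illustrative only: map a new-payload response onto the counters above.
    fn record_new_payload_response(
        metrics: &NewPayloadStatusResponseMetrics,
        status: &PayloadStatus,
    ) {
        metrics.new_payload_messages.increment(1);
        match &status.status {
            PayloadStatusEnum::Valid => metrics.new_payload_valid.increment(1),
            PayloadStatusEnum::Invalid { .. } => metrics.new_payload_invalid.increment(1),
            PayloadStatusEnum::Syncing => metrics.new_payload_syncing.increment(1),
            PayloadStatusEnum::Accepted => metrics.new_payload_accepted.increment(1),
        }
    }
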
diff --git a/crates/rpc/rpc-engine-api/tests/it/payload.rs b/crates/rpc/rpc-engine-api/tests/it/payload.rs index ccb6878e9d5a4..e98f585c002d1 100644 --- a/crates/rpc/rpc-engine-api/tests/it/payload.rs +++ b/crates/rpc/rpc-engine-api/tests/it/payload.rs @@ -2,14 +2,14 @@ use alloy_primitives::{Bytes, U256}; use alloy_rlp::{Decodable, Error as RlpError}; +use alloy_rpc_types_engine::{ + ExecutionPayload, ExecutionPayloadBodyV1, ExecutionPayloadV1, PayloadError, +}; use assert_matches::assert_matches; use reth_primitives::{ alloy_primitives::Sealable, proofs, Block, SealedBlock, SealedHeader, TransactionSigned, Withdrawals, }; -use reth_rpc_types::engine::{ - ExecutionPayload, ExecutionPayloadBodyV1, ExecutionPayloadV1, PayloadError, -}; use reth_rpc_types_compat::engine::payload::{ block_to_payload, block_to_payload_v1, convert_to_payload_body_v1, try_into_sealed_block, try_payload_v1_to_block, diff --git a/crates/rpc/rpc-server-types/Cargo.toml b/crates/rpc/rpc-server-types/Cargo.toml index e908af0af75a9..08ecd3947742d 100644 --- a/crates/rpc/rpc-server-types/Cargo.toml +++ b/crates/rpc/rpc-server-types/Cargo.toml @@ -15,10 +15,10 @@ workspace = true reth-errors.workspace = true reth-network-api.workspace = true reth-primitives.workspace = true -reth-rpc-types.workspace = true # ethereum alloy-primitives.workspace = true +alloy-rpc-types-engine.workspace = true # rpc jsonrpsee-core.workspace = true diff --git a/crates/rpc/rpc-server-types/src/result.rs b/crates/rpc/rpc-server-types/src/result.rs index 3dc76f0d8f4bd..78e6436643a75 100644 --- a/crates/rpc/rpc-server-types/src/result.rs +++ b/crates/rpc/rpc-server-types/src/result.rs @@ -2,9 +2,9 @@ use std::fmt; +use alloy_rpc_types_engine::PayloadError; use jsonrpsee_core::RpcResult; use reth_primitives::BlockId; -use reth_rpc_types::engine::PayloadError; /// Helper trait to easily convert various `Result` types into [`RpcResult`] pub trait ToRpcResult: Sized { diff --git a/crates/rpc/rpc-types-compat/Cargo.toml b/crates/rpc/rpc-types-compat/Cargo.toml index bf569ec567ce9..1213e33ba6239 100644 --- a/crates/rpc/rpc-types-compat/Cargo.toml +++ b/crates/rpc/rpc-types-compat/Cargo.toml @@ -14,7 +14,6 @@ workspace = true [dependencies] # reth reth-primitives.workspace = true -reth-rpc-types.workspace = true reth-trie-common.workspace = true # ethereum @@ -23,6 +22,7 @@ alloy-rlp.workspace = true alloy-rpc-types.workspace = true alloy-rpc-types-eth = { workspace = true, default-features = false, features = ["serde"] } alloy-serde.workspace = true +alloy-rpc-types-engine.workspace = true [dev-dependencies] serde_json.workspace = true diff --git a/crates/rpc/rpc-types-compat/src/engine/payload.rs b/crates/rpc/rpc-types-compat/src/engine/payload.rs index f4e9dc6c77101..37b1a33f2f17d 100644 --- a/crates/rpc/rpc-types-compat/src/engine/payload.rs +++ b/crates/rpc/rpc-types-compat/src/engine/payload.rs @@ -2,16 +2,16 @@ //! 
Ethereum's Engine use alloy_primitives::{B256, U256}; +use alloy_rpc_types_engine::{ + payload::{ExecutionPayloadBodyV1, ExecutionPayloadFieldV2, ExecutionPayloadInputV2}, + ExecutionPayload, ExecutionPayloadBodyV2, ExecutionPayloadV1, ExecutionPayloadV2, + ExecutionPayloadV3, ExecutionPayloadV4, PayloadError, +}; use reth_primitives::{ constants::{EMPTY_OMMER_ROOT_HASH, MAXIMUM_EXTRA_DATA_SIZE}, proofs::{self}, Block, BlockBody, Header, Request, SealedBlock, TransactionSigned, Withdrawals, }; -use reth_rpc_types::engine::{ - payload::{ExecutionPayloadBodyV1, ExecutionPayloadFieldV2, ExecutionPayloadInputV2}, - ExecutionPayload, ExecutionPayloadBodyV2, ExecutionPayloadV1, ExecutionPayloadV2, - ExecutionPayloadV3, ExecutionPayloadV4, PayloadError, -}; /// Converts [`ExecutionPayloadV1`] to [`Block`] pub fn try_payload_v1_to_block(payload: ExecutionPayloadV1) -> Result { @@ -443,9 +443,9 @@ mod tests { validate_block_hash, }; use alloy_primitives::{b256, hex, Bytes, U256}; - use reth_rpc_types::{ - engine::{CancunPayloadFields, ExecutionPayloadV3, ExecutionPayloadV4}, - ExecutionPayload, ExecutionPayloadV1, ExecutionPayloadV2, + use alloy_rpc_types_engine::{ + CancunPayloadFields, ExecutionPayload, ExecutionPayloadV1, ExecutionPayloadV2, + ExecutionPayloadV3, ExecutionPayloadV4, }; #[test] diff --git a/crates/rpc/rpc-types/Cargo.toml b/crates/rpc/rpc-types/Cargo.toml index 54840490bb47b..f53c7c753f85f 100644 --- a/crates/rpc/rpc-types/Cargo.toml +++ b/crates/rpc/rpc-types/Cargo.toml @@ -12,10 +12,6 @@ description = "Reth RPC types" workspace = true [dependencies] - -# ethereum -alloy-rpc-types-engine = { workspace = true, features = ["std", "serde", "jsonrpsee-types"], optional = true } - # misc jsonrpsee-types = { workspace = true, optional = true } @@ -25,6 +21,4 @@ jsonrpsee-types = { workspace = true, optional = true } default = ["jsonrpsee-types"] jsonrpsee-types = [ "dep:jsonrpsee-types", - "dep:alloy-rpc-types-engine", - "alloy-rpc-types-engine/jsonrpsee-types", ] \ No newline at end of file diff --git a/crates/rpc/rpc-types/src/eth/mod.rs b/crates/rpc/rpc-types/src/eth/mod.rs index 0db9f0e414679..52d0032846ad2 100644 --- a/crates/rpc/rpc-types/src/eth/mod.rs +++ b/crates/rpc/rpc-types/src/eth/mod.rs @@ -1,7 +1,3 @@ //! Ethereum related types pub(crate) mod error; - -// re-export -#[cfg(feature = "jsonrpsee-types")] -pub use alloy_rpc_types_engine as engine; diff --git a/crates/rpc/rpc-types/src/lib.rs b/crates/rpc/rpc-types/src/lib.rs index 59196990131de..dc7f65c8d6c31 100644 --- a/crates/rpc/rpc-types/src/lib.rs +++ b/crates/rpc/rpc-types/src/lib.rs @@ -15,10 +15,3 @@ mod eth; // Ethereum specific rpc types related to typed transaction requests and the engine API. 
#[cfg(feature = "jsonrpsee-types")] pub use eth::error::ToRpcError; -#[cfg(feature = "jsonrpsee-types")] -pub use eth::{ - engine, - engine::{ - ExecutionPayload, ExecutionPayloadV1, ExecutionPayloadV2, ExecutionPayloadV3, PayloadError, - }, -}; From d891d402b760fa75929e934e9efddd3ed6244c6e Mon Sep 17 00:00:00 2001 From: joshieDo <93316087+joshieDo@users.noreply.github.com> Date: Wed, 25 Sep 2024 16:50:54 +0200 Subject: [PATCH 06/84] feat(storage): add `HeaderExt` to compact `Header` (#11166) --- crates/storage/codecs/src/alloy/header.rs | 82 ++++++++++++++++++++++- crates/storage/codecs/src/alloy/mod.rs | 3 +- 2 files changed, 81 insertions(+), 4 deletions(-) diff --git a/crates/storage/codecs/src/alloy/header.rs b/crates/storage/codecs/src/alloy/header.rs index a72021fdcc1ba..b4fc90e390a51 100644 --- a/crates/storage/codecs/src/alloy/header.rs +++ b/crates/storage/codecs/src/alloy/header.rs @@ -32,15 +32,41 @@ pub(crate) struct Header { blob_gas_used: Option, excess_blob_gas: Option, parent_beacon_block_root: Option, - requests_root: Option, + extra_fields: Option, extra_data: Bytes, } +/// [`Header`] extension struct. +/// +/// All new fields should be added here in the form of a `Option`, since `Option` is +/// used as a field of [`Header`] for backwards compatibility. +/// +/// More information: & [`reth_codecs_derive::Compact`]. +#[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Serialize, Deserialize, Compact)] +pub(crate) struct HeaderExt { + requests_root: Option, +} + +impl HeaderExt { + /// Converts into [`Some`] if any of the field exists. Otherwise, returns [`None`]. + /// + /// Required since [`Header`] uses `Option` as a field. + const fn into_option(self) -> Option { + if self.requests_root.is_some() { + Some(self) + } else { + None + } + } +} + impl Compact for AlloyHeader { fn to_compact(&self, buf: &mut B) -> usize where B: bytes::BufMut + AsMut<[u8]>, { + let extra_fields = HeaderExt { requests_root: self.requests_root }; + let header = Header { parent_hash: self.parent_hash, ommers_hash: self.ommers_hash, @@ -61,7 +87,7 @@ impl Compact for AlloyHeader { blob_gas_used: self.blob_gas_used.map(|blob_gas| blob_gas as u64), excess_blob_gas: self.excess_blob_gas.map(|excess_blob| excess_blob as u64), parent_beacon_block_root: self.parent_beacon_block_root, - requests_root: self.requests_root, + extra_fields: extra_fields.into_option(), extra_data: self.extra_data.clone(), }; header.to_compact(buf) @@ -89,7 +115,7 @@ impl Compact for AlloyHeader { blob_gas_used: header.blob_gas_used.map(Into::into), excess_blob_gas: header.excess_blob_gas.map(Into::into), parent_beacon_block_root: header.parent_beacon_block_root, - requests_root: header.requests_root, + requests_root: header.extra_fields.and_then(|h| h.requests_root), extra_data: header.extra_data, }; (alloy_header, buf) @@ -99,9 +125,59 @@ impl Compact for AlloyHeader { #[cfg(test)] mod tests { use super::*; + use alloy_primitives::{address, b256, bloom, bytes, hex}; + + /// Holesky block #1947953 + const HOLESKY_BLOCK: Header = Header { + parent_hash: b256!("8605e0c46689f66b3deed82598e43d5002b71a929023b665228728f0c6e62a95"), + ommers_hash: b256!("1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347"), + beneficiary: address!("c6e2459991bfe27cca6d86722f35da23a1e4cb97"), + state_root: b256!("edad188ca5647d62f4cca417c11a1afbadebce30d23260767f6f587e9b3b9993"), + transactions_root: b256!("4daf25dc08a841aa22aa0d3cb3e1f159d4dcaf6a6063d4d36bfac11d3fdb63ee"), + receipts_root: 
b256!("1a1500328e8ade2592bbea1e04f9a9fd8c0142d3175d6e8420984ee159abd0ed"), + withdrawals_root: Some(b256!("d0f7f22d6d915be5a3b9c0fee353f14de5ac5c8ac1850b76ce9be70b69dfe37d")), + logs_bloom: bloom!("36410880400480e1090a001c408880800019808000125124002100400048442220020000408040423088300004d0000050803000862485a02020011600a5010404143021800881e8e08c402940404002105004820c440051640000809c000011080002300208510808150101000038002500400040000230000000110442800000800204420100008110080200088c1610c0b80000c6008900000340400200200210010111020000200041a2010804801100030a0284a8463820120a0601480244521002a10201100400801101006002001000008000000ce011011041086418609002000128800008180141002003004c00800040940c00c1180ca002890040"), + difficulty: U256::ZERO, + number: 0x1db931, + gas_limit: 0x1c9c380, + gas_used: 0x440949, + timestamp: 0x66982980, + mix_hash: b256!("574db0ff0a2243b434ba2a35da8f2f72df08bca44f8733f4908d10dcaebc89f1"), + nonce: 0, + base_fee_per_gas: Some(0x8), + blob_gas_used: Some(0x60000), + excess_blob_gas: Some(0x0), + parent_beacon_block_root: Some(b256!("aa1d9606b7932f2280a19b3498b9ae9eebc6a83f1afde8e45944f79d353db4c1")), + extra_data: bytes!("726574682f76312e302e302f6c696e7578"), + extra_fields: None, + }; #[test] fn test_ensure_backwards_compatibility() { assert_eq!(Header::bitflag_encoded_bytes(), 4); + assert_eq!(HeaderExt::bitflag_encoded_bytes(), 1); + } + + #[test] + fn test_backwards_compatibility() { + let holesky_header_bytes = hex!("81a121788605e0c46689f66b3deed82598e43d5002b71a929023b665228728f0c6e62a951dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347c6e2459991bfe27cca6d86722f35da23a1e4cb97edad188ca5647d62f4cca417c11a1afbadebce30d23260767f6f587e9b3b99934daf25dc08a841aa22aa0d3cb3e1f159d4dcaf6a6063d4d36bfac11d3fdb63ee1a1500328e8ade2592bbea1e04f9a9fd8c0142d3175d6e8420984ee159abd0edd0f7f22d6d915be5a3b9c0fee353f14de5ac5c8ac1850b76ce9be70b69dfe37d36410880400480e1090a001c408880800019808000125124002100400048442220020000408040423088300004d0000050803000862485a02020011600a5010404143021800881e8e08c402940404002105004820c440051640000809c000011080002300208510808150101000038002500400040000230000000110442800000800204420100008110080200088c1610c0b80000c6008900000340400200200210010111020000200041a2010804801100030a0284a8463820120a0601480244521002a10201100400801101006002001000008000000ce011011041086418609002000128800008180141002003004c00800040940c00c1180ca0028900401db93101c9c38044094966982980574db0ff0a2243b434ba2a35da8f2f72df08bca44f8733f4908d10dcaebc89f101080306000000aa1d9606b7932f2280a19b3498b9ae9eebc6a83f1afde8e45944f79d353db4c1726574682f76312e302e302f6c696e7578"); + let (decoded_header, _) = + Header::from_compact(&holesky_header_bytes, holesky_header_bytes.len()); + + assert_eq!(decoded_header, HOLESKY_BLOCK); + + let mut encoded_header = Vec::with_capacity(holesky_header_bytes.len()); + assert_eq!(holesky_header_bytes.len(), decoded_header.to_compact(&mut encoded_header)); + assert_eq!(encoded_header, holesky_header_bytes); + } + + #[test] + fn test_extra_fields() { + let mut header = HOLESKY_BLOCK; + header.extra_fields = Some(HeaderExt { requests_root: Some(B256::random()) }); + + let mut encoded_header = vec![]; + let len = header.to_compact(&mut encoded_header); + assert_eq!(header, Header::from_compact(&encoded_header, len).0); } } diff --git a/crates/storage/codecs/src/alloy/mod.rs b/crates/storage/codecs/src/alloy/mod.rs index 8da0f7a947309..942258d0647ef 100644 --- a/crates/storage/codecs/src/alloy/mod.rs +++ b/crates/storage/codecs/src/alloy/mod.rs @@ -16,7 +16,7 @@ mod tests { 
alloy::{ authorization_list::Authorization, genesis_account::{GenesisAccount, GenesisAccountRef, StorageEntries, StorageEntry}, - header::Header, + header::{Header, HeaderExt}, transaction::{ eip1559::TxEip1559, eip2930::TxEip2930, eip4844::TxEip4844, eip7702::TxEip7702, legacy::TxLegacy, @@ -33,6 +33,7 @@ mod tests { // [`validate_bitflag_backwards_compat`] macro for detailed instructions on handling // it. validate_bitflag_backwards_compat!(Header, UnusedBits::Zero); + validate_bitflag_backwards_compat!(HeaderExt, UnusedBits::NotZero); validate_bitflag_backwards_compat!(TxEip2930, UnusedBits::Zero); validate_bitflag_backwards_compat!(StorageEntries, UnusedBits::Zero); validate_bitflag_backwards_compat!(StorageEntry, UnusedBits::Zero); From 63f56734ec90186b8db5456f65e7eb3b0c5799d3 Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Wed, 25 Sep 2024 16:00:55 +0100 Subject: [PATCH 07/84] refactor(exex): move notifications to a separate module (#11212) --- crates/exex/exex/src/lib.rs | 3 + crates/exex/exex/src/manager.rs | 570 +------------------------ crates/exex/exex/src/notifications.rs | 573 ++++++++++++++++++++++++++ 3 files changed, 586 insertions(+), 560 deletions(-) create mode 100644 crates/exex/exex/src/notifications.rs diff --git a/crates/exex/exex/src/lib.rs b/crates/exex/exex/src/lib.rs index 4a819767a7121..edc9e40d449d8 100644 --- a/crates/exex/exex/src/lib.rs +++ b/crates/exex/exex/src/lib.rs @@ -46,6 +46,9 @@ pub use event::*; mod manager; pub use manager::*; +mod notifications; +pub use notifications::*; + mod wal; pub use wal::*; diff --git a/crates/exex/exex/src/manager.rs b/crates/exex/exex/src/manager.rs index a775765c0278c..3230e003b28db 100644 --- a/crates/exex/exex/src/manager.rs +++ b/crates/exex/exex/src/manager.rs @@ -1,18 +1,11 @@ -use crate::{ - wal::Wal, BackfillJobFactory, ExExEvent, ExExNotification, FinishedExExHeight, - StreamBackfillJob, -}; -use alloy_primitives::{BlockNumber, U256}; -use eyre::OptionExt; -use futures::{Stream, StreamExt}; +use crate::{wal::Wal, ExExEvent, ExExNotification, ExExNotifications, FinishedExExHeight}; +use alloy_primitives::BlockNumber; +use futures::StreamExt; use metrics::Gauge; use reth_chain_state::ForkChoiceStream; use reth_chainspec::Head; -use reth_evm::execute::BlockExecutorProvider; -use reth_exex_types::ExExHead; use reth_metrics::{metrics::Counter, Metrics}; use reth_primitives::SealedHeader; -use reth_provider::{BlockReader, Chain, HeaderProvider, StateProviderFactory}; use reth_tracing::tracing::debug; use std::{ collections::VecDeque, @@ -26,7 +19,7 @@ use std::{ task::{ready, Context, Poll}, }; use tokio::sync::{ - mpsc::{self, error::SendError, Receiver, UnboundedReceiver, UnboundedSender}, + mpsc::{self, error::SendError, UnboundedReceiver, UnboundedSender}, watch, }; use tokio_util::sync::{PollSendError, PollSender, ReusableBoxFuture}; @@ -68,7 +61,7 @@ impl ExExHandle { /// Create a new handle for the given `ExEx`. /// /// Returns the handle, as well as a [`UnboundedSender`] for [`ExExEvent`]s and a - /// [`Receiver`] for [`ExExNotification`]s that should be given to the `ExEx`. + /// [`mpsc::Receiver`] for [`ExExNotification`]s that should be given to the `ExEx`. 
pub fn new( id: String, node_head: Head, @@ -77,8 +70,7 @@ impl ExExHandle { ) -> (Self, UnboundedSender, ExExNotifications) { let (notification_tx, notification_rx) = mpsc::channel(1); let (event_tx, event_rx) = mpsc::unbounded_channel(); - let notifications = - ExExNotifications { node_head, provider, executor, notifications: notification_rx }; + let notifications = ExExNotifications::new(node_head, provider, executor, notification_rx); ( Self { @@ -156,328 +148,6 @@ impl ExExHandle { } } -/// A stream of [`ExExNotification`]s. The stream will emit notifications for all blocks. -pub struct ExExNotifications { - node_head: Head, - provider: P, - executor: E, - notifications: Receiver, -} - -impl Debug for ExExNotifications { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("ExExNotifications") - .field("provider", &self.provider) - .field("executor", &self.executor) - .field("notifications", &self.notifications) - .finish() - } -} - -impl ExExNotifications { - /// Creates a new instance of [`ExExNotifications`]. - pub const fn new( - node_head: Head, - provider: P, - executor: E, - notifications: Receiver, - ) -> Self { - Self { node_head, provider, executor, notifications } - } - - /// Receives the next value for this receiver. - /// - /// This method returns `None` if the channel has been closed and there are - /// no remaining messages in the channel's buffer. This indicates that no - /// further values can ever be received from this `Receiver`. The channel is - /// closed when all senders have been dropped, or when [`Receiver::close`] is called. - /// - /// # Cancel safety - /// - /// This method is cancel safe. If `recv` is used as the event in a - /// [`tokio::select!`] statement and some other branch - /// completes first, it is guaranteed that no messages were received on this - /// channel. - /// - /// For full documentation, see [`Receiver::recv`]. - #[deprecated(note = "use `ExExNotifications::next` and its `Stream` implementation instead")] - pub async fn recv(&mut self) -> Option { - self.notifications.recv().await - } - - /// Polls to receive the next message on this channel. - /// - /// This method returns: - /// - /// * `Poll::Pending` if no messages are available but the channel is not closed, or if a - /// spurious failure happens. - /// * `Poll::Ready(Some(message))` if a message is available. - /// * `Poll::Ready(None)` if the channel has been closed and all messages sent before it was - /// closed have been received. - /// - /// When the method returns `Poll::Pending`, the `Waker` in the provided - /// `Context` is scheduled to receive a wakeup when a message is sent on any - /// receiver, or when the channel is closed. Note that on multiple calls to - /// `poll_recv` or `poll_recv_many`, only the `Waker` from the `Context` - /// passed to the most recent call is scheduled to receive a wakeup. - /// - /// If this method returns `Poll::Pending` due to a spurious failure, then - /// the `Waker` will be notified when the situation causing the spurious - /// failure has been resolved. Note that receiving such a wakeup does not - /// guarantee that the next call will succeed — it could fail with another - /// spurious failure. - /// - /// For full documentation, see [`Receiver::poll_recv`]. 
- #[deprecated( - note = "use `ExExNotifications::poll_next` and its `Stream` implementation instead" - )] - pub fn poll_recv(&mut self, cx: &mut Context<'_>) -> Poll> { - self.notifications.poll_recv(cx) - } -} - -impl ExExNotifications -where - P: BlockReader + HeaderProvider + StateProviderFactory + Clone + Unpin + 'static, - E: BlockExecutorProvider + Clone + Unpin + 'static, -{ - /// Subscribe to notifications with the given head. - /// - /// Notifications will be sent starting from the head, not inclusive. For example, if - /// `head.number == 10`, then the first notification will be with `block.number == 11`. - pub fn with_head(self, head: ExExHead) -> ExExNotificationsWithHead { - ExExNotificationsWithHead::new( - self.node_head, - self.provider, - self.executor, - self.notifications, - head, - ) - } -} - -impl Stream for ExExNotifications { - type Item = ExExNotification; - - fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { - self.get_mut().notifications.poll_recv(cx) - } -} - -/// A stream of [`ExExNotification`]s. The stream will only emit notifications for blocks that are -/// committed or reverted after the given head. -#[derive(Debug)] -pub struct ExExNotificationsWithHead { - node_head: Head, - provider: P, - executor: E, - notifications: Receiver, - exex_head: ExExHead, - pending_sync: bool, - /// The backfill job to run before consuming any notifications. - backfill_job: Option>, - /// Whether we're currently waiting for the node head to catch up to the same height as the - /// ExEx head. - node_head_catchup_in_progress: bool, -} - -impl ExExNotificationsWithHead -where - P: BlockReader + HeaderProvider + StateProviderFactory + Clone + Unpin + 'static, - E: BlockExecutorProvider + Clone + Unpin + 'static, -{ - /// Creates a new [`ExExNotificationsWithHead`]. - pub const fn new( - node_head: Head, - provider: P, - executor: E, - notifications: Receiver, - exex_head: ExExHead, - ) -> Self { - Self { - node_head, - provider, - executor, - notifications, - exex_head, - pending_sync: true, - backfill_job: None, - node_head_catchup_in_progress: false, - } - } - - /// Compares the node head against the ExEx head, and synchronizes them in case of a mismatch. - /// - /// Possible situations are: - /// - ExEx is behind the node head (`node_head.number < exex_head.number`). - /// - ExEx is on the canonical chain (`exex_head.hash` is found in the node database). - /// Backfill from the node database. - /// - ExEx is not on the canonical chain (`exex_head.hash` is not found in the node database). - /// Unwind the ExEx to the first block matching between the ExEx and the node, and then - /// bacfkill from the node database. - /// - ExEx is at the same block number (`node_head.number == exex_head.number`). - /// - ExEx is on the canonical chain (`exex_head.hash` is found in the node database). Nothing - /// to do. - /// - ExEx is not on the canonical chain (`exex_head.hash` is not found in the node database). - /// Unwind the ExEx to the first block matching between the ExEx and the node, and then - /// backfill from the node database. - /// - ExEx is ahead of the node head (`node_head.number > exex_head.number`). Wait until the - /// node head catches up to the ExEx head, and then repeat the synchronization process. 
- fn synchronize(&mut self) -> eyre::Result<()> { - debug!(target: "exex::manager", "Synchronizing ExEx head"); - - let backfill_job_factory = - BackfillJobFactory::new(self.executor.clone(), self.provider.clone()); - match self.exex_head.block.number.cmp(&self.node_head.number) { - std::cmp::Ordering::Less => { - // ExEx is behind the node head - - if let Some(exex_header) = self.provider.header(&self.exex_head.block.hash)? { - // ExEx is on the canonical chain - debug!(target: "exex::manager", "ExEx is behind the node head and on the canonical chain"); - - if exex_header.number != self.exex_head.block.number { - eyre::bail!("ExEx head number does not match the hash") - } - - // ExEx is on the canonical chain, start backfill - let backfill = backfill_job_factory - .backfill(self.exex_head.block.number + 1..=self.node_head.number) - .into_stream(); - self.backfill_job = Some(backfill); - } else { - debug!(target: "exex::manager", "ExEx is behind the node head and not on the canonical chain"); - // ExEx is not on the canonical chain, first unwind it and then backfill - - // TODO(alexey): unwind and backfill - self.backfill_job = None; - } - } - #[allow(clippy::branches_sharing_code)] - std::cmp::Ordering::Equal => { - // ExEx is at the same block height as the node head - - if let Some(exex_header) = self.provider.header(&self.exex_head.block.hash)? { - // ExEx is on the canonical chain - debug!(target: "exex::manager", "ExEx is at the same block height as the node head and on the canonical chain"); - - if exex_header.number != self.exex_head.block.number { - eyre::bail!("ExEx head number does not match the hash") - } - - // ExEx is on the canonical chain and the same as the node head, no need to - // backfill - self.backfill_job = None; - } else { - // ExEx is not on the canonical chain, first unwind it and then backfill - debug!(target: "exex::manager", "ExEx is at the same block height as the node head but not on the canonical chain"); - - // TODO(alexey): unwind and backfill - self.backfill_job = None; - } - } - std::cmp::Ordering::Greater => { - debug!(target: "exex::manager", "ExEx is ahead of the node head"); - - // ExEx is ahead of the node head - - // TODO(alexey): wait until the node head is at the same height as the ExEx head - // and then repeat the process above - self.node_head_catchup_in_progress = true; - } - }; - - Ok(()) - } -} - -impl Stream for ExExNotificationsWithHead -where - P: BlockReader + HeaderProvider + StateProviderFactory + Clone + Unpin + 'static, - E: BlockExecutorProvider + Clone + Unpin + 'static, -{ - type Item = eyre::Result; - - fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { - let this = self.get_mut(); - - if this.pending_sync { - this.synchronize()?; - this.pending_sync = false; - } - - if let Some(backfill_job) = &mut this.backfill_job { - if let Some(chain) = ready!(backfill_job.poll_next_unpin(cx)) { - return Poll::Ready(Some(Ok(ExExNotification::ChainCommitted { - new: Arc::new(chain?), - }))) - } - - // Backfill job is done, remove it - this.backfill_job = None; - } - - loop { - let Some(notification) = ready!(this.notifications.poll_recv(cx)) else { - return Poll::Ready(None) - }; - - // 1. Either committed or reverted chain from the notification. - // 2. Block number of the tip of the canonical chain: - // - For committed chain, it's the tip block number. - // - For reverted chain, it's the block number preceding the first block in the chain. 
- let (chain, tip) = notification - .committed_chain() - .map(|chain| (chain.clone(), chain.tip().number)) - .or_else(|| { - notification - .reverted_chain() - .map(|chain| (chain.clone(), chain.first().number - 1)) - }) - .unzip(); - - if this.node_head_catchup_in_progress { - // If we are waiting for the node head to catch up to the same height as the ExEx - // head, then we need to check if the ExEx is on the canonical chain. - - // Query the chain from the new notification for the ExEx head block number. - let exex_head_block = chain - .as_ref() - .and_then(|chain| chain.blocks().get(&this.exex_head.block.number)); - - // Compare the hash of the block from the new notification to the ExEx head - // hash. - if let Some((block, tip)) = exex_head_block.zip(tip) { - if block.hash() == this.exex_head.block.hash { - // ExEx is on the canonical chain, proceed with the notification - this.node_head_catchup_in_progress = false; - } else { - // ExEx is not on the canonical chain, synchronize - let tip = - this.provider.sealed_header(tip)?.ok_or_eyre("node head not found")?; - this.node_head = Head::new( - tip.number, - tip.hash(), - tip.difficulty, - U256::MAX, - tip.timestamp, - ); - this.synchronize()?; - } - } - } - - if notification - .committed_chain() - .or_else(|| notification.reverted_chain()) - .map_or(false, |chain| chain.first().number > this.exex_head.block.number) - { - return Poll::Ready(Some(Ok(notification))) - } - } - } -} - /// Metrics for the `ExEx` manager. #[derive(Metrics)] #[metrics(scope = "exex_manager")] @@ -836,15 +506,11 @@ impl Clone for ExExManagerHandle { mod tests { use super::*; use alloy_primitives::B256; + use eyre::OptionExt; use futures::StreamExt; - use reth_db_common::init::init_genesis; - use reth_evm_ethereum::execute::EthExecutorProvider; - use reth_primitives::{Block, BlockNumHash, Header, SealedBlockWithSenders}; - use reth_provider::{ - providers::BlockchainProvider2, test_utils::create_test_provider_factory, BlockReader, - BlockWriter, Chain, - }; - use reth_testing_utils::generators::{self, random_block, BlockParams}; + use reth_primitives::SealedBlockWithSenders; + use reth_provider::Chain; + use reth_testing_utils::generators::{self, random_block}; fn empty_finalized_header_stream() -> ForkChoiceStream { let (tx, rx) = watch::channel(None); @@ -1359,220 +1025,4 @@ mod tests { Ok(()) } - - #[tokio::test] - async fn exex_notifications_behind_head_canonical() -> eyre::Result<()> { - let mut rng = generators::rng(); - - let provider_factory = create_test_provider_factory(); - let genesis_hash = init_genesis(&provider_factory)?; - let genesis_block = provider_factory - .block(genesis_hash.into())? 
- .ok_or_else(|| eyre::eyre!("genesis block not found"))?; - - let provider = BlockchainProvider2::new(provider_factory.clone())?; - - let node_head_block = random_block( - &mut rng, - genesis_block.number + 1, - BlockParams { parent: Some(genesis_hash), tx_count: Some(0), ..Default::default() }, - ); - let provider_rw = provider_factory.provider_rw()?; - provider_rw.insert_block( - node_head_block.clone().seal_with_senders().ok_or_eyre("failed to recover senders")?, - )?; - provider_rw.commit()?; - - let node_head = Head { - number: node_head_block.number, - hash: node_head_block.hash(), - ..Default::default() - }; - let exex_head = - ExExHead { block: BlockNumHash { number: genesis_block.number, hash: genesis_hash } }; - - let notification = ExExNotification::ChainCommitted { - new: Arc::new(Chain::new( - vec![random_block( - &mut rng, - node_head.number + 1, - BlockParams { parent: Some(node_head.hash), ..Default::default() }, - ) - .seal_with_senders() - .ok_or_eyre("failed to recover senders")?], - Default::default(), - None, - )), - }; - - let (notifications_tx, notifications_rx) = mpsc::channel(1); - - notifications_tx.send(notification.clone()).await?; - - let mut notifications = ExExNotifications::new( - node_head, - provider, - EthExecutorProvider::mainnet(), - notifications_rx, - ) - .with_head(exex_head); - - // First notification is the backfill of missing blocks from the canonical chain - assert_eq!( - notifications.next().await.transpose()?, - Some(ExExNotification::ChainCommitted { - new: Arc::new( - BackfillJobFactory::new( - notifications.executor.clone(), - notifications.provider.clone() - ) - .backfill(1..=1) - .next() - .ok_or_eyre("failed to backfill")?? - ) - }) - ); - - // Second notification is the actual notification that we sent before - assert_eq!(notifications.next().await.transpose()?, Some(notification)); - - Ok(()) - } - - #[ignore] - #[tokio::test] - async fn exex_notifications_behind_head_non_canonical() -> eyre::Result<()> { - Ok(()) - } - - #[tokio::test] - async fn exex_notifications_same_head_canonical() -> eyre::Result<()> { - let provider_factory = create_test_provider_factory(); - let genesis_hash = init_genesis(&provider_factory)?; - let genesis_block = provider_factory - .block(genesis_hash.into())? 
- .ok_or_else(|| eyre::eyre!("genesis block not found"))?; - - let provider = BlockchainProvider2::new(provider_factory)?; - - let node_head = - Head { number: genesis_block.number, hash: genesis_hash, ..Default::default() }; - let exex_head = - ExExHead { block: BlockNumHash { number: node_head.number, hash: node_head.hash } }; - - let notification = ExExNotification::ChainCommitted { - new: Arc::new(Chain::new( - vec![Block { - header: Header { - parent_hash: node_head.hash, - number: node_head.number + 1, - ..Default::default() - }, - ..Default::default() - } - .seal_slow() - .seal_with_senders() - .ok_or_eyre("failed to recover senders")?], - Default::default(), - None, - )), - }; - - let (notifications_tx, notifications_rx) = mpsc::channel(1); - - notifications_tx.send(notification.clone()).await?; - - let mut notifications = ExExNotifications::new( - node_head, - provider, - EthExecutorProvider::mainnet(), - notifications_rx, - ) - .with_head(exex_head); - - let new_notification = notifications.next().await.transpose()?; - assert_eq!(new_notification, Some(notification)); - - Ok(()) - } - - #[ignore] - #[tokio::test] - async fn exex_notifications_same_head_non_canonical() -> eyre::Result<()> { - Ok(()) - } - - #[tokio::test] - async fn test_notifications_ahead_of_head() -> eyre::Result<()> { - let mut rng = generators::rng(); - - let provider_factory = create_test_provider_factory(); - let genesis_hash = init_genesis(&provider_factory)?; - let genesis_block = provider_factory - .block(genesis_hash.into())? - .ok_or_else(|| eyre::eyre!("genesis block not found"))?; - - let provider = BlockchainProvider2::new(provider_factory)?; - - let exex_head_block = random_block( - &mut rng, - genesis_block.number + 1, - BlockParams { parent: Some(genesis_hash), tx_count: Some(0), ..Default::default() }, - ); - - let node_head = - Head { number: genesis_block.number, hash: genesis_hash, ..Default::default() }; - let exex_head = ExExHead { - block: BlockNumHash { number: exex_head_block.number, hash: exex_head_block.hash() }, - }; - - let (notifications_tx, notifications_rx) = mpsc::channel(1); - - notifications_tx - .send(ExExNotification::ChainCommitted { - new: Arc::new(Chain::new( - vec![exex_head_block - .clone() - .seal_with_senders() - .ok_or_eyre("failed to recover senders")?], - Default::default(), - None, - )), - }) - .await?; - - let mut notifications = ExExNotifications::new( - node_head, - provider, - EthExecutorProvider::mainnet(), - notifications_rx, - ) - .with_head(exex_head); - - // First notification is skipped because the node is catching up with the ExEx - let new_notification = poll_fn(|cx| Poll::Ready(notifications.poll_next_unpin(cx))).await; - assert!(new_notification.is_pending()); - - // Imitate the node catching up with the ExEx by sending a notification for the missing - // block - let notification = ExExNotification::ChainCommitted { - new: Arc::new(Chain::new( - vec![random_block( - &mut rng, - exex_head_block.number + 1, - BlockParams { parent: Some(exex_head_block.hash()), ..Default::default() }, - ) - .seal_with_senders() - .ok_or_eyre("failed to recover senders")?], - Default::default(), - None, - )), - }; - notifications_tx.send(notification.clone()).await?; - - // Second notification is received because the node caught up with the ExEx - assert_eq!(notifications.next().await.transpose()?, Some(notification)); - - Ok(()) - } } diff --git a/crates/exex/exex/src/notifications.rs b/crates/exex/exex/src/notifications.rs new file mode 100644 index 
0000000000000..ce954e42a33a8 --- /dev/null +++ b/crates/exex/exex/src/notifications.rs @@ -0,0 +1,573 @@ +use crate::{BackfillJobFactory, ExExNotification, StreamBackfillJob}; +use alloy_primitives::U256; +use eyre::OptionExt; +use futures::{Stream, StreamExt}; +use reth_chainspec::Head; +use reth_evm::execute::BlockExecutorProvider; +use reth_exex_types::ExExHead; +use reth_provider::{BlockReader, Chain, HeaderProvider, StateProviderFactory}; +use reth_tracing::tracing::debug; +use std::{ + fmt::Debug, + pin::Pin, + sync::Arc, + task::{ready, Context, Poll}, +}; +use tokio::sync::mpsc::Receiver; + +/// A stream of [`ExExNotification`]s. The stream will emit notifications for all blocks. +pub struct ExExNotifications { + node_head: Head, + provider: P, + executor: E, + notifications: Receiver, +} + +impl Debug for ExExNotifications { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("ExExNotifications") + .field("provider", &self.provider) + .field("executor", &self.executor) + .field("notifications", &self.notifications) + .finish() + } +} + +impl ExExNotifications { + /// Creates a new instance of [`ExExNotifications`]. + pub const fn new( + node_head: Head, + provider: P, + executor: E, + notifications: Receiver, + ) -> Self { + Self { node_head, provider, executor, notifications } + } + + /// Receives the next value for this receiver. + /// + /// This method returns `None` if the channel has been closed and there are + /// no remaining messages in the channel's buffer. This indicates that no + /// further values can ever be received from this `Receiver`. The channel is + /// closed when all senders have been dropped, or when [`Receiver::close`] is called. + /// + /// # Cancel safety + /// + /// This method is cancel safe. If `recv` is used as the event in a + /// [`tokio::select!`] statement and some other branch + /// completes first, it is guaranteed that no messages were received on this + /// channel. + /// + /// For full documentation, see [`Receiver::recv`]. + #[deprecated(note = "use `ExExNotifications::next` and its `Stream` implementation instead")] + pub async fn recv(&mut self) -> Option { + self.notifications.recv().await + } + + /// Polls to receive the next message on this channel. + /// + /// This method returns: + /// + /// * `Poll::Pending` if no messages are available but the channel is not closed, or if a + /// spurious failure happens. + /// * `Poll::Ready(Some(message))` if a message is available. + /// * `Poll::Ready(None)` if the channel has been closed and all messages sent before it was + /// closed have been received. + /// + /// When the method returns `Poll::Pending`, the `Waker` in the provided + /// `Context` is scheduled to receive a wakeup when a message is sent on any + /// receiver, or when the channel is closed. Note that on multiple calls to + /// `poll_recv` or `poll_recv_many`, only the `Waker` from the `Context` + /// passed to the most recent call is scheduled to receive a wakeup. + /// + /// If this method returns `Poll::Pending` due to a spurious failure, then + /// the `Waker` will be notified when the situation causing the spurious + /// failure has been resolved. Note that receiving such a wakeup does not + /// guarantee that the next call will succeed — it could fail with another + /// spurious failure. + /// + /// For full documentation, see [`Receiver::poll_recv`]. 
+ #[deprecated( + note = "use `ExExNotifications::poll_next` and its `Stream` implementation instead" + )] + pub fn poll_recv(&mut self, cx: &mut Context<'_>) -> Poll> { + self.notifications.poll_recv(cx) + } +} + +impl ExExNotifications +where + P: BlockReader + HeaderProvider + StateProviderFactory + Clone + Unpin + 'static, + E: BlockExecutorProvider + Clone + Unpin + 'static, +{ + /// Subscribe to notifications with the given head. + /// + /// Notifications will be sent starting from the head, not inclusive. For example, if + /// `head.number == 10`, then the first notification will be with `block.number == 11`. + pub fn with_head(self, head: ExExHead) -> ExExNotificationsWithHead { + ExExNotificationsWithHead::new( + self.node_head, + self.provider, + self.executor, + self.notifications, + head, + ) + } +} + +impl Stream for ExExNotifications { + type Item = ExExNotification; + + fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + self.get_mut().notifications.poll_recv(cx) + } +} + +/// A stream of [`ExExNotification`]s. The stream will only emit notifications for blocks that are +/// committed or reverted after the given head. +#[derive(Debug)] +pub struct ExExNotificationsWithHead { + node_head: Head, + provider: P, + executor: E, + notifications: Receiver, + exex_head: ExExHead, + pending_sync: bool, + /// The backfill job to run before consuming any notifications. + backfill_job: Option>, + /// Whether we're currently waiting for the node head to catch up to the same height as the + /// ExEx head. + node_head_catchup_in_progress: bool, +} + +impl ExExNotificationsWithHead +where + P: BlockReader + HeaderProvider + StateProviderFactory + Clone + Unpin + 'static, + E: BlockExecutorProvider + Clone + Unpin + 'static, +{ + /// Creates a new [`ExExNotificationsWithHead`]. + pub const fn new( + node_head: Head, + provider: P, + executor: E, + notifications: Receiver, + exex_head: ExExHead, + ) -> Self { + Self { + node_head, + provider, + executor, + notifications, + exex_head, + pending_sync: true, + backfill_job: None, + node_head_catchup_in_progress: false, + } + } + + /// Compares the node head against the ExEx head, and synchronizes them in case of a mismatch. + /// + /// Possible situations are: + /// - ExEx is behind the node head (`node_head.number < exex_head.number`). + /// - ExEx is on the canonical chain (`exex_head.hash` is found in the node database). + /// Backfill from the node database. + /// - ExEx is not on the canonical chain (`exex_head.hash` is not found in the node database). + /// Unwind the ExEx to the first block matching between the ExEx and the node, and then + /// bacfkill from the node database. + /// - ExEx is at the same block number (`node_head.number == exex_head.number`). + /// - ExEx is on the canonical chain (`exex_head.hash` is found in the node database). Nothing + /// to do. + /// - ExEx is not on the canonical chain (`exex_head.hash` is not found in the node database). + /// Unwind the ExEx to the first block matching between the ExEx and the node, and then + /// backfill from the node database. + /// - ExEx is ahead of the node head (`node_head.number > exex_head.number`). Wait until the + /// node head catches up to the ExEx head, and then repeat the synchronization process. 
+ fn synchronize(&mut self) -> eyre::Result<()> { + debug!(target: "exex::manager", "Synchronizing ExEx head"); + + let backfill_job_factory = + BackfillJobFactory::new(self.executor.clone(), self.provider.clone()); + match self.exex_head.block.number.cmp(&self.node_head.number) { + std::cmp::Ordering::Less => { + // ExEx is behind the node head + + if let Some(exex_header) = self.provider.header(&self.exex_head.block.hash)? { + // ExEx is on the canonical chain + debug!(target: "exex::manager", "ExEx is behind the node head and on the canonical chain"); + + if exex_header.number != self.exex_head.block.number { + eyre::bail!("ExEx head number does not match the hash") + } + + // ExEx is on the canonical chain, start backfill + let backfill = backfill_job_factory + .backfill(self.exex_head.block.number + 1..=self.node_head.number) + .into_stream(); + self.backfill_job = Some(backfill); + } else { + debug!(target: "exex::manager", "ExEx is behind the node head and not on the canonical chain"); + // ExEx is not on the canonical chain, first unwind it and then backfill + + // TODO(alexey): unwind and backfill + self.backfill_job = None; + } + } + #[allow(clippy::branches_sharing_code)] + std::cmp::Ordering::Equal => { + // ExEx is at the same block height as the node head + + if let Some(exex_header) = self.provider.header(&self.exex_head.block.hash)? { + // ExEx is on the canonical chain + debug!(target: "exex::manager", "ExEx is at the same block height as the node head and on the canonical chain"); + + if exex_header.number != self.exex_head.block.number { + eyre::bail!("ExEx head number does not match the hash") + } + + // ExEx is on the canonical chain and the same as the node head, no need to + // backfill + self.backfill_job = None; + } else { + // ExEx is not on the canonical chain, first unwind it and then backfill + debug!(target: "exex::manager", "ExEx is at the same block height as the node head but not on the canonical chain"); + + // TODO(alexey): unwind and backfill + self.backfill_job = None; + } + } + std::cmp::Ordering::Greater => { + debug!(target: "exex::manager", "ExEx is ahead of the node head"); + + // ExEx is ahead of the node head + + // TODO(alexey): wait until the node head is at the same height as the ExEx head + // and then repeat the process above + self.node_head_catchup_in_progress = true; + } + }; + + Ok(()) + } +} + +impl Stream for ExExNotificationsWithHead +where + P: BlockReader + HeaderProvider + StateProviderFactory + Clone + Unpin + 'static, + E: BlockExecutorProvider + Clone + Unpin + 'static, +{ + type Item = eyre::Result; + + fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + let this = self.get_mut(); + + if this.pending_sync { + this.synchronize()?; + this.pending_sync = false; + } + + if let Some(backfill_job) = &mut this.backfill_job { + if let Some(chain) = ready!(backfill_job.poll_next_unpin(cx)) { + return Poll::Ready(Some(Ok(ExExNotification::ChainCommitted { + new: Arc::new(chain?), + }))) + } + + // Backfill job is done, remove it + this.backfill_job = None; + } + + loop { + let Some(notification) = ready!(this.notifications.poll_recv(cx)) else { + return Poll::Ready(None) + }; + + // 1. Either committed or reverted chain from the notification. + // 2. Block number of the tip of the canonical chain: + // - For committed chain, it's the tip block number. + // - For reverted chain, it's the block number preceding the first block in the chain. 
+ let (chain, tip) = notification + .committed_chain() + .map(|chain| (chain.clone(), chain.tip().number)) + .or_else(|| { + notification + .reverted_chain() + .map(|chain| (chain.clone(), chain.first().number - 1)) + }) + .unzip(); + + if this.node_head_catchup_in_progress { + // If we are waiting for the node head to catch up to the same height as the ExEx + // head, then we need to check if the ExEx is on the canonical chain. + + // Query the chain from the new notification for the ExEx head block number. + let exex_head_block = chain + .as_ref() + .and_then(|chain| chain.blocks().get(&this.exex_head.block.number)); + + // Compare the hash of the block from the new notification to the ExEx head + // hash. + if let Some((block, tip)) = exex_head_block.zip(tip) { + if block.hash() == this.exex_head.block.hash { + // ExEx is on the canonical chain, proceed with the notification + this.node_head_catchup_in_progress = false; + } else { + // ExEx is not on the canonical chain, synchronize + let tip = + this.provider.sealed_header(tip)?.ok_or_eyre("node head not found")?; + this.node_head = Head::new( + tip.number, + tip.hash(), + tip.difficulty, + U256::MAX, + tip.timestamp, + ); + this.synchronize()?; + } + } + } + + if notification + .committed_chain() + .or_else(|| notification.reverted_chain()) + .map_or(false, |chain| chain.first().number > this.exex_head.block.number) + { + return Poll::Ready(Some(Ok(notification))) + } + } + } +} + +#[cfg(test)] +mod tests { + use std::future::poll_fn; + + use super::*; + use alloy_consensus::Header; + use eyre::OptionExt; + use futures::StreamExt; + use reth_db_common::init::init_genesis; + use reth_evm_ethereum::execute::EthExecutorProvider; + use reth_primitives::{Block, BlockNumHash}; + use reth_provider::{ + providers::BlockchainProvider2, test_utils::create_test_provider_factory, BlockWriter, + Chain, + }; + use reth_testing_utils::generators::{self, random_block, BlockParams}; + use tokio::sync::mpsc; + + #[tokio::test] + async fn exex_notifications_behind_head_canonical() -> eyre::Result<()> { + let mut rng = generators::rng(); + + let provider_factory = create_test_provider_factory(); + let genesis_hash = init_genesis(&provider_factory)?; + let genesis_block = provider_factory + .block(genesis_hash.into())? 
+ .ok_or_else(|| eyre::eyre!("genesis block not found"))?; + + let provider = BlockchainProvider2::new(provider_factory.clone())?; + + let node_head_block = random_block( + &mut rng, + genesis_block.number + 1, + BlockParams { parent: Some(genesis_hash), tx_count: Some(0), ..Default::default() }, + ); + let provider_rw = provider_factory.provider_rw()?; + provider_rw.insert_block( + node_head_block.clone().seal_with_senders().ok_or_eyre("failed to recover senders")?, + )?; + provider_rw.commit()?; + + let node_head = Head { + number: node_head_block.number, + hash: node_head_block.hash(), + ..Default::default() + }; + let exex_head = + ExExHead { block: BlockNumHash { number: genesis_block.number, hash: genesis_hash } }; + + let notification = ExExNotification::ChainCommitted { + new: Arc::new(Chain::new( + vec![random_block( + &mut rng, + node_head.number + 1, + BlockParams { parent: Some(node_head.hash), ..Default::default() }, + ) + .seal_with_senders() + .ok_or_eyre("failed to recover senders")?], + Default::default(), + None, + )), + }; + + let (notifications_tx, notifications_rx) = mpsc::channel(1); + + notifications_tx.send(notification.clone()).await?; + + let mut notifications = ExExNotifications::new( + node_head, + provider, + EthExecutorProvider::mainnet(), + notifications_rx, + ) + .with_head(exex_head); + + // First notification is the backfill of missing blocks from the canonical chain + assert_eq!( + notifications.next().await.transpose()?, + Some(ExExNotification::ChainCommitted { + new: Arc::new( + BackfillJobFactory::new( + notifications.executor.clone(), + notifications.provider.clone() + ) + .backfill(1..=1) + .next() + .ok_or_eyre("failed to backfill")?? + ) + }) + ); + + // Second notification is the actual notification that we sent before + assert_eq!(notifications.next().await.transpose()?, Some(notification)); + + Ok(()) + } + + #[ignore] + #[tokio::test] + async fn exex_notifications_behind_head_non_canonical() -> eyre::Result<()> { + Ok(()) + } + + #[tokio::test] + async fn exex_notifications_same_head_canonical() -> eyre::Result<()> { + let provider_factory = create_test_provider_factory(); + let genesis_hash = init_genesis(&provider_factory)?; + let genesis_block = provider_factory + .block(genesis_hash.into())? 
+ .ok_or_else(|| eyre::eyre!("genesis block not found"))?; + + let provider = BlockchainProvider2::new(provider_factory)?; + + let node_head = + Head { number: genesis_block.number, hash: genesis_hash, ..Default::default() }; + let exex_head = + ExExHead { block: BlockNumHash { number: node_head.number, hash: node_head.hash } }; + + let notification = ExExNotification::ChainCommitted { + new: Arc::new(Chain::new( + vec![Block { + header: Header { + parent_hash: node_head.hash, + number: node_head.number + 1, + ..Default::default() + }, + ..Default::default() + } + .seal_slow() + .seal_with_senders() + .ok_or_eyre("failed to recover senders")?], + Default::default(), + None, + )), + }; + + let (notifications_tx, notifications_rx) = mpsc::channel(1); + + notifications_tx.send(notification.clone()).await?; + + let mut notifications = ExExNotifications::new( + node_head, + provider, + EthExecutorProvider::mainnet(), + notifications_rx, + ) + .with_head(exex_head); + + let new_notification = notifications.next().await.transpose()?; + assert_eq!(new_notification, Some(notification)); + + Ok(()) + } + + #[ignore] + #[tokio::test] + async fn exex_notifications_same_head_non_canonical() -> eyre::Result<()> { + Ok(()) + } + + #[tokio::test] + async fn test_notifications_ahead_of_head() -> eyre::Result<()> { + let mut rng = generators::rng(); + + let provider_factory = create_test_provider_factory(); + let genesis_hash = init_genesis(&provider_factory)?; + let genesis_block = provider_factory + .block(genesis_hash.into())? + .ok_or_else(|| eyre::eyre!("genesis block not found"))?; + + let provider = BlockchainProvider2::new(provider_factory)?; + + let exex_head_block = random_block( + &mut rng, + genesis_block.number + 1, + BlockParams { parent: Some(genesis_hash), tx_count: Some(0), ..Default::default() }, + ); + + let node_head = + Head { number: genesis_block.number, hash: genesis_hash, ..Default::default() }; + let exex_head = ExExHead { + block: BlockNumHash { number: exex_head_block.number, hash: exex_head_block.hash() }, + }; + + let (notifications_tx, notifications_rx) = mpsc::channel(1); + + notifications_tx + .send(ExExNotification::ChainCommitted { + new: Arc::new(Chain::new( + vec![exex_head_block + .clone() + .seal_with_senders() + .ok_or_eyre("failed to recover senders")?], + Default::default(), + None, + )), + }) + .await?; + + let mut notifications = ExExNotifications::new( + node_head, + provider, + EthExecutorProvider::mainnet(), + notifications_rx, + ) + .with_head(exex_head); + + // First notification is skipped because the node is catching up with the ExEx + let new_notification = poll_fn(|cx| Poll::Ready(notifications.poll_next_unpin(cx))).await; + assert!(new_notification.is_pending()); + + // Imitate the node catching up with the ExEx by sending a notification for the missing + // block + let notification = ExExNotification::ChainCommitted { + new: Arc::new(Chain::new( + vec![random_block( + &mut rng, + exex_head_block.number + 1, + BlockParams { parent: Some(exex_head_block.hash()), ..Default::default() }, + ) + .seal_with_senders() + .ok_or_eyre("failed to recover senders")?], + Default::default(), + None, + )), + }; + notifications_tx.send(notification.clone()).await?; + + // Second notification is received because the node caught up with the ExEx + assert_eq!(notifications.next().await.transpose()?, Some(notification)); + + Ok(()) + } +} From 1d56382b8d4c4e75243ca8ab4e058ad0ad95e713 Mon Sep 17 00:00:00 2001 From: James Prestwich Date: Wed, 25 Sep 2024 11:37:25 -0400 
Subject: [PATCH 08/84] doc: update some exexhead docs (#11214) --- crates/exex/exex/src/notifications.rs | 9 ++++++--- crates/exex/types/src/head.rs | 4 +++- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/crates/exex/exex/src/notifications.rs b/crates/exex/exex/src/notifications.rs index ce954e42a33a8..54d7959dc5e86 100644 --- a/crates/exex/exex/src/notifications.rs +++ b/crates/exex/exex/src/notifications.rs @@ -100,10 +100,13 @@ where P: BlockReader + HeaderProvider + StateProviderFactory + Clone + Unpin + 'static, E: BlockExecutorProvider + Clone + Unpin + 'static, { - /// Subscribe to notifications with the given head. + /// Subscribe to notifications with the given head. This head is the ExEx's + /// latest view of the host chain. /// - /// Notifications will be sent starting from the head, not inclusive. For example, if - /// `head.number == 10`, then the first notification will be with `block.number == 11`. + /// Notifications will be sent starting from the head, not inclusive. For + /// example, if `head.number == 10`, then the first notification will be + /// with `block.number == 11`. A `head.number` of 10 indicates that the ExEx + /// has processed up to block 10, and is ready to process block 11. pub fn with_head(self, head: ExExHead) -> ExExNotificationsWithHead { ExExNotificationsWithHead::new( self.node_head, diff --git a/crates/exex/types/src/head.rs b/crates/exex/types/src/head.rs index 730b5724b37e0..8863ab327d06b 100644 --- a/crates/exex/types/src/head.rs +++ b/crates/exex/types/src/head.rs @@ -1,6 +1,8 @@ use alloy_eips::BlockNumHash; -/// A head of the ExEx. It determines the highest block committed to the internal ExEx state. +/// A head of the ExEx. It contains the highest host block committed to the +/// internal ExEx state. I.e. the latest block that the ExEx has fully +/// processed. #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub struct ExExHead { /// The head block. From 4070498a1dbd40701f1856c45d01d582f4e470f6 Mon Sep 17 00:00:00 2001 From: joshieDo <93316087+joshieDo@users.noreply.github.com> Date: Wed, 25 Sep 2024 18:38:29 +0200 Subject: [PATCH 09/84] fix(storage): only delete static file if `last_block` is on a previous static file (#11029) Co-authored-by: Alexgao001 --- crates/static-file/types/src/segment.rs | 10 +- .../src/providers/static_file/manager.rs | 27 +++-- .../provider/src/providers/static_file/mod.rs | 107 ++++++++++++++---- .../src/providers/static_file/writer.rs | 12 +- 4 files changed, 123 insertions(+), 33 deletions(-) diff --git a/crates/static-file/types/src/segment.rs b/crates/static-file/types/src/segment.rs index cb742b3f3184c..94f09b64a51af 100644 --- a/crates/static-file/types/src/segment.rs +++ b/crates/static-file/types/src/segment.rs @@ -122,6 +122,12 @@ impl StaticFileSegment { pub const fn is_receipts(&self) -> bool { matches!(self, Self::Receipts) } + + /// Returns `true` if the segment is `StaticFileSegment::Receipts` or + /// `StaticFileSegment::Transactions`. + pub const fn is_tx_based(&self) -> bool { + matches!(self, Self::Receipts | Self::Transactions) + } } /// A segment header that contains information common to all segments. Used for storage. 
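A minimal, self-contained sketch of the new `is_tx_based` predicate, assuming only the three `StaticFileSegment` variants visible in this diff (the real enum carries additional derives and metadata); it shows the receipts-or-transactions check that later hunks in `manager.rs` and `writer.rs` replace with the named helper:

enum StaticFileSegment {
    Headers,
    Transactions,
    Receipts,
}

impl StaticFileSegment {
    /// Receipts and transactions are both indexed by transaction number, unlike headers.
    const fn is_tx_based(&self) -> bool {
        matches!(self, Self::Receipts | Self::Transactions)
    }
}

fn main() {
    let segment = StaticFileSegment::Receipts;
    // Call sites previously spelled the variant pair out explicitly:
    let explicit = matches!(segment, StaticFileSegment::Receipts | StaticFileSegment::Transactions);
    // The named predicate asks the same question:
    assert_eq!(explicit, segment.is_tx_based());
    assert!(StaticFileSegment::Transactions.is_tx_based());
    assert!(!StaticFileSegment::Headers.is_tx_based());
}

The distinction drives the truncation fix below: a tx-based segment may legitimately have zero rows for a block range (the blocks simply contain no transactions), so an empty file is kept unless `last_block` falls before the file's block range, whereas an empty headers file past the first one is deleted.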
@@ -239,7 +245,7 @@ impl SegmentHeader { match self.segment { StaticFileSegment::Headers => { if let Some(range) = &mut self.block_range { - if num > range.end { + if num > range.end - range.start { self.block_range = None; } else { range.end = range.end.saturating_sub(num); @@ -248,7 +254,7 @@ impl SegmentHeader { } StaticFileSegment::Transactions | StaticFileSegment::Receipts => { if let Some(range) = &mut self.tx_range { - if num > range.end { + if num > range.end - range.start { self.tx_range = None; } else { range.end = range.end.saturating_sub(num); diff --git a/crates/storage/provider/src/providers/static_file/manager.rs b/crates/storage/provider/src/providers/static_file/manager.rs index 77c87664b72c3..9e6e925993d4a 100644 --- a/crates/storage/provider/src/providers/static_file/manager.rs +++ b/crates/storage/provider/src/providers/static_file/manager.rs @@ -533,15 +533,16 @@ impl StaticFileProvider { }) .or_insert_with(|| BTreeMap::from([(tx_end, current_block_range)])); } - } else if tx_index.get(&segment).map(|index| index.len()) == Some(1) { - // Only happens if we unwind all the txs/receipts from the first static file. - // Should only happen in test scenarios. - if jar.user_header().expected_block_start() == 0 && - matches!( - segment, - StaticFileSegment::Receipts | StaticFileSegment::Transactions - ) - { + } else if segment.is_tx_based() { + // The unwinded file has no more transactions/receipts. However, the highest + // block is within this files' block range. We only retain + // entries with block ranges before the current one. + tx_index.entry(segment).and_modify(|index| { + index.retain(|_, block_range| block_range.start() < fixed_range.start()); + }); + + // If the index is empty, just remove it. + if tx_index.get(&segment).is_some_and(|index| index.is_empty()) { tx_index.remove(&segment); } } @@ -1145,11 +1146,17 @@ impl StaticFileProvider { Ok(data) } - #[cfg(any(test, feature = "test-utils"))] /// Returns `static_files` directory + #[cfg(any(test, feature = "test-utils"))] pub fn path(&self) -> &Path { &self.path } + + /// Returns `static_files` transaction index + #[cfg(any(test, feature = "test-utils"))] + pub fn tx_index(&self) -> &RwLock { + &self.static_files_tx_index + } } /// Helper trait to manage different [`StaticFileProviderRW`] of an `Arc 0 { if segment.is_receipts() { - writer.append_receipt(*next_tx_num, &Receipt::default()).unwrap(); + // Used as ID for validation + receipt.cumulative_gas_used = *next_tx_num; + writer.append_receipt(*next_tx_num, &receipt).unwrap(); } else { - writer - .append_transaction(*next_tx_num, &TransactionSignedNoHash::default()) - .unwrap(); + // Used as ID for validation + tx.transaction.set_nonce(*next_tx_num); + writer.append_transaction(*next_tx_num, &tx).unwrap(); } *next_tx_num += 1; tx_count -= 1; @@ -376,10 +390,19 @@ mod tests { expected_tx_range.as_ref() ); }); + + // Ensure transaction index + let tx_index = sf_rw.tx_index().read(); + let expected_tx_index = + vec![(8, SegmentRangeInclusive::new(0, 9)), (9, SegmentRangeInclusive::new(20, 29))]; + assert_eq!( + tx_index.get(&segment).map(|index| index.iter().map(|(k, v)| (*k, *v)).collect()), + (!expected_tx_index.is_empty()).then_some(expected_tx_index), + "tx index mismatch", + ); } #[test] - #[ignore] fn test_tx_based_truncation() { let segments = [StaticFileSegment::Transactions, StaticFileSegment::Receipts]; let blocks_per_file = 10; // Number of blocks per file @@ -387,14 +410,16 @@ mod tests { let file_set_count = 3; // Number of sets of files to create let 
initial_file_count = files_per_range * file_set_count + 1; // Includes lockfile + #[allow(clippy::too_many_arguments)] fn prune_and_validate( sf_rw: &StaticFileProvider, static_dir: impl AsRef, segment: StaticFileSegment, prune_count: u64, last_block: u64, - expected_tx_tip: u64, + expected_tx_tip: Option, expected_file_count: i32, + expected_tx_index: Vec<(TxNumber, SegmentRangeInclusive)>, ) -> eyre::Result<()> { let mut writer = sf_rw.latest_writer(segment)?; @@ -412,11 +437,25 @@ mod tests { Some(last_block), "block mismatch", )?; - assert_eyre( - sf_rw.get_highest_static_file_tx(segment), - Some(expected_tx_tip), - "tx mismatch", - )?; + assert_eyre(sf_rw.get_highest_static_file_tx(segment), expected_tx_tip, "tx mismatch")?; + + // Verify that transactions and receipts are returned correctly. Uses + // cumulative_gas_used & nonce as ids. + if let Some(id) = expected_tx_tip { + if segment.is_receipts() { + assert_eyre( + expected_tx_tip, + sf_rw.receipt(id)?.map(|r| r.cumulative_gas_used), + "tx mismatch", + )?; + } else { + assert_eyre( + expected_tx_tip, + sf_rw.transaction_by_id(id)?.map(|t| t.nonce()), + "tx mismatch", + )?; + } + } // Ensure the file count has reduced as expected assert_eyre( @@ -424,6 +463,15 @@ mod tests { expected_file_count as usize, "file count mismatch", )?; + + // Ensure that the inner tx index (max_tx -> block range) is as expected + let tx_index = sf_rw.tx_index().read(); + assert_eyre( + tx_index.get(&segment).map(|index| index.iter().map(|(k, v)| (*k, *v)).collect()), + (!expected_tx_index.is_empty()).then_some(expected_tx_index), + "tx index mismatch", + )?; + Ok(()) } @@ -442,26 +490,46 @@ mod tests { let highest_tx = sf_rw.get_highest_static_file_tx(segment).unwrap(); // Test cases - // [prune_count, last_block, expected_tx_tip, expected_file_count) + // [prune_count, last_block, expected_tx_tip, expected_file_count, expected_tx_index) let test_cases = vec![ // Case 0: 20..=29 has only one tx. Prune the only tx of the block range. // It ensures that the file is not deleted even though there are no rows, since the // `last_block` which is passed to the prune method is the first // block of the range. - (1, blocks_per_file * 2, highest_tx - 1, initial_file_count), + ( + 1, + blocks_per_file * 2, + Some(highest_tx - 1), + initial_file_count, + vec![(highest_tx - 1, SegmentRangeInclusive::new(0, 9))], + ), // Case 1: 10..=19 has no txs. There are no txes in the whole block range, but want // to unwind to block 9. Ensures that the 20..=29 and 10..=19 files // are deleted. - (0, blocks_per_file - 1, highest_tx - 1, files_per_range + 1), // includes lockfile + ( + 0, + blocks_per_file - 1, + Some(highest_tx - 1), + files_per_range + 1, // includes lockfile + vec![(highest_tx - 1, SegmentRangeInclusive::new(0, 9))], + ), // Case 2: Prune most txs up to block 1. - (7, 1, 1, files_per_range + 1), + ( + highest_tx - 1, + 1, + Some(0), + files_per_range + 1, + vec![(0, SegmentRangeInclusive::new(0, 1))], + ), // Case 3: Prune remaining tx and ensure that file is not deleted. 
- (1, 0, 0, files_per_range + 1), + (1, 0, None, files_per_range + 1, vec![]), ]; // Loop through test cases - for (case, (prune_count, last_block, expected_tx_tip, expected_file_count)) in - test_cases.into_iter().enumerate() + for ( + case, + (prune_count, last_block, expected_tx_tip, expected_file_count, expected_tx_index), + ) in test_cases.into_iter().enumerate() { prune_and_validate( &sf_rw, @@ -471,6 +539,7 @@ mod tests { last_block, expected_tx_tip, expected_file_count, + expected_tx_index, ) .map_err(|err| eyre::eyre!("Test case {case}: {err}")) .unwrap(); diff --git a/crates/storage/provider/src/providers/static_file/writer.rs b/crates/storage/provider/src/providers/static_file/writer.rs index b62fc22c730aa..d086c5693ca52 100644 --- a/crates/storage/provider/src/providers/static_file/writer.rs +++ b/crates/storage/provider/src/providers/static_file/writer.rs @@ -381,8 +381,9 @@ impl StaticFileProviderRW { /// Commits to the configuration file at the end. fn truncate(&mut self, num_rows: u64, last_block: Option) -> ProviderResult<()> { let mut remaining_rows = num_rows; + let segment = self.writer.user_header().segment(); while remaining_rows > 0 { - let len = match self.writer.user_header().segment() { + let len = match segment { StaticFileSegment::Headers => { self.writer.user_header().block_len().unwrap_or_default() } @@ -396,7 +397,14 @@ impl StaticFileProviderRW { // delete the whole file and go to the next static file let block_start = self.writer.user_header().expected_block_start(); - if block_start != 0 { + // We only delete the file if it's NOT the first static file AND: + // * it's a Header segment OR + // * it's a tx-based segment AND `last_block` is lower than the first block of this + // file's block range. Otherwise, having no rows simply means that this block + // range has no transactions, but the file should remain. + if block_start != 0 && + (segment.is_headers() || last_block.is_some_and(|b| b < block_start)) + { self.delete_current_and_open_previous()?; } else { // Update `SegmentHeader` From 4ef9635fa91ad5535b7cb9a7318d5f21b2592499 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 25 Sep 2024 18:51:58 +0200 Subject: [PATCH 10/84] chore: update helper fn (#11209) --- crates/net/downloaders/src/bodies/test_utils.rs | 2 +- crates/primitives/src/block.rs | 13 ++----------- 2 files changed, 3 insertions(+), 12 deletions(-) diff --git a/crates/net/downloaders/src/bodies/test_utils.rs b/crates/net/downloaders/src/bodies/test_utils.rs index d61e1f7fb6c82..af4bf8145af48 100644 --- a/crates/net/downloaders/src/bodies/test_utils.rs +++ b/crates/net/downloaders/src/bodies/test_utils.rs @@ -34,7 +34,7 @@ pub(crate) fn create_raw_bodies( .into_iter() .map(|header| { let body = bodies.remove(&header.hash()).expect("body exists"); - body.create_block(header.unseal()) + body.into_block(header.unseal()) }) .collect() } diff --git a/crates/primitives/src/block.rs b/crates/primitives/src/block.rs index 5914602127e50..401757d30bb70 100644 --- a/crates/primitives/src/block.rs +++ b/crates/primitives/src/block.rs @@ -576,17 +576,8 @@ pub struct BlockBody { impl BlockBody { /// Create a [`Block`] from the body and its header. - // todo(onbjerg): should this not just take `self`? 
its used in one place - pub fn create_block(&self, header: Header) -> Block { - Block { - header, - body: Self { - transactions: self.transactions.clone(), - ommers: self.ommers.clone(), - withdrawals: self.withdrawals.clone(), - requests: self.requests.clone(), - }, - } + pub const fn into_block(self, header: Header) -> Block { + Block { header, body: self } } /// Calculate the transaction root for the block body. From 76a6e0db6936a164903cf9b606db34d17273651a Mon Sep 17 00:00:00 2001 From: nk_ysg Date: Thu, 26 Sep 2024 01:10:11 +0800 Subject: [PATCH 11/84] remove IntoRecoveredTransaction (#11221) --- crates/consensus/auto-seal/src/task.rs | 1 - crates/ethereum/payload/src/lib.rs | 3 +-- crates/optimism/payload/src/builder.rs | 2 +- crates/primitives/src/lib.rs | 6 +++--- crates/primitives/src/transaction/mod.rs | 9 --------- crates/rpc/rpc-eth-api/src/helpers/pending_block.rs | 4 ++-- crates/rpc/rpc/src/eth/filter.rs | 2 +- crates/rpc/rpc/src/eth/pubsub.rs | 1 - crates/transaction-pool/src/maintain.rs | 4 ++-- crates/transaction-pool/src/pool/mod.rs | 3 +-- crates/transaction-pool/src/validate/mod.rs | 13 +++++++------ examples/custom-inspector/src/main.rs | 2 +- examples/txpool-tracing/src/main.rs | 8 ++------ 13 files changed, 21 insertions(+), 37 deletions(-) diff --git a/crates/consensus/auto-seal/src/task.rs b/crates/consensus/auto-seal/src/task.rs index 8979428bed7c2..dbbdc44631cfc 100644 --- a/crates/consensus/auto-seal/src/task.rs +++ b/crates/consensus/auto-seal/src/task.rs @@ -5,7 +5,6 @@ use reth_beacon_consensus::{BeaconEngineMessage, ForkchoiceStatus}; use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_engine_primitives::EngineTypes; use reth_evm::execute::BlockExecutorProvider; -use reth_primitives::IntoRecoveredTransaction; use reth_provider::{CanonChainTracker, StateProviderFactory}; use reth_stages_api::PipelineEvent; use reth_tokio_util::EventStream; diff --git a/crates/ethereum/payload/src/lib.rs b/crates/ethereum/payload/src/lib.rs index ecce558f1a6fd..2593e83366114 100644 --- a/crates/ethereum/payload/src/lib.rs +++ b/crates/ethereum/payload/src/lib.rs @@ -33,8 +33,7 @@ use reth_primitives::{ constants::{eip4844::MAX_DATA_GAS_PER_BLOCK, BEACON_NONCE}, proofs::{self, calculate_requests_root}, revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg}, - Block, BlockBody, EthereumHardforks, Header, IntoRecoveredTransaction, Receipt, - EMPTY_OMMER_ROOT_HASH, + Block, BlockBody, EthereumHardforks, Header, Receipt, EMPTY_OMMER_ROOT_HASH, }; use reth_provider::{ChainSpecProvider, StateProviderFactory}; use reth_revm::database::StateProviderDatabase; diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index 5e443a4f5a089..18c1ece8ffecd 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -17,7 +17,7 @@ use reth_primitives::{ constants::BEACON_NONCE, proofs, revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg}, - Block, BlockBody, Header, IntoRecoveredTransaction, Receipt, TxType, EMPTY_OMMER_ROOT_HASH, + Block, BlockBody, Header, Receipt, TxType, EMPTY_OMMER_ROOT_HASH, }; use reth_provider::StateProviderFactory; use reth_revm::database::StateProviderDatabase; diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index cdcf642c05ae4..c350c506694f4 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -62,9 +62,9 @@ pub use transaction::BlobTransactionValidationError; pub use transaction::{ util::secp256k1::{public_key_to_address, 
recover_signer_unchecked, sign_message}, - IntoRecoveredTransaction, InvalidTransactionError, Signature, Transaction, TransactionMeta, - TransactionSigned, TransactionSignedEcRecovered, TransactionSignedNoHash, TxHashOrNumber, - TxType, EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID, + InvalidTransactionError, Signature, Transaction, TransactionMeta, TransactionSigned, + TransactionSignedEcRecovered, TransactionSignedNoHash, TxHashOrNumber, TxType, + EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID, LEGACY_TX_TYPE_ID, }; diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index f1dae00fece23..fc055adb26b49 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -1634,15 +1634,6 @@ impl Decodable for TransactionSignedEcRecovered { } } -/// Ensures the transaction can be sent over the -/// network -pub trait IntoRecoveredTransaction { - /// Converts to this type into a [`TransactionSignedEcRecovered`]. - /// - /// Note: this takes `&self` since indented usage is via `Arc`. - fn to_recovered_transaction(&self) -> TransactionSignedEcRecovered; -} - /// Generic wrapper with encoded Bytes, such as transaction data. #[derive(Debug, Clone, PartialEq, Eq)] pub struct WithEncoded(Bytes, pub T); diff --git a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs index d4d5fd23d4587..3f5d540723cbf 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs @@ -20,8 +20,8 @@ use reth_primitives::{ BlockEnv, CfgEnv, CfgEnvWithHandlerCfg, EVMError, Env, ExecutionResult, InvalidTransaction, ResultAndState, SpecId, }, - Block, BlockBody, Header, IntoRecoveredTransaction, Receipt, Requests, SealedBlockWithSenders, - SealedHeader, TransactionSignedEcRecovered, EMPTY_OMMER_ROOT_HASH, + Block, BlockBody, Header, Receipt, Requests, SealedBlockWithSenders, SealedHeader, + TransactionSignedEcRecovered, EMPTY_OMMER_ROOT_HASH, }; use reth_provider::{ BlockReader, BlockReaderIdExt, ChainSpecProvider, EvmEnvProvider, ProviderError, diff --git a/crates/rpc/rpc/src/eth/filter.rs b/crates/rpc/rpc/src/eth/filter.rs index c4caa7300ee26..23bf0b58fc559 100644 --- a/crates/rpc/rpc/src/eth/filter.rs +++ b/crates/rpc/rpc/src/eth/filter.rs @@ -19,7 +19,7 @@ use async_trait::async_trait; use jsonrpsee::{core::RpcResult, server::IdProvider}; use reth_chainspec::ChainInfo; use reth_node_api::EthApiTypes; -use reth_primitives::{IntoRecoveredTransaction, TransactionSignedEcRecovered}; +use reth_primitives::TransactionSignedEcRecovered; use reth_provider::{BlockIdReader, BlockReader, EvmEnvProvider, ProviderError}; use reth_rpc_eth_api::{EthFilterApiServer, FullEthApiTypes, RpcTransaction, TransactionCompat}; use reth_rpc_eth_types::{ diff --git a/crates/rpc/rpc/src/eth/pubsub.rs b/crates/rpc/rpc/src/eth/pubsub.rs index 17c4f7b74b040..7bd1fd03d3b97 100644 --- a/crates/rpc/rpc/src/eth/pubsub.rs +++ b/crates/rpc/rpc/src/eth/pubsub.rs @@ -16,7 +16,6 @@ use jsonrpsee::{ server::SubscriptionMessage, types::ErrorObject, PendingSubscriptionSink, SubscriptionSink, }; use reth_network_api::NetworkInfo; -use reth_primitives::IntoRecoveredTransaction; use reth_provider::{BlockReader, CanonStateSubscriptions, EvmEnvProvider}; use reth_rpc_eth_api::{pubsub::EthPubSubApiServer, FullEthApiTypes, RpcTransaction}; use reth_rpc_eth_types::logs_utils; diff --git 
a/crates/transaction-pool/src/maintain.rs b/crates/transaction-pool/src/maintain.rs index 5a68b16607c8e..e961d03703bcc 100644 --- a/crates/transaction-pool/src/maintain.rs +++ b/crates/transaction-pool/src/maintain.rs @@ -17,8 +17,8 @@ use reth_chainspec::{ChainSpec, ChainSpecProvider}; use reth_execution_types::ChangedAccount; use reth_fs_util::FsPathError; use reth_primitives::{ - alloy_primitives::Sealable, BlockNumberOrTag, IntoRecoveredTransaction, - PooledTransactionsElementEcRecovered, SealedHeader, TransactionSigned, + alloy_primitives::Sealable, BlockNumberOrTag, PooledTransactionsElementEcRecovered, + SealedHeader, TransactionSigned, }; use reth_storage_api::{errors::provider::ProviderError, BlockReaderIdExt, StateProviderFactory}; use reth_tasks::TaskSpawner; diff --git a/crates/transaction-pool/src/pool/mod.rs b/crates/transaction-pool/src/pool/mod.rs index cfe38ea31da85..2ba101721b08c 100644 --- a/crates/transaction-pool/src/pool/mod.rs +++ b/crates/transaction-pool/src/pool/mod.rs @@ -87,8 +87,7 @@ use reth_eth_wire_types::HandleMempoolData; use reth_execution_types::ChangedAccount; use reth_primitives::{ - BlobTransaction, BlobTransactionSidecar, IntoRecoveredTransaction, PooledTransactionsElement, - TransactionSigned, + BlobTransaction, BlobTransactionSidecar, PooledTransactionsElement, TransactionSigned, }; use std::{ collections::{HashMap, HashSet}, diff --git a/crates/transaction-pool/src/validate/mod.rs b/crates/transaction-pool/src/validate/mod.rs index 7baa5e3f335eb..80aee0afe9eb4 100644 --- a/crates/transaction-pool/src/validate/mod.rs +++ b/crates/transaction-pool/src/validate/mod.rs @@ -8,8 +8,8 @@ use crate::{ use alloy_primitives::{Address, TxHash, B256, U256}; use futures_util::future::Either; use reth_primitives::{ - BlobTransactionSidecar, IntoRecoveredTransaction, PooledTransactionsElementEcRecovered, - SealedBlock, TransactionSignedEcRecovered, + BlobTransactionSidecar, PooledTransactionsElementEcRecovered, SealedBlock, + TransactionSignedEcRecovered, }; use std::{fmt, future::Future, time::Instant}; @@ -380,10 +380,11 @@ impl ValidPoolTransaction { } } -impl> IntoRecoveredTransaction - for ValidPoolTransaction -{ - fn to_recovered_transaction(&self) -> TransactionSignedEcRecovered { +impl> ValidPoolTransaction { + /// Converts to this type into a [`TransactionSignedEcRecovered`]. + /// + /// Note: this takes `&self` since indented usage is via `Arc`. 
+ pub fn to_recovered_transaction(&self) -> TransactionSignedEcRecovered { self.transaction.clone().into_consensus() } } diff --git a/examples/custom-inspector/src/main.rs b/examples/custom-inspector/src/main.rs index 87f8dd7e95bfa..ce159c75cdba3 100644 --- a/examples/custom-inspector/src/main.rs +++ b/examples/custom-inspector/src/main.rs @@ -17,7 +17,7 @@ use reth::{ args::utils::DefaultChainSpecParser, builder::NodeHandle, cli::Cli, - primitives::{Address, BlockNumberOrTag, IntoRecoveredTransaction}, + primitives::{Address, BlockNumberOrTag}, revm::{ inspector_handle_register, interpreter::{Interpreter, OpCode}, diff --git a/examples/txpool-tracing/src/main.rs b/examples/txpool-tracing/src/main.rs index c9d27089d9ce6..cf721c51f0a59 100644 --- a/examples/txpool-tracing/src/main.rs +++ b/examples/txpool-tracing/src/main.rs @@ -14,12 +14,8 @@ use alloy_rpc_types_trace::{parity::TraceType, tracerequest::TraceCallRequest}; use clap::Parser; use futures_util::StreamExt; use reth::{ - args::utils::DefaultChainSpecParser, - builder::NodeHandle, - cli::Cli, - primitives::{Address, IntoRecoveredTransaction}, - rpc::compat::transaction::transaction_to_call_request, - transaction_pool::TransactionPool, + args::utils::DefaultChainSpecParser, builder::NodeHandle, cli::Cli, primitives::Address, + rpc::compat::transaction::transaction_to_call_request, transaction_pool::TransactionPool, }; use reth_node_ethereum::node::EthereumNode; From 653c0898a869c0d2a60cc1c5711c2cdded99e700 Mon Sep 17 00:00:00 2001 From: Dan Cline <6798349+Rjected@users.noreply.github.com> Date: Wed, 25 Sep 2024 15:49:27 -0400 Subject: [PATCH 12/84] chore(tree): add legacy tree metrics to new engine (#11175) --- crates/blockchain-tree/src/metrics.rs | 40 +++++++++++++------------- crates/engine/tree/src/tree/metrics.rs | 3 ++ crates/engine/tree/src/tree/mod.rs | 4 +++ 3 files changed, 27 insertions(+), 20 deletions(-) diff --git a/crates/blockchain-tree/src/metrics.rs b/crates/blockchain-tree/src/metrics.rs index 5d44a63911789..121d0a69786f0 100644 --- a/crates/blockchain-tree/src/metrics.rs +++ b/crates/blockchain-tree/src/metrics.rs @@ -5,26 +5,6 @@ use reth_metrics::{ }; use std::time::{Duration, Instant}; -/// Metrics for the entire blockchain tree -#[derive(Metrics)] -#[metrics(scope = "blockchain_tree")] -pub struct TreeMetrics { - /// Total number of sidechains (not including the canonical chain) - pub sidechains: Gauge, - /// The highest block number in the canonical chain - pub canonical_chain_height: Gauge, - /// The number of reorgs - pub reorgs: Counter, - /// The latest reorg depth - pub latest_reorg_depth: Gauge, - /// Longest sidechain height - pub longest_sidechain_height: Gauge, - /// The number of times cached trie updates were used for insert. - pub trie_updates_insert_cached: Counter, - /// The number of times trie updates were recomputed for insert. 
- pub trie_updates_insert_recomputed: Counter, -} - /// Metrics for the blockchain tree block buffer #[derive(Metrics)] #[metrics(scope = "blockchain_tree.block_buffer")] @@ -65,6 +45,26 @@ impl MakeCanonicalDurationsRecorder { } } +/// Metrics for the entire blockchain tree +#[derive(Metrics)] +#[metrics(scope = "blockchain_tree")] +pub struct TreeMetrics { + /// Total number of sidechains (not including the canonical chain) + pub sidechains: Gauge, + /// The highest block number in the canonical chain + pub canonical_chain_height: Gauge, + /// The number of reorgs + pub reorgs: Counter, + /// The latest reorg depth + pub latest_reorg_depth: Gauge, + /// Longest sidechain height + pub longest_sidechain_height: Gauge, + /// The number of times cached trie updates were used for insert. + pub trie_updates_insert_cached: Counter, + /// The number of times trie updates were recomputed for insert. + pub trie_updates_insert_recomputed: Counter, +} + /// Represents actions for making a canonical chain. #[derive(Debug, Copy, Clone)] pub(crate) enum MakeCanonicalAction { diff --git a/crates/engine/tree/src/tree/metrics.rs b/crates/engine/tree/src/tree/metrics.rs index 2df1fbdac7002..922041ae71516 100644 --- a/crates/engine/tree/src/tree/metrics.rs +++ b/crates/engine/tree/src/tree/metrics.rs @@ -1,3 +1,4 @@ +use reth_blockchain_tree::metrics::TreeMetrics; use reth_evm::metrics::ExecutorMetrics; use reth_metrics::{ metrics::{Counter, Gauge, Histogram}, @@ -13,6 +14,8 @@ pub(crate) struct EngineApiMetrics { pub(crate) executor: ExecutorMetrics, /// Metrics for block validation pub(crate) block_validation: BlockValidationMetrics, + /// A copy of legacy blockchain tree metrics, to be replaced when we replace the old tree + pub(crate) tree: TreeMetrics, } /// Metrics for the `EngineApi`. 
diff --git a/crates/engine/tree/src/tree/mod.rs b/crates/engine/tree/src/tree/mod.rs index 3dd736cdc10f6..10654223ca41d 100644 --- a/crates/engine/tree/src/tree/mod.rs +++ b/crates/engine/tree/src/tree/mod.rs @@ -1291,6 +1291,7 @@ where backfill_num_hash, ); self.metrics.engine.executed_blocks.set(self.state.tree_state.block_count() as f64); + self.metrics.tree.canonical_chain_height.set(backfill_height as f64); // remove all buffered blocks below the backfill height self.state.buffer.remove_old_blocks(backfill_height); @@ -1956,6 +1957,9 @@ where self.canonical_in_memory_state.update_chain(chain_update); self.canonical_in_memory_state.set_canonical_head(tip.clone()); + // Update metrics based on new tip + self.metrics.tree.canonical_chain_height.set(tip.number as f64); + // sends an event to all active listeners about the new canonical chain self.canonical_in_memory_state.notify_canon_state(notification); From 35034065da113b625db1f708a13445b36f57ae1a Mon Sep 17 00:00:00 2001 From: Dan Cline <6798349+Rjected@users.noreply.github.com> Date: Wed, 25 Sep 2024 16:58:55 -0400 Subject: [PATCH 13/84] chore(tree): make tree trace targets all engine::tree (#11227) --- crates/engine/tree/src/tree/mod.rs | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/crates/engine/tree/src/tree/mod.rs b/crates/engine/tree/src/tree/mod.rs index 10654223ca41d..6c0f5a0517edd 100644 --- a/crates/engine/tree/src/tree/mod.rs +++ b/crates/engine/tree/src/tree/mod.rs @@ -1214,7 +1214,7 @@ where if let Err(err) = tx.send(output.map(|o| o.outcome).map_err(Into::into)) { - error!("Failed to send event: {err:?}"); + error!(target: "engine::tree", "Failed to send event: {err:?}"); } } BeaconEngineMessage::NewPayload { payload, cancun_fields, tx } => { @@ -1224,7 +1224,7 @@ where Box::new(e), ) })) { - error!("Failed to send event: {err:?}"); + error!(target: "engine::tree", "Failed to send event: {err:?}"); } } BeaconEngineMessage::TransitionConfigurationExchanged => { @@ -1411,10 +1411,9 @@ where debug!(target: "engine::tree", "emitting backfill action event"); } - let _ = self - .outgoing - .send(event) - .inspect_err(|err| error!("Failed to send internal event: {err:?}")); + let _ = self.outgoing.send(event).inspect_err( + |err| error!(target: "engine::tree", "Failed to send internal event: {err:?}"), + ); } /// Returns true if the canonical chain length minus the last persisted @@ -1701,6 +1700,7 @@ where fn validate_block(&self, block: &SealedBlockWithSenders) -> Result<(), ConsensusError> { if let Err(e) = self.consensus.validate_header_with_total_difficulty(block, U256::MAX) { error!( + target: "engine::tree", ?block, "Failed to validate total difficulty for block {}: {e}", block.header.hash() @@ -1709,12 +1709,12 @@ where } if let Err(e) = self.consensus.validate_header(block) { - error!(?block, "Failed to validate header {}: {e}", block.header.hash()); + error!(target: "engine::tree", ?block, "Failed to validate header {}: {e}", block.header.hash()); return Err(e) } if let Err(e) = self.consensus.validate_block_pre_execution(block) { - error!(?block, "Failed to validate block {}: {e}", block.header.hash()); + error!(target: "engine::tree", ?block, "Failed to validate block {}: {e}", block.header.hash()); return Err(e) } @@ -2148,7 +2148,7 @@ where )) })?; if let Err(e) = self.consensus.validate_header_against_parent(&block, &parent_block) { - warn!(?block, "Failed to validate header {} against parent: {e}", block.header.hash()); + warn!(target: "engine::tree", ?block, "Failed to 
validate header {} against parent: {e}", block.header.hash()); return Err(e.into()) } @@ -2199,7 +2199,7 @@ where { Ok((state_root, trie_output)) => Some((state_root, trie_output)), Err(AsyncStateRootError::Provider(ProviderError::ConsistentView(error))) => { - debug!(target: "engine", %error, "Async state root computation failed consistency check, falling back"); + debug!(target: "engine::tree", %error, "Async state root computation failed consistency check, falling back"); None } Err(error) => return Err(InsertBlockErrorKindTwo::Other(Box::new(error))), From ece09440705ec37bd7f6fbeb04aa3afe0a93a1e8 Mon Sep 17 00:00:00 2001 From: nk_ysg Date: Thu, 26 Sep 2024 15:02:10 +0800 Subject: [PATCH 14/84] reth-codec: remove unused derives from alloy compat types (#11231) --- crates/engine/tree/src/tree/mod.rs | 1 - crates/storage/codecs/Cargo.toml | 5 ++--- .../storage/codecs/src/alloy/authorization_list.rs | 5 ++--- crates/storage/codecs/src/alloy/genesis_account.rs | 13 ++++++------- crates/storage/codecs/src/alloy/header.rs | 7 ++++--- .../storage/codecs/src/alloy/transaction/eip1559.rs | 6 ++---- .../storage/codecs/src/alloy/transaction/eip2930.rs | 5 ++--- .../storage/codecs/src/alloy/transaction/eip4844.rs | 5 ++--- .../storage/codecs/src/alloy/transaction/eip7702.rs | 5 ++--- .../storage/codecs/src/alloy/transaction/legacy.rs | 5 ++--- .../codecs/src/alloy/transaction/optimism.rs | 5 ++--- crates/storage/codecs/src/alloy/withdrawal.rs | 5 ++--- 12 files changed, 28 insertions(+), 39 deletions(-) diff --git a/crates/engine/tree/src/tree/mod.rs b/crates/engine/tree/src/tree/mod.rs index 6c0f5a0517edd..542bfc0862eb0 100644 --- a/crates/engine/tree/src/tree/mod.rs +++ b/crates/engine/tree/src/tree/mod.rs @@ -2272,7 +2272,6 @@ where /// Returns `Ok(_)` if computed successfully. /// Returns `Err(_)` if error was encountered during computation. /// `Err(ProviderError::ConsistentView(_))` can be safely ignored and fallback computation - /// should be used instead. 
fn compute_state_root_async( &self, diff --git a/crates/storage/codecs/Cargo.toml b/crates/storage/codecs/Cargo.toml index 7a999d35f73a2..640ec8c9561fd 100644 --- a/crates/storage/codecs/Cargo.toml +++ b/crates/storage/codecs/Cargo.toml @@ -27,7 +27,6 @@ op-alloy-consensus = { workspace = true, optional = true } # misc bytes.workspace = true modular-bitfield = { workspace = true, optional = true } -serde = { workspace = true, optional = true } [dev-dependencies] alloy-eips = { workspace = true, default-features = false, features = [ @@ -48,17 +47,17 @@ serde_json.workspace = true arbitrary = { workspace = true, features = ["derive"] } proptest.workspace = true proptest-arbitrary-interop.workspace = true +serde.workspace = true [features] default = ["std", "alloy"] -std = ["alloy-primitives/std", "bytes/std", "serde?/std"] +std = ["alloy-primitives/std", "bytes/std"] alloy = [ "dep:alloy-consensus", "dep:alloy-eips", "dep:alloy-genesis", "dep:modular-bitfield", "dep:alloy-trie", - "dep:serde" ] optimism = ["alloy", "dep:op-alloy-consensus"] test-utils = [] diff --git a/crates/storage/codecs/src/alloy/authorization_list.rs b/crates/storage/codecs/src/alloy/authorization_list.rs index 2c1495abf716a..3efe135906225 100644 --- a/crates/storage/codecs/src/alloy/authorization_list.rs +++ b/crates/storage/codecs/src/alloy/authorization_list.rs @@ -5,13 +5,12 @@ use alloy_eips::eip7702::{Authorization as AlloyAuthorization, SignedAuthorizati use alloy_primitives::{Address, U256}; use bytes::Buf; use reth_codecs_derive::add_arbitrary_tests; -use serde::{Deserialize, Serialize}; /// Authorization acts as bridge which simplifies Compact implementation for AlloyAuthorization. /// /// Notice: Make sure this struct is 1:1 with `alloy_eips::eip7702::Authorization` -#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize, Deserialize, Compact)] -#[cfg_attr(test, derive(arbitrary::Arbitrary))] +#[derive(Debug, Clone, PartialEq, Eq, Default, Compact)] +#[cfg_attr(test, derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize))] #[add_arbitrary_tests(compact)] pub(crate) struct Authorization { chain_id: U256, diff --git a/crates/storage/codecs/src/alloy/genesis_account.rs b/crates/storage/codecs/src/alloy/genesis_account.rs index a94f4e2ef9060..938ad1375b158 100644 --- a/crates/storage/codecs/src/alloy/genesis_account.rs +++ b/crates/storage/codecs/src/alloy/genesis_account.rs @@ -3,7 +3,6 @@ use alloc::vec::Vec; use alloy_genesis::GenesisAccount as AlloyGenesisAccount; use alloy_primitives::{Bytes, B256, U256}; use reth_codecs_derive::add_arbitrary_tests; -use serde::{Deserialize, Serialize}; /// `GenesisAccount` acts as bridge which simplifies Compact implementation for /// `AlloyGenesisAccount`. @@ -23,8 +22,8 @@ pub(crate) struct GenesisAccountRef<'a> { private_key: Option<&'a B256>, } -#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize, Deserialize, Compact)] -#[cfg_attr(test, derive(arbitrary::Arbitrary))] +#[derive(Debug, Clone, PartialEq, Eq, Default, Compact)] +#[cfg_attr(test, derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize))] #[add_arbitrary_tests(compact)] pub(crate) struct GenesisAccount { /// The nonce of the account at genesis. 
@@ -39,15 +38,15 @@ pub(crate) struct GenesisAccount {
     private_key: Option<B256>,
 }
 
-#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize, Deserialize, Compact)]
-#[cfg_attr(test, derive(arbitrary::Arbitrary))]
+#[derive(Debug, Clone, PartialEq, Eq, Default, Compact)]
+#[cfg_attr(test, derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize))]
 #[add_arbitrary_tests(compact)]
 pub(crate) struct StorageEntries {
     entries: Vec<StorageEntry>,
 }
 
-#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize, Deserialize, Compact)]
-#[cfg_attr(test, derive(arbitrary::Arbitrary))]
+#[derive(Debug, Clone, PartialEq, Eq, Default, Compact)]
+#[cfg_attr(test, derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize))]
 #[add_arbitrary_tests(compact)]
 pub(crate) struct StorageEntry {
     key: B256,
diff --git a/crates/storage/codecs/src/alloy/header.rs b/crates/storage/codecs/src/alloy/header.rs
index b4fc90e390a51..c6ef2affccad8 100644
--- a/crates/storage/codecs/src/alloy/header.rs
+++ b/crates/storage/codecs/src/alloy/header.rs
@@ -1,7 +1,6 @@
 use crate::Compact;
 use alloy_consensus::Header as AlloyHeader;
 use alloy_primitives::{Address, BlockNumber, Bloom, Bytes, B256, U256};
-use serde::{Deserialize, Serialize};
 
 /// Block header
 ///
@@ -11,7 +10,8 @@ use serde::{Deserialize, Serialize};
 /// will automatically apply to this type.
 ///
 /// Notice: Make sure this struct is 1:1 with [`alloy_consensus::Header`]
-#[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Serialize, Deserialize, Compact)]
+#[cfg_attr(test, derive(serde::Serialize, serde::Deserialize))]
+#[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Compact)]
 pub(crate) struct Header {
     parent_hash: B256,
     ommers_hash: B256,
@@ -42,7 +42,8 @@ pub(crate) struct Header {
 /// used as a field of [`Header`] for backwards compatibility.
 ///
 /// More information: & [`reth_codecs_derive::Compact`].
-#[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Serialize, Deserialize, Compact)]
+#[cfg_attr(test, derive(serde::Serialize, serde::Deserialize))]
+#[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Compact)]
 pub(crate) struct HeaderExt {
     requests_root: Option<B256>,
 }
diff --git a/crates/storage/codecs/src/alloy/transaction/eip1559.rs b/crates/storage/codecs/src/alloy/transaction/eip1559.rs
index d2113a736ed24..9c98bf300e694 100644
--- a/crates/storage/codecs/src/alloy/transaction/eip1559.rs
+++ b/crates/storage/codecs/src/alloy/transaction/eip1559.rs
@@ -2,8 +2,6 @@ use crate::Compact;
 use alloy_consensus::TxEip1559 as AlloyTxEip1559;
 use alloy_eips::eip2930::AccessList;
 use alloy_primitives::{Bytes, ChainId, TxKind, U256};
-use serde::{Deserialize, Serialize};
-
 /// [EIP-1559 Transaction](https://eips.ethereum.org/EIPS/eip-1559)
 ///
 /// This is a helper type to use derive on it instead of manually managing `bitfield`.
 ///
 /// This type is required because a `Compact` impl is needed on the new alloy type that is not
 /// present in this crate. However, we prefer not to implement `Compact` on the alloy type
 /// will automatically apply to this type.
/// /// Notice: Make sure this struct is 1:1 with [`alloy_consensus::TxEip1559`] -#[derive(Debug, Clone, PartialEq, Eq, Hash, Compact, Default, Serialize, Deserialize)] -#[cfg_attr(test, derive(arbitrary::Arbitrary))] +#[derive(Debug, Clone, PartialEq, Eq, Hash, Compact, Default)] +#[cfg_attr(test, derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize))] #[cfg_attr(test, crate::add_arbitrary_tests(compact))] pub(crate) struct TxEip1559 { chain_id: ChainId, diff --git a/crates/storage/codecs/src/alloy/transaction/eip2930.rs b/crates/storage/codecs/src/alloy/transaction/eip2930.rs index b8f24db747e7b..4717884d9252c 100644 --- a/crates/storage/codecs/src/alloy/transaction/eip2930.rs +++ b/crates/storage/codecs/src/alloy/transaction/eip2930.rs @@ -3,7 +3,6 @@ use alloy_consensus::TxEip2930 as AlloyTxEip2930; use alloy_eips::eip2930::AccessList; use alloy_primitives::{Bytes, ChainId, TxKind, U256}; use reth_codecs_derive::add_arbitrary_tests; -use serde::{Deserialize, Serialize}; /// Transaction with an [`AccessList`] ([EIP-2930](https://eips.ethereum.org/EIPS/eip-2930)). /// @@ -13,8 +12,8 @@ use serde::{Deserialize, Serialize}; /// will automatically apply to this type. /// /// Notice: Make sure this struct is 1:1 with [`alloy_consensus::TxEip2930`] -#[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Serialize, Deserialize, Compact)] -#[cfg_attr(test, derive(arbitrary::Arbitrary))] +#[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Compact)] +#[cfg_attr(test, derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize))] #[add_arbitrary_tests(compact)] pub(crate) struct TxEip2930 { chain_id: ChainId, diff --git a/crates/storage/codecs/src/alloy/transaction/eip4844.rs b/crates/storage/codecs/src/alloy/transaction/eip4844.rs index e82b9affff476..9f58001fd3718 100644 --- a/crates/storage/codecs/src/alloy/transaction/eip4844.rs +++ b/crates/storage/codecs/src/alloy/transaction/eip4844.rs @@ -4,7 +4,6 @@ use alloy_consensus::TxEip4844 as AlloyTxEip4844; use alloy_eips::eip2930::AccessList; use alloy_primitives::{Address, Bytes, ChainId, B256, U256}; use reth_codecs_derive::add_arbitrary_tests; -use serde::{Deserialize, Serialize}; /// [EIP-4844 Blob Transaction](https://eips.ethereum.org/EIPS/eip-4844#blob-transaction) /// @@ -14,8 +13,8 @@ use serde::{Deserialize, Serialize}; /// will automatically apply to this type. 
/// /// Notice: Make sure this struct is 1:1 with [`alloy_consensus::TxEip4844`] -#[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Serialize, Deserialize, Compact)] -#[cfg_attr(test, derive(arbitrary::Arbitrary))] +#[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Compact)] +#[cfg_attr(test, derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize))] #[add_arbitrary_tests(compact)] pub(crate) struct TxEip4844 { chain_id: ChainId, diff --git a/crates/storage/codecs/src/alloy/transaction/eip7702.rs b/crates/storage/codecs/src/alloy/transaction/eip7702.rs index 5f34ac1c253e2..2cc0786b1bda8 100644 --- a/crates/storage/codecs/src/alloy/transaction/eip7702.rs +++ b/crates/storage/codecs/src/alloy/transaction/eip7702.rs @@ -4,7 +4,6 @@ use alloy_consensus::TxEip7702 as AlloyTxEip7702; use alloy_eips::{eip2930::AccessList, eip7702::SignedAuthorization}; use alloy_primitives::{Address, Bytes, ChainId, U256}; use reth_codecs_derive::add_arbitrary_tests; -use serde::{Deserialize, Serialize}; /// [EIP-7702 Set Code Transaction](https://eips.ethereum.org/EIPS/eip-7702) /// @@ -14,8 +13,8 @@ use serde::{Deserialize, Serialize}; /// will automatically apply to this type. /// /// Notice: Make sure this struct is 1:1 with [`alloy_consensus::TxEip7702`] -#[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Serialize, Deserialize, Compact)] -#[cfg_attr(test, derive(arbitrary::Arbitrary))] +#[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Compact)] +#[cfg_attr(test, derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize))] #[add_arbitrary_tests(compact)] pub(crate) struct TxEip7702 { chain_id: ChainId, diff --git a/crates/storage/codecs/src/alloy/transaction/legacy.rs b/crates/storage/codecs/src/alloy/transaction/legacy.rs index 641b27bf53b29..6be6204297c70 100644 --- a/crates/storage/codecs/src/alloy/transaction/legacy.rs +++ b/crates/storage/codecs/src/alloy/transaction/legacy.rs @@ -1,11 +1,10 @@ use crate::Compact; use alloy_consensus::TxLegacy as AlloyTxLegacy; use alloy_primitives::{Bytes, ChainId, TxKind, U256}; -use serde::{Deserialize, Serialize}; /// Legacy transaction. -#[derive(Debug, Clone, PartialEq, Eq, Default, Compact, Serialize, Deserialize)] -#[cfg_attr(test, derive(arbitrary::Arbitrary))] +#[derive(Debug, Clone, PartialEq, Eq, Default, Compact)] +#[cfg_attr(test, derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize))] #[cfg_attr(test, crate::add_arbitrary_tests(compact))] pub(crate) struct TxLegacy { /// Added as EIP-155: Simple replay attack protection diff --git a/crates/storage/codecs/src/alloy/transaction/optimism.rs b/crates/storage/codecs/src/alloy/transaction/optimism.rs index c84b19559fd53..0332c1a125cb9 100644 --- a/crates/storage/codecs/src/alloy/transaction/optimism.rs +++ b/crates/storage/codecs/src/alloy/transaction/optimism.rs @@ -2,7 +2,6 @@ use crate::Compact; use alloy_primitives::{Address, Bytes, TxKind, B256, U256}; use op_alloy_consensus::TxDeposit as AlloyTxDeposit; use reth_codecs_derive::add_arbitrary_tests; -use serde::{Deserialize, Serialize}; /// Deposit transactions, also known as deposits are initiated on L1, and executed on L2. /// @@ -12,8 +11,8 @@ use serde::{Deserialize, Serialize}; /// will automatically apply to this type. 
/// /// Notice: Make sure this struct is 1:1 with [`op_alloy_consensus::TxDeposit`] -#[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Serialize, Deserialize, Compact)] -#[cfg_attr(test, derive(arbitrary::Arbitrary))] +#[derive(Debug, Clone, PartialEq, Eq, Hash, Default, Compact)] +#[cfg_attr(test, derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize))] #[add_arbitrary_tests(compact)] pub(crate) struct TxDeposit { source_hash: B256, diff --git a/crates/storage/codecs/src/alloy/withdrawal.rs b/crates/storage/codecs/src/alloy/withdrawal.rs index 0ec1693210c06..16324c280cc2f 100644 --- a/crates/storage/codecs/src/alloy/withdrawal.rs +++ b/crates/storage/codecs/src/alloy/withdrawal.rs @@ -2,13 +2,12 @@ use crate::Compact; use alloy_eips::eip4895::Withdrawal as AlloyWithdrawal; use alloy_primitives::Address; use reth_codecs_derive::add_arbitrary_tests; -use serde::{Deserialize, Serialize}; /// Withdrawal acts as bridge which simplifies Compact implementation for AlloyWithdrawal. /// /// Notice: Make sure this struct is 1:1 with `alloy_eips::eip4895::Withdrawal` -#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize, Deserialize, Compact)] -#[cfg_attr(test, derive(arbitrary::Arbitrary))] +#[derive(Debug, Clone, PartialEq, Eq, Default, Compact)] +#[cfg_attr(test, derive(arbitrary::Arbitrary, serde::Serialize, serde::Deserialize))] #[add_arbitrary_tests(compact)] pub(crate) struct Withdrawal { /// Monotonically increasing identifier issued by consensus layer. From 20d695031a52f9ec698299749dc73005d9ce30b2 Mon Sep 17 00:00:00 2001 From: Abhishek kochar Date: Thu, 26 Sep 2024 15:38:03 +0800 Subject: [PATCH 15/84] chore(evm): replace reth-primitives with alloy (#11232) Signed-off-by: Abhishekkochar --- Cargo.lock | 1 + crates/evm/Cargo.toml | 4 ++++ crates/evm/execution-types/Cargo.toml | 4 ++++ crates/evm/execution-types/src/chain.rs | 9 ++++++--- crates/evm/execution-types/src/execute.rs | 3 ++- crates/evm/execution-types/src/execution_outcome.rs | 10 ++++------ crates/evm/src/either.rs | 3 ++- crates/evm/src/execute.rs | 5 +++-- crates/evm/src/lib.rs | 7 +++---- crates/evm/src/noop.rs | 3 ++- crates/evm/src/provider.rs | 3 ++- crates/evm/src/system_calls/eip2935.rs | 3 ++- crates/evm/src/system_calls/eip4788.rs | 3 ++- crates/evm/src/system_calls/eip7002.rs | 4 ++-- crates/evm/src/system_calls/eip7251.rs | 4 ++-- crates/evm/src/test_utils.rs | 3 ++- 16 files changed, 43 insertions(+), 26 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a624c269a8ad1..ed12e6bf0de62 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7283,6 +7283,7 @@ name = "reth-evm" version = "1.0.7" dependencies = [ "alloy-eips", + "alloy-primitives", "auto_impl", "futures-util", "metrics", diff --git a/crates/evm/Cargo.toml b/crates/evm/Cargo.toml index f520c75eeaa92..20070d421e97e 100644 --- a/crates/evm/Cargo.toml +++ b/crates/evm/Cargo.toml @@ -23,7 +23,11 @@ reth-storage-errors.workspace = true reth-execution-types.workspace = true revm.workspace = true + +# alloy +alloy-primitives.workspace = true alloy-eips.workspace = true + auto_impl.workspace = true futures-util.workspace = true metrics = { workspace = true, optional = true } diff --git a/crates/evm/execution-types/Cargo.toml b/crates/evm/execution-types/Cargo.toml index cf50b47a03b25..352eeb79f12e3 100644 --- a/crates/evm/execution-types/Cargo.toml +++ b/crates/evm/execution-types/Cargo.toml @@ -18,6 +18,10 @@ reth-trie.workspace = true revm.workspace = true +# alloy +alloy-primitives.workspace = true +alloy-eips.workspace = true + serde = { 
workspace = true, optional = true } [dev-dependencies] diff --git a/crates/evm/execution-types/src/chain.rs b/crates/evm/execution-types/src/chain.rs index 4d0dc694a05fe..588bc832e688c 100644 --- a/crates/evm/execution-types/src/chain.rs +++ b/crates/evm/execution-types/src/chain.rs @@ -2,11 +2,13 @@ use crate::ExecutionOutcome; use alloc::{borrow::Cow, collections::BTreeMap}; +use alloy_eips::{eip1898::ForkBlock, BlockNumHash}; +use alloy_primitives::{Address, BlockHash, BlockNumber, TxHash}; use core::{fmt, ops::RangeInclusive}; use reth_execution_errors::{BlockExecutionError, InternalBlockExecutionError}; use reth_primitives::{ - Address, BlockHash, BlockNumHash, BlockNumber, ForkBlock, Receipt, SealedBlock, - SealedBlockWithSenders, SealedHeader, TransactionSigned, TransactionSignedEcRecovered, TxHash, + Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, TransactionSigned, + TransactionSignedEcRecovered, }; use reth_trie::updates::TrieUpdates; use revm::db::BundleState; @@ -508,7 +510,8 @@ pub enum ChainSplit { #[cfg(test)] mod tests { use super::*; - use reth_primitives::{Receipt, Receipts, TxType, B256}; + use alloy_primitives::B256; + use reth_primitives::{Receipt, Receipts, TxType}; use revm::primitives::{AccountInfo, HashMap}; #[test] diff --git a/crates/evm/execution-types/src/execute.rs b/crates/evm/execution-types/src/execute.rs index 2933fd59815f8..0cf5d70507938 100644 --- a/crates/evm/execution-types/src/execute.rs +++ b/crates/evm/execution-types/src/execute.rs @@ -1,4 +1,5 @@ -use reth_primitives::{Request, U256}; +use alloy_primitives::U256; +use reth_primitives::Request; use revm::db::BundleState; /// A helper type for ethereum block inputs that consists of a block and the total difficulty. diff --git a/crates/evm/execution-types/src/execution_outcome.rs b/crates/evm/execution-types/src/execution_outcome.rs index 8996ac9959e37..47caa3b101f0c 100644 --- a/crates/evm/execution-types/src/execution_outcome.rs +++ b/crates/evm/execution-types/src/execution_outcome.rs @@ -1,8 +1,6 @@ use crate::BlockExecutionOutput; -use reth_primitives::{ - logs_bloom, Account, Address, BlockNumber, Bloom, Bytecode, Log, Receipt, Receipts, Requests, - StorageEntry, B256, U256, -}; +use alloy_primitives::{Address, BlockNumber, Bloom, Log, B256, U256}; +use reth_primitives::{logs_bloom, Account, Bytecode, Receipt, Receipts, Requests, StorageEntry}; use reth_trie::HashedPostState; use revm::{ db::{states::BundleState, BundleAccount}, @@ -371,8 +369,8 @@ impl From<(BlockExecutionOutput, BlockNumber)> for ExecutionOutcome { mod tests { use super::*; use alloy_eips::{eip6110::DepositRequest, eip7002::WithdrawalRequest}; - use alloy_primitives::{FixedBytes, LogData}; - use reth_primitives::{Address, Receipts, Request, Requests, TxType, B256}; + use alloy_primitives::{Address, FixedBytes, LogData, B256}; + use reth_primitives::{Receipts, Request, Requests, TxType}; use std::collections::HashMap; #[test] diff --git a/crates/evm/src/either.rs b/crates/evm/src/either.rs index fde316da9f5b1..a3fca50ec7eee 100644 --- a/crates/evm/src/either.rs +++ b/crates/evm/src/either.rs @@ -3,9 +3,10 @@ use core::fmt::Display; use crate::execute::{BatchExecutor, BlockExecutorProvider, Executor}; +use alloy_primitives::BlockNumber; use reth_execution_errors::BlockExecutionError; use reth_execution_types::{BlockExecutionInput, BlockExecutionOutput, ExecutionOutcome}; -use reth_primitives::{BlockNumber, BlockWithSenders, Receipt}; +use reth_primitives::{BlockWithSenders, Receipt}; use 
reth_prune_types::PruneModes; use reth_storage_errors::provider::ProviderError; use revm_primitives::db::Database; diff --git a/crates/evm/src/execute.rs b/crates/evm/src/execute.rs index 60a29d4de9fc9..ffc08469dc8d6 100644 --- a/crates/evm/src/execute.rs +++ b/crates/evm/src/execute.rs @@ -5,8 +5,9 @@ pub use reth_execution_errors::{BlockExecutionError, BlockValidationError}; pub use reth_execution_types::{BlockExecutionInput, BlockExecutionOutput, ExecutionOutcome}; pub use reth_storage_errors::provider::ProviderError; +use alloy_primitives::BlockNumber; use core::fmt::Display; -use reth_primitives::{BlockNumber, BlockWithSenders, Receipt}; +use reth_primitives::{BlockWithSenders, Receipt}; use reth_prune_types::PruneModes; use revm::State; use revm_primitives::db::Database; @@ -151,8 +152,8 @@ pub trait BlockExecutorProvider: Send + Sync + Clone + Unpin + 'static { #[cfg(test)] mod tests { use super::*; + use alloy_primitives::U256; use revm::db::{CacheDB, EmptyDBTyped}; - use revm_primitives::U256; use std::marker::PhantomData; #[derive(Clone, Default)] diff --git a/crates/evm/src/lib.rs b/crates/evm/src/lib.rs index 569491a1b02c1..6fcb3d9f8c3d1 100644 --- a/crates/evm/src/lib.rs +++ b/crates/evm/src/lib.rs @@ -14,12 +14,11 @@ extern crate alloc; use core::ops::Deref; use crate::builder::RethEvmBuilder; -use reth_primitives::{Address, TransactionSigned, TransactionSignedEcRecovered, B256, U256}; +use alloy_primitives::{Address, Bytes, B256, U256}; +use reth_primitives::{TransactionSigned, TransactionSignedEcRecovered}; use reth_primitives_traits::BlockHeader; use revm::{Database, Evm, GetInspector}; -use revm_primitives::{ - BlockEnv, Bytes, CfgEnvWithHandlerCfg, Env, EnvWithHandlerCfg, SpecId, TxEnv, -}; +use revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg, Env, EnvWithHandlerCfg, SpecId, TxEnv}; pub mod builder; pub mod either; diff --git a/crates/evm/src/noop.rs b/crates/evm/src/noop.rs index ae6171a506a9d..392bfd0bd722d 100644 --- a/crates/evm/src/noop.rs +++ b/crates/evm/src/noop.rs @@ -1,9 +1,10 @@ //! A no operation block executor implementation. +use alloy_primitives::BlockNumber; use core::fmt::Display; use reth_execution_errors::BlockExecutionError; use reth_execution_types::{BlockExecutionInput, BlockExecutionOutput, ExecutionOutcome}; -use reth_primitives::{BlockNumber, BlockWithSenders, Receipt}; +use reth_primitives::{BlockWithSenders, Receipt}; use reth_prune_types::PruneModes; use reth_storage_errors::provider::ProviderError; use revm::State; diff --git a/crates/evm/src/provider.rs b/crates/evm/src/provider.rs index fc3d4ff94f79a..8db828ec4a001 100644 --- a/crates/evm/src/provider.rs +++ b/crates/evm/src/provider.rs @@ -1,7 +1,8 @@ //! Provider trait for populating the EVM environment. 
use crate::ConfigureEvmEnv; -use reth_primitives::{BlockHashOrNumber, Header}; +use alloy_eips::eip1898::BlockHashOrNumber; +use reth_primitives::Header; use reth_storage_errors::provider::ProviderResult; use revm::primitives::{BlockEnv, CfgEnv, CfgEnvWithHandlerCfg, SpecId}; diff --git a/crates/evm/src/system_calls/eip2935.rs b/crates/evm/src/system_calls/eip2935.rs index 4a1ec14a467e2..7b09a4813079c 100644 --- a/crates/evm/src/system_calls/eip2935.rs +++ b/crates/evm/src/system_calls/eip2935.rs @@ -4,12 +4,13 @@ use alloc::{boxed::Box, string::ToString}; use alloy_eips::eip2935::HISTORY_STORAGE_ADDRESS; use crate::ConfigureEvm; +use alloy_primitives::B256; use core::fmt::Display; use reth_chainspec::EthereumHardforks; use reth_execution_errors::{BlockExecutionError, BlockValidationError}; use reth_primitives::Header; use revm::{interpreter::Host, Database, DatabaseCommit, Evm}; -use revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ResultAndState, B256}; +use revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ResultAndState}; /// Apply the [EIP-2935](https://eips.ethereum.org/EIPS/eip-2935) pre block contract call. /// diff --git a/crates/evm/src/system_calls/eip4788.rs b/crates/evm/src/system_calls/eip4788.rs index d1148f6cdc747..a1f97bf5e9087 100644 --- a/crates/evm/src/system_calls/eip4788.rs +++ b/crates/evm/src/system_calls/eip4788.rs @@ -3,11 +3,12 @@ use alloc::{boxed::Box, string::ToString}; use crate::ConfigureEvm; use alloy_eips::eip4788::BEACON_ROOTS_ADDRESS; +use alloy_primitives::B256; use reth_chainspec::EthereumHardforks; use reth_execution_errors::{BlockExecutionError, BlockValidationError}; use reth_primitives::Header; use revm::{interpreter::Host, Database, DatabaseCommit, Evm}; -use revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ResultAndState, B256}; +use revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ResultAndState}; /// Apply the [EIP-4788](https://eips.ethereum.org/EIPS/eip-4788) pre block contract call. /// diff --git a/crates/evm/src/system_calls/eip7002.rs b/crates/evm/src/system_calls/eip7002.rs index 9b770cfceb556..d03268254f43d 100644 --- a/crates/evm/src/system_calls/eip7002.rs +++ b/crates/evm/src/system_calls/eip7002.rs @@ -4,12 +4,12 @@ use core::fmt::Display; use crate::ConfigureEvm; use alloy_eips::eip7002::{WithdrawalRequest, WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS}; +use alloy_primitives::{Address, Bytes, FixedBytes}; use reth_execution_errors::{BlockExecutionError, BlockValidationError}; use reth_primitives::{Buf, Header, Request}; use revm::{interpreter::Host, Database, DatabaseCommit, Evm}; use revm_primitives::{ - Address, BlockEnv, Bytes, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ExecutionResult, FixedBytes, - ResultAndState, + BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ExecutionResult, ResultAndState, }; /// Apply the [EIP-7002](https://eips.ethereum.org/EIPS/eip-7002) post block contract call. 
diff --git a/crates/evm/src/system_calls/eip7251.rs b/crates/evm/src/system_calls/eip7251.rs index 8a70496710282..8247f06b18414 100644 --- a/crates/evm/src/system_calls/eip7251.rs +++ b/crates/evm/src/system_calls/eip7251.rs @@ -4,12 +4,12 @@ use core::fmt::Display; use crate::ConfigureEvm; use alloy_eips::eip7251::{ConsolidationRequest, CONSOLIDATION_REQUEST_PREDEPLOY_ADDRESS}; +use alloy_primitives::{Address, Bytes, FixedBytes}; use reth_execution_errors::{BlockExecutionError, BlockValidationError}; use reth_primitives::{Buf, Header, Request}; use revm::{interpreter::Host, Database, DatabaseCommit, Evm}; use revm_primitives::{ - Address, BlockEnv, Bytes, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ExecutionResult, FixedBytes, - ResultAndState, + BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ExecutionResult, ResultAndState, }; /// Apply the [EIP-7251](https://eips.ethereum.org/EIPS/eip-7251) post block contract call. diff --git a/crates/evm/src/test_utils.rs b/crates/evm/src/test_utils.rs index f30262c281e08..cf45930aece94 100644 --- a/crates/evm/src/test_utils.rs +++ b/crates/evm/src/test_utils.rs @@ -3,10 +3,11 @@ use crate::execute::{ BatchExecutor, BlockExecutionInput, BlockExecutionOutput, BlockExecutorProvider, Executor, }; +use alloy_primitives::BlockNumber; use parking_lot::Mutex; use reth_execution_errors::BlockExecutionError; use reth_execution_types::ExecutionOutcome; -use reth_primitives::{BlockNumber, BlockWithSenders, Receipt}; +use reth_primitives::{BlockWithSenders, Receipt}; use reth_prune_types::PruneModes; use reth_storage_errors::provider::ProviderError; use revm::State; From 4a05826097c4ca2b57c4db8a4fb8384ef74d38c4 Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Thu, 26 Sep 2024 10:10:57 +0200 Subject: [PATCH 16/84] ci: pin clippy to working version (#11237) --- .github/workflows/lint.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index b7d9fc3a27ac2..0c458e8eee281 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -39,6 +39,7 @@ jobs: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@nightly with: + toolchain: nightly-2024-09-25 components: clippy - uses: Swatinem/rust-cache@v2 with: From 353fd60d29ed9b1dac752b8d9eaed7da4b6118a2 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Thu, 26 Sep 2024 11:24:04 +0300 Subject: [PATCH 17/84] refactor: extract optimism receipts hashing logic to `reth-optimism-consensus` (#11230) --- Cargo.lock | 5 ++- crates/consensus/auto-seal/Cargo.toml | 5 ++- crates/consensus/auto-seal/src/lib.rs | 8 +++- crates/evm/execution-types/Cargo.toml | 3 +- .../execution-types/src/execution_outcome.rs | 17 ++++---- crates/optimism/consensus/src/lib.rs | 2 + crates/optimism/consensus/src/proof.rs | 37 ++++++++++++++++- crates/optimism/payload/Cargo.toml | 1 + crates/optimism/payload/src/builder.rs | 9 ++-- crates/optimism/rpc/Cargo.toml | 1 + crates/optimism/rpc/src/eth/pending_block.rs | 15 ++++--- crates/primitives/Cargo.toml | 5 --- crates/primitives/src/proofs.rs | 41 ------------------- crates/primitives/src/receipt.rs | 25 +++-------- 14 files changed, 80 insertions(+), 94 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ed12e6bf0de62..a02679addc75d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6255,6 +6255,7 @@ dependencies = [ "reth-execution-types", "reth-network-p2p", "reth-network-peers", + "reth-optimism-consensus", "reth-primitives", "reth-provider", "reth-revm", @@ -7368,7 +7369,6 @@ version = "1.0.7" dependencies = [ 
"alloy-eips", "alloy-primitives", - "reth-chainspec", "reth-execution-errors", "reth-primitives", "reth-trie", @@ -8117,6 +8117,7 @@ dependencies = [ "reth-evm", "reth-evm-optimism", "reth-execution-types", + "reth-optimism-consensus", "reth-optimism-forks", "reth-payload-builder", "reth-payload-primitives", @@ -8163,6 +8164,7 @@ dependencies = [ "reth-node-api", "reth-node-builder", "reth-optimism-chainspec", + "reth-optimism-consensus", "reth-optimism-forks", "reth-primitives", "reth-provider", @@ -8262,7 +8264,6 @@ dependencies = [ "reth-codecs", "reth-ethereum-forks", "reth-optimism-chainspec", - "reth-optimism-forks", "reth-primitives-traits", "reth-static-file-types", "reth-trie-common", diff --git a/crates/consensus/auto-seal/Cargo.toml b/crates/consensus/auto-seal/Cargo.toml index d712071a19279..b4b2812303367 100644 --- a/crates/consensus/auto-seal/Cargo.toml +++ b/crates/consensus/auto-seal/Cargo.toml @@ -35,6 +35,9 @@ alloy-primitives.workspace = true revm-primitives.workspace = true alloy-rpc-types-engine.workspace = true +# optimism +reth-optimism-consensus = { workspace = true, optional = true } + # async futures-util.workspace = true tokio = { workspace = true, features = ["sync", "time"] } @@ -42,4 +45,4 @@ tokio-stream.workspace = true tracing.workspace = true [features] -optimism = ["reth-provider/optimism"] +optimism = ["reth-provider/optimism", "reth-optimism-consensus"] diff --git a/crates/consensus/auto-seal/src/lib.rs b/crates/consensus/auto-seal/src/lib.rs index fea15f84d510a..db418285cf3ee 100644 --- a/crates/consensus/auto-seal/src/lib.rs +++ b/crates/consensus/auto-seal/src/lib.rs @@ -419,7 +419,13 @@ impl StorageInner { header.receipts_root = { #[cfg(feature = "optimism")] let receipts_root = execution_outcome - .optimism_receipts_root_slow(header.number, &chain_spec, header.timestamp) + .generic_receipts_root_slow(header.number, |receipts| { + reth_optimism_consensus::calculate_receipt_root_no_memo_optimism( + receipts, + &chain_spec, + header.timestamp, + ) + }) .expect("Receipts is present"); #[cfg(not(feature = "optimism"))] diff --git a/crates/evm/execution-types/Cargo.toml b/crates/evm/execution-types/Cargo.toml index 352eeb79f12e3..c286f07906bbb 100644 --- a/crates/evm/execution-types/Cargo.toml +++ b/crates/evm/execution-types/Cargo.toml @@ -12,7 +12,6 @@ workspace = true [dependencies] reth-primitives.workspace = true -reth-chainspec = { workspace = true, optional = true } reth-execution-errors.workspace = true reth-trie.workspace = true @@ -31,6 +30,6 @@ alloy-eips.workspace = true [features] default = ["std"] -optimism = ["dep:reth-chainspec"] +optimism = [] serde = ["dep:serde", "reth-trie/serde", "revm/serde"] std = [] diff --git a/crates/evm/execution-types/src/execution_outcome.rs b/crates/evm/execution-types/src/execution_outcome.rs index 47caa3b101f0c..3b21f36dd13cd 100644 --- a/crates/evm/execution-types/src/execution_outcome.rs +++ b/crates/evm/execution-types/src/execution_outcome.rs @@ -196,24 +196,21 @@ impl ExecutionOutcome { #[cfg(feature = "optimism")] panic!("This should not be called in optimism mode. Use `optimism_receipts_root_slow` instead."); #[cfg(not(feature = "optimism"))] - self.receipts.root_slow(self.block_number_to_index(_block_number)?) + self.receipts.root_slow( + self.block_number_to_index(_block_number)?, + reth_primitives::proofs::calculate_receipt_root_no_memo, + ) } /// Returns the receipt root for all recorded receipts. 
 /// Note: this function calculated Bloom filters for every receipt and created merkle trees
 /// of receipt. This is a expensive operation.
-    #[cfg(feature = "optimism")]
-    pub fn optimism_receipts_root_slow(
+    pub fn generic_receipts_root_slow(
         &self,
         block_number: BlockNumber,
-        chain_spec: impl reth_chainspec::Hardforks,
-        timestamp: u64,
+        f: impl FnOnce(&[&Receipt]) -> B256,
     ) -> Option<B256> {
-        self.receipts.optimism_root_slow(
-            self.block_number_to_index(block_number)?,
-            chain_spec,
-            timestamp,
-        )
+        self.receipts.root_slow(self.block_number_to_index(block_number)?, f)
     }
 
     /// Returns reference to receipts.
diff --git a/crates/optimism/consensus/src/lib.rs b/crates/optimism/consensus/src/lib.rs
index 1f529a9a927ff..d040d32e04d1f 100644
--- a/crates/optimism/consensus/src/lib.rs
+++ b/crates/optimism/consensus/src/lib.rs
@@ -25,6 +25,8 @@ use reth_primitives::{
 use std::{sync::Arc, time::SystemTime};
 
 mod proof;
+pub use proof::calculate_receipt_root_no_memo_optimism;
+
 mod validation;
 pub use validation::validate_block_post_execution;
 
diff --git a/crates/optimism/consensus/src/proof.rs b/crates/optimism/consensus/src/proof.rs
index 383902659989a..b283356016c2d 100644
--- a/crates/optimism/consensus/src/proof.rs
+++ b/crates/optimism/consensus/src/proof.rs
@@ -3,7 +3,7 @@
 use alloy_primitives::B256;
 use reth_chainspec::ChainSpec;
 use reth_optimism_forks::OptimismHardfork;
-use reth_primitives::ReceiptWithBloom;
+use reth_primitives::{Receipt, ReceiptWithBloom, ReceiptWithBloomRef};
 use reth_trie_common::root::ordered_trie_root_with_encoder;
 
 /// Calculates the receipt root for a header.
@@ -37,6 +37,41 @@ pub(crate) fn calculate_receipt_root_optimism(
     ordered_trie_root_with_encoder(receipts, |r, buf| r.encode_inner(buf, false))
 }
 
+/// Calculates the receipt root for a header for the reference type of [Receipt].
+///
+/// NOTE: Prefer calculate receipt root optimism if you have log blooms memoized.
+pub fn calculate_receipt_root_no_memo_optimism(
+    receipts: &[&Receipt],
+    chain_spec: impl reth_chainspec::Hardforks,
+    timestamp: u64,
+) -> B256 {
+    // There is a minor bug in op-geth and op-erigon where in the Regolith hardfork,
+    // the receipt root calculation does not include the deposit nonce in the receipt
+    // encoding. In the Regolith Hardfork, we must strip the deposit nonce from the
+    // receipts before calculating the receipt root. This was corrected in the Canyon
+    // hardfork.
+ if chain_spec.is_fork_active_at_timestamp(OptimismHardfork::Regolith, timestamp) && + !chain_spec.is_fork_active_at_timestamp(OptimismHardfork::Canyon, timestamp) + { + let receipts = receipts + .iter() + .map(|r| { + let mut r = (*r).clone(); + r.deposit_nonce = None; + r + }) + .collect::>(); + + return ordered_trie_root_with_encoder(&receipts, |r, buf| { + ReceiptWithBloomRef::from(r).encode_inner(buf, false) + }) + } + + ordered_trie_root_with_encoder(receipts, |r, buf| { + ReceiptWithBloomRef::from(*r).encode_inner(buf, false) + }) +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/optimism/payload/Cargo.toml b/crates/optimism/payload/Cargo.toml index e64b72610d99c..e58b26ee83dfd 100644 --- a/crates/optimism/payload/Cargo.toml +++ b/crates/optimism/payload/Cargo.toml @@ -29,6 +29,7 @@ reth-chain-state.workspace = true # op-reth reth-evm-optimism.workspace = true +reth-optimism-consensus.workspace = true reth-optimism-forks.workspace = true # ethereum diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index 18c1ece8ffecd..84e69cb92e73e 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -11,6 +11,7 @@ use reth_evm::{ NextBlockEnvAttributes, }; use reth_execution_types::ExecutionOutcome; +use reth_optimism_consensus::calculate_receipt_root_no_memo_optimism; use reth_optimism_forks::OptimismHardfork; use reth_payload_primitives::{PayloadBuilderAttributes, PayloadBuilderError}; use reth_primitives::{ @@ -443,11 +444,9 @@ where Vec::new(), ); let receipts_root = execution_outcome - .optimism_receipts_root_slow( - block_number, - &chain_spec, - attributes.payload_attributes.timestamp, - ) + .generic_receipts_root_slow(block_number, |receipts| { + calculate_receipt_root_no_memo_optimism(receipts, &chain_spec, attributes.timestamp()) + }) .expect("Number is in range"); let logs_bloom = execution_outcome.block_logs_bloom(block_number).expect("Number is in range"); diff --git a/crates/optimism/rpc/Cargo.toml b/crates/optimism/rpc/Cargo.toml index 443830cc53210..e82ff18276524 100644 --- a/crates/optimism/rpc/Cargo.toml +++ b/crates/optimism/rpc/Cargo.toml @@ -29,6 +29,7 @@ reth-chainspec.workspace = true # op-reth reth-evm-optimism.workspace = true +reth-optimism-consensus.workspace = true reth-optimism-forks.workspace = true # ethereum diff --git a/crates/optimism/rpc/src/eth/pending_block.rs b/crates/optimism/rpc/src/eth/pending_block.rs index c96dff40b0918..5b716f39320a5 100644 --- a/crates/optimism/rpc/src/eth/pending_block.rs +++ b/crates/optimism/rpc/src/eth/pending_block.rs @@ -4,6 +4,7 @@ use alloy_primitives::{BlockNumber, B256}; use reth_chainspec::EthereumHardforks; use reth_evm::ConfigureEvm; use reth_node_api::{FullNodeComponents, NodeTypes}; +use reth_optimism_consensus::calculate_receipt_root_no_memo_optimism; use reth_primitives::{ revm_primitives::BlockEnv, BlockNumberOrTag, Header, Receipt, SealedBlockWithSenders, }; @@ -79,16 +80,18 @@ where fn receipts_root( &self, - _block_env: &BlockEnv, + block_env: &BlockEnv, execution_outcome: &ExecutionOutcome, block_number: BlockNumber, ) -> B256 { execution_outcome - .optimism_receipts_root_slow( - block_number, - self.provider().chain_spec().as_ref(), - _block_env.timestamp.to::(), - ) + .generic_receipts_root_slow(block_number, |receipts| { + calculate_receipt_root_no_memo_optimism( + receipts, + self.provider().chain_spec().as_ref(), + block_env.timestamp.to::(), + ) + }) .expect("Block is present") } } diff --git 
a/crates/primitives/Cargo.toml b/crates/primitives/Cargo.toml index 72de34e3f38ea..9745159e8f522 100644 --- a/crates/primitives/Cargo.toml +++ b/crates/primitives/Cargo.toml @@ -18,12 +18,10 @@ reth-ethereum-forks.workspace = true reth-static-file-types.workspace = true reth-trie-common.workspace = true revm-primitives = { workspace = true, features = ["serde"] } -reth-chainspec = { workspace = true, optional = true } reth-codecs = { workspace = true, optional = true } # op-reth reth-optimism-chainspec = { workspace = true, optional = true } -reth-optimism-forks = { workspace = true, optional = true } # ethereum alloy-consensus.workspace = true @@ -96,7 +94,6 @@ asm-keccak = ["alloy-primitives/asm-keccak"] arbitrary = [ "reth-primitives-traits/arbitrary", "revm-primitives/arbitrary", - "reth-chainspec?/arbitrary", "reth-ethereum-forks/arbitrary", "alloy-eips/arbitrary", "dep:arbitrary", @@ -111,12 +108,10 @@ c-kzg = [ "alloy-consensus/kzg", ] optimism = [ - "reth-chainspec/optimism", "revm-primitives/optimism", "reth-codecs?/optimism", "dep:reth-optimism-chainspec", "dep:op-alloy-consensus", - "reth-optimism-forks", ] alloy-compat = [ "dep:alloy-rpc-types", diff --git a/crates/primitives/src/proofs.rs b/crates/primitives/src/proofs.rs index 7f41fa9bd20f2..a19564de0938b 100644 --- a/crates/primitives/src/proofs.rs +++ b/crates/primitives/src/proofs.rs @@ -49,47 +49,6 @@ pub fn calculate_receipt_root_no_memo(receipts: &[&Receipt]) -> B256 { }) } -/// Calculates the receipt root for a header for the reference type of [Receipt]. -/// -/// NOTE: Prefer calculate receipt root optimism if you have log blooms memoized. -#[cfg(feature = "optimism")] -pub fn calculate_receipt_root_no_memo_optimism( - receipts: &[&Receipt], - chain_spec: impl reth_chainspec::Hardforks, - timestamp: u64, -) -> B256 { - // There is a minor bug in op-geth and op-erigon where in the Regolith hardfork, - // the receipt root calculation does not include the deposit nonce in the receipt - // encoding. In the Regolith Hardfork, we must strip the deposit nonce from the - // receipts before calculating the receipt root. This was corrected in the Canyon - // hardfork. - - if chain_spec - .is_fork_active_at_timestamp(reth_optimism_forks::OptimismHardfork::Regolith, timestamp) && - !chain_spec.is_fork_active_at_timestamp( - reth_optimism_forks::OptimismHardfork::Canyon, - timestamp, - ) - { - let receipts = receipts - .iter() - .map(|r| { - let mut r = (*r).clone(); - r.deposit_nonce = None; - r - }) - .collect::>(); - - return ordered_trie_root_with_encoder(&receipts, |r, buf| { - ReceiptWithBloomRef::from(r).encode_inner(buf, false) - }) - } - - ordered_trie_root_with_encoder(receipts, |r, buf| { - ReceiptWithBloomRef::from(*r).encode_inner(buf, false) - }) -} - /// Calculates the root hash for ommer/uncle headers. pub fn calculate_ommers_root(ommers: &[Header]) -> B256 { // Check if `ommers` list is empty diff --git a/crates/primitives/src/receipt.rs b/crates/primitives/src/receipt.rs index 6b1b62ba6ffbe..aa9b6c6d7c238 100644 --- a/crates/primitives/src/receipt.rs +++ b/crates/primitives/src/receipt.rs @@ -100,26 +100,11 @@ impl Receipts { self.receipt_vec.push(receipts); } - /// Retrieves the receipt root for all recorded receipts from index. - pub fn root_slow(&self, index: usize) -> Option { - Some(crate::proofs::calculate_receipt_root_no_memo( - &self.receipt_vec[index].iter().map(Option::as_ref).collect::>>()?, - )) - } - - /// Retrieves the receipt root for all recorded receipts from index. 
- #[cfg(feature = "optimism")] - pub fn optimism_root_slow( - &self, - index: usize, - chain_spec: impl reth_chainspec::Hardforks, - timestamp: u64, - ) -> Option { - Some(crate::proofs::calculate_receipt_root_no_memo_optimism( - &self.receipt_vec[index].iter().map(Option::as_ref).collect::>>()?, - chain_spec, - timestamp, - )) + /// Retrieves all recorded receipts from index and calculates the root using the given closure. + pub fn root_slow(&self, index: usize, f: impl FnOnce(&[&Receipt]) -> B256) -> Option { + let receipts = + self.receipt_vec[index].iter().map(Option::as_ref).collect::>>()?; + Some(f(receipts.as_slice())) } } From 42d2e485deb0d0490f051ee431d0421376b01c01 Mon Sep 17 00:00:00 2001 From: Federico Gimenez Date: Thu, 26 Sep 2024 10:25:19 +0200 Subject: [PATCH 18/84] chore(trie): replace ParallelStateRoot with AsyncStateRoot (#11213) --- crates/blockchain-tree/Cargo.toml | 2 +- crates/engine/tree/src/tree/mod.rs | 16 +- crates/trie/parallel/Cargo.toml | 14 +- crates/trie/parallel/benches/root.rs | 10 +- crates/trie/parallel/src/async_root.rs | 328 ------------------ crates/trie/parallel/src/lib.rs | 5 - crates/trie/parallel/src/parallel_root.rs | 90 +++-- .../trie/parallel/src/storage_root_targets.rs | 1 - 8 files changed, 67 insertions(+), 399 deletions(-) delete mode 100644 crates/trie/parallel/src/async_root.rs diff --git a/crates/blockchain-tree/Cargo.toml b/crates/blockchain-tree/Cargo.toml index bc7b340baaba4..d0718c97b8ccd 100644 --- a/crates/blockchain-tree/Cargo.toml +++ b/crates/blockchain-tree/Cargo.toml @@ -25,7 +25,7 @@ reth-execution-types.workspace = true reth-stages-api.workspace = true reth-trie = { workspace = true, features = ["metrics"] } reth-trie-db = { workspace = true, features = ["metrics"] } -reth-trie-parallel = { workspace = true, features = ["parallel"] } +reth-trie-parallel.workspace = true reth-network.workspace = true reth-consensus.workspace = true reth-node-types.workspace = true diff --git a/crates/engine/tree/src/tree/mod.rs b/crates/engine/tree/src/tree/mod.rs index 542bfc0862eb0..30c72254552db 100644 --- a/crates/engine/tree/src/tree/mod.rs +++ b/crates/engine/tree/src/tree/mod.rs @@ -40,7 +40,7 @@ use reth_provider::{ use reth_revm::database::StateProviderDatabase; use reth_stages_api::ControlFlow; use reth_trie::{updates::TrieUpdates, HashedPostState, TrieInput}; -use reth_trie_parallel::async_root::{AsyncStateRoot, AsyncStateRootError}; +use reth_trie_parallel::parallel_root::{ParallelStateRoot, ParallelStateRootError}; use std::{ cmp::Ordering, collections::{btree_map, hash_map, BTreeMap, HashMap, HashSet, VecDeque}, @@ -2195,11 +2195,11 @@ where let persistence_in_progress = self.persistence_state.in_progress(); if !persistence_in_progress { state_root_result = match self - .compute_state_root_async(block.parent_hash, &hashed_state) + .compute_state_root_parallel(block.parent_hash, &hashed_state) { Ok((state_root, trie_output)) => Some((state_root, trie_output)), - Err(AsyncStateRootError::Provider(ProviderError::ConsistentView(error))) => { - debug!(target: "engine::tree", %error, "Async state root computation failed consistency check, falling back"); + Err(ParallelStateRootError::Provider(ProviderError::ConsistentView(error))) => { + debug!(target: "engine", %error, "Parallel state root computation failed consistency check, falling back"); None } Err(error) => return Err(InsertBlockErrorKindTwo::Other(Box::new(error))), @@ -2265,7 +2265,7 @@ where Ok(InsertPayloadOk2::Inserted(BlockStatus2::Valid)) } - /// Compute state root for 
the given hashed post state asynchronously. + /// Compute state root for the given hashed post state in parallel. /// /// # Returns /// @@ -2273,11 +2273,11 @@ where /// Returns `Err(_)` if error was encountered during computation. /// `Err(ProviderError::ConsistentView(_))` can be safely ignored and fallback computation /// should be used instead. - fn compute_state_root_async( + fn compute_state_root_parallel( &self, parent_hash: B256, hashed_state: &HashedPostState, - ) -> Result<(B256, TrieUpdates), AsyncStateRootError> { + ) -> Result<(B256, TrieUpdates), ParallelStateRootError> { let consistent_view = ConsistentDbView::new_with_latest_tip(self.provider.clone())?; let mut input = TrieInput::default(); @@ -2299,7 +2299,7 @@ where // Extend with block we are validating root for. input.append_ref(hashed_state); - AsyncStateRoot::new(consistent_view, input).incremental_root_with_updates() + ParallelStateRoot::new(consistent_view, input).incremental_root_with_updates() } /// Handles an error that occurred while inserting a block. diff --git a/crates/trie/parallel/Cargo.toml b/crates/trie/parallel/Cargo.toml index 80fa0a70d0e19..64a4644bdce4b 100644 --- a/crates/trie/parallel/Cargo.toml +++ b/crates/trie/parallel/Cargo.toml @@ -31,13 +31,8 @@ tracing.workspace = true # misc thiserror.workspace = true derive_more.workspace = true - -# `async` feature -tokio = { workspace = true, optional = true, default-features = false } -itertools = { workspace = true, optional = true } - -# `parallel` feature -rayon = { workspace = true, optional = true } +rayon.workspace = true +itertools.workspace = true # `metrics` feature reth-metrics = { workspace = true, optional = true } @@ -58,12 +53,9 @@ proptest.workspace = true proptest-arbitrary-interop.workspace = true [features] -default = ["metrics", "async", "parallel"] +default = ["metrics"] metrics = ["reth-metrics", "dep:metrics", "reth-trie/metrics"] -async = ["tokio/sync", "itertools"] -parallel = ["rayon"] [[bench]] name = "root" -required-features = ["async", "parallel"] harness = false diff --git a/crates/trie/parallel/benches/root.rs b/crates/trie/parallel/benches/root.rs index e09fb93413890..d1ffe49dd0ad5 100644 --- a/crates/trie/parallel/benches/root.rs +++ b/crates/trie/parallel/benches/root.rs @@ -13,7 +13,7 @@ use reth_trie::{ TrieInput, }; use reth_trie_db::{DatabaseHashedCursorFactory, DatabaseStateRoot}; -use reth_trie_parallel::{async_root::AsyncStateRoot, parallel_root::ParallelStateRoot}; +use reth_trie_parallel::parallel_root::ParallelStateRoot; use std::collections::HashMap; pub fn calculate_state_root(c: &mut Criterion) { @@ -70,14 +70,6 @@ pub fn calculate_state_root(c: &mut Criterion) { |calculator| async { calculator.incremental_root() }, ); }); - - // async root - group.bench_function(BenchmarkId::new("async root", size), |b| { - b.iter_with_setup( - || AsyncStateRoot::new(view.clone(), TrieInput::from_state(updated_state.clone())), - |calculator| calculator.incremental_root(), - ); - }); } } diff --git a/crates/trie/parallel/src/async_root.rs b/crates/trie/parallel/src/async_root.rs deleted file mode 100644 index b6b57725cb703..0000000000000 --- a/crates/trie/parallel/src/async_root.rs +++ /dev/null @@ -1,328 +0,0 @@ -#[cfg(feature = "metrics")] -use crate::metrics::ParallelStateRootMetrics; -use crate::{stats::ParallelTrieTracker, storage_root_targets::StorageRootTargets}; -use alloy_primitives::B256; -use alloy_rlp::{BufMut, Encodable}; -use itertools::Itertools; -use reth_execution_errors::StorageRootError; -use 
reth_provider::{ - providers::ConsistentDbView, BlockReader, DBProvider, DatabaseProviderFactory, ProviderError, -}; -use reth_trie::{ - hashed_cursor::{HashedCursorFactory, HashedPostStateCursorFactory}, - node_iter::{TrieElement, TrieNodeIter}, - trie_cursor::{InMemoryTrieCursorFactory, TrieCursorFactory}, - updates::TrieUpdates, - walker::TrieWalker, - HashBuilder, Nibbles, StorageRoot, TrieAccount, TrieInput, -}; -use reth_trie_db::{DatabaseHashedCursorFactory, DatabaseTrieCursorFactory}; -use std::{collections::HashMap, sync::Arc}; -use thiserror::Error; -use tracing::*; - -/// Async state root calculator. -/// -/// The calculator starts off by launching tasks to compute storage roots. -/// Then, it immediately starts walking the state trie updating the necessary trie -/// nodes in the process. Upon encountering a leaf node, it will poll the storage root -/// task for the corresponding hashed address. -/// -/// Internally, the calculator uses [`ConsistentDbView`] since -/// it needs to rely on database state saying the same until -/// the last transaction is open. -/// See docs of using [`ConsistentDbView`] for caveats. -/// -/// For sync usage, take a look at `ParallelStateRoot`. -#[derive(Debug)] -pub struct AsyncStateRoot { - /// Consistent view of the database. - view: ConsistentDbView, - /// Trie input. - input: TrieInput, - /// Parallel state root metrics. - #[cfg(feature = "metrics")] - metrics: ParallelStateRootMetrics, -} - -impl AsyncStateRoot { - /// Create new async state root calculator. - pub fn new(view: ConsistentDbView, input: TrieInput) -> Self { - Self { - view, - input, - #[cfg(feature = "metrics")] - metrics: ParallelStateRootMetrics::default(), - } - } -} - -impl AsyncStateRoot -where - Factory: DatabaseProviderFactory + Clone + Send + Sync + 'static, -{ - /// Calculate incremental state root asynchronously. - pub fn incremental_root(self) -> Result { - self.calculate(false).map(|(root, _)| root) - } - - /// Calculate incremental state root with updates asynchronously. - pub fn incremental_root_with_updates(self) -> Result<(B256, TrieUpdates), AsyncStateRootError> { - self.calculate(true) - } - - fn calculate(self, retain_updates: bool) -> Result<(B256, TrieUpdates), AsyncStateRootError> { - let mut tracker = ParallelTrieTracker::default(); - let trie_nodes_sorted = Arc::new(self.input.nodes.into_sorted()); - let hashed_state_sorted = Arc::new(self.input.state.into_sorted()); - let prefix_sets = self.input.prefix_sets.freeze(); - let storage_root_targets = StorageRootTargets::new( - prefix_sets.account_prefix_set.iter().map(|nibbles| B256::from_slice(&nibbles.pack())), - prefix_sets.storage_prefix_sets, - ); - - // Pre-calculate storage roots async for accounts which were changed. 
- tracker.set_precomputed_storage_roots(storage_root_targets.len() as u64); - debug!(target: "trie::async_state_root", len = storage_root_targets.len(), "pre-calculating storage roots"); - let mut storage_roots = HashMap::with_capacity(storage_root_targets.len()); - for (hashed_address, prefix_set) in - storage_root_targets.into_iter().sorted_unstable_by_key(|(address, _)| *address) - { - let view = self.view.clone(); - let hashed_state_sorted = hashed_state_sorted.clone(); - let trie_nodes_sorted = trie_nodes_sorted.clone(); - #[cfg(feature = "metrics")] - let metrics = self.metrics.storage_trie.clone(); - - let (tx, rx) = std::sync::mpsc::sync_channel(1); - - rayon::spawn_fifo(move || { - let result = (|| -> Result<_, AsyncStateRootError> { - let provider_ro = view.provider_ro()?; - let trie_cursor_factory = InMemoryTrieCursorFactory::new( - DatabaseTrieCursorFactory::new(provider_ro.tx_ref()), - &trie_nodes_sorted, - ); - let hashed_state = HashedPostStateCursorFactory::new( - DatabaseHashedCursorFactory::new(provider_ro.tx_ref()), - &hashed_state_sorted, - ); - Ok(StorageRoot::new_hashed( - trie_cursor_factory, - hashed_state, - hashed_address, - #[cfg(feature = "metrics")] - metrics, - ) - .with_prefix_set(prefix_set) - .calculate(retain_updates)?) - })(); - let _ = tx.send(result); - }); - storage_roots.insert(hashed_address, rx); - } - - trace!(target: "trie::async_state_root", "calculating state root"); - let mut trie_updates = TrieUpdates::default(); - - let provider_ro = self.view.provider_ro()?; - let trie_cursor_factory = InMemoryTrieCursorFactory::new( - DatabaseTrieCursorFactory::new(provider_ro.tx_ref()), - &trie_nodes_sorted, - ); - let hashed_cursor_factory = HashedPostStateCursorFactory::new( - DatabaseHashedCursorFactory::new(provider_ro.tx_ref()), - &hashed_state_sorted, - ); - - let walker = TrieWalker::new( - trie_cursor_factory.account_trie_cursor().map_err(ProviderError::Database)?, - prefix_sets.account_prefix_set, - ) - .with_deletions_retained(retain_updates); - let mut account_node_iter = TrieNodeIter::new( - walker, - hashed_cursor_factory.hashed_account_cursor().map_err(ProviderError::Database)?, - ); - - let mut hash_builder = HashBuilder::default().with_updates(retain_updates); - let mut account_rlp = Vec::with_capacity(128); - while let Some(node) = account_node_iter.try_next().map_err(ProviderError::Database)? { - match node { - TrieElement::Branch(node) => { - hash_builder.add_branch(node.key, node.value, node.children_are_in_trie); - } - TrieElement::Leaf(hashed_address, account) => { - let (storage_root, _, updates) = match storage_roots.remove(&hashed_address) { - Some(rx) => rx.recv().map_err(|_| { - AsyncStateRootError::StorageRootChannelClosed { hashed_address } - })??, - // Since we do not store all intermediate nodes in the database, there might - // be a possibility of re-adding a non-modified leaf to the hash builder. - None => { - tracker.inc_missed_leaves(); - StorageRoot::new_hashed( - trie_cursor_factory.clone(), - hashed_cursor_factory.clone(), - hashed_address, - #[cfg(feature = "metrics")] - self.metrics.storage_trie.clone(), - ) - .calculate(retain_updates)? 
- } - }; - - if retain_updates { - trie_updates.insert_storage_updates(hashed_address, updates); - } - - account_rlp.clear(); - let account = TrieAccount::from((account, storage_root)); - account.encode(&mut account_rlp as &mut dyn BufMut); - hash_builder.add_leaf(Nibbles::unpack(hashed_address), &account_rlp); - } - } - } - - let root = hash_builder.root(); - - trie_updates.finalize( - account_node_iter.walker, - hash_builder, - prefix_sets.destroyed_accounts, - ); - - let stats = tracker.finish(); - - #[cfg(feature = "metrics")] - self.metrics.record_state_trie(stats); - - trace!( - target: "trie::async_state_root", - %root, - duration = ?stats.duration(), - branches_added = stats.branches_added(), - leaves_added = stats.leaves_added(), - missed_leaves = stats.missed_leaves(), - precomputed_storage_roots = stats.precomputed_storage_roots(), - "calculated state root" - ); - - Ok((root, trie_updates)) - } -} - -/// Error during async state root calculation. -#[derive(Error, Debug)] -pub enum AsyncStateRootError { - /// Storage root channel for a given address was closed. - #[error("storage root channel for {hashed_address} got closed")] - StorageRootChannelClosed { - /// The hashed address for which channel was closed. - hashed_address: B256, - }, - /// Receive error - #[error(transparent)] - Receive(#[from] std::sync::mpsc::RecvError), - /// Error while calculating storage root. - #[error(transparent)] - StorageRoot(#[from] StorageRootError), - /// Provider error. - #[error(transparent)] - Provider(#[from] ProviderError), -} - -#[cfg(test)] -mod tests { - use super::*; - use alloy_primitives::{keccak256, Address, U256}; - use rand::Rng; - use reth_primitives::{Account, StorageEntry}; - use reth_provider::{test_utils::create_test_provider_factory, HashingWriter}; - use reth_trie::{test_utils, HashedPostState, HashedStorage}; - - #[tokio::test] - async fn random_async_root() { - let factory = create_test_provider_factory(); - let consistent_view = ConsistentDbView::new(factory.clone(), None); - - let mut rng = rand::thread_rng(); - let mut state = (0..100) - .map(|_| { - let address = Address::random(); - let account = - Account { balance: U256::from(rng.gen::()), ..Default::default() }; - let mut storage = HashMap::::default(); - let has_storage = rng.gen_bool(0.7); - if has_storage { - for _ in 0..100 { - storage.insert( - B256::from(U256::from(rng.gen::())), - U256::from(rng.gen::()), - ); - } - } - (address, (account, storage)) - }) - .collect::>(); - - { - let provider_rw = factory.provider_rw().unwrap(); - provider_rw - .insert_account_for_hashing( - state.iter().map(|(address, (account, _))| (*address, Some(*account))), - ) - .unwrap(); - provider_rw - .insert_storage_for_hashing(state.iter().map(|(address, (_, storage))| { - ( - *address, - storage - .iter() - .map(|(slot, value)| StorageEntry { key: *slot, value: *value }), - ) - })) - .unwrap(); - provider_rw.commit().unwrap(); - } - - assert_eq!( - AsyncStateRoot::new(consistent_view.clone(), Default::default(),) - .incremental_root() - .unwrap(), - test_utils::state_root(state.clone()) - ); - - let mut hashed_state = HashedPostState::default(); - for (address, (account, storage)) in &mut state { - let hashed_address = keccak256(address); - - let should_update_account = rng.gen_bool(0.5); - if should_update_account { - *account = Account { balance: U256::from(rng.gen::()), ..*account }; - hashed_state.accounts.insert(hashed_address, Some(*account)); - } - - let should_update_storage = rng.gen_bool(0.3); - if should_update_storage 
{ - for (slot, value) in storage.iter_mut() { - let hashed_slot = keccak256(slot); - *value = U256::from(rng.gen::()); - hashed_state - .storages - .entry(hashed_address) - .or_insert_with(|| HashedStorage::new(false)) - .storage - .insert(hashed_slot, *value); - } - } - } - - assert_eq!( - AsyncStateRoot::new(consistent_view, TrieInput::from_state(hashed_state)) - .incremental_root() - .unwrap(), - test_utils::state_root(state) - ); - } -} diff --git a/crates/trie/parallel/src/lib.rs b/crates/trie/parallel/src/lib.rs index ff130b2187e71..40a6af3475800 100644 --- a/crates/trie/parallel/src/lib.rs +++ b/crates/trie/parallel/src/lib.rs @@ -13,12 +13,7 @@ pub use storage_root_targets::StorageRootTargets; /// Parallel trie calculation stats. pub mod stats; -/// Implementation of async state root computation. -#[cfg(feature = "async")] -pub mod async_root; - /// Implementation of parallel state root computation. -#[cfg(feature = "parallel")] pub mod parallel_root; /// Parallel state root metrics. diff --git a/crates/trie/parallel/src/parallel_root.rs b/crates/trie/parallel/src/parallel_root.rs index e63c3f1a17b30..a64b8351446ea 100644 --- a/crates/trie/parallel/src/parallel_root.rs +++ b/crates/trie/parallel/src/parallel_root.rs @@ -1,10 +1,10 @@ #[cfg(feature = "metrics")] use crate::metrics::ParallelStateRootMetrics; use crate::{stats::ParallelTrieTracker, storage_root_targets::StorageRootTargets}; +use alloy_primitives::B256; use alloy_rlp::{BufMut, Encodable}; -use rayon::prelude::*; +use itertools::Itertools; use reth_execution_errors::StorageRootError; -use reth_primitives::B256; use reth_provider::{ providers::ConsistentDbView, BlockReader, DBProvider, DatabaseProviderFactory, ProviderError, }; @@ -17,22 +17,21 @@ use reth_trie::{ HashBuilder, Nibbles, StorageRoot, TrieAccount, TrieInput, }; use reth_trie_db::{DatabaseHashedCursorFactory, DatabaseTrieCursorFactory}; -use std::collections::HashMap; +use std::{collections::HashMap, sync::Arc}; use thiserror::Error; use tracing::*; /// Parallel incremental state root calculator. /// -/// The calculator starts off by pre-computing storage roots of changed -/// accounts in parallel. Once that's done, it proceeds to walking the state -/// trie retrieving the pre-computed storage roots when needed. +/// The calculator starts off by launching tasks to compute storage roots. +/// Then, it immediately starts walking the state trie updating the necessary trie +/// nodes in the process. Upon encountering a leaf node, it will poll the storage root +/// task for the corresponding hashed address. /// /// Internally, the calculator uses [`ConsistentDbView`] since /// it needs to rely on database state saying the same until /// the last transaction is open. /// See docs of using [`ConsistentDbView`] for caveats. -/// -/// If possible, use more optimized `AsyncStateRoot` instead. #[derive(Debug)] pub struct ParallelStateRoot { /// Consistent view of the database. @@ -58,7 +57,7 @@ impl ParallelStateRoot { impl ParallelStateRoot where - Factory: DatabaseProviderFactory + Send + Sync, + Factory: DatabaseProviderFactory + Clone + Send + Sync + 'static, { /// Calculate incremental state root in parallel. 
pub fn incremental_root(self) -> Result { @@ -77,8 +76,8 @@ where retain_updates: bool, ) -> Result<(B256, TrieUpdates), ParallelStateRootError> { let mut tracker = ParallelTrieTracker::default(); - let trie_nodes_sorted = self.input.nodes.into_sorted(); - let hashed_state_sorted = self.input.state.into_sorted(); + let trie_nodes_sorted = Arc::new(self.input.nodes.into_sorted()); + let hashed_state_sorted = Arc::new(self.input.state.into_sorted()); let prefix_sets = self.input.prefix_sets.freeze(); let storage_root_targets = StorageRootTargets::new( prefix_sets.account_prefix_set.iter().map(|nibbles| B256::from_slice(&nibbles.pack())), @@ -88,30 +87,43 @@ where // Pre-calculate storage roots in parallel for accounts which were changed. tracker.set_precomputed_storage_roots(storage_root_targets.len() as u64); debug!(target: "trie::parallel_state_root", len = storage_root_targets.len(), "pre-calculating storage roots"); - let mut storage_roots = storage_root_targets - .into_par_iter() - .map(|(hashed_address, prefix_set)| { - let provider_ro = self.view.provider_ro()?; - let trie_cursor_factory = InMemoryTrieCursorFactory::new( - DatabaseTrieCursorFactory::new(provider_ro.tx_ref()), - &trie_nodes_sorted, - ); - let hashed_cursor_factory = HashedPostStateCursorFactory::new( - DatabaseHashedCursorFactory::new(provider_ro.tx_ref()), - &hashed_state_sorted, - ); - let storage_root_result = StorageRoot::new_hashed( - trie_cursor_factory, - hashed_cursor_factory, - hashed_address, - #[cfg(feature = "metrics")] - self.metrics.storage_trie.clone(), - ) - .with_prefix_set(prefix_set) - .calculate(retain_updates); - Ok((hashed_address, storage_root_result?)) - }) - .collect::, ParallelStateRootError>>()?; + let mut storage_roots = HashMap::with_capacity(storage_root_targets.len()); + for (hashed_address, prefix_set) in + storage_root_targets.into_iter().sorted_unstable_by_key(|(address, _)| *address) + { + let view = self.view.clone(); + let hashed_state_sorted = hashed_state_sorted.clone(); + let trie_nodes_sorted = trie_nodes_sorted.clone(); + #[cfg(feature = "metrics")] + let metrics = self.metrics.storage_trie.clone(); + + let (tx, rx) = std::sync::mpsc::sync_channel(1); + + rayon::spawn_fifo(move || { + let result = (|| -> Result<_, ParallelStateRootError> { + let provider_ro = view.provider_ro()?; + let trie_cursor_factory = InMemoryTrieCursorFactory::new( + DatabaseTrieCursorFactory::new(provider_ro.tx_ref()), + &trie_nodes_sorted, + ); + let hashed_state = HashedPostStateCursorFactory::new( + DatabaseHashedCursorFactory::new(provider_ro.tx_ref()), + &hashed_state_sorted, + ); + Ok(StorageRoot::new_hashed( + trie_cursor_factory, + hashed_state, + hashed_address, + #[cfg(feature = "metrics")] + metrics, + ) + .with_prefix_set(prefix_set) + .calculate(retain_updates)?) 
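// [editor's note] The hunk above folds the former AsyncStateRoot strategy into
// ParallelStateRoot: one rayon task per changed account, each reporting its storage
// root back over a bounded channel, so the account-trie walk only blocks on the roots
// it actually needs. Below is a minimal, self-contained sketch of that fan-out/fan-in
// shape under stated assumptions: it uses placeholder u64 keys and a hypothetical
// `expensive_root` stand-in for `StorageRoot::calculate`, requires only the `rayon`
// crate, and is not reth API.
use std::{collections::HashMap, sync::mpsc::sync_channel};

fn compute_roots_in_parallel(targets: Vec<u64>) -> HashMap<u64, u64> {
    let mut pending = HashMap::with_capacity(targets.len());
    for target in targets {
        // bounded channel of size 1: each worker sends exactly one result
        let (tx, rx) = sync_channel(1);
        rayon::spawn_fifo(move || {
            // stand-in for the per-account StorageRoot::calculate call
            let root = expensive_root(target);
            let _ = tx.send(root);
        });
        pending.insert(target, rx);
    }
    // fan-in: block on each receiver as the trie walk reaches the matching account
    pending
        .into_iter()
        .map(|(target, rx)| (target, rx.recv().expect("worker sends exactly once")))
        .collect()
}

fn expensive_root(input: u64) -> u64 {
    // placeholder work; the real code hashes trie nodes here
    input.wrapping_mul(0x9E37_79B9_7F4A_7C15)
}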
+ })(); + let _ = tx.send(result); + }); + storage_roots.insert(hashed_address, rx); + } trace!(target: "trie::parallel_state_root", "calculating state root"); let mut trie_updates = TrieUpdates::default(); @@ -145,7 +157,13 @@ where } TrieElement::Leaf(hashed_address, account) => { let (storage_root, _, updates) = match storage_roots.remove(&hashed_address) { - Some(result) => result, + Some(rx) => rx.recv().map_err(|_| { + ParallelStateRootError::StorageRoot(StorageRootError::Database( + reth_db::DatabaseError::Other(format!( + "channel closed for {hashed_address}" + )), + )) + })??, // Since we do not store all intermediate nodes in the database, there might // be a possibility of re-adding a non-modified leaf to the hash builder. None => { diff --git a/crates/trie/parallel/src/storage_root_targets.rs b/crates/trie/parallel/src/storage_root_targets.rs index 8325fbcf7205c..9b52d49afc800 100644 --- a/crates/trie/parallel/src/storage_root_targets.rs +++ b/crates/trie/parallel/src/storage_root_targets.rs @@ -36,7 +36,6 @@ impl IntoIterator for StorageRootTargets { } } -#[cfg(feature = "parallel")] impl rayon::iter::IntoParallelIterator for StorageRootTargets { type Iter = rayon::collections::hash_map::IntoIter; type Item = (B256, PrefixSet); From edd8fe4648a5ad3c035afc23d5941b0e19de9282 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Thu, 26 Sep 2024 12:47:18 +0200 Subject: [PATCH 19/84] primitives: rm `H*` deprecated types (#11246) --- crates/primitives/src/lib.rs | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index c350c506694f4..ca807eddb533e 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -80,22 +80,6 @@ pub use alloy_primitives::{ pub use reth_ethereum_forks::*; pub use revm_primitives::{self, JumpTable}; -#[doc(hidden)] -#[deprecated = "use B64 instead"] -pub type H64 = B64; -#[doc(hidden)] -#[deprecated = "use B128 instead"] -pub type H128 = B128; -#[doc(hidden)] -#[deprecated = "use Address instead"] -pub type H160 = Address; -#[doc(hidden)] -#[deprecated = "use B256 instead"] -pub type H256 = B256; -#[doc(hidden)] -#[deprecated = "use B512 instead"] -pub type H512 = B512; - #[cfg(any(test, feature = "arbitrary"))] pub use arbitrary; From f2a508df341a9281a81c979be4f91f9fcbf363a9 Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Thu, 26 Sep 2024 13:29:18 +0200 Subject: [PATCH 20/84] primitives: rm more `alloy_primitives` reexports (#11222) --- Cargo.lock | 11 ++++++++++- bin/reth/Cargo.toml | 1 + bin/reth/src/commands/debug_cmd/build_block.rs | 3 ++- bin/reth/src/commands/debug_cmd/execution.rs | 3 ++- crates/config/Cargo.toml | 2 +- crates/config/src/config.rs | 2 +- crates/evm/execution-types/Cargo.toml | 1 - crates/node/builder/src/setup.rs | 3 ++- .../optimism/cli/src/commands/init_state/bedrock.rs | 5 ++--- crates/optimism/cli/src/receipt_file_codec.rs | 4 ++-- crates/primitives/src/block.rs | 6 +++--- crates/primitives/src/lib.rs | 5 ++--- crates/primitives/src/proofs.rs | 5 +++-- crates/primitives/src/receipt.rs | 6 +++--- crates/primitives/src/transaction/access_list.rs | 3 ++- crates/primitives/src/transaction/compat.rs | 4 ++-- crates/primitives/src/transaction/mod.rs | 8 ++++---- crates/primitives/src/transaction/pooled.rs | 5 +++-- crates/primitives/src/transaction/sidecar.rs | 3 ++- crates/primitives/src/transaction/signature.rs | 9 ++++----- 
crates/primitives/src/transaction/tx_type.rs | 3 ++- crates/primitives/src/transaction/util.rs | 9 ++++----- crates/primitives/src/transaction/variant.rs | 4 ++-- crates/revm/Cargo.toml | 2 ++ crates/revm/src/batch.rs | 3 ++- crates/revm/src/database.rs | 3 ++- crates/revm/src/state_change.rs | 3 ++- crates/revm/src/test_utils.rs | 5 ++--- crates/rpc/rpc/src/eth/helpers/types.rs | 4 ++-- crates/stages/stages/Cargo.toml | 2 ++ crates/stages/stages/benches/criterion.rs | 2 +- crates/stages/stages/benches/setup/account_hashing.rs | 2 +- crates/stages/stages/benches/setup/mod.rs | 5 ++--- crates/stages/stages/src/stages/bodies.rs | 4 ++-- crates/stages/stages/src/stages/execution.rs | 9 ++++----- crates/stages/stages/src/stages/hashing_account.rs | 7 ++++--- crates/stages/stages/src/stages/hashing_storage.rs | 6 ++++-- crates/stages/stages/src/stages/headers.rs | 3 ++- .../stages/stages/src/stages/index_account_history.rs | 5 +++-- .../stages/stages/src/stages/index_storage_history.rs | 3 ++- crates/stages/stages/src/stages/merkle.rs | 6 ++++-- crates/stages/stages/src/stages/mod.rs | 4 ++-- crates/stages/stages/src/stages/sender_recovery.rs | 6 ++++-- crates/stages/stages/src/stages/tx_lookup.rs | 3 ++- crates/stages/stages/src/stages/utils.rs | 2 +- crates/stages/stages/src/test_utils/test_db.rs | 5 +++-- .../storage/provider/src/providers/static_file/mod.rs | 4 ++-- crates/storage/provider/src/test_utils/blocks.rs | 8 ++++---- crates/storage/provider/src/writer/mod.rs | 4 ++-- crates/trie/db/src/state.rs | 3 ++- examples/custom-engine-types/Cargo.toml | 1 + examples/custom-engine-types/src/main.rs | 3 ++- examples/custom-evm/Cargo.toml | 1 + examples/custom-evm/src/main.rs | 3 ++- examples/custom-inspector/Cargo.toml | 1 + examples/custom-inspector/src/main.rs | 3 ++- examples/db-access/Cargo.toml | 1 + examples/db-access/src/main.rs | 3 ++- examples/stateful-precompile/Cargo.toml | 1 + examples/stateful-precompile/src/main.rs | 3 ++- examples/txpool-tracing/Cargo.toml | 1 + examples/txpool-tracing/src/main.rs | 3 ++- testing/ef-tests/src/models.rs | 5 ++--- testing/testing-utils/src/genesis_allocator.rs | 3 ++- 64 files changed, 146 insertions(+), 104 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a02679addc75d..0e0a0c44743f9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2712,6 +2712,7 @@ name = "example-custom-engine-types" version = "0.0.0" dependencies = [ "alloy-genesis", + "alloy-primitives", "alloy-rpc-types", "eyre", "reth", @@ -2734,6 +2735,7 @@ name = "example-custom-evm" version = "0.0.0" dependencies = [ "alloy-genesis", + "alloy-primitives", "eyre", "reth", "reth-chainspec", @@ -2750,6 +2752,7 @@ dependencies = [ name = "example-custom-inspector" version = "0.0.0" dependencies = [ + "alloy-primitives", "alloy-rpc-types", "clap", "futures-util", @@ -2810,6 +2813,7 @@ dependencies = [ name = "example-db-access" version = "0.0.0" dependencies = [ + "alloy-primitives", "alloy-rpc-types", "eyre", "reth-chainspec", @@ -2916,6 +2920,7 @@ name = "example-stateful-precompile" version = "0.0.0" dependencies = [ "alloy-genesis", + "alloy-primitives", "eyre", "parking_lot 0.12.3", "reth", @@ -2933,6 +2938,7 @@ dependencies = [ name = "example-txpool-tracing" version = "0.0.0" dependencies = [ + "alloy-primitives", "alloy-rpc-types-trace", "clap", "futures-util", @@ -6174,6 +6180,7 @@ name = "reth" version = "1.0.7" dependencies = [ "alloy-consensus", + "alloy-primitives", "alloy-rlp", "alloy-rpc-types", "aquamarine", @@ -6610,11 +6617,11 @@ dependencies = [ name = "reth-config" 
version = "1.0.7" dependencies = [ + "alloy-primitives", "eyre", "humantime-serde", "reth-network-peers", "reth-network-types", - "reth-primitives", "reth-prune-types", "reth-stages-types", "serde", @@ -8403,6 +8410,7 @@ dependencies = [ name = "reth-revm" version = "1.0.7" dependencies = [ + "alloy-primitives", "reth-chainspec", "reth-consensus-common", "reth-ethereum-forks", @@ -8757,6 +8765,7 @@ dependencies = [ name = "reth-stages" version = "1.0.7" dependencies = [ + "alloy-primitives", "alloy-rlp", "assert_matches", "criterion", diff --git a/bin/reth/Cargo.toml b/bin/reth/Cargo.toml index 19d7d18f1c086..e456871facb65 100644 --- a/bin/reth/Cargo.toml +++ b/bin/reth/Cargo.toml @@ -69,6 +69,7 @@ reth-prune.workspace = true alloy-rlp.workspace = true alloy-rpc-types = { workspace = true, features = ["engine"] } alloy-consensus.workspace = true +alloy-primitives.workspace = true # tracing tracing.workspace = true diff --git a/bin/reth/src/commands/debug_cmd/build_block.rs b/bin/reth/src/commands/debug_cmd/build_block.rs index a8589a74ec8d4..57a8ee8a6b8db 100644 --- a/bin/reth/src/commands/debug_cmd/build_block.rs +++ b/bin/reth/src/commands/debug_cmd/build_block.rs @@ -1,5 +1,6 @@ //! Command for debugging block building. use alloy_consensus::TxEip4844; +use alloy_primitives::Address; use alloy_rlp::Decodable; use alloy_rpc_types::engine::{BlobsBundleV1, PayloadAttributes}; use clap::Parser; @@ -24,7 +25,7 @@ use reth_node_api::{NodeTypesWithDB, NodeTypesWithEngine, PayloadBuilderAttribut use reth_node_ethereum::{EthEvmConfig, EthExecutorProvider}; use reth_payload_builder::database::CachedReads; use reth_primitives::{ - revm_primitives::KzgSettings, Address, BlobTransaction, BlobTransactionSidecar, Bytes, + revm_primitives::KzgSettings, BlobTransaction, BlobTransactionSidecar, Bytes, PooledTransactionsElement, SealedBlock, SealedBlockWithSenders, Transaction, TransactionSigned, B256, U256, }; diff --git a/bin/reth/src/commands/debug_cmd/execution.rs b/bin/reth/src/commands/debug_cmd/execution.rs index 64ca4dc2dc848..46cc6064d25ea 100644 --- a/bin/reth/src/commands/debug_cmd/execution.rs +++ b/bin/reth/src/commands/debug_cmd/execution.rs @@ -1,6 +1,7 @@ //! Command for debugging execution. 
use crate::{args::NetworkArgs, utils::get_single_header}; +use alloy_primitives::BlockNumber; use clap::Parser; use futures::{stream::select as stream_select, StreamExt}; use reth_beacon_consensus::EthBeaconConsensus; @@ -22,7 +23,7 @@ use reth_network_api::NetworkInfo; use reth_network_p2p::{headers::client::HeadersClient, BlockClient}; use reth_node_api::{NodeTypesWithDB, NodeTypesWithDBAdapter, NodeTypesWithEngine}; use reth_node_ethereum::EthExecutorProvider; -use reth_primitives::{BlockHashOrNumber, BlockNumber, B256}; +use reth_primitives::{BlockHashOrNumber, B256}; use reth_provider::{ BlockExecutionWriter, ChainSpecProvider, ProviderFactory, StageCheckpointReader, }; diff --git a/crates/config/Cargo.toml b/crates/config/Cargo.toml index d8224a3d62edc..a186b8407a8a0 100644 --- a/crates/config/Cargo.toml +++ b/crates/config/Cargo.toml @@ -27,4 +27,4 @@ eyre.workspace = true [dev-dependencies] tempfile.workspace = true reth-network-peers.workspace = true -reth-primitives.workspace = true +alloy-primitives.workspace = true diff --git a/crates/config/src/config.rs b/crates/config/src/config.rs index 24c992fddd824..e4a7fc9677aac 100644 --- a/crates/config/src/config.rs +++ b/crates/config/src/config.rs @@ -444,8 +444,8 @@ where mod tests { use super::{Config, EXTENSION}; use crate::PruneConfig; + use alloy_primitives::Address; use reth_network_peers::TrustedPeer; - use reth_primitives::Address; use reth_prune_types::{PruneMode, PruneModes, ReceiptsLogPruneConfig}; use std::{collections::BTreeMap, path::Path, str::FromStr, time::Duration}; diff --git a/crates/evm/execution-types/Cargo.toml b/crates/evm/execution-types/Cargo.toml index c286f07906bbb..65426e3083b4a 100644 --- a/crates/evm/execution-types/Cargo.toml +++ b/crates/evm/execution-types/Cargo.toml @@ -25,7 +25,6 @@ serde = { workspace = true, optional = true } [dev-dependencies] reth-primitives = { workspace = true, features = ["test-utils"] } -alloy-primitives.workspace = true alloy-eips.workspace = true [features] diff --git a/crates/node/builder/src/setup.rs b/crates/node/builder/src/setup.rs index 10144d8699dec..44c9f19e84b3d 100644 --- a/crates/node/builder/src/setup.rs +++ b/crates/node/builder/src/setup.rs @@ -2,6 +2,7 @@ use std::sync::Arc; +use alloy_primitives::BlockNumber; use reth_config::{config::StageConfig, PruneConfig}; use reth_consensus::Consensus; use reth_downloaders::{ @@ -13,7 +14,7 @@ use reth_exex::ExExManagerHandle; use reth_network_p2p::{ bodies::downloader::BodyDownloader, headers::downloader::HeaderDownloader, BlockClient, }; -use reth_node_core::primitives::{BlockNumber, B256}; +use reth_node_core::primitives::B256; use reth_provider::{providers::ProviderNodeTypes, ProviderFactory}; use reth_stages::{prelude::DefaultStages, stages::ExecutionStage, Pipeline, StageSet}; use reth_static_file::StaticFileProducer; diff --git a/crates/optimism/cli/src/commands/init_state/bedrock.rs b/crates/optimism/cli/src/commands/init_state/bedrock.rs index 41cd467b2f22b..2426a195a8b62 100644 --- a/crates/optimism/cli/src/commands/init_state/bedrock.rs +++ b/crates/optimism/cli/src/commands/init_state/bedrock.rs @@ -1,8 +1,7 @@ -use alloy_primitives::B256; +use alloy_primitives::{BlockNumber, B256}; use reth_optimism_primitives::bedrock::{BEDROCK_HEADER, BEDROCK_HEADER_HASH, BEDROCK_HEADER_TTD}; use reth_primitives::{ - BlockBody, BlockNumber, Header, SealedBlock, SealedBlockWithSenders, SealedHeader, - StaticFileSegment, U256, + BlockBody, Header, SealedBlock, SealedBlockWithSenders, SealedHeader, StaticFileSegment, 
U256, }; use reth_provider::{ providers::StaticFileProvider, BlockWriter, StageCheckpointWriter, StaticFileWriter, diff --git a/crates/optimism/cli/src/receipt_file_codec.rs b/crates/optimism/cli/src/receipt_file_codec.rs index c86bfa12a6bdf..c0416fa5f072a 100644 --- a/crates/optimism/cli/src/receipt_file_codec.rs +++ b/crates/optimism/cli/src/receipt_file_codec.rs @@ -1,10 +1,10 @@ //! Codec for reading raw receipts from a file. -use alloy_primitives::B256; +use alloy_primitives::{Address, Bloom, B256}; use alloy_rlp::{Decodable, RlpDecodable}; use reth_primitives::{ bytes::{Buf, BytesMut}, - Address, Bloom, Bytes, Log, Receipt, TxType, + Bytes, Log, Receipt, TxType, }; use tokio_util::codec::Decoder; diff --git a/crates/primitives/src/block.rs b/crates/primitives/src/block.rs index 401757d30bb70..a5357fcec9933 100644 --- a/crates/primitives/src/block.rs +++ b/crates/primitives/src/block.rs @@ -1,12 +1,12 @@ use crate::{ - Address, Bytes, GotExpected, Header, SealedHeader, TransactionSigned, - TransactionSignedEcRecovered, Withdrawals, B256, + Bytes, GotExpected, Header, SealedHeader, TransactionSigned, TransactionSignedEcRecovered, + Withdrawals, B256, }; use alloc::vec::Vec; pub use alloy_eips::eip1898::{ BlockHashOrNumber, BlockId, BlockNumHash, BlockNumberOrTag, ForkBlock, RpcBlockHash, }; -use alloy_primitives::Sealable; +use alloy_primitives::{Address, Sealable}; use alloy_rlp::{Decodable, Encodable, RlpDecodable, RlpEncodable}; use derive_more::{Deref, DerefMut}; #[cfg(any(test, feature = "arbitrary"))] diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index ca807eddb533e..b0564ed07d5ab 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -72,10 +72,9 @@ pub use transaction::{ pub use alloy_primitives::{ self, address, b256, bloom, bytes, bytes::{Buf, BufMut, BytesMut}, - eip191_hash_message, hex, hex_literal, keccak256, ruint, + hex, hex_literal, ruint, utils::format_ether, - Address, BlockHash, BlockNumber, Bloom, BloomInput, Bytes, ChainId, Selector, StorageKey, - StorageValue, TxHash, TxIndex, TxNumber, B128, B256, B512, B64, U128, U256, U64, U8, + Bytes, StorageValue, TxHash, TxIndex, TxNumber, B128, B256, B512, B64, U128, U256, U64, }; pub use reth_ethereum_forks::*; pub use revm_primitives::{self, JumpTable}; diff --git a/crates/primitives/src/proofs.rs b/crates/primitives/src/proofs.rs index a19564de0938b..040ec365503e1 100644 --- a/crates/primitives/src/proofs.rs +++ b/crates/primitives/src/proofs.rs @@ -1,11 +1,12 @@ //! Helper function for calculating Merkle proofs and hashes. use crate::{ - constants::EMPTY_OMMER_ROOT_HASH, keccak256, Header, Receipt, ReceiptWithBloom, - ReceiptWithBloomRef, Request, TransactionSigned, Withdrawal, B256, + constants::EMPTY_OMMER_ROOT_HASH, Header, Receipt, ReceiptWithBloom, ReceiptWithBloomRef, + Request, TransactionSigned, Withdrawal, B256, }; use alloc::vec::Vec; use alloy_eips::eip7685::Encodable7685; +use alloy_primitives::keccak256; use reth_trie_common::root::{ordered_trie_root, ordered_trie_root_with_encoder}; /// Calculate a transaction root. 
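// [editor's note] Hedged illustration of the downstream migration implied by this
// patch and the H*-alias removal above: import the alloy types directly instead of
// relying on the trimmed reth_primitives re-exports or the deprecated H* aliases.
// `hash_sender` is a hypothetical helper written for illustration, not code from
// the patch.
//
// before: use reth_primitives::{keccak256, Address, H256};
use alloy_primitives::{keccak256, Address, B256};

fn hash_sender(sender: Address) -> B256 {
    // keccak256 and Address now come straight from alloy_primitives
    keccak256(sender)
}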
diff --git a/crates/primitives/src/receipt.rs b/crates/primitives/src/receipt.rs index aa9b6c6d7c238..6b43e77e913a7 100644 --- a/crates/primitives/src/receipt.rs +++ b/crates/primitives/src/receipt.rs @@ -1,11 +1,11 @@ #[cfg(feature = "reth-codec")] use crate::compression::{RECEIPT_COMPRESSOR, RECEIPT_DECOMPRESSOR}; use crate::{ - logs_bloom, Bloom, Bytes, TxType, B256, EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, - EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID, + logs_bloom, Bytes, TxType, B256, EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, + EIP7702_TX_TYPE_ID, }; use alloc::{vec, vec::Vec}; -use alloy_primitives::Log; +use alloy_primitives::{Bloom, Log}; use alloy_rlp::{length_of_length, Decodable, Encodable, RlpDecodable, RlpEncodable}; use bytes::{Buf, BufMut}; use core::{cmp::Ordering, ops::Deref}; diff --git a/crates/primitives/src/transaction/access_list.rs b/crates/primitives/src/transaction/access_list.rs index 7a0782bef7617..32beb293f9c12 100644 --- a/crates/primitives/src/transaction/access_list.rs +++ b/crates/primitives/src/transaction/access_list.rs @@ -2,8 +2,9 @@ #[cfg(test)] mod tests { - use crate::{Address, B256}; + use crate::B256; use alloy_eips::eip2930::{AccessList, AccessListItem}; + use alloy_primitives::Address; use alloy_rlp::{RlpDecodable, RlpDecodableWrapper, RlpEncodable, RlpEncodableWrapper}; use proptest::proptest; use proptest_arbitrary_interop::arb; diff --git a/crates/primitives/src/transaction/compat.rs b/crates/primitives/src/transaction/compat.rs index 319ae55b1af1d..ac39671f34a8d 100644 --- a/crates/primitives/src/transaction/compat.rs +++ b/crates/primitives/src/transaction/compat.rs @@ -1,5 +1,5 @@ -use crate::{Address, Transaction, TransactionSigned, U256}; -use alloy_primitives::TxKind; +use crate::{Transaction, TransactionSigned, U256}; +use alloy_primitives::{Address, TxKind}; use revm_primitives::{AuthorizationList, TxEnv}; /// Implements behaviour to fill a [`TxEnv`] from another transaction. diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index fc055adb26b49..ec1ddaa878107 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -1,8 +1,8 @@ //! Transaction types. 
-use crate::{keccak256, Address, BlockHashOrNumber, Bytes, TxHash, B256, U256}; +use crate::{BlockHashOrNumber, Bytes, TxHash, B256, U256}; use alloy_eips::eip7702::SignedAuthorization; -use alloy_primitives::TxKind; +use alloy_primitives::{keccak256, Address, TxKind}; use alloy_consensus::{SignableTransaction, TxEip1559, TxEip2930, TxEip4844, TxEip7702, TxLegacy}; use alloy_eips::eip2930::AccessList; @@ -1693,10 +1693,10 @@ mod tests { use crate::{ hex, transaction::{signature::Signature, TxEip1559, TxKind, TxLegacy}, - Address, Bytes, Transaction, TransactionSigned, TransactionSignedEcRecovered, + Bytes, Transaction, TransactionSigned, TransactionSignedEcRecovered, TransactionSignedNoHash, B256, U256, }; - use alloy_primitives::{address, b256, bytes, Parity}; + use alloy_primitives::{address, b256, bytes, Address, Parity}; use alloy_rlp::{Decodable, Encodable, Error as RlpError}; use reth_chainspec::MIN_TRANSACTION_GAS; use reth_codecs::Compact; diff --git a/crates/primitives/src/transaction/pooled.rs b/crates/primitives/src/transaction/pooled.rs index 78ad756923c04..3f19b29b36c19 100644 --- a/crates/primitives/src/transaction/pooled.rs +++ b/crates/primitives/src/transaction/pooled.rs @@ -7,14 +7,15 @@ use super::{ TxEip7702, }; use crate::{ - Address, BlobTransaction, BlobTransactionSidecar, Bytes, Signature, Transaction, - TransactionSigned, TransactionSignedEcRecovered, TxHash, B256, EIP4844_TX_TYPE_ID, + BlobTransaction, BlobTransactionSidecar, Bytes, Signature, Transaction, TransactionSigned, + TransactionSignedEcRecovered, TxHash, B256, EIP4844_TX_TYPE_ID, }; use alloc::vec::Vec; use alloy_consensus::{ transaction::{TxEip1559, TxEip2930, TxEip4844, TxLegacy}, SignableTransaction, TxEip4844WithSidecar, }; +use alloy_primitives::Address; use alloy_rlp::{Decodable, Encodable, Error as RlpError, Header, EMPTY_LIST_CODE}; use bytes::Buf; use derive_more::{AsRef, Deref}; diff --git a/crates/primitives/src/transaction/sidecar.rs b/crates/primitives/src/transaction/sidecar.rs index e486fa670b6d4..94dfbe6da4b64 100644 --- a/crates/primitives/src/transaction/sidecar.rs +++ b/crates/primitives/src/transaction/sidecar.rs @@ -1,7 +1,8 @@ #![cfg_attr(docsrs, doc(cfg(feature = "c-kzg")))] -use crate::{keccak256, Signature, Transaction, TransactionSigned, TxHash, EIP4844_TX_TYPE_ID}; +use crate::{Signature, Transaction, TransactionSigned, TxHash, EIP4844_TX_TYPE_ID}; use alloy_consensus::{transaction::TxEip4844, TxEip4844WithSidecar}; +use alloy_primitives::keccak256; use alloy_rlp::{Decodable, Error as RlpError, Header}; use serde::{Deserialize, Serialize}; diff --git a/crates/primitives/src/transaction/signature.rs b/crates/primitives/src/transaction/signature.rs index 4b4b5bb4547e6..07a2d61957956 100644 --- a/crates/primitives/src/transaction/signature.rs +++ b/crates/primitives/src/transaction/signature.rs @@ -1,6 +1,5 @@ -use crate::{transaction::util::secp256k1, Address, B256, U256}; - -use alloy_primitives::Parity; +use crate::{transaction::util::secp256k1, B256, U256}; +use alloy_primitives::{Address, Parity}; use alloy_rlp::{Decodable, Error as RlpError}; pub use alloy_primitives::Signature; @@ -120,9 +119,9 @@ mod tests { transaction::signature::{ legacy_parity, recover_signer, recover_signer_unchecked, SECP256K1N_HALF, }, - Address, Signature, B256, U256, + Signature, B256, U256, }; - use alloy_primitives::Parity; + use alloy_primitives::{Address, Parity}; use std::str::FromStr; #[test] diff --git a/crates/primitives/src/transaction/tx_type.rs 
b/crates/primitives/src/transaction/tx_type.rs index 7331ba1ed93f8..4017d8d4b9c44 100644 --- a/crates/primitives/src/transaction/tx_type.rs +++ b/crates/primitives/src/transaction/tx_type.rs @@ -1,4 +1,5 @@ -use crate::{U64, U8}; +use crate::U64; +use alloy_primitives::U8; use alloy_rlp::{Decodable, Encodable}; use serde::{Deserialize, Serialize}; diff --git a/crates/primitives/src/transaction/util.rs b/crates/primitives/src/transaction/util.rs index f3f49cb316c1b..6205ec886ca09 100644 --- a/crates/primitives/src/transaction/util.rs +++ b/crates/primitives/src/transaction/util.rs @@ -1,4 +1,5 @@ -use crate::{Address, Signature}; +use crate::Signature; +use alloy_primitives::Address; use revm_primitives::B256; #[cfg(feature = "secp256k1")] @@ -14,13 +15,12 @@ pub(crate) mod secp256k1 { #[cfg(feature = "secp256k1")] mod impl_secp256k1 { use super::*; - use crate::keccak256; pub(crate) use ::secp256k1::Error; use ::secp256k1::{ ecdsa::{RecoverableSignature, RecoveryId}, Message, PublicKey, SecretKey, SECP256K1, }; - use alloy_primitives::Parity; + use alloy_primitives::{keccak256, Parity}; use revm_primitives::U256; /// Recovers the address of the sender using secp256k1 pubkey recovery. @@ -65,8 +65,7 @@ mod impl_secp256k1 { #[cfg_attr(feature = "secp256k1", allow(unused, unreachable_pub))] mod impl_k256 { use super::*; - use crate::keccak256; - use alloy_primitives::Parity; + use alloy_primitives::{keccak256, Parity}; pub(crate) use k256::ecdsa::Error; use k256::ecdsa::{RecoveryId, SigningKey, VerifyingKey}; use revm_primitives::U256; diff --git a/crates/primitives/src/transaction/variant.rs b/crates/primitives/src/transaction/variant.rs index 3e96b6dda899e..c558e7ccb9baa 100644 --- a/crates/primitives/src/transaction/variant.rs +++ b/crates/primitives/src/transaction/variant.rs @@ -2,9 +2,9 @@ //! `TransactionSignedEcRecovered` use crate::{ - Address, Transaction, TransactionSigned, TransactionSignedEcRecovered, TransactionSignedNoHash, - B256, + Transaction, TransactionSigned, TransactionSignedEcRecovered, TransactionSignedNoHash, B256, }; +use alloy_primitives::Address; use core::ops::Deref; /// Represents various different transaction formats used in reth. 
diff --git a/crates/revm/Cargo.toml b/crates/revm/Cargo.toml index b4f1692494444..9e4501f627701 100644 --- a/crates/revm/Cargo.toml +++ b/crates/revm/Cargo.toml @@ -21,6 +21,7 @@ reth-consensus-common.workspace = true reth-prune-types.workspace = true reth-storage-api.workspace = true reth-trie = { workspace = true, optional = true } +alloy-primitives.workspace = true # revm revm.workspace = true @@ -28,6 +29,7 @@ revm.workspace = true [dev-dependencies] reth-trie.workspace = true reth-ethereum-forks.workspace = true +alloy-primitives.workspace = true [features] default = ["std", "c-kzg"] diff --git a/crates/revm/src/batch.rs b/crates/revm/src/batch.rs index 4502732a429d3..7c8e0f29b5d71 100644 --- a/crates/revm/src/batch.rs +++ b/crates/revm/src/batch.rs @@ -182,7 +182,8 @@ impl BlockBatchRecord { mod tests { use super::*; use alloc::collections::BTreeMap; - use reth_primitives::{Address, Log, Receipt}; + use alloy_primitives::Address; + use reth_primitives::{Log, Receipt}; use reth_prune_types::{PruneMode, ReceiptsLogPruneConfig}; #[test] diff --git a/crates/revm/src/database.rs b/crates/revm/src/database.rs index fb5f71045ea10..e277d63e4bccb 100644 --- a/crates/revm/src/database.rs +++ b/crates/revm/src/database.rs @@ -1,6 +1,7 @@ use crate::primitives::alloy_primitives::{BlockNumber, StorageKey, StorageValue}; +use alloy_primitives::Address; use core::ops::{Deref, DerefMut}; -use reth_primitives::{Account, Address, B256, U256}; +use reth_primitives::{Account, B256, U256}; use reth_storage_errors::provider::{ProviderError, ProviderResult}; use revm::{ db::DatabaseRef, diff --git a/crates/revm/src/state_change.rs b/crates/revm/src/state_change.rs index 6376957f3111c..8799b8c70eb24 100644 --- a/crates/revm/src/state_change.rs +++ b/crates/revm/src/state_change.rs @@ -1,7 +1,8 @@ use crate::precompile::HashMap; +use alloy_primitives::Address; use reth_chainspec::{ChainSpec, EthereumHardforks}; use reth_consensus_common::calc; -use reth_primitives::{Address, Block, Withdrawal, Withdrawals, U256}; +use reth_primitives::{Block, Withdrawal, Withdrawals, U256}; /// Collect all balance changes at the end of the block. /// diff --git a/crates/revm/src/test_utils.rs b/crates/revm/src/test_utils.rs index fe377bc5fc3d8..bbdc196cd5282 100644 --- a/crates/revm/src/test_utils.rs +++ b/crates/revm/src/test_utils.rs @@ -2,9 +2,8 @@ use std::collections::HashSet; use crate::precompile::HashMap; use alloc::vec::Vec; -use reth_primitives::{ - keccak256, Account, Address, BlockNumber, Bytecode, Bytes, StorageKey, B256, U256, -}; +use alloy_primitives::{keccak256, Address, BlockNumber, StorageKey}; +use reth_primitives::{Account, Bytecode, Bytes, B256, U256}; use reth_storage_api::{ AccountReader, BlockHashReader, StateProofProvider, StateProvider, StateRootProvider, StorageRootProvider, diff --git a/crates/rpc/rpc/src/eth/helpers/types.rs b/crates/rpc/rpc/src/eth/helpers/types.rs index 8cd659bb81d2d..05da8bd91563f 100644 --- a/crates/rpc/rpc/src/eth/helpers/types.rs +++ b/crates/rpc/rpc/src/eth/helpers/types.rs @@ -1,10 +1,10 @@ //! L1 `eth` API types. 
use alloy_network::{AnyNetwork, Network}; -use alloy_primitives::TxKind; +use alloy_primitives::{Address, TxKind}; use alloy_rpc_types::{Transaction, TransactionInfo}; use alloy_serde::WithOtherFields; -use reth_primitives::{Address, TransactionSignedEcRecovered}; +use reth_primitives::TransactionSignedEcRecovered; use reth_rpc_types_compat::{ transaction::{from_primitive_signature, GasPrice}, TransactionCompat, diff --git a/crates/stages/stages/Cargo.toml b/crates/stages/stages/Cargo.toml index 934e9b1090530..9ed84160477f3 100644 --- a/crates/stages/stages/Cargo.toml +++ b/crates/stages/stages/Cargo.toml @@ -37,6 +37,8 @@ reth-trie-db = { workspace = true, features = ["metrics"] } reth-testing-utils = { workspace = true, optional = true } +alloy-primitives.workspace = true + # async tokio = { workspace = true, features = ["sync"] } futures-util.workspace = true diff --git a/crates/stages/stages/benches/criterion.rs b/crates/stages/stages/benches/criterion.rs index 3957ceac4d70e..7519d81a3622d 100644 --- a/crates/stages/stages/benches/criterion.rs +++ b/crates/stages/stages/benches/criterion.rs @@ -6,7 +6,7 @@ use reth_chainspec::ChainSpec; use reth_config::config::{EtlConfig, TransactionLookupConfig}; use reth_db::{test_utils::TempDatabase, Database, DatabaseEnv}; -use reth_primitives::BlockNumber; +use alloy_primitives::BlockNumber; use reth_provider::{DatabaseProvider, DatabaseProviderFactory}; use reth_stages::{ stages::{MerkleStage, SenderRecoveryStage, TransactionLookupStage}, diff --git a/crates/stages/stages/benches/setup/account_hashing.rs b/crates/stages/stages/benches/setup/account_hashing.rs index 86831418e53f0..9926c1f3d3785 100644 --- a/crates/stages/stages/benches/setup/account_hashing.rs +++ b/crates/stages/stages/benches/setup/account_hashing.rs @@ -1,11 +1,11 @@ #![allow(unreachable_pub)] use super::constants; +use alloy_primitives::BlockNumber; use reth_db::tables; use reth_db_api::{ cursor::DbCursorRO, database::Database, transaction::DbTx, DatabaseError as DbError, }; -use reth_primitives::BlockNumber; use reth_stages::{ stages::{AccountHashingStage, SeedOpts}, test_utils::TestStageDB, diff --git a/crates/stages/stages/benches/setup/mod.rs b/crates/stages/stages/benches/setup/mod.rs index 41570c057f8e9..85e4e3e5a3189 100644 --- a/crates/stages/stages/benches/setup/mod.rs +++ b/crates/stages/stages/benches/setup/mod.rs @@ -1,4 +1,5 @@ #![allow(unreachable_pub)] +use alloy_primitives::Address; use itertools::concat; use reth_chainspec::ChainSpec; use reth_db::{tables, test_utils::TempDatabase, Database, DatabaseEnv}; @@ -6,9 +7,7 @@ use reth_db_api::{ cursor::DbCursorRO, transaction::{DbTx, DbTxMut}, }; -use reth_primitives::{ - alloy_primitives::Sealable, Account, Address, SealedBlock, SealedHeader, B256, U256, -}; +use reth_primitives::{alloy_primitives::Sealable, Account, SealedBlock, SealedHeader, B256, U256}; use reth_provider::{DatabaseProvider, DatabaseProviderFactory, TrieWriter}; use reth_stages::{ stages::{AccountHashingStage, StorageHashingStage}, diff --git a/crates/stages/stages/src/stages/bodies.rs b/crates/stages/stages/src/stages/bodies.rs index 7f6d2d6a066f0..cc30fc90a11de 100644 --- a/crates/stages/stages/src/stages/bodies.rs +++ b/crates/stages/stages/src/stages/bodies.rs @@ -621,6 +621,7 @@ mod tests { UnwindStageTestRunner, }, }; + use alloy_primitives::{BlockHash, BlockNumber}; use futures_util::Stream; use reth_db::{static_file::HeaderMask, tables}; use reth_db_api::{ @@ -636,8 +637,7 @@ mod tests { error::DownloadResult, }; use reth_primitives::{ 
- BlockBody, BlockHash, BlockNumber, Header, SealedBlock, SealedHeader, - StaticFileSegment, TxNumber, B256, + BlockBody, Header, SealedBlock, SealedHeader, StaticFileSegment, TxNumber, B256, }; use reth_provider::{ providers::StaticFileWriter, test_utils::MockNodeTypesWithDB, HeaderProvider, diff --git a/crates/stages/stages/src/stages/execution.rs b/crates/stages/stages/src/stages/execution.rs index 57a9cdaab7efd..04cc1dd65923e 100644 --- a/crates/stages/stages/src/stages/execution.rs +++ b/crates/stages/stages/src/stages/execution.rs @@ -1,4 +1,5 @@ use crate::stages::MERKLE_STAGE_DEFAULT_CLEAN_THRESHOLD; +use alloy_primitives::BlockNumber; use num_traits::Zero; use reth_config::config::ExecutionConfig; use reth_db::{static_file::HeaderMask, tables}; @@ -9,9 +10,7 @@ use reth_evm::{ }; use reth_execution_types::{Chain, ExecutionOutcome}; use reth_exex::{ExExManagerHandle, ExExNotification}; -use reth_primitives::{ - alloy_primitives::Sealable, BlockNumber, Header, SealedHeader, StaticFileSegment, -}; +use reth_primitives::{alloy_primitives::Sealable, Header, SealedHeader, StaticFileSegment}; use reth_primitives_traits::format_gas_throughput; use reth_provider::{ providers::{StaticFileProvider, StaticFileProviderRWRefMut, StaticFileWriter}, @@ -659,6 +658,7 @@ where mod tests { use super::*; use crate::test_utils::TestStageDB; + use alloy_primitives::{keccak256, Address}; use alloy_rlp::Decodable; use assert_matches::assert_matches; use reth_chainspec::ChainSpecBuilder; @@ -666,8 +666,7 @@ mod tests { use reth_evm_ethereum::execute::EthExecutorProvider; use reth_execution_errors::BlockValidationError; use reth_primitives::{ - address, hex_literal::hex, keccak256, Account, Address, Bytecode, SealedBlock, - StorageEntry, B256, U256, + address, hex_literal::hex, Account, Bytecode, SealedBlock, StorageEntry, B256, U256, }; use reth_provider::{ test_utils::create_test_provider_factory, AccountReader, DatabaseProviderFactory, diff --git a/crates/stages/stages/src/stages/hashing_account.rs b/crates/stages/stages/src/stages/hashing_account.rs index 2e109f7557f12..bbcb9b8da270d 100644 --- a/crates/stages/stages/src/stages/hashing_account.rs +++ b/crates/stages/stages/src/stages/hashing_account.rs @@ -1,3 +1,4 @@ +use alloy_primitives::keccak256; use itertools::Itertools; use reth_config::config::{EtlConfig, HashingConfig}; use reth_db::{tables, RawKey, RawTable, RawValue}; @@ -6,7 +7,7 @@ use reth_db_api::{ transaction::{DbTx, DbTxMut}, }; use reth_etl::Collector; -use reth_primitives::{keccak256, Account, B256}; +use reth_primitives::{Account, B256}; use reth_provider::{AccountExtReader, DBProvider, HashingWriter, StatsReader}; use reth_stages_api::{ AccountHashingCheckpoint, EntitiesCheckpoint, ExecInput, ExecOutput, Stage, StageCheckpoint, @@ -63,7 +64,7 @@ impl AccountHashingStage { >( provider: &reth_provider::DatabaseProvider, opts: SeedOpts, - ) -> Result, StageError> { + ) -> Result, StageError> { use reth_db_api::models::AccountBeforeTx; use reth_primitives::U256; use reth_provider::{StaticFileProviderFactory, StaticFileWriter}; @@ -348,7 +349,7 @@ mod tests { mod test_utils { use super::*; use crate::test_utils::TestStageDB; - use reth_primitives::Address; + use alloy_primitives::Address; use reth_provider::DatabaseProviderFactory; pub(crate) struct AccountHashingTestRunner { diff --git a/crates/stages/stages/src/stages/hashing_storage.rs b/crates/stages/stages/src/stages/hashing_storage.rs index 54a9921dd5214..ba1e03c1a296f 100644 --- 
a/crates/stages/stages/src/stages/hashing_storage.rs +++ b/crates/stages/stages/src/stages/hashing_storage.rs @@ -1,3 +1,4 @@ +use alloy_primitives::keccak256; use itertools::Itertools; use reth_config::config::{EtlConfig, HashingConfig}; use reth_db::tables; @@ -8,7 +9,7 @@ use reth_db_api::{ transaction::{DbTx, DbTxMut}, }; use reth_etl::Collector; -use reth_primitives::{keccak256, BufMut, StorageEntry, B256}; +use reth_primitives::{BufMut, StorageEntry, B256}; use reth_provider::{DBProvider, HashingWriter, StatsReader, StorageReader}; use reth_stages_api::{ EntitiesCheckpoint, ExecInput, ExecOutput, Stage, StageCheckpoint, StageError, StageId, @@ -211,13 +212,14 @@ mod tests { stage_test_suite_ext, ExecuteStageTestRunner, StageTestRunner, TestRunnerError, TestStageDB, UnwindStageTestRunner, }; + use alloy_primitives::Address; use assert_matches::assert_matches; use rand::Rng; use reth_db_api::{ cursor::{DbCursorRW, DbDupCursorRO}, models::StoredBlockBodyIndices, }; - use reth_primitives::{Address, SealedBlock, U256}; + use reth_primitives::{SealedBlock, U256}; use reth_provider::providers::StaticFileWriter; use reth_testing_utils::generators::{ self, random_block_range, random_contract_account_range, BlockRangeParams, diff --git a/crates/stages/stages/src/stages/headers.rs b/crates/stages/stages/src/stages/headers.rs index f2ff1c666e266..25c7abaee48a3 100644 --- a/crates/stages/stages/src/stages/headers.rs +++ b/crates/stages/stages/src/stages/headers.rs @@ -1,3 +1,4 @@ +use alloy_primitives::{BlockHash, BlockNumber}; use futures_util::StreamExt; use reth_codecs::Compact; use reth_config::config::EtlConfig; @@ -10,7 +11,7 @@ use reth_db_api::{ }; use reth_etl::Collector; use reth_network_p2p::headers::{downloader::HeaderDownloader, error::HeadersDownloaderError}; -use reth_primitives::{BlockHash, BlockNumber, SealedHeader, StaticFileSegment, B256}; +use reth_primitives::{SealedHeader, StaticFileSegment, B256}; use reth_provider::{ providers::{StaticFileProvider, StaticFileWriter}, BlockHashReader, DBProvider, HeaderProvider, HeaderSyncGap, HeaderSyncGapProvider, diff --git a/crates/stages/stages/src/stages/index_account_history.rs b/crates/stages/stages/src/stages/index_account_history.rs index 8ca8d173fd85c..e0fcde2b194f5 100644 --- a/crates/stages/stages/src/stages/index_account_history.rs +++ b/crates/stages/stages/src/stages/index_account_history.rs @@ -1,8 +1,8 @@ use super::{collect_history_indices, load_history_indices}; +use alloy_primitives::Address; use reth_config::config::{EtlConfig, IndexHistoryConfig}; use reth_db::tables; use reth_db_api::{models::ShardedKey, table::Decode, transaction::DbTxMut}; -use reth_primitives::Address; use reth_provider::{DBProvider, HistoryWriter, PruneCheckpointReader, PruneCheckpointWriter}; use reth_prune_types::{PruneCheckpoint, PruneMode, PrunePurpose, PruneSegment}; use reth_stages_api::{ @@ -148,6 +148,7 @@ mod tests { stage_test_suite_ext, ExecuteStageTestRunner, StageTestRunner, TestRunnerError, TestStageDB, UnwindStageTestRunner, }; + use alloy_primitives::BlockNumber; use itertools::Itertools; use reth_db::BlockNumberList; use reth_db_api::{ @@ -158,7 +159,7 @@ mod tests { }, transaction::DbTx, }; - use reth_primitives::{address, BlockNumber, B256}; + use reth_primitives::{address, B256}; use reth_provider::{providers::StaticFileWriter, DatabaseProviderFactory}; use reth_testing_utils::generators::{ self, random_block_range, random_changeset_range, random_contract_account_range, diff --git 
a/crates/stages/stages/src/stages/index_storage_history.rs b/crates/stages/stages/src/stages/index_storage_history.rs index 00646da2fd223..4af2cb3efea2e 100644 --- a/crates/stages/stages/src/stages/index_storage_history.rs +++ b/crates/stages/stages/src/stages/index_storage_history.rs @@ -153,6 +153,7 @@ mod tests { stage_test_suite_ext, ExecuteStageTestRunner, StageTestRunner, TestRunnerError, TestStageDB, UnwindStageTestRunner, }; + use alloy_primitives::{Address, BlockNumber}; use itertools::Itertools; use reth_db::BlockNumberList; use reth_db_api::{ @@ -163,7 +164,7 @@ mod tests { }, transaction::DbTx, }; - use reth_primitives::{address, b256, Address, BlockNumber, StorageEntry, B256, U256}; + use reth_primitives::{address, b256, StorageEntry, B256, U256}; use reth_provider::{providers::StaticFileWriter, DatabaseProviderFactory}; use reth_testing_utils::generators::{ self, random_block_range, random_changeset_range, random_contract_account_range, diff --git a/crates/stages/stages/src/stages/merkle.rs b/crates/stages/stages/src/stages/merkle.rs index 83c8c52ce5614..7b94067e9926e 100644 --- a/crates/stages/stages/src/stages/merkle.rs +++ b/crates/stages/stages/src/stages/merkle.rs @@ -1,8 +1,9 @@ +use alloy_primitives::BlockNumber; use reth_codecs::Compact; use reth_consensus::ConsensusError; use reth_db::tables; use reth_db_api::transaction::{DbTx, DbTxMut}; -use reth_primitives::{alloy_primitives::Sealable, BlockNumber, GotExpected, SealedHeader, B256}; +use reth_primitives::{alloy_primitives::Sealable, GotExpected, SealedHeader, B256}; use reth_provider::{ DBProvider, HeaderProvider, ProviderError, StageCheckpointReader, StageCheckpointWriter, StatsReader, TrieWriter, @@ -373,9 +374,10 @@ mod tests { stage_test_suite_ext, ExecuteStageTestRunner, StageTestRunner, StorageKind, TestRunnerError, TestStageDB, UnwindStageTestRunner, }; + use alloy_primitives::keccak256; use assert_matches::assert_matches; use reth_db_api::cursor::{DbCursorRO, DbCursorRW, DbDupCursorRO}; - use reth_primitives::{keccak256, SealedBlock, StaticFileSegment, StorageEntry, U256}; + use reth_primitives::{SealedBlock, StaticFileSegment, StorageEntry, U256}; use reth_provider::{providers::StaticFileWriter, StaticFileProviderFactory}; use reth_stages_api::StageUnitCheckpoint; use reth_testing_utils::generators::{ diff --git a/crates/stages/stages/src/stages/mod.rs b/crates/stages/stages/src/stages/mod.rs index 4eb3c6f141d4c..d3c5b7099d21c 100644 --- a/crates/stages/stages/src/stages/mod.rs +++ b/crates/stages/stages/src/stages/mod.rs @@ -42,6 +42,7 @@ use utils::*; mod tests { use super::*; use crate::test_utils::{StorageKind, TestStageDB}; + use alloy_primitives::{keccak256, BlockNumber}; use alloy_rlp::Decodable; use reth_chainspec::ChainSpecBuilder; use reth_db::{ @@ -56,8 +57,7 @@ mod tests { use reth_evm_ethereum::execute::EthExecutorProvider; use reth_exex::ExExManagerHandle; use reth_primitives::{ - address, hex_literal::hex, keccak256, Account, BlockNumber, Bytecode, SealedBlock, - StaticFileSegment, B256, U256, + address, hex_literal::hex, Account, Bytecode, SealedBlock, StaticFileSegment, B256, U256, }; use reth_provider::{ providers::{StaticFileProvider, StaticFileWriter}, diff --git a/crates/stages/stages/src/stages/sender_recovery.rs b/crates/stages/stages/src/stages/sender_recovery.rs index bf7df6147ff84..344a8ea848ee8 100644 --- a/crates/stages/stages/src/stages/sender_recovery.rs +++ b/crates/stages/stages/src/stages/sender_recovery.rs @@ -1,3 +1,4 @@ +use alloy_primitives::Address; use 
reth_config::config::SenderRecoveryConfig; use reth_consensus::ConsensusError; use reth_db::{static_file::TransactionMask, tables, RawValue}; @@ -6,7 +7,7 @@ use reth_db_api::{ transaction::{DbTx, DbTxMut}, DbTxUnwindExt, }; -use reth_primitives::{Address, GotExpected, StaticFileSegment, TransactionSignedNoHash, TxNumber}; +use reth_primitives::{GotExpected, StaticFileSegment, TransactionSignedNoHash, TxNumber}; use reth_provider::{ BlockReader, DBProvider, HeaderProvider, ProviderError, PruneCheckpointReader, StaticFileProviderFactory, StatsReader, @@ -333,9 +334,10 @@ struct FailedSenderRecoveryError { #[cfg(test)] mod tests { + use alloy_primitives::BlockNumber; use assert_matches::assert_matches; use reth_db_api::cursor::DbCursorRO; - use reth_primitives::{BlockNumber, SealedBlock, TransactionSigned, B256}; + use reth_primitives::{SealedBlock, TransactionSigned, B256}; use reth_provider::{ providers::StaticFileWriter, DatabaseProviderFactory, PruneCheckpointWriter, StaticFileProviderFactory, TransactionsProvider, diff --git a/crates/stages/stages/src/stages/tx_lookup.rs b/crates/stages/stages/src/stages/tx_lookup.rs index e636c281829dc..4e68cfe90b767 100644 --- a/crates/stages/stages/src/stages/tx_lookup.rs +++ b/crates/stages/stages/src/stages/tx_lookup.rs @@ -250,8 +250,9 @@ mod tests { stage_test_suite_ext, ExecuteStageTestRunner, StageTestRunner, StorageKind, TestRunnerError, TestStageDB, UnwindStageTestRunner, }; + use alloy_primitives::BlockNumber; use assert_matches::assert_matches; - use reth_primitives::{BlockNumber, SealedBlock, B256}; + use reth_primitives::{SealedBlock, B256}; use reth_provider::{ providers::StaticFileWriter, DatabaseProviderFactory, StaticFileProviderFactory, }; diff --git a/crates/stages/stages/src/stages/utils.rs b/crates/stages/stages/src/stages/utils.rs index cb9c729aa2345..c16886e0bdca2 100644 --- a/crates/stages/stages/src/stages/utils.rs +++ b/crates/stages/stages/src/stages/utils.rs @@ -1,4 +1,5 @@ //! Utils for `stages`. 
+use alloy_primitives::BlockNumber; use reth_config::config::EtlConfig; use reth_db::BlockNumberList; use reth_db_api::{ @@ -9,7 +10,6 @@ use reth_db_api::{ DatabaseError, }; use reth_etl::Collector; -use reth_primitives::BlockNumber; use reth_provider::DBProvider; use reth_stages_api::StageError; use std::{collections::HashMap, hash::Hash, ops::RangeBounds}; diff --git a/crates/stages/stages/src/test_utils/test_db.rs b/crates/stages/stages/src/test_utils/test_db.rs index 5fef9d6a2be20..89d74d9d4092a 100644 --- a/crates/stages/stages/src/test_utils/test_db.rs +++ b/crates/stages/stages/src/test_utils/test_db.rs @@ -1,3 +1,4 @@ +use alloy_primitives::{keccak256, Address, BlockNumber}; use reth_chainspec::MAINNET; use reth_db::{ tables, @@ -14,8 +15,8 @@ use reth_db_api::{ DatabaseError as DbError, }; use reth_primitives::{ - keccak256, Account, Address, BlockNumber, Receipt, SealedBlock, SealedHeader, - StaticFileSegment, StorageEntry, TxHash, TxNumber, B256, U256, + Account, Receipt, SealedBlock, SealedHeader, StaticFileSegment, StorageEntry, TxHash, TxNumber, + B256, U256, }; use reth_provider::{ providers::{StaticFileProvider, StaticFileProviderRWRefMut, StaticFileWriter}, diff --git a/crates/storage/provider/src/providers/static_file/mod.rs b/crates/storage/provider/src/providers/static_file/mod.rs index 25c5ec7ef6aaf..04f032b0fef42 100644 --- a/crates/storage/provider/src/providers/static_file/mod.rs +++ b/crates/storage/provider/src/providers/static_file/mod.rs @@ -56,7 +56,7 @@ impl Deref for LoadedJar { mod tests { use super::*; use crate::{test_utils::create_test_provider_factory, HeaderProvider}; - use alloy_primitives::{B256, U256}; + use alloy_primitives::{BlockHash, B256, U256}; use rand::seq::SliceRandom; use reth_db::{ test_utils::create_test_static_files_dir, CanonicalHeaders, HeaderNumbers, @@ -65,7 +65,7 @@ mod tests { use reth_db_api::transaction::DbTxMut; use reth_primitives::{ static_file::{find_fixed_range, SegmentRangeInclusive, DEFAULT_BLOCKS_PER_STATIC_FILE}, - BlockHash, Header, Receipt, TransactionSignedNoHash, TxNumber, + Header, Receipt, TransactionSignedNoHash, TxNumber, }; use reth_storage_api::{ReceiptProvider, TransactionsProvider}; use reth_testing_utils::generators::{self, random_header_range}; diff --git a/crates/storage/provider/src/test_utils/blocks.rs b/crates/storage/provider/src/test_utils/blocks.rs index 352f5314af9fc..6f6aae9924464 100644 --- a/crates/storage/provider/src/test_utils/blocks.rs +++ b/crates/storage/provider/src/test_utils/blocks.rs @@ -1,14 +1,14 @@ //! 
Dummy blocks and data for tests use crate::{DatabaseProviderRW, ExecutionOutcome}; use alloy_consensus::TxLegacy; -use alloy_primitives::{Log, Parity, Sealable, TxKind}; +use alloy_primitives::{Address, BlockNumber, Log, Parity, Sealable, TxKind}; use once_cell::sync::Lazy; use reth_db::tables; use reth_db_api::{database::Database, models::StoredBlockBodyIndices}; use reth_primitives::{ - alloy_primitives, b256, hex_literal::hex, Account, Address, BlockBody, BlockNumber, Bytes, - Header, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, Signature, Transaction, - TransactionSigned, TxType, Withdrawal, Withdrawals, B256, U256, + alloy_primitives, b256, hex_literal::hex, Account, BlockBody, Bytes, Header, Receipt, + SealedBlock, SealedBlockWithSenders, SealedHeader, Signature, Transaction, TransactionSigned, + TxType, Withdrawal, Withdrawals, B256, U256, }; use reth_trie::root::{state_root_unhashed, storage_root_unhashed}; use revm::{ diff --git a/crates/storage/provider/src/writer/mod.rs b/crates/storage/provider/src/writer/mod.rs index 17bf9db81bd43..d96e722d7b404 100644 --- a/crates/storage/provider/src/writer/mod.rs +++ b/crates/storage/provider/src/writer/mod.rs @@ -544,14 +544,14 @@ mod tests { use crate::{ test_utils::create_test_provider_factory, AccountReader, StorageTrieWriter, TrieWriter, }; - use alloy_primitives::{keccak256, B256, U256}; + use alloy_primitives::{keccak256, Address, B256, U256}; use reth_db::tables; use reth_db_api::{ cursor::{DbCursorRO, DbCursorRW, DbDupCursorRO}, models::{AccountBeforeTx, BlockNumberAddress}, transaction::{DbTx, DbTxMut}, }; - use reth_primitives::{Account, Address, Receipt, Receipts, StorageEntry}; + use reth_primitives::{Account, Receipt, Receipts, StorageEntry}; use reth_storage_api::DatabaseProviderFactory; use reth_trie::{ test_utils::{state_root, storage_root_prehashed}, diff --git a/crates/trie/db/src/state.rs b/crates/trie/db/src/state.rs index 4f27679ddad90..9cefc6b9fa83c 100644 --- a/crates/trie/db/src/state.rs +++ b/crates/trie/db/src/state.rs @@ -267,9 +267,10 @@ impl DatabaseHashedPostState for HashedPostState { #[cfg(test)] mod tests { use super::*; + use alloy_primitives::Address; use reth_db::test_utils::create_test_rw_db; use reth_db_api::database::Database; - use reth_primitives::{hex, revm_primitives::AccountInfo, Address, U256}; + use reth_primitives::{hex, revm_primitives::AccountInfo, U256}; use revm::db::BundleState; use std::collections::HashMap; diff --git a/examples/custom-engine-types/Cargo.toml b/examples/custom-engine-types/Cargo.toml index 9a949b8367fde..f826451d20389 100644 --- a/examples/custom-engine-types/Cargo.toml +++ b/examples/custom-engine-types/Cargo.toml @@ -18,6 +18,7 @@ reth-node-ethereum = { workspace = true, features = ["test-utils"] } reth-tracing.workspace = true alloy-genesis.workspace = true alloy-rpc-types = { workspace = true, features = ["engine"] } +alloy-primitives.workspace = true eyre.workspace = true tokio.workspace = true diff --git a/examples/custom-engine-types/src/main.rs b/examples/custom-engine-types/src/main.rs index 5e0503701ab95..c4f640e9b87f3 100644 --- a/examples/custom-engine-types/src/main.rs +++ b/examples/custom-engine-types/src/main.rs @@ -23,6 +23,7 @@ use serde::{Deserialize, Serialize}; use thiserror::Error; use alloy_genesis::Genesis; +use alloy_primitives::Address; use alloy_rpc_types::{ engine::{ ExecutionPayloadEnvelopeV2, ExecutionPayloadEnvelopeV3, ExecutionPayloadEnvelopeV4, @@ -63,7 +64,7 @@ use reth_payload_builder::{ EthBuiltPayload, 
EthPayloadBuilderAttributes, PayloadBuilderError, PayloadBuilderHandle, PayloadBuilderService, }; -use reth_primitives::{Address, Withdrawals, B256}; +use reth_primitives::{Withdrawals, B256}; use reth_tracing::{RethTracer, Tracer}; /// A custom payload attributes type. diff --git a/examples/custom-evm/Cargo.toml b/examples/custom-evm/Cargo.toml index 7642dc80cf2f4..53563ab9575b2 100644 --- a/examples/custom-evm/Cargo.toml +++ b/examples/custom-evm/Cargo.toml @@ -15,6 +15,7 @@ reth-primitives.workspace = true reth-node-ethereum = { workspace = true, features = ["test-utils"] } reth-tracing.workspace = true alloy-genesis.workspace = true +alloy-primitives.workspace = true eyre.workspace = true tokio.workspace = true diff --git a/examples/custom-evm/src/main.rs b/examples/custom-evm/src/main.rs index 3a93d85ad6fbd..4f1c0c7eb2990 100644 --- a/examples/custom-evm/src/main.rs +++ b/examples/custom-evm/src/main.rs @@ -3,6 +3,7 @@ #![cfg_attr(not(test), warn(unused_crate_dependencies))] use alloy_genesis::Genesis; +use alloy_primitives::Address; use reth::{ builder::{ components::{ExecutorBuilder, PayloadServiceBuilder}, @@ -38,7 +39,7 @@ use reth_node_ethereum::{ }; use reth_primitives::{ revm_primitives::{CfgEnvWithHandlerCfg, TxEnv}, - Address, Header, TransactionSigned, U256, + Header, TransactionSigned, U256, }; use reth_tracing::{RethTracer, Tracer}; use std::sync::Arc; diff --git a/examples/custom-inspector/Cargo.toml b/examples/custom-inspector/Cargo.toml index 1018062760997..a94980951627e 100644 --- a/examples/custom-inspector/Cargo.toml +++ b/examples/custom-inspector/Cargo.toml @@ -12,3 +12,4 @@ reth-rpc-types.workspace = true alloy-rpc-types.workspace = true clap = { workspace = true, features = ["derive"] } futures-util.workspace = true +alloy-primitives.workspace = true diff --git a/examples/custom-inspector/src/main.rs b/examples/custom-inspector/src/main.rs index ce159c75cdba3..42271dcabe97f 100644 --- a/examples/custom-inspector/src/main.rs +++ b/examples/custom-inspector/src/main.rs @@ -10,6 +10,7 @@ #![cfg_attr(not(test), warn(unused_crate_dependencies))] +use alloy_primitives::Address; use alloy_rpc_types::state::EvmOverrides; use clap::Parser; use futures_util::StreamExt; @@ -17,7 +18,7 @@ use reth::{ args::utils::DefaultChainSpecParser, builder::NodeHandle, cli::Cli, - primitives::{Address, BlockNumberOrTag}, + primitives::BlockNumberOrTag, revm::{ inspector_handle_register, interpreter::{Interpreter, OpCode}, diff --git a/examples/db-access/Cargo.toml b/examples/db-access/Cargo.toml index c0fbe74e18742..0180f4f90922b 100644 --- a/examples/db-access/Cargo.toml +++ b/examples/db-access/Cargo.toml @@ -16,6 +16,7 @@ reth-node-ethereum.workspace = true reth-node-types.workspace = true alloy-rpc-types.workspace = true +alloy-primitives.workspace = true eyre.workspace = true diff --git a/examples/db-access/src/main.rs b/examples/db-access/src/main.rs index 53b56b738c0ae..5730e67535229 100644 --- a/examples/db-access/src/main.rs +++ b/examples/db-access/src/main.rs @@ -1,8 +1,9 @@ +use alloy_primitives::Address; use alloy_rpc_types::{Filter, FilteredParams}; use reth_chainspec::ChainSpecBuilder; use reth_node_ethereum::EthereumNode; use reth_node_types::NodeTypesWithDBAdapter; -use reth_primitives::{alloy_primitives::Sealable, Address, SealedHeader, B256}; +use reth_primitives::{alloy_primitives::Sealable, SealedHeader, B256}; use reth_provider::{ providers::StaticFileProvider, AccountReader, BlockReader, BlockSource, HeaderProvider, ProviderFactory, ReceiptProvider, StateProvider, 
TransactionsProvider, diff --git a/examples/stateful-precompile/Cargo.toml b/examples/stateful-precompile/Cargo.toml index 2ae4656eee869..47a784c36e146 100644 --- a/examples/stateful-precompile/Cargo.toml +++ b/examples/stateful-precompile/Cargo.toml @@ -14,6 +14,7 @@ reth-primitives.workspace = true reth-node-ethereum = { workspace = true, features = ["test-utils"] } reth-tracing.workspace = true alloy-genesis.workspace = true +alloy-primitives.workspace = true eyre.workspace = true parking_lot.workspace = true diff --git a/examples/stateful-precompile/src/main.rs b/examples/stateful-precompile/src/main.rs index 88ca2ac1a7bb8..effece640d269 100644 --- a/examples/stateful-precompile/src/main.rs +++ b/examples/stateful-precompile/src/main.rs @@ -3,13 +3,14 @@ #![cfg_attr(not(test), warn(unused_crate_dependencies))] use alloy_genesis::Genesis; +use alloy_primitives::Address; use parking_lot::RwLock; use reth::{ api::NextBlockEnvAttributes, builder::{components::ExecutorBuilder, BuilderContext, NodeBuilder}, primitives::{ revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg, Env, PrecompileResult, TxEnv}, - Address, Bytes, U256, + Bytes, U256, }, revm::{ handler::register::EvmHandler, diff --git a/examples/txpool-tracing/Cargo.toml b/examples/txpool-tracing/Cargo.toml index 219292ee06867..38d0ad9409b6b 100644 --- a/examples/txpool-tracing/Cargo.toml +++ b/examples/txpool-tracing/Cargo.toml @@ -11,3 +11,4 @@ reth-node-ethereum.workspace = true alloy-rpc-types-trace.workspace = true clap = { workspace = true, features = ["derive"] } futures-util.workspace = true +alloy-primitives.workspace = true diff --git a/examples/txpool-tracing/src/main.rs b/examples/txpool-tracing/src/main.rs index cf721c51f0a59..6b916dcb50924 100644 --- a/examples/txpool-tracing/src/main.rs +++ b/examples/txpool-tracing/src/main.rs @@ -10,11 +10,12 @@ #![cfg_attr(not(test), warn(unused_crate_dependencies))] +use alloy_primitives::Address; use alloy_rpc_types_trace::{parity::TraceType, tracerequest::TraceCallRequest}; use clap::Parser; use futures_util::StreamExt; use reth::{ - args::utils::DefaultChainSpecParser, builder::NodeHandle, cli::Cli, primitives::Address, + args::utils::DefaultChainSpecParser, builder::NodeHandle, cli::Cli, rpc::compat::transaction::transaction_to_call_request, transaction_pool::TransactionPool, }; use reth_node_ethereum::node::EthereumNode; diff --git a/testing/ef-tests/src/models.rs b/testing/ef-tests/src/models.rs index 8c8c3189a5d33..47d0a388a3ef4 100644 --- a/testing/ef-tests/src/models.rs +++ b/testing/ef-tests/src/models.rs @@ -1,7 +1,7 @@ //! 
Shared models for use crate::{assert::assert_equal, Error}; -use alloy_primitives::{Address, Bloom, Bytes, B256, B64, U256}; +use alloy_primitives::{keccak256, Address, Bloom, Bytes, B256, B64, U256}; use reth_chainspec::{ChainSpec, ChainSpecBuilder}; use reth_db::tables; use reth_db_api::{ @@ -9,8 +9,7 @@ use reth_db_api::{ transaction::{DbTx, DbTxMut}, }; use reth_primitives::{ - keccak256, Account as RethAccount, Bytecode, Header as RethHeader, SealedHeader, StorageEntry, - Withdrawals, + Account as RethAccount, Bytecode, Header as RethHeader, SealedHeader, StorageEntry, Withdrawals, }; use serde::Deserialize; use std::{collections::BTreeMap, ops::Deref}; diff --git a/testing/testing-utils/src/genesis_allocator.rs b/testing/testing-utils/src/genesis_allocator.rs index 8a5adb300240c..d2da3bbabd28c 100644 --- a/testing/testing-utils/src/genesis_allocator.rs +++ b/testing/testing-utils/src/genesis_allocator.rs @@ -18,7 +18,8 @@ use std::{ /// /// # Example /// ``` -/// # use reth_primitives::{Address, U256, hex, Bytes}; +/// # use alloy_primitives::Address; +/// # use reth_primitives::{U256, hex, Bytes}; /// # use reth_testing_utils::GenesisAllocator; /// # use std::str::FromStr; /// let mut allocator = GenesisAllocator::default(); From 65f26644719231949f9701d6d3b884b387487c97 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Thu, 26 Sep 2024 14:37:20 +0300 Subject: [PATCH 21/84] feat: make `NetworkConfigBuilder` independent of concrete `ChainSpec` (#11176) --- crates/chainspec/src/api.rs | 2 +- crates/chainspec/src/spec.rs | 19 +++++-- crates/cli/commands/src/p2p/mod.rs | 3 +- crates/consensus/auto-seal/src/task.rs | 2 +- crates/ethereum-forks/src/hardforks/mod.rs | 21 ++++--- crates/net/discv5/src/network_stack_id.rs | 10 ++-- crates/net/eth-wire-types/src/status.rs | 9 ++- crates/net/network/src/config.rs | 56 ++++++++++--------- crates/net/network/src/manager.rs | 4 +- crates/net/network/src/test_utils/testnet.rs | 21 +++++-- crates/net/network/tests/it/connect.rs | 3 +- crates/net/network/tests/it/startup.rs | 5 +- crates/node/builder/src/builder/mod.rs | 7 ++- crates/node/builder/src/launch/common.rs | 2 +- crates/node/core/src/args/network.rs | 1 - crates/node/types/src/lib.rs | 6 +- .../src/providers/database/provider.rs | 4 +- crates/storage/storage-api/src/noop.rs | 29 ++++++++-- examples/bsc-p2p/src/main.rs | 3 +- examples/custom-rlpx-subprotocol/src/main.rs | 3 +- examples/polygon-p2p/src/main.rs | 4 +- 21 files changed, 130 insertions(+), 84 deletions(-) diff --git a/crates/chainspec/src/api.rs b/crates/chainspec/src/api.rs index 15a3a024494cc..fb9744a53164a 100644 --- a/crates/chainspec/src/api.rs +++ b/crates/chainspec/src/api.rs @@ -8,7 +8,7 @@ use reth_primitives_traits::Header; /// Trait representing type configuring a chain spec. #[auto_impl::auto_impl(&, Arc)] -pub trait EthChainSpec: Send + Sync + Unpin + Debug + 'static { +pub trait EthChainSpec: Send + Sync + Unpin + Debug { // todo: make chain spec type generic over hardfork //type Hardfork: Clone + Copy + 'static; diff --git a/crates/chainspec/src/spec.rs b/crates/chainspec/src/spec.rs index 463501ee4df62..c62ed4f672234 100644 --- a/crates/chainspec/src/spec.rs +++ b/crates/chainspec/src/spec.rs @@ -411,10 +411,7 @@ impl ChainSpec { /// Returns the hardfork display helper. 
pub fn display_hardforks(&self) -> DisplayHardforks { - DisplayHardforks::new( - &self.hardforks, - self.paris_block_and_final_difficulty.map(|(block, _)| block), - ) + DisplayHardforks::new(&self, self.paris_block_and_final_difficulty.map(|(block, _)| block)) } /// Get the fork id for the given hardfork. @@ -613,6 +610,18 @@ impl Hardforks for ChainSpec { fn forks_iter(&self) -> impl Iterator { self.hardforks.forks_iter() } + + fn fork_id(&self, head: &Head) -> ForkId { + self.fork_id(head) + } + + fn latest_fork_id(&self) -> ForkId { + self.latest_fork_id() + } + + fn fork_filter(&self, head: Head) -> ForkFilter { + self.fork_filter(head) + } } impl EthereumHardforks for ChainSpec { @@ -820,7 +829,7 @@ fn into_optimism_chain_spec(genesis: Genesis) -> ChainSpec { #[auto_impl::auto_impl(&, Arc)] pub trait ChainSpecProvider: Send + Sync { /// The chain spec type. - type ChainSpec: EthChainSpec; + type ChainSpec: EthChainSpec + 'static; /// Get an [`Arc`] to the [`ChainSpec`]. fn chain_spec(&self) -> Arc; diff --git a/crates/cli/commands/src/p2p/mod.rs b/crates/cli/commands/src/p2p/mod.rs index e8eadddbd9067..6d40e414dd336 100644 --- a/crates/cli/commands/src/p2p/mod.rs +++ b/crates/cli/commands/src/p2p/mod.rs @@ -100,13 +100,12 @@ impl> Command { let net = NetworkConfigBuilder::new(p2p_secret_key) .peer_config(config.peers_config_with_basic_nodes_from_file(None)) .external_ip_resolver(self.network.nat) - .chain_spec(self.chain.clone()) .disable_discv4_discovery_if(self.chain.chain.is_optimism()) .boot_nodes(boot_nodes.clone()) .apply(|builder| { self.network.discovery.apply_to_builder(builder, rlpx_socket, boot_nodes) }) - .build_with_noop_provider() + .build_with_noop_provider(self.chain) .manager() .await?; let network = net.handle().clone(); diff --git a/crates/consensus/auto-seal/src/task.rs b/crates/consensus/auto-seal/src/task.rs index dbbdc44631cfc..e4873615f1d0f 100644 --- a/crates/consensus/auto-seal/src/task.rs +++ b/crates/consensus/auto-seal/src/task.rs @@ -86,7 +86,7 @@ where Pool: TransactionPool + Unpin + 'static, Engine: EngineTypes, Executor: BlockExecutorProvider, - ChainSpec: EthChainSpec + EthereumHardforks, + ChainSpec: EthChainSpec + EthereumHardforks + 'static, { type Output = (); diff --git a/crates/ethereum-forks/src/hardforks/mod.rs b/crates/ethereum-forks/src/hardforks/mod.rs index 11851c738962a..78db5464cb2b7 100644 --- a/crates/ethereum-forks/src/hardforks/mod.rs +++ b/crates/ethereum-forks/src/hardforks/mod.rs @@ -2,7 +2,7 @@ mod ethereum; pub use ethereum::EthereumHardforks; -use crate::{ForkCondition, Hardfork}; +use crate::{ForkCondition, ForkFilter, ForkId, Hardfork, Head}; #[cfg(feature = "std")] use rustc_hash::FxHashMap; #[cfg(feature = "std")] @@ -31,6 +31,15 @@ pub trait Hardforks: Clone { fn is_fork_active_at_block(&self, fork: H, block_number: u64) -> bool { self.fork(fork).active_at_block(block_number) } + + /// Compute the [`ForkId`] for the given [`Head`] following eip-6122 spec + fn fork_id(&self, head: &Head) -> ForkId; + + /// Returns the [`ForkId`] for the last fork. + fn latest_fork_id(&self) -> ForkId; + + /// Creates a [`ForkFilter`] for the block described by [Head]. + fn fork_filter(&self, head: Head) -> ForkFilter; } /// Ordered list of a chain hardforks that implement [`Hardfork`]. 
@@ -129,16 +138,6 @@ impl ChainHardforks { } } -impl Hardforks for ChainHardforks { - fn fork(&self, fork: H) -> ForkCondition { - self.fork(fork) - } - - fn forks_iter(&self) -> impl Iterator { - self.forks_iter() - } -} - impl core::fmt::Debug for ChainHardforks { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { f.debug_struct("ChainHardforks") diff --git a/crates/net/discv5/src/network_stack_id.rs b/crates/net/discv5/src/network_stack_id.rs index 9943b38c87b6a..f707c7de7b7f2 100644 --- a/crates/net/discv5/src/network_stack_id.rs +++ b/crates/net/discv5/src/network_stack_id.rs @@ -1,7 +1,7 @@ //! Keys of ENR [`ForkId`](reth_ethereum_forks::ForkId) kv-pair. Identifies which network stack a //! node belongs to. -use reth_chainspec::ChainSpec; +use reth_chainspec::EthChainSpec; /// Identifies which Ethereum network stack a node belongs to, on the discovery network. #[derive(Debug)] @@ -21,11 +21,11 @@ impl NetworkStackId { pub const OPSTACK: &'static [u8] = b"opstack"; #[allow(clippy::missing_const_for_fn)] - /// Returns the [`NetworkStackId`] that matches the given [`ChainSpec`]. - pub fn id(chain: &ChainSpec) -> Option<&'static [u8]> { - if chain.is_optimism() { + /// Returns the [`NetworkStackId`] that matches the given chain spec. + pub fn id(chain: impl EthChainSpec) -> Option<&'static [u8]> { + if chain.chain().is_optimism() { return Some(Self::OPEL) - } else if chain.is_ethereum() { + } else if chain.chain().is_ethereum() { return Some(Self::ETH) } diff --git a/crates/net/eth-wire-types/src/status.rs b/crates/net/eth-wire-types/src/status.rs index 91e4223a35a8d..baf1e2991522f 100644 --- a/crates/net/eth-wire-types/src/status.rs +++ b/crates/net/eth-wire-types/src/status.rs @@ -3,7 +3,7 @@ use alloy_chains::{Chain, NamedChain}; use alloy_genesis::Genesis; use alloy_primitives::{hex, B256, U256}; use alloy_rlp::{RlpDecodable, RlpEncodable}; -use reth_chainspec::{ChainSpec, MAINNET}; +use reth_chainspec::{ChainSpec, EthChainSpec, Hardforks, MAINNET}; use reth_codecs_derive::add_arbitrary_tests; use reth_primitives::{EthereumHardfork, ForkId, Head}; use std::fmt::{Debug, Display}; @@ -75,9 +75,12 @@ impl Status { /// /// Sets the `chain` and `genesis`, `blockhash`, and `forkid` fields based on the [`ChainSpec`] /// and head. 
- pub fn spec_builder(spec: &ChainSpec, head: &Head) -> StatusBuilder { + pub fn spec_builder(spec: Spec, head: &Head) -> StatusBuilder + where + Spec: EthChainSpec + Hardforks, + { Self::builder() - .chain(spec.chain) + .chain(spec.chain()) .genesis(spec.genesis_hash()) .blockhash(head.hash) .total_difficulty(head.total_difficulty) diff --git a/crates/net/network/src/config.rs b/crates/net/network/src/config.rs index 546d9f1821332..8217a02a1bab6 100644 --- a/crates/net/network/src/config.rs +++ b/crates/net/network/src/config.rs @@ -2,7 +2,7 @@ use std::{collections::HashSet, net::SocketAddr, sync::Arc}; -use reth_chainspec::{ChainSpec, MAINNET}; +use reth_chainspec::{ChainSpecProvider, EthChainSpec, Hardforks}; use reth_discv4::{Discv4Config, Discv4ConfigBuilder, NatResolver, DEFAULT_DISCOVERY_ADDRESS}; use reth_discv5::NetworkStackId; use reth_dns_discovery::DnsDiscoveryConfig; @@ -10,7 +10,7 @@ use reth_eth_wire::{HelloMessage, HelloMessageWithProtocols, Status}; use reth_network_peers::{mainnet_nodes, pk2id, sepolia_nodes, PeerId, TrustedPeer}; use reth_network_types::{PeersConfig, SessionsConfig}; use reth_primitives::{ForkFilter, Head}; -use reth_storage_api::{BlockNumReader, BlockReader, HeaderProvider}; +use reth_storage_api::{noop::NoopBlockReader, BlockNumReader, BlockReader, HeaderProvider}; use reth_tasks::{TaskSpawner, TokioTaskExecutor}; use secp256k1::SECP256K1; @@ -56,8 +56,8 @@ pub struct NetworkConfig { pub peers_config: PeersConfig, /// How to configure the [`SessionManager`](crate::session::SessionManager). pub sessions_config: SessionsConfig, - /// The chain spec - pub chain_spec: Arc, + /// The chain id + pub chain_id: u64, /// The [`ForkFilter`] to use at launch for authenticating sessions. /// /// See also @@ -99,7 +99,10 @@ impl NetworkConfig<()> { impl NetworkConfig { /// Create a new instance with all mandatory fields set, rest is field with defaults. - pub fn new(client: C, secret_key: SecretKey) -> Self { + pub fn new(client: C, secret_key: SecretKey) -> Self + where + C: ChainSpecProvider, + { NetworkConfig::builder(secret_key).build(client) } @@ -170,8 +173,6 @@ pub struct NetworkConfigBuilder { peers_config: Option, /// How to configure the sessions manager sessions_config: Option, - /// The network's chain spec - chain_spec: Arc, /// The default mode of the network. network_mode: NetworkMode, /// The executor to use for spawning tasks. @@ -211,7 +212,6 @@ impl NetworkConfigBuilder { listener_addr: None, peers_config: None, sessions_config: None, - chain_spec: MAINNET.clone(), network_mode: Default::default(), executor: None, hello_message: None, @@ -241,12 +241,6 @@ impl NetworkConfigBuilder { &self.secret_key } - /// Sets the chain spec. - pub fn chain_spec(mut self, chain_spec: Arc) -> Self { - self.chain_spec = chain_spec; - self - } - /// Sets the [`NetworkMode`]. pub const fn network_mode(mut self, network_mode: NetworkMode) -> Self { self.network_mode = network_mode; @@ -461,10 +455,14 @@ impl NetworkConfigBuilder { /// Convenience function for creating a [`NetworkConfig`] with a noop provider that does /// nothing. 
- pub fn build_with_noop_provider( + pub fn build_with_noop_provider( self, - ) -> NetworkConfig { - self.build(Default::default()) + chain_spec: Arc, + ) -> NetworkConfig> + where + ChainSpec: EthChainSpec + Hardforks + 'static, + { + self.build(NoopBlockReader::new(chain_spec)) } /// Consumes the type and creates the actual [`NetworkConfig`] @@ -473,8 +471,12 @@ impl NetworkConfigBuilder { /// The given client is to be used for interacting with the chain, for example fetching the /// corresponding block for a given block hash we receive from a peer in the status message when /// establishing a connection. - pub fn build(self, client: C) -> NetworkConfig { + pub fn build(self, client: C) -> NetworkConfig + where + C: ChainSpecProvider, + { let peer_id = self.get_peer_id(); + let chain_spec = client.chain_spec(); let Self { secret_key, mut dns_discovery_config, @@ -485,7 +487,6 @@ impl NetworkConfigBuilder { listener_addr, peers_config, sessions_config, - chain_spec, network_mode, executor, hello_message, @@ -514,9 +515,9 @@ impl NetworkConfigBuilder { let head = head.unwrap_or_else(|| Head { hash: chain_spec.genesis_hash(), number: 0, - timestamp: chain_spec.genesis.timestamp, - difficulty: chain_spec.genesis.difficulty, - total_difficulty: chain_spec.genesis.difficulty, + timestamp: chain_spec.genesis().timestamp, + difficulty: chain_spec.genesis().difficulty, + total_difficulty: chain_spec.genesis().difficulty, }); // set the status @@ -525,6 +526,9 @@ impl NetworkConfigBuilder { // set a fork filter based on the chain spec and head let fork_filter = chain_spec.fork_filter(head); + // get the chain id + let chain_id = chain_spec.chain().id(); + // If default DNS config is used then we add the known dns network to bootstrap from if let Some(dns_networks) = dns_discovery_config.as_mut().and_then(|c| c.bootstrap_dns_networks.as_mut()) @@ -547,7 +551,7 @@ impl NetworkConfigBuilder { listener_addr, peers_config: peers_config.unwrap_or_default(), sessions_config: sessions_config.unwrap_or_default(), - chain_spec, + chain_id, block_import: block_import.unwrap_or_else(|| Box::::default()), network_mode, executor: executor.unwrap_or_else(|| Box::::default()), @@ -587,9 +591,11 @@ impl NetworkMode { #[cfg(test)] mod tests { + use std::sync::Arc; + use super::*; use rand::thread_rng; - use reth_chainspec::Chain; + use reth_chainspec::{Chain, MAINNET}; use reth_dns_discovery::tree::LinkEntry; use reth_primitives::ForkHash; use reth_provider::test_utils::NoopProvider; @@ -622,7 +628,7 @@ mod tests { let genesis_fork_hash = ForkHash::from(chain_spec.genesis_hash()); // enforce that the fork_id set in the status is consistent with the generated fork filter - let config = builder().chain_spec(chain_spec).build(NoopProvider::default()); + let config = builder().build_with_noop_provider(chain_spec); let status = config.status; let fork_filter = config.fork_filter; diff --git a/crates/net/network/src/manager.rs b/crates/net/network/src/manager.rs index 864b6791f67a7..3e86ae2442844 100644 --- a/crates/net/network/src/manager.rs +++ b/crates/net/network/src/manager.rs @@ -178,7 +178,7 @@ impl NetworkManager { listener_addr, peers_config, sessions_config, - chain_spec, + chain_id, block_import, network_mode, boot_nodes, @@ -264,7 +264,7 @@ impl NetworkManager { local_peer_id, peers_handle, network_mode, - Arc::new(AtomicU64::new(chain_spec.chain.id())), + Arc::new(AtomicU64::new(chain_id)), tx_gossip_disabled, discv4, discv5, diff --git a/crates/net/network/src/test_utils/testnet.rs 
b/crates/net/network/src/test_utils/testnet.rs index 913fe55ae6fa2..d92272a871e0d 100644 --- a/crates/net/network/src/test_utils/testnet.rs +++ b/crates/net/network/src/test_utils/testnet.rs @@ -10,14 +10,14 @@ use std::{ use futures::{FutureExt, StreamExt}; use pin_project::pin_project; -use reth_chainspec::MAINNET; +use reth_chainspec::{Hardforks, MAINNET}; use reth_eth_wire::{protocol::Protocol, DisconnectReason, HelloMessageWithProtocols}; use reth_network_api::{ test_utils::{PeersHandle, PeersHandleProvider}, NetworkEvent, NetworkEventListenerProvider, NetworkInfo, Peers, }; use reth_network_peers::PeerId; -use reth_provider::test_utils::NoopProvider; +use reth_provider::{test_utils::NoopProvider, ChainSpecProvider}; use reth_storage_api::{BlockReader, BlockReaderIdExt, HeaderProvider, StateProviderFactory}; use reth_tasks::TokioTaskExecutor; use reth_tokio_util::EventStream; @@ -54,7 +54,7 @@ pub struct Testnet { impl Testnet where - C: BlockReader + HeaderProvider + Clone + 'static, + C: BlockReader + HeaderProvider + Clone + 'static + ChainSpecProvider, { /// Same as [`Self::try_create_with`] but panics on error pub async fn create_with(num_peers: usize, provider: C) -> Self { @@ -548,7 +548,10 @@ where /// Initialize the network with a random secret key, allowing the devp2p and discovery to bind /// to any available IP and port. - pub fn new(client: C) -> Self { + pub fn new(client: C) -> Self + where + C: ChainSpecProvider, + { let secret_key = SecretKey::new(&mut rand::thread_rng()); let config = Self::network_config_builder(secret_key).build(client.clone()); Self { config, client, secret_key } @@ -556,13 +559,19 @@ where /// Initialize the network with a given secret key, allowing devp2p and discovery to bind any /// available IP and port. - pub fn with_secret_key(client: C, secret_key: SecretKey) -> Self { + pub fn with_secret_key(client: C, secret_key: SecretKey) -> Self + where + C: ChainSpecProvider, + { let config = Self::network_config_builder(secret_key).build(client.clone()); Self { config, client, secret_key } } /// Initialize the network with a given capabilities. 
- pub fn with_protocols(client: C, protocols: impl IntoIterator) -> Self { + pub fn with_protocols(client: C, protocols: impl IntoIterator) -> Self + where + C: ChainSpecProvider, + { let secret_key = SecretKey::new(&mut rand::thread_rng()); let builder = Self::network_config_builder(secret_key); diff --git a/crates/net/network/tests/it/connect.rs b/crates/net/network/tests/it/connect.rs index 8201b3280bcff..5d91049b92afa 100644 --- a/crates/net/network/tests/it/connect.rs +++ b/crates/net/network/tests/it/connect.rs @@ -5,6 +5,7 @@ use std::{collections::HashSet, net::SocketAddr, time::Duration}; use alloy_node_bindings::Geth; use alloy_provider::{ext::AdminApi, ProviderBuilder}; use futures::StreamExt; +use reth_chainspec::MAINNET; use reth_discv4::Discv4Config; use reth_eth_wire::{DisconnectReason, HeadersDirection}; use reth_net_banlist::BanList; @@ -688,7 +689,7 @@ async fn new_random_peer(max_in_bound: usize, trusted_nodes: Vec) - .listener_port(0) .disable_discovery() .peer_config(peers_config) - .build_with_noop_provider(); + .build_with_noop_provider(MAINNET.clone()); NetworkManager::new(config).await.unwrap() } diff --git a/crates/net/network/tests/it/startup.rs b/crates/net/network/tests/it/startup.rs index 269b352e4b10e..8f7e4ff8c78ce 100644 --- a/crates/net/network/tests/it/startup.rs +++ b/crates/net/network/tests/it/startup.rs @@ -3,6 +3,7 @@ use std::{ net::{Ipv4Addr, SocketAddr, SocketAddrV4}, }; +use reth_chainspec::MAINNET; use reth_discv4::Discv4Config; use reth_network::{ error::{NetworkError, ServiceKind}, @@ -74,7 +75,7 @@ async fn test_tcp_port_node_record_no_discovery() { let config = NetworkConfigBuilder::new(secret_key) .listener_port(0) .disable_discovery() - .build_with_noop_provider(); + .build_with_noop_provider(MAINNET.clone()); let network = NetworkManager::new(config).await.unwrap(); let local_addr = network.local_addr(); @@ -93,7 +94,7 @@ async fn test_tcp_port_node_record_discovery() { .listener_port(0) .discovery_port(0) .disable_dns_discovery() - .build_with_noop_provider(); + .build_with_noop_provider(MAINNET.clone()); let network = NetworkManager::new(config).await.unwrap(); let local_addr = network.local_addr(); diff --git a/crates/node/builder/src/builder/mod.rs b/crates/node/builder/src/builder/mod.rs index d7cec56cab916..2bd43d3c6ac79 100644 --- a/crates/node/builder/src/builder/mod.rs +++ b/crates/node/builder/src/builder/mod.rs @@ -10,7 +10,7 @@ pub use states::*; use std::sync::Arc; use futures::Future; -use reth_chainspec::{ChainSpec, EthChainSpec, EthereumHardforks}; +use reth_chainspec::{ChainSpec, EthChainSpec, EthereumHardforks, Hardforks}; use reth_cli_util::get_secret_key; use reth_db_api::{ database::Database, @@ -633,7 +633,10 @@ impl BuilderContext { pub fn build_network_config( &self, network_builder: NetworkConfigBuilder, - ) -> NetworkConfig { + ) -> NetworkConfig + where + Node::Types: NodeTypes, + { network_builder.build(self.provider.clone()) } } diff --git a/crates/node/builder/src/launch/common.rs b/crates/node/builder/src/launch/common.rs index d6575d000abbe..720f69c184646 100644 --- a/crates/node/builder/src/launch/common.rs +++ b/crates/node/builder/src/launch/common.rs @@ -400,7 +400,7 @@ impl LaunchContextWith LaunchContextWith, DB>> where DB: Database + Clone + 'static, - ChainSpec: EthChainSpec + EthereumHardforks, + ChainSpec: EthChainSpec + EthereumHardforks + 'static, { /// Returns the [`ProviderFactory`] for the attached storage after executing a consistent check /// between the database and static files. 
**It may execute a pipeline unwind if it fails this diff --git a/crates/node/core/src/args/network.rs b/crates/node/core/src/args/network.rs index 650a3dcb05e54..d25ebd8ea1574 100644 --- a/crates/node/core/src/args/network.rs +++ b/crates/node/core/src/args/network.rs @@ -240,7 +240,6 @@ impl NetworkArgs { ) .peer_config(peers_config) .boot_nodes(chain_bootnodes.clone()) - .chain_spec(chain_spec) .transactions_manager_config(transactions_manager_config) // Configure node identity .apply(|builder| { diff --git a/crates/node/types/src/lib.rs b/crates/node/types/src/lib.rs index 2ad2f8abd873b..2c72e02d3edc7 100644 --- a/crates/node/types/src/lib.rs +++ b/crates/node/types/src/lib.rs @@ -121,7 +121,7 @@ impl AnyNodeTypes { impl NodeTypes for AnyNodeTypes where P: NodePrimitives + Send + Sync + Unpin + 'static, - C: EthChainSpec, + C: EthChainSpec + 'static, { type Primitives = P; type ChainSpec = C; @@ -157,7 +157,7 @@ impl NodeTypes for AnyNodeTypesWithEngine where P: NodePrimitives + Send + Sync + Unpin + 'static, E: EngineTypes + Send + Sync + Unpin, - C: EthChainSpec, + C: EthChainSpec + 'static, { type Primitives = P; type ChainSpec = C; @@ -167,7 +167,7 @@ impl NodeTypesWithEngine for AnyNodeTypesWithEngine where P: NodePrimitives + Send + Sync + Unpin + 'static, E: EngineTypes + Send + Sync + Unpin, - C: EthChainSpec, + C: EthChainSpec + 'static, { type Engine = E; } diff --git a/crates/storage/provider/src/providers/database/provider.rs b/crates/storage/provider/src/providers/database/provider.rs index e326ad6e1d135..9bdd1a4600a98 100644 --- a/crates/storage/provider/src/providers/database/provider.rs +++ b/crates/storage/provider/src/providers/database/provider.rs @@ -151,7 +151,9 @@ impl StaticFileProviderFactory for DatabaseProvider { } } -impl ChainSpecProvider for DatabaseProvider { +impl ChainSpecProvider + for DatabaseProvider +{ type ChainSpec = Spec; fn chain_spec(&self) -> Arc { diff --git a/crates/storage/storage-api/src/noop.rs b/crates/storage/storage-api/src/noop.rs index c3c33ac37908f..7325e2b743606 100644 --- a/crates/storage/storage-api/src/noop.rs +++ b/crates/storage/storage-api/src/noop.rs @@ -1,17 +1,28 @@ //! Various noop implementations for traits. +use std::sync::Arc; + use crate::{BlockHashReader, BlockNumReader}; use alloy_primitives::{BlockNumber, B256}; -use reth_chainspec::ChainInfo; +use reth_chainspec::{ChainInfo, ChainSpecProvider, EthChainSpec}; use reth_storage_errors::provider::ProviderResult; /// Supports various api interfaces for testing purposes. -#[derive(Debug, Clone, Default, Copy)] +#[derive(Debug, Clone)] #[non_exhaustive] -pub struct NoopBlockReader; +pub struct NoopBlockReader { + chain_spec: Arc, +} + +impl NoopBlockReader { + /// Create a new instance of the `NoopBlockReader`. 
+ pub const fn new(chain_spec: Arc) -> Self { + Self { chain_spec } + } +} /// Noop implementation for testing purposes -impl BlockHashReader for NoopBlockReader { +impl BlockHashReader for NoopBlockReader { fn block_hash(&self, _number: u64) -> ProviderResult> { Ok(None) } @@ -25,7 +36,7 @@ impl BlockHashReader for NoopBlockReader { } } -impl BlockNumReader for NoopBlockReader { +impl BlockNumReader for NoopBlockReader { fn chain_info(&self) -> ProviderResult { Ok(ChainInfo::default()) } @@ -42,3 +53,11 @@ impl BlockNumReader for NoopBlockReader { Ok(None) } } + +impl ChainSpecProvider for NoopBlockReader { + type ChainSpec = ChainSpec; + + fn chain_spec(&self) -> Arc { + self.chain_spec.clone() + } +} diff --git a/examples/bsc-p2p/src/main.rs b/examples/bsc-p2p/src/main.rs index 7756728aa9df1..e46ea4bec3574 100644 --- a/examples/bsc-p2p/src/main.rs +++ b/examples/bsc-p2p/src/main.rs @@ -49,9 +49,8 @@ async fn main() { // The network configuration let mut net_cfg = NetworkConfig::builder(secret_key) - .chain_spec(bsc_chain_spec()) .listener_addr(local_addr) - .build_with_noop_provider() + .build_with_noop_provider(bsc_chain_spec()) .set_discovery_v4( Discv4ConfigBuilder::default() .add_boot_nodes(boot_nodes()) diff --git a/examples/custom-rlpx-subprotocol/src/main.rs b/examples/custom-rlpx-subprotocol/src/main.rs index 8dc95641e2016..e16f71071c8cc 100644 --- a/examples/custom-rlpx-subprotocol/src/main.rs +++ b/examples/custom-rlpx-subprotocol/src/main.rs @@ -19,7 +19,6 @@ use reth_network::{ }; use reth_network_api::{test_utils::PeersHandleProvider, NetworkInfo}; use reth_node_ethereum::EthereumNode; -use reth_provider::test_utils::NoopProvider; use subprotocol::{ connection::CustomCommand, protocol::{ @@ -51,7 +50,7 @@ fn main() -> eyre::Result<()> { .listener_addr(SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::UNSPECIFIED, 0))) .disable_discovery() .add_rlpx_sub_protocol(custom_rlpx_handler_2.into_rlpx_sub_protocol()) - .build(NoopProvider::default()); + .build_with_noop_provider(node.chain_spec()); // spawn the second network instance let subnetwork = NetworkManager::new(net_cfg).await?; diff --git a/examples/polygon-p2p/src/main.rs b/examples/polygon-p2p/src/main.rs index d93c92cebb02e..6078ae14cb850 100644 --- a/examples/polygon-p2p/src/main.rs +++ b/examples/polygon-p2p/src/main.rs @@ -14,7 +14,6 @@ use reth_discv4::Discv4ConfigBuilder; use reth_network::{ config::NetworkMode, NetworkConfig, NetworkEvent, NetworkEventListenerProvider, NetworkManager, }; -use reth_provider::test_utils::NoopProvider; use reth_tracing::{ tracing::info, tracing_subscriber::filter::LevelFilter, LayerInfo, LogFormat, RethTracer, Tracer, @@ -47,11 +46,10 @@ async fn main() { // The network configuration let net_cfg = NetworkConfig::builder(secret_key) - .chain_spec(polygon_chain_spec()) .set_head(head()) .network_mode(NetworkMode::Work) .listener_addr(local_addr) - .build(NoopProvider::default()); + .build_with_noop_provider(polygon_chain_spec()); // Set Discv4 lookup interval to 1 second let mut discv4_cfg = Discv4ConfigBuilder::default(); From 3a255a1cabd1f19399b42c1fe9bdcc130ac61c2e Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 26 Sep 2024 14:42:58 +0200 Subject: [PATCH 22/84] chore:include payload id in trace (#11249) --- crates/payload/basic/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/payload/basic/src/lib.rs b/crates/payload/basic/src/lib.rs index 7d1f3fb9eada8..f9487ec784ca3 100644 --- a/crates/payload/basic/src/lib.rs +++ 
b/crates/payload/basic/src/lib.rs @@ -351,7 +351,7 @@ where { /// Spawns a new payload build task. fn spawn_build_job(&mut self) { - trace!(target: "payload_builder", "spawn new payload build task"); + trace!(target: "payload_builder", id = %self.config.payload_id(), "spawn new payload build task"); let (tx, rx) = oneshot::channel(); let client = self.client.clone(); let pool = self.pool.clone(); From 6a7d8938c7a4e0e3c79366e25b9c806741f125fe Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 26 Sep 2024 16:33:29 +0200 Subject: [PATCH 23/84] chore: update revm and alloy primitives (#11235) Co-authored-by: Arsenii Kulikov --- Cargo.lock | 34 +++++--- Cargo.toml | 6 +- crates/blockchain-tree/src/block_indices.rs | 6 +- crates/blockchain-tree/src/blockchain_tree.rs | 10 +-- crates/chain-state/src/in_memory.rs | 31 +++---- crates/chain-state/src/memory_overlay.rs | 11 +-- crates/cli/commands/src/db/diff.rs | 2 +- .../engine/invalid-block-hooks/src/witness.rs | 7 +- crates/engine/tree/src/tree/mod.rs | 44 +++++----- crates/ethereum/evm/src/execute.rs | 12 +-- .../execution-types/src/execution_outcome.rs | 6 +- crates/net/discv5/src/config.rs | 2 +- crates/net/downloaders/src/file_client.rs | 6 +- crates/net/eth-wire-types/src/broadcast.rs | 6 +- crates/net/eth-wire/src/capability.rs | 2 +- crates/net/network/src/peers.rs | 7 +- .../net/network/src/transactions/fetcher.rs | 2 +- .../network/src/transactions/validation.rs | 8 +- crates/net/network/tests/it/connect.rs | 5 +- crates/net/p2p/src/full_block.rs | 2 +- crates/net/p2p/src/test_utils/full_block.rs | 4 +- crates/optimism/evm/src/execute.rs | 6 +- crates/optimism/evm/src/l1.rs | 2 +- crates/payload/builder/src/database.rs | 2 +- crates/primitives-traits/src/withdrawal.rs | 2 +- crates/revm/src/batch.rs | 7 +- crates/revm/src/state_change.rs | 18 ++-- crates/revm/src/test_utils.rs | 9 +- crates/rpc/rpc-eth-types/src/cache/db.rs | 10 ++- crates/rpc/rpc/src/debug.rs | 7 +- crates/rpc/rpc/src/eth/filter.rs | 2 +- crates/rpc/rpc/src/reth.rs | 2 +- crates/stages/api/src/pipeline/set.rs | 2 +- crates/stages/stages/src/stages/utils.rs | 2 +- .../src/providers/bundle_state_provider.rs | 6 +- .../src/providers/database/provider.rs | 8 +- .../src/providers/state/historical.rs | 10 +-- .../provider/src/providers/state/latest.rs | 6 +- .../provider/src/providers/state/macros.rs | 4 +- .../storage/provider/src/test_utils/blocks.rs | 11 +-- .../storage/provider/src/test_utils/mock.rs | 7 +- .../storage/provider/src/test_utils/noop.rs | 2 +- crates/storage/provider/src/writer/mod.rs | 85 +++++++++---------- crates/storage/storage-api/src/trie.rs | 6 +- crates/transaction-pool/src/config.rs | 6 +- crates/transaction-pool/src/maintain.rs | 2 +- crates/transaction-pool/src/pool/txpool.rs | 6 +- crates/trie/db/src/proof.rs | 6 +- crates/trie/db/src/state.rs | 7 +- crates/trie/db/src/witness.rs | 3 +- crates/trie/trie/benches/hash_post_state.rs | 5 +- crates/trie/trie/src/proof.rs | 9 +- crates/trie/trie/src/witness.rs | 11 ++- 53 files changed, 254 insertions(+), 230 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0e0a0c44743f9..e55e658dda6a6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -277,9 +277,9 @@ dependencies = [ [[package]] name = "alloy-primitives" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "411aff151f2a73124ee473708e82ed51b2535f68928b6a1caa8bc1246ae6f7cd" +checksum = "260d3ff3bff0bb84599f032a2f2c6828180b0ea0cd41fdaf44f39cef3ba41861" dependencies = [ "alloy-rlp", 
"arbitrary", @@ -289,15 +289,20 @@ dependencies = [ "derive_arbitrary", "derive_more", "getrandom 0.2.15", + "hashbrown 0.14.5", "hex-literal", + "indexmap 2.5.0", "itoa", "k256", "keccak-asm", + "paste", "proptest", "proptest-derive", "rand 0.8.5", "ruint", + "rustc-hash 2.0.0", "serde", + "sha3", "tiny-keccak", ] @@ -3929,6 +3934,7 @@ version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68b900aa2f7301e21c36462b170ee99994de34dff39a4a6a528e80e7376d07e5" dependencies = [ + "arbitrary", "equivalent", "hashbrown 0.14.5", "serde", @@ -5923,6 +5929,7 @@ dependencies = [ "libc", "rand_chacha 0.3.1", "rand_core 0.6.4", + "serde", ] [[package]] @@ -9136,9 +9143,9 @@ dependencies = [ [[package]] name = "revm" -version = "14.0.2" +version = "14.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9f3f55d0414c3d73902d876ba3d55a654f05fe937089fbf5f34b1ced26d78d5" +checksum = "641702b12847f9ed418d552f4fcabe536d867a2c980e96b6e7e25d7b992f929f" dependencies = [ "auto_impl", "cfg-if", @@ -9170,9 +9177,9 @@ dependencies = [ [[package]] name = "revm-interpreter" -version = "10.0.2" +version = "10.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "713dbb271acd13afb06dcd460c1dc43da211e7ac9bc73cdf13528f615f55f96b" +checksum = "2e5e14002afae20b5bf1566f22316122f42f57517000e559c55b25bf7a49cba2" dependencies = [ "revm-primitives", "serde", @@ -9180,9 +9187,9 @@ dependencies = [ [[package]] name = "revm-precompile" -version = "11.0.2" +version = "11.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f73010c271d53fa7904e9845338e95f3955eb1200a0355e0abfdb89c41aaa9cd" +checksum = "3198c06247e8d4ad0d1312591edf049b0de4ddffa9fecb625c318fd67db8639b" dependencies = [ "aurora-engine-modexp", "blst", @@ -9200,11 +9207,12 @@ dependencies = [ [[package]] name = "revm-primitives" -version = "9.0.2" +version = "10.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7a6bff9dbde3370a5ac9555104117f7e6039b3cc76e8d5d9d01899088beca2a" +checksum = "6f1525851a03aff9a9d6a1d018b414d76252d6802ab54695b27093ecd7e7a101" dependencies = [ - "alloy-eips", + "alloy-eip2930", + "alloy-eip7702", "alloy-primitives", "auto_impl", "bitflags 2.6.0", @@ -9213,7 +9221,6 @@ dependencies = [ "cfg-if", "dyn-clone", "enumn", - "hashbrown 0.14.5", "hex", "serde", ] @@ -9359,6 +9366,9 @@ name = "rustc-hash" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152" +dependencies = [ + "rand 0.8.5", +] [[package]] name = "rustc-hex" diff --git a/Cargo.toml b/Cargo.toml index c386abc6f6791..0de42a83ae822 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -408,20 +408,20 @@ reth-trie-db = { path = "crates/trie/db" } reth-trie-parallel = { path = "crates/trie/parallel" } # revm -revm = { version = "14.0.2", features = [ +revm = { version = "14.0.3", features = [ "std", "secp256k1", "blst", ], default-features = false } revm-inspectors = "0.7.6" -revm-primitives = { version = "9.0.2", features = [ +revm-primitives = { version = "10.0.0", features = [ "std", ], default-features = false } # eth alloy-chains = "0.1.32" alloy-dyn-abi = "0.8.0" -alloy-primitives = { version = "0.8.3", default-features = false } +alloy-primitives = { version = "0.8.4", default-features = false } alloy-rlp = "0.3.4" alloy-sol-types = "0.8.0" alloy-trie = { version = "0.5", default-features = false } diff --git 
a/crates/blockchain-tree/src/block_indices.rs b/crates/blockchain-tree/src/block_indices.rs index fb132bdedc4cb..23c63bf6d243a 100644 --- a/crates/blockchain-tree/src/block_indices.rs +++ b/crates/blockchain-tree/src/block_indices.rs @@ -533,7 +533,7 @@ mod tests { block_indices.insert_non_fork_block(block_number_2, block_hash_3, chain_id_2); // Block number 1 should have two block hashes associated with it. - let mut expected_hashes_for_block_1 = HashSet::new(); + let mut expected_hashes_for_block_1 = HashSet::default(); expected_hashes_for_block_1.insert(block_hash_1); expected_hashes_for_block_1.insert(block_hash_2); assert_eq!( @@ -601,11 +601,11 @@ mod tests { assert_eq!(block_indices.blocks_to_chain.get(&block_hash_2), Some(&chain_id)); // Check that block numbers map to their respective hashes. - let mut expected_hashes_1 = HashSet::new(); + let mut expected_hashes_1 = HashSet::default(); expected_hashes_1.insert(block_hash_1); assert_eq!(block_indices.block_number_to_block_hashes.get(&1), Some(&expected_hashes_1)); - let mut expected_hashes_2 = HashSet::new(); + let mut expected_hashes_2 = HashSet::default(); expected_hashes_2.insert(block_hash_2); assert_eq!(block_indices.block_number_to_block_hashes.get(&2), Some(&expected_hashes_2)); diff --git a/crates/blockchain-tree/src/blockchain_tree.rs b/crates/blockchain-tree/src/blockchain_tree.rs index 4efbc740d6824..5d3bffe8968d4 100644 --- a/crates/blockchain-tree/src/blockchain_tree.rs +++ b/crates/blockchain-tree/src/blockchain_tree.rs @@ -589,7 +589,7 @@ where // Find all forks of given block. let mut dependent_block = self.block_indices().fork_to_child().get(block).cloned().unwrap_or_default(); - let mut dependent_chains = HashSet::new(); + let mut dependent_chains = HashSet::default(); while let Some(block) = dependent_block.pop_back() { // Get chain of dependent block. @@ -2180,7 +2180,7 @@ mod tests { (block1.parent_hash, HashSet::from([block1a_hash])), (block1.hash(), HashSet::from([block2.hash()])), ])) - .with_pending_blocks((block2.number + 1, HashSet::new())) + .with_pending_blocks((block2.number + 1, HashSet::default())) .assert(&tree); assert_matches!(tree.make_canonical(block1a_hash), Ok(_)); @@ -2204,7 +2204,7 @@ mod tests { (block1.parent_hash, HashSet::from([block1.hash()])), (block1.hash(), HashSet::from([block2.hash()])), ])) - .with_pending_blocks((block1a.number + 1, HashSet::new())) + .with_pending_blocks((block1a.number + 1, HashSet::default())) .assert(&tree); // check notification. @@ -2241,7 +2241,7 @@ mod tests { (block1.parent_hash, HashSet::from([block1a_hash])), (block1.hash(), HashSet::from([block2a_hash])), ])) - .with_pending_blocks((block2.number + 1, HashSet::new())) + .with_pending_blocks((block2.number + 1, HashSet::default())) .assert(&tree); // check notification. @@ -2310,7 +2310,7 @@ mod tests { .with_chain_num(1) .with_block_to_chain(HashMap::from([(block2a_hash, 4.into())])) .with_fork_to_child(HashMap::from([(block1.hash(), HashSet::from([block2a_hash]))])) - .with_pending_blocks((block2.number + 1, HashSet::new())) + .with_pending_blocks((block2.number + 1, HashSet::default())) .assert(&tree); // check notification. 
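The mechanical change running through this commit is the swap from `std::collections` maps to the `alloy_primitives::map::{HashMap, HashSet}` aliases, visible in the hunks above and below. Below is a minimal sketch of the migrated style, using only constructors that already appear in these hunks (`HashMap::default()` and `HashSet::from_iter`); the `in_memory_indices_example` function is hypothetical, and the remark about the aliases defaulting to a non-std hasher is an assumption, not something the patch states.

use alloy_primitives::{
    map::{HashMap, HashSet},
    B256,
};

fn in_memory_indices_example() {
    // The aliased map types are built through `Default` in the hunks above;
    // `new()` is avoided, presumably because the aliases default to a
    // non-std hasher.
    let mut blocks_by_hash: HashMap<B256, u64> = HashMap::default();
    blocks_by_hash.insert(B256::ZERO, 1);

    // Fixed-size constructors such as `HashSet::from([..])` are replaced with
    // `from_iter`, which is hasher-agnostic.
    let children: HashSet<B256> = HashSet::from_iter([B256::ZERO]);
    assert!(children.contains(&B256::ZERO));
}

If that assumption about the default hasher holds, `new()` and `from([..])` are simply not available on the aliased types, which would explain why every touched call site in this commit moves to `default()` and `from_iter`.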
diff --git a/crates/chain-state/src/in_memory.rs b/crates/chain-state/src/in_memory.rs index 2051fb308cf06..1563dad64759f 100644 --- a/crates/chain-state/src/in_memory.rs +++ b/crates/chain-state/src/in_memory.rs @@ -5,7 +5,7 @@ use crate::{ ChainInfoTracker, MemoryOverlayStateProvider, }; use alloy_eips::BlockNumHash; -use alloy_primitives::{Address, TxHash, B256}; +use alloy_primitives::{map::HashMap, Address, TxHash, B256}; use parking_lot::RwLock; use reth_chainspec::ChainInfo; use reth_execution_types::{Chain, ExecutionOutcome}; @@ -16,11 +16,7 @@ use reth_primitives::{ }; use reth_storage_api::StateProviderBox; use reth_trie::{updates::TrieUpdates, HashedPostState}; -use std::{ - collections::{BTreeMap, HashMap}, - sync::Arc, - time::Instant, -}; +use std::{collections::BTreeMap, sync::Arc, time::Instant}; use tokio::sync::{broadcast, watch}; /// Size of the broadcast channel used to notify canonical state events. @@ -197,7 +193,7 @@ impl CanonicalInMemoryState { /// Create an empty state. pub fn empty() -> Self { - Self::new(HashMap::new(), BTreeMap::new(), None, None) + Self::new(HashMap::default(), BTreeMap::new(), None, None) } /// Create a new in memory state with the given local head and finalized header @@ -843,7 +839,7 @@ impl NewCanonicalChain { mod tests { use super::*; use crate::test_utils::TestBlockBuilder; - use alloy_primitives::{BlockNumber, Bytes, StorageKey, StorageValue}; + use alloy_primitives::{map::HashSet, BlockNumber, Bytes, StorageKey, StorageValue}; use rand::Rng; use reth_errors::ProviderResult; use reth_primitives::{Account, Bytecode, Receipt, Requests}; @@ -852,7 +848,6 @@ mod tests { StorageRootProvider, }; use reth_trie::{AccountProof, HashedStorage, MultiProof, TrieInput}; - use std::collections::HashSet; fn create_mock_state( test_block_builder: &mut TestBlockBuilder, @@ -984,7 +979,7 @@ mod tests { #[test] fn test_in_memory_state_impl_state_by_hash() { - let mut state_by_hash = HashMap::new(); + let mut state_by_hash = HashMap::default(); let number = rand::thread_rng().gen::(); let mut test_block_builder = TestBlockBuilder::default(); let state = Arc::new(create_mock_state(&mut test_block_builder, number, B256::random())); @@ -998,7 +993,7 @@ mod tests { #[test] fn test_in_memory_state_impl_state_by_number() { - let mut state_by_hash = HashMap::new(); + let mut state_by_hash = HashMap::default(); let mut hash_by_number = BTreeMap::new(); let number = rand::thread_rng().gen::(); @@ -1017,7 +1012,7 @@ mod tests { #[test] fn test_in_memory_state_impl_head_state() { - let mut state_by_hash = HashMap::new(); + let mut state_by_hash = HashMap::default(); let mut hash_by_number = BTreeMap::new(); let mut test_block_builder = TestBlockBuilder::default(); let state1 = Arc::new(create_mock_state(&mut test_block_builder, 1, B256::random())); @@ -1045,7 +1040,7 @@ mod tests { let pending_hash = pending_state.hash(); let in_memory_state = - InMemoryState::new(HashMap::new(), BTreeMap::new(), Some(pending_state)); + InMemoryState::new(HashMap::default(), BTreeMap::new(), Some(pending_state)); let result = in_memory_state.pending_state(); assert!(result.is_some()); @@ -1056,7 +1051,7 @@ mod tests { #[test] fn test_in_memory_state_impl_no_pending_state() { - let in_memory_state = InMemoryState::new(HashMap::new(), BTreeMap::new(), None); + let in_memory_state = InMemoryState::new(HashMap::default(), BTreeMap::new(), None); assert_eq!(in_memory_state.pending_state(), None); } @@ -1210,7 +1205,7 @@ mod tests { let state2 = BlockState::with_parent(block2.clone(), 
Some(state1.clone())); let state3 = BlockState::with_parent(block3.clone(), Some(state2.clone())); - let mut blocks = HashMap::new(); + let mut blocks = HashMap::default(); blocks.insert(block1.block().hash(), Arc::new(state1)); blocks.insert(block2.block().hash(), Arc::new(state2)); blocks.insert(block3.block().hash(), Arc::new(state3)); @@ -1257,7 +1252,7 @@ mod tests { fn test_canonical_in_memory_state_canonical_chain_single_block() { let block = TestBlockBuilder::default().get_executed_block_with_number(1, B256::random()); let hash = block.block().hash(); - let mut blocks = HashMap::new(); + let mut blocks = HashMap::default(); blocks.insert(hash, Arc::new(BlockState::new(block))); let mut numbers = BTreeMap::new(); numbers.insert(1, hash); @@ -1272,7 +1267,7 @@ mod tests { #[test] fn test_canonical_in_memory_state_canonical_chain_multiple_blocks() { - let mut blocks = HashMap::new(); + let mut blocks = HashMap::default(); let mut numbers = BTreeMap::new(); let mut parent_hash = B256::random(); let mut block_builder = TestBlockBuilder::default(); @@ -1296,7 +1291,7 @@ mod tests { #[test] fn test_canonical_in_memory_state_canonical_chain_with_pending_block() { - let mut blocks = HashMap::new(); + let mut blocks = HashMap::default(); let mut numbers = BTreeMap::new(); let mut parent_hash = B256::random(); let mut block_builder = TestBlockBuilder::default(); diff --git a/crates/chain-state/src/memory_overlay.rs b/crates/chain-state/src/memory_overlay.rs index 35315fb521580..2712d1259e85b 100644 --- a/crates/chain-state/src/memory_overlay.rs +++ b/crates/chain-state/src/memory_overlay.rs @@ -1,5 +1,9 @@ use super::ExecutedBlock; -use alloy_primitives::{keccak256, Address, BlockNumber, Bytes, StorageKey, StorageValue, B256}; +use alloy_primitives::{ + keccak256, + map::{HashMap, HashSet}, + Address, BlockNumber, Bytes, StorageKey, StorageValue, B256, +}; use reth_errors::ProviderResult; use reth_primitives::{Account, Bytecode}; use reth_storage_api::{ @@ -9,10 +13,7 @@ use reth_storage_api::{ use reth_trie::{ updates::TrieUpdates, AccountProof, HashedPostState, HashedStorage, MultiProof, TrieInput, }; -use std::{ - collections::{HashMap, HashSet}, - sync::OnceLock, -}; +use std::sync::OnceLock; /// A state provider that stores references to in-memory blocks along with their state as well as /// the historical state provider for fallback lookups. diff --git a/crates/cli/commands/src/db/diff.rs b/crates/cli/commands/src/db/diff.rs index 0b7b7790732ab..c1346b3742a30 100644 --- a/crates/cli/commands/src/db/diff.rs +++ b/crates/cli/commands/src/db/diff.rs @@ -267,7 +267,7 @@ where T::Key: Hash, { fn default() -> Self { - Self { discrepancies: HashMap::new(), extra_elements: HashMap::new() } + Self { discrepancies: HashMap::default(), extra_elements: HashMap::default() } } } diff --git a/crates/engine/invalid-block-hooks/src/witness.rs b/crates/engine/invalid-block-hooks/src/witness.rs index 4b15213191db3..06bfee747cc0d 100644 --- a/crates/engine/invalid-block-hooks/src/witness.rs +++ b/crates/engine/invalid-block-hooks/src/witness.rs @@ -133,7 +133,7 @@ where let bundle_state = db.take_bundle(); // Initialize a map of preimages. - let mut state_preimages = HashMap::new(); + let mut state_preimages = HashMap::default(); // Grab all account proofs for the data accessed during block execution. // @@ -170,7 +170,10 @@ where let state = state_provider.witness(Default::default(), hashed_state.clone())?; // Write the witness to the output directory. 
- let response = ExecutionWitness { state, keys: Some(state_preimages) }; + let response = ExecutionWitness { + state: std::collections::HashMap::from_iter(state), + keys: Some(state_preimages), + }; let re_executed_witness_path = self.save_file( format!("{}_{}.witness.re_executed.json", block.number, block.hash()), &response, diff --git a/crates/engine/tree/src/tree/mod.rs b/crates/engine/tree/src/tree/mod.rs index 30c72254552db..4ffd2031f60be 100644 --- a/crates/engine/tree/src/tree/mod.rs +++ b/crates/engine/tree/src/tree/mod.rs @@ -5,7 +5,10 @@ use crate::{ persistence::PersistenceHandle, }; use alloy_eips::BlockNumHash; -use alloy_primitives::{BlockNumber, B256, U256}; +use alloy_primitives::{ + map::{HashMap, HashSet}, + BlockNumber, B256, U256, +}; use alloy_rpc_types_engine::{ CancunPayloadFields, ExecutionPayload, ForkchoiceState, PayloadStatus, PayloadStatusEnum, PayloadValidationError, @@ -43,7 +46,7 @@ use reth_trie::{updates::TrieUpdates, HashedPostState, TrieInput}; use reth_trie_parallel::parallel_root::{ParallelStateRoot, ParallelStateRootError}; use std::{ cmp::Ordering, - collections::{btree_map, hash_map, BTreeMap, HashMap, HashSet, VecDeque}, + collections::{btree_map, hash_map, BTreeMap, VecDeque}, fmt::Debug, ops::Bound, sync::{ @@ -101,11 +104,11 @@ impl TreeState { /// Returns a new, empty tree state that points to the given canonical head. fn new(current_canonical_head: BlockNumHash) -> Self { Self { - blocks_by_hash: HashMap::new(), + blocks_by_hash: HashMap::default(), blocks_by_number: BTreeMap::new(), current_canonical_head, - parent_to_child: HashMap::new(), - persisted_trie_updates: HashMap::new(), + parent_to_child: HashMap::default(), + persisted_trie_updates: HashMap::default(), } } @@ -2697,12 +2700,11 @@ mod tests { } fn with_blocks(mut self, blocks: Vec) -> Self { - let mut blocks_by_hash = HashMap::with_capacity(blocks.len()); + let mut blocks_by_hash = HashMap::default(); let mut blocks_by_number = BTreeMap::new(); - let mut state_by_hash = HashMap::with_capacity(blocks.len()); + let mut state_by_hash = HashMap::default(); let mut hash_by_number = BTreeMap::new(); - let mut parent_to_child: HashMap> = - HashMap::with_capacity(blocks.len()); + let mut parent_to_child: HashMap> = HashMap::default(); let mut parent_hash = B256::ZERO; for block in &blocks { @@ -3158,7 +3160,7 @@ mod tests { assert_eq!( tree_state.parent_to_child.get(&blocks[0].block.hash()), - Some(&HashSet::from([blocks[1].block.hash()])) + Some(&HashSet::from_iter([blocks[1].block.hash()])) ); assert!(!tree_state.parent_to_child.contains_key(&blocks[1].block.hash())); @@ -3167,7 +3169,7 @@ mod tests { assert_eq!( tree_state.parent_to_child.get(&blocks[1].block.hash()), - Some(&HashSet::from([blocks[2].block.hash()])) + Some(&HashSet::from_iter([blocks[2].block.hash()])) ); assert!(tree_state.parent_to_child.contains_key(&blocks[1].block.hash())); @@ -3255,11 +3257,11 @@ mod tests { assert_eq!( tree_state.parent_to_child.get(&blocks[2].block.hash()), - Some(&HashSet::from([blocks[3].block.hash()])) + Some(&HashSet::from_iter([blocks[3].block.hash()])) ); assert_eq!( tree_state.parent_to_child.get(&blocks[3].block.hash()), - Some(&HashSet::from([blocks[4].block.hash()])) + Some(&HashSet::from_iter([blocks[4].block.hash()])) ); } @@ -3305,11 +3307,11 @@ mod tests { assert_eq!( tree_state.parent_to_child.get(&blocks[2].block.hash()), - Some(&HashSet::from([blocks[3].block.hash()])) + Some(&HashSet::from_iter([blocks[3].block.hash()])) ); assert_eq!( 
tree_state.parent_to_child.get(&blocks[3].block.hash()), - Some(&HashSet::from([blocks[4].block.hash()])) + Some(&HashSet::from_iter([blocks[4].block.hash()])) ); } @@ -3355,11 +3357,11 @@ mod tests { assert_eq!( tree_state.parent_to_child.get(&blocks[2].block.hash()), - Some(&HashSet::from([blocks[3].block.hash()])) + Some(&HashSet::from_iter([blocks[3].block.hash()])) ); assert_eq!( tree_state.parent_to_child.get(&blocks[3].block.hash()), - Some(&HashSet::from([blocks[4].block.hash()])) + Some(&HashSet::from_iter([blocks[4].block.hash()])) ); } @@ -3546,7 +3548,7 @@ mod tests { let event = test_harness.from_tree_rx.recv().await.unwrap(); match event { EngineApiEvent::Download(DownloadRequest::BlockSet(actual_block_set)) => { - let expected_block_set = HashSet::from([missing_block.hash()]); + let expected_block_set = HashSet::from_iter([missing_block.hash()]); assert_eq!(actual_block_set, expected_block_set); } _ => panic!("Unexpected event: {:#?}", event), @@ -3641,7 +3643,7 @@ mod tests { let event = test_harness.from_tree_rx.recv().await.unwrap(); match event { EngineApiEvent::Download(DownloadRequest::BlockSet(hash_set)) => { - assert_eq!(hash_set, HashSet::from([main_chain_last_hash])); + assert_eq!(hash_set, HashSet::from_iter([main_chain_last_hash])); } _ => panic!("Unexpected event: {:#?}", event), } @@ -3704,7 +3706,7 @@ mod tests { let event = test_harness.from_tree_rx.recv().await.unwrap(); match event { EngineApiEvent::Download(DownloadRequest::BlockSet(hash_set)) => { - assert_eq!(hash_set, HashSet::from([main_chain_backfill_target_hash])); + assert_eq!(hash_set, HashSet::from_iter([main_chain_backfill_target_hash])); } _ => panic!("Unexpected event: {:#?}", event), } @@ -3749,7 +3751,7 @@ mod tests { let event = test_harness.from_tree_rx.recv().await.unwrap(); match event { EngineApiEvent::Download(DownloadRequest::BlockSet(target_hash)) => { - assert_eq!(target_hash, HashSet::from([main_chain_last_hash])); + assert_eq!(target_hash, HashSet::from_iter([main_chain_last_hash])); } _ => panic!("Unexpected event: {:#?}", event), } diff --git a/crates/ethereum/evm/src/execute.rs b/crates/ethereum/evm/src/execute.rs index 7af3f2e1d4812..67a44e0a3570b 100644 --- a/crates/ethereum/evm/src/execute.rs +++ b/crates/ethereum/evm/src/execute.rs @@ -505,7 +505,7 @@ mod tests { BEACON_ROOTS_ADDRESS, beacon_root_contract_account, Some(BEACON_ROOTS_CODE.clone()), - HashMap::new(), + HashMap::default(), ); db @@ -524,7 +524,7 @@ mod tests { WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS, withdrawal_requests_contract_account, Some(WITHDRAWAL_REQUEST_PREDEPLOY_CODE.clone()), - HashMap::new(), + HashMap::default(), ); db @@ -685,7 +685,7 @@ mod tests { let mut db = create_state_provider_with_beacon_root_contract(); // insert an empty SYSTEM_ADDRESS - db.insert_account(SYSTEM_ADDRESS, Account::default(), None, HashMap::new()); + db.insert_account(SYSTEM_ADDRESS, Account::default(), None, HashMap::default()); let chain_spec = Arc::new( ChainSpecBuilder::from(&*MAINNET) @@ -883,7 +883,7 @@ mod tests { HISTORY_STORAGE_ADDRESS, blockhashes_contract_account, Some(HISTORY_STORAGE_CODE.clone()), - HashMap::new(), + HashMap::default(), ); db @@ -1230,7 +1230,7 @@ mod tests { sender_address, Account { nonce: 1, balance: U256::from(ETH_TO_WEI), bytecode_hash: None }, None, - HashMap::new(), + HashMap::default(), ); // https://github.com/lightclient/7002asm/blob/e0d68e04d15f25057af7b6d180423d94b6b3bdb3/test/Contract.t.sol.in#L49-L64 @@ -1313,7 +1313,7 @@ mod tests { sender_address, Account { nonce: 1, balance: 
U256::from(ETH_TO_WEI), bytecode_hash: None }, None, - HashMap::new(), + HashMap::default(), ); // Define the validator public key and withdrawal amount as fixed bytes diff --git a/crates/evm/execution-types/src/execution_outcome.rs b/crates/evm/execution-types/src/execution_outcome.rs index 3b21f36dd13cd..08ddf9e4167be 100644 --- a/crates/evm/execution-types/src/execution_outcome.rs +++ b/crates/evm/execution-types/src/execution_outcome.rs @@ -436,16 +436,16 @@ mod tests { ); // Create a BundleStateInit object and insert initial data - let mut state_init: BundleStateInit = HashMap::new(); + let mut state_init: BundleStateInit = HashMap::default(); state_init .insert(Address::new([2; 20]), (None, Some(Account::default()), HashMap::default())); // Create a HashMap for account reverts and insert initial data - let mut revert_inner: HashMap = HashMap::new(); + let mut revert_inner: HashMap = HashMap::default(); revert_inner.insert(Address::new([2; 20]), (None, vec![])); // Create a RevertsInit object and insert the revert_inner data - let mut revert_init: RevertsInit = HashMap::new(); + let mut revert_init: RevertsInit = HashMap::default(); revert_init.insert(123, revert_inner); // Assert that creating a new ExecutionOutcome using the new_init method matches diff --git a/crates/net/discv5/src/config.rs b/crates/net/discv5/src/config.rs index 4713fb8225bde..0684c263b8c71 100644 --- a/crates/net/discv5/src/config.rs +++ b/crates/net/discv5/src/config.rs @@ -297,7 +297,7 @@ impl Config { pub fn builder(rlpx_tcp_socket: SocketAddr) -> ConfigBuilder { ConfigBuilder { discv5_config: None, - bootstrap_nodes: HashSet::new(), + bootstrap_nodes: HashSet::default(), fork: None, tcp_socket: rlpx_tcp_socket, other_enr_kv_pairs: Vec::new(), diff --git a/crates/net/downloaders/src/file_client.rs b/crates/net/downloaders/src/file_client.rs index d618de5d5af64..6ac2058e4ec4e 100644 --- a/crates/net/downloaders/src/file_client.rs +++ b/crates/net/downloaders/src/file_client.rs @@ -196,9 +196,9 @@ impl FromReader for FileClient { where B: AsyncReadExt + Unpin, { - let mut headers = HashMap::new(); - let mut hash_to_number = HashMap::new(); - let mut bodies = HashMap::new(); + let mut headers = HashMap::default(); + let mut hash_to_number = HashMap::default(); + let mut bodies = HashMap::default(); // use with_capacity to make sure the internal buffer contains the entire chunk let mut stream = FramedRead::with_capacity(reader, BlockFileCodec, num_bytes as usize); diff --git a/crates/net/eth-wire-types/src/broadcast.rs b/crates/net/eth-wire-types/src/broadcast.rs index 9afd244ad2b5a..2ef6083a5001c 100644 --- a/crates/net/eth-wire-types/src/broadcast.rs +++ b/crates/net/eth-wire-types/src/broadcast.rs @@ -618,13 +618,13 @@ impl PartiallyValidData { /// Returns a new [`PartiallyValidData`] with empty data from an [`Eth68`](EthVersion::Eth68) /// announcement. pub fn empty_eth68() -> Self { - Self::from_raw_data_eth68(HashMap::new()) + Self::from_raw_data_eth68(HashMap::default()) } /// Returns a new [`PartiallyValidData`] with empty data from an [`Eth66`](EthVersion::Eth66) /// announcement. pub fn empty_eth66() -> Self { - Self::from_raw_data_eth66(HashMap::new()) + Self::from_raw_data_eth66(HashMap::default()) } /// Returns the version of the message this data was received in if different versions of the @@ -704,7 +704,7 @@ impl RequestTxHashes { /// Returns an new empty instance. 
fn empty() -> Self { - Self::new(HashSet::new()) + Self::new(HashSet::default()) } /// Retains the given number of elements, returning and iterator over the rest. diff --git a/crates/net/eth-wire/src/capability.rs b/crates/net/eth-wire/src/capability.rs index 2a2f43d346ce7..d60e500744c0b 100644 --- a/crates/net/eth-wire/src/capability.rs +++ b/crates/net/eth-wire/src/capability.rs @@ -291,7 +291,7 @@ pub fn shared_capability_offsets( local_protocols.into_iter().map(Protocol::split).collect::>(); // map of capability name to version - let mut shared_capabilities: HashMap<_, ProtoVersion> = HashMap::new(); + let mut shared_capabilities: HashMap<_, ProtoVersion> = HashMap::default(); // The `Ord` implementation for capability names should be equivalent to geth (and every other // client), since geth uses golang's default string comparison, which orders strings diff --git a/crates/net/network/src/peers.rs b/crates/net/network/src/peers.rs index bc2b1189774b5..19b7910f6abe0 100644 --- a/crates/net/network/src/peers.rs +++ b/crates/net/network/src/peers.rs @@ -1090,7 +1090,6 @@ impl Display for InboundConnectionError { #[cfg(test)] mod tests { use std::{ - collections::HashSet, future::{poll_fn, Future}, io, net::{IpAddr, Ipv4Addr, SocketAddr}, @@ -1968,7 +1967,7 @@ mod tests { async fn test_discovery_ban_list() { let ip = IpAddr::V4(Ipv4Addr::new(127, 0, 1, 2)); let socket_addr = SocketAddr::new(ip, 8008); - let ban_list = BanList::new(HashSet::new(), vec![ip]); + let ban_list = BanList::new(vec![], vec![ip]); let config = PeersConfig::default().with_ban_list(ban_list); let mut peer_manager = PeersManager::new(config); peer_manager.add_peer(B512::default(), PeerAddr::from_tcp(socket_addr), None); @@ -1980,7 +1979,7 @@ mod tests { async fn test_on_pending_ban_list() { let ip = IpAddr::V4(Ipv4Addr::new(127, 0, 1, 2)); let socket_addr = SocketAddr::new(ip, 8008); - let ban_list = BanList::new(HashSet::new(), vec![ip]); + let ban_list = BanList::new(vec![], vec![ip]); let config = PeersConfig::test().with_ban_list(ban_list); let mut peer_manager = PeersManager::new(config); let a = peer_manager.on_incoming_pending_session(socket_addr.ip()); @@ -2001,7 +2000,7 @@ mod tests { let ip = IpAddr::V4(Ipv4Addr::new(127, 0, 1, 2)); let socket_addr = SocketAddr::new(ip, 8008); let given_peer_id = PeerId::random(); - let ban_list = BanList::new(vec![given_peer_id], HashSet::new()); + let ban_list = BanList::new(vec![given_peer_id], vec![]); let config = PeersConfig::test().with_ban_list(ban_list); let mut peer_manager = PeersManager::new(config); assert!(peer_manager.on_incoming_pending_session(socket_addr.ip()).is_ok()); diff --git a/crates/net/network/src/transactions/fetcher.rs b/crates/net/network/src/transactions/fetcher.rs index 45dfb3319d60f..e15972df08b28 100644 --- a/crates/net/network/src/transactions/fetcher.rs +++ b/crates/net/network/src/transactions/fetcher.rs @@ -1459,7 +1459,7 @@ mod test { for hash in &seen_hashes { peer_2_data.seen_transactions.insert(*hash); } - let mut peers = HashMap::new(); + let mut peers = HashMap::default(); peers.insert(peer_1, peer_1_data); peers.insert(peer_2, peer_2_data); diff --git a/crates/net/network/src/transactions/validation.rs b/crates/net/network/src/transactions/validation.rs index f7f7b8011bc15..4038f23e85ce8 100644 --- a/crates/net/network/src/transactions/validation.rs +++ b/crates/net/network/src/transactions/validation.rs @@ -386,7 +386,7 @@ mod test { assert_eq!(outcome, FilterOutcome::ReportPeer); - let mut expected_data = HashMap::new(); + let 
mut expected_data = HashMap::default(); expected_data.insert(hashes[1], Some((types[1], sizes[1]))); assert_eq!(expected_data, valid_data.into_data()) @@ -426,7 +426,7 @@ mod test { assert_eq!(outcome, FilterOutcome::Ok); - let mut expected_data = HashMap::new(); + let mut expected_data = HashMap::default(); expected_data.insert(hashes[2], Some((types[2], sizes[2]))); assert_eq!(expected_data, valid_data.into_data()) @@ -465,7 +465,7 @@ mod test { assert_eq!(outcome, FilterOutcome::ReportPeer); - let mut expected_data = HashMap::new(); + let mut expected_data = HashMap::default(); expected_data.insert(hashes[3], Some((types[3], sizes[3]))); expected_data.insert(hashes[0], Some((types[0], sizes[0]))); @@ -509,7 +509,7 @@ mod test { assert_eq!(outcome, FilterOutcome::ReportPeer); - let mut expected_data = HashMap::new(); + let mut expected_data = HashMap::default(); expected_data.insert(hashes[1], None); expected_data.insert(hashes[0], None); diff --git a/crates/net/network/tests/it/connect.rs b/crates/net/network/tests/it/connect.rs index 5d91049b92afa..ec891e5b39a9a 100644 --- a/crates/net/network/tests/it/connect.rs +++ b/crates/net/network/tests/it/connect.rs @@ -1,8 +1,9 @@ //! Connection tests -use std::{collections::HashSet, net::SocketAddr, time::Duration}; +use std::{net::SocketAddr, time::Duration}; use alloy_node_bindings::Geth; +use alloy_primitives::map::HashSet; use alloy_provider::{ext::AdminApi, ProviderBuilder}; use futures::StreamExt; use reth_chainspec::MAINNET; @@ -328,7 +329,7 @@ async fn test_incoming_node_id_blacklist() { let enr = provider.node_info().await.unwrap().enr; let geth_peer_id = enr_to_peer_id(enr.parse().unwrap()); - let ban_list = BanList::new(vec![geth_peer_id], HashSet::new()); + let ban_list = BanList::new(vec![geth_peer_id], vec![]); let peer_config = PeersConfig::default().with_ban_list(ban_list); let config = NetworkConfigBuilder::new(secret_key) diff --git a/crates/net/p2p/src/full_block.rs b/crates/net/p2p/src/full_block.rs index 609df9b6c826d..ce6f58fcd98bc 100644 --- a/crates/net/p2p/src/full_block.rs +++ b/crates/net/p2p/src/full_block.rs @@ -96,7 +96,7 @@ where client, headers: None, pending_headers: VecDeque::new(), - bodies: HashMap::new(), + bodies: HashMap::default(), consensus: Arc::clone(&self.consensus), } } diff --git a/crates/net/p2p/src/test_utils/full_block.rs b/crates/net/p2p/src/test_utils/full_block.rs index 1372ccc2c142c..acc01a60ef804 100644 --- a/crates/net/p2p/src/test_utils/full_block.rs +++ b/crates/net/p2p/src/test_utils/full_block.rs @@ -106,8 +106,8 @@ pub struct TestFullBlockClient { impl Default for TestFullBlockClient { fn default() -> Self { Self { - headers: Arc::new(Mutex::new(HashMap::new())), - bodies: Arc::new(Mutex::new(HashMap::new())), + headers: Arc::new(Mutex::new(HashMap::default())), + bodies: Arc::new(Mutex::new(HashMap::default())), soft_limit: 20, } } diff --git a/crates/optimism/evm/src/execute.rs b/crates/optimism/evm/src/execute.rs index d502abe6abf60..e855b38fbdcfb 100644 --- a/crates/optimism/evm/src/execute.rs +++ b/crates/optimism/evm/src/execute.rs @@ -482,7 +482,7 @@ mod tests { let l1_block_contract_account = Account { balance: U256::ZERO, bytecode_hash: None, nonce: 1 }; - let mut l1_block_storage = HashMap::with_capacity(4); + let mut l1_block_storage = HashMap::default(); // base fee l1_block_storage.insert(StorageKey::with_last_byte(1), StorageValue::from(1000000000)); // l1 fee overhead @@ -529,7 +529,7 @@ mod tests { let addr = Address::ZERO; let account = Account { balance: U256::MAX, 
..Account::default() }; - db.insert_account(addr, account, None, HashMap::new()); + db.insert_account(addr, account, None, HashMap::default()); let chain_spec = Arc::new( ChainSpecBuilder::from(&Arc::new(BASE_MAINNET.inner.clone())) @@ -613,7 +613,7 @@ mod tests { let addr = Address::ZERO; let account = Account { balance: U256::MAX, ..Account::default() }; - db.insert_account(addr, account, None, HashMap::new()); + db.insert_account(addr, account, None, HashMap::default()); let chain_spec = Arc::new( ChainSpecBuilder::from(&Arc::new(BASE_MAINNET.inner.clone())) diff --git a/crates/optimism/evm/src/l1.rs b/crates/optimism/evm/src/l1.rs index 081e21d803377..717bf833f9667 100644 --- a/crates/optimism/evm/src/l1.rs +++ b/crates/optimism/evm/src/l1.rs @@ -289,7 +289,7 @@ where revm_acc.mark_touch(); // Commit the create2 deployer account to the database. - db.commit(HashMap::from([(CREATE_2_DEPLOYER_ADDR, revm_acc)])); + db.commit(HashMap::from_iter([(CREATE_2_DEPLOYER_ADDR, revm_acc)])); return Ok(()) } diff --git a/crates/payload/builder/src/database.rs b/crates/payload/builder/src/database.rs index 6abaed3424087..ea1ae08543487 100644 --- a/crates/payload/builder/src/database.rs +++ b/crates/payload/builder/src/database.rs @@ -158,6 +158,6 @@ struct CachedAccount { impl CachedAccount { fn new(info: Option) -> Self { - Self { info, storage: HashMap::new() } + Self { info, storage: HashMap::default() } } } diff --git a/crates/primitives-traits/src/withdrawal.rs b/crates/primitives-traits/src/withdrawal.rs index 995e60292c6e1..f6b0607e7f032 100644 --- a/crates/primitives-traits/src/withdrawal.rs +++ b/crates/primitives-traits/src/withdrawal.rs @@ -133,7 +133,7 @@ mod tests { // #[test] fn test_withdrawal_serde_roundtrip() { - let input = r#"[{"index":"0x0","validatorIndex":"0x0","address":"0x0000000000000000000000000000000000001000","amount":"0x1"},{"index":"0x1","validatorIndex":"0x1","address":"0x0000000000000000000000000000000000001001","amount":"0x1"},{"index":"0x2","validatorIndex":"0x2","address":"0x0000000000000000000000000000000000001002","amount":"0x1"},{"index":"0x3","validatorIndex":"0x3","address":"0x0000000000000000000000000000000000001003","amount":"0x1"},{"index":"0x4","validatorIndex":"0x4","address":"0x0000000000000000000000000000000000001004","amount":"0x1"},{"index":"0x5","validatorIndex":"0x5","address":"0x0000000000000000000000000000000000001005","amount":"0x1"},{"index":"0x6","validatorIndex":"0x6","address":"0x0000000000000000000000000000000000001006","amount":"0x1"},{"index":"0x7","validatorIndex":"0x7","address":"0x0000000000000000000000000000000000001007","amount":"0x1"},{"index":"0x8","validatorIndex":"0x8","address":"0x0000000000000000000000000000000000001008","amount":"0x1"},{"index":"0x9","validatorIndex":"0x9","address":"0x0000000000000000000000000000000000001009","amount":"0x1"},{"index":"0xa","validatorIndex":"0xa","address":"0x000000000000000000000000000000000000100a","amount":"0x1"},{"index":"0xb","validatorIndex":"0xb","address":"0x000000000000000000000000000000000000100b","amount":"0x1"},{"index":"0xc","validatorIndex":"0xc","address":"0x000000000000000000000000000000000000100c","amount":"0x1"},{"index":"0xd","validatorIndex":"0xd","address":"0x000000000000000000000000000000000000100d","amount":"0x1"},{"index":"0xe","validatorIndex":"0xe","address":"0x000000000000000000000000000000000000100e","amount":"0x1"},{"index":"0xf","validatorIndex":"0xf","address":"0x000000000000000000000000000000000000100f","amount":"0x1"}]"#; + let input = 
r#"[{"index":"0x0","validatorIndex":"0x0","address":"0x0000000000000000000000000000000000001000","amount":"0x1"},{"index":"0x1","validatorIndex":"0x1","address":"0x0000000000000000000000000000000000001001","amount":"0x1"},{"index":"0x2","validatorIndex":"0x2","address":"0x0000000000000000000000000000000000001002","amount":"0x1"},{"index":"0x3","validatorIndex":"0x3","address":"0x0000000000000000000000000000000000001003","amount":"0x1"},{"index":"0x4","validatorIndex":"0x4","address":"0x0000000000000000000000000000000000001004","amount":"0x1"},{"index":"0x5","validatorIndex":"0x5","address":"0x0000000000000000000000000000000000001005","amount":"0x1"},{"index":"0x6","validatorIndex":"0x6","address":"0x0000000000000000000000000000000000001006","amount":"0x1"},{"index":"0x7","validatorIndex":"0x7","address":"0x0000000000000000000000000000000000001007","amount":"0x1"},{"index":"0x8","validatorIndex":"0x8","address":"0x0000000000000000000000000000000000001008","amount":"0x1"},{"index":"0x9","validatorIndex":"0x9","address":"0x0000000000000000000000000000000000001009","amount":"0x1"},{"index":"0xa","validatorIndex":"0xa","address":"0x000000000000000000000000000000000000100A","amount":"0x1"},{"index":"0xb","validatorIndex":"0xb","address":"0x000000000000000000000000000000000000100b","amount":"0x1"},{"index":"0xc","validatorIndex":"0xc","address":"0x000000000000000000000000000000000000100C","amount":"0x1"},{"index":"0xd","validatorIndex":"0xd","address":"0x000000000000000000000000000000000000100D","amount":"0x1"},{"index":"0xe","validatorIndex":"0xe","address":"0x000000000000000000000000000000000000100e","amount":"0x1"},{"index":"0xf","validatorIndex":"0xf","address":"0x000000000000000000000000000000000000100f","amount":"0x1"}]"#; let withdrawals: Vec = serde_json::from_str(input).unwrap(); let s = serde_json::to_string(&withdrawals).unwrap(); diff --git a/crates/revm/src/batch.rs b/crates/revm/src/batch.rs index 7c8e0f29b5d71..a63681aa1321c 100644 --- a/crates/revm/src/batch.rs +++ b/crates/revm/src/batch.rs @@ -1,10 +1,7 @@ //! Helper for handling execution of multiple blocks. 
-use crate::{ - precompile::{Address, HashSet}, - primitives::alloy_primitives::BlockNumber, -}; use alloc::vec::Vec; +use alloy_primitives::{map::HashSet, Address, BlockNumber}; use reth_execution_errors::{BlockExecutionError, InternalBlockExecutionError}; use reth_primitives::{Receipt, Receipts, Request, Requests}; use reth_prune_types::{PruneMode, PruneModes, PruneSegmentError, MINIMUM_PRUNING_DISTANCE}; @@ -152,7 +149,7 @@ impl BlockBatchRecord { if !contract_log_pruner.is_empty() { let (prev_block, filter) = - self.pruning_address_filter.get_or_insert_with(|| (0, HashSet::new())); + self.pruning_address_filter.get_or_insert_with(|| (0, Default::default())); for (_, addresses) in contract_log_pruner.range(*prev_block..=block_number) { filter.extend(addresses.iter().copied()); } diff --git a/crates/revm/src/state_change.rs b/crates/revm/src/state_change.rs index 8799b8c70eb24..bd967a23d31db 100644 --- a/crates/revm/src/state_change.rs +++ b/crates/revm/src/state_change.rs @@ -1,5 +1,4 @@ -use crate::precompile::HashMap; -use alloy_primitives::Address; +use alloy_primitives::{map::HashMap, Address}; use reth_chainspec::{ChainSpec, EthereumHardforks}; use reth_consensus_common::calc; use reth_primitives::{Block, Withdrawal, Withdrawals, U256}; @@ -14,7 +13,7 @@ pub fn post_block_balance_increments( block: &Block, total_difficulty: U256, ) -> HashMap { - let mut balance_increments = HashMap::new(); + let mut balance_increments = HashMap::default(); // Add block rewards if they are enabled. if let Some(base_block_reward) = @@ -52,7 +51,8 @@ pub fn post_block_withdrawals_balance_increments( block_timestamp: u64, withdrawals: &[Withdrawal], ) -> HashMap { - let mut balance_increments = HashMap::with_capacity(withdrawals.len()); + let mut balance_increments = + HashMap::with_capacity_and_hasher(withdrawals.len(), Default::default()); insert_post_block_withdrawals_balance_increments( chain_spec, block_timestamp, @@ -124,7 +124,7 @@ mod tests { ]; // Create an empty HashMap to hold the balance increments - let mut balance_increments = HashMap::new(); + let mut balance_increments = HashMap::default(); // Act // Call the function with the prepared inputs @@ -168,7 +168,7 @@ mod tests { let withdrawals = Vec::::new(); // Create an empty HashMap to hold the balance increments - let mut balance_increments = HashMap::new(); + let mut balance_increments = HashMap::default(); // Act // Call the function with the prepared inputs @@ -211,7 +211,7 @@ mod tests { ]; // Create an empty HashMap to hold the balance increments - let mut balance_increments = HashMap::new(); + let mut balance_increments = HashMap::default(); // Act // Call the function with the prepared inputs @@ -260,7 +260,7 @@ mod tests { ]; // Create an empty HashMap to hold the balance increments - let mut balance_increments = HashMap::new(); + let mut balance_increments = HashMap::default(); // Act // Call the function with the prepared inputs @@ -296,7 +296,7 @@ mod tests { let withdrawals = None; // No withdrawals provided // Create an empty HashMap to hold the balance increments - let mut balance_increments = HashMap::new(); + let mut balance_increments = HashMap::default(); // Act // Call the function with the prepared inputs diff --git a/crates/revm/src/test_utils.rs b/crates/revm/src/test_utils.rs index bbdc196cd5282..bbb45687353bc 100644 --- a/crates/revm/src/test_utils.rs +++ b/crates/revm/src/test_utils.rs @@ -1,8 +1,9 @@ -use std::collections::HashSet; - -use crate::precompile::HashMap; use alloc::vec::Vec; -use 
alloy_primitives::{keccak256, Address, BlockNumber, StorageKey}; +use alloy_primitives::{ + keccak256, + map::{HashMap, HashSet}, + Address, BlockNumber, StorageKey, +}; use reth_primitives::{Account, Bytecode, Bytes, B256, U256}; use reth_storage_api::{ AccountReader, BlockHashReader, StateProofProvider, StateProvider, StateRootProvider, diff --git a/crates/rpc/rpc-eth-types/src/cache/db.rs b/crates/rpc/rpc-eth-types/src/cache/db.rs index 10ab9a5fe8bc9..ad9804893a706 100644 --- a/crates/rpc/rpc-eth-types/src/cache/db.rs +++ b/crates/rpc/rpc-eth-types/src/cache/db.rs @@ -2,7 +2,10 @@ //! in default implementation of //! `reth_rpc_eth_api::helpers::Call`. -use alloy_primitives::{Address, B256, U256}; +use alloy_primitives::{ + map::{HashMap, HashSet}, + Address, B256, U256, +}; use reth_errors::ProviderResult; use reth_revm::{database::StateProviderDatabase, db::CacheDB, DatabaseRef}; use reth_storage_api::StateProvider; @@ -70,7 +73,7 @@ impl<'a> reth_storage_api::StateProofProvider for StateProviderTraitObjWrapper<' fn multiproof( &self, input: reth_trie::TrieInput, - targets: std::collections::HashMap>, + targets: HashMap>, ) -> ProviderResult { self.0.multiproof(input, targets) } @@ -79,7 +82,8 @@ impl<'a> reth_storage_api::StateProofProvider for StateProviderTraitObjWrapper<' &self, input: reth_trie::TrieInput, target: reth_trie::HashedPostState, - ) -> reth_errors::ProviderResult> { + ) -> reth_errors::ProviderResult> + { self.0.witness(input, target) } } diff --git a/crates/rpc/rpc/src/debug.rs b/crates/rpc/rpc/src/debug.rs index 9e8ff892ccd86..9adddbacdcc98 100644 --- a/crates/rpc/rpc/src/debug.rs +++ b/crates/rpc/rpc/src/debug.rs @@ -621,7 +621,7 @@ where let block_executor = this.inner.block_executor.executor(db); let mut hashed_state = HashedPostState::default(); - let mut keys = HashMap::new(); + let mut keys = HashMap::default(); let _ = block_executor .execute_with_state_witness( (&block.clone().unseal(), block.difficulty).into(), @@ -666,7 +666,10 @@ where let state = state_provider.witness(Default::default(), hashed_state).map_err(Into::into)?; - Ok(ExecutionWitness { state, keys: include_preimages.then_some(keys) }) + Ok(ExecutionWitness { + state: std::collections::HashMap::from_iter(state.into_iter()), + keys: include_preimages.then_some(keys), + }) }) .await } diff --git a/crates/rpc/rpc/src/eth/filter.rs b/crates/rpc/rpc/src/eth/filter.rs index 23bf0b58fc559..9c6562606b746 100644 --- a/crates/rpc/rpc/src/eth/filter.rs +++ b/crates/rpc/rpc/src/eth/filter.rs @@ -551,7 +551,7 @@ pub struct ActiveFilters { impl ActiveFilters { /// Returns an empty instance. 
pub fn new() -> Self { - Self { inner: Arc::new(Mutex::new(HashMap::new())) } + Self { inner: Arc::new(Mutex::new(HashMap::default())) } } } diff --git a/crates/rpc/rpc/src/reth.rs b/crates/rpc/rpc/src/reth.rs index 3e8e842aeda35..6d5897df1315f 100644 --- a/crates/rpc/rpc/src/reth.rs +++ b/crates/rpc/rpc/src/reth.rs @@ -71,7 +71,7 @@ where let state = self.provider().state_by_block_id(block_id)?; let accounts_before = self.provider().account_block_changeset(block_number)?; let hash_map = accounts_before.iter().try_fold( - HashMap::new(), + HashMap::default(), |mut hash_map, account_before| -> RethResult<_> { let current_balance = state.account_balance(account_before.address)?; let prev_balance = account_before.info.map(|info| info.balance); diff --git a/crates/stages/api/src/pipeline/set.rs b/crates/stages/api/src/pipeline/set.rs index d3a9b17893d74..c8fbf4c71d8eb 100644 --- a/crates/stages/api/src/pipeline/set.rs +++ b/crates/stages/api/src/pipeline/set.rs @@ -51,7 +51,7 @@ pub struct StageSetBuilder { impl Default for StageSetBuilder { fn default() -> Self { - Self { stages: HashMap::new(), order: Vec::new() } + Self { stages: HashMap::default(), order: Vec::new() } } } diff --git a/crates/stages/stages/src/stages/utils.rs b/crates/stages/stages/src/stages/utils.rs index c16886e0bdca2..7cdab4ff24489 100644 --- a/crates/stages/stages/src/stages/utils.rs +++ b/crates/stages/stages/src/stages/utils.rs @@ -51,7 +51,7 @@ where let mut changeset_cursor = provider.tx_ref().cursor_read::()?; let mut collector = Collector::new(etl_config.file_size, etl_config.dir.clone()); - let mut cache: HashMap> = HashMap::new(); + let mut cache: HashMap> = HashMap::default(); let mut collect = |cache: &HashMap>| { for (key, indice_list) in cache { diff --git a/crates/storage/provider/src/providers/bundle_state_provider.rs b/crates/storage/provider/src/providers/bundle_state_provider.rs index 50d3d46cef1f3..6fe3fa85cb895 100644 --- a/crates/storage/provider/src/providers/bundle_state_provider.rs +++ b/crates/storage/provider/src/providers/bundle_state_provider.rs @@ -1,14 +1,16 @@ use crate::{ AccountReader, BlockHashReader, ExecutionDataProvider, StateProvider, StateRootProvider, }; -use alloy_primitives::{Address, BlockNumber, Bytes, B256}; +use alloy_primitives::{ + map::{HashMap, HashSet}, + Address, BlockNumber, Bytes, B256, +}; use reth_primitives::{Account, Bytecode}; use reth_storage_api::{StateProofProvider, StorageRootProvider}; use reth_storage_errors::provider::ProviderResult; use reth_trie::{ updates::TrieUpdates, AccountProof, HashedPostState, HashedStorage, MultiProof, TrieInput, }; -use std::collections::{HashMap, HashSet}; /// A state provider that resolves to data from either a wrapped [`crate::ExecutionOutcome`] /// or an underlying state provider. diff --git a/crates/storage/provider/src/providers/database/provider.rs b/crates/storage/provider/src/providers/database/provider.rs index 9bdd1a4600a98..6e026f5c910ab 100644 --- a/crates/storage/provider/src/providers/database/provider.rs +++ b/crates/storage/provider/src/providers/database/provider.rs @@ -913,14 +913,14 @@ impl DatabaseProvider { // iterate previous value and get plain state value to create changeset // Double option around Account represent if Account state is know (first option) and // account is removed (Second Option) - let mut state: BundleStateInit = HashMap::new(); + let mut state: BundleStateInit = HashMap::default(); // This is not working for blocks that are not at tip. 
as plain state is not the last // state of end range. We should rename the functions or add support to access // History state. Accessing history state can be tricky but we are not gaining // anything. - let mut reverts: RevertsInit = HashMap::new(); + let mut reverts: RevertsInit = HashMap::default(); // add account changeset changes for (block_number, account_before) in account_changeset.into_iter().rev() { @@ -928,7 +928,7 @@ impl DatabaseProvider { match state.entry(address) { hash_map::Entry::Vacant(entry) => { let new_info = plain_accounts_cursor.seek_exact(address)?.map(|kv| kv.1); - entry.insert((old_info, new_info, HashMap::new())); + entry.insert((old_info, new_info, HashMap::default())); } hash_map::Entry::Occupied(mut entry) => { // overwrite old account state. @@ -946,7 +946,7 @@ impl DatabaseProvider { let account_state = match state.entry(address) { hash_map::Entry::Vacant(entry) => { let present_info = plain_accounts_cursor.seek_exact(address)?.map(|kv| kv.1); - entry.insert((present_info, present_info, HashMap::new())) + entry.insert((present_info, present_info, HashMap::default())) } hash_map::Entry::Occupied(entry) => entry.into_mut(), }; diff --git a/crates/storage/provider/src/providers/state/historical.rs b/crates/storage/provider/src/providers/state/historical.rs index a8613a8d1a874..de30f89c98ee5 100644 --- a/crates/storage/provider/src/providers/state/historical.rs +++ b/crates/storage/provider/src/providers/state/historical.rs @@ -2,7 +2,10 @@ use crate::{ providers::{state::macros::delegate_provider_impls, StaticFileProvider}, AccountReader, BlockHashReader, ProviderError, StateProvider, StateRootProvider, }; -use alloy_primitives::{Address, BlockNumber, Bytes, StorageKey, StorageValue, B256}; +use alloy_primitives::{ + map::{HashMap, HashSet}, + Address, BlockNumber, Bytes, StorageKey, StorageValue, B256, +}; use reth_db::{tables, BlockNumberList}; use reth_db_api::{ cursor::{DbCursorRO, DbDupCursorRO}, @@ -21,10 +24,7 @@ use reth_trie_db::{ DatabaseHashedPostState, DatabaseHashedStorage, DatabaseProof, DatabaseStateRoot, DatabaseStorageRoot, DatabaseTrieWitness, }; -use std::{ - collections::{HashMap, HashSet}, - fmt::Debug, -}; +use std::fmt::Debug; /// State provider for a given block number which takes a tx reference. /// diff --git a/crates/storage/provider/src/providers/state/latest.rs b/crates/storage/provider/src/providers/state/latest.rs index 74dfdac73287d..f63eaee23862c 100644 --- a/crates/storage/provider/src/providers/state/latest.rs +++ b/crates/storage/provider/src/providers/state/latest.rs @@ -2,7 +2,10 @@ use crate::{ providers::{state::macros::delegate_provider_impls, StaticFileProvider}, AccountReader, BlockHashReader, StateProvider, StateRootProvider, }; -use alloy_primitives::{Address, BlockNumber, Bytes, StorageKey, StorageValue, B256}; +use alloy_primitives::{ + map::{HashMap, HashSet}, + Address, BlockNumber, Bytes, StorageKey, StorageValue, B256, +}; use reth_db::tables; use reth_db_api::{ cursor::{DbCursorRO, DbDupCursorRO}, @@ -16,7 +19,6 @@ use reth_trie::{ HashedStorage, MultiProof, StateRoot, StorageRoot, TrieInput, }; use reth_trie_db::{DatabaseProof, DatabaseStateRoot, DatabaseStorageRoot, DatabaseTrieWitness}; -use std::collections::{HashMap, HashSet}; /// State provider over latest state that takes tx reference. 
#[derive(Debug)] diff --git a/crates/storage/provider/src/providers/state/macros.rs b/crates/storage/provider/src/providers/state/macros.rs index 49a168f4e7b4c..e6ed31938cd82 100644 --- a/crates/storage/provider/src/providers/state/macros.rs +++ b/crates/storage/provider/src/providers/state/macros.rs @@ -52,8 +52,8 @@ macro_rules! delegate_provider_impls { } StateProofProvider $(where [$($generics)*])? { fn proof(&self, input: reth_trie::TrieInput, address: alloy_primitives::Address, slots: &[alloy_primitives::B256]) -> reth_storage_errors::provider::ProviderResult; - fn multiproof(&self, input: reth_trie::TrieInput, targets: std::collections::HashMap>) -> reth_storage_errors::provider::ProviderResult; - fn witness(&self, input: reth_trie::TrieInput, target: reth_trie::HashedPostState) -> reth_storage_errors::provider::ProviderResult>; + fn multiproof(&self, input: reth_trie::TrieInput, targets: alloy_primitives::map::HashMap>) -> reth_storage_errors::provider::ProviderResult; + fn witness(&self, input: reth_trie::TrieInput, target: reth_trie::HashedPostState) -> reth_storage_errors::provider::ProviderResult>; } ); } diff --git a/crates/storage/provider/src/test_utils/blocks.rs b/crates/storage/provider/src/test_utils/blocks.rs index 6f6aae9924464..09daf998731a6 100644 --- a/crates/storage/provider/src/test_utils/blocks.rs +++ b/crates/storage/provider/src/test_utils/blocks.rs @@ -1,7 +1,7 @@ //! Dummy blocks and data for tests use crate::{DatabaseProviderRW, ExecutionOutcome}; use alloy_consensus::TxLegacy; -use alloy_primitives::{Address, BlockNumber, Log, Parity, Sealable, TxKind}; +use alloy_primitives::{map::HashMap, Address, BlockNumber, Log, Parity, Sealable, TxKind}; use once_cell::sync::Lazy; use reth_db::tables; use reth_db_api::{database::Database, models::StoredBlockBodyIndices}; @@ -11,10 +11,7 @@ use reth_primitives::{ TxType, Withdrawal, Withdrawals, B256, U256, }; use reth_trie::root::{state_root_unhashed, storage_root_unhashed}; -use revm::{ - db::BundleState, - primitives::{AccountInfo, HashMap}, -}; +use revm::{db::BundleState, primitives::AccountInfo}; use std::str::FromStr; /// Assert genesis block @@ -200,7 +197,7 @@ fn block1(number: BlockNumber) -> (SealedBlockWithSenders, ExecutionOutcome) { .revert_account_info(number, account1, Some(None)) .state_present_account_info(account2, info) .revert_account_info(number, account2, Some(None)) - .state_storage(account1, HashMap::from([(slot, (U256::ZERO, U256::from(10)))])) + .state_storage(account1, HashMap::from_iter([(slot, (U256::ZERO, U256::from(10)))])) .build(), vec![vec![Some(Receipt { tx_type: TxType::Eip2930, @@ -256,7 +253,7 @@ fn block2( account, AccountInfo { nonce: 3, balance: U256::from(20), ..Default::default() }, ) - .state_storage(account, HashMap::from([(slot, (U256::ZERO, U256::from(15)))])) + .state_storage(account, HashMap::from_iter([(slot, (U256::ZERO, U256::from(15)))])) .revert_account_info( number, account, diff --git a/crates/storage/provider/src/test_utils/mock.rs b/crates/storage/provider/src/test_utils/mock.rs index 9277df6e6c5fb..b06ae6ec1ad2a 100644 --- a/crates/storage/provider/src/test_utils/mock.rs +++ b/crates/storage/provider/src/test_utils/mock.rs @@ -8,8 +8,9 @@ use crate::{ }; use alloy_eips::{BlockHashOrNumber, BlockId, BlockNumberOrTag}; use alloy_primitives::{ - keccak256, Address, BlockHash, BlockNumber, Bytes, StorageKey, StorageValue, TxHash, TxNumber, - B256, U256, + keccak256, + map::{HashMap, HashSet}, + Address, BlockHash, BlockNumber, Bytes, StorageKey, StorageValue, 
TxHash, TxNumber, B256, U256, }; use parking_lot::Mutex; use reth_chainspec::{ChainInfo, ChainSpec}; @@ -32,7 +33,7 @@ use reth_trie::{ }; use revm::primitives::{BlockEnv, CfgEnvWithHandlerCfg}; use std::{ - collections::{BTreeMap, HashMap, HashSet}, + collections::BTreeMap, ops::{RangeBounds, RangeInclusive}, sync::Arc, }; diff --git a/crates/storage/provider/src/test_utils/noop.rs b/crates/storage/provider/src/test_utils/noop.rs index 059accbd90056..e8b7760b880b7 100644 --- a/crates/storage/provider/src/test_utils/noop.rs +++ b/crates/storage/provider/src/test_utils/noop.rs @@ -1,5 +1,4 @@ use std::{ - collections::{HashMap, HashSet}, ops::{RangeBounds, RangeInclusive}, path::PathBuf, sync::Arc, @@ -7,6 +6,7 @@ use std::{ use alloy_eips::{BlockHashOrNumber, BlockId, BlockNumberOrTag}; use alloy_primitives::{ + map::{HashMap, HashSet}, Address, BlockHash, BlockNumber, Bytes, StorageKey, StorageValue, TxHash, TxNumber, B256, U256, }; use reth_chain_state::{ diff --git a/crates/storage/provider/src/writer/mod.rs b/crates/storage/provider/src/writer/mod.rs index d96e722d7b404..aa60605d055ba 100644 --- a/crates/storage/provider/src/writer/mod.rs +++ b/crates/storage/provider/src/writer/mod.rs @@ -544,7 +544,7 @@ mod tests { use crate::{ test_utils::create_test_provider_factory, AccountReader, StorageTrieWriter, TrieWriter, }; - use alloy_primitives::{keccak256, Address, B256, U256}; + use alloy_primitives::{keccak256, map::HashMap, Address, B256, U256}; use reth_db::tables; use reth_db_api::{ cursor::{DbCursorRO, DbCursorRW, DbDupCursorRO}, @@ -570,10 +570,7 @@ mod tests { }, DatabaseCommit, State, }; - use std::{ - collections::{BTreeMap, HashMap}, - str::FromStr, - }; + use std::{collections::BTreeMap, str::FromStr}; #[test] fn wiped_entries_are_removed() { @@ -644,7 +641,7 @@ mod tests { state.insert_account(address_b, account_b.clone()); // 0x00.. is created - state.commit(HashMap::from([( + state.commit(HashMap::from_iter([( address_a, RevmAccount { info: account_a.clone(), @@ -654,7 +651,7 @@ mod tests { )])); // 0xff.. is changed (balance + 1, nonce + 1) - state.commit(HashMap::from([( + state.commit(HashMap::from_iter([( address_b, RevmAccount { info: account_b_changed.clone(), @@ -712,7 +709,7 @@ mod tests { state.insert_account(address_b, account_b_changed.clone()); // 0xff.. 
is destroyed - state.commit(HashMap::from([( + state.commit(HashMap::from_iter([( address_b, RevmAccount { status: AccountStatus::Touched | AccountStatus::SelfDestructed, @@ -771,10 +768,10 @@ mod tests { state.insert_account_with_storage( address_b, account_b.clone(), - HashMap::from([(U256::from(1), U256::from(1))]), + HashMap::from_iter([(U256::from(1), U256::from(1))]), ); - state.commit(HashMap::from([ + state.commit(HashMap::from_iter([ ( address_a, RevmAccount { @@ -782,7 +779,7 @@ mod tests { info: RevmAccountInfo::default(), // 0x00 => 0 => 1 // 0x01 => 0 => 2 - storage: HashMap::from([ + storage: HashMap::from_iter([ ( U256::from(0), EvmStorageSlot { present_value: U256::from(1), ..Default::default() }, @@ -800,7 +797,7 @@ mod tests { status: AccountStatus::Touched, info: account_b, // 0x01 => 1 => 2 - storage: HashMap::from([( + storage: HashMap::from_iter([( U256::from(1), EvmStorageSlot { present_value: U256::from(2), @@ -905,7 +902,7 @@ mod tests { let mut state = State::builder().with_bundle_update().build(); state.insert_account(address_a, RevmAccountInfo::default()); - state.commit(HashMap::from([( + state.commit(HashMap::from_iter([( address_a, RevmAccount { status: AccountStatus::Touched | AccountStatus::SelfDestructed, @@ -962,14 +959,14 @@ mod tests { // Block #0: initial state. let mut init_state = State::builder().with_bundle_update().build(); init_state.insert_not_existing(address1); - init_state.commit(HashMap::from([( + init_state.commit(HashMap::from_iter([( address1, RevmAccount { info: account_info.clone(), status: AccountStatus::Touched | AccountStatus::Created, // 0x00 => 0 => 1 // 0x01 => 0 => 2 - storage: HashMap::from([ + storage: HashMap::from_iter([ ( U256::ZERO, EvmStorageSlot { present_value: U256::from(1), ..Default::default() }, @@ -994,17 +991,17 @@ mod tests { state.insert_account_with_storage( address1, account_info.clone(), - HashMap::from([(U256::ZERO, U256::from(1)), (U256::from(1), U256::from(2))]), + HashMap::from_iter([(U256::ZERO, U256::from(1)), (U256::from(1), U256::from(2))]), ); // Block #1: change storage. - state.commit(HashMap::from([( + state.commit(HashMap::from_iter([( address1, RevmAccount { status: AccountStatus::Touched, info: account_info.clone(), // 0x00 => 1 => 2 - storage: HashMap::from([( + storage: HashMap::from_iter([( U256::ZERO, EvmStorageSlot { original_value: U256::from(1), @@ -1017,7 +1014,7 @@ mod tests { state.merge_transitions(BundleRetention::Reverts); // Block #2: destroy account. - state.commit(HashMap::from([( + state.commit(HashMap::from_iter([( address1, RevmAccount { status: AccountStatus::Touched | AccountStatus::SelfDestructed, @@ -1028,7 +1025,7 @@ mod tests { state.merge_transitions(BundleRetention::Reverts); // Block #3: re-create account and change storage. - state.commit(HashMap::from([( + state.commit(HashMap::from_iter([( address1, RevmAccount { status: AccountStatus::Touched | AccountStatus::Created, @@ -1039,7 +1036,7 @@ mod tests { state.merge_transitions(BundleRetention::Reverts); // Block #4: change storage. - state.commit(HashMap::from([( + state.commit(HashMap::from_iter([( address1, RevmAccount { status: AccountStatus::Touched, @@ -1047,7 +1044,7 @@ mod tests { // 0x00 => 0 => 2 // 0x02 => 0 => 4 // 0x06 => 0 => 6 - storage: HashMap::from([ + storage: HashMap::from_iter([ ( U256::ZERO, EvmStorageSlot { present_value: U256::from(2), ..Default::default() }, @@ -1066,7 +1063,7 @@ mod tests { state.merge_transitions(BundleRetention::Reverts); // Block #5: Destroy account again. 
- state.commit(HashMap::from([( + state.commit(HashMap::from_iter([( address1, RevmAccount { status: AccountStatus::Touched | AccountStatus::SelfDestructed, @@ -1077,7 +1074,7 @@ mod tests { state.merge_transitions(BundleRetention::Reverts); // Block #6: Create, change, destroy and re-create in the same block. - state.commit(HashMap::from([( + state.commit(HashMap::from_iter([( address1, RevmAccount { status: AccountStatus::Touched | AccountStatus::Created, @@ -1085,19 +1082,19 @@ mod tests { storage: HashMap::default(), }, )])); - state.commit(HashMap::from([( + state.commit(HashMap::from_iter([( address1, RevmAccount { status: AccountStatus::Touched, info: account_info.clone(), // 0x00 => 0 => 2 - storage: HashMap::from([( + storage: HashMap::from_iter([( U256::ZERO, EvmStorageSlot { present_value: U256::from(2), ..Default::default() }, )]), }, )])); - state.commit(HashMap::from([( + state.commit(HashMap::from_iter([( address1, RevmAccount { status: AccountStatus::Touched | AccountStatus::SelfDestructed, @@ -1105,7 +1102,7 @@ mod tests { storage: HashMap::default(), }, )])); - state.commit(HashMap::from([( + state.commit(HashMap::from_iter([( address1, RevmAccount { status: AccountStatus::Touched | AccountStatus::Created, @@ -1116,13 +1113,13 @@ mod tests { state.merge_transitions(BundleRetention::Reverts); // Block #7: Change storage. - state.commit(HashMap::from([( + state.commit(HashMap::from_iter([( address1, RevmAccount { status: AccountStatus::Touched, info: account_info, // 0x00 => 0 => 9 - storage: HashMap::from([( + storage: HashMap::from_iter([( U256::ZERO, EvmStorageSlot { present_value: U256::from(9), ..Default::default() }, )]), @@ -1277,14 +1274,14 @@ mod tests { // Block #0: initial state. let mut init_state = State::builder().with_bundle_update().build(); init_state.insert_not_existing(address1); - init_state.commit(HashMap::from([( + init_state.commit(HashMap::from_iter([( address1, RevmAccount { info: account1.clone(), status: AccountStatus::Touched | AccountStatus::Created, // 0x00 => 0 => 1 // 0x01 => 0 => 2 - storage: HashMap::from([ + storage: HashMap::from_iter([ ( U256::ZERO, EvmStorageSlot { present_value: U256::from(1), ..Default::default() }, @@ -1308,11 +1305,11 @@ mod tests { state.insert_account_with_storage( address1, account1.clone(), - HashMap::from([(U256::ZERO, U256::from(1)), (U256::from(1), U256::from(2))]), + HashMap::from_iter([(U256::ZERO, U256::from(1)), (U256::from(1), U256::from(2))]), ); // Block #1: Destroy, re-create, change storage. 
- state.commit(HashMap::from([( + state.commit(HashMap::from_iter([( address1, RevmAccount { status: AccountStatus::Touched | AccountStatus::SelfDestructed, @@ -1321,7 +1318,7 @@ mod tests { }, )])); - state.commit(HashMap::from([( + state.commit(HashMap::from_iter([( address1, RevmAccount { status: AccountStatus::Touched | AccountStatus::Created, @@ -1330,13 +1327,13 @@ mod tests { }, )])); - state.commit(HashMap::from([( + state.commit(HashMap::from_iter([( address1, RevmAccount { status: AccountStatus::Touched, info: account1, // 0x01 => 0 => 5 - storage: HashMap::from([( + storage: HashMap::from_iter([( U256::from(1), EvmStorageSlot { present_value: U256::from(5), ..Default::default() }, )]), @@ -1468,7 +1465,7 @@ mod tests { let address1 = Address::with_last_byte(1); let account1_old = prestate.remove(&address1).unwrap(); state.insert_account(address1, account1_old.0.into()); - state.commit(HashMap::from([( + state.commit(HashMap::from_iter([( address1, RevmAccount { status: AccountStatus::Touched | AccountStatus::SelfDestructed, @@ -1488,12 +1485,12 @@ mod tests { state.insert_account_with_storage( address2, account2.0.into(), - HashMap::from([(slot2, account2_slot2_old_value)]), + HashMap::from_iter([(slot2, account2_slot2_old_value)]), ); let account2_slot2_new_value = U256::from(100); account2.1.insert(slot2_key, account2_slot2_new_value); - state.commit(HashMap::from([( + state.commit(HashMap::from_iter([( address2, RevmAccount { status: AccountStatus::Touched, @@ -1513,7 +1510,7 @@ mod tests { state.insert_account(address3, account3.0.into()); account3.0.balance = U256::from(24); - state.commit(HashMap::from([( + state.commit(HashMap::from_iter([( address3, RevmAccount { status: AccountStatus::Touched, @@ -1530,7 +1527,7 @@ mod tests { state.insert_account(address4, account4.0.into()); account4.0.nonce = 128; - state.commit(HashMap::from([( + state.commit(HashMap::from_iter([( address4, RevmAccount { status: AccountStatus::Touched, @@ -1545,7 +1542,7 @@ mod tests { let account1_new = Account { nonce: 56, balance: U256::from(123), bytecode_hash: Some(B256::random()) }; prestate.insert(address1, (account1_new, BTreeMap::default())); - state.commit(HashMap::from([( + state.commit(HashMap::from_iter([( address1, RevmAccount { status: AccountStatus::Touched | AccountStatus::Created, @@ -1561,7 +1558,7 @@ mod tests { let slot20_key = B256::from(slot20); let account1_slot20_value = U256::from(12345); prestate.get_mut(&address1).unwrap().1.insert(slot20_key, account1_slot20_value); - state.commit(HashMap::from([( + state.commit(HashMap::from_iter([( address1, RevmAccount { status: AccountStatus::Touched | AccountStatus::Created, diff --git a/crates/storage/storage-api/src/trie.rs b/crates/storage/storage-api/src/trie.rs index e41a15e107d34..d989def8bb0d0 100644 --- a/crates/storage/storage-api/src/trie.rs +++ b/crates/storage/storage-api/src/trie.rs @@ -1,9 +1,11 @@ -use alloy_primitives::{Address, Bytes, B256}; +use alloy_primitives::{ + map::{HashMap, HashSet}, + Address, Bytes, B256, +}; use reth_storage_errors::provider::ProviderResult; use reth_trie::{ updates::TrieUpdates, AccountProof, HashedPostState, HashedStorage, MultiProof, TrieInput, }; -use std::collections::{HashMap, HashSet}; /// A type that can compute the state root of a given post state. 
#[auto_impl::auto_impl(&, Box, Arc)] diff --git a/crates/transaction-pool/src/config.rs b/crates/transaction-pool/src/config.rs index b5063f5b37a1d..623493e6c9d43 100644 --- a/crates/transaction-pool/src/config.rs +++ b/crates/transaction-pool/src/config.rs @@ -266,7 +266,7 @@ mod tests { #[test] fn test_contains_local_address() { let address = Address::new([1; 20]); - let mut local_addresses = HashSet::new(); + let mut local_addresses = HashSet::default(); local_addresses.insert(address); let config = LocalTransactionConfig { local_addresses, ..Default::default() }; @@ -283,7 +283,7 @@ mod tests { let address = Address::new([1; 20]); let config = LocalTransactionConfig { no_exemptions: true, - local_addresses: HashSet::new(), + local_addresses: HashSet::default(), ..Default::default() }; @@ -294,7 +294,7 @@ mod tests { #[test] fn test_is_local_without_no_exemptions() { let address = Address::new([1; 20]); - let mut local_addresses = HashSet::new(); + let mut local_addresses = HashSet::default(); local_addresses.insert(address); let config = diff --git a/crates/transaction-pool/src/maintain.rs b/crates/transaction-pool/src/maintain.rs index e961d03703bcc..baa35edaf6f47 100644 --- a/crates/transaction-pool/src/maintain.rs +++ b/crates/transaction-pool/src/maintain.rs @@ -138,7 +138,7 @@ pub async fn maintain_transaction_pool( FinalizedBlockTracker::new(client.finalized_block_number().ok().flatten()); // keeps track of any dirty accounts that we know of are out of sync with the pool - let mut dirty_addresses = HashSet::new(); + let mut dirty_addresses = HashSet::default(); // keeps track of the state of the pool wrt to blocks let mut maintained_state = MaintainedPoolState::InSync; diff --git a/crates/transaction-pool/src/pool/txpool.rs b/crates/transaction-pool/src/pool/txpool.rs index f3b59e09454d6..912e04506a19a 100644 --- a/crates/transaction-pool/src/pool/txpool.rs +++ b/crates/transaction-pool/src/pool/txpool.rs @@ -2708,7 +2708,7 @@ mod tests { assert_eq!(pool.pending_pool.len(), 2); - let mut changed_senders = HashMap::new(); + let mut changed_senders = HashMap::default(); changed_senders.insert( id.sender, SenderInfo { state_nonce: next.get_nonce(), balance: U256::from(1_000) }, @@ -2892,7 +2892,7 @@ mod tests { assert_eq!(1, pool.pending_transactions().len()); // Simulate new block arrival - and chain nonce increasing. - let mut updated_accounts = HashMap::new(); + let mut updated_accounts = HashMap::default(); on_chain_nonce += 1; updated_accounts.insert( v0.sender_id(), @@ -2967,7 +2967,7 @@ mod tests { assert_eq!(1, pool.pending_transactions().len()); // Simulate new block arrival - and chain nonce increasing. 
- let mut updated_accounts = HashMap::new(); + let mut updated_accounts = HashMap::default(); on_chain_nonce += 1; updated_accounts.insert( v0.sender_id(), diff --git a/crates/trie/db/src/proof.rs b/crates/trie/db/src/proof.rs index 2d06d9e2f33b4..9416d078090cb 100644 --- a/crates/trie/db/src/proof.rs +++ b/crates/trie/db/src/proof.rs @@ -1,5 +1,8 @@ use crate::{DatabaseHashedCursorFactory, DatabaseTrieCursorFactory}; -use alloy_primitives::{Address, B256}; +use alloy_primitives::{ + map::{HashMap, HashSet}, + Address, B256, +}; use reth_db_api::transaction::DbTx; use reth_execution_errors::StateProofError; use reth_trie::{ @@ -7,7 +10,6 @@ use reth_trie::{ trie_cursor::InMemoryTrieCursorFactory, MultiProof, TrieInput, }; use reth_trie_common::AccountProof; -use std::collections::{HashMap, HashSet}; /// Extends [`Proof`] with operations specific for working with a database transaction. pub trait DatabaseProof<'a, TX> { diff --git a/crates/trie/db/src/state.rs b/crates/trie/db/src/state.rs index 9cefc6b9fa83c..48a676c85a5df 100644 --- a/crates/trie/db/src/state.rs +++ b/crates/trie/db/src/state.rs @@ -267,12 +267,11 @@ impl DatabaseHashedPostState for HashedPostState { #[cfg(test)] mod tests { use super::*; - use alloy_primitives::Address; + use alloy_primitives::{map::HashMap, Address}; use reth_db::test_utils::create_test_rw_db; use reth_db_api::database::Database; use reth_primitives::{hex, revm_primitives::AccountInfo, U256}; use revm::db::BundleState; - use std::collections::HashMap; #[test] fn from_bundle_state_with_rayon() { @@ -287,8 +286,8 @@ mod tests { let bundle_state = BundleState::builder(2..=2) .state_present_account_info(address1, account1) .state_present_account_info(address2, account2) - .state_storage(address1, HashMap::from([(slot1, (U256::ZERO, U256::from(10)))])) - .state_storage(address2, HashMap::from([(slot2, (U256::ZERO, U256::from(20)))])) + .state_storage(address1, HashMap::from_iter([(slot1, (U256::ZERO, U256::from(10)))])) + .state_storage(address2, HashMap::from_iter([(slot2, (U256::ZERO, U256::from(20)))])) .build(); assert_eq!(bundle_state.reverts.len(), 1); diff --git a/crates/trie/db/src/witness.rs b/crates/trie/db/src/witness.rs index 62b945d26dc2c..54d017780ae4a 100644 --- a/crates/trie/db/src/witness.rs +++ b/crates/trie/db/src/witness.rs @@ -1,12 +1,11 @@ use crate::{DatabaseHashedCursorFactory, DatabaseTrieCursorFactory}; -use alloy_primitives::{Bytes, B256}; +use alloy_primitives::{map::HashMap, Bytes, B256}; use reth_db_api::transaction::DbTx; use reth_execution_errors::TrieWitnessError; use reth_trie::{ hashed_cursor::HashedPostStateCursorFactory, trie_cursor::InMemoryTrieCursorFactory, witness::TrieWitness, HashedPostState, TrieInput, }; -use std::collections::HashMap; /// Extends [`TrieWitness`] with operations specific for working with a database transaction. 
pub trait DatabaseTrieWitness<'a, TX> { diff --git a/crates/trie/trie/benches/hash_post_state.rs b/crates/trie/trie/benches/hash_post_state.rs index 49759f14a969a..6e913ef78a3cb 100644 --- a/crates/trie/trie/benches/hash_post_state.rs +++ b/crates/trie/trie/benches/hash_post_state.rs @@ -1,10 +1,9 @@ #![allow(missing_docs, unreachable_pub)] -use alloy_primitives::{keccak256, Address, B256, U256}; +use alloy_primitives::{keccak256, map::HashMap, Address, B256, U256}; use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; use proptest::{prelude::*, strategy::ValueTree, test_runner::TestRunner}; use reth_trie::{HashedPostState, HashedStorage}; use revm::db::{states::BundleBuilder, BundleAccount}; -use std::collections::HashMap; pub fn hash_post_state(c: &mut Criterion) { let mut group = c.benchmark_group("Hash Post State"); @@ -68,7 +67,7 @@ fn generate_test_data(size: usize) -> HashMap { let mut bundle_builder = BundleBuilder::default(); for (address, storage) in state { - bundle_builder = bundle_builder.state_storage(address, storage); + bundle_builder = bundle_builder.state_storage(address, storage.into_iter().collect()); } let bundle_state = bundle_builder.build(); diff --git a/crates/trie/trie/src/proof.rs b/crates/trie/trie/src/proof.rs index 8b9d2f9d09fb4..69b648ba001dc 100644 --- a/crates/trie/trie/src/proof.rs +++ b/crates/trie/trie/src/proof.rs @@ -6,13 +6,16 @@ use crate::{ walker::TrieWalker, HashBuilder, Nibbles, }; -use alloy_primitives::{keccak256, Address, B256}; +use alloy_primitives::{ + keccak256, + map::{HashMap, HashSet}, + Address, B256, +}; use alloy_rlp::{BufMut, Encodable}; use reth_execution_errors::trie::StateProofError; use reth_trie_common::{ proof::ProofRetainer, AccountProof, MultiProof, StorageMultiProof, TrieAccount, }; -use std::collections::{HashMap, HashSet}; /// A struct for generating merkle proofs. /// @@ -70,7 +73,7 @@ impl Proof { /// Set the target account and slots. pub fn with_target(self, target: (B256, HashSet)) -> Self { - self.with_targets(HashMap::from([target])) + self.with_targets(HashMap::from_iter([target])) } /// Set the target accounts and slots. diff --git a/crates/trie/trie/src/witness.rs b/crates/trie/trie/src/witness.rs index 0d08396160216..1f521ca7db5f4 100644 --- a/crates/trie/trie/src/witness.rs +++ b/crates/trie/trie/src/witness.rs @@ -1,8 +1,14 @@ +use std::collections::BTreeMap; + use crate::{ hashed_cursor::HashedCursorFactory, prefix_set::TriePrefixSetsMut, proof::Proof, trie_cursor::TrieCursorFactory, HashedPostState, }; -use alloy_primitives::{keccak256, Bytes, B256}; +use alloy_primitives::{ + keccak256, + map::{HashMap, HashSet}, + Bytes, B256, +}; use alloy_rlp::{BufMut, Decodable, Encodable}; use itertools::Either; use reth_execution_errors::{StateProofError, TrieWitnessError}; @@ -10,7 +16,6 @@ use reth_primitives::constants::EMPTY_ROOT_HASH; use reth_trie_common::{ BranchNode, HashBuilder, Nibbles, TrieAccount, TrieNode, CHILD_INDEX_RANGE, }; -use std::collections::{BTreeMap, HashMap, HashSet}; /// State transition witness for the trie. #[derive(Debug)] @@ -145,7 +150,7 @@ where self.hashed_cursor_factory.clone(), ) .with_prefix_sets_mut(self.prefix_sets.clone()) - .with_target((hashed_address, HashSet::from([target_key]))) + .with_target((hashed_address, HashSet::from_iter([target_key]))) .storage_multiproof(hashed_address)?; // The subtree only contains the proof for a single target. 
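
The collection changes in the patch above follow from the `alloy_primitives::map` aliases being backed by a non-default hasher: the std convenience constructors (`HashMap::new()`, `HashMap::from([...])`, `HashSet::new()`) are only implemented for the default `RandomState` hasher, whereas `Default::default()` and `FromIterator::from_iter` are available for any `BuildHasher + Default`, which is why the diff rewrites call sites to `from_iter`/`default`. A minimal, self-contained sketch of the pattern, with a std type alias standing in for the `alloy_primitives::map` aliases:

```rust
use std::collections::{hash_map::DefaultHasher, HashMap};
use std::hash::BuildHasherDefault;

// Stand-in for a map alias with a custom hasher, such as `alloy_primitives::map::HashMap`.
type FastMap<K, V> = HashMap<K, V, BuildHasherDefault<DefaultHasher>>;

fn main() {
    // `HashMap::new()` / `HashMap::from([...])` only exist for `RandomState`,
    // so a custom-hasher alias is built via `Default` and `FromIterator` instead.
    let mut map: FastMap<&str, u64> = FastMap::default();
    map.insert("a", 1);

    let from_iter: FastMap<&str, u64> = FastMap::from_iter([("b", 2), ("c", 3)]);
    assert_eq!(from_iter.len(), 2);
    assert_eq!(map.len(), 1);
}
```
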
From d46f76264dd454ad29783af060548913cfe50445 Mon Sep 17 00:00:00 2001 From: joshieDo <93316087+joshieDo@users.noreply.github.com> Date: Thu, 26 Sep 2024 16:55:28 +0200 Subject: [PATCH 24/84] fix(cli): fix `ImportReceiptsOp` (#11216) --- book/run/sync-op-mainnet.md | 4 +- .../downloaders/src/receipt_file_client.rs | 7 +- .../cli/src/commands/import_receipts.rs | 145 ++++++++++-------- 3 files changed, 86 insertions(+), 70 deletions(-) diff --git a/book/run/sync-op-mainnet.md b/book/run/sync-op-mainnet.md index ebdacdf6167fc..057860c334943 100644 --- a/book/run/sync-op-mainnet.md +++ b/book/run/sync-op-mainnet.md @@ -49,7 +49,7 @@ Imports a `.rlp` file of blocks. Import of >100 million OVM blocks, from genesis to Bedrock, completes in 45 minutes. ```bash -$ op-reth import-op +$ op-reth import-op --chain optimism ``` #### 2. Import Receipts @@ -63,7 +63,7 @@ Imports a `.rlp` file of receipts, that has been exported with command specified Import of >100 million OVM receipts, from genesis to Bedrock, completes in 30 minutes. ```bash -$ op-reth import-receipts-op +$ op-reth import-receipts-op --chain optimism ``` #### 3. Import State diff --git a/crates/net/downloaders/src/receipt_file_client.rs b/crates/net/downloaders/src/receipt_file_client.rs index 6bf095c3b2bb2..7e66a4d876e24 100644 --- a/crates/net/downloaders/src/receipt_file_client.rs +++ b/crates/net/downloaders/src/receipt_file_client.rs @@ -5,7 +5,7 @@ use reth_primitives::{Receipt, Receipts}; use tokio::io::AsyncReadExt; use tokio_stream::StreamExt; use tokio_util::codec::{Decoder, FramedRead}; -use tracing::trace; +use tracing::{trace, warn}; use crate::{DecodedFileChunk, FileClientError}; @@ -106,6 +106,11 @@ where match receipt { Some(ReceiptWithBlockNumber { receipt, number }) => { + if block_number > number { + warn!(target: "downloaders::file", previous_block_number = block_number, "skipping receipt from a lower block: {number}"); + continue + } + total_receipts += 1; if first_block.is_none() { diff --git a/crates/optimism/cli/src/commands/import_receipts.rs b/crates/optimism/cli/src/commands/import_receipts.rs index 58a8590bd8ce1..2ec0c9d704a0d 100644 --- a/crates/optimism/cli/src/commands/import_receipts.rs +++ b/crates/optimism/cli/src/commands/import_receipts.rs @@ -19,11 +19,12 @@ use reth_optimism_primitives::bedrock::is_dup_tx; use reth_primitives::Receipts; use reth_provider::{ writer::UnifiedStorageWriter, DatabaseProviderFactory, OriginalValuesKnown, ProviderFactory, - StageCheckpointReader, StateWriter, StaticFileProviderFactory, StaticFileWriter, StatsReader, + StageCheckpointReader, StageCheckpointWriter, StateWriter, StaticFileProviderFactory, + StaticFileWriter, StatsReader, }; -use reth_stages::StageId; +use reth_stages::{StageCheckpoint, StageId}; use reth_static_file_types::StaticFileSegment; -use tracing::{debug, error, info, trace}; +use tracing::{debug, info, trace, warn}; use crate::receipt_file_codec::HackReceiptFileCodec; @@ -91,15 +92,6 @@ where P: AsRef, F: FnMut(u64, &mut Receipts) -> usize, { - let total_imported_txns = provider_factory - .static_file_provider() - .count_entries::() - .expect("transaction static files must exist before importing receipts"); - let highest_block_transactions = provider_factory - .static_file_provider() - .get_highest_static_file_block(StaticFileSegment::Transactions) - .expect("transaction static files must exist before importing receipts"); - for stage in StageId::ALL { let checkpoint = provider_factory.database_provider_ro()?.get_stage_checkpoint(stage)?; 
trace!(target: "reth::cli", @@ -113,53 +105,9 @@ where let reader = ChunkedFileReader::new(&path, chunk_len).await?; // import receipts - let ImportReceiptsResult { total_decoded_receipts, total_filtered_out_dup_txns } = - import_receipts_from_reader(&provider_factory, reader, filter).await?; - - if total_decoded_receipts == 0 { - error!(target: "reth::cli", "No receipts were imported, ensure the receipt file is valid and not empty"); - return Ok(()) - } - - let total_imported_receipts = provider_factory - .static_file_provider() - .count_entries::() - .expect("static files must exist after ensuring we decoded more than zero"); - - if total_imported_receipts + total_filtered_out_dup_txns != total_decoded_receipts { - error!(target: "reth::cli", - total_decoded_receipts, - total_imported_receipts, - total_filtered_out_dup_txns, - "Receipts were partially imported" - ); - } - - if total_imported_receipts != total_imported_txns { - error!(target: "reth::cli", - total_imported_receipts, - total_imported_txns, - "Receipts inconsistent with transactions" - ); - } - - let highest_block_receipts = provider_factory - .static_file_provider() - .get_highest_static_file_block(StaticFileSegment::Receipts) - .expect("static files must exist after ensuring we decoded more than zero"); - - if highest_block_receipts != highest_block_transactions { - error!(target: "reth::cli", - highest_block_receipts, - highest_block_transactions, - "Height of receipts inconsistent with transactions" - ); - } + let _ = import_receipts_from_reader(&provider_factory, reader, filter).await?; info!(target: "reth::cli", - total_imported_receipts, - total_decoded_receipts, - total_filtered_out_dup_txns, "Receipt file imported" ); @@ -181,15 +129,45 @@ where N: NodeTypesWithDB, F: FnMut(u64, &mut Receipts) -> usize, { + let static_file_provider = provider_factory.static_file_provider(); + + // Ensure that receipts hasn't been initialized apart from `init_genesis`. + if let Some(num_receipts) = + static_file_provider.get_highest_static_file_tx(StaticFileSegment::Receipts) + { + if num_receipts > 0 { + eyre::bail!("Expected no receipts in storage, but found {num_receipts}."); + } + } + match static_file_provider.get_highest_static_file_block(StaticFileSegment::Receipts) { + Some(receipts_block) => { + if receipts_block > 0 { + eyre::bail!("Expected highest receipt block to be 0, but found {receipts_block}."); + } + } + None => { + eyre::bail!("Receipts was not initialized. Please import blocks and transactions before calling this command."); + } + } + + let provider = provider_factory.provider_rw()?; let mut total_decoded_receipts = 0; + let mut total_receipts = 0; let mut total_filtered_out_dup_txns = 0; + let mut highest_block_receipts = 0; - let provider = provider_factory.provider_rw()?; - let static_file_provider = provider_factory.static_file_provider(); + let highest_block_transactions = static_file_provider + .get_highest_static_file_block(StaticFileSegment::Transactions) + .expect("transaction static files must exist before importing receipts"); while let Some(file_client) = reader.next_receipts_chunk::, HackReceiptFileCodec>().await? 
{ + if highest_block_receipts == highest_block_transactions { + warn!(target: "reth::cli", highest_block_receipts, highest_block_transactions, "Ignoring all other blocks in the file since we have reached the desired height"); + break + } + // create a new file client from chunk read from file let ReceiptFileClient { mut receipts, @@ -221,6 +199,21 @@ where // this ensures the execution outcome and static file producer start at block 1 first_block = 1; } + highest_block_receipts = first_block + receipts.len() as u64 - 1; + + // RLP file may have too many blocks. We ignore the excess, but warn the user. + if highest_block_receipts > highest_block_transactions { + let excess = highest_block_receipts - highest_block_transactions; + highest_block_receipts -= excess; + + // Remove the last `excess` blocks + receipts.receipt_vec.truncate(receipts.len() - excess as usize); + + warn!(target: "reth::cli", highest_block_receipts, "Too many decoded blocks, ignoring the last {excess}."); + } + + // Update total_receipts after all filtering + total_receipts += receipts.iter().map(|v| v.len()).sum::(); // We're reusing receipt writing code internal to // `UnifiedStorageWriter::append_receipts_from_blocks`, so we just use a default empty @@ -228,16 +221,32 @@ where let execution_outcome = ExecutionOutcome::new(Default::default(), receipts, first_block, Default::default()); - let static_file_producer = - static_file_provider.get_writer(first_block, StaticFileSegment::Receipts)?; - // finally, write the receipts - let mut storage_writer = UnifiedStorageWriter::from(&provider, static_file_producer); + let mut storage_writer = UnifiedStorageWriter::from( + &provider, + static_file_provider.latest_writer(StaticFileSegment::Receipts)?, + ); storage_writer.write_to_storage(execution_outcome, OriginalValuesKnown::Yes)?; } - // as static files works in file ranges, internally it will be committing when creating the - // next file range already, so we only need to call explicitly at the end. + // Only commit if we have imported as many receipts as the number of transactions. + let total_imported_txns = static_file_provider + .count_entries::() + .expect("transaction static files must exist before importing receipts"); + + if total_receipts != total_imported_txns { + eyre::bail!("Number of receipts ({total_receipts}) inconsistent with transactions {total_imported_txns}") + } + + // Only commit if the receipt block height matches the one from transactions. + if highest_block_receipts != highest_block_transactions { + eyre::bail!("Receipt block height ({highest_block_receipts}) inconsistent with transactions' {highest_block_transactions}") + } + + // Required or any access-write provider factory will attempt to unwind to 0. + provider + .save_stage_checkpoint(StageId::Execution, StageCheckpoint::new(highest_block_receipts))?; + UnifiedStorageWriter::commit(provider, static_file_provider)?; Ok(ImportReceiptsResult { total_decoded_receipts, total_filtered_out_dup_txns }) @@ -246,8 +255,10 @@ where /// Result of importing receipts in chunks. #[derive(Debug)] pub struct ImportReceiptsResult { - total_decoded_receipts: usize, - total_filtered_out_dup_txns: usize, + /// Total decoded receipts. + pub total_decoded_receipts: usize, + /// Total filtered out receipts. 
+ pub total_filtered_out_dup_txns: usize, } #[cfg(test)] From 6d0159eb7082512cc4d85b031a3c32df856479b6 Mon Sep 17 00:00:00 2001 From: Aliaksei Misiukevich Date: Thu, 26 Sep 2024 17:09:33 +0200 Subject: [PATCH 25/84] feat: tx and receipt compression utils for no-std config (#11112) --- Cargo.lock | 8 +++--- crates/primitives/src/compression/mod.rs | 31 +++++++++++++++++++-- crates/primitives/src/transaction/mod.rs | 34 +++++++++++++++++------- 3 files changed, 57 insertions(+), 16 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e55e658dda6a6..b91d295048c62 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10330,18 +10330,18 @@ checksum = "a38c90d48152c236a3ab59271da4f4ae63d678c5d7ad6b7714d7cb9760be5e4b" [[package]] name = "thiserror" -version = "1.0.63" +version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" +checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.63" +version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" +checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3" dependencies = [ "proc-macro2", "quote", diff --git a/crates/primitives/src/compression/mod.rs b/crates/primitives/src/compression/mod.rs index 476f5d06b2adf..ecceafc206828 100644 --- a/crates/primitives/src/compression/mod.rs +++ b/crates/primitives/src/compression/mod.rs @@ -1,6 +1,5 @@ use alloc::vec::Vec; use core::cell::RefCell; -use std::thread_local; use zstd::bulk::{Compressor, Decompressor}; /// Compression/Decompression dictionary for `Receipt`. @@ -10,7 +9,8 @@ pub static TRANSACTION_DICTIONARY: &[u8] = include_bytes!("./transaction_diction // We use `thread_local` compressors and decompressors because dictionaries can be quite big, and // zstd-rs recommends to use one context/compressor per thread -thread_local! { +#[cfg(feature = "std")] +std::thread_local! { /// Thread Transaction compressor. pub static TRANSACTION_COMPRESSOR: RefCell> = RefCell::new( Compressor::with_dictionary(0, TRANSACTION_DICTIONARY) @@ -38,6 +38,33 @@ thread_local! { )); } +/// Fn creates tx [`Compressor`] +pub fn create_tx_compressor() -> Compressor<'static> { + Compressor::with_dictionary(0, RECEIPT_DICTIONARY).expect("Failed to instantiate tx compressor") +} + +/// Fn creates tx [`Decompressor`] +pub fn create_tx_decompressor() -> ReusableDecompressor { + ReusableDecompressor::new( + Decompressor::with_dictionary(TRANSACTION_DICTIONARY) + .expect("Failed to instantiate tx decompressor"), + ) +} + +/// Fn creates receipt [`Compressor`] +pub fn create_receipt_compressor() -> Compressor<'static> { + Compressor::with_dictionary(0, RECEIPT_DICTIONARY) + .expect("Failed to instantiate receipt compressor") +} + +/// Fn creates receipt [`Decompressor`] +pub fn create_receipt_decompressor() -> ReusableDecompressor { + ReusableDecompressor::new( + Decompressor::with_dictionary(RECEIPT_DICTIONARY) + .expect("Failed to instantiate receipt decompressor"), + ) +} + /// Reusable decompressor that uses its own internal buffer. 
#[allow(missing_debug_implementations)] pub struct ReusableDecompressor { diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index ec1ddaa878107..8e2f2977dedcf 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -956,15 +956,20 @@ impl reth_codecs::Compact for TransactionSignedNoHash { let zstd_bit = self.transaction.input().len() >= 32; let tx_bits = if zstd_bit { - crate::compression::TRANSACTION_COMPRESSOR.with(|compressor| { - let mut compressor = compressor.borrow_mut(); - let mut tmp = Vec::with_capacity(256); + let mut tmp = Vec::with_capacity(256); + if cfg!(feature = "std") { + crate::compression::TRANSACTION_COMPRESSOR.with(|compressor| { + let mut compressor = compressor.borrow_mut(); + let tx_bits = self.transaction.to_compact(&mut tmp); + buf.put_slice(&compressor.compress(&tmp).expect("Failed to compress")); + tx_bits as u8 + }) + } else { + let mut compressor = crate::compression::create_tx_compressor(); let tx_bits = self.transaction.to_compact(&mut tmp); - buf.put_slice(&compressor.compress(&tmp).expect("Failed to compress")); - tx_bits as u8 - }) + } } else { self.transaction.to_compact(buf) as u8 }; @@ -984,17 +989,26 @@ impl reth_codecs::Compact for TransactionSignedNoHash { let zstd_bit = bitflags >> 3; let (transaction, buf) = if zstd_bit != 0 { - crate::compression::TRANSACTION_DECOMPRESSOR.with(|decompressor| { - let mut decompressor = decompressor.borrow_mut(); + if cfg!(feature = "std") { + crate::compression::TRANSACTION_DECOMPRESSOR.with(|decompressor| { + let mut decompressor = decompressor.borrow_mut(); - // TODO: enforce that zstd is only present at a "top" level type + // TODO: enforce that zstd is only present at a "top" level type + let transaction_type = (bitflags & 0b110) >> 1; + let (transaction, _) = + Transaction::from_compact(decompressor.decompress(buf), transaction_type); + + (transaction, buf) + }) + } else { + let mut decompressor = crate::compression::create_tx_decompressor(); let transaction_type = (bitflags & 0b110) >> 1; let (transaction, _) = Transaction::from_compact(decompressor.decompress(buf), transaction_type); (transaction, buf) - }) + } } else { let transaction_type = bitflags >> 1; Transaction::from_compact(buf, transaction_type) From 2e05ec008a85576c146afb8325cfc48e1785da10 Mon Sep 17 00:00:00 2001 From: nk_ysg Date: Thu, 26 Sep 2024 23:42:05 +0800 Subject: [PATCH 26/84] move op storage tests into new crate reth-optimism-storage (#11233) --- Cargo.lock | 11 ++++ Cargo.toml | 2 + crates/optimism/storage/Cargo.toml | 23 +++++++ crates/optimism/storage/src/lib.rs | 83 +++++++++++++++++++++++++ crates/storage/db-api/src/models/mod.rs | 75 +++------------------- 5 files changed, 128 insertions(+), 66 deletions(-) create mode 100644 crates/optimism/storage/Cargo.toml create mode 100644 crates/optimism/storage/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index b91d295048c62..27f4fa9b7cdb9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8195,6 +8195,17 @@ dependencies = [ "tracing", ] +[[package]] +name = "reth-optimism-storage" +version = "1.0.7" +dependencies = [ + "reth-codecs", + "reth-db-api", + "reth-primitives", + "reth-prune-types", + "reth-stages-types", +] + [[package]] name = "reth-payload-builder" version = "1.0.7" diff --git a/Cargo.toml b/Cargo.toml index 0de42a83ae822..4ef2b272db44a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -79,6 +79,7 @@ members = [ "crates/optimism/payload/", "crates/optimism/primitives/", 
"crates/optimism/rpc/", + "crates/optimism/storage", "crates/payload/basic/", "crates/payload/builder/", "crates/payload/primitives/", @@ -368,6 +369,7 @@ reth-optimism-forks = { path = "crates/optimism/hardforks" } reth-optimism-payload-builder = { path = "crates/optimism/payload" } reth-optimism-primitives = { path = "crates/optimism/primitives" } reth-optimism-rpc = { path = "crates/optimism/rpc" } +reth-optimism-storage = { path = "crates/optimism/storage" } reth-payload-builder = { path = "crates/payload/builder" } reth-payload-primitives = { path = "crates/payload/primitives" } reth-payload-validator = { path = "crates/payload/validator" } diff --git a/crates/optimism/storage/Cargo.toml b/crates/optimism/storage/Cargo.toml new file mode 100644 index 0000000000000..107b64db3de03 --- /dev/null +++ b/crates/optimism/storage/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "reth-optimism-storage" +version.workspace = true +edition.workspace = true +rust-version.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +[lints] +workspace = true + +[dependencies] +reth-primitives.workspace = true + +[dev-dependencies] +reth-codecs.workspace = true +reth-db-api.workspace = true +reth-prune-types.workspace = true +reth-stages-types.workspace = true + +[features] +optimism = ["reth-primitives/optimism"] \ No newline at end of file diff --git a/crates/optimism/storage/src/lib.rs b/crates/optimism/storage/src/lib.rs new file mode 100644 index 0000000000000..3d728f18743e4 --- /dev/null +++ b/crates/optimism/storage/src/lib.rs @@ -0,0 +1,83 @@ +//! Standalone crate for Optimism-Storage Reth. + +#![doc( + html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", + html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" +)] +#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] +// The `optimism` feature must be enabled to use this crate. 
+#![cfg(feature = "optimism")] + +#[cfg(test)] +mod tests { + use reth_codecs::{test_utils::UnusedBits, validate_bitflag_backwards_compat}; + use reth_db_api::models::{ + CompactClientVersion, CompactU256, CompactU64, StoredBlockBodyIndices, StoredBlockOmmers, + StoredBlockWithdrawals, + }; + use reth_primitives::{ + Account, Receipt, ReceiptWithBloom, Requests, SealedHeader, Withdrawals, + }; + use reth_prune_types::{PruneCheckpoint, PruneMode, PruneSegment}; + use reth_stages_types::{ + AccountHashingCheckpoint, CheckpointBlockRange, EntitiesCheckpoint, ExecutionCheckpoint, + HeadersCheckpoint, IndexHistoryCheckpoint, StageCheckpoint, StageUnitCheckpoint, + StorageHashingCheckpoint, + }; + + #[test] + fn test_ensure_backwards_compatibility() { + assert_eq!(Account::bitflag_encoded_bytes(), 2); + assert_eq!(AccountHashingCheckpoint::bitflag_encoded_bytes(), 1); + assert_eq!(CheckpointBlockRange::bitflag_encoded_bytes(), 1); + assert_eq!(CompactClientVersion::bitflag_encoded_bytes(), 0); + assert_eq!(CompactU256::bitflag_encoded_bytes(), 1); + assert_eq!(CompactU64::bitflag_encoded_bytes(), 1); + assert_eq!(EntitiesCheckpoint::bitflag_encoded_bytes(), 1); + assert_eq!(ExecutionCheckpoint::bitflag_encoded_bytes(), 0); + assert_eq!(HeadersCheckpoint::bitflag_encoded_bytes(), 0); + assert_eq!(IndexHistoryCheckpoint::bitflag_encoded_bytes(), 0); + assert_eq!(PruneCheckpoint::bitflag_encoded_bytes(), 1); + assert_eq!(PruneMode::bitflag_encoded_bytes(), 1); + assert_eq!(PruneSegment::bitflag_encoded_bytes(), 1); + assert_eq!(Receipt::bitflag_encoded_bytes(), 2); + assert_eq!(ReceiptWithBloom::bitflag_encoded_bytes(), 0); + assert_eq!(SealedHeader::bitflag_encoded_bytes(), 0); + assert_eq!(StageCheckpoint::bitflag_encoded_bytes(), 1); + assert_eq!(StageUnitCheckpoint::bitflag_encoded_bytes(), 1); + assert_eq!(StoredBlockBodyIndices::bitflag_encoded_bytes(), 1); + assert_eq!(StoredBlockOmmers::bitflag_encoded_bytes(), 0); + assert_eq!(StoredBlockWithdrawals::bitflag_encoded_bytes(), 0); + assert_eq!(StorageHashingCheckpoint::bitflag_encoded_bytes(), 1); + assert_eq!(Withdrawals::bitflag_encoded_bytes(), 0); + + // In case of failure, refer to the documentation of the + // [`validate_bitflag_backwards_compat`] macro for detailed instructions on handling + // it. 
+ validate_bitflag_backwards_compat!(Account, UnusedBits::NotZero); + validate_bitflag_backwards_compat!(AccountHashingCheckpoint, UnusedBits::NotZero); + validate_bitflag_backwards_compat!(CheckpointBlockRange, UnusedBits::Zero); + validate_bitflag_backwards_compat!(CompactClientVersion, UnusedBits::Zero); + validate_bitflag_backwards_compat!(CompactU256, UnusedBits::NotZero); + validate_bitflag_backwards_compat!(CompactU64, UnusedBits::NotZero); + validate_bitflag_backwards_compat!(EntitiesCheckpoint, UnusedBits::Zero); + validate_bitflag_backwards_compat!(ExecutionCheckpoint, UnusedBits::Zero); + validate_bitflag_backwards_compat!(HeadersCheckpoint, UnusedBits::Zero); + validate_bitflag_backwards_compat!(IndexHistoryCheckpoint, UnusedBits::Zero); + validate_bitflag_backwards_compat!(PruneCheckpoint, UnusedBits::NotZero); + validate_bitflag_backwards_compat!(PruneMode, UnusedBits::Zero); + validate_bitflag_backwards_compat!(PruneSegment, UnusedBits::Zero); + validate_bitflag_backwards_compat!(Receipt, UnusedBits::NotZero); + validate_bitflag_backwards_compat!(ReceiptWithBloom, UnusedBits::Zero); + validate_bitflag_backwards_compat!(SealedHeader, UnusedBits::Zero); + validate_bitflag_backwards_compat!(StageCheckpoint, UnusedBits::NotZero); + validate_bitflag_backwards_compat!(StageUnitCheckpoint, UnusedBits::Zero); + validate_bitflag_backwards_compat!(StoredBlockBodyIndices, UnusedBits::Zero); + validate_bitflag_backwards_compat!(StoredBlockOmmers, UnusedBits::Zero); + validate_bitflag_backwards_compat!(StoredBlockWithdrawals, UnusedBits::Zero); + validate_bitflag_backwards_compat!(StorageHashingCheckpoint, UnusedBits::NotZero); + validate_bitflag_backwards_compat!(Withdrawals, UnusedBits::Zero); + validate_bitflag_backwards_compat!(Requests, UnusedBits::Zero); + } +} diff --git a/crates/storage/db-api/src/models/mod.rs b/crates/storage/db-api/src/models/mod.rs index 942b9b27af6dc..6e832a0314f45 100644 --- a/crates/storage/db-api/src/models/mod.rs +++ b/crates/storage/db-api/src/models/mod.rs @@ -305,16 +305,6 @@ add_wrapper_struct!((ClientVersion, CompactClientVersion)); #[cfg(test)] mod tests { - use super::*; - use reth_codecs::{test_utils::UnusedBits, validate_bitflag_backwards_compat}; - use reth_primitives::{Account, Receipt, ReceiptWithBloom, SealedHeader, Withdrawals}; - use reth_prune_types::{PruneCheckpoint, PruneMode, PruneSegment}; - use reth_stages_types::{ - AccountHashingCheckpoint, CheckpointBlockRange, EntitiesCheckpoint, ExecutionCheckpoint, - HeadersCheckpoint, IndexHistoryCheckpoint, StageCheckpoint, StageUnitCheckpoint, - StorageHashingCheckpoint, - }; - // each value in the database has an extra field named flags that encodes metadata about other // fields in the value, e.g. offset and length. 
// @@ -323,6 +313,15 @@ mod tests { #[cfg(not(feature = "optimism"))] #[test] fn test_ensure_backwards_compatibility() { + use super::*; + use reth_codecs::{test_utils::UnusedBits, validate_bitflag_backwards_compat}; + use reth_primitives::{Account, Receipt, ReceiptWithBloom, SealedHeader, Withdrawals}; + use reth_prune_types::{PruneCheckpoint, PruneMode, PruneSegment}; + use reth_stages_types::{ + AccountHashingCheckpoint, CheckpointBlockRange, EntitiesCheckpoint, + ExecutionCheckpoint, HeadersCheckpoint, IndexHistoryCheckpoint, StageCheckpoint, + StageUnitCheckpoint, StorageHashingCheckpoint, + }; assert_eq!(Account::bitflag_encoded_bytes(), 2); assert_eq!(AccountHashingCheckpoint::bitflag_encoded_bytes(), 1); assert_eq!(CheckpointBlockRange::bitflag_encoded_bytes(), 1); @@ -372,60 +371,4 @@ mod tests { validate_bitflag_backwards_compat!(Withdrawals, UnusedBits::Zero); validate_bitflag_backwards_compat!(Requests, UnusedBits::Zero); } - - #[cfg(feature = "optimism")] - #[test] - fn test_ensure_backwards_compatibility() { - assert_eq!(Account::bitflag_encoded_bytes(), 2); - assert_eq!(AccountHashingCheckpoint::bitflag_encoded_bytes(), 1); - assert_eq!(CheckpointBlockRange::bitflag_encoded_bytes(), 1); - assert_eq!(CompactClientVersion::bitflag_encoded_bytes(), 0); - assert_eq!(CompactU256::bitflag_encoded_bytes(), 1); - assert_eq!(CompactU64::bitflag_encoded_bytes(), 1); - assert_eq!(EntitiesCheckpoint::bitflag_encoded_bytes(), 1); - assert_eq!(ExecutionCheckpoint::bitflag_encoded_bytes(), 0); - assert_eq!(HeadersCheckpoint::bitflag_encoded_bytes(), 0); - assert_eq!(IndexHistoryCheckpoint::bitflag_encoded_bytes(), 0); - assert_eq!(PruneCheckpoint::bitflag_encoded_bytes(), 1); - assert_eq!(PruneMode::bitflag_encoded_bytes(), 1); - assert_eq!(PruneSegment::bitflag_encoded_bytes(), 1); - assert_eq!(Receipt::bitflag_encoded_bytes(), 2); - assert_eq!(ReceiptWithBloom::bitflag_encoded_bytes(), 0); - assert_eq!(SealedHeader::bitflag_encoded_bytes(), 0); - assert_eq!(StageCheckpoint::bitflag_encoded_bytes(), 1); - assert_eq!(StageUnitCheckpoint::bitflag_encoded_bytes(), 1); - assert_eq!(StoredBlockBodyIndices::bitflag_encoded_bytes(), 1); - assert_eq!(StoredBlockOmmers::bitflag_encoded_bytes(), 0); - assert_eq!(StoredBlockWithdrawals::bitflag_encoded_bytes(), 0); - assert_eq!(StorageHashingCheckpoint::bitflag_encoded_bytes(), 1); - assert_eq!(Withdrawals::bitflag_encoded_bytes(), 0); - - // In case of failure, refer to the documentation of the - // [`validate_bitflag_backwards_compat`] macro for detailed instructions on handling - // it. 
- validate_bitflag_backwards_compat!(Account, UnusedBits::NotZero); - validate_bitflag_backwards_compat!(AccountHashingCheckpoint, UnusedBits::NotZero); - validate_bitflag_backwards_compat!(CheckpointBlockRange, UnusedBits::Zero); - validate_bitflag_backwards_compat!(CompactClientVersion, UnusedBits::Zero); - validate_bitflag_backwards_compat!(CompactU256, UnusedBits::NotZero); - validate_bitflag_backwards_compat!(CompactU64, UnusedBits::NotZero); - validate_bitflag_backwards_compat!(EntitiesCheckpoint, UnusedBits::Zero); - validate_bitflag_backwards_compat!(ExecutionCheckpoint, UnusedBits::Zero); - validate_bitflag_backwards_compat!(HeadersCheckpoint, UnusedBits::Zero); - validate_bitflag_backwards_compat!(IndexHistoryCheckpoint, UnusedBits::Zero); - validate_bitflag_backwards_compat!(PruneCheckpoint, UnusedBits::NotZero); - validate_bitflag_backwards_compat!(PruneMode, UnusedBits::Zero); - validate_bitflag_backwards_compat!(PruneSegment, UnusedBits::Zero); - validate_bitflag_backwards_compat!(Receipt, UnusedBits::NotZero); - validate_bitflag_backwards_compat!(ReceiptWithBloom, UnusedBits::Zero); - validate_bitflag_backwards_compat!(SealedHeader, UnusedBits::Zero); - validate_bitflag_backwards_compat!(StageCheckpoint, UnusedBits::NotZero); - validate_bitflag_backwards_compat!(StageUnitCheckpoint, UnusedBits::Zero); - validate_bitflag_backwards_compat!(StoredBlockBodyIndices, UnusedBits::Zero); - validate_bitflag_backwards_compat!(StoredBlockOmmers, UnusedBits::Zero); - validate_bitflag_backwards_compat!(StoredBlockWithdrawals, UnusedBits::Zero); - validate_bitflag_backwards_compat!(StorageHashingCheckpoint, UnusedBits::NotZero); - validate_bitflag_backwards_compat!(Withdrawals, UnusedBits::Zero); - validate_bitflag_backwards_compat!(Requests, UnusedBits::Zero); - } } From ad64de896230c46b25046e71cfc90b8a7b95a3af Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Thu, 26 Sep 2024 18:18:56 +0200 Subject: [PATCH 27/84] primitives: rm more `alloy_primitives` reexports (#11250) Co-authored-by: Matthias Seitz --- crates/node/builder/src/launch/mod.rs | 2 +- crates/primitives/benches/recover_ecdsa_crit.rs | 3 ++- crates/primitives/src/block.rs | 2 +- crates/primitives/src/lib.rs | 4 +--- crates/primitives/src/proofs.rs | 4 ++-- crates/primitives/src/receipt.rs | 3 +-- crates/primitives/src/transaction/mod.rs | 2 +- crates/primitives/src/transaction/tx_type.rs | 3 +-- crates/revm/src/test_utils.rs | 2 +- crates/stages/stages/src/stages/bodies.rs | 7 ++++--- crates/stages/stages/src/stages/execution.rs | 6 ++---- crates/stages/stages/src/stages/mod.rs | 6 ++---- crates/stages/stages/src/stages/sender_recovery.rs | 4 ++-- crates/stages/stages/src/stages/tx_lookup.rs | 8 ++++---- crates/stages/stages/src/test_utils/test_db.rs | 6 +++--- .../storage/provider/src/providers/static_file/mod.rs | 4 ++-- crates/storage/provider/src/test_utils/blocks.rs | 10 ++++++---- crates/trie/db/src/trie_cursor.rs | 2 +- crates/trie/db/tests/trie.rs | 4 ++-- 19 files changed, 39 insertions(+), 43 deletions(-) diff --git a/crates/node/builder/src/launch/mod.rs b/crates/node/builder/src/launch/mod.rs index 9c7d562d19d2e..db98ffacedeb0 100644 --- a/crates/node/builder/src/launch/mod.rs +++ b/crates/node/builder/src/launch/mod.rs @@ -11,6 +11,7 @@ pub use exex::ExExLauncher; use std::{future::Future, sync::Arc}; +use alloy_primitives::utils::format_ether; use alloy_rpc_types::engine::ClientVersionV1; use futures::{future::Either, stream, stream_select, StreamExt}; use 
reth_beacon_consensus::{ @@ -33,7 +34,6 @@ use reth_node_core::{ version::{CARGO_PKG_VERSION, CLIENT_CODE, NAME_CLIENT, VERGEN_GIT_SHA}, }; use reth_node_events::{cl::ConsensusLayerHealthEvents, node}; -use reth_primitives::format_ether; use reth_provider::providers::BlockchainProvider; use reth_rpc_engine_api::{capabilities::EngineCapabilities, EngineApi}; use reth_tasks::TaskExecutor; diff --git a/crates/primitives/benches/recover_ecdsa_crit.rs b/crates/primitives/benches/recover_ecdsa_crit.rs index e1e896dbbf8ff..8e8e279b2a4ae 100644 --- a/crates/primitives/benches/recover_ecdsa_crit.rs +++ b/crates/primitives/benches/recover_ecdsa_crit.rs @@ -1,8 +1,9 @@ #![allow(missing_docs)] +use alloy_primitives::hex_literal::hex; use alloy_rlp::Decodable; use criterion::{criterion_group, criterion_main, Criterion}; use pprof::criterion::{Output, PProfProfiler}; -use reth_primitives::{hex_literal::hex, TransactionSigned}; +use reth_primitives::TransactionSigned; /// Benchmarks the recovery of the public key from the ECDSA message using criterion. pub fn criterion_benchmark(c: &mut Criterion) { diff --git a/crates/primitives/src/block.rs b/crates/primitives/src/block.rs index a5357fcec9933..78a98f78ab2ab 100644 --- a/crates/primitives/src/block.rs +++ b/crates/primitives/src/block.rs @@ -709,8 +709,8 @@ impl<'a> arbitrary::Arbitrary<'a> for BlockBody { #[cfg(test)] mod tests { use super::{BlockNumberOrTag::*, *}; - use crate::hex_literal::hex; use alloy_eips::eip1898::HexStringMissingPrefixError; + use alloy_primitives::hex_literal::hex; use alloy_rlp::{Decodable, Encodable}; use std::str::FromStr; diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index b0564ed07d5ab..778230f5f870a 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -72,9 +72,7 @@ pub use transaction::{ pub use alloy_primitives::{ self, address, b256, bloom, bytes, bytes::{Buf, BufMut, BytesMut}, - hex, hex_literal, ruint, - utils::format_ether, - Bytes, StorageValue, TxHash, TxIndex, TxNumber, B128, B256, B512, B64, U128, U256, U64, + hex, Bytes, TxHash, B256, U256, U64, }; pub use reth_ethereum_forks::*; pub use revm_primitives::{self, JumpTable}; diff --git a/crates/primitives/src/proofs.rs b/crates/primitives/src/proofs.rs index 040ec365503e1..454524081f0b4 100644 --- a/crates/primitives/src/proofs.rs +++ b/crates/primitives/src/proofs.rs @@ -65,9 +65,9 @@ pub fn calculate_ommers_root(ommers: &[Header]) -> B256 { #[cfg(test)] mod tests { use super::*; - use crate::{constants::EMPTY_ROOT_HASH, hex_literal::hex, Block, U256}; + use crate::{constants::EMPTY_ROOT_HASH, Block, U256}; use alloy_genesis::GenesisAccount; - use alloy_primitives::{b256, Address}; + use alloy_primitives::{b256, hex_literal::hex, Address}; use alloy_rlp::Decodable; use reth_chainspec::{HOLESKY, MAINNET, SEPOLIA}; use reth_trie_common::root::{state_root_ref_unhashed, state_root_unhashed}; diff --git a/crates/primitives/src/receipt.rs b/crates/primitives/src/receipt.rs index 6b43e77e913a7..0b32e0899a126 100644 --- a/crates/primitives/src/receipt.rs +++ b/crates/primitives/src/receipt.rs @@ -501,8 +501,7 @@ impl<'a> Encodable for ReceiptWithBloomEncoder<'a> { #[cfg(test)] mod tests { use super::*; - use crate::hex_literal::hex; - use alloy_primitives::{address, b256, bytes}; + use alloy_primitives::{address, b256, bytes, hex_literal::hex}; // Test vector from: https://eips.ethereum.org/EIPS/eip-2481 #[test] diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index 
8e2f2977dedcf..d7ab9a22bc2b5 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -1921,7 +1921,7 @@ mod tests { #[test] fn decode_raw_tx_and_recover_signer() { - use crate::hex_literal::hex; + use alloy_primitives::hex_literal::hex; // transaction is from ropsten let hash: B256 = diff --git a/crates/primitives/src/transaction/tx_type.rs b/crates/primitives/src/transaction/tx_type.rs index 4017d8d4b9c44..14ac420e0a900 100644 --- a/crates/primitives/src/transaction/tx_type.rs +++ b/crates/primitives/src/transaction/tx_type.rs @@ -1,5 +1,4 @@ -use crate::U64; -use alloy_primitives::U8; +use alloy_primitives::{U64, U8}; use alloy_rlp::{Decodable, Encodable}; use serde::{Deserialize, Serialize}; diff --git a/crates/revm/src/test_utils.rs b/crates/revm/src/test_utils.rs index bbb45687353bc..3f89d99125bee 100644 --- a/crates/revm/src/test_utils.rs +++ b/crates/revm/src/test_utils.rs @@ -136,7 +136,7 @@ impl StateProvider for StateProviderTest { &self, account: Address, storage_key: StorageKey, - ) -> ProviderResult> { + ) -> ProviderResult> { Ok(self.accounts.get(&account).and_then(|(storage, _)| storage.get(&storage_key).copied())) } diff --git a/crates/stages/stages/src/stages/bodies.rs b/crates/stages/stages/src/stages/bodies.rs index cc30fc90a11de..248e2ff52748d 100644 --- a/crates/stages/stages/src/stages/bodies.rs +++ b/crates/stages/stages/src/stages/bodies.rs @@ -6,6 +6,7 @@ use std::{ use futures_util::TryStreamExt; use tracing::*; +use alloy_primitives::TxNumber; use reth_db::tables; use reth_db_api::{ cursor::{DbCursorRO, DbCursorRW}, @@ -13,7 +14,7 @@ use reth_db_api::{ transaction::DbTxMut, }; use reth_network_p2p::bodies::{downloader::BodyDownloader, response::BlockResponse}; -use reth_primitives::{StaticFileSegment, TxNumber}; +use reth_primitives::StaticFileSegment; use reth_provider::{ providers::{StaticFileProvider, StaticFileWriter}, BlockReader, DBProvider, ProviderError, StaticFileProviderFactory, StatsReader, @@ -621,7 +622,7 @@ mod tests { UnwindStageTestRunner, }, }; - use alloy_primitives::{BlockHash, BlockNumber}; + use alloy_primitives::{BlockHash, BlockNumber, TxNumber}; use futures_util::Stream; use reth_db::{static_file::HeaderMask, tables}; use reth_db_api::{ @@ -637,7 +638,7 @@ mod tests { error::DownloadResult, }; use reth_primitives::{ - BlockBody, Header, SealedBlock, SealedHeader, StaticFileSegment, TxNumber, B256, + BlockBody, Header, SealedBlock, SealedHeader, StaticFileSegment, B256, }; use reth_provider::{ providers::StaticFileWriter, test_utils::MockNodeTypesWithDB, HeaderProvider, diff --git a/crates/stages/stages/src/stages/execution.rs b/crates/stages/stages/src/stages/execution.rs index 04cc1dd65923e..1305e6825451e 100644 --- a/crates/stages/stages/src/stages/execution.rs +++ b/crates/stages/stages/src/stages/execution.rs @@ -658,16 +658,14 @@ where mod tests { use super::*; use crate::test_utils::TestStageDB; - use alloy_primitives::{keccak256, Address}; + use alloy_primitives::{hex_literal::hex, keccak256, Address}; use alloy_rlp::Decodable; use assert_matches::assert_matches; use reth_chainspec::ChainSpecBuilder; use reth_db_api::{models::AccountBeforeTx, transaction::DbTxMut}; use reth_evm_ethereum::execute::EthExecutorProvider; use reth_execution_errors::BlockValidationError; - use reth_primitives::{ - address, hex_literal::hex, Account, Bytecode, SealedBlock, StorageEntry, B256, U256, - }; + use reth_primitives::{address, Account, Bytecode, SealedBlock, StorageEntry, B256, U256}; use 
reth_provider::{ test_utils::create_test_provider_factory, AccountReader, DatabaseProviderFactory, ReceiptProvider, StaticFileProviderFactory, diff --git a/crates/stages/stages/src/stages/mod.rs b/crates/stages/stages/src/stages/mod.rs index d3c5b7099d21c..6ae1b30e7e24d 100644 --- a/crates/stages/stages/src/stages/mod.rs +++ b/crates/stages/stages/src/stages/mod.rs @@ -42,7 +42,7 @@ use utils::*; mod tests { use super::*; use crate::test_utils::{StorageKind, TestStageDB}; - use alloy_primitives::{keccak256, BlockNumber}; + use alloy_primitives::{hex_literal::hex, keccak256, BlockNumber}; use alloy_rlp::Decodable; use reth_chainspec::ChainSpecBuilder; use reth_db::{ @@ -56,9 +56,7 @@ mod tests { }; use reth_evm_ethereum::execute::EthExecutorProvider; use reth_exex::ExExManagerHandle; - use reth_primitives::{ - address, hex_literal::hex, Account, Bytecode, SealedBlock, StaticFileSegment, B256, U256, - }; + use reth_primitives::{address, Account, Bytecode, SealedBlock, StaticFileSegment, B256, U256}; use reth_provider::{ providers::{StaticFileProvider, StaticFileWriter}, test_utils::MockNodeTypesWithDB, diff --git a/crates/stages/stages/src/stages/sender_recovery.rs b/crates/stages/stages/src/stages/sender_recovery.rs index 344a8ea848ee8..8ffbb2070aa12 100644 --- a/crates/stages/stages/src/stages/sender_recovery.rs +++ b/crates/stages/stages/src/stages/sender_recovery.rs @@ -1,4 +1,4 @@ -use alloy_primitives::Address; +use alloy_primitives::{Address, TxNumber}; use reth_config::config::SenderRecoveryConfig; use reth_consensus::ConsensusError; use reth_db::{static_file::TransactionMask, tables, RawValue}; @@ -7,7 +7,7 @@ use reth_db_api::{ transaction::{DbTx, DbTxMut}, DbTxUnwindExt, }; -use reth_primitives::{GotExpected, StaticFileSegment, TransactionSignedNoHash, TxNumber}; +use reth_primitives::{GotExpected, StaticFileSegment, TransactionSignedNoHash}; use reth_provider::{ BlockReader, DBProvider, HeaderProvider, ProviderError, PruneCheckpointReader, StaticFileProviderFactory, StatsReader, diff --git a/crates/stages/stages/src/stages/tx_lookup.rs b/crates/stages/stages/src/stages/tx_lookup.rs index 4e68cfe90b767..1744b933c2d33 100644 --- a/crates/stages/stages/src/stages/tx_lookup.rs +++ b/crates/stages/stages/src/stages/tx_lookup.rs @@ -1,3 +1,4 @@ +use alloy_primitives::{TxHash, TxNumber}; use num_traits::Zero; use reth_config::config::{EtlConfig, TransactionLookupConfig}; use reth_db::{tables, RawKey, RawValue}; @@ -6,7 +7,6 @@ use reth_db_api::{ transaction::{DbTx, DbTxMut}, }; use reth_etl::Collector; -use reth_primitives::{TxHash, TxNumber}; use reth_provider::{ BlockReader, DBProvider, PruneCheckpointReader, PruneCheckpointWriter, StaticFileProviderFactory, StatsReader, TransactionsProvider, TransactionsProviderExt, @@ -109,7 +109,7 @@ where } } if input.target_reached() { - return Ok(ExecOutput::done(input.checkpoint())) + return Ok(ExecOutput::done(input.checkpoint())); } // 500MB temporary files @@ -172,7 +172,7 @@ where "Transaction hashes inserted" ); - break + break; } } @@ -199,7 +199,7 @@ where let mut rev_walker = body_cursor.walk_back(Some(*range.end()))?; while let Some((number, body)) = rev_walker.next().transpose()? 
{ if number <= unwind_to { - break + break; } // Delete all transactions that belong to this block diff --git a/crates/stages/stages/src/test_utils/test_db.rs b/crates/stages/stages/src/test_utils/test_db.rs index 89d74d9d4092a..7607b1d3142c5 100644 --- a/crates/stages/stages/src/test_utils/test_db.rs +++ b/crates/stages/stages/src/test_utils/test_db.rs @@ -1,4 +1,4 @@ -use alloy_primitives::{keccak256, Address, BlockNumber}; +use alloy_primitives::{keccak256, Address, BlockNumber, TxNumber}; use reth_chainspec::MAINNET; use reth_db::{ tables, @@ -15,8 +15,8 @@ use reth_db_api::{ DatabaseError as DbError, }; use reth_primitives::{ - Account, Receipt, SealedBlock, SealedHeader, StaticFileSegment, StorageEntry, TxHash, TxNumber, - B256, U256, + Account, Receipt, SealedBlock, SealedHeader, StaticFileSegment, StorageEntry, TxHash, B256, + U256, }; use reth_provider::{ providers::{StaticFileProvider, StaticFileProviderRWRefMut, StaticFileWriter}, diff --git a/crates/storage/provider/src/providers/static_file/mod.rs b/crates/storage/provider/src/providers/static_file/mod.rs index 04f032b0fef42..45b7816af02a3 100644 --- a/crates/storage/provider/src/providers/static_file/mod.rs +++ b/crates/storage/provider/src/providers/static_file/mod.rs @@ -56,7 +56,7 @@ impl Deref for LoadedJar { mod tests { use super::*; use crate::{test_utils::create_test_provider_factory, HeaderProvider}; - use alloy_primitives::{BlockHash, B256, U256}; + use alloy_primitives::{BlockHash, TxNumber, B256, U256}; use rand::seq::SliceRandom; use reth_db::{ test_utils::create_test_static_files_dir, CanonicalHeaders, HeaderNumbers, @@ -65,7 +65,7 @@ mod tests { use reth_db_api::transaction::DbTxMut; use reth_primitives::{ static_file::{find_fixed_range, SegmentRangeInclusive, DEFAULT_BLOCKS_PER_STATIC_FILE}, - Header, Receipt, TransactionSignedNoHash, TxNumber, + Header, Receipt, TransactionSignedNoHash, }; use reth_storage_api::{ReceiptProvider, TransactionsProvider}; use reth_testing_utils::generators::{self, random_header_range}; diff --git a/crates/storage/provider/src/test_utils/blocks.rs b/crates/storage/provider/src/test_utils/blocks.rs index 09daf998731a6..32033bcb2803e 100644 --- a/crates/storage/provider/src/test_utils/blocks.rs +++ b/crates/storage/provider/src/test_utils/blocks.rs @@ -1,14 +1,16 @@ //! 
Dummy blocks and data for tests use crate::{DatabaseProviderRW, ExecutionOutcome}; use alloy_consensus::TxLegacy; -use alloy_primitives::{map::HashMap, Address, BlockNumber, Log, Parity, Sealable, TxKind}; +use alloy_primitives::{ + hex_literal::hex, map::HashMap, Address, BlockNumber, Log, Parity, Sealable, TxKind, +}; use once_cell::sync::Lazy; use reth_db::tables; use reth_db_api::{database::Database, models::StoredBlockBodyIndices}; use reth_primitives::{ - alloy_primitives, b256, hex_literal::hex, Account, BlockBody, Bytes, Header, Receipt, - SealedBlock, SealedBlockWithSenders, SealedHeader, Signature, Transaction, TransactionSigned, - TxType, Withdrawal, Withdrawals, B256, U256, + alloy_primitives, b256, Account, BlockBody, Bytes, Header, Receipt, SealedBlock, + SealedBlockWithSenders, SealedHeader, Signature, Transaction, TransactionSigned, TxType, + Withdrawal, Withdrawals, B256, U256, }; use reth_trie::root::{state_root_unhashed, storage_root_unhashed}; use revm::{db::BundleState, primitives::AccountInfo}; diff --git a/crates/trie/db/src/trie_cursor.rs b/crates/trie/db/src/trie_cursor.rs index 124b8ccb20ca0..601100b3faee3 100644 --- a/crates/trie/db/src/trie_cursor.rs +++ b/crates/trie/db/src/trie_cursor.rs @@ -209,8 +209,8 @@ where #[cfg(test)] mod tests { use super::*; + use alloy_primitives::hex_literal::hex; use reth_db_api::{cursor::DbCursorRW, transaction::DbTxMut}; - use reth_primitives::hex_literal::hex; use reth_provider::test_utils::create_test_provider_factory; #[test] diff --git a/crates/trie/db/tests/trie.rs b/crates/trie/db/tests/trie.rs index f7413c64509f9..59fffec58d06d 100644 --- a/crates/trie/db/tests/trie.rs +++ b/crates/trie/db/tests/trie.rs @@ -1,6 +1,6 @@ #![allow(missing_docs)] -use alloy_primitives::{keccak256, Address, B256, U256}; +use alloy_primitives::{hex_literal::hex, keccak256, Address, B256, U256}; use proptest::{prelude::ProptestConfig, proptest}; use proptest_arbitrary_interop::arb; use reth_db::{tables, test_utils::TempDatabase, DatabaseEnv}; @@ -8,7 +8,7 @@ use reth_db_api::{ cursor::{DbCursorRO, DbCursorRW, DbDupCursorRO}, transaction::DbTxMut, }; -use reth_primitives::{constants::EMPTY_ROOT_HASH, hex_literal::hex, Account, StorageEntry}; +use reth_primitives::{constants::EMPTY_ROOT_HASH, Account, StorageEntry}; use reth_provider::{ test_utils::create_test_provider_factory, DatabaseProviderRW, StorageTrieWriter, TrieWriter, }; From f4cbfbcd79bbceaae21f596c08d400b8f37c7f5d Mon Sep 17 00:00:00 2001 From: Dan Cline <6798349+Rjected@users.noreply.github.com> Date: Thu, 26 Sep 2024 13:14:44 -0400 Subject: [PATCH 28/84] feat(tree): introduce reorg count metrics in new engine (#11226) --- crates/engine/tree/src/tree/mod.rs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/crates/engine/tree/src/tree/mod.rs b/crates/engine/tree/src/tree/mod.rs index 4ffd2031f60be..c6b6d6e1287ff 100644 --- a/crates/engine/tree/src/tree/mod.rs +++ b/crates/engine/tree/src/tree/mod.rs @@ -1952,6 +1952,7 @@ where let old_first = old.first().map(|first| first.block.num_hash()); trace!(target: "engine::tree", ?new_first, ?old_first, "Reorg detected, new and old first blocks"); + self.update_reorg_metrics(old.len()); self.reinsert_reorged_blocks(new.clone()); self.reinsert_reorged_blocks(old.clone()); } @@ -1973,6 +1974,12 @@ where )); } + /// This updates metrics based on the given reorg length. 
+ fn update_reorg_metrics(&self, old_chain_length: usize) { + self.metrics.tree.reorgs.increment(1); + self.metrics.tree.latest_reorg_depth.set(old_chain_length as f64); + } + /// This reinserts any blocks in the new chain that do not already exist in the tree fn reinsert_reorged_blocks(&mut self, new_chain: Vec) { for block in new_chain { From 77992e3254737028e5d06781f887ad4ab3b42401 Mon Sep 17 00:00:00 2001 From: Roman Krasiuk Date: Thu, 26 Sep 2024 20:30:48 +0200 Subject: [PATCH 29/84] deps: `alloy-trie@0.6.0` (#11260) --- Cargo.lock | 5 +-- Cargo.toml | 2 +- crates/evm/execution-errors/src/trie.rs | 3 ++ crates/trie/common/src/proofs.rs | 19 +++++---- crates/trie/trie/src/proof.rs | 4 +- crates/trie/trie/src/witness.rs | 56 +++++++++++++++---------- 6 files changed, 53 insertions(+), 36 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 27f4fa9b7cdb9..103da9225540b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -734,16 +734,15 @@ dependencies = [ [[package]] name = "alloy-trie" -version = "0.5.3" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a46c9c4fdccda7982e7928904bd85fe235a0404ee3d7e197fff13d61eac8b4f" +checksum = "e9703ce68b97f8faae6f7739d1e003fc97621b856953cbcdbb2b515743f23288" dependencies = [ "alloy-primitives", "alloy-rlp", "arbitrary", "derive_arbitrary", "derive_more", - "hashbrown 0.14.5", "nybbles", "proptest", "proptest-derive", diff --git a/Cargo.toml b/Cargo.toml index 4ef2b272db44a..0aae5f87355e0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -426,7 +426,7 @@ alloy-dyn-abi = "0.8.0" alloy-primitives = { version = "0.8.4", default-features = false } alloy-rlp = "0.3.4" alloy-sol-types = "0.8.0" -alloy-trie = { version = "0.5", default-features = false } +alloy-trie = { version = "0.6", default-features = false } alloy-consensus = { version = "0.3.6", default-features = false } alloy-eips = { version = "0.3.6", default-features = false } diff --git a/crates/evm/execution-errors/src/trie.rs b/crates/evm/execution-errors/src/trie.rs index 306cd6750a9a6..c85819ee74def 100644 --- a/crates/evm/execution-errors/src/trie.rs +++ b/crates/evm/execution-errors/src/trie.rs @@ -101,6 +101,9 @@ pub enum TrieWitnessError { /// Missing target node. #[display("target node missing from proof {_0:?}")] MissingTargetNode(Nibbles), + /// Unexpected empty root. + #[display("unexpected empty root: {_0:?}")] + UnexpectedEmptyRoot(Nibbles), } impl From for ProviderError { diff --git a/crates/trie/common/src/proofs.rs b/crates/trie/common/src/proofs.rs index df32b1cb9f6a9..b35edd96d560c 100644 --- a/crates/trie/common/src/proofs.rs +++ b/crates/trie/common/src/proofs.rs @@ -5,12 +5,13 @@ use alloy_primitives::{keccak256, Address, Bytes, B256, U256}; use alloy_rlp::{encode_fixed_size, Decodable}; use alloy_trie::{ nodes::TrieNode, - proof::{verify_proof, ProofVerificationError}, + proof::{verify_proof, ProofNodes, ProofVerificationError}, EMPTY_ROOT_HASH, }; +use itertools::Itertools; use reth_primitives_traits::{constants::KECCAK_EMPTY, Account}; use serde::{Deserialize, Serialize}; -use std::collections::{BTreeMap, HashMap}; +use std::collections::HashMap; /// The state multiproof of target accounts and multiproofs of their storage tries. /// Multiproof is effectively a state subtrie that only contains the nodes @@ -18,7 +19,7 @@ use std::collections::{BTreeMap, HashMap}; #[derive(Clone, Default, Debug)] pub struct MultiProof { /// State trie multiproof for requested accounts. 
- pub account_subtree: BTreeMap, + pub account_subtree: ProofNodes, /// Storage trie multiproofs. pub storages: HashMap, } @@ -36,8 +37,8 @@ impl MultiProof { // Retrieve the account proof. let proof = self .account_subtree - .iter() - .filter(|(path, _)| nibbles.starts_with(path)) + .matching_nodes_iter(&nibbles) + .sorted_by(|a, b| a.0.cmp(b.0)) .map(|(_, node)| node.clone()) .collect::>(); @@ -82,12 +83,12 @@ pub struct StorageMultiProof { /// Storage trie root. pub root: B256, /// Storage multiproof for requested slots. - pub subtree: BTreeMap, + pub subtree: ProofNodes, } impl Default for StorageMultiProof { fn default() -> Self { - Self { root: EMPTY_ROOT_HASH, subtree: BTreeMap::default() } + Self { root: EMPTY_ROOT_HASH, subtree: Default::default() } } } @@ -99,8 +100,8 @@ impl StorageMultiProof { // Retrieve the storage proof. let proof = self .subtree - .iter() - .filter(|(path, _)| nibbles.starts_with(path)) + .matching_nodes_iter(&nibbles) + .sorted_by(|a, b| a.0.cmp(b.0)) .map(|(_, node)| node.clone()) .collect::>(); diff --git a/crates/trie/trie/src/proof.rs b/crates/trie/trie/src/proof.rs index 69b648ba001dc..e3bdccafefd29 100644 --- a/crates/trie/trie/src/proof.rs +++ b/crates/trie/trie/src/proof.rs @@ -136,7 +136,7 @@ where } } let _ = hash_builder.root(); - Ok(MultiProof { account_subtree: hash_builder.take_proofs(), storages }) + Ok(MultiProof { account_subtree: hash_builder.take_proof_nodes(), storages }) } /// Generate a storage multiproof according to specified targets. @@ -181,6 +181,6 @@ where } let root = hash_builder.root(); - Ok(StorageMultiProof { root, subtree: hash_builder.take_proofs() }) + Ok(StorageMultiProof { root, subtree: hash_builder.take_proof_nodes() }) } } diff --git a/crates/trie/trie/src/witness.rs b/crates/trie/trie/src/witness.rs index 1f521ca7db5f4..61576aabe36e5 100644 --- a/crates/trie/trie/src/witness.rs +++ b/crates/trie/trie/src/witness.rs @@ -10,8 +10,8 @@ use alloy_primitives::{ Bytes, B256, }; use alloy_rlp::{BufMut, Decodable, Encodable}; -use itertools::Either; -use reth_execution_errors::{StateProofError, TrieWitnessError}; +use itertools::{Either, Itertools}; +use reth_execution_errors::TrieWitnessError; use reth_primitives::constants::EMPTY_ROOT_HASH; use reth_trie_common::{ BranchNode, HashBuilder, Nibbles, TrieAccount, TrieNode, CHILD_INDEX_RANGE, @@ -120,24 +120,36 @@ where None }; let key = Nibbles::unpack(hashed_address); - let proof = account_multiproof.account_subtree.iter().filter(|e| key.starts_with(e.0)); - account_trie_nodes.extend(self.target_nodes(key.clone(), value, proof)?); + account_trie_nodes.extend( + self.target_nodes( + key.clone(), + value, + account_multiproof + .account_subtree + .matching_nodes_iter(&key) + .sorted_by(|a, b| a.0.cmp(b.0)), + )?, + ); // Gather and record storage trie nodes for this account. 
let mut storage_trie_nodes = BTreeMap::default(); let storage = state.storages.get(&hashed_address); for hashed_slot in hashed_slots { - let slot_key = Nibbles::unpack(hashed_slot); + let slot_nibbles = Nibbles::unpack(hashed_slot); let slot_value = storage .and_then(|s| s.storage.get(&hashed_slot)) .filter(|v| !v.is_zero()) .map(|v| alloy_rlp::encode_fixed_size(v).to_vec()); - let proof = storage_multiproof.subtree.iter().filter(|e| slot_key.starts_with(e.0)); - storage_trie_nodes.extend(self.target_nodes( - slot_key.clone(), - slot_value, - proof, - )?); + storage_trie_nodes.extend( + self.target_nodes( + slot_nibbles.clone(), + slot_value, + storage_multiproof + .subtree + .matching_nodes_iter(&slot_nibbles) + .sorted_by(|a, b| a.0.cmp(b.0)), + )?, + ); } Self::next_root_from_proofs(storage_trie_nodes, |key: Nibbles| { @@ -145,7 +157,7 @@ where let mut padded_key = key.pack(); padded_key.resize(32, 0); let target_key = B256::from_slice(&padded_key); - let mut proof = Proof::new( + let proof = Proof::new( self.trie_cursor_factory.clone(), self.hashed_cursor_factory.clone(), ) @@ -155,9 +167,9 @@ where // The subtree only contains the proof for a single target. let node = - proof.subtree.remove(&key).ok_or(TrieWitnessError::MissingTargetNode(key))?; + proof.subtree.get(&key).ok_or(TrieWitnessError::MissingTargetNode(key))?; self.witness.insert(keccak256(node.as_ref()), node.clone()); // record in witness - Ok(node) + Ok(node.clone()) })?; } @@ -165,19 +177,17 @@ where // Right pad the target with 0s. let mut padded_key = key.pack(); padded_key.resize(32, 0); - let mut proof = + let proof = Proof::new(self.trie_cursor_factory.clone(), self.hashed_cursor_factory.clone()) .with_prefix_sets_mut(self.prefix_sets.clone()) .with_target((B256::from_slice(&padded_key), HashSet::default())) .multiproof()?; // The subtree only contains the proof for a single target. - let node = proof - .account_subtree - .remove(&key) - .ok_or(TrieWitnessError::MissingTargetNode(key))?; + let node = + proof.account_subtree.get(&key).ok_or(TrieWitnessError::MissingTargetNode(key))?; self.witness.insert(keccak256(node.as_ref()), node.clone()); // record in witness - Ok(node) + Ok(node.clone()) })?; Ok(self.witness) @@ -190,7 +200,7 @@ where key: Nibbles, value: Option>, proof: impl IntoIterator, - ) -> Result>>, StateProofError> { + ) -> Result>>, TrieWitnessError> { let mut trie_nodes = BTreeMap::default(); for (path, encoded) in proof { // Record the node in witness. 
@@ -216,6 +226,7 @@ where trie_nodes.insert(next_path.clone(), Either::Right(leaf.value.clone())); } } + TrieNode::EmptyRoot => return Err(TrieWitnessError::UnexpectedEmptyRoot(next_path)), }; } @@ -273,6 +284,9 @@ where TrieNode::Extension(ext) => { path.extend_from_slice(&ext.key); } + TrieNode::EmptyRoot => { + return Err(TrieWitnessError::UnexpectedEmptyRoot(path)) + } } } } From 13a3c2c8cc827c183ffa9bbb2dcee649a12f5641 Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Thu, 26 Sep 2024 22:04:36 +0100 Subject: [PATCH 30/84] chore(exex): remove unneeded code (#11267) --- crates/exex/exex/src/wal/mod.rs | 112 +--------------------------- crates/exex/exex/src/wal/storage.rs | 18 ----- 2 files changed, 1 insertion(+), 129 deletions(-) diff --git a/crates/exex/exex/src/wal/mod.rs b/crates/exex/exex/src/wal/mod.rs index 0b699883ead32..163d21d1bde7b 100644 --- a/crates/exex/exex/src/wal/mod.rs +++ b/crates/exex/exex/src/wal/mod.rs @@ -19,11 +19,7 @@ use reth_tracing::tracing::{debug, instrument}; /// /// The expected mode of operation is as follows: /// 1. On every new canonical chain notification, call [`Wal::commit`]. -/// 2. When ExEx is on a wrong fork, rollback the WAL using [`Wal::rollback`]. The caller is -/// expected to create reverts from the removed notifications and backfill the blocks between the -/// returned block and the given rollback block. After that, commit new notifications as usual -/// with [`Wal::commit`]. -/// 3. When the chain is finalized, call [`Wal::finalize`] to prevent the infinite growth of the +/// 2. When the chain is finalized, call [`Wal::finalize`] to prevent the infinite growth of the /// WAL. #[derive(Debug)] pub struct Wal { @@ -81,79 +77,6 @@ impl Wal { Ok(()) } - /// Rollbacks the WAL to the given block, inclusive. - /// - /// 1. Walks the WAL from the end and searches for the first notification where committed chain - /// contains a block with the same number and hash as `to_block`. - /// 2. If the notification is found, truncates the WAL. It means that if the found notification - /// contains both given block and blocks before it, the whole notification will be truncated. - /// - /// # Returns - /// - /// 1. The block number and hash of the lowest removed block. - /// 2. The notifications that were removed. - #[instrument(target = "exex::wal", skip(self))] - pub fn rollback( - &mut self, - to_block: BlockNumHash, - ) -> eyre::Result)>> { - // First, pop items from the back of the cache until we find the notification with the - // specified block. When found, save the file ID of that notification. 
- let mut remove_from_file_id = None; - let mut remove_to_file_id = None; - let mut lowest_removed_block = None; - while let Some((file_id, block)) = self.block_cache.pop_back() { - debug!(?file_id, ?block, "Popped back block from the block cache"); - if block.action.is_commit() && block.block.number == to_block.number { - debug!( - ?file_id, - ?block, - ?remove_from_file_id, - ?lowest_removed_block, - "Found the requested block" - ); - - if block.block.hash != to_block.hash { - eyre::bail!("block hash mismatch in WAL") - } - - remove_from_file_id = Some(file_id); - - let notification = self.storage.read_notification(file_id)?; - lowest_removed_block = notification - .committed_chain() - .as_ref() - .map(|chain| chain.first()) - .map(|block| (block.number, block.hash()).into()); - - break - } - - remove_from_file_id = Some(file_id); - remove_to_file_id.get_or_insert(file_id); - } - - // If the specified block is still not found, we can't do anything and just return. The - // cache was empty. - let Some((remove_from_file_id, remove_to_file_id)) = - remove_from_file_id.zip(remove_to_file_id) - else { - debug!("No blocks were rolled back"); - return Ok(None) - }; - - // Remove the rest of the block cache entries for the file ID that we found. - self.block_cache.remove_notification(remove_from_file_id); - debug!(?remove_from_file_id, "Block cache was rolled back"); - - // Remove notifications from the storage. - let removed_notifications = - self.storage.take_notifications(remove_from_file_id..=remove_to_file_id)?; - debug!(removed_notifications = ?removed_notifications.len(), "Storage was rolled back"); - - Ok(Some((lowest_removed_block.expect("qed"), removed_notifications))) - } - /// Finalizes the WAL to the given block, inclusive. /// /// 1. Finds a notification with first unfinalized block (first notification containing a @@ -372,39 +295,6 @@ mod tests { vec![committed_notification_1.clone(), reverted_notification.clone()] ); - // Now, rollback to block 1 and verify that both the block cache and the storage are - // empty. We expect the rollback to delete the first notification (commit block 0, 1), - // because we can't delete blocks partly from the notification, and also the second - // notification (revert block 1). Additionally, check that the block that the rolled - // back to is the block with number 0. 
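With `rollback` and `take_notifications` removed above, the WAL is only ever truncated from the front, through `finalize`, and the rule in its doc comment is: drop whole notifications while all of their committed blocks are at or below the finalized block, and stop at the first notification that still contains an unfinalized block. A toy model of that rule, with each notification reduced to just its committed block-number range (the real code walks the block cache and removes per-notification files instead):

```rust
use std::collections::VecDeque;

/// Toy WAL: each entry is the committed block range of one notification.
struct ToyWal {
    notifications: VecDeque<std::ops::RangeInclusive<u64>>,
}

impl ToyWal {
    /// Drops whole notifications from the front while every committed block in
    /// them is at or below `to_block`. A notification that mixes finalized and
    /// unfinalized blocks is kept intact.
    fn finalize(&mut self, to_block: u64) {
        while let Some(range) = self.notifications.front() {
            if *range.end() <= to_block {
                self.notifications.pop_front();
            } else {
                break;
            }
        }
    }
}

fn main() {
    let mut wal = ToyWal { notifications: VecDeque::from([0..=1, 2..=3, 4..=5]) };

    wal.finalize(3);
    assert_eq!(wal.notifications, VecDeque::from([4..=5]));

    // Finalizing in the middle of a notification keeps that notification whole.
    wal.finalize(4);
    assert_eq!(wal.notifications, VecDeque::from([4..=5]));
    println!("remaining notifications: {:?}", wal.notifications);
}
```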
- let rollback_result = wal.rollback((blocks[1].number, blocks[1].hash()).into())?; - assert_eq!(wal.block_cache.iter().collect::>(), vec![]); - assert_eq!(read_notifications(&wal)?, vec![]); - assert_eq!( - rollback_result, - Some(( - (blocks[0].number, blocks[0].hash()).into(), - vec![committed_notification_1.clone(), reverted_notification.clone()] - )) - ); - - // Commit notifications 1 and 2 again - wal.commit(&committed_notification_1)?; - assert_eq!( - wal.block_cache.iter().collect::>(), - [committed_notification_1_cache.clone()].concat() - ); - assert_eq!(read_notifications(&wal)?, vec![committed_notification_1.clone()]); - wal.commit(&reverted_notification)?; - assert_eq!( - wal.block_cache.iter().collect::>(), - [committed_notification_1_cache.clone(), reverted_notification_cache.clone()].concat() - ); - assert_eq!( - read_notifications(&wal)?, - vec![committed_notification_1.clone(), reverted_notification.clone()] - ); - // Third notification (commit block 1, 2) wal.commit(&committed_notification_2)?; let file_id = 2; diff --git a/crates/exex/exex/src/wal/storage.rs b/crates/exex/exex/src/wal/storage.rs index 766d70b072749..8d4e008b44661 100644 --- a/crates/exex/exex/src/wal/storage.rs +++ b/crates/exex/exex/src/wal/storage.rs @@ -81,24 +81,6 @@ impl Storage { Ok(range.count()) } - /// Removes notifications from the storage according to the given range. - /// - /// # Returns - /// - /// Notifications that were removed. - pub(super) fn take_notifications( - &self, - range: RangeInclusive, - ) -> eyre::Result> { - let notifications = self.iter_notifications(range).collect::>>()?; - - for (id, _) in ¬ifications { - self.remove_notification(*id); - } - - Ok(notifications.into_iter().map(|(_, notification)| notification).collect()) - } - pub(super) fn iter_notifications( &self, range: RangeInclusive, From da6b1e7c64417f346b3d9e0d36d2ccee2f13425e Mon Sep 17 00:00:00 2001 From: joshieDo <93316087+joshieDo@users.noreply.github.com> Date: Thu, 26 Sep 2024 23:23:09 +0200 Subject: [PATCH 31/84] feat(exex): write notification files atomically (#11264) --- crates/exex/exex/src/wal/storage.rs | 26 +++------- crates/fs-util/src/lib.rs | 75 ++++++++++++++++++++++++++++- crates/storage/nippy-jar/src/lib.rs | 34 ++----------- 3 files changed, 83 insertions(+), 52 deletions(-) diff --git a/crates/exex/exex/src/wal/storage.rs b/crates/exex/exex/src/wal/storage.rs index 8d4e008b44661..ad2307361d0cb 100644 --- a/crates/exex/exex/src/wal/storage.rs +++ b/crates/exex/exex/src/wal/storage.rs @@ -1,6 +1,5 @@ use std::{ fs::File, - io::{Read, Write}, ops::RangeInclusive, path::{Path, PathBuf}, }; @@ -95,7 +94,8 @@ impl Storage { debug!(?file_path, "Reading notification from WAL"); let mut file = File::open(&file_path)?; - read_notification(&mut file) + // TODO(alexey): use rmp-serde when Alloy and Reth serde issues are resolved + Ok(serde_json::from_reader(&mut file)?) } /// Writes the notification to the file with the given id. @@ -108,27 +108,13 @@ impl Storage { let file_path = self.file_path(file_id); debug!(?file_path, "Writing notification to WAL"); - let mut file = File::create_new(&file_path)?; - write_notification(&mut file, notification)?; - - Ok(()) + Ok(reth_fs_util::atomic_write_file(&file_path, |file| { + // TODO(alexey): use rmp-serde when Alloy and Reth serde issues are resolved + serde_json::to_writer(file, notification) + })?) 
} } -// TODO(alexey): use rmp-serde when Alloy and Reth serde issues are resolved - -fn write_notification(mut w: &mut impl Write, notification: &ExExNotification) -> eyre::Result<()> { - // rmp_serde::encode::write(w, notification)?; - serde_json::to_writer(&mut w, notification)?; - w.flush()?; - Ok(()) -} - -fn read_notification(r: &mut impl Read) -> eyre::Result { - // Ok(rmp_serde::from_read(r)?) - Ok(serde_json::from_reader(r)?) -} - #[cfg(test)] mod tests { use std::sync::Arc; diff --git a/crates/fs-util/src/lib.rs b/crates/fs-util/src/lib.rs index 91e60c313f8ee..f77632cc89196 100644 --- a/crates/fs-util/src/lib.rs +++ b/crates/fs-util/src/lib.rs @@ -8,8 +8,8 @@ #![cfg_attr(not(test), warn(unused_crate_dependencies))] use serde::{de::DeserializeOwned, Serialize}; use std::{ - fs::{self, File, ReadDir}, - io::{self, BufWriter, Write}, + fs::{self, File, OpenOptions, ReadDir}, + io::{self, BufWriter, Error, ErrorKind, Write}, path::{Path, PathBuf}, }; @@ -138,6 +138,14 @@ pub enum FsPathError { /// The path related to the operation. path: PathBuf, }, + /// Error variant for failed fsync operation with additional path context. + #[error("failed to sync path {path:?}: {source}")] + Fsync { + /// The source `io::Error`. + source: io::Error, + /// The path related to the operation. + path: PathBuf, + }, } impl FsPathError { @@ -195,6 +203,11 @@ impl FsPathError { pub fn metadata(source: io::Error, path: impl Into) -> Self { Self::Metadata { source, path: path.into() } } + + /// Returns the complementary error variant for `fsync`. + pub fn fsync(source: io::Error, path: impl Into) -> Self { + Self::Fsync { source, path: path.into() } + } } /// Wrapper for `std::fs::read_to_string` @@ -277,3 +290,61 @@ pub fn write_json_file(path: &Path, obj: &T) -> Result<()> { .map_err(|source| FsPathError::WriteJson { source, path: path.into() })?; writer.flush().map_err(|e| FsPathError::write(e, path)) } + +/// Writes atomically to file. +/// +/// 1. Creates a temporary file with a `.tmp` extension in the same file directory. +/// 2. Writes content with `write_fn`. +/// 3. Fsyncs the temp file to disk. +/// 4. Renames the temp file to the target path. +/// 5. Fsyncs the file directory. +/// +/// Atomic writes are hard: +/// * +/// * +pub fn atomic_write_file(file_path: &Path, write_fn: F) -> Result<()> +where + F: FnOnce(&mut File) -> std::result::Result<(), E>, + E: Into>, +{ + let mut tmp_path = file_path.to_path_buf(); + tmp_path.set_extension("tmp"); + + // Write to the temporary file + let mut file = + File::create(&tmp_path).map_err(|err| FsPathError::create_file(err, &tmp_path))?; + + write_fn(&mut file).map_err(|err| FsPathError::Write { + source: Error::new(ErrorKind::Other, err.into()), + path: tmp_path.clone(), + })?; + + // fsync() file + file.sync_all().map_err(|err| FsPathError::fsync(err, &tmp_path))?; + + // Rename file, not move + rename(&tmp_path, file_path)?; + + // fsync() directory + if let Some(parent) = file_path.parent() { + #[cfg(windows)] + OpenOptions::new() + .read(true) + .write(true) + .custom_flags(0x02000000) // FILE_FLAG_BACKUP_SEMANTICS + .open(parent) + .map_err(|err| FsPathError::open(err, parent))? + .sync_all() + .map_err(|err| FsPathError::fsync(err, parent))?; + + #[cfg(not(windows))] + OpenOptions::new() + .read(true) + .open(parent) + .map_err(|err| FsPathError::open(err, parent))? 
+ .sync_all() + .map_err(|err| FsPathError::fsync(err, parent))?; + } + + Ok(()) +} diff --git a/crates/storage/nippy-jar/src/lib.rs b/crates/storage/nippy-jar/src/lib.rs index a720192d6a051..bdc950aa38a78 100644 --- a/crates/storage/nippy-jar/src/lib.rs +++ b/crates/storage/nippy-jar/src/lib.rs @@ -17,7 +17,7 @@ use memmap2::Mmap; use serde::{Deserialize, Serialize}; use std::{ error::Error as StdError, - fs::{File, OpenOptions}, + fs::File, ops::Range, path::{Path, PathBuf}, }; @@ -250,35 +250,9 @@ impl NippyJar { /// Writes all necessary configuration to file. fn freeze_config(&self) -> Result<(), NippyJarError> { - // Atomic writes are hard: - let mut tmp_path = self.config_path(); - tmp_path.set_extension(".tmp"); - - // Write to temporary file - let mut file = File::create(&tmp_path)?; - bincode::serialize_into(&mut file, &self)?; - - // fsync() file - file.sync_all()?; - - // Rename file, not move - reth_fs_util::rename(&tmp_path, self.config_path())?; - - // fsync() dir - if let Some(parent) = tmp_path.parent() { - //custom_flags() is only available on Windows - #[cfg(windows)] - OpenOptions::new() - .read(true) - .write(true) - .custom_flags(0x02000000) // FILE_FLAG_BACKUP_SEMANTICS - .open(parent)? - .sync_all()?; - - #[cfg(not(windows))] - OpenOptions::new().read(true).open(parent)?.sync_all()?; - } - Ok(()) + Ok(reth_fs_util::atomic_write_file(&self.config_path(), |file| { + bincode::serialize_into(file, &self) + })?) } } From 37b0c56b859eea63f02ea8b5eec669d198e055cf Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Fri, 27 Sep 2024 09:06:44 +0100 Subject: [PATCH 32/84] feat(exex): add parent hash to WAL block cache, index by hashes (#11263) --- Cargo.lock | 1 + crates/exex/exex/Cargo.toml | 1 + crates/exex/exex/src/wal/cache.rs | 66 ++++++++++++++++++------------- crates/exex/exex/src/wal/mod.rs | 8 ++++ 4 files changed, 49 insertions(+), 27 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 103da9225540b..46d8789361292 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7396,6 +7396,7 @@ dependencies = [ "alloy-consensus", "alloy-genesis", "alloy-primitives", + "dashmap 6.1.0", "eyre", "futures", "metrics", diff --git a/crates/exex/exex/Cargo.toml b/crates/exex/exex/Cargo.toml index cbb2214192b4f..5baf5b97f3ea2 100644 --- a/crates/exex/exex/Cargo.toml +++ b/crates/exex/exex/Cargo.toml @@ -41,6 +41,7 @@ tokio-util.workspace = true tokio.workspace = true ## misc +dashmap.workspace = true eyre.workspace = true metrics.workspace = true serde_json.workspace = true diff --git a/crates/exex/exex/src/wal/cache.rs b/crates/exex/exex/src/wal/cache.rs index 25719d11bf938..2c79826e0e172 100644 --- a/crates/exex/exex/src/wal/cache.rs +++ b/crates/exex/exex/src/wal/cache.rs @@ -1,61 +1,70 @@ use std::collections::{BTreeMap, VecDeque}; +use dashmap::DashMap; use reth_exex_types::ExExNotification; -use reth_primitives::BlockNumHash; +use reth_primitives::{BlockNumHash, B256}; -/// The block cache of the WAL. Acts as a mapping of `File ID -> List of Blocks`. -/// -/// For each notification written to the WAL, there will be an entry per block written to -/// the cache with the same file ID. I.e. for each notification, there may be multiple blocks in the -/// cache. +/// The block cache of the WAL. /// /// This cache is needed to avoid walking the WAL directory every time we want to find a -/// notification corresponding to a block. +/// notification corresponding to a block or a block corresponding to a hash. 
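The `atomic_write_file` helper added to `reth-fs-util` above, and reused by `NippyJar::freeze_config` in the same patch, is the usual temp-file, fsync, rename, fsync-directory sequence. A stripped-down, Unix-only sketch of that sequence in plain std, without the crate's error wrapping and without the Windows `FILE_FLAG_BACKUP_SEMANTICS` branch:

```rust
use std::{
    fs::{self, File, OpenOptions},
    io::{self, Write},
    path::Path,
};

/// Writes `contents` to `path` so readers observe either the old file or the
/// complete new file, never a partial write.
fn atomic_write(path: &Path, contents: &[u8]) -> io::Result<()> {
    let tmp_path = path.with_extension("tmp");

    // 1. Write everything to a sibling temporary file.
    let mut tmp = File::create(&tmp_path)?;
    tmp.write_all(contents)?;

    // 2. Flush the temporary file's data and metadata to disk.
    tmp.sync_all()?;

    // 3. Atomically replace the target with the temporary file.
    fs::rename(&tmp_path, path)?;

    // 4. Fsync the parent directory so the rename itself is durable.
    if let Some(parent) = path.parent() {
        OpenOptions::new().read(true).open(parent)?.sync_all()?;
    }

    Ok(())
}

fn main() -> io::Result<()> {
    let dir = std::env::temp_dir().join("atomic-write-demo");
    fs::create_dir_all(&dir)?;
    atomic_write(&dir.join("config.json"), br#"{"version":1}"#)?;
    println!("wrote {}", dir.join("config.json").display());
    Ok(())
}
```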
#[derive(Debug)] -pub struct BlockCache(BTreeMap>); +pub struct BlockCache { + /// A mapping of `File ID -> List of Blocks`. + /// + /// For each notification written to the WAL, there will be an entry per block written to + /// the cache with the same file ID. I.e. for each notification, there may be multiple blocks + /// in the cache. + files: BTreeMap>, + /// A mapping of `Block Hash -> Block`. + /// + /// For each [`ExExNotification::ChainCommitted`] notification, there will be an entry per + /// block. + blocks: DashMap, +} impl BlockCache { /// Creates a new instance of [`BlockCache`]. - pub(super) const fn new() -> Self { - Self(BTreeMap::new()) + pub(super) fn new() -> Self { + Self { files: BTreeMap::new(), blocks: DashMap::new() } } /// Returns `true` if the cache is empty. pub(super) fn is_empty(&self) -> bool { - self.0.is_empty() + self.files.is_empty() } /// Returns a front-to-back iterator. pub(super) fn iter(&self) -> impl Iterator + '_ { - self.0.iter().flat_map(|(k, v)| v.iter().map(move |b| (*k, *b))) + self.files.iter().flat_map(|(k, v)| v.iter().map(move |b| (*k, *b))) } /// Provides a reference to the first block from the cache, or `None` if the cache is /// empty. pub(super) fn front(&self) -> Option<(u64, CachedBlock)> { - self.0.first_key_value().and_then(|(k, v)| v.front().map(|b| (*k, *b))) + self.files.first_key_value().and_then(|(k, v)| v.front().map(|b| (*k, *b))) } /// Provides a reference to the last block from the cache, or `None` if the cache is /// empty. pub(super) fn back(&self) -> Option<(u64, CachedBlock)> { - self.0.last_key_value().and_then(|(k, v)| v.back().map(|b| (*k, *b))) + self.files.last_key_value().and_then(|(k, v)| v.back().map(|b| (*k, *b))) } /// Removes the notification with the given file ID. pub(super) fn remove_notification(&mut self, key: u64) -> Option> { - self.0.remove(&key) + self.files.remove(&key) } /// Pops the first block from the cache. If it resulted in the whole file entry being empty, /// it will also remove the file entry. pub(super) fn pop_front(&mut self) -> Option<(u64, CachedBlock)> { - let first_entry = self.0.first_entry()?; + let first_entry = self.files.first_entry()?; let key = *first_entry.key(); let blocks = first_entry.into_mut(); let first_block = blocks.pop_front().unwrap(); if blocks.is_empty() { - self.0.remove(&key); + self.files.remove(&key); } Some((key, first_block)) @@ -64,12 +73,12 @@ impl BlockCache { /// Pops the last block from the cache. If it resulted in the whole file entry being empty, /// it will also remove the file entry. pub(super) fn pop_back(&mut self) -> Option<(u64, CachedBlock)> { - let last_entry = self.0.last_entry()?; + let last_entry = self.files.last_entry()?; let key = *last_entry.key(); let blocks = last_entry.into_mut(); let last_block = blocks.pop_back().unwrap(); if blocks.is_empty() { - self.0.remove(&key); + self.files.remove(&key); } Some((key, last_block)) @@ -77,7 +86,7 @@ impl BlockCache { /// Appends a block to the back of the specified file entry. pub(super) fn insert(&mut self, file_id: u64, block: CachedBlock) { - self.0.entry(file_id).or_default().push_back(block); + self.files.entry(file_id).or_default().push_back(block); } /// Inserts the blocks from the notification into the cache with the given file ID. 
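The restructured `BlockCache` above keeps two views of the same data: an ordered `file ID -> blocks` map that can be popped from either end, and a hash-keyed map for point lookups. A compact sketch of that dual-index idea using only std collections, with `HashMap` standing in for `DashMap` and a tiny `CachedBlock` with a raw 32-byte hash standing in for the reth block types:

```rust
use std::collections::{BTreeMap, HashMap, VecDeque};

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct CachedBlock {
    number: u64,
    hash: [u8; 32],
}

/// Two indexes over the same cached blocks: ordered by file ID for
/// front/back truncation, keyed by hash for O(1) lookups.
#[derive(Default)]
struct BlockCache {
    files: BTreeMap<u64, VecDeque<CachedBlock>>,
    by_hash: HashMap<[u8; 32], CachedBlock>,
}

impl BlockCache {
    fn insert(&mut self, file_id: u64, block: CachedBlock) {
        self.files.entry(file_id).or_default().push_back(block);
        self.by_hash.insert(block.hash, block);
    }

    /// Pops the oldest cached block, dropping its file entry once empty.
    fn pop_front(&mut self) -> Option<(u64, CachedBlock)> {
        let mut entry = self.files.first_entry()?;
        let file_id = *entry.key();
        let block = entry.get_mut().pop_front()?;
        if entry.get().is_empty() {
            entry.remove();
        }
        Some((file_id, block))
    }

    fn get_by_hash(&self, hash: &[u8; 32]) -> Option<&CachedBlock> {
        self.by_hash.get(hash)
    }
}

fn main() {
    let mut cache = BlockCache::default();
    cache.insert(0, CachedBlock { number: 1, hash: [1; 32] });
    cache.insert(1, CachedBlock { number: 2, hash: [2; 32] });

    assert_eq!(cache.get_by_hash(&[2; 32]).map(|b| b.number), Some(2));
    assert_eq!(cache.pop_front().map(|(id, b)| (id, b.number)), Some((0, 1)));
    assert!(cache.files.get(&0).is_none());
}
```

Note that popping from the ordered view leaves the hash index untouched, which mirrors the `pop_front`/`pop_back` implementations in the patch.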
@@ -98,6 +107,7 @@ impl BlockCache { CachedBlock { action: CachedBlockAction::Revert, block: (block.number, block.hash()).into(), + parent_hash: block.parent_hash, }, ); } @@ -105,13 +115,13 @@ impl BlockCache { if let Some(committed_chain) = committed_chain { for block in committed_chain.blocks().values() { - self.insert( - file_id, - CachedBlock { - action: CachedBlockAction::Commit, - block: (block.number, block.hash()).into(), - }, - ); + let cached_block = CachedBlock { + action: CachedBlockAction::Commit, + block: (block.number, block.hash()).into(), + parent_hash: block.parent_hash, + }; + self.insert(file_id, cached_block); + self.blocks.insert(block.hash(), cached_block); } } } @@ -122,6 +132,8 @@ pub(super) struct CachedBlock { pub(super) action: CachedBlockAction, /// The block number and hash of the block. pub(super) block: BlockNumHash, + /// The hash of the parent block. + pub(super) parent_hash: B256, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] diff --git a/crates/exex/exex/src/wal/mod.rs b/crates/exex/exex/src/wal/mod.rs index 163d21d1bde7b..1efda8d84f8e6 100644 --- a/crates/exex/exex/src/wal/mod.rs +++ b/crates/exex/exex/src/wal/mod.rs @@ -262,6 +262,7 @@ mod tests { CachedBlock { action: CachedBlockAction::Commit, block: (blocks[0].number, blocks[0].hash()).into(), + parent_hash: blocks[0].parent_hash, }, ), ( @@ -269,6 +270,7 @@ mod tests { CachedBlock { action: CachedBlockAction::Commit, block: (blocks[1].number, blocks[1].hash()).into(), + parent_hash: blocks[1].parent_hash, }, ), ]; @@ -284,6 +286,7 @@ mod tests { CachedBlock { action: CachedBlockAction::Revert, block: (blocks[1].number, blocks[1].hash()).into(), + parent_hash: blocks[1].parent_hash, }, )]; assert_eq!( @@ -304,6 +307,7 @@ mod tests { CachedBlock { action: CachedBlockAction::Commit, block: (block_1_reorged.number, block_1_reorged.hash()).into(), + parent_hash: block_1_reorged.parent_hash, }, ), ( @@ -311,6 +315,7 @@ mod tests { CachedBlock { action: CachedBlockAction::Commit, block: (blocks[2].number, blocks[2].hash()).into(), + parent_hash: blocks[2].parent_hash, }, ), ]; @@ -341,6 +346,7 @@ mod tests { CachedBlock { action: CachedBlockAction::Revert, block: (blocks[2].number, blocks[2].hash()).into(), + parent_hash: blocks[2].parent_hash, }, ), ( @@ -348,6 +354,7 @@ mod tests { CachedBlock { action: CachedBlockAction::Commit, block: (block_2_reorged.number, block_2_reorged.hash()).into(), + parent_hash: block_2_reorged.parent_hash, }, ), ( @@ -355,6 +362,7 @@ mod tests { CachedBlock { action: CachedBlockAction::Commit, block: (blocks[3].number, blocks[3].hash()).into(), + parent_hash: blocks[3].parent_hash, }, ), ]; From 67221247c5265c271f07db65ec28cbf47fe5d8c6 Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Fri, 27 Sep 2024 10:10:35 +0100 Subject: [PATCH 33/84] feat(exex): WAL handle (#11266) --- Cargo.lock | 1 + crates/exex/exex/Cargo.toml | 1 + crates/exex/exex/src/manager.rs | 135 +++++++++++++------------ crates/exex/exex/src/notifications.rs | 25 ++++- crates/exex/exex/src/wal/cache.rs | 62 ++++++------ crates/exex/exex/src/wal/mod.rs | 84 +++++++++++---- crates/exex/exex/src/wal/storage.rs | 2 +- crates/exex/test-utils/src/lib.rs | 5 +- crates/node/builder/src/launch/exex.rs | 18 ++-- 9 files changed, 212 insertions(+), 121 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 46d8789361292..173eb2946b401 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7400,6 +7400,7 @@ dependencies = [ "eyre", "futures", "metrics", + "parking_lot 0.12.3", "reth-blockchain-tree", "reth-chain-state", 
"reth-chainspec", diff --git a/crates/exex/exex/Cargo.toml b/crates/exex/exex/Cargo.toml index 5baf5b97f3ea2..2b5b89fbd16d3 100644 --- a/crates/exex/exex/Cargo.toml +++ b/crates/exex/exex/Cargo.toml @@ -44,6 +44,7 @@ tokio.workspace = true dashmap.workspace = true eyre.workspace = true metrics.workspace = true +parking_lot.workspace = true serde_json.workspace = true tracing.workspace = true diff --git a/crates/exex/exex/src/manager.rs b/crates/exex/exex/src/manager.rs index 3230e003b28db..9b07aef0aad61 100644 --- a/crates/exex/exex/src/manager.rs +++ b/crates/exex/exex/src/manager.rs @@ -1,4 +1,6 @@ -use crate::{wal::Wal, ExExEvent, ExExNotification, ExExNotifications, FinishedExExHeight}; +use crate::{ + wal::Wal, ExExEvent, ExExNotification, ExExNotifications, FinishedExExHeight, WalHandle, +}; use alloy_primitives::BlockNumber; use futures::StreamExt; use metrics::Gauge; @@ -67,10 +69,12 @@ impl ExExHandle { node_head: Head, provider: P, executor: E, + wal_handle: WalHandle, ) -> (Self, UnboundedSender, ExExNotifications) { let (notification_tx, notification_rx) = mpsc::channel(1); let (event_tx, event_rx) = mpsc::unbounded_channel(); - let notifications = ExExNotifications::new(node_head, provider, executor, notification_rx); + let notifications = + ExExNotifications::new(node_head, provider, executor, notification_rx, wal_handle); ( Self { @@ -521,8 +525,11 @@ mod tests { #[tokio::test] async fn test_delivers_events() { + let temp_dir = tempfile::tempdir().unwrap(); + let wal = Wal::new(temp_dir.path()).unwrap(); + let (mut exex_handle, event_tx, mut _notification_rx) = - ExExHandle::new("test_exex".to_string(), Head::default(), (), ()); + ExExHandle::new("test_exex".to_string(), Head::default(), (), (), wal.handle()); // Send an event and check that it's delivered correctly event_tx.send(ExExEvent::FinishedHeight(42)).unwrap(); @@ -533,65 +540,48 @@ mod tests { #[tokio::test] async fn test_has_exexs() { let temp_dir = tempfile::tempdir().unwrap(); + let wal = Wal::new(temp_dir.path()).unwrap(); + let (exex_handle_1, _, _) = - ExExHandle::new("test_exex_1".to_string(), Head::default(), (), ()); + ExExHandle::new("test_exex_1".to_string(), Head::default(), (), (), wal.handle()); - assert!(!ExExManager::new( - vec![], - 0, - Wal::new(temp_dir.path()).unwrap(), - empty_finalized_header_stream() - ) - .handle - .has_exexs()); + assert!(!ExExManager::new(vec![], 0, wal.clone(), empty_finalized_header_stream()) + .handle + .has_exexs()); - assert!(ExExManager::new( - vec![exex_handle_1], - 0, - Wal::new(temp_dir.path()).unwrap(), - empty_finalized_header_stream() - ) - .handle - .has_exexs()); + assert!(ExExManager::new(vec![exex_handle_1], 0, wal, empty_finalized_header_stream()) + .handle + .has_exexs()); } #[tokio::test] async fn test_has_capacity() { let temp_dir = tempfile::tempdir().unwrap(); + let wal = Wal::new(temp_dir.path()).unwrap(); + let (exex_handle_1, _, _) = - ExExHandle::new("test_exex_1".to_string(), Head::default(), (), ()); + ExExHandle::new("test_exex_1".to_string(), Head::default(), (), (), wal.handle()); - assert!(!ExExManager::new( - vec![], - 0, - Wal::new(temp_dir.path()).unwrap(), - empty_finalized_header_stream() - ) - .handle - .has_capacity()); + assert!(!ExExManager::new(vec![], 0, wal.clone(), empty_finalized_header_stream()) + .handle + .has_capacity()); - assert!(ExExManager::new( - vec![exex_handle_1], - 10, - Wal::new(temp_dir.path()).unwrap(), - empty_finalized_header_stream() - ) - .handle - .has_capacity()); + 
assert!(ExExManager::new(vec![exex_handle_1], 10, wal, empty_finalized_header_stream()) + .handle + .has_capacity()); } #[test] fn test_push_notification() { let temp_dir = tempfile::tempdir().unwrap(); - let (exex_handle, _, _) = ExExHandle::new("test_exex".to_string(), Head::default(), (), ()); + let wal = Wal::new(temp_dir.path()).unwrap(); + + let (exex_handle, _, _) = + ExExHandle::new("test_exex".to_string(), Head::default(), (), (), wal.handle()); // Create a mock ExExManager and add the exex_handle to it - let mut exex_manager = ExExManager::new( - vec![exex_handle], - 10, - Wal::new(temp_dir.path()).unwrap(), - empty_finalized_header_stream(), - ); + let mut exex_manager = + ExExManager::new(vec![exex_handle], 10, wal, empty_finalized_header_stream()); // Define the notification for testing let mut block1 = SealedBlockWithSenders::default(); @@ -634,16 +624,15 @@ mod tests { #[test] fn test_update_capacity() { let temp_dir = tempfile::tempdir().unwrap(); - let (exex_handle, _, _) = ExExHandle::new("test_exex".to_string(), Head::default(), (), ()); + let wal = Wal::new(temp_dir.path()).unwrap(); + + let (exex_handle, _, _) = + ExExHandle::new("test_exex".to_string(), Head::default(), (), (), wal.handle()); // Create a mock ExExManager and add the exex_handle to it let max_capacity = 5; - let mut exex_manager = ExExManager::new( - vec![exex_handle], - max_capacity, - Wal::new(temp_dir.path()).unwrap(), - empty_finalized_header_stream(), - ); + let mut exex_manager = + ExExManager::new(vec![exex_handle], max_capacity, wal, empty_finalized_header_stream()); // Push some notifications to fill part of the buffer let mut block1 = SealedBlockWithSenders::default(); @@ -674,8 +663,10 @@ mod tests { #[tokio::test] async fn test_updates_block_height() { let temp_dir = tempfile::tempdir().unwrap(); + let wal = Wal::new(temp_dir.path()).unwrap(); + let (exex_handle, event_tx, mut _notification_rx) = - ExExHandle::new("test_exex".to_string(), Head::default(), (), ()); + ExExHandle::new("test_exex".to_string(), Head::default(), (), (), wal.handle()); // Check initial block height assert!(exex_handle.finished_height.is_none()); @@ -717,11 +708,13 @@ mod tests { #[tokio::test] async fn test_updates_block_height_lower() { let temp_dir = tempfile::tempdir().unwrap(); + let wal = Wal::new(temp_dir.path()).unwrap(); + // Create two `ExExHandle` instances let (exex_handle1, event_tx1, _) = - ExExHandle::new("test_exex1".to_string(), Head::default(), (), ()); + ExExHandle::new("test_exex1".to_string(), Head::default(), (), (), wal.handle()); let (exex_handle2, event_tx2, _) = - ExExHandle::new("test_exex2".to_string(), Head::default(), (), ()); + ExExHandle::new("test_exex2".to_string(), Head::default(), (), (), wal.handle()); // Send events to update the block heights of the two handles, with the second being lower event_tx1.send(ExExEvent::FinishedHeight(42)).unwrap(); @@ -756,11 +749,13 @@ mod tests { #[tokio::test] async fn test_updates_block_height_greater() { let temp_dir = tempfile::tempdir().unwrap(); + let wal = Wal::new(temp_dir.path()).unwrap(); + // Create two `ExExHandle` instances let (exex_handle1, event_tx1, _) = - ExExHandle::new("test_exex1".to_string(), Head::default(), (), ()); + ExExHandle::new("test_exex1".to_string(), Head::default(), (), (), wal.handle()); let (exex_handle2, event_tx2, _) = - ExExHandle::new("test_exex2".to_string(), Head::default(), (), ()); + ExExHandle::new("test_exex2".to_string(), Head::default(), (), (), wal.handle()); // Assert that the initial block 
height is `None` for the first `ExExHandle`. assert!(exex_handle1.finished_height.is_none()); @@ -802,8 +797,10 @@ mod tests { #[tokio::test] async fn test_exex_manager_capacity() { let temp_dir = tempfile::tempdir().unwrap(); + let wal = Wal::new(temp_dir.path()).unwrap(); + let (exex_handle_1, _, _) = - ExExHandle::new("test_exex_1".to_string(), Head::default(), (), ()); + ExExHandle::new("test_exex_1".to_string(), Head::default(), (), (), wal.handle()); // Create an ExExManager with a small max capacity let max_capacity = 2; @@ -846,8 +843,11 @@ mod tests { #[tokio::test] async fn exex_handle_new() { + let temp_dir = tempfile::tempdir().unwrap(); + let wal = Wal::new(temp_dir.path()).unwrap(); + let (mut exex_handle, _, mut notifications) = - ExExHandle::new("test_exex".to_string(), Head::default(), (), ()); + ExExHandle::new("test_exex".to_string(), Head::default(), (), (), wal.handle()); // Check initial state assert_eq!(exex_handle.id, "test_exex"); @@ -889,8 +889,11 @@ mod tests { #[tokio::test] async fn test_notification_if_finished_height_gt_chain_tip() { + let temp_dir = tempfile::tempdir().unwrap(); + let wal = Wal::new(temp_dir.path()).unwrap(); + let (mut exex_handle, _, mut notifications) = - ExExHandle::new("test_exex".to_string(), Head::default(), (), ()); + ExExHandle::new("test_exex".to_string(), Head::default(), (), (), wal.handle()); // Set finished_height to a value higher than the block tip exex_handle.finished_height = Some(15); @@ -931,8 +934,11 @@ mod tests { #[tokio::test] async fn test_sends_chain_reorged_notification() { + let temp_dir = tempfile::tempdir().unwrap(); + let wal = Wal::new(temp_dir.path()).unwrap(); + let (mut exex_handle, _, mut notifications) = - ExExHandle::new("test_exex".to_string(), Head::default(), (), ()); + ExExHandle::new("test_exex".to_string(), Head::default(), (), (), wal.handle()); let notification = ExExNotification::ChainReorged { old: Arc::new(Chain::default()), @@ -962,8 +968,11 @@ mod tests { #[tokio::test] async fn test_sends_chain_reverted_notification() { + let temp_dir = tempfile::tempdir().unwrap(); + let wal = Wal::new(temp_dir.path()).unwrap(); + let (mut exex_handle, _, mut notifications) = - ExExHandle::new("test_exex".to_string(), Head::default(), (), ()); + ExExHandle::new("test_exex".to_string(), Head::default(), (), (), wal.handle()); let notification = ExExNotification::ChainReverted { old: Arc::new(Chain::default()) }; @@ -994,6 +1003,7 @@ mod tests { let temp_dir = tempfile::tempdir().unwrap(); let mut wal = Wal::new(temp_dir.path()).unwrap(); + let block = random_block(&mut generators::rng(), 0, Default::default()) .seal_with_senders() .ok_or_eyre("failed to recover senders")?; @@ -1005,7 +1015,8 @@ mod tests { let (tx, rx) = watch::channel(None); let finalized_header_stream = ForkChoiceStream::new(rx); - let (exex_handle, _, _) = ExExHandle::new("test_exex".to_string(), Head::default(), (), ()); + let (exex_handle, _, _) = + ExExHandle::new("test_exex".to_string(), Head::default(), (), (), wal.handle()); let mut exex_manager = std::pin::pin!(ExExManager::new(vec![exex_handle], 1, wal, finalized_header_stream)); diff --git a/crates/exex/exex/src/notifications.rs b/crates/exex/exex/src/notifications.rs index 54d7959dc5e86..e182f385fa793 100644 --- a/crates/exex/exex/src/notifications.rs +++ b/crates/exex/exex/src/notifications.rs @@ -1,4 +1,4 @@ -use crate::{BackfillJobFactory, ExExNotification, StreamBackfillJob}; +use crate::{BackfillJobFactory, ExExNotification, StreamBackfillJob, WalHandle}; use 
alloy_primitives::U256; use eyre::OptionExt; use futures::{Stream, StreamExt}; @@ -21,6 +21,7 @@ pub struct ExExNotifications { provider: P, executor: E, notifications: Receiver, + wal_handle: WalHandle, } impl Debug for ExExNotifications { @@ -40,8 +41,9 @@ impl ExExNotifications { provider: P, executor: E, notifications: Receiver, + wal_handle: WalHandle, ) -> Self { - Self { node_head, provider, executor, notifications } + Self { node_head, provider, executor, notifications, wal_handle } } /// Receives the next value for this receiver. @@ -113,6 +115,7 @@ where self.provider, self.executor, self.notifications, + self.wal_handle, head, ) } @@ -134,6 +137,8 @@ pub struct ExExNotificationsWithHead { provider: P, executor: E, notifications: Receiver, + #[allow(dead_code)] + wal_handle: WalHandle, exex_head: ExExHead, pending_sync: bool, /// The backfill job to run before consuming any notifications. @@ -154,6 +159,7 @@ where provider: P, executor: E, notifications: Receiver, + wal_handle: WalHandle, exex_head: ExExHead, ) -> Self { Self { @@ -161,6 +167,7 @@ where provider, executor, notifications, + wal_handle, exex_head, pending_sync: true, backfill_job: None, @@ -344,6 +351,8 @@ where mod tests { use std::future::poll_fn; + use crate::Wal; + use super::*; use alloy_consensus::Header; use eyre::OptionExt; @@ -362,6 +371,9 @@ mod tests { async fn exex_notifications_behind_head_canonical() -> eyre::Result<()> { let mut rng = generators::rng(); + let temp_dir = tempfile::tempdir().unwrap(); + let wal = Wal::new(temp_dir.path()).unwrap(); + let provider_factory = create_test_provider_factory(); let genesis_hash = init_genesis(&provider_factory)?; let genesis_block = provider_factory @@ -412,6 +424,7 @@ mod tests { provider, EthExecutorProvider::mainnet(), notifications_rx, + wal.handle(), ) .with_head(exex_head); @@ -445,6 +458,9 @@ mod tests { #[tokio::test] async fn exex_notifications_same_head_canonical() -> eyre::Result<()> { + let temp_dir = tempfile::tempdir().unwrap(); + let wal = Wal::new(temp_dir.path()).unwrap(); + let provider_factory = create_test_provider_factory(); let genesis_hash = init_genesis(&provider_factory)?; let genesis_block = provider_factory @@ -485,6 +501,7 @@ mod tests { provider, EthExecutorProvider::mainnet(), notifications_rx, + wal.handle(), ) .with_head(exex_head); @@ -504,6 +521,9 @@ mod tests { async fn test_notifications_ahead_of_head() -> eyre::Result<()> { let mut rng = generators::rng(); + let temp_dir = tempfile::tempdir().unwrap(); + let wal = Wal::new(temp_dir.path()).unwrap(); + let provider_factory = create_test_provider_factory(); let genesis_hash = init_genesis(&provider_factory)?; let genesis_block = provider_factory @@ -544,6 +564,7 @@ mod tests { provider, EthExecutorProvider::mainnet(), notifications_rx, + wal.handle(), ) .with_head(exex_head); diff --git a/crates/exex/exex/src/wal/cache.rs b/crates/exex/exex/src/wal/cache.rs index 2c79826e0e172..8a432bbebef14 100644 --- a/crates/exex/exex/src/wal/cache.rs +++ b/crates/exex/exex/src/wal/cache.rs @@ -1,6 +1,7 @@ use std::collections::{BTreeMap, VecDeque}; use dashmap::DashMap; +use parking_lot::RwLock; use reth_exex_types::ExExNotification; use reth_primitives::{BlockNumHash, B256}; @@ -15,7 +16,7 @@ pub struct BlockCache { /// For each notification written to the WAL, there will be an entry per block written to /// the cache with the same file ID. I.e. for each notification, there may be multiple blocks /// in the cache. 
- files: BTreeMap>, + files: RwLock>>, /// A mapping of `Block Hash -> Block`. /// /// For each [`ExExNotification::ChainCommitted`] notification, there will be an entry per @@ -26,45 +27,52 @@ pub struct BlockCache { impl BlockCache { /// Creates a new instance of [`BlockCache`]. pub(super) fn new() -> Self { - Self { files: BTreeMap::new(), blocks: DashMap::new() } + Self { files: RwLock::new(BTreeMap::new()), blocks: DashMap::new() } } /// Returns `true` if the cache is empty. pub(super) fn is_empty(&self) -> bool { - self.files.is_empty() + self.files.read().is_empty() } /// Returns a front-to-back iterator. pub(super) fn iter(&self) -> impl Iterator + '_ { - self.files.iter().flat_map(|(k, v)| v.iter().map(move |b| (*k, *b))) + self.files + .read() + .iter() + .flat_map(|(k, v)| v.iter().map(move |b| (*k, *b))) + .collect::>() + .into_iter() } /// Provides a reference to the first block from the cache, or `None` if the cache is /// empty. pub(super) fn front(&self) -> Option<(u64, CachedBlock)> { - self.files.first_key_value().and_then(|(k, v)| v.front().map(|b| (*k, *b))) + self.files.read().first_key_value().and_then(|(k, v)| v.front().map(|b| (*k, *b))) } /// Provides a reference to the last block from the cache, or `None` if the cache is /// empty. pub(super) fn back(&self) -> Option<(u64, CachedBlock)> { - self.files.last_key_value().and_then(|(k, v)| v.back().map(|b| (*k, *b))) + self.files.read().last_key_value().and_then(|(k, v)| v.back().map(|b| (*k, *b))) } /// Removes the notification with the given file ID. - pub(super) fn remove_notification(&mut self, key: u64) -> Option> { - self.files.remove(&key) + pub(super) fn remove_notification(&self, key: u64) -> Option> { + self.files.write().remove(&key) } /// Pops the first block from the cache. If it resulted in the whole file entry being empty, /// it will also remove the file entry. - pub(super) fn pop_front(&mut self) -> Option<(u64, CachedBlock)> { - let first_entry = self.files.first_entry()?; + pub(super) fn pop_front(&self) -> Option<(u64, CachedBlock)> { + let mut files = self.files.write(); + + let first_entry = files.first_entry()?; let key = *first_entry.key(); let blocks = first_entry.into_mut(); let first_block = blocks.pop_front().unwrap(); if blocks.is_empty() { - self.files.remove(&key); + files.remove(&key); } Some((key, first_block)) @@ -72,44 +80,40 @@ impl BlockCache { /// Pops the last block from the cache. If it resulted in the whole file entry being empty, /// it will also remove the file entry. - pub(super) fn pop_back(&mut self) -> Option<(u64, CachedBlock)> { - let last_entry = self.files.last_entry()?; + pub(super) fn pop_back(&self) -> Option<(u64, CachedBlock)> { + let mut files = self.files.write(); + + let last_entry = files.last_entry()?; let key = *last_entry.key(); let blocks = last_entry.into_mut(); let last_block = blocks.pop_back().unwrap(); if blocks.is_empty() { - self.files.remove(&key); + files.remove(&key); } Some((key, last_block)) } - /// Appends a block to the back of the specified file entry. - pub(super) fn insert(&mut self, file_id: u64, block: CachedBlock) { - self.files.entry(file_id).or_default().push_back(block); - } - /// Inserts the blocks from the notification into the cache with the given file ID. /// /// First, inserts the reverted blocks (if any), then the committed blocks (if any). 
pub(super) fn insert_notification_blocks_with_file_id( - &mut self, + &self, file_id: u64, notification: &ExExNotification, ) { + let mut files = self.files.write(); + let reverted_chain = notification.reverted_chain(); let committed_chain = notification.committed_chain(); if let Some(reverted_chain) = reverted_chain { for block in reverted_chain.blocks().values() { - self.insert( - file_id, - CachedBlock { - action: CachedBlockAction::Revert, - block: (block.number, block.hash()).into(), - parent_hash: block.parent_hash, - }, - ); + files.entry(file_id).or_default().push_back(CachedBlock { + action: CachedBlockAction::Revert, + block: (block.number, block.hash()).into(), + parent_hash: block.parent_hash, + }); } } @@ -120,7 +124,7 @@ impl BlockCache { block: (block.number, block.hash()).into(), parent_hash: block.parent_hash, }; - self.insert(file_id, cached_block); + files.entry(file_id).or_default().push_back(cached_block); self.blocks.insert(block.hash(), cached_block); } } diff --git a/crates/exex/exex/src/wal/mod.rs b/crates/exex/exex/src/wal/mod.rs index 1efda8d84f8e6..91a447f23682f 100644 --- a/crates/exex/exex/src/wal/mod.rs +++ b/crates/exex/exex/src/wal/mod.rs @@ -5,7 +5,7 @@ pub use cache::BlockCache; mod storage; pub use storage::Storage; -use std::path::Path; +use std::{path::Path, sync::Arc}; use reth_exex_types::ExExNotification; use reth_primitives::BlockNumHash; @@ -15,23 +15,62 @@ use reth_tracing::tracing::{debug, instrument}; /// /// WAL is backed by a directory of binary files represented by [`Storage`] and a block cache /// represented by [`BlockCache`]. The role of the block cache is to avoid walking the WAL directory -/// and decoding notifications every time we want to rollback/finalize the WAL. +/// and decoding notifications every time we want to iterate or finalize the WAL. /// /// The expected mode of operation is as follows: /// 1. On every new canonical chain notification, call [`Wal::commit`]. /// 2. When the chain is finalized, call [`Wal::finalize`] to prevent the infinite growth of the /// WAL. -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct Wal { + inner: Arc, +} + +impl Wal { + /// Creates a new instance of [`Wal`]. + pub fn new(directory: impl AsRef) -> eyre::Result { + Ok(Self { inner: Arc::new(WalInner::new(directory)?) }) + } + + /// Returns a read-only handle to the WAL. + pub fn handle(&self) -> WalHandle { + WalHandle { wal: self.inner.clone() } + } + + /// Commits the notification to WAL. + pub fn commit(&mut self, notification: &ExExNotification) -> eyre::Result<()> { + self.inner.commit(notification) + } + + /// Finalizes the WAL to the given block, inclusive. + /// + /// 1. Finds a notification with first unfinalized block (first notification containing a + /// committed block higher than `to_block`). + /// 2. Removes the notifications from the beginning of WAL until the found notification. If this + /// notification includes both finalized and non-finalized blocks, it will not be removed. + pub fn finalize(&self, to_block: BlockNumHash) -> eyre::Result<()> { + self.inner.finalize(to_block) + } + + /// Returns an iterator over all notifications in the WAL. + pub fn iter_notifications( + &self, + ) -> eyre::Result> + '_>> { + self.inner.iter_notifications() + } +} + +/// Inner type for the WAL. +#[derive(Debug)] +struct WalInner { /// The underlying WAL storage backed by a file. storage: Storage, /// WAL block cache. See [`cache::BlockCache`] docs for more details. 
block_cache: BlockCache, } -impl Wal { - /// Creates a new instance of [`Wal`]. - pub fn new(directory: impl AsRef) -> eyre::Result { +impl WalInner { + fn new(directory: impl AsRef) -> eyre::Result { let mut wal = Self { storage: Storage::new(directory)?, block_cache: BlockCache::new() }; wal.fill_block_cache()?; Ok(wal) @@ -62,12 +101,11 @@ impl Wal { Ok(()) } - /// Commits the notification to WAL. #[instrument(target = "exex::wal", skip_all, fields( reverted_block_range = ?notification.reverted_chain().as_ref().map(|chain| chain.range()), committed_block_range = ?notification.committed_chain().as_ref().map(|chain| chain.range()) ))] - pub fn commit(&mut self, notification: &ExExNotification) -> eyre::Result<()> { + fn commit(&self, notification: &ExExNotification) -> eyre::Result<()> { let file_id = self.block_cache.back().map_or(0, |block| block.0 + 1); self.storage.write_notification(file_id, notification)?; @@ -84,7 +122,7 @@ impl Wal { /// 2. Removes the notifications from the beginning of WAL until the found notification. If this /// notification includes both finalized and non-finalized blocks, it will not be removed. #[instrument(target = "exex::wal", skip(self))] - pub fn finalize(&mut self, to_block: BlockNumHash) -> eyre::Result<()> { + fn finalize(&self, to_block: BlockNumHash) -> eyre::Result<()> { // First, walk cache to find the file ID of the notification with the finalized block and // save the file ID with the first unfinalized block. Do not remove any notifications // yet. @@ -152,7 +190,7 @@ impl Wal { } /// Returns an iterator over all notifications in the WAL. - pub(crate) fn iter_notifications( + fn iter_notifications( &self, ) -> eyre::Result> + '_>> { let Some(range) = self.storage.files_range()? else { @@ -163,6 +201,12 @@ impl Wal { } } +/// A read-only handle to the WAL that can be shared. +#[derive(Debug)] +pub struct WalHandle { + wal: Arc, +} + #[cfg(test)] mod tests { use std::sync::Arc; @@ -180,9 +224,10 @@ mod tests { }; fn read_notifications(wal: &Wal) -> eyre::Result> { - let Some(files_range) = wal.storage.files_range()? else { return Ok(Vec::new()) }; + let Some(files_range) = wal.inner.storage.files_range()? 
else { return Ok(Vec::new()) }; - wal.storage + wal.inner + .storage .iter_notifications(files_range) .map(|entry| Ok(entry?.1)) .collect::>() @@ -197,7 +242,7 @@ mod tests { // Create an instance of the WAL in a temporary directory let temp_dir = tempfile::tempdir()?; let mut wal = Wal::new(&temp_dir)?; - assert!(wal.block_cache.is_empty()); + assert!(wal.inner.block_cache.is_empty()); // Create 4 canonical blocks and one reorged block with number 2 let blocks = random_block_range(&mut rng, 0..=3, BlockRangeParams::default()) @@ -275,7 +320,10 @@ mod tests { ), ]; wal.commit(&committed_notification_1)?; - assert_eq!(wal.block_cache.iter().collect::>(), committed_notification_1_cache); + assert_eq!( + wal.inner.block_cache.iter().collect::>(), + committed_notification_1_cache + ); assert_eq!(read_notifications(&wal)?, vec![committed_notification_1.clone()]); // Second notification (revert block 1) @@ -290,7 +338,7 @@ mod tests { }, )]; assert_eq!( - wal.block_cache.iter().collect::>(), + wal.inner.block_cache.iter().collect::>(), [committed_notification_1_cache.clone(), reverted_notification_cache.clone()].concat() ); assert_eq!( @@ -320,7 +368,7 @@ mod tests { ), ]; assert_eq!( - wal.block_cache.iter().collect::>(), + wal.inner.block_cache.iter().collect::>(), [ committed_notification_1_cache.clone(), reverted_notification_cache.clone(), @@ -367,7 +415,7 @@ mod tests { ), ]; assert_eq!( - wal.block_cache.iter().collect::>(), + wal.inner.block_cache.iter().collect::>(), [ committed_notification_1_cache, reverted_notification_cache, @@ -392,7 +440,7 @@ mod tests { // the notifications before it. wal.finalize((block_1_reorged.number, block_1_reorged.hash()).into())?; assert_eq!( - wal.block_cache.iter().collect::>(), + wal.inner.block_cache.iter().collect::>(), [committed_notification_2_cache, reorged_notification_cache].concat() ); assert_eq!(read_notifications(&wal)?, vec![committed_notification_2, reorged_notification]); diff --git a/crates/exex/exex/src/wal/storage.rs b/crates/exex/exex/src/wal/storage.rs index ad2307361d0cb..8953a6a4edfb7 100644 --- a/crates/exex/exex/src/wal/storage.rs +++ b/crates/exex/exex/src/wal/storage.rs @@ -13,7 +13,7 @@ use tracing::instrument; /// /// Each notification is represented by a single file that contains a MessagePack-encoded /// notification. -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct Storage { /// The path to the WAL file. 
path: PathBuf, diff --git a/crates/exex/test-utils/src/lib.rs b/crates/exex/test-utils/src/lib.rs index 4117c0c73c9a4..906437ca7821e 100644 --- a/crates/exex/test-utils/src/lib.rs +++ b/crates/exex/test-utils/src/lib.rs @@ -20,7 +20,7 @@ use reth_db_common::init::init_genesis; use reth_ethereum_engine_primitives::EthereumEngineValidator; use reth_evm::test_utils::MockExecutorProvider; use reth_execution_types::Chain; -use reth_exex::{ExExContext, ExExEvent, ExExNotification, ExExNotifications}; +use reth_exex::{ExExContext, ExExEvent, ExExNotification, ExExNotifications, Wal}; use reth_network::{config::SecretKey, NetworkConfigBuilder, NetworkManager}; use reth_node_api::{ FullNodeTypes, FullNodeTypesAdapter, NodeTypes, NodeTypesWithDBAdapter, NodeTypesWithEngine, @@ -49,6 +49,7 @@ use reth_provider::{ use reth_tasks::TaskManager; use reth_transaction_pool::test_utils::{testing_pool, TestPool}; use std::{ + env::temp_dir, fmt::Debug, future::{poll_fn, Future}, sync::Arc, @@ -310,6 +311,8 @@ pub async fn test_exex_context_with_chain_spec( components.provider.clone(), components.components.executor.clone(), notifications_rx, + // TODO(alexey): do we want to expose WAL to the user? + Wal::new(temp_dir())?.handle(), ); let ctx = ExExContext { diff --git a/crates/node/builder/src/launch/exex.rs b/crates/node/builder/src/launch/exex.rs index d037200869c44..6cd705338384c 100644 --- a/crates/node/builder/src/launch/exex.rs +++ b/crates/node/builder/src/launch/exex.rs @@ -45,6 +45,15 @@ impl ExExLauncher { return Ok(None) } + let exex_wal = Wal::new( + config_container + .config + .datadir + .clone() + .resolve_datadir(config_container.config.chain.chain()) + .exex_wal(), + )?; + let mut exex_handles = Vec::with_capacity(extensions.len()); let mut exexes = Vec::with_capacity(extensions.len()); @@ -55,6 +64,7 @@ impl ExExLauncher { head, components.provider().clone(), components.block_executor().clone(), + exex_wal.handle(), ); exex_handles.push(handle); @@ -96,14 +106,6 @@ impl ExExLauncher { // spawn exex manager debug!(target: "reth::cli", "spawning exex manager"); // todo(onbjerg): rm magic number - let exex_wal = Wal::new( - config_container - .config - .datadir - .clone() - .resolve_datadir(config_container.config.chain.chain()) - .exex_wal(), - )?; let exex_manager = ExExManager::new( exex_handles, 1024, From 5706e03422f1d409491297e5a20946d0fd6413d8 Mon Sep 17 00:00:00 2001 From: Roman Krasiuk Date: Fri, 27 Sep 2024 11:14:09 +0200 Subject: [PATCH 34/84] chore(trie): early return on empty state (#11271) --- crates/trie/trie/src/state.rs | 55 ++++++++++++++++++--------------- crates/trie/trie/src/witness.rs | 5 +++ 2 files changed, 35 insertions(+), 25 deletions(-) diff --git a/crates/trie/trie/src/state.rs b/crates/trie/trie/src/state.rs index d634f05f0f39b..3b0af5cd879b6 100644 --- a/crates/trie/trie/src/state.rs +++ b/crates/trie/trie/src/state.rs @@ -100,6 +100,36 @@ impl HashedPostState { self } + /// Returns `true` if the hashed state is empty. + pub fn is_empty(&self) -> bool { + self.accounts.is_empty() && self.storages.is_empty() + } + + /// Construct [`TriePrefixSetsMut`] from hashed post state. + /// The prefix sets contain the hashed account and storage keys that have been changed in the + /// post state. + pub fn construct_prefix_sets(&self) -> TriePrefixSetsMut { + // Populate account prefix set. 
+ let mut account_prefix_set = PrefixSetMut::with_capacity(self.accounts.len()); + let mut destroyed_accounts = HashSet::default(); + for (hashed_address, account) in &self.accounts { + account_prefix_set.insert(Nibbles::unpack(hashed_address)); + + if account.is_none() { + destroyed_accounts.insert(*hashed_address); + } + } + + // Populate storage prefix sets. + let mut storage_prefix_sets = HashMap::with_capacity(self.storages.len()); + for (hashed_address, hashed_storage) in &self.storages { + account_prefix_set.insert(Nibbles::unpack(hashed_address)); + storage_prefix_sets.insert(*hashed_address, hashed_storage.construct_prefix_set()); + } + + TriePrefixSetsMut { account_prefix_set, storage_prefix_sets, destroyed_accounts } + } + /// Extend this hashed post state with contents of another. /// Entries in the second hashed post state take precedence. pub fn extend(&mut self, other: Self) { @@ -166,31 +196,6 @@ impl HashedPostState { HashedPostStateSorted { accounts, storages } } - - /// Construct [`TriePrefixSetsMut`] from hashed post state. - /// The prefix sets contain the hashed account and storage keys that have been changed in the - /// post state. - pub fn construct_prefix_sets(&self) -> TriePrefixSetsMut { - // Populate account prefix set. - let mut account_prefix_set = PrefixSetMut::with_capacity(self.accounts.len()); - let mut destroyed_accounts = HashSet::default(); - for (hashed_address, account) in &self.accounts { - account_prefix_set.insert(Nibbles::unpack(hashed_address)); - - if account.is_none() { - destroyed_accounts.insert(*hashed_address); - } - } - - // Populate storage prefix sets. - let mut storage_prefix_sets = HashMap::with_capacity(self.storages.len()); - for (hashed_address, hashed_storage) in &self.storages { - account_prefix_set.insert(Nibbles::unpack(hashed_address)); - storage_prefix_sets.insert(*hashed_address, hashed_storage.construct_prefix_set()); - } - - TriePrefixSetsMut { account_prefix_set, storage_prefix_sets, destroyed_accounts } - } } /// Representation of in-memory hashed storage. 
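Before the diff moves on to `witness.rs`, it helps to restate what the relocated `construct_prefix_sets` computes: every account in the post state contributes its hashed key to the account prefix set, accounts whose post state is `None` are also recorded as destroyed, and every account with touched storage gets its own storage prefix set while being added to the account set as well. A reduced sketch over plain std types, with `[u8; 32]` hashes and `BTreeSet` standing in for the real `PrefixSetMut`:

```rust
use std::collections::{BTreeSet, HashMap, HashSet};

type Hash = [u8; 32];

#[derive(Default)]
struct PostState {
    /// `None` marks an account destroyed by this state transition.
    accounts: HashMap<Hash, Option<u64>>,
    /// Changed storage slots per account.
    storages: HashMap<Hash, BTreeSet<Hash>>,
}

struct PrefixSets {
    account_keys: BTreeSet<Hash>,
    storage_keys: HashMap<Hash, BTreeSet<Hash>>,
    destroyed_accounts: HashSet<Hash>,
}

fn construct_prefix_sets(state: &PostState) -> PrefixSets {
    let mut account_keys = BTreeSet::new();
    let mut destroyed_accounts = HashSet::new();
    for (address, account) in &state.accounts {
        account_keys.insert(*address);
        if account.is_none() {
            destroyed_accounts.insert(*address);
        }
    }

    let mut storage_keys = HashMap::new();
    for (address, slots) in &state.storages {
        // An account with touched storage belongs in the account set too.
        account_keys.insert(*address);
        storage_keys.insert(*address, slots.clone());
    }

    PrefixSets { account_keys, storage_keys, destroyed_accounts }
}

fn main() {
    let alive: Hash = [1; 32];
    let destroyed: Hash = [2; 32];
    let with_storage: Hash = [3; 32];
    let slot: Hash = [9; 32];

    let mut state = PostState::default();
    state.accounts.insert(alive, Some(7));
    state.accounts.insert(destroyed, None);
    state.storages.insert(with_storage, BTreeSet::from([slot]));

    let sets = construct_prefix_sets(&state);
    assert_eq!(sets.account_keys.len(), 3);
    assert!(sets.destroyed_accounts.contains(&destroyed));
    assert!(sets.storage_keys.contains_key(&with_storage));
    println!("{} account keys in the prefix set", sets.account_keys.len());
}
```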
diff --git a/crates/trie/trie/src/witness.rs b/crates/trie/trie/src/witness.rs index 61576aabe36e5..ef5b358d31cbf 100644 --- a/crates/trie/trie/src/witness.rs +++ b/crates/trie/trie/src/witness.rs @@ -83,6 +83,10 @@ where mut self, state: HashedPostState, ) -> Result, TrieWitnessError> { + if state.is_empty() { + return Ok(self.witness) + } + let proof_targets = HashMap::from_iter( state .accounts @@ -92,6 +96,7 @@ where (*hashed_address, storage.storage.keys().copied().collect()) })), ); + let mut account_multiproof = Proof::new(self.trie_cursor_factory.clone(), self.hashed_cursor_factory.clone()) .with_prefix_sets_mut(self.prefix_sets.clone()) From 136a8227bf9e2326484455e33755bfc0adccb05e Mon Sep 17 00:00:00 2001 From: greged93 <82421016+greged93@users.noreply.github.com> Date: Fri, 27 Sep 2024 11:44:52 +0200 Subject: [PATCH 35/84] feat: canonical state for local engine (#11245) --- crates/engine/local/Cargo.toml | 1 + crates/engine/local/src/service.rs | 162 ++++++++++++++++++++--------- 2 files changed, 114 insertions(+), 49 deletions(-) diff --git a/crates/engine/local/Cargo.toml b/crates/engine/local/Cargo.toml index 286b9f836aa4f..f045bb6fda1d0 100644 --- a/crates/engine/local/Cargo.toml +++ b/crates/engine/local/Cargo.toml @@ -11,6 +11,7 @@ exclude.workspace = true [dependencies] # reth reth-beacon-consensus.workspace = true +reth-chain-state.workspace = true reth-engine-tree.workspace = true reth-node-types.workspace = true reth-payload-builder.workspace = true diff --git a/crates/engine/local/src/service.rs b/crates/engine/local/src/service.rs index d276dc5c1f8a0..c9794ecfabb0f 100644 --- a/crates/engine/local/src/service.rs +++ b/crates/engine/local/src/service.rs @@ -7,8 +7,9 @@ //! building at a fixed interval. use crate::miner::MiningMode; -use alloy_primitives::B256; +use eyre::eyre; use reth_beacon_consensus::EngineNodeTypes; +use reth_chain_state::{CanonicalInMemoryState, ExecutedBlock, NewCanonicalChain}; use reth_engine_tree::persistence::PersistenceHandle; use reth_payload_builder::PayloadBuilderHandle; use reth_payload_primitives::{ @@ -17,12 +18,12 @@ use reth_payload_primitives::{ use reth_provider::ProviderFactory; use reth_prune::PrunerWithFactory; use reth_stages_api::MetricEventsSender; -use std::fmt::Formatter; use tokio::sync::oneshot; use tracing::debug; /// Provides a local dev service engine that can be used to drive the /// chain forward. 
+#[derive(Debug)] pub struct LocalEngineService where N: EngineNodeTypes, @@ -32,30 +33,14 @@ where payload_builder: PayloadBuilderHandle, /// The payload attribute builder for the engine payload_attributes_builder: B, + /// Keep track of the Canonical chain state that isn't persisted on disk yet + canonical_in_memory_state: CanonicalInMemoryState, /// A handle to the persistence layer persistence_handle: PersistenceHandle, - /// The hash of the current head - head: B256, /// The mining mode for the engine mode: MiningMode, } -impl std::fmt::Debug for LocalEngineService -where - N: EngineNodeTypes, - B: PayloadAttributesBuilder::PayloadAttributes>, -{ - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - f.debug_struct("LocalEngineService") - .field("payload_builder", &self.payload_builder) - .field("payload_attributes_builder", &self.payload_attributes_builder) - .field("persistence_handle", &self.persistence_handle) - .field("head", &self.head) - .field("mode", &self.mode) - .finish() - } -} - impl LocalEngineService where N: EngineNodeTypes, @@ -67,14 +52,20 @@ where payload_attributes_builder: B, provider: ProviderFactory, pruner: PrunerWithFactory>, + canonical_in_memory_state: CanonicalInMemoryState, sync_metrics_tx: MetricEventsSender, - head: B256, mode: MiningMode, ) -> Self { let persistence_handle = PersistenceHandle::spawn_service(provider, pruner, sync_metrics_tx); - Self { payload_builder, payload_attributes_builder, persistence_handle, head, mode } + Self { + payload_builder, + payload_attributes_builder, + canonical_in_memory_state, + persistence_handle, + mode, + } } /// Spawn the [`LocalEngineService`] on a tokio green thread. The service will poll the payload @@ -86,8 +77,8 @@ where payload_attributes_builder: B, provider: ProviderFactory, pruner: PrunerWithFactory>, + canonical_in_memory_state: CanonicalInMemoryState, sync_metrics_tx: MetricEventsSender, - head: B256, mode: MiningMode, ) { let engine = Self::new( @@ -95,8 +86,8 @@ where payload_attributes_builder, provider, pruner, + canonical_in_memory_state, sync_metrics_tx, - head, mode, ); @@ -112,26 +103,29 @@ where (&mut self.mode).await; // Start a new payload building job - let new_head = self.build_and_save_payload().await; + let executed_block = self.build_and_save_payload().await; - if new_head.is_err() { - debug!(target: "local_engine", err = ?new_head.unwrap_err(), "failed payload building"); + if executed_block.is_err() { + debug!(target: "local_engine", err = ?executed_block.unwrap_err(), "failed payload building"); continue } + let block = executed_block.expect("not error"); - // Update the head - self.head = new_head.expect("not error"); + let res = self.update_canonical_in_memory_state(block); + if res.is_err() { + debug!(target: "local_engine", err = ?res.unwrap_err(), "failed canonical state update"); + } } } /// Builds a payload by initiating a new payload job via the [`PayloadBuilderHandle`], - /// saving the execution outcome to persistence and returning the current head of the - /// chain. - async fn build_and_save_payload(&self) -> eyre::Result { + /// saving the execution outcome to persistence and returning the executed block. 
+ async fn build_and_save_payload(&self) -> eyre::Result { let payload_attributes = self.payload_attributes_builder.build()?; + let parent = self.canonical_in_memory_state.get_canonical_head().hash(); let payload_builder_attributes = ::PayloadBuilderAttributes::try_new( - self.head, + parent, payload_attributes, ) .map_err(|_| eyre::eyre!("failed to fetch payload attributes"))?; @@ -142,22 +136,38 @@ where .await? .await?; - let block = payload.executed_block().map(|block| vec![block]).unwrap_or_default(); + let executed_block = + payload.executed_block().ok_or_else(|| eyre!("missing executed block"))?; let (tx, rx) = oneshot::channel(); - let _ = self.persistence_handle.save_blocks(block, tx); + let _ = self.persistence_handle.save_blocks(vec![executed_block.clone()], tx); // Wait for the persistence_handle to complete - let new_head = rx.await?.ok_or_else(|| eyre::eyre!("missing new head"))?; + let _ = rx.await?.ok_or_else(|| eyre!("missing new head"))?; + + Ok(executed_block) + } + + /// Update the canonical in memory state and send notification for a new canon state to + /// all the listeners. + fn update_canonical_in_memory_state(&self, executed_block: ExecutedBlock) -> eyre::Result<()> { + let chain = NewCanonicalChain::Commit { new: vec![executed_block] }; + let tip = chain.tip().header.clone(); + let notification = chain.to_chain_notification(); - Ok(new_head.hash) + // Update the tracked in-memory state with the new chain + self.canonical_in_memory_state.update_chain(chain); + self.canonical_in_memory_state.set_canonical_head(tip); + + // Sends an event to all active listeners about the new canonical chain + self.canonical_in_memory_state.notify_canon_state(notification); + Ok(()) } } #[cfg(test)] mod tests { use super::*; - use alloy_primitives::B256; use reth_chainspec::MAINNET; use reth_config::PruneConfig; use reth_db::test_utils::{create_test_rw_db, create_test_static_files_dir}; @@ -201,20 +211,20 @@ mod tests { let provider = ProviderFactory::>::new( create_test_rw_db(), MAINNET.clone(), - StaticFileProvider::read_write(static_dir_path).unwrap(), + StaticFileProvider::read_write(static_dir_path)?, ); let pruner = PrunerBuilder::new(PruneConfig::default()) .build_with_provider_factory(provider.clone()); + // Create an empty canonical in memory state + let canonical_in_memory_state = CanonicalInMemoryState::empty(); + // Start the payload builder service let payload_handle = spawn_test_payload_service::(); // Sync metric channel let (sync_metrics_tx, _) = unbounded_channel(); - // Get the attributes for start of block building - let genesis_hash = B256::random(); - // Launch the LocalEngineService in interval mode let period = Duration::from_secs(1); LocalEngineService::spawn_new( @@ -222,13 +232,17 @@ mod tests { TestPayloadAttributesBuilder, provider.clone(), pruner, + canonical_in_memory_state, sync_metrics_tx, - genesis_hash, MiningMode::interval(period), ); + // Check that we have no block for now + let block = provider.block_by_number(0)?; + assert!(block.is_none()); + // Wait 4 intervals - tokio::time::sleep(4 * period).await; + tokio::time::sleep(2 * period).await; // Assert a block has been build let block = provider.block_by_number(0)?; @@ -246,11 +260,14 @@ mod tests { let provider = ProviderFactory::>::new( create_test_rw_db(), MAINNET.clone(), - StaticFileProvider::read_write(static_dir_path).unwrap(), + StaticFileProvider::read_write(static_dir_path)?, ); let pruner = PrunerBuilder::new(PruneConfig::default()) .build_with_provider_factory(provider.clone()); + // 
Create an empty canonical in memory state + let canonical_in_memory_state = CanonicalInMemoryState::empty(); + // Start the payload builder service let payload_handle = spawn_test_payload_service::(); @@ -260,17 +277,14 @@ mod tests { // Sync metric channel let (sync_metrics_tx, _) = unbounded_channel(); - // Get the attributes for start of block building - let genesis_hash = B256::random(); - // Launch the LocalEngineService in instant mode LocalEngineService::spawn_new( payload_handle, TestPayloadAttributesBuilder, provider.clone(), pruner, + canonical_in_memory_state, sync_metrics_tx, - genesis_hash, MiningMode::instant(pool.clone()), ); @@ -295,4 +309,54 @@ mod tests { Ok(()) } + + #[tokio::test] + async fn test_canonical_chain_subscription() -> eyre::Result<()> { + reth_tracing::init_test_tracing(); + + // Start the provider and the pruner + let (_, static_dir_path) = create_test_static_files_dir(); + let provider = ProviderFactory::>::new( + create_test_rw_db(), + MAINNET.clone(), + StaticFileProvider::read_write(static_dir_path)?, + ); + let pruner = PrunerBuilder::new(PruneConfig::default()) + .build_with_provider_factory(provider.clone()); + + // Create an empty canonical in memory state + let canonical_in_memory_state = CanonicalInMemoryState::empty(); + let mut notifications = canonical_in_memory_state.subscribe_canon_state(); + + // Start the payload builder service + let payload_handle = spawn_test_payload_service::(); + + // Start a transaction pool + let pool = testing_pool(); + + // Sync metric channel + let (sync_metrics_tx, _) = unbounded_channel(); + + // Launch the LocalEngineService in instant mode + LocalEngineService::spawn_new( + payload_handle, + TestPayloadAttributesBuilder, + provider.clone(), + pruner, + canonical_in_memory_state, + sync_metrics_tx, + MiningMode::instant(pool.clone()), + ); + + // Add a transaction to the pool + let transaction = MockTransaction::legacy().with_gas_price(10); + pool.add_transaction(Default::default(), transaction).await?; + + // Check a notification is received for block 0 + let res = notifications.recv().await?; + + assert_eq!(res.tip().number, 0); + + Ok(()) + } } From 650cf755a6d9e4b43aa9ab50ae9f73725927ca88 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 27 Sep 2024 13:04:56 +0200 Subject: [PATCH 36/84] docs: clarify block order (#11279) --- crates/chain-state/src/in_memory.rs | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/crates/chain-state/src/in_memory.rs b/crates/chain-state/src/in_memory.rs index 1563dad64759f..4fd46c4a16371 100644 --- a/crates/chain-state/src/in_memory.rs +++ b/crates/chain-state/src/in_memory.rs @@ -517,7 +517,8 @@ impl CanonicalInMemoryState { MemoryOverlayStateProvider::new(historical, in_memory) } - /// Returns an iterator over all canonical blocks in the in-memory state, from newest to oldest. + /// Returns an iterator over all canonical blocks in the in-memory state, from newest to oldest + /// (highest to lowest). pub fn canonical_chain(&self) -> impl Iterator> { let pending = self.inner.in_memory_state.pending.borrow().clone(); let head = self.inner.in_memory_state.head_state(); @@ -666,8 +667,12 @@ impl BlockState { .unwrap_or_default() } - /// Returns a vector of parent `BlockStates`. - /// The block state order in the output vector is newest to oldest. + /// Returns a vector of __parent__ `BlockStates`. 
+ /// + /// The block state order in the output vector is newest to oldest (highest to lowest): + /// `[5,4,3,2,1]` + /// + /// Note: This does not include self. pub fn parent_state_chain(&self) -> Vec<&Self> { let mut parents = Vec::new(); let mut current = self.parent.as_deref(); @@ -681,8 +686,8 @@ impl BlockState { } /// Returns a vector of `BlockStates` representing the entire in memory chain. - /// The block state order in the output vector is newest to oldest, including - /// self as the first element. + /// The block state order in the output vector is newest to oldest (highest to lowest), + /// including self as the first element. pub fn chain(&self) -> Vec<&Self> { let mut chain = vec![self]; self.append_parent_chain(&mut chain); From ba4e41110b87cccda21d588e0a59bc94ee910e80 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 27 Sep 2024 13:46:55 +0200 Subject: [PATCH 37/84] chore: use Arc over Box (#11281) --- crates/chain-state/src/in_memory.rs | 35 +++++++++++++---------------- 1 file changed, 15 insertions(+), 20 deletions(-) diff --git a/crates/chain-state/src/in_memory.rs b/crates/chain-state/src/in_memory.rs index 4fd46c4a16371..029666f6ac905 100644 --- a/crates/chain-state/src/in_memory.rs +++ b/crates/chain-state/src/in_memory.rs @@ -233,7 +233,7 @@ impl CanonicalInMemoryState { pub fn set_pending_block(&self, pending: ExecutedBlock) { // fetch the state of the pending block's parent block let parent = self.state_by_hash(pending.block().parent_hash); - let pending = BlockState::with_parent(pending, parent.map(|p| (*p).clone())); + let pending = BlockState::with_parent(pending, parent); self.inner.in_memory_state.pending.send_modify(|p| { p.replace(pending); }); @@ -261,8 +261,7 @@ impl CanonicalInMemoryState { // insert the new blocks for block in new_blocks { let parent = blocks.get(&block.block().parent_hash).cloned(); - let block_state = - BlockState::with_parent(block.clone(), parent.map(|p| (*p).clone())); + let block_state = BlockState::with_parent(block.clone(), parent); let hash = block_state.hash(); let number = block_state.number(); @@ -329,8 +328,7 @@ impl CanonicalInMemoryState { for block in old_blocks { let parent = blocks.get(&block.block().parent_hash).cloned(); - let block_state = - BlockState::with_parent(block.clone(), parent.map(|p| (*p).clone())); + let block_state = BlockState::with_parent(block.clone(), parent); let hash = block_state.hash(); let number = block_state.number(); @@ -342,10 +340,7 @@ impl CanonicalInMemoryState { // also shift the pending state if it exists self.inner.in_memory_state.pending.send_modify(|p| { if let Some(p) = p.as_mut() { - p.parent = blocks - .get(&p.block().block.parent_hash) - .cloned() - .map(|p| Box::new((*p).clone())); + p.parent = blocks.get(&p.block().block.parent_hash).cloned(); } }); } @@ -595,7 +590,7 @@ pub struct BlockState { /// The executed block that determines the state after this block has been executed. block: ExecutedBlock, /// The block's parent block if it exists. - parent: Option>, + parent: Option>, } #[allow(dead_code)] @@ -606,8 +601,8 @@ impl BlockState { } /// [`BlockState`] constructor with parent. - pub fn with_parent(block: ExecutedBlock, parent: Option) -> Self { - Self { block, parent: parent.map(Box::new) } + pub const fn with_parent(block: ExecutedBlock, parent: Option>) -> Self { + Self { block, parent } } /// Returns the hash and block of the on disk block this state can be traced back to. 
@@ -875,7 +870,7 @@ mod tests { for i in 1..=num_blocks { let mut state = create_mock_state(test_block_builder, i, parent_hash); if let Some(parent) = parent_state { - state.parent = Some(Box::new(parent)); + state.parent = Some(Arc::new(parent)); } parent_hash = state.hash(); parent_state = Some(state.clone()); @@ -1171,7 +1166,7 @@ mod tests { // Check the pending state assert_eq!( state.pending_state().unwrap(), - BlockState::with_parent(block2.clone(), Some(BlockState::new(block1))) + BlockState::with_parent(block2.clone(), Some(Arc::new(BlockState::new(block1)))) ); // Check the pending block @@ -1206,14 +1201,14 @@ mod tests { let block2 = test_block_builder.get_executed_block_with_number(2, block1.block().hash()); let block3 = test_block_builder.get_executed_block_with_number(3, block2.block().hash()); - let state1 = BlockState::new(block1.clone()); - let state2 = BlockState::with_parent(block2.clone(), Some(state1.clone())); - let state3 = BlockState::with_parent(block3.clone(), Some(state2.clone())); + let state1 = Arc::new(BlockState::new(block1.clone())); + let state2 = Arc::new(BlockState::with_parent(block2.clone(), Some(state1.clone()))); + let state3 = Arc::new(BlockState::with_parent(block3.clone(), Some(state2.clone()))); let mut blocks = HashMap::default(); - blocks.insert(block1.block().hash(), Arc::new(state1)); - blocks.insert(block2.block().hash(), Arc::new(state2)); - blocks.insert(block3.block().hash(), Arc::new(state3)); + blocks.insert(block1.block().hash(), state1); + blocks.insert(block2.block().hash(), state2); + blocks.insert(block3.block().hash(), state3); let mut numbers = BTreeMap::new(); numbers.insert(1, block1.block().hash()); From 6757ab81902b4c0e8a5ec51eb7d9ed10ec328367 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 27 Sep 2024 14:47:00 +0200 Subject: [PATCH 38/84] fix: make canonical_chain atomic and canonical (#11283) --- crates/chain-state/src/in_memory.rs | 53 ++++++++++++----------------- 1 file changed, 21 insertions(+), 32 deletions(-) diff --git a/crates/chain-state/src/in_memory.rs b/crates/chain-state/src/in_memory.rs index 029666f6ac905..fc142dd03a71b 100644 --- a/crates/chain-state/src/in_memory.rs +++ b/crates/chain-state/src/in_memory.rs @@ -512,23 +512,12 @@ impl CanonicalInMemoryState { MemoryOverlayStateProvider::new(historical, in_memory) } - /// Returns an iterator over all canonical blocks in the in-memory state, from newest to oldest - /// (highest to lowest). + /// Returns an iterator over all __canonical blocks__ in the in-memory state, from newest to + /// oldest (highest to lowest). + /// + /// This iterator contains a snapshot of the in-memory state at the time of the call. pub fn canonical_chain(&self) -> impl Iterator> { - let pending = self.inner.in_memory_state.pending.borrow().clone(); - let head = self.inner.in_memory_state.head_state(); - - // this clone is cheap because we only expect to keep in memory a few - // blocks and all of them are Arcs. - let blocks = self.inner.in_memory_state.blocks.read().clone(); - - std::iter::once(pending).filter_map(|p| p.map(Arc::new)).chain(std::iter::successors( - head, - move |state| { - let parent_hash = state.block().block().parent_hash; - blocks.get(&parent_hash).cloned() - }, - )) + self.inner.in_memory_state.head_state().into_iter().flat_map(|head| head.iter()) } /// Returns a `TransactionSigned` for the given `TxHash` if found. 
@@ -693,6 +682,13 @@ impl BlockState { pub fn append_parent_chain<'a>(&'a self, chain: &mut Vec<&'a Self>) { chain.extend(self.parent_state_chain()); } + + /// Returns an iterator over the atomically captured chain of in memory blocks. + /// + /// This yields the blocks from newest to oldest (highest to lowest). + pub fn iter(self: Arc) -> impl Iterator> { + std::iter::successors(Some(self), |state| state.parent.clone()) + } } /// Represents an executed block stored in-memory. @@ -1267,20 +1263,17 @@ mod tests { #[test] fn test_canonical_in_memory_state_canonical_chain_multiple_blocks() { - let mut blocks = HashMap::default(); - let mut numbers = BTreeMap::new(); let mut parent_hash = B256::random(); let mut block_builder = TestBlockBuilder::default(); + let state = CanonicalInMemoryState::empty(); for i in 1..=3 { let block = block_builder.get_executed_block_with_number(i, parent_hash); let hash = block.block().hash(); - blocks.insert(hash, Arc::new(BlockState::new(block.clone()))); - numbers.insert(i, hash); + state.update_blocks(Some(block), None); parent_hash = hash; } - let state = CanonicalInMemoryState::new(blocks, numbers, None, None); let chain: Vec<_> = state.canonical_chain().collect(); assert_eq!(chain.len(), 3); @@ -1289,31 +1282,27 @@ mod tests { assert_eq!(chain[2].number(), 1); } + // ensures the pending block is not part of the canonical chain #[test] fn test_canonical_in_memory_state_canonical_chain_with_pending_block() { - let mut blocks = HashMap::default(); - let mut numbers = BTreeMap::new(); let mut parent_hash = B256::random(); let mut block_builder = TestBlockBuilder::default(); + let state = CanonicalInMemoryState::empty(); for i in 1..=2 { let block = block_builder.get_executed_block_with_number(i, parent_hash); let hash = block.block().hash(); - blocks.insert(hash, Arc::new(BlockState::new(block.clone()))); - numbers.insert(i, hash); + state.update_blocks(Some(block), None); parent_hash = hash; } let pending_block = block_builder.get_executed_block_with_number(3, parent_hash); - let pending_state = BlockState::new(pending_block); - - let state = CanonicalInMemoryState::new(blocks, numbers, Some(pending_state), None); + state.set_pending_block(pending_block); let chain: Vec<_> = state.canonical_chain().collect(); - assert_eq!(chain.len(), 3); - assert_eq!(chain[0].number(), 3); - assert_eq!(chain[1].number(), 2); - assert_eq!(chain[2].number(), 1); + assert_eq!(chain.len(), 2); + assert_eq!(chain[0].number(), 2); + assert_eq!(chain[1].number(), 1); } #[test] From 247f029e2dbf377c75a5eb0e9082f825bbe37996 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 27 Sep 2024 14:56:52 +0200 Subject: [PATCH 39/84] chore: add traces for blob sidecar ops (#11284) --- crates/transaction-pool/src/pool/mod.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crates/transaction-pool/src/pool/mod.rs b/crates/transaction-pool/src/pool/mod.rs index 2ba101721b08c..4c0dc81eed1ef 100644 --- a/crates/transaction-pool/src/pool/mod.rs +++ b/crates/transaction-pool/src/pool/mod.rs @@ -463,6 +463,7 @@ where } if let Some(replaced) = added.replaced_blob_transaction() { + debug!(target: "txpool", "[{:?}] delete replaced blob sidecar", replaced); // delete the replaced transaction from the blob store self.delete_blob(replaced); } @@ -798,6 +799,7 @@ where /// Inserts a blob transaction into the blob store fn insert_blob(&self, hash: TxHash, blob: BlobTransactionSidecar) { + debug!(target: "txpool", "[{:?}] storing blob sidecar", hash); if let Err(err) = self.blob_store.insert(hash, 
blob) { warn!(target: "txpool", %err, "[{:?}] failed to insert blob", hash); self.blob_store_metrics.blobstore_failed_inserts.increment(1); From 098018db493c48e8bc9e4a4fc4d62e2b668de724 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 27 Sep 2024 14:59:42 +0200 Subject: [PATCH 40/84] perf: remove sidecar clone if no listeners (#11285) --- crates/transaction-pool/src/pool/mod.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/crates/transaction-pool/src/pool/mod.rs b/crates/transaction-pool/src/pool/mod.rs index 4c0dc81eed1ef..4c1a7f2c29bbc 100644 --- a/crates/transaction-pool/src/pool/mod.rs +++ b/crates/transaction-pool/src/pool/mod.rs @@ -580,9 +580,11 @@ where /// Notify all listeners about a blob sidecar for a newly inserted blob (eip4844) transaction. fn on_new_blob_sidecar(&self, tx_hash: &TxHash, sidecar: &BlobTransactionSidecar) { - let sidecar = Arc::new(sidecar.clone()); - let mut sidecar_listeners = self.blob_transaction_sidecar_listener.lock(); + if sidecar_listeners.is_empty() { + return + } + let sidecar = Arc::new(sidecar.clone()); sidecar_listeners.retain_mut(|listener| { let new_blob_event = NewBlobSidecar { tx_hash: *tx_hash, sidecar: sidecar.clone() }; match listener.sender.try_send(new_blob_event) { From ea060fe9a081a973e1592ad2d63958334d24c1e7 Mon Sep 17 00:00:00 2001 From: joshieDo <93316087+joshieDo@users.noreply.github.com> Date: Fri, 27 Sep 2024 15:22:46 +0200 Subject: [PATCH 41/84] feat: `ExExEvent::FinishedHeight` takes `BlockNumHash` instead (#11278) Co-authored-by: Alexey Shekhirin --- book/developers/exex/hello-world.md | 2 +- book/developers/exex/how-it-works.md | 2 +- book/developers/exex/remote.md | 4 +-- book/developers/exex/tracking-state.md | 4 +-- crates/exex/exex/src/event.rs | 4 +-- crates/exex/exex/src/manager.rs | 43 +++++++++++++++----------- crates/exex/test-utils/src/lib.rs | 4 +-- 7 files changed, 35 insertions(+), 28 deletions(-) diff --git a/book/developers/exex/hello-world.md b/book/developers/exex/hello-world.md index 3c90e5a693d08..facb07e5307f2 100644 --- a/book/developers/exex/hello-world.md +++ b/book/developers/exex/hello-world.md @@ -125,7 +125,7 @@ async fn my_exex(mut ctx: ExExContext) -> eyre:: if let Some(committed_chain) = notification.committed_chain() { ctx.events - .send(ExExEvent::FinishedHeight(committed_chain.tip().number))?; + .send(ExExEvent::FinishedHeight(committed_chain.tip().num_hash()))?; } } diff --git a/book/developers/exex/how-it-works.md b/book/developers/exex/how-it-works.md index 7fd179bf91559..228711d3fea48 100644 --- a/book/developers/exex/how-it-works.md +++ b/book/developers/exex/how-it-works.md @@ -23,4 +23,4 @@ event to signify what blocks have been processed. This event is used by Reth to An ExEx will only receive notifications for block numbers greater than the block in the most recently emitted `FinishedHeight` event. -To clarify: if an ExEx emits `ExExEvent::FinishedHeight(0)` it will receive notifications for any `block_number > 0`. +To clarify: if an ExEx emits `ExExEvent::FinishedHeight` for `block #0` it will receive notifications for any `block_number > 0`. 
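
For reference, a minimal ExEx loop using the new `BlockNumHash`-based `FinishedHeight` payload looks roughly like the sketch below. It is modeled on the `hello-world.md` and `remote.md` snippets touched in this patch; the crate paths, the generic bound, and the notification stream item type are assumptions taken from those book examples, not verbatim from this diff.

```rust
use futures::StreamExt;
use reth_exex::{ExExContext, ExExEvent};
use reth_node_api::FullNodeComponents;

/// Minimal ExEx event loop showing the new `FinishedHeight` payload: a `BlockNumHash`
/// (number + hash) of the highest block the ExEx has fully processed.
async fn minimal_exex<Node: FullNodeComponents>(mut ctx: ExExContext<Node>) -> eyre::Result<()> {
    while let Some(notification) = ctx.notifications.next().await {
        if let Some(committed_chain) = notification.committed_chain() {
            // Report the highest processed block; the node is then allowed to prune
            // blocks up to this height, and on a reorg the reported height may go down.
            ctx.events
                .send(ExExEvent::FinishedHeight(committed_chain.tip().num_hash()))?;
        }
    }
    Ok(())
}
```
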
diff --git a/book/developers/exex/remote.md b/book/developers/exex/remote.md index 2db5074e1df75..4344e28b34fc8 100644 --- a/book/developers/exex/remote.md +++ b/book/developers/exex/remote.md @@ -279,7 +279,7 @@ async fn remote_exex( while let Some(notification) = ctx.notifications.next().await { if let Some(committed_chain) = notification.committed_chain() { ctx.events - .send(ExExEvent::FinishedHeight(committed_chain.tip().number))?; + .send(ExExEvent::FinishedHeight(committed_chain.tip().num_hash()))?; } info!("Notification sent to the gRPC server"); @@ -388,7 +388,7 @@ async fn remote_exex( while let Some(notification) = ctx.notifications.next().await { if let Some(committed_chain) = notification.committed_chain() { ctx.events - .send(ExExEvent::FinishedHeight(committed_chain.tip().number))?; + .send(ExExEvent::FinishedHeight(committed_chain.tip().num_hash()))?; } info!(?notification, "Notification sent to the gRPC server"); diff --git a/book/developers/exex/tracking-state.md b/book/developers/exex/tracking-state.md index 4d3bbd0a35ae1..52c73e6180297 100644 --- a/book/developers/exex/tracking-state.md +++ b/book/developers/exex/tracking-state.md @@ -57,7 +57,7 @@ impl Future for MyExEx { if let Some(committed_chain) = notification.committed_chain() { this.ctx .events - .send(ExExEvent::FinishedHeight(committed_chain.tip().number))?; + .send(ExExEvent::FinishedHeight(committed_chain.tip().num_hash()))?; } } @@ -152,7 +152,7 @@ impl Future for MyExEx { this.ctx .events - .send(ExExEvent::FinishedHeight(committed_chain.tip().number))?; + .send(ExExEvent::FinishedHeight(committed_chain.tip().num_hash()))?; } if let Some(first_block) = this.first_block { diff --git a/crates/exex/exex/src/event.rs b/crates/exex/exex/src/event.rs index c26c1c5344b20..1215ea2a502a8 100644 --- a/crates/exex/exex/src/event.rs +++ b/crates/exex/exex/src/event.rs @@ -1,4 +1,4 @@ -use alloy_primitives::BlockNumber; +use reth_primitives::BlockNumHash; /// Events emitted by an `ExEx`. #[derive(Debug, Clone, Copy, PartialEq, Eq)] @@ -9,5 +9,5 @@ pub enum ExExEvent { /// meaning that Reth is allowed to prune them. /// /// On reorgs, it's possible for the height to go down. - FinishedHeight(BlockNumber), + FinishedHeight(BlockNumHash), } diff --git a/crates/exex/exex/src/manager.rs b/crates/exex/exex/src/manager.rs index 9b07aef0aad61..5b3b38ba0d514 100644 --- a/crates/exex/exex/src/manager.rs +++ b/crates/exex/exex/src/manager.rs @@ -1,13 +1,12 @@ use crate::{ wal::Wal, ExExEvent, ExExNotification, ExExNotifications, FinishedExExHeight, WalHandle, }; -use alloy_primitives::BlockNumber; use futures::StreamExt; use metrics::Gauge; use reth_chain_state::ForkChoiceStream; use reth_chainspec::Head; use reth_metrics::{metrics::Counter, Metrics}; -use reth_primitives::SealedHeader; +use reth_primitives::{BlockNumHash, SealedHeader}; use reth_tracing::tracing::debug; use std::{ collections::VecDeque, @@ -53,10 +52,10 @@ pub struct ExExHandle { receiver: UnboundedReceiver, /// The ID of the next notification to send to this `ExEx`. next_notification_id: usize, - /// The finished block number of the `ExEx`. + /// The finished block of the `ExEx`. /// /// If this is `None`, the `ExEx` has not emitted a `FinishedHeight` event. - finished_height: Option, + finished_height: Option, } impl ExExHandle { @@ -105,11 +104,11 @@ impl ExExHandle { // Skip the chain commit notification if the finished height of the ExEx is // higher than or equal to the tip of the new notification. // I.e., the ExEx has already processed the notification. 
- if finished_height >= new.tip().number { + if finished_height.number >= new.tip().number { debug!( exex_id = %self.id, %notification_id, - %finished_height, + ?finished_height, new_tip = %new.tip().number, "Skipping notification" ); @@ -377,7 +376,7 @@ impl Future for ExExManager { // update watch channel block number let finished_height = self.exex_handles.iter_mut().try_fold(u64::MAX, |curr, exex| { - exex.finished_height.map_or(Err(()), |height| Ok(height.min(curr))) + exex.finished_height.map_or(Err(()), |height| Ok(height.number.min(curr))) }); if let Ok(finished_height) = finished_height { let _ = self.finished_height.send(FinishedExExHeight::Height(finished_height)); @@ -532,9 +531,10 @@ mod tests { ExExHandle::new("test_exex".to_string(), Head::default(), (), (), wal.handle()); // Send an event and check that it's delivered correctly - event_tx.send(ExExEvent::FinishedHeight(42)).unwrap(); + let event = ExExEvent::FinishedHeight(BlockNumHash::new(42, B256::random())); + event_tx.send(event).unwrap(); let received_event = exex_handle.receiver.recv().await.unwrap(); - assert_eq!(received_event, ExExEvent::FinishedHeight(42)); + assert_eq!(received_event, event); } #[tokio::test] @@ -672,7 +672,8 @@ mod tests { assert!(exex_handle.finished_height.is_none()); // Update the block height via an event - event_tx.send(ExExEvent::FinishedHeight(42)).unwrap(); + let block = BlockNumHash::new(42, B256::random()); + event_tx.send(ExExEvent::FinishedHeight(block)).unwrap(); // Create a mock ExExManager and add the exex_handle to it let exex_manager = ExExManager::new( @@ -690,7 +691,7 @@ mod tests { // Check that the block height was updated let updated_exex_handle = &pinned_manager.exex_handles[0]; - assert_eq!(updated_exex_handle.finished_height, Some(42)); + assert_eq!(updated_exex_handle.finished_height, Some(block)); // Get the receiver for the finished height let mut receiver = pinned_manager.handle.finished_height(); @@ -716,9 +717,12 @@ mod tests { let (exex_handle2, event_tx2, _) = ExExHandle::new("test_exex2".to_string(), Head::default(), (), (), wal.handle()); + let block1 = BlockNumHash::new(42, B256::random()); + let block2 = BlockNumHash::new(10, B256::random()); + // Send events to update the block heights of the two handles, with the second being lower - event_tx1.send(ExExEvent::FinishedHeight(42)).unwrap(); - event_tx2.send(ExExEvent::FinishedHeight(10)).unwrap(); + event_tx1.send(ExExEvent::FinishedHeight(block1)).unwrap(); + event_tx2.send(ExExEvent::FinishedHeight(block2)).unwrap(); let exex_manager = ExExManager::new( vec![exex_handle1, exex_handle2], @@ -760,9 +764,12 @@ mod tests { // Assert that the initial block height is `None` for the first `ExExHandle`. assert!(exex_handle1.finished_height.is_none()); + let block1 = BlockNumHash::new(42, B256::random()); + let block2 = BlockNumHash::new(100, B256::random()); + // Send events to update the block heights of the two handles, with the second being higher. 
- event_tx1.send(ExExEvent::FinishedHeight(42)).unwrap(); - event_tx2.send(ExExEvent::FinishedHeight(100)).unwrap(); + event_tx1.send(ExExEvent::FinishedHeight(block1)).unwrap(); + event_tx2.send(ExExEvent::FinishedHeight(block2)).unwrap(); let exex_manager = ExExManager::new( vec![exex_handle1, exex_handle2], @@ -896,7 +903,7 @@ mod tests { ExExHandle::new("test_exex".to_string(), Head::default(), (), (), wal.handle()); // Set finished_height to a value higher than the block tip - exex_handle.finished_height = Some(15); + exex_handle.finished_height = Some(BlockNumHash::new(15, B256::random())); let mut block1 = SealedBlockWithSenders::default(); block1.block.header.set_hash(B256::new([0x01; 32])); @@ -947,7 +954,7 @@ mod tests { // Even if the finished height is higher than the tip of the new chain, the reorg // notification should be received - exex_handle.finished_height = Some(u64::MAX); + exex_handle.finished_height = Some(BlockNumHash::new(u64::MAX, B256::random())); let mut cx = Context::from_waker(futures::task::noop_waker_ref()); @@ -978,7 +985,7 @@ mod tests { // Even if the finished height is higher than the tip of the new chain, the reorg // notification should be received - exex_handle.finished_height = Some(u64::MAX); + exex_handle.finished_height = Some(BlockNumHash::new(u64::MAX, B256::random())); let mut cx = Context::from_waker(futures::task::noop_waker_ref()); diff --git a/crates/exex/test-utils/src/lib.rs b/crates/exex/test-utils/src/lib.rs index 906437ca7821e..3a9b8dc0ab42e 100644 --- a/crates/exex/test-utils/src/lib.rs +++ b/crates/exex/test-utils/src/lib.rs @@ -41,7 +41,7 @@ use reth_node_ethereum::{ EthEngineTypes, EthEvmConfig, }; use reth_payload_builder::noop::NoopPayloadBuilderService; -use reth_primitives::{Head, SealedBlockWithSenders}; +use reth_primitives::{BlockNumHash, Head, SealedBlockWithSenders}; use reth_provider::{ providers::{BlockchainProvider, StaticFileProvider}, BlockReader, ProviderFactory, @@ -223,7 +223,7 @@ impl TestExExHandle { /// Asserts that the Execution Extension emitted a `FinishedHeight` event with the correct /// height. 
#[track_caller] - pub fn assert_event_finished_height(&mut self, height: u64) -> eyre::Result<()> { + pub fn assert_event_finished_height(&mut self, height: BlockNumHash) -> eyre::Result<()> { let event = self.events_rx.try_recv()?; assert_eq!(event, ExExEvent::FinishedHeight(height)); Ok(()) From fbb0b11f074fe551650d5744d8898ad2bb1c9e18 Mon Sep 17 00:00:00 2001 From: Abhishek kochar Date: Fri, 27 Sep 2024 22:28:03 +0800 Subject: [PATCH 42/84] chore(exex-eips): replace reth-primitives with alloy-eips (#11275) Signed-off-by: Abhishekkochar --- Cargo.lock | 1 + crates/exex/exex/Cargo.toml | 1 + crates/exex/exex/src/notifications.rs | 3 ++- crates/exex/exex/src/wal/cache.rs | 3 ++- crates/exex/exex/src/wal/mod.rs | 2 +- 5 files changed, 7 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 173eb2946b401..7e302ec0471e3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7394,6 +7394,7 @@ name = "reth-exex" version = "1.0.7" dependencies = [ "alloy-consensus", + "alloy-eips", "alloy-genesis", "alloy-primitives", "dashmap 6.1.0", diff --git a/crates/exex/exex/Cargo.toml b/crates/exex/exex/Cargo.toml index 2b5b89fbd16d3..f10775e245872 100644 --- a/crates/exex/exex/Cargo.toml +++ b/crates/exex/exex/Cargo.toml @@ -34,6 +34,7 @@ reth-tracing.workspace = true # alloy alloy-primitives.workspace = true +alloy-eips.workspace = true ## async futures.workspace = true diff --git a/crates/exex/exex/src/notifications.rs b/crates/exex/exex/src/notifications.rs index e182f385fa793..3d5c4a3500dc4 100644 --- a/crates/exex/exex/src/notifications.rs +++ b/crates/exex/exex/src/notifications.rs @@ -355,11 +355,12 @@ mod tests { use super::*; use alloy_consensus::Header; + use alloy_eips::BlockNumHash; use eyre::OptionExt; use futures::StreamExt; use reth_db_common::init::init_genesis; use reth_evm_ethereum::execute::EthExecutorProvider; - use reth_primitives::{Block, BlockNumHash}; + use reth_primitives::Block; use reth_provider::{ providers::BlockchainProvider2, test_utils::create_test_provider_factory, BlockWriter, Chain, diff --git a/crates/exex/exex/src/wal/cache.rs b/crates/exex/exex/src/wal/cache.rs index 8a432bbebef14..b3d29f1b3955c 100644 --- a/crates/exex/exex/src/wal/cache.rs +++ b/crates/exex/exex/src/wal/cache.rs @@ -1,9 +1,10 @@ use std::collections::{BTreeMap, VecDeque}; +use alloy_eips::BlockNumHash; +use alloy_primitives::B256; use dashmap::DashMap; use parking_lot::RwLock; use reth_exex_types::ExExNotification; -use reth_primitives::{BlockNumHash, B256}; /// The block cache of the WAL. /// diff --git a/crates/exex/exex/src/wal/mod.rs b/crates/exex/exex/src/wal/mod.rs index 91a447f23682f..5d28cb3e0377e 100644 --- a/crates/exex/exex/src/wal/mod.rs +++ b/crates/exex/exex/src/wal/mod.rs @@ -7,8 +7,8 @@ pub use storage::Storage; use std::{path::Path, sync::Arc}; +use alloy_eips::BlockNumHash; use reth_exex_types::ExExNotification; -use reth_primitives::BlockNumHash; use reth_tracing::tracing::{debug, instrument}; /// WAL is a write-ahead log (WAL) that stores the notifications sent to ExExes. 
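
With the import changes in patch 42, the ExEx WAL pieces in this stretch of the series fit together roughly as follows: a notification is committed to the WAL before it is handed to an ExEx, and it can later be read back through a `WalHandle`. A hypothetical round-trip is sketched below, using only the `Wal::new`, `commit`, `handle`, and `get_committed_notification_by_block_hash` calls that appear in these patches (the handle lookup lands a few patches later in the series); crate paths, mutability, and error handling are assumptions rather than verbatim API documentation.

```rust
use reth_exex::{ExExNotification, Wal};

/// Hypothetical round-trip: commit a notification to the WAL, then read it back
/// through a `WalHandle` by the hash of its committed tip.
fn wal_round_trip(notification: &ExExNotification) -> eyre::Result<()> {
    // Open (or create) the WAL in a temporary directory, as the tests in these patches do.
    let temp_dir = tempfile::tempdir()?;
    let mut wal = Wal::new(temp_dir.path())?;

    // Persist the notification before delivery so it can be replayed after a restart,
    // until the corresponding block is finalized.
    wal.commit(notification)?;

    // A handle can be shared with notification streams and used to look a committed
    // notification back up by the hash of one of its committed blocks.
    let handle = wal.handle();
    if let Some(committed_chain) = notification.committed_chain() {
        let replayed =
            handle.get_committed_notification_by_block_hash(&committed_chain.tip().hash())?;
        assert_eq!(replayed.as_ref(), Some(notification));
    }

    Ok(())
}
```
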
From 0446ec471a18c1520d3e0d671650ab868b888d90 Mon Sep 17 00:00:00 2001 From: Starkey Date: Fri, 27 Sep 2024 21:29:21 +0630 Subject: [PATCH 43/84] optimism: fix 'expecte' (#11290) --- crates/optimism/evm/src/l1.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/optimism/evm/src/l1.rs b/crates/optimism/evm/src/l1.rs index 717bf833f9667..7bc8cfed1c553 100644 --- a/crates/optimism/evm/src/l1.rs +++ b/crates/optimism/evm/src/l1.rs @@ -352,7 +352,7 @@ mod tests { let expected_l1_blob_base_fee = U256::from_be_bytes(hex!( "0000000000000000000000000000000000000000000000000000000d5ea528d2" // 57422457042 )); - let expecte_l1_blob_base_fee_scalar = U256::from(810949); + let expected_l1_blob_base_fee_scalar = U256::from(810949); // test @@ -361,7 +361,7 @@ mod tests { assert_eq!(l1_block_info.l1_base_fee, expected_l1_base_fee); assert_eq!(l1_block_info.l1_base_fee_scalar, expected_l1_base_fee_scalar); assert_eq!(l1_block_info.l1_blob_base_fee, Some(expected_l1_blob_base_fee)); - assert_eq!(l1_block_info.l1_blob_base_fee_scalar, Some(expecte_l1_blob_base_fee_scalar)); + assert_eq!(l1_block_info.l1_blob_base_fee_scalar, Some(expected_l1_blob_base_fee_scalar)); } #[test] From 07e94e7fa5ad26dcaab3d12c990720572b332f4f Mon Sep 17 00:00:00 2001 From: DaniPopes <57450786+DaniPopes@users.noreply.github.com> Date: Fri, 27 Sep 2024 17:20:43 +0200 Subject: [PATCH 44/84] perf(db): use smallvec for mdbx table names (#11291) --- Cargo.lock | 1 + crates/cli/util/src/allocator.rs | 4 +--- crates/storage/libmdbx-rs/Cargo.toml | 6 ++++-- crates/storage/libmdbx-rs/src/database.rs | 11 ++++++++--- 4 files changed, 14 insertions(+), 8 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7e302ec0471e3..7368caf8422ed 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7544,6 +7544,7 @@ dependencies = [ "rand 0.8.5", "rand_xorshift", "reth-mdbx-sys", + "smallvec", "tempfile", "thiserror", "tracing", diff --git a/crates/cli/util/src/allocator.rs b/crates/cli/util/src/allocator.rs index b5974e2245f82..ee13e7c61cb52 100644 --- a/crates/cli/util/src/allocator.rs +++ b/crates/cli/util/src/allocator.rs @@ -12,6 +12,7 @@ cfg_if::cfg_if! { cfg_if::cfg_if! { if #[cfg(feature = "tracy-allocator")] { type AllocatorWrapper = tracy_client::ProfiledAllocator; + tracy_client::register_demangler!(); const fn new_allocator_wrapper() -> AllocatorWrapper { AllocatorWrapper::new(AllocatorInner {}, 100) } @@ -23,9 +24,6 @@ cfg_if::cfg_if! { } } -#[cfg(feature = "tracy-allocator")] -tracy_client::register_demangler!(); - /// Custom allocator. 
pub type Allocator = AllocatorWrapper; diff --git a/crates/storage/libmdbx-rs/Cargo.toml b/crates/storage/libmdbx-rs/Cargo.toml index 8056b68557b81..fa10a73cb330b 100644 --- a/crates/storage/libmdbx-rs/Cargo.toml +++ b/crates/storage/libmdbx-rs/Cargo.toml @@ -19,14 +19,16 @@ byteorder = "1" derive_more.workspace = true indexmap = "2" parking_lot.workspace = true +smallvec.workspace = true thiserror.workspace = true -dashmap = { workspace = true, features = ["inline"], optional = true } tracing.workspace = true +dashmap = { workspace = true, features = ["inline"], optional = true } + [features] default = [] return-borrowed = [] -read-tx-timeouts = ["dashmap", "dashmap/inline"] +read-tx-timeouts = ["dep:dashmap"] [dev-dependencies] pprof = { workspace = true, features = [ diff --git a/crates/storage/libmdbx-rs/src/database.rs b/crates/storage/libmdbx-rs/src/database.rs index 1c4739b2bee71..c8733889160e8 100644 --- a/crates/storage/libmdbx-rs/src/database.rs +++ b/crates/storage/libmdbx-rs/src/database.rs @@ -4,7 +4,7 @@ use crate::{ Environment, Transaction, }; use ffi::MDBX_db_flags_t; -use std::{ffi::CString, ptr}; +use std::{ffi::CStr, ptr}; /// A handle to an individual database in an environment. /// @@ -27,8 +27,13 @@ impl Database { name: Option<&str>, flags: MDBX_db_flags_t, ) -> Result { - let c_name = name.map(|n| CString::new(n).unwrap()); - let name_ptr = if let Some(c_name) = &c_name { c_name.as_ptr() } else { ptr::null() }; + let mut c_name_buf = smallvec::SmallVec::<[u8; 32]>::new(); + let c_name = name.map(|n| { + c_name_buf.extend_from_slice(n.as_bytes()); + c_name_buf.push(0); + CStr::from_bytes_with_nul(&c_name_buf).unwrap() + }); + let name_ptr = if let Some(c_name) = c_name { c_name.as_ptr() } else { ptr::null() }; let mut dbi: ffi::MDBX_dbi = 0; txn.txn_execute(|txn_ptr| { mdbx_result(unsafe { ffi::mdbx_dbi_open(txn_ptr, name_ptr, flags, &mut dbi) }) From dbd9a2bb35ba8fbcbaf9ec74e63efdee4a087d3f Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Fri, 27 Sep 2024 16:22:03 +0100 Subject: [PATCH 45/84] feat(exex): send canonical notifications when head is provided (#11280) --- crates/exex/exex/src/notifications.rs | 388 ++++++++++++++------------ crates/exex/exex/src/wal/cache.rs | 14 +- crates/exex/exex/src/wal/mod.rs | 22 +- crates/exex/exex/src/wal/storage.rs | 17 +- 4 files changed, 255 insertions(+), 186 deletions(-) diff --git a/crates/exex/exex/src/notifications.rs b/crates/exex/exex/src/notifications.rs index 3d5c4a3500dc4..369a0586c0c53 100644 --- a/crates/exex/exex/src/notifications.rs +++ b/crates/exex/exex/src/notifications.rs @@ -1,6 +1,4 @@ use crate::{BackfillJobFactory, ExExNotification, StreamBackfillJob, WalHandle}; -use alloy_primitives::U256; -use eyre::OptionExt; use futures::{Stream, StreamExt}; use reth_chainspec::Head; use reth_evm::execute::BlockExecutorProvider; @@ -137,15 +135,16 @@ pub struct ExExNotificationsWithHead { provider: P, executor: E, notifications: Receiver, - #[allow(dead_code)] wal_handle: WalHandle, exex_head: ExExHead, - pending_sync: bool, + /// If true, then we need to check if the ExEx head is on the canonical chain and if not, + /// revert its head. + pending_check_canonical: bool, + /// If true, then we need to check if the ExEx head is behind the node head and if so, backfill + /// the missing blocks. + pending_check_backfill: bool, /// The backfill job to run before consuming any notifications. 
backfill_job: Option>, - /// Whether we're currently waiting for the node head to catch up to the same height as the - /// ExEx head. - node_head_catchup_in_progress: bool, } impl ExExNotificationsWithHead @@ -169,90 +168,76 @@ where notifications, wal_handle, exex_head, - pending_sync: true, + pending_check_canonical: true, + pending_check_backfill: true, backfill_job: None, - node_head_catchup_in_progress: false, } } - /// Compares the node head against the ExEx head, and synchronizes them in case of a mismatch. + /// Checks if the ExEx head is on the canonical chain. + /// + /// If the head block is not found in the database, it means we're not on the canonical chain + /// and we need to revert the notification with the ExEx head block. + fn check_canonical(&mut self) -> eyre::Result> { + if self.provider.header(&self.exex_head.block.hash)?.is_some() { + debug!(target: "exex::notifications", "ExEx head is on the canonical chain"); + return Ok(None) + } + + // If the head block is not found in the database, it means we're not on the canonical + // chain. + + // Get the committed notification for the head block from the WAL. + let Some(notification) = + self.wal_handle.get_committed_notification_by_block_hash(&self.exex_head.block.hash)? + else { + return Err(eyre::eyre!( + "Could not find notification for block hash {:?} in the WAL", + self.exex_head.block.hash + )) + }; + + // Update the head block hash to the parent hash of the first committed block. + let committed_chain = notification.committed_chain().unwrap(); + let new_exex_head = + (committed_chain.first().parent_hash, committed_chain.first().number - 1).into(); + debug!(target: "exex::notifications", old_exex_head = ?self.exex_head.block, new_exex_head = ?new_exex_head, "ExEx head updated"); + self.exex_head.block = new_exex_head; + + // Return an inverted notification. See the documentation for + // `ExExNotification::into_inverted`. + Ok(Some(notification.into_inverted())) + } + + /// Compares the node head against the ExEx head, and backfills if needed. + /// + /// CAUTON: This method assumes that the ExEx head is <= the node head, and that it's on the + /// canonical chain. /// /// Possible situations are: - /// - ExEx is behind the node head (`node_head.number < exex_head.number`). - /// - ExEx is on the canonical chain (`exex_head.hash` is found in the node database). - /// Backfill from the node database. - /// - ExEx is not on the canonical chain (`exex_head.hash` is not found in the node database). - /// Unwind the ExEx to the first block matching between the ExEx and the node, and then - /// bacfkill from the node database. - /// - ExEx is at the same block number (`node_head.number == exex_head.number`). - /// - ExEx is on the canonical chain (`exex_head.hash` is found in the node database). Nothing - /// to do. - /// - ExEx is not on the canonical chain (`exex_head.hash` is not found in the node database). - /// Unwind the ExEx to the first block matching between the ExEx and the node, and then - /// backfill from the node database. - /// - ExEx is ahead of the node head (`node_head.number > exex_head.number`). Wait until the - /// node head catches up to the ExEx head, and then repeat the synchronization process. - fn synchronize(&mut self) -> eyre::Result<()> { + /// - ExEx is behind the node head (`node_head.number < exex_head.number`). Backfill from the + /// node database. + /// - ExEx is at the same block number as the node head (`node_head.number == + /// exex_head.number`). Nothing to do. 
+ fn check_backfill(&mut self) -> eyre::Result<()> { debug!(target: "exex::manager", "Synchronizing ExEx head"); let backfill_job_factory = BackfillJobFactory::new(self.executor.clone(), self.provider.clone()); match self.exex_head.block.number.cmp(&self.node_head.number) { std::cmp::Ordering::Less => { - // ExEx is behind the node head - - if let Some(exex_header) = self.provider.header(&self.exex_head.block.hash)? { - // ExEx is on the canonical chain - debug!(target: "exex::manager", "ExEx is behind the node head and on the canonical chain"); - - if exex_header.number != self.exex_head.block.number { - eyre::bail!("ExEx head number does not match the hash") - } - - // ExEx is on the canonical chain, start backfill - let backfill = backfill_job_factory - .backfill(self.exex_head.block.number + 1..=self.node_head.number) - .into_stream(); - self.backfill_job = Some(backfill); - } else { - debug!(target: "exex::manager", "ExEx is behind the node head and not on the canonical chain"); - // ExEx is not on the canonical chain, first unwind it and then backfill - - // TODO(alexey): unwind and backfill - self.backfill_job = None; - } + // ExEx is behind the node head, start backfill + debug!(target: "exex::manager", "ExEx is behind the node head and on the canonical chain, starting backfill"); + let backfill = backfill_job_factory + .backfill(self.exex_head.block.number + 1..=self.node_head.number) + .into_stream(); + self.backfill_job = Some(backfill); } - #[allow(clippy::branches_sharing_code)] std::cmp::Ordering::Equal => { - // ExEx is at the same block height as the node head - - if let Some(exex_header) = self.provider.header(&self.exex_head.block.hash)? { - // ExEx is on the canonical chain - debug!(target: "exex::manager", "ExEx is at the same block height as the node head and on the canonical chain"); - - if exex_header.number != self.exex_head.block.number { - eyre::bail!("ExEx head number does not match the hash") - } - - // ExEx is on the canonical chain and the same as the node head, no need to - // backfill - self.backfill_job = None; - } else { - // ExEx is not on the canonical chain, first unwind it and then backfill - debug!(target: "exex::manager", "ExEx is at the same block height as the node head but not on the canonical chain"); - - // TODO(alexey): unwind and backfill - self.backfill_job = None; - } + debug!(target: "exex::manager", "ExEx is at the node head"); } std::cmp::Ordering::Greater => { - debug!(target: "exex::manager", "ExEx is ahead of the node head"); - - // ExEx is ahead of the node head - - // TODO(alexey): wait until the node head is at the same height as the ExEx head - // and then repeat the process above - self.node_head_catchup_in_progress = true; + return Err(eyre::eyre!("ExEx is ahead of the node head")) } }; @@ -270,9 +255,18 @@ where fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { let this = self.get_mut(); - if this.pending_sync { - this.synchronize()?; - this.pending_sync = false; + if this.pending_check_canonical { + if let Some(canonical_notification) = this.check_canonical()? 
{ + return Poll::Ready(Some(Ok(canonical_notification))) + } + + // ExEx head is on the canonical chain, we no longer need to check it + this.pending_check_canonical = false; + } + + if this.pending_check_backfill { + this.check_backfill()?; + this.pending_check_backfill = false; } if let Some(backfill_job) = &mut this.backfill_job { @@ -286,71 +280,23 @@ where this.backfill_job = None; } - loop { - let Some(notification) = ready!(this.notifications.poll_recv(cx)) else { - return Poll::Ready(None) - }; - - // 1. Either committed or reverted chain from the notification. - // 2. Block number of the tip of the canonical chain: - // - For committed chain, it's the tip block number. - // - For reverted chain, it's the block number preceding the first block in the chain. - let (chain, tip) = notification - .committed_chain() - .map(|chain| (chain.clone(), chain.tip().number)) - .or_else(|| { - notification - .reverted_chain() - .map(|chain| (chain.clone(), chain.first().number - 1)) - }) - .unzip(); - - if this.node_head_catchup_in_progress { - // If we are waiting for the node head to catch up to the same height as the ExEx - // head, then we need to check if the ExEx is on the canonical chain. - - // Query the chain from the new notification for the ExEx head block number. - let exex_head_block = chain - .as_ref() - .and_then(|chain| chain.blocks().get(&this.exex_head.block.number)); - - // Compare the hash of the block from the new notification to the ExEx head - // hash. - if let Some((block, tip)) = exex_head_block.zip(tip) { - if block.hash() == this.exex_head.block.hash { - // ExEx is on the canonical chain, proceed with the notification - this.node_head_catchup_in_progress = false; - } else { - // ExEx is not on the canonical chain, synchronize - let tip = - this.provider.sealed_header(tip)?.ok_or_eyre("node head not found")?; - this.node_head = Head::new( - tip.number, - tip.hash(), - tip.difficulty, - U256::MAX, - tip.timestamp, - ); - this.synchronize()?; - } - } - } + let Some(notification) = ready!(this.notifications.poll_recv(cx)) else { + return Poll::Ready(None) + }; - if notification - .committed_chain() - .or_else(|| notification.reverted_chain()) - .map_or(false, |chain| chain.first().number > this.exex_head.block.number) - { - return Poll::Ready(Some(Ok(notification))) - } + if let Some(committed_chain) = notification.committed_chain() { + this.exex_head.block = committed_chain.tip().num_hash(); + } else if let Some(reverted_chain) = notification.reverted_chain() { + let first_block = reverted_chain.first(); + this.exex_head.block = (first_block.parent_hash, first_block.number - 1).into(); } + + Poll::Ready(Some(Ok(notification))) } } #[cfg(test)] mod tests { - use std::future::poll_fn; - use crate::Wal; use super::*; @@ -363,7 +309,7 @@ mod tests { use reth_primitives::Block; use reth_provider::{ providers::BlockchainProvider2, test_utils::create_test_provider_factory, BlockWriter, - Chain, + Chain, DatabaseProviderFactory, }; use reth_testing_utils::generators::{self, random_block, BlockParams}; use tokio::sync::mpsc; @@ -451,12 +397,6 @@ mod tests { Ok(()) } - #[ignore] - #[tokio::test] - async fn exex_notifications_behind_head_non_canonical() -> eyre::Result<()> { - Ok(()) - } - #[tokio::test] async fn exex_notifications_same_head_canonical() -> eyre::Result<()> { let temp_dir = tempfile::tempdir().unwrap(); @@ -512,18 +452,12 @@ mod tests { Ok(()) } - #[ignore] #[tokio::test] async fn exex_notifications_same_head_non_canonical() -> eyre::Result<()> { - Ok(()) - } - - 
#[tokio::test] - async fn test_notifications_ahead_of_head() -> eyre::Result<()> { let mut rng = generators::rng(); let temp_dir = tempfile::tempdir().unwrap(); - let wal = Wal::new(temp_dir.path()).unwrap(); + let mut wal = Wal::new(temp_dir.path()).unwrap(); let provider_factory = create_test_provider_factory(); let genesis_hash = init_genesis(&provider_factory)?; @@ -533,32 +467,65 @@ mod tests { let provider = BlockchainProvider2::new(provider_factory)?; + let node_head_block = random_block( + &mut rng, + genesis_block.number + 1, + BlockParams { parent: Some(genesis_hash), tx_count: Some(0), ..Default::default() }, + ) + .seal_with_senders() + .ok_or_eyre("failed to recover senders")?; + let node_head = Head { + number: node_head_block.number, + hash: node_head_block.hash(), + ..Default::default() + }; + let provider_rw = provider.database_provider_rw()?; + provider_rw.insert_block(node_head_block)?; + provider_rw.commit()?; + let node_head_notification = ExExNotification::ChainCommitted { + new: Arc::new( + BackfillJobFactory::new(EthExecutorProvider::mainnet(), provider.clone()) + .backfill(node_head.number..=node_head.number) + .next() + .ok_or_else(|| eyre::eyre!("failed to backfill"))??, + ), + }; + let exex_head_block = random_block( &mut rng, genesis_block.number + 1, BlockParams { parent: Some(genesis_hash), tx_count: Some(0), ..Default::default() }, ); + let exex_head = ExExHead { block: exex_head_block.num_hash() }; + let exex_head_notification = ExExNotification::ChainCommitted { + new: Arc::new(Chain::new( + vec![exex_head_block + .clone() + .seal_with_senders() + .ok_or_eyre("failed to recover senders")?], + Default::default(), + None, + )), + }; + wal.commit(&exex_head_notification)?; - let node_head = - Head { number: genesis_block.number, hash: genesis_hash, ..Default::default() }; - let exex_head = ExExHead { - block: BlockNumHash { number: exex_head_block.number, hash: exex_head_block.hash() }, + let new_notification = ExExNotification::ChainCommitted { + new: Arc::new(Chain::new( + vec![random_block( + &mut rng, + node_head.number + 1, + BlockParams { parent: Some(node_head.hash), ..Default::default() }, + ) + .seal_with_senders() + .ok_or_eyre("failed to recover senders")?], + Default::default(), + None, + )), }; let (notifications_tx, notifications_rx) = mpsc::channel(1); - notifications_tx - .send(ExExNotification::ChainCommitted { - new: Arc::new(Chain::new( - vec![exex_head_block - .clone() - .seal_with_senders() - .ok_or_eyre("failed to recover senders")?], - Default::default(), - None, - )), - }) - .await?; + notifications_tx.send(new_notification.clone()).await?; let mut notifications = ExExNotifications::new( node_head, @@ -569,18 +536,66 @@ mod tests { ) .with_head(exex_head); - // First notification is skipped because the node is catching up with the ExEx - let new_notification = poll_fn(|cx| Poll::Ready(notifications.poll_next_unpin(cx))).await; - assert!(new_notification.is_pending()); + // First notification is the revert of the ExEx head block to get back to the canonical + // chain + assert_eq!( + notifications.next().await.transpose()?, + Some(exex_head_notification.into_inverted()) + ); + // Second notification is the backfilled block from the canonical chain to get back to the + // canonical tip + assert_eq!(notifications.next().await.transpose()?, Some(node_head_notification)); + // Third notification is the actual notification that we sent before + assert_eq!(notifications.next().await.transpose()?, Some(new_notification)); + + Ok(()) + } - // 
Imitate the node catching up with the ExEx by sending a notification for the missing - // block - let notification = ExExNotification::ChainCommitted { + #[tokio::test] + async fn test_notifications_ahead_of_head() -> eyre::Result<()> { + reth_tracing::init_test_tracing(); + let mut rng = generators::rng(); + + let temp_dir = tempfile::tempdir().unwrap(); + let mut wal = Wal::new(temp_dir.path()).unwrap(); + + let provider_factory = create_test_provider_factory(); + let genesis_hash = init_genesis(&provider_factory)?; + let genesis_block = provider_factory + .block(genesis_hash.into())? + .ok_or_else(|| eyre::eyre!("genesis block not found"))?; + + let provider = BlockchainProvider2::new(provider_factory)?; + + let exex_head_block = random_block( + &mut rng, + genesis_block.number + 1, + BlockParams { parent: Some(genesis_hash), tx_count: Some(0), ..Default::default() }, + ); + let exex_head_notification = ExExNotification::ChainCommitted { + new: Arc::new(Chain::new( + vec![exex_head_block + .clone() + .seal_with_senders() + .ok_or_eyre("failed to recover senders")?], + Default::default(), + None, + )), + }; + wal.commit(&exex_head_notification)?; + + let node_head = + Head { number: genesis_block.number, hash: genesis_hash, ..Default::default() }; + let exex_head = ExExHead { + block: BlockNumHash { number: exex_head_block.number, hash: exex_head_block.hash() }, + }; + + let new_notification = ExExNotification::ChainCommitted { new: Arc::new(Chain::new( vec![random_block( &mut rng, - exex_head_block.number + 1, - BlockParams { parent: Some(exex_head_block.hash()), ..Default::default() }, + genesis_block.number + 1, + BlockParams { parent: Some(genesis_hash), ..Default::default() }, ) .seal_with_senders() .ok_or_eyre("failed to recover senders")?], @@ -588,10 +603,29 @@ mod tests { None, )), }; - notifications_tx.send(notification.clone()).await?; - // Second notification is received because the node caught up with the ExEx - assert_eq!(notifications.next().await.transpose()?, Some(notification)); + let (notifications_tx, notifications_rx) = mpsc::channel(1); + + notifications_tx.send(new_notification.clone()).await?; + + let mut notifications = ExExNotifications::new( + node_head, + provider, + EthExecutorProvider::mainnet(), + notifications_rx, + wal.handle(), + ) + .with_head(exex_head); + + // First notification is the revert of the ExEx head block to get back to the canonical + // chain + assert_eq!( + notifications.next().await.transpose()?, + Some(exex_head_notification.into_inverted()) + ); + + // Second notification is the actual notification that we sent before + assert_eq!(notifications.next().await.transpose()?, Some(new_notification)); Ok(()) } diff --git a/crates/exex/exex/src/wal/cache.rs b/crates/exex/exex/src/wal/cache.rs index b3d29f1b3955c..cef27369eb63c 100644 --- a/crates/exex/exex/src/wal/cache.rs +++ b/crates/exex/exex/src/wal/cache.rs @@ -18,17 +18,17 @@ pub struct BlockCache { /// the cache with the same file ID. I.e. for each notification, there may be multiple blocks /// in the cache. files: RwLock>>, - /// A mapping of `Block Hash -> Block`. + /// A mapping of committed blocks `Block Hash -> Block`. /// /// For each [`ExExNotification::ChainCommitted`] notification, there will be an entry per /// block. - blocks: DashMap, + committed_blocks: DashMap, } impl BlockCache { /// Creates a new instance of [`BlockCache`]. 
pub(super) fn new() -> Self { - Self { files: RwLock::new(BTreeMap::new()), blocks: DashMap::new() } + Self { files: RwLock::new(BTreeMap::new()), committed_blocks: DashMap::new() } } /// Returns `true` if the cache is empty. @@ -95,6 +95,12 @@ impl BlockCache { Some((key, last_block)) } + /// Returns the file ID for the notification containing the given committed block hash, if it + /// exists. + pub(super) fn get_file_id_by_committed_block_hash(&self, block_hash: &B256) -> Option { + self.committed_blocks.get(block_hash).map(|entry| entry.0) + } + /// Inserts the blocks from the notification into the cache with the given file ID. /// /// First, inserts the reverted blocks (if any), then the committed blocks (if any). @@ -126,7 +132,7 @@ impl BlockCache { parent_hash: block.parent_hash, }; files.entry(file_id).or_default().push_back(cached_block); - self.blocks.insert(block.hash(), cached_block); + self.committed_blocks.insert(block.hash(), (file_id, cached_block)); } } } diff --git a/crates/exex/exex/src/wal/mod.rs b/crates/exex/exex/src/wal/mod.rs index 5d28cb3e0377e..d7aea3aafdfaa 100644 --- a/crates/exex/exex/src/wal/mod.rs +++ b/crates/exex/exex/src/wal/mod.rs @@ -3,12 +3,14 @@ mod cache; pub use cache::BlockCache; mod storage; +use eyre::OptionExt; pub use storage::Storage; use std::{path::Path, sync::Arc}; use alloy_eips::BlockNumHash; use reth_exex_types::ExExNotification; +use reth_primitives::B256; use reth_tracing::tracing::{debug, instrument}; /// WAL is a write-ahead log (WAL) that stores the notifications sent to ExExes. @@ -135,7 +137,10 @@ impl WalInner { block.block.number == to_block.number && block.block.hash == to_block.hash { - let notification = self.storage.read_notification(file_id)?; + let notification = self + .storage + .read_notification(file_id)? + .ok_or_eyre("notification not found")?; if notification.committed_chain().unwrap().blocks().len() == 1 { unfinalized_from_file_id = Some( block_cache.peek().map(|(file_id, _)| *file_id).unwrap_or(u64::MAX), @@ -207,6 +212,21 @@ pub struct WalHandle { wal: Arc, } +impl WalHandle { + /// Returns the notification for the given committed block hash if it exists. + pub fn get_committed_notification_by_block_hash( + &self, + block_hash: &B256, + ) -> eyre::Result> { + let Some(file_id) = self.wal.block_cache.get_file_id_by_committed_block_hash(block_hash) + else { + return Ok(None) + }; + + self.wal.storage.read_notification(file_id) + } +} + #[cfg(test)] mod tests { use std::sync::Arc; diff --git a/crates/exex/exex/src/wal/storage.rs b/crates/exex/exex/src/wal/storage.rs index 8953a6a4edfb7..817d57d193f04 100644 --- a/crates/exex/exex/src/wal/storage.rs +++ b/crates/exex/exex/src/wal/storage.rs @@ -84,16 +84,25 @@ impl Storage { &self, range: RangeInclusive, ) -> impl Iterator> + '_ { - range.map(move |id| self.read_notification(id).map(|notification| (id, notification))) + range.map(move |id| { + let notification = self.read_notification(id)?.ok_or_eyre("notification not found")?; + + Ok((id, notification)) + }) } /// Reads the notification from the file with the given id. 
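// Illustrative sketch (not part of the patch): the `WalHandle` accessor added above
// lets a notification stream look up a committed notification by block hash. A
// hypothetical caller; names follow this diff and the final re-exports may differ:
use alloy_primitives::B256;
use reth_exex::WalHandle;

fn committed_chain_len(handle: &WalHandle, hash: B256) -> eyre::Result<Option<usize>> {
    // `Ok(None)` covers both an unknown hash and a WAL file that has already been
    // removed, matching the `Option`-returning storage read below.
    Ok(handle
        .get_committed_notification_by_block_hash(&hash)?
        .and_then(|notification| notification.committed_chain())
        .map(|chain| chain.blocks().len()))
}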
#[instrument(target = "exex::wal::storage", skip(self))] - pub(super) fn read_notification(&self, file_id: u64) -> eyre::Result { + pub(super) fn read_notification(&self, file_id: u64) -> eyre::Result> { let file_path = self.file_path(file_id); debug!(?file_path, "Reading notification from WAL"); - let mut file = File::open(&file_path)?; + let mut file = match File::open(&file_path) { + Ok(file) => file, + Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(None), + Err(err) => return Err(err.into()), + }; + // TODO(alexey): use rmp-serde when Alloy and Reth serde issues are resolved Ok(serde_json::from_reader(&mut file)?) } @@ -149,7 +158,7 @@ mod tests { let file_id = 0; storage.write_notification(file_id, ¬ification)?; let deserialized_notification = storage.read_notification(file_id)?; - assert_eq!(deserialized_notification, notification); + assert_eq!(deserialized_notification, Some(notification)); Ok(()) } From 8dfab3354bbbaa255c62e654933835ad96d1de3b Mon Sep 17 00:00:00 2001 From: Roman Krasiuk Date: Fri, 27 Sep 2024 17:57:24 +0200 Subject: [PATCH 46/84] feat(trie): extract `StorageProof` (#11269) --- crates/trie/db/src/proof.rs | 3 +- crates/trie/trie/src/proof.rs | 116 +++++++++++++++++++++----------- crates/trie/trie/src/witness.rs | 28 +++++--- 3 files changed, 96 insertions(+), 51 deletions(-) diff --git a/crates/trie/db/src/proof.rs b/crates/trie/db/src/proof.rs index 9416d078090cb..1d5fda84cc5bc 100644 --- a/crates/trie/db/src/proof.rs +++ b/crates/trie/db/src/proof.rs @@ -78,7 +78,6 @@ impl<'a, TX: DbTx> DatabaseProof<'a, TX> &state_sorted, )) .with_prefix_sets_mut(input.prefix_sets) - .with_targets(targets) - .multiproof() + .multiproof(targets) } } diff --git a/crates/trie/trie/src/proof.rs b/crates/trie/trie/src/proof.rs index e3bdccafefd29..3e9ca5783814b 100644 --- a/crates/trie/trie/src/proof.rs +++ b/crates/trie/trie/src/proof.rs @@ -1,7 +1,7 @@ use crate::{ hashed_cursor::{HashedCursorFactory, HashedStorageCursor}, node_iter::{TrieElement, TrieNodeIter}, - prefix_set::TriePrefixSetsMut, + prefix_set::{PrefixSetMut, TriePrefixSetsMut}, trie_cursor::TrieCursorFactory, walker::TrieWalker, HashBuilder, Nibbles, @@ -30,18 +30,15 @@ pub struct Proof { hashed_cursor_factory: H, /// A set of prefix sets that have changes. prefix_sets: TriePrefixSetsMut, - /// Proof targets. - targets: HashMap>, } impl Proof { - /// Create a new [Proof] instance. + /// Create a new [`Proof`] instance. pub fn new(t: T, h: H) -> Self { Self { trie_cursor_factory: t, hashed_cursor_factory: h, prefix_sets: TriePrefixSetsMut::default(), - targets: HashMap::default(), } } @@ -51,7 +48,6 @@ impl Proof { trie_cursor_factory, hashed_cursor_factory: self.hashed_cursor_factory, prefix_sets: self.prefix_sets, - targets: self.targets, } } @@ -61,7 +57,6 @@ impl Proof { trie_cursor_factory: self.trie_cursor_factory, hashed_cursor_factory, prefix_sets: self.prefix_sets, - targets: self.targets, } } @@ -70,22 +65,11 @@ impl Proof { self.prefix_sets = prefix_sets; self } - - /// Set the target account and slots. - pub fn with_target(self, target: (B256, HashSet)) -> Self { - self.with_targets(HashMap::from_iter([target])) - } - - /// Set the target accounts and slots. - pub fn with_targets(mut self, targets: HashMap>) -> Self { - self.targets = targets; - self - } } impl Proof where - T: TrieCursorFactory, + T: TrieCursorFactory + Clone, H: HashedCursorFactory + Clone, { /// Generate an account proof from intermediate nodes. 
@@ -95,23 +79,28 @@ where slots: &[B256], ) -> Result { Ok(self - .with_target((keccak256(address), slots.iter().map(keccak256).collect())) - .multiproof()? + .multiproof(HashMap::from_iter([( + keccak256(address), + slots.iter().map(keccak256).collect(), + )]))? .account_proof(address, slots)?) } /// Generate a state multiproof according to specified targets. - pub fn multiproof(&self) -> Result { + pub fn multiproof( + mut self, + mut targets: HashMap>, + ) -> Result { let hashed_account_cursor = self.hashed_cursor_factory.hashed_account_cursor()?; let trie_cursor = self.trie_cursor_factory.account_trie_cursor()?; // Create the walker. let mut prefix_set = self.prefix_sets.account_prefix_set.clone(); - prefix_set.extend(self.targets.keys().map(Nibbles::unpack)); + prefix_set.extend(targets.keys().map(Nibbles::unpack)); let walker = TrieWalker::new(trie_cursor, prefix_set.freeze()); // Create a hash builder to rebuild the root node since it is not available in the database. - let retainer = ProofRetainer::from_iter(self.targets.keys().map(Nibbles::unpack)); + let retainer = ProofRetainer::from_iter(targets.keys().map(Nibbles::unpack)); let mut hash_builder = HashBuilder::default().with_proof_retainer(retainer); let mut storages = HashMap::default(); @@ -123,7 +112,19 @@ where hash_builder.add_branch(node.key, node.value, node.children_are_in_trie); } TrieElement::Leaf(hashed_address, account) => { - let storage_multiproof = self.storage_multiproof(hashed_address)?; + let storage_prefix_set = self + .prefix_sets + .storage_prefix_sets + .remove(&hashed_address) + .unwrap_or_default(); + let proof_targets = targets.remove(&hashed_address).unwrap_or_default(); + let storage_multiproof = StorageProof::new_hashed( + self.trie_cursor_factory.clone(), + self.hashed_cursor_factory.clone(), + hashed_address, + ) + .with_prefix_set_mut(storage_prefix_set) + .storage_proof(proof_targets)?; // Encode account account_rlp.clear(); @@ -138,30 +139,67 @@ where let _ = hash_builder.root(); Ok(MultiProof { account_subtree: hash_builder.take_proof_nodes(), storages }) } +} + +/// Generates storage merkle proofs. +#[derive(Debug)] +pub struct StorageProof { + /// The factory for traversing trie nodes. + trie_cursor_factory: T, + /// The factory for hashed cursors. + hashed_cursor_factory: H, + /// The hashed address of an account. + hashed_address: B256, + /// The set of storage slot prefixes that have changed. + prefix_set: PrefixSetMut, +} + +impl StorageProof { + /// Create a new [`StorageProof`] instance. + pub fn new(t: T, h: H, address: Address) -> Self { + Self::new_hashed(t, h, keccak256(address)) + } + + /// Create a new [`StorageProof`] instance with hashed address. + pub fn new_hashed(t: T, h: H, hashed_address: B256) -> Self { + Self { + trie_cursor_factory: t, + hashed_cursor_factory: h, + hashed_address, + prefix_set: PrefixSetMut::default(), + } + } - /// Generate a storage multiproof according to specified targets. - pub fn storage_multiproof( - &self, - hashed_address: B256, + /// Set the changed prefixes. + pub fn with_prefix_set_mut(mut self, prefix_set: PrefixSetMut) -> Self { + self.prefix_set = prefix_set; + self + } +} + +impl StorageProof +where + T: TrieCursorFactory, + H: HashedCursorFactory, +{ + /// Generate storage proof. 
+ pub fn storage_proof( + mut self, + targets: HashSet, ) -> Result { let mut hashed_storage_cursor = - self.hashed_cursor_factory.hashed_storage_cursor(hashed_address)?; + self.hashed_cursor_factory.hashed_storage_cursor(self.hashed_address)?; // short circuit on empty storage if hashed_storage_cursor.is_storage_empty()? { return Ok(StorageMultiProof::default()) } - let target_nibbles = self - .targets - .get(&hashed_address) - .map_or(Vec::new(), |slots| slots.iter().map(Nibbles::unpack).collect()); + let target_nibbles = targets.into_iter().map(Nibbles::unpack).collect::>(); + self.prefix_set.extend(target_nibbles.clone()); - let mut prefix_set = - self.prefix_sets.storage_prefix_sets.get(&hashed_address).cloned().unwrap_or_default(); - prefix_set.extend(target_nibbles.clone()); - let trie_cursor = self.trie_cursor_factory.storage_trie_cursor(hashed_address)?; - let walker = TrieWalker::new(trie_cursor, prefix_set.freeze()); + let trie_cursor = self.trie_cursor_factory.storage_trie_cursor(self.hashed_address)?; + let walker = TrieWalker::new(trie_cursor, self.prefix_set.freeze()); let retainer = ProofRetainer::from_iter(target_nibbles); let mut hash_builder = HashBuilder::default().with_proof_retainer(retainer); diff --git a/crates/trie/trie/src/witness.rs b/crates/trie/trie/src/witness.rs index ef5b358d31cbf..972afc10c3424 100644 --- a/crates/trie/trie/src/witness.rs +++ b/crates/trie/trie/src/witness.rs @@ -1,8 +1,11 @@ use std::collections::BTreeMap; use crate::{ - hashed_cursor::HashedCursorFactory, prefix_set::TriePrefixSetsMut, proof::Proof, - trie_cursor::TrieCursorFactory, HashedPostState, + hashed_cursor::HashedCursorFactory, + prefix_set::TriePrefixSetsMut, + proof::{Proof, StorageProof}, + trie_cursor::TrieCursorFactory, + HashedPostState, }; use alloy_primitives::{ keccak256, @@ -100,8 +103,7 @@ where let mut account_multiproof = Proof::new(self.trie_cursor_factory.clone(), self.hashed_cursor_factory.clone()) .with_prefix_sets_mut(self.prefix_sets.clone()) - .with_targets(proof_targets.clone()) - .multiproof()?; + .multiproof(proof_targets.clone())?; // Attempt to compute state root from proofs and gather additional // information for the witness. @@ -162,13 +164,19 @@ where let mut padded_key = key.pack(); padded_key.resize(32, 0); let target_key = B256::from_slice(&padded_key); - let proof = Proof::new( + let storage_prefix_set = self + .prefix_sets + .storage_prefix_sets + .get(&hashed_address) + .cloned() + .unwrap_or_default(); + let proof = StorageProof::new_hashed( self.trie_cursor_factory.clone(), self.hashed_cursor_factory.clone(), + hashed_address, ) - .with_prefix_sets_mut(self.prefix_sets.clone()) - .with_target((hashed_address, HashSet::from_iter([target_key]))) - .storage_multiproof(hashed_address)?; + .with_prefix_set_mut(storage_prefix_set) + .storage_proof(HashSet::from_iter([target_key]))?; // The subtree only contains the proof for a single target. let node = @@ -182,11 +190,11 @@ where // Right pad the target with 0s. let mut padded_key = key.pack(); padded_key.resize(32, 0); + let targets = HashMap::from_iter([(B256::from_slice(&padded_key), HashSet::default())]); let proof = Proof::new(self.trie_cursor_factory.clone(), self.hashed_cursor_factory.clone()) .with_prefix_sets_mut(self.prefix_sets.clone()) - .with_target((B256::from_slice(&padded_key), HashSet::default())) - .multiproof()?; + .multiproof(targets)?; // The subtree only contains the proof for a single target. 
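// Illustrative sketch (not part of the patch): with `StorageProof` extracted, a
// storage multiproof can be requested directly for a set of hashed slots instead of
// going through the account-level `Proof` builder. Cursor factories are left
// generic; the module paths and the std collections below are assumptions, as the
// crate may use its own map aliases:
use std::collections::HashSet;
use alloy_primitives::{keccak256, Address, B256};
use reth_trie::{
    hashed_cursor::HashedCursorFactory, proof::StorageProof, trie_cursor::TrieCursorFactory,
};

fn slot_proof<T, H>(trie_cursors: T, hashed_cursors: H, address: Address, slot: B256)
where
    T: TrieCursorFactory,
    H: HashedCursorFactory,
{
    // Targets are hashed slot keys; `new` hashes the address internally, while
    // `new_hashed` takes an already-hashed address.
    let targets: HashSet<B256> = HashSet::from_iter([keccak256(slot)]);
    let _proof = StorageProof::new(trie_cursors, hashed_cursors, address).storage_proof(targets);
}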
let node = From bf18fd927c24b89eff1e24fd240ba0cead395b6f Mon Sep 17 00:00:00 2001 From: DaniPopes <57450786+DaniPopes@users.noreply.github.com> Date: Fri, 27 Sep 2024 18:46:33 +0200 Subject: [PATCH 47/84] perf: improve IntegerList API to avoid allocations (#11292) --- Cargo.lock | 1 - crates/primitives-traits/src/integer_list.rs | 98 ++++++++++--------- crates/primitives-traits/src/lib.rs | 2 +- .../src/segments/user/account_history.rs | 6 +- .../src/segments/user/storage_history.rs | 6 +- .../src/stages/index_account_history.rs | 2 +- .../src/stages/index_storage_history.rs | 2 +- crates/stages/stages/src/stages/utils.rs | 6 +- .../storage/db-api/src/models/integer_list.rs | 1 + .../storage/db/src/implementation/mdbx/mod.rs | 6 +- .../db/src/tables/codecs/fuzz/inputs.rs | 9 +- .../src/providers/database/provider.rs | 2 +- examples/custom-inspector/Cargo.toml | 1 - 13 files changed, 71 insertions(+), 71 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7368caf8422ed..d05a3396e7fe3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2762,7 +2762,6 @@ dependencies = [ "futures-util", "reth", "reth-node-ethereum", - "reth-rpc-types", ] [[package]] diff --git a/crates/primitives-traits/src/integer_list.rs b/crates/primitives-traits/src/integer_list.rs index 767fb3ec30a1c..570c96c9fdafa 100644 --- a/crates/primitives-traits/src/integer_list.rs +++ b/crates/primitives-traits/src/integer_list.rs @@ -4,7 +4,7 @@ use core::fmt; use derive_more::Deref; use roaring::RoaringTreemap; use serde::{ - de::{SeqAccess, Unexpected, Visitor}, + de::{SeqAccess, Visitor}, ser::SerializeSeq, Deserialize, Deserializer, Serialize, Serializer, }; @@ -16,34 +16,54 @@ pub struct IntegerList(pub RoaringTreemap); impl fmt::Debug for IntegerList { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let vec: Vec = self.0.iter().collect(); - write!(f, "IntegerList {vec:?}") + f.write_str("IntegerList")?; + f.debug_list().entries(self.0.iter()).finish() } } impl IntegerList { + /// Creates a new empty `IntegerList`. + pub fn empty() -> Self { + Self(RoaringTreemap::new()) + } + /// Creates an `IntegerList` from a list of integers. /// - /// # Returns - /// - /// Returns an error if the list is empty or not pre-sorted. - pub fn new>(list: T) -> Result { - Ok(Self( - RoaringTreemap::from_sorted_iter(list.as_ref().iter().copied()) - .map_err(|_| RoaringBitmapError::InvalidInput)?, - )) + /// Returns an error if the list is not pre-sorted. + pub fn new(list: impl IntoIterator) -> Result { + RoaringTreemap::from_sorted_iter(list) + .map(Self) + .map_err(|_| IntegerListError::UnsortedInput) } // Creates an IntegerList from a pre-sorted list of integers. /// /// # Panics /// - /// Panics if the list is empty or not pre-sorted. - pub fn new_pre_sorted>(list: T) -> Self { - Self( - RoaringTreemap::from_sorted_iter(list.as_ref().iter().copied()) - .expect("IntegerList must be pre-sorted and non-empty"), - ) + /// Panics if the list is not pre-sorted. + #[inline] + #[track_caller] + pub fn new_pre_sorted(list: impl IntoIterator) -> Self { + Self::new(list).expect("IntegerList must be pre-sorted and non-empty") + } + + /// Appends a list of integers to the current list. + pub fn append(&mut self, list: impl IntoIterator) -> Result { + self.0.append(list).map_err(|_| IntegerListError::UnsortedInput) + } + + /// Pushes a new integer to the list. 
+ pub fn push(&mut self, value: u64) -> Result<(), IntegerListError> { + if self.0.push(value) { + Ok(()) + } else { + Err(IntegerListError::UnsortedInput) + } + } + + /// Clears the list. + pub fn clear(&mut self) { + self.0.clear(); } /// Serializes a [`IntegerList`] into a sequence of bytes. @@ -59,36 +79,21 @@ impl IntegerList { } /// Deserializes a sequence of bytes into a proper [`IntegerList`]. - pub fn from_bytes(data: &[u8]) -> Result { + pub fn from_bytes(data: &[u8]) -> Result { Ok(Self( RoaringTreemap::deserialize_from(data) - .map_err(|_| RoaringBitmapError::FailedToDeserialize)?, + .map_err(|_| IntegerListError::FailedToDeserialize)?, )) } } -macro_rules! impl_uint { - ($($w:tt),+) => { - $( - impl From> for IntegerList { - fn from(v: Vec<$w>) -> Self { - Self::new_pre_sorted(v.iter().map(|v| *v as u64).collect::>()) - } - } - )+ - }; -} - -impl_uint!(usize, u64, u32, u8, u16); - impl Serialize for IntegerList { fn serialize(&self, serializer: S) -> Result where S: Serializer, { - let vec = self.0.iter().collect::>(); let mut seq = serializer.serialize_seq(Some(self.len() as usize))?; - for e in vec { + for e in &self.0 { seq.serialize_element(&e)?; } seq.end() @@ -107,12 +112,11 @@ impl<'de> Visitor<'de> for IntegerListVisitor { where E: SeqAccess<'de>, { - let mut list = Vec::new(); + let mut list = IntegerList::empty(); while let Some(item) = seq.next_element()? { - list.push(item); + list.push(item).map_err(serde::de::Error::custom)?; } - - IntegerList::new(list).map_err(|_| serde::de::Error::invalid_value(Unexpected::Seq, &self)) + Ok(list) } } @@ -132,17 +136,17 @@ use arbitrary::{Arbitrary, Unstructured}; impl<'a> Arbitrary<'a> for IntegerList { fn arbitrary(u: &mut Unstructured<'a>) -> Result { let mut nums: Vec = Vec::arbitrary(u)?; - nums.sort(); + nums.sort_unstable(); Self::new(nums).map_err(|_| arbitrary::Error::IncorrectFormat) } } /// Primitives error type. #[derive(Debug, derive_more::Display, derive_more::Error)] -pub enum RoaringBitmapError { - /// The provided input is invalid. - #[display("the provided input is invalid")] - InvalidInput, +pub enum IntegerListError { + /// The provided input is unsorted. + #[display("the provided input is unsorted")] + UnsortedInput, /// Failed to deserialize data into type. 
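// Illustrative sketch (not part of the patch): the reworked `IntegerList` is
// fallible instead of panicking on unsorted input. A minimal usage example,
// assuming the re-exports added to `reth_primitives_traits` below:
use reth_primitives_traits::{IntegerList, IntegerListError};

fn build_shard() -> Result<IntegerList, IntegerListError> {
    let mut list = IntegerList::empty();
    list.push(100)?;
    list.push(200)?;
    // `append` keeps the sorted invariant and rejects out-of-order values.
    list.append([300, 400])?;
    assert!(list.push(250).is_err());
    Ok(list)
}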
#[display("failed to deserialize data into type")] FailedToDeserialize, @@ -152,6 +156,12 @@ pub enum RoaringBitmapError { mod tests { use super::*; + #[test] + fn empty_list() { + assert_eq!(IntegerList::empty().len(), 0); + assert_eq!(IntegerList::new_pre_sorted(std::iter::empty()).len(), 0); + } + #[test] fn test_integer_list() { let original_list = [1, 2, 3]; diff --git a/crates/primitives-traits/src/lib.rs b/crates/primitives-traits/src/lib.rs index e5c57de74b9c2..5445ce467114d 100644 --- a/crates/primitives-traits/src/lib.rs +++ b/crates/primitives-traits/src/lib.rs @@ -21,7 +21,7 @@ pub mod account; pub use account::{Account, Bytecode}; mod integer_list; -pub use integer_list::{IntegerList, RoaringBitmapError}; +pub use integer_list::{IntegerList, IntegerListError}; pub mod request; pub use request::{Request, Requests}; diff --git a/crates/prune/prune/src/segments/user/account_history.rs b/crates/prune/prune/src/segments/user/account_history.rs index 016d9a22fba2f..01f8c0850a187 100644 --- a/crates/prune/prune/src/segments/user/account_history.rs +++ b/crates/prune/prune/src/segments/user/account_history.rs @@ -275,10 +275,8 @@ mod tests { .iter() .filter(|(key, _)| key.highest_block_number > last_pruned_block_number) .map(|(key, blocks)| { - let new_blocks = blocks - .iter() - .skip_while(|block| *block <= last_pruned_block_number) - .collect::>(); + let new_blocks = + blocks.iter().skip_while(|block| *block <= last_pruned_block_number); (key.clone(), BlockNumberList::new_pre_sorted(new_blocks)) }) .collect::>(); diff --git a/crates/prune/prune/src/segments/user/storage_history.rs b/crates/prune/prune/src/segments/user/storage_history.rs index 5291d822cefaf..315ad750a8b77 100644 --- a/crates/prune/prune/src/segments/user/storage_history.rs +++ b/crates/prune/prune/src/segments/user/storage_history.rs @@ -281,10 +281,8 @@ mod tests { .iter() .filter(|(key, _)| key.sharded_key.highest_block_number > last_pruned_block_number) .map(|(key, blocks)| { - let new_blocks = blocks - .iter() - .skip_while(|block| *block <= last_pruned_block_number) - .collect::>(); + let new_blocks = + blocks.iter().skip_while(|block| *block <= last_pruned_block_number); (key.clone(), BlockNumberList::new_pre_sorted(new_blocks)) }) .collect::>(); diff --git a/crates/stages/stages/src/stages/index_account_history.rs b/crates/stages/stages/src/stages/index_account_history.rs index e0fcde2b194f5..39d50242aff7a 100644 --- a/crates/stages/stages/src/stages/index_account_history.rs +++ b/crates/stages/stages/src/stages/index_account_history.rs @@ -182,7 +182,7 @@ mod tests { } fn list(list: &[u64]) -> BlockNumberList { - BlockNumberList::new(list).unwrap() + BlockNumberList::new(list.iter().copied()).unwrap() } fn cast( diff --git a/crates/stages/stages/src/stages/index_storage_history.rs b/crates/stages/stages/src/stages/index_storage_history.rs index 4af2cb3efea2e..efeec4a3a217d 100644 --- a/crates/stages/stages/src/stages/index_storage_history.rs +++ b/crates/stages/stages/src/stages/index_storage_history.rs @@ -197,7 +197,7 @@ mod tests { } fn list(list: &[u64]) -> BlockNumberList { - BlockNumberList::new(list).unwrap() + BlockNumberList::new(list.iter().copied()).unwrap() } fn cast( diff --git a/crates/stages/stages/src/stages/utils.rs b/crates/stages/stages/src/stages/utils.rs index 7cdab4ff24489..caf039faca108 100644 --- a/crates/stages/stages/src/stages/utils.rs +++ b/crates/stages/stages/src/stages/utils.rs @@ -54,11 +54,11 @@ where let mut cache: HashMap> = HashMap::default(); let mut collect = |cache: 
&HashMap>| { - for (key, indice_list) in cache { - let last = indice_list.last().expect("qed"); + for (key, indices) in cache { + let last = indices.last().expect("qed"); collector.insert( sharded_key_factory(*key, *last), - BlockNumberList::new_pre_sorted(indice_list), + BlockNumberList::new_pre_sorted(indices.iter().copied()), )?; } Ok::<(), StageError>(()) diff --git a/crates/storage/db-api/src/models/integer_list.rs b/crates/storage/db-api/src/models/integer_list.rs index f47605bf88b52..6d28b5496a1bb 100644 --- a/crates/storage/db-api/src/models/integer_list.rs +++ b/crates/storage/db-api/src/models/integer_list.rs @@ -12,6 +12,7 @@ impl Compress for IntegerList { fn compress(self) -> Self::Compressed { self.to_bytes() } + fn compress_to_buf>(self, buf: &mut B) { self.to_mut_bytes(buf) } diff --git a/crates/storage/db/src/implementation/mdbx/mod.rs b/crates/storage/db/src/implementation/mdbx/mod.rs index 8b4a136c300d8..1deb86ba614f7 100644 --- a/crates/storage/db/src/implementation/mdbx/mod.rs +++ b/crates/storage/db/src/implementation/mdbx/mod.rs @@ -1319,7 +1319,7 @@ mod tests { for i in 1..5 { let key = ShardedKey::new(real_key, i * 100); - let list: IntegerList = vec![i * 100u64].into(); + let list = IntegerList::new_pre_sorted([i * 100u64]); db.update(|tx| tx.put::(key.clone(), list.clone()).expect("")) .unwrap(); @@ -1340,7 +1340,7 @@ mod tests { .expect("should be able to retrieve it."); assert_eq!(ShardedKey::new(real_key, 200), key); - let list200: IntegerList = vec![200u64].into(); + let list200 = IntegerList::new_pre_sorted([200u64]); assert_eq!(list200, list); } // Seek greatest index @@ -1357,7 +1357,7 @@ mod tests { .expect("should be able to retrieve it."); assert_eq!(ShardedKey::new(real_key, 400), key); - let list400: IntegerList = vec![400u64].into(); + let list400 = IntegerList::new_pre_sorted([400u64]); assert_eq!(list400, list); } } diff --git a/crates/storage/db/src/tables/codecs/fuzz/inputs.rs b/crates/storage/db/src/tables/codecs/fuzz/inputs.rs index 2c944e158eb18..bb26e8b9e217f 100644 --- a/crates/storage/db/src/tables/codecs/fuzz/inputs.rs +++ b/crates/storage/db/src/tables/codecs/fuzz/inputs.rs @@ -10,12 +10,7 @@ pub struct IntegerListInput(pub Vec); impl From for IntegerList { fn from(list: IntegerListInput) -> Self { let mut v = list.0; - - // Empty lists are not supported by `IntegerList`, so we want to skip these cases. 
- if v.is_empty() { - return vec![1u64].into() - } - v.sort(); - v.into() + v.sort_unstable(); + Self::new_pre_sorted(v) } } diff --git a/crates/storage/provider/src/providers/database/provider.rs b/crates/storage/provider/src/providers/database/provider.rs index 6e026f5c910ab..7159720bf3715 100644 --- a/crates/storage/provider/src/providers/database/provider.rs +++ b/crates/storage/provider/src/providers/database/provider.rs @@ -1356,7 +1356,7 @@ impl DatabaseProvider { }; self.tx.put::( sharded_key_factory(partial_key, highest_block_number), - BlockNumberList::new_pre_sorted(list), + BlockNumberList::new_pre_sorted(list.iter().copied()), )?; } } diff --git a/examples/custom-inspector/Cargo.toml b/examples/custom-inspector/Cargo.toml index a94980951627e..18629556c42fe 100644 --- a/examples/custom-inspector/Cargo.toml +++ b/examples/custom-inspector/Cargo.toml @@ -8,7 +8,6 @@ license.workspace = true [dependencies] reth.workspace = true reth-node-ethereum.workspace = true -reth-rpc-types.workspace = true alloy-rpc-types.workspace = true clap = { workspace = true, features = ["derive"] } futures-util.workspace = true From e962983896902ef42fcc3d5a01ba8f4adc3a0ad7 Mon Sep 17 00:00:00 2001 From: DaniPopes <57450786+DaniPopes@users.noreply.github.com> Date: Fri, 27 Sep 2024 19:42:46 +0200 Subject: [PATCH 48/84] perf(db): use encode_to in Scale implementations (#11297) --- crates/storage/db-api/src/scale.rs | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/crates/storage/db-api/src/scale.rs b/crates/storage/db-api/src/scale.rs index 99382a4a91793..075eb74f8d8ef 100644 --- a/crates/storage/db-api/src/scale.rs +++ b/crates/storage/db-api/src/scale.rs @@ -22,7 +22,7 @@ where } fn compress_to_buf>(self, buf: &mut B) { - buf.put_slice(&parity_scale_codec::Encode::encode(&self)) + parity_scale_codec::Encode::encode_to(&self, OutputCompat::wrap_mut(buf)); } } @@ -50,3 +50,22 @@ impl sealed::Sealed for Vec {} impl_compression_for_scale!(U256); impl_compression_for_scale!(u8, u32, u16, u64); + +#[repr(transparent)] +struct OutputCompat(B); + +impl OutputCompat { + fn wrap_mut(buf: &mut B) -> &mut Self { + unsafe { std::mem::transmute(buf) } + } +} + +impl parity_scale_codec::Output for OutputCompat { + fn write(&mut self, bytes: &[u8]) { + self.0.put_slice(bytes); + } + + fn push_byte(&mut self, byte: u8) { + self.0.put_u8(byte); + } +} From 5b585ebc22a16558d1573166fdc9d14d2d8341f0 Mon Sep 17 00:00:00 2001 From: evalir Date: Fri, 27 Sep 2024 13:53:53 -0400 Subject: [PATCH 49/84] chore(`rpc`): use alloy's `Keccak256` hasher instead of manually concatenating hash bytes (#11298) --- crates/rpc/rpc/src/eth/bundle.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/rpc/rpc/src/eth/bundle.rs b/crates/rpc/rpc/src/eth/bundle.rs index dd8f75898bfee..2d169f1c540a9 100644 --- a/crates/rpc/rpc/src/eth/bundle.rs +++ b/crates/rpc/rpc/src/eth/bundle.rs @@ -2,7 +2,7 @@ use std::sync::Arc; -use alloy_primitives::{keccak256, U256}; +use alloy_primitives::{Keccak256, U256}; use alloy_rpc_types_mev::{EthCallBundle, EthCallBundleResponse, EthCallBundleTransactionResult}; use jsonrpsee::core::RpcResult; use reth_chainspec::EthChainSpec; @@ -161,7 +161,7 @@ where let mut coinbase_balance_after_tx = initial_coinbase; let mut total_gas_used = 0u64; let mut total_gas_fess = U256::ZERO; - let mut hash_bytes = Vec::with_capacity(32 * transactions.len()); + let mut hasher = Keccak256::new(); let mut evm = Call::evm_config(ð_api).evm_with_env(db, env); @@ -179,7 
+179,7 @@ where let tx = tx.into_transaction(); - hash_bytes.extend_from_slice(tx.hash().as_slice()); + hasher.update(tx.hash()); let gas_price = tx .effective_tip_per_gas(basefee) .ok_or_else(|| RpcInvalidTransactionError::FeeCapTooLow) @@ -244,7 +244,7 @@ where coinbase_diff.checked_div(U256::from(total_gas_used)).unwrap_or_default(); let res = EthCallBundleResponse { bundle_gas_price, - bundle_hash: keccak256(&hash_bytes), + bundle_hash: hasher.finalize(), coinbase_diff, eth_sent_to_coinbase, gas_fees: total_gas_fess, From 49f044f6e1e9cd17df9e51aa3410a861745bb4eb Mon Sep 17 00:00:00 2001 From: ben Date: Fri, 27 Sep 2024 14:15:39 -0400 Subject: [PATCH 50/84] fix(reth-ipc): pass through extensions from request (#11300) --- crates/rpc/ipc/src/server/rpc_service.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/rpc/ipc/src/server/rpc_service.rs b/crates/rpc/ipc/src/server/rpc_service.rs index 2f645605da72c..5e89c6a0d7f31 100644 --- a/crates/rpc/ipc/src/server/rpc_service.rs +++ b/crates/rpc/ipc/src/server/rpc_service.rs @@ -6,8 +6,8 @@ use jsonrpsee::{ IdProvider, }, types::{error::reject_too_many_subscriptions, ErrorCode, ErrorObject, Request}, - BoundedSubscriptions, ConnectionId, Extensions, MethodCallback, MethodResponse, MethodSink, - Methods, SubscriptionState, + BoundedSubscriptions, ConnectionId, MethodCallback, MethodResponse, MethodSink, Methods, + SubscriptionState, }; use std::sync::Arc; @@ -58,7 +58,7 @@ impl<'a> RpcServiceT<'a> for RpcService { let params = req.params(); let name = req.method_name(); let id = req.id().clone(); - let extensions = Extensions::new(); + let extensions = req.extensions.clone(); match self.methods.method_with_name(name) { None => { From 9863a17d2d0d289c976a9f18c35ccccd6456b2ed Mon Sep 17 00:00:00 2001 From: nk_ysg Date: Sat, 28 Sep 2024 02:18:04 +0800 Subject: [PATCH 51/84] chore(exex):move maximum ExExManager capacity to a constant (#11293) Co-authored-by: Oliver --- crates/exex/exex/src/manager.rs | 6 ++++++ crates/node/builder/src/launch/exex.rs | 6 ++++-- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/crates/exex/exex/src/manager.rs b/crates/exex/exex/src/manager.rs index 5b3b38ba0d514..ada9e7a4b315b 100644 --- a/crates/exex/exex/src/manager.rs +++ b/crates/exex/exex/src/manager.rs @@ -25,6 +25,12 @@ use tokio::sync::{ }; use tokio_util::sync::{PollSendError, PollSender, ReusableBoxFuture}; +/// Default max size of the internal state notifications buffer. +/// +/// 1024 notifications in the buffer is 3.5 hours of mainnet blocks, +/// or 17 minutes of 1-second blocks. +pub const DEFAULT_EXEX_MANAGER_CAPACITY: usize = 1024; + /// Metrics for an `ExEx`. 
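// Illustrative sketch (not part of the patch): the eth bundle change above swaps
// "concatenate every tx hash, then keccak256" for alloy's incremental `Keccak256`
// hasher. Both routes produce the same digest; the incremental form avoids the
// intermediate buffer:
use alloy_primitives::{keccak256, Keccak256, B256};

fn bundle_hash_incremental(tx_hashes: &[B256]) -> B256 {
    let mut hasher = Keccak256::new();
    for hash in tx_hashes {
        hasher.update(hash);
    }
    hasher.finalize()
}

fn bundle_hash_concatenated(tx_hashes: &[B256]) -> B256 {
    let mut bytes = Vec::with_capacity(32 * tx_hashes.len());
    for hash in tx_hashes {
        bytes.extend_from_slice(hash.as_slice());
    }
    keccak256(&bytes)
}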
#[derive(Metrics)] #[metrics(scope = "exex")] diff --git a/crates/node/builder/src/launch/exex.rs b/crates/node/builder/src/launch/exex.rs index 6cd705338384c..816335d3dbdfd 100644 --- a/crates/node/builder/src/launch/exex.rs +++ b/crates/node/builder/src/launch/exex.rs @@ -5,7 +5,9 @@ use std::{fmt, fmt::Debug}; use futures::future; use reth_chain_state::ForkChoiceSubscriptions; use reth_chainspec::EthChainSpec; -use reth_exex::{ExExContext, ExExHandle, ExExManager, ExExManagerHandle, Wal}; +use reth_exex::{ + ExExContext, ExExHandle, ExExManager, ExExManagerHandle, Wal, DEFAULT_EXEX_MANAGER_CAPACITY, +}; use reth_node_api::{FullNodeComponents, NodeTypes}; use reth_primitives::Head; use reth_provider::CanonStateSubscriptions; @@ -108,7 +110,7 @@ impl ExExLauncher { // todo(onbjerg): rm magic number let exex_manager = ExExManager::new( exex_handles, - 1024, + DEFAULT_EXEX_MANAGER_CAPACITY, exex_wal, components.provider().finalized_block_stream(), ); From 6c92f8a26d21812904b61a28ababe02d1067da64 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 27 Sep 2024 20:37:13 +0200 Subject: [PATCH 52/84] chore: rename param (#11287) --- crates/rpc/rpc-api/src/engine.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/rpc/rpc-api/src/engine.rs b/crates/rpc/rpc-api/src/engine.rs index d489d0dd7f643..50181d23a75c3 100644 --- a/crates/rpc/rpc-api/src/engine.rs +++ b/crates/rpc/rpc-api/src/engine.rs @@ -219,7 +219,7 @@ pub trait EngineApi { #[method(name = "getBlobsV1")] async fn get_blobs_v1( &self, - transaction_ids: Vec, + versioned_hashes: Vec, ) -> RpcResult>>; } From d8f16b83b72b3c5ae1bf476723af4b5b3d7a73e8 Mon Sep 17 00:00:00 2001 From: DaniPopes <57450786+DaniPopes@users.noreply.github.com> Date: Fri, 27 Sep 2024 20:37:23 +0200 Subject: [PATCH 53/84] perf(trie): use unstable sort when sorting for computing roots (#11301) --- crates/trie/common/src/root.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/trie/common/src/root.rs b/crates/trie/common/src/root.rs index 600e818ebbaab..20f3ba1366d57 100644 --- a/crates/trie/common/src/root.rs +++ b/crates/trie/common/src/root.rs @@ -75,7 +75,7 @@ pub fn state_root_unhashed>( pub fn state_root_unsorted>( state: impl IntoIterator, ) -> B256 { - state_root(state.into_iter().sorted_by_key(|(key, _)| *key)) + state_root(state.into_iter().sorted_unstable_by_key(|(key, _)| *key)) } /// Calculates the root hash of the state represented as MPT. @@ -105,7 +105,7 @@ pub fn storage_root_unhashed(storage: impl IntoIterator) -> /// Sorts and calculates the root hash of account storage trie. /// See [`storage_root`] for more info. pub fn storage_root_unsorted(storage: impl IntoIterator) -> B256 { - storage_root(storage.into_iter().sorted_by_key(|(key, _)| *key)) + storage_root(storage.into_iter().sorted_unstable_by_key(|(key, _)| *key)) } /// Calculates the root hash of account storage trie. 
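// Illustrative sketch (not part of the patch): the trie root helpers above only
// need the entries ordered by key before the trie is built, and duplicate keys
// cannot occur, so an unstable sort is sufficient. Example use of the unsorted
// entry point, with made-up values; the module path is an assumption:
use alloy_primitives::{B256, U256};
use reth_trie_common::root::storage_root_unsorted;

fn storage_root_example() -> B256 {
    let storage = [
        (B256::with_last_byte(2), U256::from(42u64)),
        (B256::with_last_byte(1), U256::from(7u64)),
    ];
    // Entries are sorted by slot key internally before hashing.
    storage_root_unsorted(storage)
}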
From 1bbf8b63063eedcb7a0bc07d1d026c0e8344635c Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Fri, 27 Sep 2024 19:38:28 +0100 Subject: [PATCH 54/84] test(rpc): filter test is taking too long (#11288) --- crates/rpc/rpc/src/eth/filter.rs | 38 ++++++++++++++++---------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/crates/rpc/rpc/src/eth/filter.rs b/crates/rpc/rpc/src/eth/filter.rs index 9c6562606b746..6be507501f6f3 100644 --- a/crates/rpc/rpc/src/eth/filter.rs +++ b/crates/rpc/rpc/src/eth/filter.rs @@ -698,28 +698,28 @@ impl Iterator for BlockRangeInclusiveIter { #[cfg(test)] mod tests { use super::*; - use rand::{thread_rng, Rng}; + use rand::Rng; + use reth_testing_utils::generators; #[test] fn test_block_range_iter() { - for _ in 0..100 { - let mut rng = thread_rng(); - let start = rng.gen::() as u64; - let end = start.saturating_add(rng.gen::() as u64); - let step = rng.gen::() as u64; - let range = start..=end; - let mut iter = BlockRangeInclusiveIter::new(range.clone(), step); - let (from, mut end) = iter.next().unwrap(); - assert_eq!(from, start); - assert_eq!(end, (from + step).min(*range.end())); - - for (next_from, next_end) in iter { - // ensure range starts with previous end + 1 - assert_eq!(next_from, end + 1); - end = next_end; - } - - assert_eq!(end, *range.end()); + let mut rng = generators::rng(); + + let start = rng.gen::() as u64; + let end = start.saturating_add(rng.gen::() as u64); + let step = rng.gen::() as u64; + let range = start..=end; + let mut iter = BlockRangeInclusiveIter::new(range.clone(), step); + let (from, mut end) = iter.next().unwrap(); + assert_eq!(from, start); + assert_eq!(end, (from + step).min(*range.end())); + + for (next_from, next_end) in iter { + // ensure range starts with previous end + 1 + assert_eq!(next_from, end + 1); + end = next_end; } + + assert_eq!(end, *range.end()); } } From 1009289c3da2bd322fd47f90dec5a9cad6bacf23 Mon Sep 17 00:00:00 2001 From: DaniPopes <57450786+DaniPopes@users.noreply.github.com> Date: Fri, 27 Sep 2024 22:54:48 +0200 Subject: [PATCH 55/84] chore(deps): bump tracy (#11305) --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d05a3396e7fe3..9eebe512599ba 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10826,9 +10826,9 @@ dependencies = [ [[package]] name = "tracy-client" -version = "0.17.3" +version = "0.17.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "373db47331c3407b343538df77eea2516884a0b126cdfb4b135acfd400015dd7" +checksum = "746b078c6a09ebfd5594609049e07116735c304671eaab06ce749854d23435bc" dependencies = [ "loom", "once_cell", @@ -10838,9 +10838,9 @@ dependencies = [ [[package]] name = "tracy-client-sys" -version = "0.24.0" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49cf0064dcb31c99aa1244c1b93439359e53f72ed217eef5db50abd442241e9a" +checksum = "68613466112302fdbeabc5fa55f7d57462a0b247d5a6b7d7e09401fb471a144d" dependencies = [ "cc", ] From e48f2a29cda7f6f9d80687c1144e6b382f91a841 Mon Sep 17 00:00:00 2001 From: DaniPopes <57450786+DaniPopes@users.noreply.github.com> Date: Fri, 27 Sep 2024 23:49:46 +0200 Subject: [PATCH 56/84] perf(db): cache `ProcessUID::own` in memory (#11302) --- crates/storage/db/src/lockfile.rs | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/crates/storage/db/src/lockfile.rs b/crates/storage/db/src/lockfile.rs index 828cec6e7d293..0b2e31a968e2d 100644 --- 
a/crates/storage/db/src/lockfile.rs +++ b/crates/storage/db/src/lockfile.rs @@ -7,7 +7,7 @@ use reth_tracing::tracing::error; use std::{ path::{Path, PathBuf}, process, - sync::Arc, + sync::{Arc, OnceLock}, }; use sysinfo::{ProcessRefreshKind, RefreshKind, System}; @@ -91,7 +91,7 @@ impl StorageLockInner { } } -#[derive(Debug)] +#[derive(Clone, Debug)] struct ProcessUID { /// OS process identifier pid: usize, @@ -102,14 +102,16 @@ struct ProcessUID { impl ProcessUID { /// Creates [`Self`] for the provided PID. fn new(pid: usize) -> Option { - System::new_with_specifics(RefreshKind::new().with_processes(ProcessRefreshKind::new())) - .process(pid.into()) - .map(|process| Self { pid, start_time: process.start_time() }) + let mut system = System::new(); + let pid2 = sysinfo::Pid::from(pid); + system.refresh_process_specifics(pid2, ProcessRefreshKind::new()); + system.process(pid2).map(|process| Self { pid, start_time: process.start_time() }) } /// Creates [`Self`] from own process. fn own() -> Self { - Self::new(process::id() as usize).expect("own process") + static CACHE: OnceLock = OnceLock::new(); + CACHE.get_or_init(|| Self::new(process::id() as usize).expect("own process")).clone() } /// Parses [`Self`] from a file. From 50265b15db0789862dcae67dc462b565dab3d94d Mon Sep 17 00:00:00 2001 From: DaniPopes <57450786+DaniPopes@users.noreply.github.com> Date: Fri, 27 Sep 2024 23:49:49 +0200 Subject: [PATCH 57/84] chore: remove generics from Decode and Decompress (#11295) --- .../stages/src/stages/hashing_storage.rs | 2 +- .../src/stages/index_account_history.rs | 2 +- .../src/stages/index_storage_history.rs | 2 +- crates/storage/db-api/src/models/accounts.rs | 12 ++-- crates/storage/db-api/src/models/blocks.rs | 5 +- .../storage/db-api/src/models/integer_list.rs | 4 +- crates/storage/db-api/src/models/mod.rs | 70 +++++++++---------- .../storage/db-api/src/models/sharded_key.rs | 37 ++-------- .../db-api/src/models/storage_sharded_key.rs | 3 +- crates/storage/db-api/src/scale.rs | 4 +- crates/storage/db-api/src/table.rs | 11 ++- crates/storage/db-api/src/utils.rs | 5 +- crates/storage/db/benches/criterion.rs | 4 +- crates/storage/db/benches/iai.rs | 4 +- crates/storage/db/benches/utils.rs | 5 +- .../db/src/implementation/mdbx/cursor.rs | 2 +- .../storage/db/src/tables/codecs/fuzz/mod.rs | 5 +- crates/storage/db/src/tables/mod.rs | 4 +- crates/storage/db/src/tables/raw.rs | 12 ++-- crates/storage/db/src/tables/utils.rs | 8 +-- 20 files changed, 88 insertions(+), 113 deletions(-) diff --git a/crates/stages/stages/src/stages/hashing_storage.rs b/crates/stages/stages/src/stages/hashing_storage.rs index ba1e03c1a296f..1862a3248ded6 100644 --- a/crates/stages/stages/src/stages/hashing_storage.rs +++ b/crates/stages/stages/src/stages/hashing_storage.rs @@ -134,7 +134,7 @@ where B256::from_slice(&addr_key[..32]), StorageEntry { key: B256::from_slice(&addr_key[32..]), - value: CompactU256::decompress(value)?.into(), + value: CompactU256::decompress_owned(value)?.into(), }, )?; } diff --git a/crates/stages/stages/src/stages/index_account_history.rs b/crates/stages/stages/src/stages/index_account_history.rs index 39d50242aff7a..1e96419807714 100644 --- a/crates/stages/stages/src/stages/index_account_history.rs +++ b/crates/stages/stages/src/stages/index_account_history.rs @@ -118,7 +118,7 @@ where collector, first_sync, ShardedKey::new, - ShardedKey::

<Address>::decode,
+            ShardedKey::<Address>
::decode_owned, |key| key.key, )?; diff --git a/crates/stages/stages/src/stages/index_storage_history.rs b/crates/stages/stages/src/stages/index_storage_history.rs index efeec4a3a217d..ee68e934f4ed0 100644 --- a/crates/stages/stages/src/stages/index_storage_history.rs +++ b/crates/stages/stages/src/stages/index_storage_history.rs @@ -124,7 +124,7 @@ where |AddressStorageKey((address, storage_key)), highest_block_number| { StorageShardedKey::new(address, storage_key, highest_block_number) }, - StorageShardedKey::decode, + StorageShardedKey::decode_owned, |key| AddressStorageKey((key.address, key.sharded_key.key)), )?; diff --git a/crates/storage/db-api/src/models/accounts.rs b/crates/storage/db-api/src/models/accounts.rs index 338a3a06f6004..94922632129b9 100644 --- a/crates/storage/db-api/src/models/accounts.rs +++ b/crates/storage/db-api/src/models/accounts.rs @@ -64,11 +64,9 @@ impl Encode for BlockNumberAddress { } impl Decode for BlockNumberAddress { - fn decode>(value: B) -> Result { - let value = value.as_ref(); + fn decode(value: &[u8]) -> Result { let num = u64::from_be_bytes(value[..8].try_into().map_err(|_| DatabaseError::Decode)?); let hash = Address::from_slice(&value[8..]); - Ok(Self((num, hash))) } } @@ -97,11 +95,9 @@ impl Encode for AddressStorageKey { } impl Decode for AddressStorageKey { - fn decode>(value: B) -> Result { - let value = value.as_ref(); + fn decode(value: &[u8]) -> Result { let address = Address::from_slice(&value[..20]); let storage_key = StorageKey::from_slice(&value[20..]); - Ok(Self((address, storage_key))) } } @@ -127,7 +123,7 @@ mod tests { let encoded = Encode::encode(key); assert_eq!(encoded, bytes); - let decoded: BlockNumberAddress = Decode::decode(encoded).unwrap(); + let decoded: BlockNumberAddress = Decode::decode(&encoded).unwrap(); assert_eq!(decoded, key); } @@ -152,7 +148,7 @@ mod tests { let encoded = Encode::encode(key); assert_eq!(encoded, bytes); - let decoded: AddressStorageKey = Decode::decode(encoded).unwrap(); + let decoded: AddressStorageKey = Decode::decode(&encoded).unwrap(); assert_eq!(decoded, key); } diff --git a/crates/storage/db-api/src/models/blocks.rs b/crates/storage/db-api/src/models/blocks.rs index b48baf6d6b26f..7268d82dd3cca 100644 --- a/crates/storage/db-api/src/models/blocks.rs +++ b/crates/storage/db-api/src/models/blocks.rs @@ -29,9 +29,6 @@ mod tests { let mut ommer = StoredBlockOmmers::default(); ommer.ommers.push(Header::default()); ommer.ommers.push(Header::default()); - assert_eq!( - ommer.clone(), - StoredBlockOmmers::decompress::>(ommer.compress()).unwrap() - ); + assert_eq!(ommer.clone(), StoredBlockOmmers::decompress(&ommer.compress()).unwrap()); } } diff --git a/crates/storage/db-api/src/models/integer_list.rs b/crates/storage/db-api/src/models/integer_list.rs index 6d28b5496a1bb..480b52a9e2c09 100644 --- a/crates/storage/db-api/src/models/integer_list.rs +++ b/crates/storage/db-api/src/models/integer_list.rs @@ -19,7 +19,7 @@ impl Compress for IntegerList { } impl Decompress for IntegerList { - fn decompress>(value: B) -> Result { - Self::from_bytes(value.as_ref()).map_err(|_| DatabaseError::Decode) + fn decompress(value: &[u8]) -> Result { + Self::from_bytes(value).map_err(|_| DatabaseError::Decode) } } diff --git a/crates/storage/db-api/src/models/mod.rs b/crates/storage/db-api/src/models/mod.rs index 6e832a0314f45..9e7e8957b5a9e 100644 --- a/crates/storage/db-api/src/models/mod.rs +++ b/crates/storage/db-api/src/models/mod.rs @@ -42,10 +42,10 @@ macro_rules! 
impl_uints { } impl Decode for $name { - fn decode>(value: B) -> Result { + fn decode(value: &[u8]) -> Result { Ok( $name::from_be_bytes( - value.as_ref().try_into().map_err(|_| $crate::DatabaseError::Decode)? + value.try_into().map_err(|_| $crate::DatabaseError::Decode)? ) ) } @@ -65,8 +65,12 @@ impl Encode for Vec { } impl Decode for Vec { - fn decode>(value: B) -> Result { - Ok(value.as_ref().to_vec()) + fn decode(value: &[u8]) -> Result { + Ok(value.to_vec()) + } + + fn decode_owned(value: Vec) -> Result { + Ok(value) } } @@ -79,8 +83,8 @@ impl Encode for Address { } impl Decode for Address { - fn decode>(value: B) -> Result { - Ok(Self::from_slice(value.as_ref())) + fn decode(value: &[u8]) -> Result { + Ok(Self::from_slice(value)) } } @@ -93,8 +97,8 @@ impl Encode for B256 { } impl Decode for B256 { - fn decode>(value: B) -> Result { - Ok(Self::new(value.as_ref().try_into().map_err(|_| DatabaseError::Decode)?)) + fn decode(value: &[u8]) -> Result { + Ok(Self::new(value.try_into().map_err(|_| DatabaseError::Decode)?)) } } @@ -107,8 +111,12 @@ impl Encode for String { } impl Decode for String { - fn decode>(value: B) -> Result { - Self::from_utf8(value.as_ref().to_vec()).map_err(|_| DatabaseError::Decode) + fn decode(value: &[u8]) -> Result { + Self::decode_owned(value.to_vec()) + } + + fn decode_owned(value: Vec) -> Result { + Self::from_utf8(value).map_err(|_| DatabaseError::Decode) } } @@ -124,9 +132,8 @@ impl Encode for StoredNibbles { } impl Decode for StoredNibbles { - fn decode>(value: B) -> Result { - let buf = value.as_ref(); - Ok(Self::from_compact(buf, buf.len()).0) + fn decode(value: &[u8]) -> Result { + Ok(Self::from_compact(value, value.len()).0) } } @@ -142,9 +149,8 @@ impl Encode for StoredNibblesSubKey { } impl Decode for StoredNibblesSubKey { - fn decode>(value: B) -> Result { - let buf = value.as_ref(); - Ok(Self::from_compact(buf, buf.len()).0) + fn decode(value: &[u8]) -> Result { + Ok(Self::from_compact(value, value.len()).0) } } @@ -159,9 +165,8 @@ impl Encode for PruneSegment { } impl Decode for PruneSegment { - fn decode>(value: B) -> Result { - let buf = value.as_ref(); - Ok(Self::from_compact(buf, buf.len()).0) + fn decode(value: &[u8]) -> Result { + Ok(Self::from_compact(value, value.len()).0) } } @@ -177,9 +182,8 @@ impl Encode for ClientVersion { } impl Decode for ClientVersion { - fn decode>(value: B) -> Result { - let buf = value.as_ref(); - Ok(Self::from_compact(buf, buf.len()).0) + fn decode(value: &[u8]) -> Result { + Ok(Self::from_compact(value, value.len()).0) } } @@ -196,9 +200,8 @@ macro_rules! impl_compression_for_compact { } impl Decompress for $name { - fn decompress>(value: B) -> Result<$name, $crate::DatabaseError> { - let value = value.as_ref(); - let (obj, _) = Compact::from_compact(&value, value.len()); + fn decompress(value: &[u8]) -> Result<$name, $crate::DatabaseError> { + let (obj, _) = Compact::from_compact(value, value.len()); Ok(obj) } } @@ -236,23 +239,20 @@ impl_compression_for_compact!( macro_rules! 
impl_compression_fixed_compact { ($($name:tt),+) => { $( - impl Compress for $name - { + impl Compress for $name { type Compressed = Vec; - fn compress_to_buf>(self, buf: &mut B) { - let _ = Compact::to_compact(&self, buf); - } - fn uncompressable_ref(&self) -> Option<&[u8]> { Some(self.as_ref()) } + + fn compress_to_buf>(self, buf: &mut B) { + let _ = Compact::to_compact(&self, buf); + } } - impl Decompress for $name - { - fn decompress>(value: B) -> Result<$name, $crate::DatabaseError> { - let value = value.as_ref(); + impl Decompress for $name { + fn decompress(value: &[u8]) -> Result<$name, $crate::DatabaseError> { let (obj, _) = Compact::from_compact(&value, value.len()); Ok(obj) } diff --git a/crates/storage/db-api/src/models/sharded_key.rs b/crates/storage/db-api/src/models/sharded_key.rs index dd8702a4812bf..d1de1bd400c4e 100644 --- a/crates/storage/db-api/src/models/sharded_key.rs +++ b/crates/storage/db-api/src/models/sharded_key.rs @@ -16,7 +16,7 @@ pub const NUM_OF_INDICES_IN_SHARD: usize = 2_000; /// `Address | 200` -> data is from block 0 to 200. /// /// `Address | 300` -> data is from block 201 to 300. -#[derive(Debug, Default, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)] pub struct ShardedKey { /// The key for this type. pub key: T, @@ -43,11 +43,7 @@ impl ShardedKey { } } -impl Encode for ShardedKey -where - T: Encode, - Vec: From<::Encoded>, -{ +impl Encode for ShardedKey { type Encoded = Vec; fn encode(self) -> Self::Encoded { @@ -57,30 +53,11 @@ where } } -impl Decode for ShardedKey -where - T: Decode, -{ - fn decode>(value: B) -> Result { - let value = value.as_ref(); - - let tx_num_index = value.len() - 8; - - let highest_tx_number = u64::from_be_bytes( - value[tx_num_index..].try_into().map_err(|_| DatabaseError::Decode)?, - ); - let key = T::decode(&value[..tx_num_index])?; - +impl Decode for ShardedKey { + fn decode(value: &[u8]) -> Result { + let (key, highest_tx_number) = value.split_last_chunk().unwrap(); + let key = T::decode(key)?; + let highest_tx_number = u64::from_be_bytes(*highest_tx_number); Ok(Self::new(key, highest_tx_number)) } } - -impl Hash for ShardedKey -where - T: Hash, -{ - fn hash(&self, state: &mut H) { - self.key.hash(state); - self.highest_block_number.hash(state); - } -} diff --git a/crates/storage/db-api/src/models/storage_sharded_key.rs b/crates/storage/db-api/src/models/storage_sharded_key.rs index b6538256e6299..5fd79ba655c1a 100644 --- a/crates/storage/db-api/src/models/storage_sharded_key.rs +++ b/crates/storage/db-api/src/models/storage_sharded_key.rs @@ -61,8 +61,7 @@ impl Encode for StorageShardedKey { } impl Decode for StorageShardedKey { - fn decode>(value: B) -> Result { - let value = value.as_ref(); + fn decode(value: &[u8]) -> Result { let tx_num_index = value.len() - 8; let highest_tx_number = u64::from_be_bytes( diff --git a/crates/storage/db-api/src/scale.rs b/crates/storage/db-api/src/scale.rs index 075eb74f8d8ef..591635be054e6 100644 --- a/crates/storage/db-api/src/scale.rs +++ b/crates/storage/db-api/src/scale.rs @@ -30,8 +30,8 @@ impl Decompress for T where T: ScaleValue + parity_scale_codec::Decode + Sync + Send + std::fmt::Debug, { - fn decompress>(value: B) -> Result { - parity_scale_codec::Decode::decode(&mut value.as_ref()).map_err(|_| DatabaseError::Decode) + fn decompress(mut value: &[u8]) -> Result { + parity_scale_codec::Decode::decode(&mut value).map_err(|_| DatabaseError::Decode) } } diff --git 
a/crates/storage/db-api/src/table.rs b/crates/storage/db-api/src/table.rs index 6d3f52198d28a..963457af05c36 100644 --- a/crates/storage/db-api/src/table.rs +++ b/crates/storage/db-api/src/table.rs @@ -38,11 +38,11 @@ pub trait Compress: Send + Sync + Sized + Debug { /// Trait that will transform the data to be read from the DB. pub trait Decompress: Send + Sync + Sized + Debug { /// Decompresses data coming from the database. - fn decompress>(value: B) -> Result; + fn decompress(value: &[u8]) -> Result; /// Decompresses owned data coming from the database. fn decompress_owned(value: Vec) -> Result { - Self::decompress(value) + Self::decompress(&value) } } @@ -58,7 +58,12 @@ pub trait Encode: Send + Sync + Sized + Debug { /// Trait that will transform the data to be read from the DB. pub trait Decode: Send + Sync + Sized + Debug { /// Decodes data coming from the database. - fn decode>(value: B) -> Result; + fn decode(value: &[u8]) -> Result; + + /// Decodes owned data coming from the database. + fn decode_owned(value: Vec) -> Result { + Self::decode(&value) + } } /// Generic trait that enforces the database key to implement [`Encode`] and [`Decode`]. diff --git a/crates/storage/db-api/src/utils.rs b/crates/storage/db-api/src/utils.rs index b9ee6277e9595..65ed5b6c01d4d 100644 --- a/crates/storage/db-api/src/utils.rs +++ b/crates/storage/db-api/src/utils.rs @@ -10,8 +10,7 @@ macro_rules! impl_fixed_arbitrary { fn arbitrary(u: &mut Unstructured<'a>) -> Result { let mut buffer = vec![0; $size]; u.fill_buffer(buffer.as_mut_slice())?; - - Decode::decode(buffer).map_err(|_| arbitrary::Error::IncorrectFormat) + Decode::decode_owned(buffer).map_err(|_| arbitrary::Error::IncorrectFormat) } } @@ -26,7 +25,7 @@ macro_rules! impl_fixed_arbitrary { fn arbitrary_with(args: Self::Parameters) -> Self::Strategy { use proptest::strategy::Strategy; proptest::collection::vec(proptest::arbitrary::any_with::(args), $size) - .prop_map(move |vec| Decode::decode(vec).unwrap()) + .prop_map(move |vec| Decode::decode_owned(vec).unwrap()) } } )+ diff --git a/crates/storage/db/benches/criterion.rs b/crates/storage/db/benches/criterion.rs index 6d273a8ce93ca..7ac9566d80c54 100644 --- a/crates/storage/db/benches/criterion.rs +++ b/crates/storage/db/benches/criterion.rs @@ -87,7 +87,7 @@ where |input| { { for (_, k, _, _) in input { - let _ = ::Key::decode(k); + let _ = ::Key::decode(&k); } }; black_box(()); @@ -115,7 +115,7 @@ where |input| { { for (_, _, _, v) in input { - let _ = ::Value::decompress(v); + let _ = ::Value::decompress(&v); } }; black_box(()); diff --git a/crates/storage/db/benches/iai.rs b/crates/storage/db/benches/iai.rs index ebcf6c8a42c0b..167cd0860e260 100644 --- a/crates/storage/db/benches/iai.rs +++ b/crates/storage/db/benches/iai.rs @@ -25,7 +25,7 @@ macro_rules! impl_iai_callgrind_inner { #[library_benchmark] pub fn $decompress() { for (_, _, _, comp) in black_box(load_vectors::()) { - let _ = black_box(::Value::decompress(comp)); + let _ = black_box(::Value::decompress(&comp)); } } @@ -39,7 +39,7 @@ macro_rules! 
impl_iai_callgrind_inner { #[library_benchmark] pub fn $decode() { for (_, enc, _, _) in black_box(load_vectors::()) { - let _ = black_box(::Key::decode(enc)); + let _ = black_box(::Key::decode(&enc)); } } diff --git a/crates/storage/db/benches/utils.rs b/crates/storage/db/benches/utils.rs index 72d121aa75ccb..9700ef94b241c 100644 --- a/crates/storage/db/benches/utils.rs +++ b/crates/storage/db/benches/utils.rs @@ -1,7 +1,5 @@ -#![cfg(feature = "test-utils")] #![allow(missing_docs)] - -use std::{path::Path, sync::Arc}; +#![cfg(feature = "test-utils")] use alloy_primitives::Bytes; use reth_db::{test_utils::create_test_rw_db_with_path, DatabaseEnv}; @@ -11,6 +9,7 @@ use reth_db_api::{ Database, }; use reth_fs_util as fs; +use std::{path::Path, sync::Arc}; /// Path where the DB is initialized for benchmarks. #[allow(dead_code)] diff --git a/crates/storage/db/src/implementation/mdbx/cursor.rs b/crates/storage/db/src/implementation/mdbx/cursor.rs index c908bad459424..756a622bcb035 100644 --- a/crates/storage/db/src/implementation/mdbx/cursor.rs +++ b/crates/storage/db/src/implementation/mdbx/cursor.rs @@ -81,7 +81,7 @@ macro_rules! compress_to_buf_or_ref { if let Some(value) = $value.uncompressable_ref() { Some(value) } else { - $self.buf.truncate(0); + $self.buf.clear(); $value.compress_to_buf(&mut $self.buf); None } diff --git a/crates/storage/db/src/tables/codecs/fuzz/mod.rs b/crates/storage/db/src/tables/codecs/fuzz/mod.rs index 846ed17e1f1a1..e64a3841df49e 100644 --- a/crates/storage/db/src/tables/codecs/fuzz/mod.rs +++ b/crates/storage/db/src/tables/codecs/fuzz/mod.rs @@ -30,13 +30,12 @@ macro_rules! impl_fuzzer_with_input { /// Encodes and decodes table types returning its encoded size and the decoded object. /// This method is used for benchmarking, so its parameter should be the actual type that is being tested. - pub fn encode_and_decode(obj: $name) -> (usize, $name) - { + pub fn encode_and_decode(obj: $name) -> (usize, $name) { let data = table::$encode::$encode_method(obj); let size = data.len(); // Some `data` might be a fixed array. 
- (size, table::$decode::$decode_method(data.to_vec()).expect("failed to decode")) + (size, table::$decode::$decode_method(&data).expect("failed to decode")) } #[cfg(test)] diff --git a/crates/storage/db/src/tables/mod.rs b/crates/storage/db/src/tables/mod.rs index 835d1486dafe4..384139618163f 100644 --- a/crates/storage/db/src/tables/mod.rs +++ b/crates/storage/db/src/tables/mod.rs @@ -429,8 +429,8 @@ impl Encode for ChainStateKey { } impl Decode for ChainStateKey { - fn decode>(value: B) -> Result { - if value.as_ref() == [0] { + fn decode(value: &[u8]) -> Result { + if value == [0] { Ok(Self::LastFinalizedBlock) } else { Err(reth_db_api::DatabaseError::Decode) diff --git a/crates/storage/db/src/tables/raw.rs b/crates/storage/db/src/tables/raw.rs index 1e8fa56b3603c..6b6de41613eb4 100644 --- a/crates/storage/db/src/tables/raw.rs +++ b/crates/storage/db/src/tables/raw.rs @@ -96,8 +96,12 @@ impl Encode for RawKey { // Decode impl Decode for RawKey { - fn decode>(key: B) -> Result { - Ok(Self { key: key.as_ref().to_vec(), _phantom: std::marker::PhantomData }) + fn decode(value: &[u8]) -> Result { + Ok(Self { key: value.to_vec(), _phantom: std::marker::PhantomData }) + } + + fn decode_owned(value: Vec) -> Result { + Ok(Self { key: value, _phantom: std::marker::PhantomData }) } } @@ -168,8 +172,8 @@ impl Compress for RawValue { } impl Decompress for RawValue { - fn decompress>(value: B) -> Result { - Ok(Self { value: value.as_ref().to_vec(), _phantom: std::marker::PhantomData }) + fn decompress(value: &[u8]) -> Result { + Ok(Self { value: value.to_vec(), _phantom: std::marker::PhantomData }) } fn decompress_owned(value: Vec) -> Result { diff --git a/crates/storage/db/src/tables/utils.rs b/crates/storage/db/src/tables/utils.rs index 616d1038264e7..0948ee108f687 100644 --- a/crates/storage/db/src/tables/utils.rs +++ b/crates/storage/db/src/tables/utils.rs @@ -6,7 +6,7 @@ use std::borrow::Cow; /// Helper function to decode a `(key, value)` pair. 
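// Illustrative sketch (not part of the patch): after the `Decode`/`Decompress`
// change, keys decode from a plain `&[u8]`, with an owned variant that can reuse
// the caller's buffer. Both impls used here appear in the hunks above; the error
// type is `reth_db_api::DatabaseError`:
use alloy_primitives::Address;
use reth_db_api::table::Decode;

fn decode_keys(raw: &[u8], owned: Vec<u8>) -> Result<(Address, String), reth_db_api::DatabaseError> {
    // Borrowed decoding from a slice, and owned decoding that takes the Vec
    // directly (the `String` impl converts it without copying).
    Ok((Address::decode(raw)?, String::decode_owned(owned)?))
}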
pub(crate) fn decoder<'a, T>( - kv: (Cow<'a, [u8]>, Cow<'a, [u8]>), + (k, v): (Cow<'a, [u8]>, Cow<'a, [u8]>), ) -> Result, DatabaseError> where T: Table, @@ -14,11 +14,11 @@ where T::Value: Decompress, { Ok(( - match kv.0 { + match k { Cow::Borrowed(k) => Decode::decode(k)?, - Cow::Owned(k) => Decode::decode(k)?, + Cow::Owned(k) => Decode::decode_owned(k)?, }, - match kv.1 { + match v { Cow::Borrowed(v) => Decompress::decompress(v)?, Cow::Owned(v) => Decompress::decompress_owned(v)?, }, From 50461281e765efa808e6ce26a27fca33a16c00c5 Mon Sep 17 00:00:00 2001 From: Abhishek kochar Date: Sat, 28 Sep 2024 18:48:17 +0800 Subject: [PATCH 58/84] chore(blockchain-tree): replace reth-primitives with alloy-eips (#11276) Signed-off-by: Abhishekkochar --- Cargo.lock | 1 + crates/blockchain-tree/Cargo.toml | 1 + crates/blockchain-tree/src/block_buffer.rs | 3 ++- crates/blockchain-tree/src/block_indices.rs | 3 ++- crates/blockchain-tree/src/blockchain_tree.rs | 5 +++-- crates/blockchain-tree/src/bundle.rs | 2 +- crates/blockchain-tree/src/canonical_chain.rs | 2 +- crates/blockchain-tree/src/chain.rs | 3 ++- crates/blockchain-tree/src/noop.rs | 3 ++- crates/blockchain-tree/src/shareable.rs | 3 ++- 10 files changed, 17 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9eebe512599ba..c6ab33da1f554 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6394,6 +6394,7 @@ name = "reth-blockchain-tree" version = "1.0.7" dependencies = [ "alloy-consensus", + "alloy-eips", "alloy-genesis", "alloy-primitives", "aquamarine", diff --git a/crates/blockchain-tree/Cargo.toml b/crates/blockchain-tree/Cargo.toml index d0718c97b8ccd..cff117c92b05b 100644 --- a/crates/blockchain-tree/Cargo.toml +++ b/crates/blockchain-tree/Cargo.toml @@ -32,6 +32,7 @@ reth-node-types.workspace = true # ethereum alloy-primitives.workspace = true +alloy-eips.workspace = true # common parking_lot.workspace = true diff --git a/crates/blockchain-tree/src/block_buffer.rs b/crates/blockchain-tree/src/block_buffer.rs index 99729af0fae5b..e116463e4af67 100644 --- a/crates/blockchain-tree/src/block_buffer.rs +++ b/crates/blockchain-tree/src/block_buffer.rs @@ -183,8 +183,9 @@ impl BlockBuffer { #[cfg(test)] mod tests { use crate::BlockBuffer; + use alloy_eips::BlockNumHash; use alloy_primitives::BlockHash; - use reth_primitives::{BlockNumHash, SealedBlockWithSenders}; + use reth_primitives::SealedBlockWithSenders; use reth_testing_utils::generators::{self, random_block, BlockParams, Rng}; use std::collections::HashMap; diff --git a/crates/blockchain-tree/src/block_indices.rs b/crates/blockchain-tree/src/block_indices.rs index 23c63bf6d243a..0c48b3b9ce853 100644 --- a/crates/blockchain-tree/src/block_indices.rs +++ b/crates/blockchain-tree/src/block_indices.rs @@ -2,10 +2,11 @@ use super::state::SidechainId; use crate::canonical_chain::CanonicalChain; +use alloy_eips::BlockNumHash; use alloy_primitives::{BlockHash, BlockNumber}; use linked_hash_set::LinkedHashSet; use reth_execution_types::Chain; -use reth_primitives::{BlockNumHash, SealedBlockWithSenders}; +use reth_primitives::SealedBlockWithSenders; use std::collections::{btree_map, hash_map, BTreeMap, BTreeSet, HashMap, HashSet}; /// Internal indices of the blocks and chains. 
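Editor's note: the substantive change running through the storage diffs above is the split between borrowed and owned decoding. The `decoder` helper in `tables/utils.rs` now dispatches `Cow::Borrowed` pages to `Decode::decode(&[u8])` and `Cow::Owned` buffers to `decode_owned(Vec<u8>)`, and `RawKey`/`RawValue` reuse the owned allocation instead of copying it; `Decompress`/`decompress_owned` follow the same pattern on the value side. A minimal, self-contained sketch of that dispatch pattern follows; the trait and error type are simplified stand-ins, not the real `reth_db_api` definitions.

    use std::borrow::Cow;

    // Simplified stand-in for the reth_db_api Decode trait; the error type and
    // the default method here are illustrative only.
    trait Decode: Sized {
        fn decode(value: &[u8]) -> Result<Self, String>;

        // Owned buffers fall back to the borrowed path; types that can reuse
        // the allocation (like RawKey/RawValue in the diff) override this.
        fn decode_owned(value: Vec<u8>) -> Result<Self, String> {
            Self::decode(&value)
        }
    }

    struct RawBytes(Vec<u8>);

    impl Decode for RawBytes {
        fn decode(value: &[u8]) -> Result<Self, String> {
            Ok(Self(value.to_vec())) // borrowed input: a copy is unavoidable
        }

        fn decode_owned(value: Vec<u8>) -> Result<Self, String> {
            Ok(Self(value)) // owned input: take the allocation, no copy
        }
    }

    // Mirrors the `decoder` helper: dispatch on the Cow variant so owned
    // buffers skip the extra to_vec().
    fn decode_cow(value: Cow<'_, [u8]>) -> Result<RawBytes, String> {
        match value {
            Cow::Borrowed(v) => RawBytes::decode(v),
            Cow::Owned(v) => RawBytes::decode_owned(v),
        }
    }

    fn main() {
        let borrowed = decode_cow(Cow::Borrowed(&[1, 2, 3][..])).unwrap();
        let owned = decode_cow(Cow::Owned(vec![4, 5, 6])).unwrap();
        assert_eq!((borrowed.0.len(), owned.0.len()), (3, 3));
    }
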
diff --git a/crates/blockchain-tree/src/blockchain_tree.rs b/crates/blockchain-tree/src/blockchain_tree.rs index 5d3bffe8968d4..e0feee2cdc2c5 100644 --- a/crates/blockchain-tree/src/blockchain_tree.rs +++ b/crates/blockchain-tree/src/blockchain_tree.rs @@ -5,6 +5,7 @@ use crate::{ state::{SidechainId, TreeState}, AppendableChain, BlockIndices, BlockchainTreeConfig, ExecutionData, TreeExternals, }; +use alloy_eips::{BlockNumHash, ForkBlock}; use alloy_primitives::{BlockHash, BlockNumber, B256, U256}; use reth_blockchain_tree_api::{ error::{BlockchainTreeError, CanonicalError, InsertBlockError, InsertBlockErrorKind}, @@ -16,8 +17,8 @@ use reth_execution_errors::{BlockExecutionError, BlockValidationError}; use reth_execution_types::{Chain, ExecutionOutcome}; use reth_node_types::NodeTypesWithDB; use reth_primitives::{ - BlockNumHash, EthereumHardfork, ForkBlock, GotExpected, Hardforks, Receipt, SealedBlock, - SealedBlockWithSenders, SealedHeader, StaticFileSegment, + EthereumHardfork, GotExpected, Hardforks, Receipt, SealedBlock, SealedBlockWithSenders, + SealedHeader, StaticFileSegment, }; use reth_provider::{ providers::ProviderNodeTypes, BlockExecutionWriter, BlockNumReader, BlockWriter, diff --git a/crates/blockchain-tree/src/bundle.rs b/crates/blockchain-tree/src/bundle.rs index 226afd8fab59e..6f62d4136bb76 100644 --- a/crates/blockchain-tree/src/bundle.rs +++ b/crates/blockchain-tree/src/bundle.rs @@ -1,7 +1,7 @@ //! [`ExecutionDataProvider`] implementations used by the tree. +use alloy_eips::ForkBlock; use alloy_primitives::{BlockHash, BlockNumber}; -use reth_primitives::ForkBlock; use reth_provider::{BlockExecutionForkProvider, ExecutionDataProvider, ExecutionOutcome}; use std::collections::BTreeMap; diff --git a/crates/blockchain-tree/src/canonical_chain.rs b/crates/blockchain-tree/src/canonical_chain.rs index e3dc596ba0e72..7dcd466f7d64a 100644 --- a/crates/blockchain-tree/src/canonical_chain.rs +++ b/crates/blockchain-tree/src/canonical_chain.rs @@ -1,5 +1,5 @@ +use alloy_eips::BlockNumHash; use alloy_primitives::{BlockHash, BlockNumber}; -use reth_primitives::BlockNumHash; use std::collections::BTreeMap; /// This keeps track of (non-finalized) blocks of the canonical chain. 
diff --git a/crates/blockchain-tree/src/chain.rs b/crates/blockchain-tree/src/chain.rs index 596458e20390c..393e525d5ae20 100644 --- a/crates/blockchain-tree/src/chain.rs +++ b/crates/blockchain-tree/src/chain.rs @@ -5,6 +5,7 @@ use super::externals::TreeExternals; use crate::BundleStateDataRef; +use alloy_eips::ForkBlock; use alloy_primitives::{BlockHash, BlockNumber, U256}; use reth_blockchain_tree_api::{ error::{BlockchainTreeError, InsertBlockErrorKind}, @@ -14,7 +15,7 @@ use reth_consensus::{Consensus, ConsensusError, PostExecutionInput}; use reth_evm::execute::{BlockExecutorProvider, Executor}; use reth_execution_errors::BlockExecutionError; use reth_execution_types::{Chain, ExecutionOutcome}; -use reth_primitives::{ForkBlock, GotExpected, SealedBlockWithSenders, SealedHeader}; +use reth_primitives::{GotExpected, SealedBlockWithSenders, SealedHeader}; use reth_provider::{ providers::{BundleStateProvider, ConsistentDbView, ProviderNodeTypes}, FullExecutionDataProvider, ProviderError, StateRootProvider, TryIntoHistoricalStateProvider, diff --git a/crates/blockchain-tree/src/noop.rs b/crates/blockchain-tree/src/noop.rs index 76e59a47792f0..925b8f03add7b 100644 --- a/crates/blockchain-tree/src/noop.rs +++ b/crates/blockchain-tree/src/noop.rs @@ -1,3 +1,4 @@ +use alloy_eips::BlockNumHash; use alloy_primitives::{BlockHash, BlockNumber}; use reth_blockchain_tree_api::{ self, @@ -5,7 +6,7 @@ use reth_blockchain_tree_api::{ BlockValidationKind, BlockchainTreeEngine, BlockchainTreeViewer, CanonicalOutcome, InsertPayloadOk, }; -use reth_primitives::{BlockNumHash, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader}; +use reth_primitives::{Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader}; use reth_provider::{ BlockchainTreePendingStateProvider, CanonStateNotificationSender, CanonStateNotifications, CanonStateSubscriptions, FullExecutionDataProvider, diff --git a/crates/blockchain-tree/src/shareable.rs b/crates/blockchain-tree/src/shareable.rs index 333527b83ef79..8e6cceccdd19c 100644 --- a/crates/blockchain-tree/src/shareable.rs +++ b/crates/blockchain-tree/src/shareable.rs @@ -1,6 +1,7 @@ //! Wrapper around `BlockchainTree` that allows for it to be shared. 
use super::BlockchainTree; +use alloy_eips::BlockNumHash; use alloy_primitives::{BlockHash, BlockNumber}; use parking_lot::RwLock; use reth_blockchain_tree_api::{ @@ -10,7 +11,7 @@ use reth_blockchain_tree_api::{ }; use reth_evm::execute::BlockExecutorProvider; use reth_node_types::NodeTypesWithDB; -use reth_primitives::{BlockNumHash, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader}; +use reth_primitives::{Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader}; use reth_provider::{ providers::ProviderNodeTypes, BlockchainTreePendingStateProvider, CanonStateSubscriptions, FullExecutionDataProvider, ProviderError, From 375acdfedc17d007b08f4c5e08ad1b6ab81531a3 Mon Sep 17 00:00:00 2001 From: nk_ysg Date: Sat, 28 Sep 2024 18:49:17 +0800 Subject: [PATCH 59/84] chore(op): unify crate name reth-optimism-* (#11223) --- .github/assets/check_wasm.sh | 4 +- .github/workflows/integration.yml | 2 +- Cargo.lock | 162 +++++++++--------- Cargo.toml | 4 +- crates/node/builder/src/builder/mod.rs | 2 +- crates/optimism/bin/Cargo.toml | 6 +- crates/optimism/bin/src/main.rs | 2 +- crates/optimism/cli/Cargo.toml | 10 +- .../cli/src/commands/build_pipeline.rs | 2 +- crates/optimism/cli/src/lib.rs | 4 +- crates/optimism/evm/Cargo.toml | 2 +- crates/optimism/node/Cargo.toml | 6 +- crates/optimism/node/src/lib.rs | 2 +- crates/optimism/node/src/node.rs | 2 +- crates/optimism/node/src/txpool.rs | 4 +- crates/optimism/node/tests/e2e/utils.rs | 4 +- crates/optimism/node/tests/it/builder.rs | 2 +- crates/optimism/payload/Cargo.toml | 4 +- crates/optimism/payload/src/builder.rs | 2 +- crates/optimism/rpc/Cargo.toml | 4 +- crates/optimism/rpc/src/error.rs | 2 +- crates/optimism/rpc/src/eth/block.rs | 2 +- crates/optimism/rpc/src/eth/receipt.rs | 6 +- 23 files changed, 120 insertions(+), 120 deletions(-) diff --git a/.github/assets/check_wasm.sh b/.github/assets/check_wasm.sh index b313c32ce33d4..8d53f457af915 100755 --- a/.github/assets/check_wasm.sh +++ b/.github/assets/check_wasm.sh @@ -35,7 +35,6 @@ exclude_crates=( reth-ethereum-payload-builder reth-etl reth-evm-ethereum - reth-evm-optimism reth-execution-errors reth-exex reth-exex-test-utils @@ -49,8 +48,9 @@ exclude_crates=( reth-node-ethereum reth-node-events reth-node-metrics - reth-node-optimism reth-optimism-cli + reth-optimism-evm + reth-optimism-node reth-optimism-payload-builder reth-optimism-rpc reth-payload-builder diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index 103a87706bcae..82bd5705a3200 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -47,7 +47,7 @@ jobs: name: Run tests run: | cargo nextest run \ - --locked -p reth-node-optimism --features "optimism" + --locked -p reth-optimism-node --features "optimism" integration-success: name: integration success diff --git a/Cargo.lock b/Cargo.lock index c6ab33da1f554..81d8e8f9d4c5e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5213,8 +5213,8 @@ dependencies = [ "clap", "reth-cli-util", "reth-node-builder", - "reth-node-optimism", "reth-optimism-cli", + "reth-optimism-node", "reth-optimism-rpc", "reth-provider", ] @@ -7337,30 +7337,6 @@ dependencies = [ "serde_json", ] -[[package]] -name = "reth-evm-optimism" -version = "1.0.7" -dependencies = [ - "alloy-consensus", - "alloy-genesis", - "alloy-primitives", - "reth-chainspec", - "reth-ethereum-forks", - "reth-evm", - "reth-execution-errors", - "reth-execution-types", - "reth-optimism-chainspec", - "reth-optimism-consensus", - "reth-optimism-forks", - "reth-primitives", 
- "reth-prune-types", - "reth-revm", - "revm", - "revm-primitives", - "thiserror", - "tracing", -] - [[package]] name = "reth-execution-errors" version = "1.0.7" @@ -7973,58 +7949,6 @@ dependencies = [ "vergen", ] -[[package]] -name = "reth-node-optimism" -version = "1.0.7" -dependencies = [ - "alloy-genesis", - "alloy-primitives", - "alloy-rpc-types-engine", - "async-trait", - "clap", - "eyre", - "jsonrpsee", - "jsonrpsee-types", - "op-alloy-rpc-types-engine", - "parking_lot 0.12.3", - "reqwest", - "reth", - "reth-auto-seal-consensus", - "reth-basic-payload-builder", - "reth-beacon-consensus", - "reth-chainspec", - "reth-consensus", - "reth-db", - "reth-discv5", - "reth-e2e-test-utils", - "reth-evm", - "reth-evm-optimism", - "reth-network", - "reth-node-api", - "reth-node-builder", - "reth-optimism-chainspec", - "reth-optimism-consensus", - "reth-optimism-forks", - "reth-optimism-payload-builder", - "reth-optimism-rpc", - "reth-payload-builder", - "reth-primitives", - "reth-provider", - "reth-revm", - "reth-rpc", - "reth-rpc-eth-api", - "reth-rpc-eth-types", - "reth-rpc-types", - "reth-rpc-types-compat", - "reth-tracing", - "reth-transaction-pool", - "serde", - "serde_json", - "thiserror", - "tokio", - "tracing", -] - [[package]] name = "reth-node-types" version = "1.0.7" @@ -8071,14 +7995,14 @@ dependencies = [ "reth-db-common", "reth-downloaders", "reth-errors", - "reth-evm-optimism", "reth-execution-types", "reth-network-p2p", "reth-node-builder", "reth-node-core", "reth-node-events", - "reth-node-optimism", "reth-optimism-chainspec", + "reth-optimism-evm", + "reth-optimism-node", "reth-optimism-primitives", "reth-primitives", "reth-provider", @@ -8109,6 +8033,30 @@ dependencies = [ "tracing", ] +[[package]] +name = "reth-optimism-evm" +version = "1.0.7" +dependencies = [ + "alloy-consensus", + "alloy-genesis", + "alloy-primitives", + "reth-chainspec", + "reth-ethereum-forks", + "reth-evm", + "reth-execution-errors", + "reth-execution-types", + "reth-optimism-chainspec", + "reth-optimism-consensus", + "reth-optimism-forks", + "reth-primitives", + "reth-prune-types", + "reth-revm", + "revm", + "revm-primitives", + "thiserror", + "tracing", +] + [[package]] name = "reth-optimism-forks" version = "1.0.7" @@ -8120,6 +8068,58 @@ dependencies = [ "serde", ] +[[package]] +name = "reth-optimism-node" +version = "1.0.7" +dependencies = [ + "alloy-genesis", + "alloy-primitives", + "alloy-rpc-types-engine", + "async-trait", + "clap", + "eyre", + "jsonrpsee", + "jsonrpsee-types", + "op-alloy-rpc-types-engine", + "parking_lot 0.12.3", + "reqwest", + "reth", + "reth-auto-seal-consensus", + "reth-basic-payload-builder", + "reth-beacon-consensus", + "reth-chainspec", + "reth-consensus", + "reth-db", + "reth-discv5", + "reth-e2e-test-utils", + "reth-evm", + "reth-network", + "reth-node-api", + "reth-node-builder", + "reth-optimism-chainspec", + "reth-optimism-consensus", + "reth-optimism-evm", + "reth-optimism-forks", + "reth-optimism-payload-builder", + "reth-optimism-rpc", + "reth-payload-builder", + "reth-primitives", + "reth-provider", + "reth-revm", + "reth-rpc", + "reth-rpc-eth-api", + "reth-rpc-eth-types", + "reth-rpc-types", + "reth-rpc-types-compat", + "reth-tracing", + "reth-transaction-pool", + "serde", + "serde_json", + "thiserror", + "tokio", + "tracing", +] + [[package]] name = "reth-optimism-payload-builder" version = "1.0.7" @@ -8132,9 +8132,9 @@ dependencies = [ "reth-chain-state", "reth-chainspec", "reth-evm", - "reth-evm-optimism", "reth-execution-types", "reth-optimism-consensus", + 
"reth-optimism-evm", "reth-optimism-forks", "reth-payload-builder", "reth-payload-primitives", @@ -8176,12 +8176,12 @@ dependencies = [ "reqwest", "reth-chainspec", "reth-evm", - "reth-evm-optimism", "reth-network-api", "reth-node-api", "reth-node-builder", "reth-optimism-chainspec", "reth-optimism-consensus", + "reth-optimism-evm", "reth-optimism-forks", "reth-primitives", "reth-provider", diff --git a/Cargo.toml b/Cargo.toml index 0aae5f87355e0..7387029568d7e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -333,7 +333,7 @@ reth-ethereum-payload-builder = { path = "crates/ethereum/payload" } reth-etl = { path = "crates/etl" } reth-evm = { path = "crates/evm" } reth-evm-ethereum = { path = "crates/ethereum/evm" } -reth-evm-optimism = { path = "crates/optimism/evm" } +reth-optimism-evm = { path = "crates/optimism/evm" } reth-execution-errors = { path = "crates/evm/execution-errors" } reth-execution-types = { path = "crates/evm/execution-types" } reth-exex = { path = "crates/exex/exex" } @@ -360,7 +360,7 @@ reth-node-core = { path = "crates/node/core" } reth-node-ethereum = { path = "crates/ethereum/node" } reth-node-events = { path = "crates/node/events" } reth-node-metrics = { path = "crates/node/metrics" } -reth-node-optimism = { path = "crates/optimism/node" } +reth-optimism-node = { path = "crates/optimism/node" } reth-node-types = { path = "crates/node/types" } reth-optimism-chainspec = { path = "crates/optimism/chainspec" } reth-optimism-cli = { path = "crates/optimism/cli" } diff --git a/crates/node/builder/src/builder/mod.rs b/crates/node/builder/src/builder/mod.rs index 2bd43d3c6ac79..61141f0677cce 100644 --- a/crates/node/builder/src/builder/mod.rs +++ b/crates/node/builder/src/builder/mod.rs @@ -80,7 +80,7 @@ pub type RethFullAdapter = FullNodeTypesAdapter< /// configured components and can interact with the node. /// /// There are convenience functions for networks that come with a preset of types and components via -/// the [Node] trait, see `reth_node_ethereum::EthereumNode` or `reth_node_optimism::OptimismNode`. +/// the [Node] trait, see `reth_node_ethereum::EthereumNode` or `reth_optimism_node::OptimismNode`. /// /// The [`NodeBuilder::node`] function configures the node's types and components in one step. 
/// diff --git a/crates/optimism/bin/Cargo.toml b/crates/optimism/bin/Cargo.toml index eee07c0be876b..08e13ae3800ed 100644 --- a/crates/optimism/bin/Cargo.toml +++ b/crates/optimism/bin/Cargo.toml @@ -14,7 +14,7 @@ reth-cli-util.workspace = true reth-optimism-cli.workspace = true reth-provider.workspace = true reth-optimism-rpc.workspace = true -reth-node-optimism.workspace = true +reth-optimism-node.workspace = true clap = { workspace = true, features = ["derive", "env"] } @@ -28,9 +28,9 @@ jemalloc = ["reth-cli-util/jemalloc"] jemalloc-prof = ["reth-cli-util/jemalloc-prof"] tracy-allocator = ["reth-cli-util/tracy-allocator"] -asm-keccak = ["reth-optimism-cli/asm-keccak", "reth-node-optimism/asm-keccak"] +asm-keccak = ["reth-optimism-cli/asm-keccak", "reth-optimism-node/asm-keccak"] -optimism = ["reth-optimism-cli/optimism", "reth-node-optimism/optimism"] +optimism = ["reth-optimism-cli/optimism", "reth-optimism-node/optimism"] [[bin]] name = "op-reth" diff --git a/crates/optimism/bin/src/main.rs b/crates/optimism/bin/src/main.rs index f5a88798a06b4..58d5ba7a438d4 100644 --- a/crates/optimism/bin/src/main.rs +++ b/crates/optimism/bin/src/main.rs @@ -5,8 +5,8 @@ use clap::Parser; use reth_node_builder::{engine_tree_config::TreeConfig, EngineNodeLauncher}; -use reth_node_optimism::{args::RollupArgs, node::OptimismAddOns, OptimismNode}; use reth_optimism_cli::{chainspec::OpChainSpecParser, Cli}; +use reth_optimism_node::{args::RollupArgs, node::OptimismAddOns, OptimismNode}; use reth_optimism_rpc::SequencerClient; use reth_provider::providers::BlockchainProvider2; diff --git a/crates/optimism/cli/Cargo.toml b/crates/optimism/cli/Cargo.toml index e54ab9be7c40a..99d1641e36438 100644 --- a/crates/optimism/cli/Cargo.toml +++ b/crates/optimism/cli/Cargo.toml @@ -24,7 +24,7 @@ reth-stages.workspace = true reth-static-file.workspace = true reth-execution-types.workspace = true reth-node-core.workspace = true -reth-node-optimism.workspace = true +reth-optimism-node.workspace = true reth-primitives.workspace = true ## optimism @@ -37,7 +37,7 @@ reth-node-events.workspace = true reth-network-p2p.workspace = true reth-errors.workspace = true reth-config.workspace = true -reth-evm-optimism.workspace = true +reth-optimism-evm.workspace = true reth-cli.workspace = true reth-cli-runner.workspace = true reth-node-builder.workspace = true @@ -71,14 +71,14 @@ reth-cli-commands.workspace = true [features] optimism = [ "reth-primitives/optimism", - "reth-evm-optimism/optimism", + "reth-optimism-evm/optimism", "reth-provider/optimism", "reth-node-core/optimism", - "reth-node-optimism/optimism", + "reth-optimism-node/optimism", ] asm-keccak = [ "alloy-primitives/asm-keccak", "reth-node-core/asm-keccak", - "reth-node-optimism/asm-keccak", + "reth-optimism-node/asm-keccak", "reth-primitives/asm-keccak", ] diff --git a/crates/optimism/cli/src/commands/build_pipeline.rs b/crates/optimism/cli/src/commands/build_pipeline.rs index b546a1a515a26..b2ac97eef2d17 100644 --- a/crates/optimism/cli/src/commands/build_pipeline.rs +++ b/crates/optimism/cli/src/commands/build_pipeline.rs @@ -8,13 +8,13 @@ use reth_downloaders::{ headers::reverse_headers::ReverseHeadersDownloaderBuilder, }; use reth_errors::ProviderError; -use reth_evm_optimism::OpExecutorProvider; use reth_network_p2p::{ bodies::downloader::BodyDownloader, headers::downloader::{HeaderDownloader, SyncTarget}, }; use reth_node_builder::NodeTypesWithDB; use reth_node_events::node::NodeEvent; +use reth_optimism_evm::OpExecutorProvider; use 
reth_provider::{BlockNumReader, ChainSpecProvider, HeaderProvider, ProviderFactory}; use reth_prune::PruneModes; use reth_stages::{sets::DefaultStages, Pipeline, StageSet}; diff --git a/crates/optimism/cli/src/lib.rs b/crates/optimism/cli/src/lib.rs index 137463ee2ffa3..ea8a77087fafa 100644 --- a/crates/optimism/cli/src/lib.rs +++ b/crates/optimism/cli/src/lib.rs @@ -40,13 +40,13 @@ use reth_cli::chainspec::ChainSpecParser; use reth_cli_commands::node::NoArgs; use reth_cli_runner::CliRunner; use reth_db::DatabaseEnv; -use reth_evm_optimism::OpExecutorProvider; use reth_node_builder::{NodeBuilder, WithLaunchContext}; use reth_node_core::{ args::LogArgs, version::{LONG_VERSION, SHORT_VERSION}, }; -use reth_node_optimism::OptimismNode; +use reth_optimism_evm::OpExecutorProvider; +use reth_optimism_node::OptimismNode; use reth_tracing::FileWorkerGuard; use tracing::info; diff --git a/crates/optimism/evm/Cargo.toml b/crates/optimism/evm/Cargo.toml index b0b66cd4235e7..ae02482623388 100644 --- a/crates/optimism/evm/Cargo.toml +++ b/crates/optimism/evm/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "reth-evm-optimism" +name = "reth-optimism-evm" version.workspace = true edition.workspace = true rust-version.workspace = true diff --git a/crates/optimism/node/Cargo.toml b/crates/optimism/node/Cargo.toml index 30e3fde890f2c..4239f4ad9ce70 100644 --- a/crates/optimism/node/Cargo.toml +++ b/crates/optimism/node/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "reth-node-optimism" +name = "reth-optimism-node" version.workspace = true edition.workspace = true rust-version.workspace = true @@ -36,7 +36,7 @@ reth-rpc.workspace = true # op-reth reth-optimism-payload-builder.workspace = true -reth-evm-optimism.workspace = true +reth-optimism-evm.workspace = true reth-optimism-rpc.workspace = true reth-optimism-chainspec.workspace = true reth-optimism-consensus.workspace = true @@ -81,7 +81,7 @@ optimism = [ "reth-primitives/optimism", "reth-provider/optimism", "reth-rpc-types-compat/optimism", - "reth-evm-optimism/optimism", + "reth-optimism-evm/optimism", "reth-optimism-payload-builder/optimism", "reth-beacon-consensus/optimism", "reth-revm/optimism", diff --git a/crates/optimism/node/src/lib.rs b/crates/optimism/node/src/lib.rs index 9d7d6f3554c85..768f4d94efd5f 100644 --- a/crates/optimism/node/src/lib.rs +++ b/crates/optimism/node/src/lib.rs @@ -26,4 +26,4 @@ pub use reth_optimism_payload_builder::{ OptimismBuiltPayload, OptimismPayloadBuilder, OptimismPayloadBuilderAttributes, }; -pub use reth_evm_optimism::*; +pub use reth_optimism_evm::*; diff --git a/crates/optimism/node/src/node.rs b/crates/optimism/node/src/node.rs index 8614e8d60db1a..2a28d44e2a0d6 100644 --- a/crates/optimism/node/src/node.rs +++ b/crates/optimism/node/src/node.rs @@ -5,7 +5,6 @@ use std::sync::Arc; use reth_basic_payload_builder::{BasicPayloadJobGenerator, BasicPayloadJobGeneratorConfig}; use reth_chainspec::ChainSpec; use reth_evm::ConfigureEvm; -use reth_evm_optimism::{OpExecutorProvider, OptimismEvmConfig}; use reth_network::{NetworkHandle, NetworkManager}; use reth_node_api::{EngineValidator, FullNodeComponents, NodeAddOns}; use reth_node_builder::{ @@ -18,6 +17,7 @@ use reth_node_builder::{ }; use reth_optimism_chainspec::OpChainSpec; use reth_optimism_consensus::OptimismBeaconConsensus; +use reth_optimism_evm::{OpExecutorProvider, OptimismEvmConfig}; use reth_optimism_rpc::OpEthApi; use reth_payload_builder::{PayloadBuilderHandle, PayloadBuilderService}; use reth_primitives::Header; diff --git 
a/crates/optimism/node/src/txpool.rs b/crates/optimism/node/src/txpool.rs index 79d8c0314deb7..a27e0d4ecb75e 100644 --- a/crates/optimism/node/src/txpool.rs +++ b/crates/optimism/node/src/txpool.rs @@ -1,7 +1,7 @@ //! OP transaction pool types use parking_lot::RwLock; use reth_chainspec::ChainSpec; -use reth_evm_optimism::RethL1BlockInfo; +use reth_optimism_evm::RethL1BlockInfo; use reth_primitives::{Block, GotExpected, InvalidTransactionError, SealedBlock}; use reth_provider::{BlockReaderIdExt, StateProviderFactory}; use reth_revm::L1BlockInfo; @@ -98,7 +98,7 @@ where /// Update the L1 block info. fn update_l1_block_info(&self, block: &Block) { self.block_info.timestamp.store(block.timestamp, Ordering::Relaxed); - if let Ok(cost_addition) = reth_evm_optimism::extract_l1_info(block) { + if let Ok(cost_addition) = reth_optimism_evm::extract_l1_info(block) { *self.block_info.l1_block_info.write() = cost_addition; } } diff --git a/crates/optimism/node/tests/e2e/utils.rs b/crates/optimism/node/tests/e2e/utils.rs index 6b8e07a42e38b..a8dda7b9956be 100644 --- a/crates/optimism/node/tests/e2e/utils.rs +++ b/crates/optimism/node/tests/e2e/utils.rs @@ -5,10 +5,10 @@ use alloy_primitives::{Address, B256}; use reth::{rpc::types::engine::PayloadAttributes, tasks::TaskManager}; use reth_chainspec::ChainSpecBuilder; use reth_e2e_test_utils::{transaction::TransactionTestContext, wallet::Wallet, NodeHelperType}; -use reth_node_optimism::{ +use reth_optimism_chainspec::BASE_MAINNET; +use reth_optimism_node::{ node::OptimismAddOns, OptimismBuiltPayload, OptimismNode, OptimismPayloadBuilderAttributes, }; -use reth_optimism_chainspec::BASE_MAINNET; use reth_payload_builder::EthPayloadBuilderAttributes; use tokio::sync::Mutex; diff --git a/crates/optimism/node/tests/it/builder.rs b/crates/optimism/node/tests/it/builder.rs index cc9c772c027be..8d5cc1554e3db 100644 --- a/crates/optimism/node/tests/it/builder.rs +++ b/crates/optimism/node/tests/it/builder.rs @@ -3,7 +3,7 @@ use reth_db::test_utils::create_test_rw_db; use reth_node_api::FullNodeComponents; use reth_node_builder::{NodeBuilder, NodeConfig}; -use reth_node_optimism::node::{OptimismAddOns, OptimismNode}; +use reth_optimism_node::node::{OptimismAddOns, OptimismNode}; #[test] fn test_basic_setup() { diff --git a/crates/optimism/payload/Cargo.toml b/crates/optimism/payload/Cargo.toml index e58b26ee83dfd..047879929be6f 100644 --- a/crates/optimism/payload/Cargo.toml +++ b/crates/optimism/payload/Cargo.toml @@ -28,8 +28,8 @@ reth-trie.workspace = true reth-chain-state.workspace = true # op-reth -reth-evm-optimism.workspace = true reth-optimism-consensus.workspace = true +reth-optimism-evm.workspace = true reth-optimism-forks.workspace = true # ethereum @@ -51,6 +51,6 @@ optimism = [ "reth-primitives/optimism", "reth-provider/optimism", "reth-rpc-types-compat/optimism", - "reth-evm-optimism/optimism", + "reth-optimism-evm/optimism", "reth-revm/optimism", ] diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index 84e69cb92e73e..878e9cf224de9 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -224,7 +224,7 @@ where // blocks will always have at least a single transaction in them (the L1 info transaction), // so we can safely assume that this will always be triggered upon the transition and that // the above check for empty blocks will never be hit on OP chains. 
- reth_evm_optimism::ensure_create2_deployer( + reth_optimism_evm::ensure_create2_deployer( chain_spec.clone(), attributes.payload_attributes.timestamp, &mut db, diff --git a/crates/optimism/rpc/Cargo.toml b/crates/optimism/rpc/Cargo.toml index e82ff18276524..97ac850c4f062 100644 --- a/crates/optimism/rpc/Cargo.toml +++ b/crates/optimism/rpc/Cargo.toml @@ -28,8 +28,8 @@ reth-node-builder.workspace = true reth-chainspec.workspace = true # op-reth -reth-evm-optimism.workspace = true reth-optimism-consensus.workspace = true +reth-optimism-evm.workspace = true reth-optimism-forks.workspace = true # ethereum @@ -60,7 +60,7 @@ reth-optimism-chainspec.workspace = true [features] optimism = [ - "reth-evm-optimism/optimism", + "reth-optimism-evm/optimism", "reth-primitives/optimism", "reth-provider/optimism", "reth-rpc-eth-api/optimism", diff --git a/crates/optimism/rpc/src/error.rs b/crates/optimism/rpc/src/error.rs index 35bc147986a97..b4d349e1cc455 100644 --- a/crates/optimism/rpc/src/error.rs +++ b/crates/optimism/rpc/src/error.rs @@ -2,7 +2,7 @@ use alloy_rpc_types::error::EthRpcErrorCode; use jsonrpsee_types::error::INTERNAL_ERROR_CODE; -use reth_evm_optimism::OptimismBlockExecutionError; +use reth_optimism_evm::OptimismBlockExecutionError; use reth_primitives::revm_primitives::{InvalidTransaction, OptimismInvalidTransaction}; use reth_rpc_eth_api::AsEthApiError; use reth_rpc_eth_types::EthApiError; diff --git a/crates/optimism/rpc/src/eth/block.rs b/crates/optimism/rpc/src/eth/block.rs index 41267a1e78231..da799e140d8e8 100644 --- a/crates/optimism/rpc/src/eth/block.rs +++ b/crates/optimism/rpc/src/eth/block.rs @@ -44,7 +44,7 @@ where let block = block.unseal(); let l1_block_info = - reth_evm_optimism::extract_l1_info(&block).map_err(OpEthApiError::from)?; + reth_optimism_evm::extract_l1_info(&block).map_err(OpEthApiError::from)?; return block .body diff --git a/crates/optimism/rpc/src/eth/receipt.rs b/crates/optimism/rpc/src/eth/receipt.rs index a98ee68d6f14e..bfd521635bcec 100644 --- a/crates/optimism/rpc/src/eth/receipt.rs +++ b/crates/optimism/rpc/src/eth/receipt.rs @@ -6,8 +6,8 @@ use op_alloy_rpc_types::{ receipt::L1BlockInfo, OpTransactionReceipt, OptimismTransactionReceiptFields, }; use reth_chainspec::ChainSpec; -use reth_evm_optimism::RethL1BlockInfo; use reth_node_api::{FullNodeComponents, NodeTypes}; +use reth_optimism_evm::RethL1BlockInfo; use reth_optimism_forks::OptimismHardforks; use reth_primitives::{Receipt, TransactionMeta, TransactionSigned, TxType}; use reth_provider::ChainSpecProvider; @@ -42,7 +42,7 @@ where let block = block.unseal(); let l1_block_info = - reth_evm_optimism::extract_l1_info(&block).map_err(OpEthApiError::from)?; + reth_optimism_evm::extract_l1_info(&block).map_err(OpEthApiError::from)?; Ok(OpReceiptBuilder::new( &self.inner.provider().chain_spec(), @@ -355,7 +355,7 @@ mod test { }; let l1_block_info = - reth_evm_optimism::extract_l1_info(&block).expect("should extract l1 info"); + reth_optimism_evm::extract_l1_info(&block).expect("should extract l1 info"); // test assert!(OP_MAINNET.is_fjord_active_at_timestamp(BLOCK_124665056_TIMESTAMP)); From b090578b775bab34024abe815839cb308aeb372d Mon Sep 17 00:00:00 2001 From: caglarkaya Date: Sat, 28 Sep 2024 15:21:20 +0300 Subject: [PATCH 60/84] fix: improve ecies error fatal variants (#11303) --- crates/net/ecies/src/error.rs | 5 +++++ crates/net/network/src/error.rs | 25 +++++++++++++++++++++++-- 2 files changed, 28 insertions(+), 2 deletions(-) diff --git a/crates/net/ecies/src/error.rs 
b/crates/net/ecies/src/error.rs index f476e00c4c1d6..79965f73303f3 100644 --- a/crates/net/ecies/src/error.rs +++ b/crates/net/ecies/src/error.rs @@ -13,6 +13,11 @@ impl ECIESError { pub fn into_inner(self) -> ECIESErrorImpl { *self.inner } + + /// Returns a reference to the inner error + pub const fn inner(&self) -> &ECIESErrorImpl { + &self.inner + } } impl fmt::Display for ECIESError { diff --git a/crates/net/network/src/error.rs b/crates/net/network/src/error.rs index 055bcddd647b7..2709c4a290758 100644 --- a/crates/net/network/src/error.rs +++ b/crates/net/network/src/error.rs @@ -3,6 +3,7 @@ use std::{fmt, io, io::ErrorKind, net::SocketAddr}; use reth_dns_discovery::resolver::ResolveError; +use reth_ecies::ECIESErrorImpl; use reth_eth_wire::{ errors::{EthHandshakeError, EthStreamError, P2PHandshakeError, P2PStreamError}, DisconnectReason, @@ -206,7 +207,17 @@ impl SessionError for PendingSessionHandshakeError { fn merits_discovery_ban(&self) -> bool { match self { Self::Eth(eth) => eth.merits_discovery_ban(), - Self::Ecies(_) => true, + Self::Ecies(err) => matches!( + err.inner(), + ECIESErrorImpl::TagCheckDecryptFailed | + ECIESErrorImpl::TagCheckHeaderFailed | + ECIESErrorImpl::TagCheckBodyFailed | + ECIESErrorImpl::InvalidAuthData | + ECIESErrorImpl::InvalidAckData | + ECIESErrorImpl::InvalidHeader | + ECIESErrorImpl::Secp256k1(_) | + ECIESErrorImpl::InvalidHandshake { .. } + ), Self::Timeout => false, } } @@ -214,7 +225,17 @@ impl SessionError for PendingSessionHandshakeError { fn is_fatal_protocol_error(&self) -> bool { match self { Self::Eth(eth) => eth.is_fatal_protocol_error(), - Self::Ecies(_) => true, + Self::Ecies(err) => matches!( + err.inner(), + ECIESErrorImpl::TagCheckDecryptFailed | + ECIESErrorImpl::TagCheckHeaderFailed | + ECIESErrorImpl::TagCheckBodyFailed | + ECIESErrorImpl::InvalidAuthData | + ECIESErrorImpl::InvalidAckData | + ECIESErrorImpl::InvalidHeader | + ECIESErrorImpl::Secp256k1(_) | + ECIESErrorImpl::InvalidHandshake { .. 
} + ), Self::Timeout => false, } } From 2aa3dd0fd8b02b067eb39631bc30de47c434f0f0 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Sat, 28 Sep 2024 16:06:52 +0300 Subject: [PATCH 61/84] feat: use `OpChainSpec` in `OptimismNode` and its components (#11304) --- Cargo.lock | 2 + crates/chainspec/src/api.rs | 9 ++ crates/chainspec/src/spec.rs | 4 +- crates/cli/commands/src/common.rs | 8 +- crates/cli/commands/src/db/checksum.rs | 9 +- crates/cli/commands/src/db/get.rs | 14 +-- crates/cli/commands/src/db/list.rs | 4 +- crates/cli/commands/src/db/mod.rs | 6 +- crates/cli/commands/src/db/stats.rs | 11 +-- crates/cli/commands/src/dump_genesis.rs | 4 +- crates/cli/commands/src/import.rs | 12 +-- crates/cli/commands/src/init_cmd.rs | 4 +- crates/cli/commands/src/init_state.rs | 10 +- crates/cli/commands/src/node.rs | 10 +- crates/cli/commands/src/p2p/mod.rs | 8 +- crates/cli/commands/src/prune.rs | 4 +- crates/cli/commands/src/recover/mod.rs | 4 +- .../cli/commands/src/recover/storage_tries.rs | 4 +- crates/cli/commands/src/stage/drop.rs | 6 +- .../cli/commands/src/stage/dump/execution.rs | 12 ++- .../src/stage/dump/hashing_account.rs | 14 +-- .../src/stage/dump/hashing_storage.rs | 14 +-- crates/cli/commands/src/stage/dump/merkle.rs | 14 +-- crates/cli/commands/src/stage/dump/mod.rs | 12 ++- crates/cli/commands/src/stage/mod.rs | 6 +- crates/cli/commands/src/stage/run.rs | 8 +- crates/cli/commands/src/stage/unwind.rs | 10 +- crates/e2e-test-utils/src/lib.rs | 9 +- crates/e2e-test-utils/src/node.rs | 4 +- crates/e2e-test-utils/src/rpc.rs | 4 +- .../engine/invalid-block-hooks/src/witness.rs | 14 ++- crates/node/builder/src/builder/mod.rs | 4 +- crates/node/builder/src/launch/common.rs | 6 +- crates/node/builder/src/launch/engine.rs | 15 ++- crates/node/core/src/args/network.rs | 9 +- crates/node/core/src/node_config.rs | 35 ++++--- crates/optimism/chainspec/Cargo.toml | 2 + crates/optimism/chainspec/src/lib.rs | 94 ++++++++++++++++++- crates/optimism/cli/src/chainspec.rs | 5 +- .../cli/src/commands/build_pipeline.rs | 4 +- crates/optimism/cli/src/commands/import.rs | 4 +- .../cli/src/commands/import_receipts.rs | 14 +-- .../cli/src/commands/init_state/mod.rs | 4 +- crates/optimism/cli/src/commands/mod.rs | 7 +- crates/optimism/cli/src/lib.rs | 16 ++-- crates/optimism/consensus/Cargo.toml | 1 + crates/optimism/consensus/src/lib.rs | 12 +-- crates/optimism/evm/src/execute.rs | 27 +++--- crates/optimism/evm/src/l1.rs | 3 +- crates/optimism/node/src/engine.rs | 5 +- crates/optimism/node/src/node.rs | 61 +++++------- crates/optimism/node/tests/e2e/utils.rs | 6 +- crates/optimism/node/tests/it/builder.rs | 5 +- crates/optimism/payload/Cargo.toml | 1 + crates/optimism/payload/src/builder.rs | 7 +- crates/optimism/payload/src/payload.rs | 7 +- crates/optimism/rpc/Cargo.toml | 1 + crates/optimism/rpc/src/eth/block.rs | 5 +- crates/optimism/rpc/src/eth/call.rs | 4 +- crates/optimism/rpc/src/eth/mod.rs | 4 +- crates/optimism/rpc/src/eth/receipt.rs | 5 +- crates/revm/src/state_change.rs | 5 +- crates/storage/db-common/src/db_tool/mod.rs | 7 +- crates/storage/db-common/src/init.rs | 8 +- crates/transaction-pool/src/maintain.rs | 16 +--- 65 files changed, 379 insertions(+), 284 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 81d8e8f9d4c5e..d402a96b58c61 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7970,6 +7970,7 @@ dependencies = [ "op-alloy-rpc-types", "reth-chainspec", "reth-ethereum-forks", + "reth-network-peers", "reth-optimism-forks", "reth-primitives-traits", "serde_json", @@ -8133,6 +8134,7 @@ 
dependencies = [ "reth-chainspec", "reth-evm", "reth-execution-types", + "reth-optimism-chainspec", "reth-optimism-consensus", "reth-optimism-evm", "reth-optimism-forks", diff --git a/crates/chainspec/src/api.rs b/crates/chainspec/src/api.rs index fb9744a53164a..fb64e08ae1ef8 100644 --- a/crates/chainspec/src/api.rs +++ b/crates/chainspec/src/api.rs @@ -1,9 +1,11 @@ use crate::{ChainSpec, DepositContract}; +use alloc::vec::Vec; use alloy_chains::Chain; use alloy_eips::eip1559::BaseFeeParams; use alloy_genesis::Genesis; use alloy_primitives::B256; use core::fmt::{Debug, Display}; +use reth_network_peers::NodeRecord; use reth_primitives_traits::Header; /// Trait representing type configuring a chain spec. @@ -41,6 +43,9 @@ pub trait EthChainSpec: Send + Sync + Unpin + Debug { /// The block gas limit. fn max_gas_limit(&self) -> u64; + + /// The bootnodes for the chain, if any. + fn bootnodes(&self) -> Option>; } impl EthChainSpec for ChainSpec { @@ -83,4 +88,8 @@ impl EthChainSpec for ChainSpec { fn max_gas_limit(&self) -> u64 { self.max_gas_limit } + + fn bootnodes(&self) -> Option> { + self.bootnodes() + } } diff --git a/crates/chainspec/src/spec.rs b/crates/chainspec/src/spec.rs index c62ed4f672234..c830e5117196d 100644 --- a/crates/chainspec/src/spec.rs +++ b/crates/chainspec/src/spec.rs @@ -825,13 +825,13 @@ fn into_optimism_chain_spec(genesis: Genesis) -> ChainSpec { } } -/// A trait for reading the current [`ChainSpec`]. +/// A trait for reading the current chainspec. #[auto_impl::auto_impl(&, Arc)] pub trait ChainSpecProvider: Send + Sync { /// The chain spec type. type ChainSpec: EthChainSpec + 'static; - /// Get an [`Arc`] to the [`ChainSpec`]. + /// Get an [`Arc`] to the chainspec. fn chain_spec(&self) -> Arc; } diff --git a/crates/cli/commands/src/common.rs b/crates/cli/commands/src/common.rs index 3b4a00b2d2b20..956a63a5aa0e2 100644 --- a/crates/cli/commands/src/common.rs +++ b/crates/cli/commands/src/common.rs @@ -3,7 +3,7 @@ use alloy_primitives::B256; use clap::Parser; use reth_beacon_consensus::EthBeaconConsensus; -use reth_chainspec::ChainSpec; +use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_cli::chainspec::ChainSpecParser; use reth_config::{config::EtlConfig, Config}; use reth_db::{init_db, open_db_read_only, DatabaseEnv}; @@ -50,14 +50,14 @@ pub struct EnvironmentArgs { pub db: DatabaseArgs, } -impl> EnvironmentArgs { +impl> EnvironmentArgs { /// Initializes environment according to [`AccessRights`] and returns an instance of /// [`Environment`]. 
pub fn init>( &self, access: AccessRights, ) -> eyre::Result> { - let data_dir = self.datadir.clone().resolve_datadir(self.chain.chain); + let data_dir = self.datadir.clone().resolve_datadir(self.chain.chain()); let db_path = data_dir.db(); let sf_path = data_dir.static_files(); @@ -93,7 +93,7 @@ impl> EnvironmentArgs { let provider_factory = self.create_provider_factory(&config, db, sfp)?; if access.is_read_write() { - debug!(target: "reth::cli", chain=%self.chain.chain, genesis=?self.chain.genesis_hash(), "Initializing genesis"); + debug!(target: "reth::cli", chain=%self.chain.chain(), genesis=?self.chain.genesis_hash(), "Initializing genesis"); init_genesis(&provider_factory)?; } diff --git a/crates/cli/commands/src/db/checksum.rs b/crates/cli/commands/src/db/checksum.rs index 7aeed6dfe1416..60ec09c9606e8 100644 --- a/crates/cli/commands/src/db/checksum.rs +++ b/crates/cli/commands/src/db/checksum.rs @@ -1,11 +1,12 @@ use crate::db::get::{maybe_json_value_parser, table_key}; use ahash::RandomState; use clap::Parser; -use reth_chainspec::ChainSpec; +use reth_chainspec::EthereumHardforks; use reth_db::{DatabaseEnv, RawKey, RawTable, RawValue, TableViewer, Tables}; use reth_db_api::{cursor::DbCursorRO, table::Table, transaction::DbTx}; use reth_db_common::DbTool; use reth_node_builder::{NodeTypesWithDB, NodeTypesWithDBAdapter, NodeTypesWithEngine}; +use reth_provider::providers::ProviderNodeTypes; use std::{ hash::{BuildHasher, Hasher}, sync::Arc, @@ -35,7 +36,7 @@ pub struct Command { impl Command { /// Execute `db checksum` command - pub fn execute>( + pub fn execute>( self, tool: &DbTool>>, ) -> eyre::Result<()> { @@ -63,9 +64,7 @@ impl ChecksumViewer<'_, N> { } } -impl> TableViewer<(u64, Duration)> - for ChecksumViewer<'_, N> -{ +impl TableViewer<(u64, Duration)> for ChecksumViewer<'_, N> { type Error = eyre::Report; fn view(&self) -> Result<(u64, Duration), Self::Error> { diff --git a/crates/cli/commands/src/db/get.rs b/crates/cli/commands/src/db/get.rs index 2734e1da18547..5b794feeada2d 100644 --- a/crates/cli/commands/src/db/get.rs +++ b/crates/cli/commands/src/db/get.rs @@ -1,6 +1,5 @@ -use alloy_primitives::BlockHash; +use alloy_primitives::{hex, BlockHash}; use clap::Parser; -use reth_chainspec::ChainSpec; use reth_db::{ static_file::{ColumnSelectorOne, ColumnSelectorTwo, HeaderMask, ReceiptMask, TransactionMask}, tables, RawKey, RawTable, Receipts, TableViewer, Transactions, @@ -8,8 +7,8 @@ use reth_db::{ use reth_db_api::table::{Decompress, DupSort, Table}; use reth_db_common::DbTool; use reth_node_builder::NodeTypesWithDB; -use reth_primitives::{hex, Header}; -use reth_provider::StaticFileProviderFactory; +use reth_primitives::Header; +use reth_provider::{providers::ProviderNodeTypes, StaticFileProviderFactory}; use reth_static_file_types::StaticFileSegment; use tracing::error; @@ -54,10 +53,7 @@ enum Subcommand { impl Command { /// Execute `db get` command - pub fn execute>( - self, - tool: &DbTool, - ) -> eyre::Result<()> { + pub fn execute(self, tool: &DbTool) -> eyre::Result<()> { match self.subcommand { Subcommand::Mdbx { table, key, subkey, raw } => { table.view(&GetValueViewer { tool, key, subkey, raw })? 
@@ -148,7 +144,7 @@ struct GetValueViewer<'a, N: NodeTypesWithDB> { raw: bool, } -impl> TableViewer<()> for GetValueViewer<'_, N> { +impl TableViewer<()> for GetValueViewer<'_, N> { type Error = eyre::Report; fn view(&self) -> Result<(), Self::Error> { diff --git a/crates/cli/commands/src/db/list.rs b/crates/cli/commands/src/db/list.rs index 3dfa4f3884868..63eca1d8683b9 100644 --- a/crates/cli/commands/src/db/list.rs +++ b/crates/cli/commands/src/db/list.rs @@ -2,7 +2,7 @@ use super::tui::DbListTUI; use alloy_primitives::hex; use clap::Parser; use eyre::WrapErr; -use reth_chainspec::ChainSpec; +use reth_chainspec::EthereumHardforks; use reth_db::{DatabaseEnv, RawValue, TableViewer, Tables}; use reth_db_api::{database::Database, table::Table}; use reth_db_common::{DbTool, ListFilter}; @@ -53,7 +53,7 @@ pub struct Command { impl Command { /// Execute `db list` command - pub fn execute>( + pub fn execute>( self, tool: &DbTool>>, ) -> eyre::Result<()> { diff --git a/crates/cli/commands/src/db/mod.rs b/crates/cli/commands/src/db/mod.rs index 6d48256101fb9..be1b117d9e8c5 100644 --- a/crates/cli/commands/src/db/mod.rs +++ b/crates/cli/commands/src/db/mod.rs @@ -1,6 +1,6 @@ use crate::common::{AccessRights, Environment, EnvironmentArgs}; use clap::{Parser, Subcommand}; -use reth_chainspec::ChainSpec; +use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_cli::chainspec::ChainSpecParser; use reth_db::version::{get_db_version, DatabaseVersionError, DB_VERSION}; use reth_db_common::DbTool; @@ -63,12 +63,12 @@ macro_rules! db_ro_exec { }; } -impl> Command { +impl> Command { /// Execute `db` command pub async fn execute>( self, ) -> eyre::Result<()> { - let data_dir = self.env.datadir.clone().resolve_datadir(self.env.chain.chain); + let data_dir = self.env.datadir.clone().resolve_datadir(self.env.chain.chain()); let db_path = data_dir.db(); let static_files_path = data_dir.static_files(); diff --git a/crates/cli/commands/src/db/stats.rs b/crates/cli/commands/src/db/stats.rs index 76fb69b4a9566..ac36b866b07af 100644 --- a/crates/cli/commands/src/db/stats.rs +++ b/crates/cli/commands/src/db/stats.rs @@ -4,14 +4,14 @@ use comfy_table::{Cell, Row, Table as ComfyTable}; use eyre::WrapErr; use human_bytes::human_bytes; use itertools::Itertools; -use reth_chainspec::ChainSpec; +use reth_chainspec::EthereumHardforks; use reth_db::{mdbx, static_file::iter_static_files, DatabaseEnv, TableViewer, Tables}; use reth_db_api::database::Database; use reth_db_common::DbTool; use reth_fs_util as fs; use reth_node_builder::{NodeTypesWithDB, NodeTypesWithDBAdapter, NodeTypesWithEngine}; use reth_node_core::dirs::{ChainPath, DataDirPath}; -use reth_provider::providers::StaticFileProvider; +use reth_provider::providers::{ProviderNodeTypes, StaticFileProvider}; use reth_static_file_types::SegmentRangeInclusive; use std::{sync::Arc, time::Duration}; @@ -38,7 +38,7 @@ pub struct Command { impl Command { /// Execute `db stats` command - pub fn execute>( + pub fn execute>( self, data_dir: ChainPath, tool: &DbTool>>, @@ -325,10 +325,7 @@ impl Command { Ok(table) } - fn checksum_report>( - &self, - tool: &DbTool, - ) -> eyre::Result { + fn checksum_report(&self, tool: &DbTool) -> eyre::Result { let mut table = ComfyTable::new(); table.load_preset(comfy_table::presets::ASCII_MARKDOWN); table.set_header(vec![Cell::new("Table"), Cell::new("Checksum"), Cell::new("Elapsed")]); diff --git a/crates/cli/commands/src/dump_genesis.rs b/crates/cli/commands/src/dump_genesis.rs index 30d3bc9651df2..44c0b660fc5d9 100644 --- 
a/crates/cli/commands/src/dump_genesis.rs +++ b/crates/cli/commands/src/dump_genesis.rs @@ -2,7 +2,7 @@ use std::sync::Arc; use clap::Parser; -use reth_chainspec::ChainSpec; +use reth_chainspec::EthChainSpec; use reth_cli::chainspec::ChainSpecParser; /// Dumps genesis block JSON configuration to stdout @@ -21,7 +21,7 @@ pub struct DumpGenesisCommand { chain: Arc, } -impl> DumpGenesisCommand { +impl> DumpGenesisCommand { /// Execute the `dump-genesis` command pub async fn execute(self) -> eyre::Result<()> { println!("{}", serde_json::to_string_pretty(self.chain.genesis())?); diff --git a/crates/cli/commands/src/import.rs b/crates/cli/commands/src/import.rs index 5b35e8aa1c7b3..15407f29d7886 100644 --- a/crates/cli/commands/src/import.rs +++ b/crates/cli/commands/src/import.rs @@ -4,7 +4,7 @@ use alloy_primitives::B256; use clap::Parser; use futures::{Stream, StreamExt}; use reth_beacon_consensus::EthBeaconConsensus; -use reth_chainspec::ChainSpec; +use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_cli::chainspec::ChainSpecParser; use reth_config::Config; use reth_consensus::Consensus; @@ -20,12 +20,12 @@ use reth_network_p2p::{ bodies::downloader::BodyDownloader, headers::downloader::{HeaderDownloader, SyncTarget}, }; -use reth_node_builder::{NodeTypesWithDB, NodeTypesWithEngine}; +use reth_node_builder::NodeTypesWithEngine; use reth_node_core::version::SHORT_VERSION; use reth_node_events::node::NodeEvent; use reth_provider::{ - BlockNumReader, ChainSpecProvider, HeaderProvider, ProviderError, ProviderFactory, - StageCheckpointReader, + providers::ProviderNodeTypes, BlockNumReader, ChainSpecProvider, HeaderProvider, ProviderError, + ProviderFactory, StageCheckpointReader, }; use reth_prune::PruneModes; use reth_stages::{prelude::*, Pipeline, StageId, StageSet}; @@ -56,7 +56,7 @@ pub struct ImportCommand { path: PathBuf, } -impl> ImportCommand { +impl> ImportCommand { /// Execute `import` command pub async fn execute(self, executor: F) -> eyre::Result<()> where @@ -168,7 +168,7 @@ pub fn build_import_pipeline( executor: E, ) -> eyre::Result<(Pipeline, impl Stream)> where - N: NodeTypesWithDB, + N: ProviderNodeTypes, C: Consensus + 'static, E: BlockExecutorProvider, { diff --git a/crates/cli/commands/src/init_cmd.rs b/crates/cli/commands/src/init_cmd.rs index 63a8827eb24c4..5fde9ac0d0ba8 100644 --- a/crates/cli/commands/src/init_cmd.rs +++ b/crates/cli/commands/src/init_cmd.rs @@ -2,7 +2,7 @@ use crate::common::{AccessRights, Environment, EnvironmentArgs}; use clap::Parser; -use reth_chainspec::ChainSpec; +use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_cli::chainspec::ChainSpecParser; use reth_node_builder::NodeTypesWithEngine; use reth_provider::BlockHashReader; @@ -15,7 +15,7 @@ pub struct InitCommand { env: EnvironmentArgs, } -impl> InitCommand { +impl> InitCommand { /// Execute the `init` command pub async fn execute>( self, diff --git a/crates/cli/commands/src/init_state.rs b/crates/cli/commands/src/init_state.rs index 67955d714aff8..16e99f8fe976a 100644 --- a/crates/cli/commands/src/init_state.rs +++ b/crates/cli/commands/src/init_state.rs @@ -3,12 +3,12 @@ use crate::common::{AccessRights, Environment, EnvironmentArgs}; use alloy_primitives::B256; use clap::Parser; -use reth_chainspec::ChainSpec; +use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_cli::chainspec::ChainSpecParser; use reth_config::config::EtlConfig; use reth_db_common::init::init_from_state_dump; -use reth_node_builder::{NodeTypesWithDB, NodeTypesWithEngine}; -use 
reth_provider::ProviderFactory; +use reth_node_builder::NodeTypesWithEngine; +use reth_provider::{providers::ProviderNodeTypes, ProviderFactory}; use std::{fs::File, io::BufReader, path::PathBuf}; use tracing::info; @@ -40,7 +40,7 @@ pub struct InitStateCommand { pub state: PathBuf, } -impl> InitStateCommand { +impl> InitStateCommand { /// Execute the `init` command pub async fn execute>( self, @@ -59,7 +59,7 @@ impl> InitStateCommand { } /// Initialize chain with state at specific block, from a file with state dump. -pub fn init_at_state>( +pub fn init_at_state( state_dump_path: PathBuf, factory: ProviderFactory, etl_config: EtlConfig, diff --git a/crates/cli/commands/src/node.rs b/crates/cli/commands/src/node.rs index ae85e0acf73d7..abdb00dff2cbe 100644 --- a/crates/cli/commands/src/node.rs +++ b/crates/cli/commands/src/node.rs @@ -1,7 +1,7 @@ //! Main node command for launching a node use clap::{value_parser, Args, Parser}; -use reth_chainspec::ChainSpec; +use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_cli::chainspec::ChainSpecParser; use reth_cli_runner::CliContext; use reth_cli_util::parse_socket_address; @@ -112,7 +112,7 @@ pub struct NodeCommand< pub ext: Ext, } -impl> NodeCommand { +impl NodeCommand { /// Parsers only the default CLI arguments pub fn parse_args() -> Self { Self::parse() @@ -128,7 +128,11 @@ impl> NodeCommand { } } -impl, Ext: clap::Args + fmt::Debug> NodeCommand { +impl< + C: ChainSpecParser, + Ext: clap::Args + fmt::Debug, + > NodeCommand +{ /// Launches the node /// /// This transforms the node command into a node config and launches the node using the given diff --git a/crates/cli/commands/src/p2p/mod.rs b/crates/cli/commands/src/p2p/mod.rs index 6d40e414dd336..f36d78bd16469 100644 --- a/crates/cli/commands/src/p2p/mod.rs +++ b/crates/cli/commands/src/p2p/mod.rs @@ -5,7 +5,7 @@ use std::{path::PathBuf, sync::Arc}; use alloy_eips::BlockHashOrNumber; use backon::{ConstantBuilder, Retryable}; use clap::{Parser, Subcommand}; -use reth_chainspec::ChainSpec; +use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_cli::chainspec::ChainSpecParser; use reth_cli_util::{get_secret_key, hash_or_num_value_parser}; use reth_config::Config; @@ -73,10 +73,10 @@ pub enum Subcommands { Rlpx(rlpx::Command), } -impl> Command { +impl> Command { /// Execute `p2p` command pub async fn execute(self) -> eyre::Result<()> { - let data_dir = self.datadir.clone().resolve_datadir(self.chain.chain); + let data_dir = self.datadir.clone().resolve_datadir(self.chain.chain()); let config_path = self.config.clone().unwrap_or_else(|| data_dir.config()); // Load configuration @@ -100,7 +100,7 @@ impl> Command { let net = NetworkConfigBuilder::new(p2p_secret_key) .peer_config(config.peers_config_with_basic_nodes_from_file(None)) .external_ip_resolver(self.network.nat) - .disable_discv4_discovery_if(self.chain.chain.is_optimism()) + .disable_discv4_discovery_if(self.chain.chain().is_optimism()) .boot_nodes(boot_nodes.clone()) .apply(|builder| { self.network.discovery.apply_to_builder(builder, rlpx_socket, boot_nodes) diff --git a/crates/cli/commands/src/prune.rs b/crates/cli/commands/src/prune.rs index d19247e21a7ba..7dbb66fc2fafe 100644 --- a/crates/cli/commands/src/prune.rs +++ b/crates/cli/commands/src/prune.rs @@ -1,7 +1,7 @@ //! Command that runs pruning without any limits. 
use crate::common::{AccessRights, Environment, EnvironmentArgs}; use clap::Parser; -use reth_chainspec::ChainSpec; +use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_cli::chainspec::ChainSpecParser; use reth_node_builder::NodeTypesWithEngine; use reth_prune::PrunerBuilder; @@ -15,7 +15,7 @@ pub struct PruneCommand { env: EnvironmentArgs, } -impl> PruneCommand { +impl> PruneCommand { /// Execute the `prune` command pub async fn execute>( self, diff --git a/crates/cli/commands/src/recover/mod.rs b/crates/cli/commands/src/recover/mod.rs index 9bf81817458df..3216449e49b65 100644 --- a/crates/cli/commands/src/recover/mod.rs +++ b/crates/cli/commands/src/recover/mod.rs @@ -1,7 +1,7 @@ //! `reth recover` command. use clap::{Parser, Subcommand}; -use reth_chainspec::ChainSpec; +use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_cli::chainspec::ChainSpecParser; use reth_cli_runner::CliContext; use reth_node_builder::NodeTypesWithEngine; @@ -22,7 +22,7 @@ pub enum Subcommands { StorageTries(storage_tries::Command), } -impl> Command { +impl> Command { /// Execute `recover` command pub async fn execute>( self, diff --git a/crates/cli/commands/src/recover/storage_tries.rs b/crates/cli/commands/src/recover/storage_tries.rs index 65cb741f324e8..304075ede0c14 100644 --- a/crates/cli/commands/src/recover/storage_tries.rs +++ b/crates/cli/commands/src/recover/storage_tries.rs @@ -1,6 +1,6 @@ use crate::common::{AccessRights, Environment, EnvironmentArgs}; use clap::Parser; -use reth_chainspec::ChainSpec; +use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_cli::chainspec::ChainSpecParser; use reth_cli_runner::CliContext; use reth_db::tables; @@ -21,7 +21,7 @@ pub struct Command { env: EnvironmentArgs, } -impl> Command { +impl> Command { /// Execute `storage-tries` recovery command pub async fn execute>( self, diff --git a/crates/cli/commands/src/stage/drop.rs b/crates/cli/commands/src/stage/drop.rs index 6571cbaae8645..26165497d0b78 100644 --- a/crates/cli/commands/src/stage/drop.rs +++ b/crates/cli/commands/src/stage/drop.rs @@ -2,7 +2,7 @@ use crate::common::{AccessRights, Environment, EnvironmentArgs}; use clap::Parser; use itertools::Itertools; -use reth_chainspec::ChainSpec; +use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_cli::chainspec::ChainSpecParser; use reth_db::{static_file::iter_static_files, tables}; use reth_db_api::transaction::DbTxMut; @@ -25,7 +25,7 @@ pub struct Command { stage: StageEnum, } -impl> Command { +impl> Command { /// Execute `db` command pub async fn execute>( self, @@ -164,7 +164,7 @@ impl> Command { StageId::IndexStorageHistory.to_string(), Default::default(), )?; - insert_genesis_history(&provider_rw.0, self.env.chain.genesis.alloc.iter())?; + insert_genesis_history(&provider_rw.0, self.env.chain.genesis().alloc.iter())?; } StageEnum::TxLookup => { tx.clear::()?; diff --git a/crates/cli/commands/src/stage/dump/execution.rs b/crates/cli/commands/src/stage/dump/execution.rs index c807ac94145d4..709fc59190d4a 100644 --- a/crates/cli/commands/src/stage/dump/execution.rs +++ b/crates/cli/commands/src/stage/dump/execution.rs @@ -1,7 +1,6 @@ use std::sync::Arc; use super::setup; -use reth_chainspec::ChainSpec; use reth_db::{tables, DatabaseEnv}; use reth_db_api::{ cursor::DbCursorRO, database::Database, table::TableImporter, transaction::DbTx, @@ -10,7 +9,10 @@ use reth_db_common::DbTool; use reth_evm::{execute::BlockExecutorProvider, noop::NoopBlockExecutorProvider}; use reth_node_builder::{NodeTypesWithDB, 
NodeTypesWithDBAdapter}; use reth_node_core::dirs::{ChainPath, DataDirPath}; -use reth_provider::{providers::StaticFileProvider, DatabaseProviderFactory, ProviderFactory}; +use reth_provider::{ + providers::{ProviderNodeTypes, StaticFileProvider}, + DatabaseProviderFactory, ProviderFactory, +}; use reth_stages::{stages::ExecutionStage, Stage, StageCheckpoint, UnwindInput}; use tracing::info; @@ -23,7 +25,7 @@ pub(crate) async fn dump_execution_stage( executor: E, ) -> eyre::Result<()> where - N: NodeTypesWithDB, + N: ProviderNodeTypes, E: BlockExecutorProvider, { let (output_db, tip_block_number) = setup(from, to, &output_datadir.db(), db_tool)?; @@ -129,7 +131,7 @@ fn import_tables_with_range( /// Dry-run an unwind to FROM block, so we can get the `PlainStorageState` and /// `PlainAccountState` safely. There might be some state dependency from an address /// which hasn't been changed in the given range. -fn unwind_and_copy>( +fn unwind_and_copy( db_tool: &DbTool, from: u64, tip_block_number: u64, @@ -166,7 +168,7 @@ fn dry_run( executor: E, ) -> eyre::Result<()> where - N: NodeTypesWithDB, + N: ProviderNodeTypes, E: BlockExecutorProvider, { info!(target: "reth::cli", "Executing stage. [dry-run]"); diff --git a/crates/cli/commands/src/stage/dump/hashing_account.rs b/crates/cli/commands/src/stage/dump/hashing_account.rs index 94d8129e03829..738dcabafa708 100644 --- a/crates/cli/commands/src/stage/dump/hashing_account.rs +++ b/crates/cli/commands/src/stage/dump/hashing_account.rs @@ -3,17 +3,19 @@ use std::sync::Arc; use super::setup; use alloy_primitives::BlockNumber; use eyre::Result; -use reth_chainspec::ChainSpec; use reth_db::{tables, DatabaseEnv}; use reth_db_api::{database::Database, table::TableImporter}; use reth_db_common::DbTool; -use reth_node_builder::{NodeTypesWithDB, NodeTypesWithDBAdapter}; +use reth_node_builder::NodeTypesWithDBAdapter; use reth_node_core::dirs::{ChainPath, DataDirPath}; -use reth_provider::{providers::StaticFileProvider, DatabaseProviderFactory, ProviderFactory}; +use reth_provider::{ + providers::{ProviderNodeTypes, StaticFileProvider}, + DatabaseProviderFactory, ProviderFactory, +}; use reth_stages::{stages::AccountHashingStage, Stage, StageCheckpoint, UnwindInput}; use tracing::info; -pub(crate) async fn dump_hashing_account_stage>( +pub(crate) async fn dump_hashing_account_stage( db_tool: &DbTool, from: BlockNumber, to: BlockNumber, @@ -49,7 +51,7 @@ pub(crate) async fn dump_hashing_account_stage>( +fn unwind_and_copy( db_tool: &DbTool, from: u64, tip_block_number: u64, @@ -74,7 +76,7 @@ fn unwind_and_copy>( } /// Try to re-execute the stage straight away -fn dry_run>( +fn dry_run( output_provider_factory: ProviderFactory, to: u64, from: u64, diff --git a/crates/cli/commands/src/stage/dump/hashing_storage.rs b/crates/cli/commands/src/stage/dump/hashing_storage.rs index 16a90eeedcb38..204c087a234d0 100644 --- a/crates/cli/commands/src/stage/dump/hashing_storage.rs +++ b/crates/cli/commands/src/stage/dump/hashing_storage.rs @@ -2,17 +2,19 @@ use std::sync::Arc; use super::setup; use eyre::Result; -use reth_chainspec::ChainSpec; use reth_db::{tables, DatabaseEnv}; use reth_db_api::{database::Database, table::TableImporter}; use reth_db_common::DbTool; -use reth_node_builder::{NodeTypesWithDB, NodeTypesWithDBAdapter}; +use reth_node_builder::NodeTypesWithDBAdapter; use reth_node_core::dirs::{ChainPath, DataDirPath}; -use reth_provider::{providers::StaticFileProvider, DatabaseProviderFactory, ProviderFactory}; +use reth_provider::{ + 
providers::{ProviderNodeTypes, StaticFileProvider}, + DatabaseProviderFactory, ProviderFactory, +}; use reth_stages::{stages::StorageHashingStage, Stage, StageCheckpoint, UnwindInput}; use tracing::info; -pub(crate) async fn dump_hashing_storage_stage>( +pub(crate) async fn dump_hashing_storage_stage( db_tool: &DbTool, from: u64, to: u64, @@ -39,7 +41,7 @@ pub(crate) async fn dump_hashing_storage_stage>( +fn unwind_and_copy( db_tool: &DbTool, from: u64, tip_block_number: u64, @@ -69,7 +71,7 @@ fn unwind_and_copy>( } /// Try to re-execute the stage straight away -fn dry_run>( +fn dry_run( output_provider_factory: ProviderFactory, to: u64, from: u64, diff --git a/crates/cli/commands/src/stage/dump/merkle.rs b/crates/cli/commands/src/stage/dump/merkle.rs index 4b3d9c30331e0..f7e9e2fc1afc5 100644 --- a/crates/cli/commands/src/stage/dump/merkle.rs +++ b/crates/cli/commands/src/stage/dump/merkle.rs @@ -3,16 +3,18 @@ use std::sync::Arc; use super::setup; use alloy_primitives::BlockNumber; use eyre::Result; -use reth_chainspec::ChainSpec; use reth_config::config::EtlConfig; use reth_db::{tables, DatabaseEnv}; use reth_db_api::{database::Database, table::TableImporter}; use reth_db_common::DbTool; use reth_evm::noop::NoopBlockExecutorProvider; use reth_exex::ExExManagerHandle; -use reth_node_builder::{NodeTypesWithDB, NodeTypesWithDBAdapter}; +use reth_node_builder::NodeTypesWithDBAdapter; use reth_node_core::dirs::{ChainPath, DataDirPath}; -use reth_provider::{providers::StaticFileProvider, DatabaseProviderFactory, ProviderFactory}; +use reth_provider::{ + providers::{ProviderNodeTypes, StaticFileProvider}, + DatabaseProviderFactory, ProviderFactory, +}; use reth_prune::PruneModes; use reth_stages::{ stages::{ @@ -23,7 +25,7 @@ use reth_stages::{ }; use tracing::info; -pub(crate) async fn dump_merkle_stage>( +pub(crate) async fn dump_merkle_stage( db_tool: &DbTool, from: BlockNumber, to: BlockNumber, @@ -66,7 +68,7 @@ pub(crate) async fn dump_merkle_stage> } /// Dry-run an unwind to FROM block and copy the necessary table data to the new database. -fn unwind_and_copy>( +fn unwind_and_copy( db_tool: &DbTool, range: (u64, u64), tip_block_number: u64, @@ -144,7 +146,7 @@ fn unwind_and_copy>( } /// Try to re-execute the stage straight away -fn dry_run>( +fn dry_run( output_provider_factory: ProviderFactory, to: u64, from: u64, diff --git a/crates/cli/commands/src/stage/dump/mod.rs b/crates/cli/commands/src/stage/dump/mod.rs index 44161d9b3bb2d..6fd2f23aa0e59 100644 --- a/crates/cli/commands/src/stage/dump/mod.rs +++ b/crates/cli/commands/src/stage/dump/mod.rs @@ -1,7 +1,7 @@ //! Database debugging tool use crate::common::{AccessRights, Environment, EnvironmentArgs}; use clap::Parser; -use reth_chainspec::ChainSpec; +use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_cli::chainspec::ChainSpecParser; use reth_db::{init_db, mdbx::DatabaseArguments, tables, DatabaseEnv}; use reth_db_api::{ @@ -75,24 +75,26 @@ pub struct StageCommand { macro_rules! handle_stage { ($stage_fn:ident, $tool:expr, $command:expr) => {{ let StageCommand { output_datadir, from, to, dry_run, .. } = $command; - let output_datadir = output_datadir.with_chain($tool.chain().chain, DatadirArgs::default()); + let output_datadir = + output_datadir.with_chain($tool.chain().chain(), DatadirArgs::default()); $stage_fn($tool, *from, *to, output_datadir, *dry_run).await? }}; ($stage_fn:ident, $tool:expr, $command:expr, $executor:expr) => {{ let StageCommand { output_datadir, from, to, dry_run, .. 
} = $command; - let output_datadir = output_datadir.with_chain($tool.chain().chain, DatadirArgs::default()); + let output_datadir = + output_datadir.with_chain($tool.chain().chain(), DatadirArgs::default()); $stage_fn($tool, *from, *to, output_datadir, *dry_run, $executor).await? }}; } -impl> Command { +impl> Command { /// Execute `dump-stage` command pub async fn execute(self, executor: F) -> eyre::Result<()> where N: NodeTypesWithEngine, E: BlockExecutorProvider, - F: FnOnce(Arc) -> E, + F: FnOnce(Arc) -> E, { let Environment { provider_factory, .. } = self.env.init::(AccessRights::RO)?; let tool = DbTool::new(provider_factory)?; diff --git a/crates/cli/commands/src/stage/mod.rs b/crates/cli/commands/src/stage/mod.rs index a4e0d088ac941..562bd73a28d75 100644 --- a/crates/cli/commands/src/stage/mod.rs +++ b/crates/cli/commands/src/stage/mod.rs @@ -3,7 +3,7 @@ use std::sync::Arc; use clap::{Parser, Subcommand}; -use reth_chainspec::ChainSpec; +use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_cli::chainspec::ChainSpecParser; use reth_cli_runner::CliContext; use reth_evm::execute::BlockExecutorProvider; @@ -39,13 +39,13 @@ pub enum Subcommands { Unwind(unwind::Command), } -impl> Command { +impl> Command { /// Execute `stage` command pub async fn execute(self, ctx: CliContext, executor: F) -> eyre::Result<()> where N: NodeTypesWithEngine, E: BlockExecutorProvider, - F: FnOnce(Arc) -> E, + F: FnOnce(Arc) -> E, { match self.command { Subcommands::Run(command) => command.execute::(ctx, executor).await, diff --git a/crates/cli/commands/src/stage/run.rs b/crates/cli/commands/src/stage/run.rs index 9bc0fa04a365a..23d6f6f28ac60 100644 --- a/crates/cli/commands/src/stage/run.rs +++ b/crates/cli/commands/src/stage/run.rs @@ -6,7 +6,7 @@ use crate::common::{AccessRights, Environment, EnvironmentArgs}; use alloy_eips::BlockHashOrNumber; use clap::Parser; use reth_beacon_consensus::EthBeaconConsensus; -use reth_chainspec::ChainSpec; +use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_cli::chainspec::ChainSpecParser; use reth_cli_runner::CliContext; use reth_cli_util::get_secret_key; @@ -102,13 +102,13 @@ pub struct Command { network: NetworkArgs, } -impl> Command { +impl> Command { /// Execute `stage` command pub async fn execute(self, ctx: CliContext, executor: F) -> eyre::Result<()> where N: NodeTypesWithEngine, E: BlockExecutorProvider, - F: FnOnce(Arc) -> E, + F: FnOnce(Arc) -> E, { // Raise the fd limit of the process. // Does not do anything on windows. 
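Editor's note: the angle-bracketed generic parameters in this patch text were stripped during extraction, which is why impl headers above render as `impl> Command {`. Below is a minimal sketch of the bound relaxation these hunks apply, reconstructed from the imports visible in the diff (`ChainSpecParser`, `EthChainSpec`, `EthereumHardforks`); the struct body and method are placeholders, and the exact bounds in the real commit may differ.

    // Sketch only: assumes the reth crates shown in the diff are available as dependencies.
    use reth_chainspec::{EthChainSpec, EthereumHardforks};
    use reth_cli::chainspec::ChainSpecParser;
    use std::marker::PhantomData;

    /// Stand-in for one of the CLI command structs touched above.
    pub struct Command<C: ChainSpecParser> {
        _parser: PhantomData<C>,
    }

    // Before: the impls required the concrete Ethereum `ChainSpec`, roughly
    //     impl<C: ChainSpecParser<ChainSpec = ChainSpec>> Command<C> { ... }
    // After: any chain spec type implementing the base traits is accepted.
    impl<C: ChainSpecParser<ChainSpec: EthChainSpec + EthereumHardforks>> Command<C> {
        /// Placeholder; the real commands expose an async `execute`.
        pub fn noop(&self) {}
    }

This relaxation is what lets the OP CLI further down reuse the same commands with `OpChainSpec` directly: its parser now returns the OP spec as-is instead of unwrapping into the inner Ethereum `ChainSpec`.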
@@ -131,7 +131,7 @@ impl> Command { target_triple: VERGEN_CARGO_TARGET_TRIPLE, build_profile: BUILD_PROFILE_NAME, }, - ChainSpecInfo { name: provider_factory.chain_spec().chain.to_string() }, + ChainSpecInfo { name: provider_factory.chain_spec().chain().to_string() }, ctx.task_executor, Hooks::new( provider_factory.db_ref().clone(), diff --git a/crates/cli/commands/src/stage/unwind.rs b/crates/cli/commands/src/stage/unwind.rs index 6c7bdebd1184c..c1029f33beea3 100644 --- a/crates/cli/commands/src/stage/unwind.rs +++ b/crates/cli/commands/src/stage/unwind.rs @@ -5,7 +5,7 @@ use alloy_eips::BlockHashOrNumber; use alloy_primitives::{BlockNumber, B256}; use clap::{Parser, Subcommand}; use reth_beacon_consensus::EthBeaconConsensus; -use reth_chainspec::ChainSpec; +use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_cli::chainspec::ChainSpecParser; use reth_config::Config; use reth_consensus::Consensus; @@ -16,8 +16,8 @@ use reth_exex::ExExManagerHandle; use reth_node_builder::{NodeTypesWithDB, NodeTypesWithEngine}; use reth_node_core::args::NetworkArgs; use reth_provider::{ - BlockExecutionWriter, BlockNumReader, ChainSpecProvider, FinalizedBlockReader, - FinalizedBlockWriter, ProviderFactory, StaticFileProviderFactory, + providers::ProviderNodeTypes, BlockExecutionWriter, BlockNumReader, ChainSpecProvider, + FinalizedBlockReader, FinalizedBlockWriter, ProviderFactory, StaticFileProviderFactory, }; use reth_prune::PruneModes; use reth_stages::{ @@ -48,7 +48,7 @@ pub struct Command { offline: bool, } -impl> Command { +impl> Command { /// Execute `db stage unwind` command pub async fn execute>( self, @@ -189,7 +189,7 @@ impl Subcommands { /// Returns the block range to unwind. /// /// This returns an inclusive range: [target..=latest] - fn unwind_range>>( + fn unwind_range>>( &self, factory: ProviderFactory, ) -> eyre::Result> { diff --git a/crates/e2e-test-utils/src/lib.rs b/crates/e2e-test-utils/src/lib.rs index 3d2961cf8cd94..998b48e70431d 100644 --- a/crates/e2e-test-utils/src/lib.rs +++ b/crates/e2e-test-utils/src/lib.rs @@ -10,7 +10,7 @@ use reth::{ rpc::api::eth::{helpers::AddDevSigners, FullEthApiServer}, tasks::TaskManager, }; -use reth_chainspec::ChainSpec; +use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_db::{test_utils::TempDatabase, DatabaseEnv}; use reth_node_builder::{ components::NodeComponentsBuilder, rpc::EthApiBuilderProvider, FullNodeTypesAdapter, Node, @@ -47,11 +47,11 @@ mod traits; /// Creates the initial setup with `num_nodes` started and interconnected. 
pub async fn setup( num_nodes: usize, - chain_spec: Arc, + chain_spec: Arc, is_dev: bool, ) -> eyre::Result<(Vec>, TaskManager, Wallet)> where - N: Default + Node> + NodeTypesWithEngine, + N: Default + Node> + NodeTypesWithEngine, N::ComponentsBuilder: NodeComponentsBuilder< TmpNodeAdapter, Components: NodeComponents, Network: PeersHandleProvider>, @@ -73,8 +73,7 @@ where let mut nodes: Vec> = Vec::with_capacity(num_nodes); for idx in 0..num_nodes { - let node_config = NodeConfig::test() - .with_chain(chain_spec.clone()) + let node_config = NodeConfig::new(chain_spec.clone()) .with_network(network_config.clone()) .with_unused_ports() .with_rpc(RpcServerArgs::default().with_unused_ports().with_http()) diff --git a/crates/e2e-test-utils/src/node.rs b/crates/e2e-test-utils/src/node.rs index 391a070df7ddb..2ea39348f5de9 100644 --- a/crates/e2e-test-utils/src/node.rs +++ b/crates/e2e-test-utils/src/node.rs @@ -17,7 +17,7 @@ use reth::{ types::engine::PayloadStatusEnum, }, }; -use reth_chainspec::ChainSpec; +use reth_chainspec::EthereumHardforks; use reth_node_builder::{NodeAddOns, NodeTypesWithEngine}; use reth_stages_types::StageId; use tokio_stream::StreamExt; @@ -50,7 +50,7 @@ impl NodeTestContext where Engine: EngineTypes, Node: FullNodeComponents, - Node::Types: NodeTypesWithEngine, + Node::Types: NodeTypesWithEngine, Node::Network: PeersHandleProvider, AddOns: NodeAddOns, { diff --git a/crates/e2e-test-utils/src/rpc.rs b/crates/e2e-test-utils/src/rpc.rs index 3ff378a08304c..b8cbe4d77add7 100644 --- a/crates/e2e-test-utils/src/rpc.rs +++ b/crates/e2e-test-utils/src/rpc.rs @@ -8,7 +8,7 @@ use reth::{ DebugApiServer, }, }; -use reth_chainspec::ChainSpec; +use reth_chainspec::EthereumHardforks; use reth_node_builder::{EthApiTypes, NodeTypes}; #[allow(missing_debug_implementations)] @@ -18,7 +18,7 @@ pub struct RpcTestContext { impl RpcTestContext where - Node: FullNodeComponents>, + Node: FullNodeComponents>, EthApi: EthApiSpec + EthTransactions + TraceExt, { /// Injects a raw transaction into the node tx pool via RPC server diff --git a/crates/engine/invalid-block-hooks/src/witness.rs b/crates/engine/invalid-block-hooks/src/witness.rs index 06bfee747cc0d..59cab6adecae1 100644 --- a/crates/engine/invalid-block-hooks/src/witness.rs +++ b/crates/engine/invalid-block-hooks/src/witness.rs @@ -4,7 +4,7 @@ use alloy_primitives::{keccak256, B256, U256}; use alloy_rpc_types_debug::ExecutionWitness; use eyre::OptionExt; use pretty_assertions::Comparison; -use reth_chainspec::ChainSpec; +use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_engine_primitives::InvalidBlockHook; use reth_evm::{ system_calls::{apply_beacon_root_contract_call, apply_blockhashes_contract_call}, @@ -52,7 +52,11 @@ impl InvalidBlockWitnessHook { impl InvalidBlockWitnessHook where - P: StateProviderFactory + ChainSpecProvider + Send + Sync + 'static, + P: StateProviderFactory + + ChainSpecProvider + + Send + + Sync + + 'static, EvmConfig: ConfigureEvm
, { fn on_invalid_block( @@ -295,7 +299,11 @@ where impl InvalidBlockHook for InvalidBlockWitnessHook where - P: StateProviderFactory + ChainSpecProvider + Send + Sync + 'static, + P: StateProviderFactory + + ChainSpecProvider + + Send + + Sync + + 'static, EvmConfig: ConfigureEvm
, { fn on_invalid_block( diff --git a/crates/node/builder/src/builder/mod.rs b/crates/node/builder/src/builder/mod.rs index 61141f0677cce..4989589c9f985 100644 --- a/crates/node/builder/src/builder/mod.rs +++ b/crates/node/builder/src/builder/mod.rs @@ -10,7 +10,7 @@ pub use states::*; use std::sync::Arc; use futures::Future; -use reth_chainspec::{ChainSpec, EthChainSpec, EthereumHardforks, Hardforks}; +use reth_chainspec::{EthChainSpec, EthereumHardforks, Hardforks}; use reth_cli_util::get_secret_key; use reth_db_api::{ database::Database, @@ -641,7 +641,7 @@ impl BuilderContext { } } -impl>> BuilderContext { +impl>> BuilderContext { /// Creates the [`NetworkBuilder`] for the node. pub async fn network_builder(&self) -> eyre::Result> { let network_config = self.network_config()?; diff --git a/crates/node/builder/src/launch/common.rs b/crates/node/builder/src/launch/common.rs index 720f69c184646..99e9b29368a0d 100644 --- a/crates/node/builder/src/launch/common.rs +++ b/crates/node/builder/src/launch/common.rs @@ -10,7 +10,7 @@ use reth_beacon_consensus::EthBeaconConsensus; use reth_blockchain_tree::{ BlockchainTree, BlockchainTreeConfig, ShareableBlockchainTree, TreeExternals, }; -use reth_chainspec::{Chain, ChainSpec, EthChainSpec, EthereumHardforks}; +use reth_chainspec::{Chain, EthChainSpec, EthereumHardforks}; use reth_config::{config::EtlConfig, PruneConfig}; use reth_consensus::Consensus; use reth_db_api::database::Database; @@ -879,8 +879,8 @@ impl > where T: FullNodeTypes< - Provider: WithTree + StateProviderFactory + ChainSpecProvider, - Types: NodeTypes, + Provider: WithTree + StateProviderFactory + ChainSpecProvider, + Types: NodeTypes, >, CB: NodeComponentsBuilder, { diff --git a/crates/node/builder/src/launch/engine.rs b/crates/node/builder/src/launch/engine.rs index 708d791a0e844..6f2d37c1c4905 100644 --- a/crates/node/builder/src/launch/engine.rs +++ b/crates/node/builder/src/launch/engine.rs @@ -7,7 +7,7 @@ use reth_beacon_consensus::{ BeaconConsensusEngineHandle, }; use reth_blockchain_tree::BlockchainTreeConfig; -use reth_chainspec::ChainSpec; +use reth_chainspec::EthChainSpec; use reth_consensus_debug_client::{DebugConsensusClient, EtherscanBlockProvider}; use reth_engine_service::service::{ChainEvent, EngineService}; use reth_engine_tree::{ @@ -18,9 +18,7 @@ use reth_engine_util::EngineMessageStreamExt; use reth_exex::ExExManagerHandle; use reth_network::{NetworkSyncUpdater, SyncState}; use reth_network_api::{BlockDownloaderProvider, NetworkEventListenerProvider}; -use reth_node_api::{ - BuiltPayload, FullNodeTypes, NodeAddOns, NodeTypesWithDB, NodeTypesWithEngine, -}; +use reth_node_api::{BuiltPayload, FullNodeTypes, NodeAddOns, NodeTypesWithEngine}; use reth_node_core::{ dirs::{ChainPath, DataDirPath}, exit::NodeExitFuture, @@ -30,7 +28,8 @@ use reth_node_core::{ }; use reth_node_events::{cl::ConsensusLayerHealthEvents, node}; use reth_payload_primitives::PayloadBuilder; -use reth_provider::providers::BlockchainProvider2; +use reth_primitives::EthereumHardforks; +use reth_provider::providers::{BlockchainProvider2, ProviderNodeTypes}; use reth_rpc_engine_api::{capabilities::EngineCapabilities, EngineApi}; use reth_tasks::TaskExecutor; use reth_tokio_util::EventSender; @@ -72,7 +71,7 @@ impl EngineNodeLauncher { impl LaunchNode> for EngineNodeLauncher where - Types: NodeTypesWithDB + NodeTypesWithEngine, + Types: ProviderNodeTypes + NodeTypesWithEngine, T: FullNodeTypes>, CB: NodeComponentsBuilder, AO: NodeAddOns< @@ -127,7 +126,7 @@ where debug!(target: "reth::cli", 
chain=%this.chain_id(), genesis=?this.genesis_hash(), "Initializing genesis"); }) .with_genesis()? - .inspect(|this: &LaunchContextWith, _>>| { + .inspect(|this: &LaunchContextWith, _>>| { info!(target: "reth::cli", "\n{}", this.chain_spec().display_hardforks()); }) .with_metrics_task() @@ -296,7 +295,7 @@ where if let Some(maybe_custom_etherscan_url) = ctx.node_config().debug.etherscan.clone() { info!(target: "reth::cli", "Using etherscan as consensus client"); - let chain = ctx.node_config().chain.chain; + let chain = ctx.node_config().chain.chain(); let etherscan_url = maybe_custom_etherscan_url.map(Ok).unwrap_or_else(|| { // If URL isn't provided, use default Etherscan URL for the chain if it is known chain diff --git a/crates/node/core/src/args/network.rs b/crates/node/core/src/args/network.rs index d25ebd8ea1574..0f1465bc5795b 100644 --- a/crates/node/core/src/args/network.rs +++ b/crates/node/core/src/args/network.rs @@ -4,11 +4,10 @@ use std::{ net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr, SocketAddrV4, SocketAddrV6}, ops::Not, path::PathBuf, - sync::Arc, }; use clap::Args; -use reth_chainspec::ChainSpec; +use reth_chainspec::EthChainSpec; use reth_config::Config; use reth_discv4::{NodeRecord, DEFAULT_DISCOVERY_ADDR, DEFAULT_DISCOVERY_PORT}; use reth_discv5::{ @@ -186,8 +185,8 @@ impl NetworkArgs { }) } - /// Build a [`NetworkConfigBuilder`] from a [`Config`] and a [`ChainSpec`], in addition to the - /// values in this option struct. + /// Build a [`NetworkConfigBuilder`] from a [`Config`] and a [`EthChainSpec`], in addition to + /// the values in this option struct. /// /// The `default_peers_file` will be used as the default location to store the persistent peers /// file if `no_persist_peers` is false, and there is no provided `peers_file`. @@ -200,7 +199,7 @@ impl NetworkArgs { pub fn network_config( &self, config: &Config, - chain_spec: Arc, + chain_spec: impl EthChainSpec, secret_key: SecretKey, default_peers_file: PathBuf, ) -> NetworkConfigBuilder { diff --git a/crates/node/core/src/node_config.rs b/crates/node/core/src/node_config.rs index de69298246769..a8799d80df1c8 100644 --- a/crates/node/core/src/node_config.rs +++ b/crates/node/core/src/node_config.rs @@ -139,6 +139,25 @@ impl NodeConfig { } impl NodeConfig { + /// Creates a new config with given chain spec, setting all fields to default values. + pub fn new(chain: Arc) -> Self { + Self { + config: None, + chain, + metrics: None, + instance: 1, + network: NetworkArgs::default(), + rpc: RpcServerArgs::default(), + txpool: TxPoolArgs::default(), + builder: PayloadBuilderArgs::default(), + debug: DebugArgs::default(), + db: DatabaseArgs::default(), + dev: DevArgs::default(), + pruning: PruningArgs::default(), + datadir: DatadirArgs::default(), + } + } + /// Sets --dev mode for the node. 
/// /// In addition to setting the `--dev` flag, this also: @@ -407,21 +426,7 @@ impl NodeConfig { impl Default for NodeConfig { fn default() -> Self { - Self { - config: None, - chain: MAINNET.clone(), - metrics: None, - instance: 1, - network: NetworkArgs::default(), - rpc: RpcServerArgs::default(), - txpool: TxPoolArgs::default(), - builder: PayloadBuilderArgs::default(), - debug: DebugArgs::default(), - db: DatabaseArgs::default(), - dev: DevArgs::default(), - pruning: PruningArgs::default(), - datadir: DatadirArgs::default(), - } + Self::new(MAINNET.clone()) } } diff --git a/crates/optimism/chainspec/Cargo.toml b/crates/optimism/chainspec/Cargo.toml index ce899837e0614..41b54902c201b 100644 --- a/crates/optimism/chainspec/Cargo.toml +++ b/crates/optimism/chainspec/Cargo.toml @@ -16,12 +16,14 @@ workspace = true reth-chainspec = { workspace = true, features = ["optimism"] } reth-ethereum-forks.workspace = true reth-primitives-traits.workspace = true +reth-network-peers.workspace = true # op-reth reth-optimism-forks.workspace = true # ethereum alloy-chains.workspace = true +alloy-genesis.workspace = true alloy-primitives.workspace = true # io diff --git a/crates/optimism/chainspec/src/lib.rs b/crates/optimism/chainspec/src/lib.rs index 63b6896f61422..2668c374500df 100644 --- a/crates/optimism/chainspec/src/lib.rs +++ b/crates/optimism/chainspec/src/lib.rs @@ -16,7 +16,10 @@ mod dev; mod op; mod op_sepolia; -use alloy_primitives::{Parity, Signature, U256}; +use std::fmt::Display; + +use alloy_genesis::Genesis; +use alloy_primitives::{Parity, Signature, B256, U256}; pub use base::BASE_MAINNET; pub use base_sepolia::BASE_SEPOLIA; pub use dev::OP_DEV; @@ -24,10 +27,15 @@ pub use op::OP_MAINNET; pub use op_sepolia::OP_SEPOLIA; use derive_more::{Constructor, Deref, Into}; -use reth_chainspec::ChainSpec; +use reth_chainspec::{ + BaseFeeParams, ChainSpec, DepositContract, EthChainSpec, EthereumHardforks, ForkFilter, ForkId, + Hardforks, Head, +}; +use reth_network_peers::NodeRecord; +use reth_primitives_traits::Header; /// OP stack chain spec type. -#[derive(Debug, Clone, Deref, Into, Constructor)] +#[derive(Debug, Clone, Deref, Into, Constructor, PartialEq, Eq)] pub struct OpChainSpec { /// [`ChainSpec`]. 
pub inner: ChainSpec, @@ -39,6 +47,86 @@ pub fn optimism_deposit_tx_signature() -> Signature { Signature::new(U256::ZERO, U256::ZERO, Parity::Parity(false)) } +impl EthChainSpec for OpChainSpec { + fn chain(&self) -> alloy_chains::Chain { + self.inner.chain() + } + + fn base_fee_params_at_timestamp(&self, timestamp: u64) -> BaseFeeParams { + self.inner.base_fee_params_at_timestamp(timestamp) + } + + fn base_fee_params_at_block(&self, block_number: u64) -> BaseFeeParams { + self.inner.base_fee_params_at_block(block_number) + } + + fn deposit_contract(&self) -> Option<&DepositContract> { + self.inner.deposit_contract() + } + + fn genesis_hash(&self) -> B256 { + self.inner.genesis_hash() + } + + fn prune_delete_limit(&self) -> usize { + self.inner.prune_delete_limit() + } + + fn display_hardforks(&self) -> impl Display { + self.inner.display_hardforks() + } + + fn genesis_header(&self) -> &Header { + self.inner.genesis_header() + } + + fn genesis(&self) -> &Genesis { + self.inner.genesis() + } + + fn max_gas_limit(&self) -> u64 { + self.inner.max_gas_limit() + } + + fn bootnodes(&self) -> Option> { + self.inner.bootnodes() + } +} + +impl Hardforks for OpChainSpec { + fn fork(&self, fork: H) -> reth_chainspec::ForkCondition { + self.inner.fork(fork) + } + + fn forks_iter( + &self, + ) -> impl Iterator { + self.inner.forks_iter() + } + + fn fork_id(&self, head: &Head) -> ForkId { + self.inner.fork_id(head) + } + + fn latest_fork_id(&self) -> ForkId { + self.inner.latest_fork_id() + } + + fn fork_filter(&self, head: Head) -> ForkFilter { + self.inner.fork_filter(head) + } +} + +impl EthereumHardforks for OpChainSpec { + fn final_paris_total_difficulty(&self, block_number: u64) -> Option { + self.inner.final_paris_total_difficulty(block_number) + } + + fn get_final_paris_total_difficulty(&self) -> Option { + self.inner.get_final_paris_total_difficulty() + } +} + #[cfg(test)] mod tests { use alloy_genesis::Genesis; diff --git a/crates/optimism/cli/src/chainspec.rs b/crates/optimism/cli/src/chainspec.rs index 03d78cba0a3cc..e76bfd5f0656c 100644 --- a/crates/optimism/cli/src/chainspec.rs +++ b/crates/optimism/cli/src/chainspec.rs @@ -1,6 +1,5 @@ use std::sync::Arc; -use reth_chainspec::ChainSpec; use reth_cli::chainspec::ChainSpecParser; use reth_node_core::args::utils::parse_custom_chain_spec; use reth_optimism_chainspec::{ @@ -27,7 +26,7 @@ fn chain_value_parser(s: &str) -> eyre::Result, eyre::Error> { pub struct OpChainSpecParser; impl ChainSpecParser for OpChainSpecParser { - type ChainSpec = ChainSpec; + type ChainSpec = OpChainSpec; const SUPPORTED_CHAINS: &'static [&'static str] = &[ "dev", @@ -40,7 +39,7 @@ impl ChainSpecParser for OpChainSpecParser { ]; fn parse(s: &str) -> eyre::Result> { - chain_value_parser(s).map(|s| Arc::new(Arc::unwrap_or_clone(s).inner)) + chain_value_parser(s) } } diff --git a/crates/optimism/cli/src/commands/build_pipeline.rs b/crates/optimism/cli/src/commands/build_pipeline.rs index b2ac97eef2d17..1c3dee5f1ebdb 100644 --- a/crates/optimism/cli/src/commands/build_pipeline.rs +++ b/crates/optimism/cli/src/commands/build_pipeline.rs @@ -1,6 +1,5 @@ use alloy_primitives::B256; use futures_util::{Stream, StreamExt}; -use reth_chainspec::ChainSpec; use reth_config::Config; use reth_consensus::Consensus; use reth_downloaders::{ @@ -14,6 +13,7 @@ use reth_network_p2p::{ }; use reth_node_builder::NodeTypesWithDB; use reth_node_events::node::NodeEvent; +use reth_optimism_chainspec::OpChainSpec; use reth_optimism_evm::OpExecutorProvider; use reth_provider::{BlockNumReader, 
ChainSpecProvider, HeaderProvider, ProviderFactory}; use reth_prune::PruneModes; @@ -36,7 +36,7 @@ pub(crate) async fn build_import_pipeline( disable_exec: bool, ) -> eyre::Result<(Pipeline, impl Stream)> where - N: NodeTypesWithDB, + N: NodeTypesWithDB, C: Consensus + 'static, { if !file_client.has_canonical_blocks() { diff --git a/crates/optimism/cli/src/commands/import.rs b/crates/optimism/cli/src/commands/import.rs index b7d2ee13fca60..e5f037c3d5cc9 100644 --- a/crates/optimism/cli/src/commands/import.rs +++ b/crates/optimism/cli/src/commands/import.rs @@ -1,7 +1,6 @@ //! Command that initializes the node by importing OP Mainnet chain segment below Bedrock, from a //! file. use clap::Parser; -use reth_chainspec::ChainSpec; use reth_cli::chainspec::ChainSpecParser; use reth_cli_commands::common::{AccessRights, Environment, EnvironmentArgs}; use reth_consensus::noop::NoopConsensus; @@ -12,6 +11,7 @@ use reth_downloaders::file_client::{ }; use reth_node_builder::NodeTypesWithEngine; use reth_node_core::version::SHORT_VERSION; +use reth_optimism_chainspec::OpChainSpec; use reth_optimism_primitives::bedrock::is_dup_tx; use reth_provider::StageCheckpointReader; use reth_prune::PruneModes; @@ -40,7 +40,7 @@ pub struct ImportOpCommand { path: PathBuf, } -impl> ImportOpCommand { +impl> ImportOpCommand { /// Execute `import` command pub async fn execute>( self, diff --git a/crates/optimism/cli/src/commands/import_receipts.rs b/crates/optimism/cli/src/commands/import_receipts.rs index 2ec0c9d704a0d..838a99818e967 100644 --- a/crates/optimism/cli/src/commands/import_receipts.rs +++ b/crates/optimism/cli/src/commands/import_receipts.rs @@ -4,7 +4,6 @@ use std::path::{Path, PathBuf}; use clap::Parser; -use reth_chainspec::ChainSpec; use reth_cli::chainspec::ChainSpecParser; use reth_cli_commands::common::{AccessRights, Environment, EnvironmentArgs}; use reth_db::tables; @@ -15,12 +14,13 @@ use reth_downloaders::{ use reth_execution_types::ExecutionOutcome; use reth_node_builder::{NodeTypesWithDB, NodeTypesWithEngine}; use reth_node_core::version::SHORT_VERSION; +use reth_optimism_chainspec::OpChainSpec; use reth_optimism_primitives::bedrock::is_dup_tx; use reth_primitives::Receipts; use reth_provider::{ - writer::UnifiedStorageWriter, DatabaseProviderFactory, OriginalValuesKnown, ProviderFactory, - StageCheckpointReader, StageCheckpointWriter, StateWriter, StaticFileProviderFactory, - StaticFileWriter, StatsReader, + providers::ProviderNodeTypes, writer::UnifiedStorageWriter, DatabaseProviderFactory, + OriginalValuesKnown, ProviderFactory, StageCheckpointReader, StageCheckpointWriter, + StateWriter, StaticFileProviderFactory, StaticFileWriter, StatsReader, }; use reth_stages::{StageCheckpoint, StageId}; use reth_static_file_types::StaticFileSegment; @@ -46,7 +46,7 @@ pub struct ImportReceiptsOpCommand { path: PathBuf, } -impl> ImportReceiptsOpCommand { +impl> ImportReceiptsOpCommand { /// Execute `import` command pub async fn execute>( self, @@ -88,7 +88,7 @@ pub async fn import_receipts_from_file( filter: F, ) -> eyre::Result<()> where - N: NodeTypesWithDB, + N: NodeTypesWithDB, P: AsRef, F: FnMut(u64, &mut Receipts) -> usize, { @@ -126,7 +126,7 @@ pub async fn import_receipts_from_reader( mut filter: F, ) -> eyre::Result where - N: NodeTypesWithDB, + N: ProviderNodeTypes, F: FnMut(u64, &mut Receipts) -> usize, { let static_file_provider = provider_factory.static_file_provider(); diff --git a/crates/optimism/cli/src/commands/init_state/mod.rs b/crates/optimism/cli/src/commands/init_state/mod.rs 
index a021e509b64d3..3537f89e75195 100644 --- a/crates/optimism/cli/src/commands/init_state/mod.rs +++ b/crates/optimism/cli/src/commands/init_state/mod.rs @@ -1,11 +1,11 @@ //! Command that initializes the node from a genesis file. use clap::Parser; -use reth_chainspec::ChainSpec; use reth_cli::chainspec::ChainSpecParser; use reth_cli_commands::common::{AccessRights, Environment}; use reth_db_common::init::init_from_state_dump; use reth_node_builder::NodeTypesWithEngine; +use reth_optimism_chainspec::OpChainSpec; use reth_optimism_primitives::bedrock::BEDROCK_HEADER; use reth_provider::{ BlockNumReader, ChainSpecProvider, DatabaseProviderFactory, StaticFileProviderFactory, @@ -35,7 +35,7 @@ pub struct InitStateCommandOp { without_ovm: bool, } -impl> InitStateCommandOp { +impl> InitStateCommandOp { /// Execute the `init` command pub async fn execute>( self, diff --git a/crates/optimism/cli/src/commands/mod.rs b/crates/optimism/cli/src/commands/mod.rs index 41a19e3ded5e3..a7674ec2c9bf8 100644 --- a/crates/optimism/cli/src/commands/mod.rs +++ b/crates/optimism/cli/src/commands/mod.rs @@ -2,7 +2,6 @@ use crate::chainspec::OpChainSpecParser; use clap::Subcommand; use import::ImportOpCommand; use import_receipts::ImportReceiptsOpCommand; -use reth_chainspec::ChainSpec; use reth_cli::chainspec::ChainSpecParser; use reth_cli_commands::{ config_cmd, db, dump_genesis, init_cmd, @@ -19,10 +18,8 @@ pub mod init_state; /// Commands to be executed #[derive(Debug, Subcommand)] -pub enum Commands< - Spec: ChainSpecParser = OpChainSpecParser, - Ext: clap::Args + fmt::Debug = NoArgs, -> { +pub enum Commands +{ /// Start the node #[command(name = "node")] Node(Box>), diff --git a/crates/optimism/cli/src/lib.rs b/crates/optimism/cli/src/lib.rs index ea8a77087fafa..b17cefa63287a 100644 --- a/crates/optimism/cli/src/lib.rs +++ b/crates/optimism/cli/src/lib.rs @@ -28,6 +28,7 @@ pub mod commands; pub mod receipt_file_codec; pub use commands::{import::ImportOpCommand, import_receipts::ImportReceiptsOpCommand}; +use reth_optimism_chainspec::OpChainSpec; use std::{ffi::OsString, fmt, sync::Arc}; @@ -35,7 +36,7 @@ use chainspec::OpChainSpecParser; use clap::{command, value_parser, Parser}; use commands::Commands; use futures_util::Future; -use reth_chainspec::ChainSpec; +use reth_chainspec::EthChainSpec; use reth_cli::chainspec::ChainSpecParser; use reth_cli_commands::node::NoArgs; use reth_cli_runner::CliRunner; @@ -55,10 +56,7 @@ use tracing::info; /// This is the entrypoint to the executable. #[derive(Debug, Parser)] #[command(author, version = SHORT_VERSION, long_version = LONG_VERSION, about = "Reth", long_about = None)] -pub struct Cli< - Spec: ChainSpecParser = OpChainSpecParser, - Ext: clap::Args + fmt::Debug = NoArgs, -> { +pub struct Cli { /// The command to run #[command(subcommand)] command: Commands, @@ -112,9 +110,9 @@ impl Cli { } } -impl Cli +impl Cli where - Spec: ChainSpecParser, + C: ChainSpecParser, Ext: clap::Args + fmt::Debug, { /// Execute the configured cli command. @@ -123,12 +121,12 @@ where /// [`NodeCommand`](reth_cli_commands::node::NodeCommand). 
pub fn run(mut self, launcher: L) -> eyre::Result<()> where - L: FnOnce(WithLaunchContext, ChainSpec>>, Ext) -> Fut, + L: FnOnce(WithLaunchContext, C::ChainSpec>>, Ext) -> Fut, Fut: Future>, { // add network name to logs dir self.logs.log_file_directory = - self.logs.log_file_directory.join(self.chain.chain.to_string()); + self.logs.log_file_directory.join(self.chain.chain().to_string()); let _guard = self.init_tracing()?; info!(target: "reth::cli", "Initialized tracing, debug log directory: {}", self.logs.log_file_directory); diff --git a/crates/optimism/consensus/Cargo.toml b/crates/optimism/consensus/Cargo.toml index c18eaea502260..f5f061c599299 100644 --- a/crates/optimism/consensus/Cargo.toml +++ b/crates/optimism/consensus/Cargo.toml @@ -21,6 +21,7 @@ reth-trie-common.workspace = true # op-reth reth-optimism-forks.workspace = true +reth-optimism-chainspec.workspace = true # ethereum alloy-primitives.workspace = true diff --git a/crates/optimism/consensus/src/lib.rs b/crates/optimism/consensus/src/lib.rs index d040d32e04d1f..fe67ff1bcd9e1 100644 --- a/crates/optimism/consensus/src/lib.rs +++ b/crates/optimism/consensus/src/lib.rs @@ -10,7 +10,7 @@ #![cfg(feature = "optimism")] use alloy_primitives::{B64, U256}; -use reth_chainspec::{ChainSpec, EthereumHardforks}; +use reth_chainspec::EthereumHardforks; use reth_consensus::{Consensus, ConsensusError, PostExecutionInput}; use reth_consensus_common::validation::{ validate_against_parent_4844, validate_against_parent_eip1559_base_fee, @@ -18,6 +18,7 @@ use reth_consensus_common::validation::{ validate_header_base_fee, validate_header_extradata, validate_header_gas, validate_shanghai_withdrawals, }; +use reth_optimism_chainspec::OpChainSpec; use reth_optimism_forks::OptimismHardforks; use reth_primitives::{ BlockWithSenders, GotExpected, Header, SealedBlock, SealedHeader, EMPTY_OMMER_ROOT_HASH, @@ -36,17 +37,12 @@ pub use validation::validate_block_post_execution; #[derive(Debug, Clone, PartialEq, Eq)] pub struct OptimismBeaconConsensus { /// Configuration - chain_spec: Arc, + chain_spec: Arc, } impl OptimismBeaconConsensus { /// Create a new instance of [`OptimismBeaconConsensus`] - /// - /// # Panics - /// - /// If given chain spec is not optimism [`ChainSpec::is_optimism`] - pub fn new(chain_spec: Arc) -> Self { - assert!(chain_spec.is_optimism(), "optimism consensus only valid for optimism chains"); + pub const fn new(chain_spec: Arc) -> Self { Self { chain_spec } } } diff --git a/crates/optimism/evm/src/execute.rs b/crates/optimism/evm/src/execute.rs index e855b38fbdcfb..2f49ce5147ddc 100644 --- a/crates/optimism/evm/src/execute.rs +++ b/crates/optimism/evm/src/execute.rs @@ -32,23 +32,20 @@ use tracing::trace; /// Provides executors to execute regular optimism blocks #[derive(Debug, Clone)] pub struct OpExecutorProvider { - chain_spec: Arc, + chain_spec: Arc, evm_config: EvmConfig, } impl OpExecutorProvider { /// Creates a new default optimism executor provider. - pub fn optimism(chain_spec: Arc) -> Self { - Self::new( - chain_spec.clone(), - OptimismEvmConfig::new(Arc::new(OpChainSpec { inner: (*chain_spec).clone() })), - ) + pub fn optimism(chain_spec: Arc) -> Self { + Self::new(chain_spec.clone(), OptimismEvmConfig::new(chain_spec)) } } impl OpExecutorProvider { /// Creates a new executor provider. 
- pub const fn new(chain_spec: Arc, evm_config: EvmConfig) -> Self { + pub const fn new(chain_spec: Arc, evm_config: EvmConfig) -> Self { Self { chain_spec, evm_config } } } @@ -98,7 +95,7 @@ where #[derive(Debug, Clone)] pub struct OpEvmExecutor { /// The chainspec - chain_spec: Arc, + chain_spec: Arc, /// How to create an EVM. evm_config: EvmConfig, } @@ -240,7 +237,11 @@ pub struct OpBlockExecutor { impl OpBlockExecutor { /// Creates a new Optimism block executor. - pub const fn new(chain_spec: Arc, evm_config: EvmConfig, state: State) -> Self { + pub const fn new( + chain_spec: Arc, + evm_config: EvmConfig, + state: State, + ) -> Self { Self { executor: OpEvmExecutor { chain_spec, evm_config }, state } } @@ -504,12 +505,8 @@ mod tests { } fn executor_provider(chain_spec: Arc) -> OpExecutorProvider { - OpExecutorProvider { - evm_config: OptimismEvmConfig::new(Arc::new(OpChainSpec { - inner: (*chain_spec).clone(), - })), - chain_spec, - } + let chain_spec = Arc::new(OpChainSpec::new(Arc::unwrap_or_clone(chain_spec))); + OpExecutorProvider { evm_config: OptimismEvmConfig::new(chain_spec.clone()), chain_spec } } #[test] diff --git a/crates/optimism/evm/src/l1.rs b/crates/optimism/evm/src/l1.rs index 7bc8cfed1c553..a54bf08ce525e 100644 --- a/crates/optimism/evm/src/l1.rs +++ b/crates/optimism/evm/src/l1.rs @@ -4,6 +4,7 @@ use crate::OptimismBlockExecutionError; use alloy_primitives::{address, b256, hex, Address, Bytes, B256, U256}; use reth_chainspec::ChainSpec; use reth_execution_errors::BlockExecutionError; +use reth_optimism_chainspec::OpChainSpec; use reth_optimism_forks::OptimismHardfork; use reth_primitives::Block; use revm::{ @@ -260,7 +261,7 @@ impl RethL1BlockInfo for L1BlockInfo { /// deployer contract. This is done by directly setting the code of the create2 deployer account /// prior to executing any transactions on the timestamp activation of the fork. pub fn ensure_create2_deployer( - chain_spec: Arc, + chain_spec: Arc, timestamp: u64, db: &mut revm::State, ) -> Result<(), DB::Error> diff --git a/crates/optimism/node/src/engine.rs b/crates/optimism/node/src/engine.rs index 06059083ff57f..a9adadd3068e8 100644 --- a/crates/optimism/node/src/engine.rs +++ b/crates/optimism/node/src/engine.rs @@ -14,6 +14,7 @@ use reth_node_api::{ }, validate_version_specific_fields, EngineTypes, EngineValidator, }; +use reth_optimism_chainspec::OpChainSpec; use reth_optimism_forks::OptimismHardfork; use reth_optimism_payload_builder::{OptimismBuiltPayload, OptimismPayloadBuilderAttributes}; @@ -25,12 +26,12 @@ pub struct OptimismEngineTypes; /// Validator for Optimism engine API. #[derive(Debug, Clone)] pub struct OptimismEngineValidator { - chain_spec: Arc, + chain_spec: Arc, } impl OptimismEngineValidator { /// Instantiates a new validator. 
- pub const fn new(chain_spec: Arc) -> Self { + pub const fn new(chain_spec: Arc) -> Self { Self { chain_spec } } } diff --git a/crates/optimism/node/src/node.rs b/crates/optimism/node/src/node.rs index 2a28d44e2a0d6..d82cb70a3ffec 100644 --- a/crates/optimism/node/src/node.rs +++ b/crates/optimism/node/src/node.rs @@ -3,7 +3,6 @@ use std::sync::Arc; use reth_basic_payload_builder::{BasicPayloadJobGenerator, BasicPayloadJobGeneratorConfig}; -use reth_chainspec::ChainSpec; use reth_evm::ConfigureEvm; use reth_network::{NetworkHandle, NetworkManager}; use reth_node_api::{EngineValidator, FullNodeComponents, NodeAddOns}; @@ -63,7 +62,7 @@ impl OptimismNode { > where Node: FullNodeTypes< - Types: NodeTypesWithEngine, + Types: NodeTypesWithEngine, >, { let RollupArgs { disable_txpool_gossip, compute_pending_block, discovery_v4, .. } = args; @@ -84,7 +83,7 @@ impl OptimismNode { impl Node for OptimismNode where N: FullNodeTypes< - Types: NodeTypesWithEngine, + Types: NodeTypesWithEngine, >, { type ComponentsBuilder = ComponentsBuilder< @@ -107,7 +106,7 @@ where impl NodeTypes for OptimismNode { type Primitives = (); - type ChainSpec = ChainSpec; + type ChainSpec = OpChainSpec; } impl NodeTypesWithEngine for OptimismNode { @@ -129,7 +128,7 @@ pub struct OptimismExecutorBuilder; impl ExecutorBuilder for OptimismExecutorBuilder where - Node: FullNodeTypes>, + Node: FullNodeTypes>, { type EVM = OptimismEvmConfig; type Executor = OpExecutorProvider; @@ -138,10 +137,8 @@ where self, ctx: &BuilderContext, ) -> eyre::Result<(Self::EVM, Self::Executor)> { - let chain_spec = ctx.chain_spec(); - let evm_config = - OptimismEvmConfig::new(Arc::new(OpChainSpec { inner: (*chain_spec).clone() })); - let executor = OpExecutorProvider::new(chain_spec, evm_config.clone()); + let evm_config = OptimismEvmConfig::new(ctx.chain_spec()); + let executor = OpExecutorProvider::new(ctx.chain_spec(), evm_config.clone()); Ok((evm_config, executor)) } @@ -157,7 +154,7 @@ pub struct OptimismPoolBuilder; impl PoolBuilder for OptimismPoolBuilder where - Node: FullNodeTypes>, + Node: FullNodeTypes>, { type Pool = OpTransactionPool; @@ -165,21 +162,19 @@ where let data_dir = ctx.config().datadir(); let blob_store = DiskFileBlobStore::open(data_dir.blobstore(), Default::default())?; - let validator = TransactionValidationTaskExecutor::eth_builder(ctx.chain_spec()) - .with_head_timestamp(ctx.head().timestamp) - .kzg_settings(ctx.kzg_settings()?) - .with_additional_tasks(ctx.config().txpool.additional_validation_tasks) - .build_with_tasks( - ctx.provider().clone(), - ctx.task_executor().clone(), - blob_store.clone(), - ) - .map(|validator| { - OpTransactionValidator::new(validator) - // In --dev mode we can't require gas fees because we're unable to decode the L1 - // block info - .require_l1_data_gas_fee(!ctx.config().dev.dev) - }); + let validator = TransactionValidationTaskExecutor::eth_builder(Arc::new( + ctx.chain_spec().inner.clone(), + )) + .with_head_timestamp(ctx.head().timestamp) + .kzg_settings(ctx.kzg_settings()?) 
+ .with_additional_tasks(ctx.config().txpool.additional_validation_tasks) + .build_with_tasks(ctx.provider().clone(), ctx.task_executor().clone(), blob_store.clone()) + .map(|validator| { + OpTransactionValidator::new(validator) + // In --dev mode we can't require gas fees because we're unable to decode + // the L1 block info + .require_l1_data_gas_fee(!ctx.config().dev.dev) + }); let transaction_pool = reth_transaction_pool::Pool::new( validator, @@ -256,7 +251,7 @@ impl OptimismPayloadBuilder { ) -> eyre::Result> where Node: FullNodeTypes< - Types: NodeTypesWithEngine, + Types: NodeTypesWithEngine, >, Pool: TransactionPool + Unpin + 'static, Evm: ConfigureEvm
, @@ -292,7 +287,7 @@ impl OptimismPayloadBuilder { impl PayloadServiceBuilder for OptimismPayloadBuilder where Node: FullNodeTypes< - Types: NodeTypesWithEngine, + Types: NodeTypesWithEngine, >, Pool: TransactionPool + Unpin + 'static, { @@ -301,11 +296,7 @@ where ctx: &BuilderContext, pool: Pool, ) -> eyre::Result> { - self.spawn( - OptimismEvmConfig::new(Arc::new(OpChainSpec { inner: (*ctx.chain_spec()).clone() })), - ctx, - pool, - ) + self.spawn(OptimismEvmConfig::new(ctx.chain_spec()), ctx, pool) } } @@ -320,7 +311,7 @@ pub struct OptimismNetworkBuilder { impl NetworkBuilder for OptimismNetworkBuilder where - Node: FullNodeTypes>, + Node: FullNodeTypes>, Pool: TransactionPool + Unpin + 'static, { async fn build_network( @@ -377,7 +368,7 @@ pub struct OptimismConsensusBuilder; impl ConsensusBuilder for OptimismConsensusBuilder where - Node: FullNodeTypes>, + Node: FullNodeTypes>, { type Consensus = Arc; @@ -397,7 +388,7 @@ pub struct OptimismEngineValidatorBuilder; impl EngineValidatorBuilder for OptimismEngineValidatorBuilder where - Types: NodeTypesWithEngine, + Types: NodeTypesWithEngine, Node: FullNodeTypes, OptimismEngineValidator: EngineValidator, { diff --git a/crates/optimism/node/tests/e2e/utils.rs b/crates/optimism/node/tests/e2e/utils.rs index a8dda7b9956be..1e9ffa652f1cc 100644 --- a/crates/optimism/node/tests/e2e/utils.rs +++ b/crates/optimism/node/tests/e2e/utils.rs @@ -5,7 +5,7 @@ use alloy_primitives::{Address, B256}; use reth::{rpc::types::engine::PayloadAttributes, tasks::TaskManager}; use reth_chainspec::ChainSpecBuilder; use reth_e2e_test_utils::{transaction::TransactionTestContext, wallet::Wallet, NodeHelperType}; -use reth_optimism_chainspec::BASE_MAINNET; +use reth_optimism_chainspec::{OpChainSpec, BASE_MAINNET}; use reth_optimism_node::{ node::OptimismAddOns, OptimismBuiltPayload, OptimismNode, OptimismPayloadBuilderAttributes, }; @@ -19,13 +19,13 @@ pub(crate) async fn setup(num_nodes: usize) -> eyre::Result<(Vec, TaskMa let genesis: Genesis = serde_json::from_str(include_str!("../assets/genesis.json")).unwrap(); reth_e2e_test_utils::setup( num_nodes, - Arc::new( + Arc::new(OpChainSpec::new( ChainSpecBuilder::default() .chain(BASE_MAINNET.chain) .genesis(genesis) .ecotone_activated() .build(), - ), + )), false, ) .await diff --git a/crates/optimism/node/tests/it/builder.rs b/crates/optimism/node/tests/it/builder.rs index 8d5cc1554e3db..df1dd71fb086e 100644 --- a/crates/optimism/node/tests/it/builder.rs +++ b/crates/optimism/node/tests/it/builder.rs @@ -3,12 +3,13 @@ use reth_db::test_utils::create_test_rw_db; use reth_node_api::FullNodeComponents; use reth_node_builder::{NodeBuilder, NodeConfig}; -use reth_optimism_node::node::{OptimismAddOns, OptimismNode}; +use reth_optimism_node::{node::OptimismAddOns, OptimismNode}; +use reth_primitives::BASE_MAINNET; #[test] fn test_basic_setup() { // parse CLI -> config - let config = NodeConfig::test(); + let config = NodeConfig::new(BASE_MAINNET.clone()); let db = create_test_rw_db(); let _builder = NodeBuilder::new(config) .with_database(db) diff --git a/crates/optimism/payload/Cargo.toml b/crates/optimism/payload/Cargo.toml index 047879929be6f..e731b50c07677 100644 --- a/crates/optimism/payload/Cargo.toml +++ b/crates/optimism/payload/Cargo.toml @@ -28,6 +28,7 @@ reth-trie.workspace = true reth-chain-state.workspace = true # op-reth +reth-optimism-chainspec.workspace = true reth-optimism-consensus.workspace = true reth-optimism-evm.workspace = true reth-optimism-forks.workspace = true diff --git 
a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index 878e9cf224de9..1c0876b2a726f 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -5,12 +5,13 @@ use std::sync::Arc; use alloy_primitives::U256; use reth_basic_payload_builder::*; use reth_chain_state::ExecutedBlock; -use reth_chainspec::{ChainSpec, ChainSpecProvider, EthereumHardforks}; +use reth_chainspec::{ChainSpecProvider, EthereumHardforks}; use reth_evm::{ system_calls::pre_block_beacon_root_contract_call, ConfigureEvm, ConfigureEvmEnv, NextBlockEnvAttributes, }; use reth_execution_types::ExecutionOutcome; +use reth_optimism_chainspec::OpChainSpec; use reth_optimism_consensus::calculate_receipt_root_no_memo_optimism; use reth_optimism_forks::OptimismHardfork; use reth_payload_primitives::{PayloadBuilderAttributes, PayloadBuilderError}; @@ -94,7 +95,7 @@ where /// Implementation of the [`PayloadBuilder`] trait for [`OptimismPayloadBuilder`]. impl PayloadBuilder for OptimismPayloadBuilder where - Client: StateProviderFactory + ChainSpecProvider, + Client: StateProviderFactory + ChainSpecProvider, Pool: TransactionPool, EvmConfig: ConfigureEvm
, { @@ -165,7 +166,7 @@ pub(crate) fn optimism_payload( ) -> Result, PayloadBuilderError> where EvmConfig: ConfigureEvm
, - Client: StateProviderFactory + ChainSpecProvider, + Client: StateProviderFactory + ChainSpecProvider, Pool: TransactionPool, { let BuildArguments { client, pool, mut cached_reads, config, cancel, best_payload } = args; diff --git a/crates/optimism/payload/src/payload.rs b/crates/optimism/payload/src/payload.rs index 67eddb8e18b77..ac73f05390ff2 100644 --- a/crates/optimism/payload/src/payload.rs +++ b/crates/optimism/payload/src/payload.rs @@ -11,7 +11,8 @@ use op_alloy_rpc_types_engine::{ OptimismExecutionPayloadEnvelopeV3, OptimismExecutionPayloadEnvelopeV4, }; use reth_chain_state::ExecutedBlock; -use reth_chainspec::{ChainSpec, EthereumHardforks}; +use reth_chainspec::EthereumHardforks; +use reth_optimism_chainspec::OpChainSpec; use reth_payload_builder::EthPayloadBuilderAttributes; use reth_payload_primitives::{BuiltPayload, PayloadBuilderAttributes}; use reth_primitives::{ @@ -119,7 +120,7 @@ pub struct OptimismBuiltPayload { /// empty. pub(crate) sidecars: Vec, /// The rollup's chainspec. - pub(crate) chain_spec: Arc, + pub(crate) chain_spec: Arc, /// The payload attributes. pub(crate) attributes: OptimismPayloadBuilderAttributes, } @@ -132,7 +133,7 @@ impl OptimismBuiltPayload { id: PayloadId, block: SealedBlock, fees: U256, - chain_spec: Arc, + chain_spec: Arc, attributes: OptimismPayloadBuilderAttributes, executed_block: Option, ) -> Self { diff --git a/crates/optimism/rpc/Cargo.toml b/crates/optimism/rpc/Cargo.toml index 97ac850c4f062..46967fba262db 100644 --- a/crates/optimism/rpc/Cargo.toml +++ b/crates/optimism/rpc/Cargo.toml @@ -28,6 +28,7 @@ reth-node-builder.workspace = true reth-chainspec.workspace = true # op-reth +reth-optimism-chainspec.workspace = true reth-optimism-consensus.workspace = true reth-optimism-evm.workspace = true reth-optimism-forks.workspace = true diff --git a/crates/optimism/rpc/src/eth/block.rs b/crates/optimism/rpc/src/eth/block.rs index da799e140d8e8..a0565fcc4486b 100644 --- a/crates/optimism/rpc/src/eth/block.rs +++ b/crates/optimism/rpc/src/eth/block.rs @@ -3,8 +3,9 @@ use alloy_rpc_types::BlockId; use op_alloy_network::Network; use op_alloy_rpc_types::OpTransactionReceipt; -use reth_chainspec::{ChainSpec, ChainSpecProvider}; +use reth_chainspec::ChainSpecProvider; use reth_node_api::{FullNodeComponents, NodeTypes}; +use reth_optimism_chainspec::OpChainSpec; use reth_primitives::TransactionMeta; use reth_provider::{BlockReaderIdExt, HeaderProvider}; use reth_rpc_eth_api::{ @@ -21,7 +22,7 @@ where Error = OpEthApiError, NetworkTypes: Network, >, - N: FullNodeComponents>, + N: FullNodeComponents>, { #[inline] fn provider(&self) -> impl HeaderProvider { diff --git a/crates/optimism/rpc/src/eth/call.rs b/crates/optimism/rpc/src/eth/call.rs index ded4c656b0630..3d39b5e22750e 100644 --- a/crates/optimism/rpc/src/eth/call.rs +++ b/crates/optimism/rpc/src/eth/call.rs @@ -1,6 +1,6 @@ use alloy_primitives::{Bytes, TxKind, U256}; use alloy_rpc_types_eth::transaction::TransactionRequest; -use reth_chainspec::ChainSpec; +use reth_chainspec::EthereumHardforks; use reth_evm::ConfigureEvm; use reth_node_api::{FullNodeComponents, NodeTypes}; use reth_primitives::{ @@ -18,7 +18,7 @@ use crate::{OpEthApi, OpEthApiError}; impl EthCall for OpEthApi where Self: Call, - N: FullNodeComponents>, + N: FullNodeComponents>, { } diff --git a/crates/optimism/rpc/src/eth/mod.rs b/crates/optimism/rpc/src/eth/mod.rs index 403e3b8a73b8c..844ea170075ba 100644 --- a/crates/optimism/rpc/src/eth/mod.rs +++ b/crates/optimism/rpc/src/eth/mod.rs @@ -14,7 +14,7 @@ use std::{fmt, 
sync::Arc}; use alloy_primitives::U256; use derive_more::Deref; use op_alloy_network::Optimism; -use reth_chainspec::{ChainSpec, EthereumHardforks}; +use reth_chainspec::EthereumHardforks; use reth_evm::ConfigureEvm; use reth_network_api::NetworkInfo; use reth_node_api::{BuilderProvider, FullNodeComponents, FullNodeTypes, NodeTypes}; @@ -239,7 +239,7 @@ where impl AddDevSigners for OpEthApi where - N: FullNodeComponents>, + N: FullNodeComponents>, { fn with_dev_accounts(&self) { *self.signers().write() = DevSigner::random_signers(20) diff --git a/crates/optimism/rpc/src/eth/receipt.rs b/crates/optimism/rpc/src/eth/receipt.rs index bfd521635bcec..a953d3f3096f9 100644 --- a/crates/optimism/rpc/src/eth/receipt.rs +++ b/crates/optimism/rpc/src/eth/receipt.rs @@ -7,6 +7,7 @@ use op_alloy_rpc_types::{ }; use reth_chainspec::ChainSpec; use reth_node_api::{FullNodeComponents, NodeTypes}; +use reth_optimism_chainspec::OpChainSpec; use reth_optimism_evm::RethL1BlockInfo; use reth_optimism_forks::OptimismHardforks; use reth_primitives::{Receipt, TransactionMeta, TransactionSigned, TxType}; @@ -19,7 +20,7 @@ use crate::{OpEthApi, OpEthApiError}; impl LoadReceipt for OpEthApi where Self: Send + Sync, - N: FullNodeComponents>, + N: FullNodeComponents>, { #[inline] fn cache(&self) -> &EthStateCache { @@ -205,7 +206,7 @@ pub struct OpReceiptBuilder { impl OpReceiptBuilder { /// Returns a new builder. pub fn new( - chain_spec: &ChainSpec, + chain_spec: &OpChainSpec, transaction: &TransactionSigned, meta: TransactionMeta, receipt: &Receipt, diff --git a/crates/revm/src/state_change.rs b/crates/revm/src/state_change.rs index bd967a23d31db..a4f5c6cb04a86 100644 --- a/crates/revm/src/state_change.rs +++ b/crates/revm/src/state_change.rs @@ -1,5 +1,5 @@ use alloy_primitives::{map::HashMap, Address}; -use reth_chainspec::{ChainSpec, EthereumHardforks}; +use reth_chainspec::EthereumHardforks; use reth_consensus_common::calc; use reth_primitives::{Block, Withdrawal, Withdrawals, U256}; @@ -8,7 +8,7 @@ use reth_primitives::{Block, Withdrawal, Withdrawals, U256}; /// Balance changes might include the block reward, uncle rewards, withdrawals, or irregular /// state changes (DAO fork). #[inline] -pub fn post_block_balance_increments( +pub fn post_block_balance_increments( chain_spec: &ChainSpec, block: &Block, total_difficulty: U256, @@ -89,6 +89,7 @@ pub fn insert_post_block_withdrawals_balance_increments { } impl DbTool { - /// Get an [`Arc`] to the [`ChainSpec`]. + /// Get an [`Arc`] to the underlying chainspec. pub fn chain(&self) -> Arc { self.provider_factory.chain_spec() } @@ -110,7 +109,7 @@ impl DbTool { } } -impl> DbTool { +impl DbTool { /// Takes a DB where the tables have already been created. pub fn new(provider_factory: ProviderFactory) -> eyre::Result { // Disable timeout because we are entering a TUI which might read for a long time. 
We diff --git a/crates/storage/db-common/src/init.rs b/crates/storage/db-common/src/init.rs index 6e94677abfc60..83786153312d9 100644 --- a/crates/storage/db-common/src/init.rs +++ b/crates/storage/db-common/src/init.rs @@ -2,7 +2,7 @@ use alloy_genesis::GenesisAccount; use alloy_primitives::{Address, B256, U256}; -use reth_chainspec::{ChainSpec, EthChainSpec}; +use reth_chainspec::EthChainSpec; use reth_codecs::Compact; use reth_config::config::EtlConfig; use reth_db::tables; @@ -333,7 +333,7 @@ where Provider: DBProvider + BlockNumReader + BlockHashReader - + ChainSpecProvider + + ChainSpecProvider + StageCheckpointWriter + HistoryWriter + HeaderProvider @@ -366,7 +366,7 @@ where debug!(target: "reth::cli", block, - chain=%provider_rw.chain_spec().chain, + chain=%provider_rw.chain_spec().chain(), "Initializing state at block" ); @@ -582,7 +582,7 @@ struct GenesisAccountWithAddress { mod tests { use super::*; use alloy_genesis::Genesis; - use reth_chainspec::{Chain, HOLESKY, MAINNET, SEPOLIA}; + use reth_chainspec::{Chain, ChainSpec, HOLESKY, MAINNET, SEPOLIA}; use reth_db::DatabaseEnv; use reth_db_api::{ cursor::DbCursorRO, diff --git a/crates/transaction-pool/src/maintain.rs b/crates/transaction-pool/src/maintain.rs index baa35edaf6f47..ce37cb2e7a8d2 100644 --- a/crates/transaction-pool/src/maintain.rs +++ b/crates/transaction-pool/src/maintain.rs @@ -13,7 +13,7 @@ use futures_util::{ FutureExt, Stream, StreamExt, }; use reth_chain_state::CanonStateNotification; -use reth_chainspec::{ChainSpec, ChainSpecProvider}; +use reth_chainspec::{ChainSpecProvider, EthChainSpec}; use reth_execution_types::ChangedAccount; use reth_fs_util::FsPathError; use reth_primitives::{ @@ -74,12 +74,7 @@ pub fn maintain_transaction_pool_future( config: MaintainPoolConfig, ) -> BoxFuture<'static, ()> where - Client: StateProviderFactory - + BlockReaderIdExt - + ChainSpecProvider - + Clone - + Send - + 'static, + Client: StateProviderFactory + BlockReaderIdExt + ChainSpecProvider + Clone + Send + 'static, P: TransactionPoolExt + 'static, St: Stream + Send + Unpin + 'static, Tasks: TaskSpawner + 'static, @@ -100,12 +95,7 @@ pub async fn maintain_transaction_pool( task_spawner: Tasks, config: MaintainPoolConfig, ) where - Client: StateProviderFactory - + BlockReaderIdExt - + ChainSpecProvider - + Clone - + Send - + 'static, + Client: StateProviderFactory + BlockReaderIdExt + ChainSpecProvider + Clone + Send + 'static, P: TransactionPoolExt + 'static, St: Stream + Send + Unpin + 'static, Tasks: TaskSpawner + 'static, From 4850c298f0e3060bac4055947135e16c6bfc08b9 Mon Sep 17 00:00:00 2001 From: Skanda Bhat Date: Sat, 28 Sep 2024 18:53:05 +0530 Subject: [PATCH 62/84] feat: mev_simBundle (#11252) Co-authored-by: Matthias Seitz --- crates/rpc/rpc/src/eth/bundle.rs | 1 - crates/rpc/rpc/src/eth/mod.rs | 1 + crates/rpc/rpc/src/eth/sim_bundle.rs | 76 ++++++++++++++++++++++++++++ 3 files changed, 77 insertions(+), 1 deletion(-) create mode 100644 crates/rpc/rpc/src/eth/sim_bundle.rs diff --git a/crates/rpc/rpc/src/eth/bundle.rs b/crates/rpc/rpc/src/eth/bundle.rs index 2d169f1c540a9..bede4599e1a1b 100644 --- a/crates/rpc/rpc/src/eth/bundle.rs +++ b/crates/rpc/rpc/src/eth/bundle.rs @@ -26,7 +26,6 @@ use reth_rpc_eth_api::{ EthCallBundleApiServer, }; use reth_rpc_eth_types::{utils::recover_raw_transaction, EthApiError, RpcInvalidTransactionError}; - /// `Eth` bundle implementation. pub struct EthBundle { /// All nested fields bundled together. 
diff --git a/crates/rpc/rpc/src/eth/mod.rs b/crates/rpc/rpc/src/eth/mod.rs index 83db119f88041..99919110da7b9 100644 --- a/crates/rpc/rpc/src/eth/mod.rs +++ b/crates/rpc/rpc/src/eth/mod.rs @@ -5,6 +5,7 @@ pub mod core; pub mod filter; pub mod helpers; pub mod pubsub; +pub mod sim_bundle; /// Implementation of `eth` namespace API. pub use bundle::EthBundle; diff --git a/crates/rpc/rpc/src/eth/sim_bundle.rs b/crates/rpc/rpc/src/eth/sim_bundle.rs new file mode 100644 index 0000000000000..46dbb45d962e1 --- /dev/null +++ b/crates/rpc/rpc/src/eth/sim_bundle.rs @@ -0,0 +1,76 @@ +//! `Eth` Sim bundle implementation and helpers. + +use std::sync::Arc; + +use alloy_rpc_types_mev::{SendBundleRequest, SimBundleOverrides, SimBundleResponse}; +use jsonrpsee::core::RpcResult; +use reth_rpc_api::MevSimApiServer; +use reth_rpc_eth_api::helpers::{Call, EthTransactions, LoadPendingBlock}; +use reth_rpc_eth_types::EthApiError; +use reth_tasks::pool::BlockingTaskGuard; +use tracing::info; + +/// `Eth` sim bundle implementation. +pub struct EthSimBundle { + /// All nested fields bundled together. + inner: Arc>, +} + +impl EthSimBundle { + /// Create a new `EthSimBundle` instance. + pub fn new(eth_api: Eth, blocking_task_guard: BlockingTaskGuard) -> Self { + Self { inner: Arc::new(EthSimBundleInner { eth_api, blocking_task_guard }) } + } +} + +impl EthSimBundle +where + Eth: EthTransactions + LoadPendingBlock + Call + 'static, +{ + /// Simulates a bundle of transactions. + pub async fn sim_bundle( + &self, + request: SendBundleRequest, + overrides: SimBundleOverrides, + ) -> RpcResult { + info!("mev_simBundle called, request: {:?}, overrides: {:?}", request, overrides); + Err(EthApiError::Unsupported("mev_simBundle is not supported").into()) + } +} + +#[async_trait::async_trait] +impl MevSimApiServer for EthSimBundle +where + Eth: EthTransactions + LoadPendingBlock + Call + 'static, +{ + async fn sim_bundle( + &self, + request: SendBundleRequest, + overrides: SimBundleOverrides, + ) -> RpcResult { + Self::sim_bundle(self, request, overrides).await + } +} + +/// Container type for `EthSimBundle` internals +#[derive(Debug)] +struct EthSimBundleInner { + /// Access to commonly used code of the `eth` namespace + #[allow(dead_code)] + eth_api: Eth, + // restrict the number of concurrent tracing calls. 
+ #[allow(dead_code)] + blocking_task_guard: BlockingTaskGuard, +} + +impl std::fmt::Debug for EthSimBundle { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("EthSimBundle").finish_non_exhaustive() + } +} + +impl Clone for EthSimBundle { + fn clone(&self) -> Self { + Self { inner: Arc::clone(&self.inner) } + } +} From 1bead52d57c7abc0147e455c2af0e55a56af8e66 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 28 Sep 2024 15:37:40 +0200 Subject: [PATCH 63/84] chore: relax trait bounds on transact fns (#11310) --- crates/evm/src/system_calls/eip2935.rs | 2 +- crates/evm/src/system_calls/eip4788.rs | 2 +- crates/evm/src/system_calls/eip7002.rs | 2 +- crates/evm/src/system_calls/eip7251.rs | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/evm/src/system_calls/eip2935.rs b/crates/evm/src/system_calls/eip2935.rs index 7b09a4813079c..9dcec89f4db3a 100644 --- a/crates/evm/src/system_calls/eip2935.rs +++ b/crates/evm/src/system_calls/eip2935.rs @@ -74,7 +74,7 @@ pub fn transact_blockhashes_contract_call( evm: &mut Evm<'_, EXT, DB>, ) -> Result, BlockExecutionError> where - DB: Database + DatabaseCommit, + DB: Database, DB::Error: core::fmt::Display, EvmConfig: ConfigureEvm
, { diff --git a/crates/evm/src/system_calls/eip4788.rs b/crates/evm/src/system_calls/eip4788.rs index a1f97bf5e9087..6352a53070f6c 100644 --- a/crates/evm/src/system_calls/eip4788.rs +++ b/crates/evm/src/system_calls/eip4788.rs @@ -70,7 +70,7 @@ pub fn transact_beacon_root_contract_call( evm: &mut Evm<'_, EXT, DB>, ) -> Result, BlockExecutionError> where - DB: Database + DatabaseCommit, + DB: Database, DB::Error: core::fmt::Display, EvmConfig: ConfigureEvm
, Spec: EthereumHardforks, diff --git a/crates/evm/src/system_calls/eip7002.rs b/crates/evm/src/system_calls/eip7002.rs index d03268254f43d..f0149943e495e 100644 --- a/crates/evm/src/system_calls/eip7002.rs +++ b/crates/evm/src/system_calls/eip7002.rs @@ -55,7 +55,7 @@ pub fn transact_withdrawal_requests_contract_call( evm: &mut Evm<'_, EXT, DB>, ) -> Result where - DB: Database + DatabaseCommit, + DB: Database, DB::Error: core::fmt::Display, EvmConfig: ConfigureEvm
, { diff --git a/crates/evm/src/system_calls/eip7251.rs b/crates/evm/src/system_calls/eip7251.rs index 8247f06b18414..f5a7dca14bf91 100644 --- a/crates/evm/src/system_calls/eip7251.rs +++ b/crates/evm/src/system_calls/eip7251.rs @@ -56,7 +56,7 @@ pub fn transact_consolidation_requests_contract_call( evm: &mut Evm<'_, EXT, DB>, ) -> Result where - DB: Database + DatabaseCommit, + DB: Database, DB::Error: core::fmt::Display, EvmConfig: ConfigureEvm
, { From 6828bbacc9c94759b3768dc37e624c505efb3571 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 28 Sep 2024 18:05:50 +0200 Subject: [PATCH 64/84] chore: use core::error (#11313) --- crates/primitives-traits/src/error.rs | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/crates/primitives-traits/src/error.rs b/crates/primitives-traits/src/error.rs index ef088a920e159..97b33bd618aec 100644 --- a/crates/primitives-traits/src/error.rs +++ b/crates/primitives-traits/src/error.rs @@ -19,8 +19,7 @@ impl fmt::Display for GotExpected { } } -#[cfg(feature = "std")] -impl std::error::Error for GotExpected {} +impl core::error::Error for GotExpected {} impl From<(T, T)> for GotExpected { #[inline] @@ -57,8 +56,7 @@ impl fmt::Display for GotExpectedBoxed { } } -#[cfg(feature = "std")] -impl std::error::Error for GotExpectedBoxed {} +impl core::error::Error for GotExpectedBoxed {} impl Deref for GotExpectedBoxed { type Target = GotExpected; From 65f7e883e79e869fc2b6f08e57abb105bc29ebb7 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 28 Sep 2024 18:06:17 +0200 Subject: [PATCH 65/84] chore: rm ToRpcError usage from engine crate (#11311) --- Cargo.lock | 1 - crates/rpc/rpc-engine-api/Cargo.toml | 1 - crates/rpc/rpc-engine-api/src/error.rs | 11 +++++------ 3 files changed, 5 insertions(+), 8 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d402a96b58c61..6c1e6122028df 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8635,7 +8635,6 @@ dependencies = [ "reth-primitives", "reth-provider", "reth-rpc-api", - "reth-rpc-types", "reth-rpc-types-compat", "reth-storage-api", "reth-tasks", diff --git a/crates/rpc/rpc-engine-api/Cargo.toml b/crates/rpc/rpc-engine-api/Cargo.toml index 2e5ec0af8ce54..4463d375a0345 100644 --- a/crates/rpc/rpc-engine-api/Cargo.toml +++ b/crates/rpc/rpc-engine-api/Cargo.toml @@ -16,7 +16,6 @@ workspace = true reth-chainspec.workspace = true reth-primitives.workspace = true reth-rpc-api.workspace = true -reth-rpc-types.workspace = true reth-storage-api.workspace = true reth-beacon-consensus.workspace = true reth-payload-builder.workspace = true diff --git a/crates/rpc/rpc-engine-api/src/error.rs b/crates/rpc/rpc-engine-api/src/error.rs index 8e86af4c9c435..56491916188d4 100644 --- a/crates/rpc/rpc-engine-api/src/error.rs +++ b/crates/rpc/rpc-engine-api/src/error.rs @@ -4,7 +4,6 @@ use jsonrpsee_types::error::{ }; use reth_beacon_consensus::{BeaconForkChoiceUpdateError, BeaconOnNewPayloadError}; use reth_payload_primitives::{EngineObjectValidationError, PayloadBuilderError}; -use reth_rpc_types::ToRpcError; use thiserror::Error; /// The Engine API result type @@ -92,15 +91,15 @@ pub enum EngineApiError { /// The payload or attributes are known to be malformed before processing. #[error(transparent)] EngineObjectValidationError(#[from] EngineObjectValidationError), - /// Any other error + /// Any other rpc error #[error("{0}")] - Other(Box), + Other(jsonrpsee_types::ErrorObject<'static>), } impl EngineApiError { /// Crates a new [`EngineApiError::Other`] variant. 
- pub fn other(err: E) -> Self { - Self::Other(Box::new(err)) + pub const fn other(err: jsonrpsee_types::ErrorObject<'static>) -> Self { + Self::Other(err) } } @@ -187,7 +186,7 @@ impl From for jsonrpsee_types::error::ErrorObject<'static> { SERVER_ERROR_MSG, Some(ErrorData::new(error)), ), - EngineApiError::Other(err) => err.to_rpc_error(), + EngineApiError::Other(err) => err, } } } From 3297dcb4eb0e26eeac7940ec7adcd64b8ee19fbc Mon Sep 17 00:00:00 2001 From: Caio Date: Sun, 29 Sep 2024 06:23:18 -0300 Subject: [PATCH 66/84] Remove unused dependencies (#11320) --- Cargo.lock | 55 ------------------------------------------------------ Cargo.toml | 6 +++--- 2 files changed, 3 insertions(+), 58 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6c1e6122028df..a9c3601f55f72 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1481,17 +1481,6 @@ dependencies = [ "tinyvec", ] -[[package]] -name = "bstr" -version = "1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40723b8fb387abc38f4f4a37c09073622e41dd12327033091ef8950659e6dc0c" -dependencies = [ - "memchr", - "regex-automata 0.4.7", - "serde", -] - [[package]] name = "bumpalo" version = "3.16.0" @@ -2577,12 +2566,6 @@ version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" -[[package]] -name = "endian-type" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c34f04666d835ff5d62e058c3995147c06f42fe86ff053337632bca83e42702d" - [[package]] name = "enr" version = "0.12.1" @@ -4694,16 +4677,12 @@ version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4259040465c955f9f2f1a4a8a16dc46726169bca0f88e8fb2dbeced487c3e828" dependencies = [ - "aho-corasick", "crossbeam-epoch", "crossbeam-utils", "hashbrown 0.14.5", - "indexmap 2.5.0", "metrics", "num_cpus", - "ordered-float", "quanta", - "radix_trie", "sketches-ddsketch", ] @@ -4876,15 +4855,6 @@ dependencies = [ "unsigned-varint", ] -[[package]] -name = "nibble_vec" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77a5d83df9f36fe23f0c3648c6bbb8b0298bb5f1939c8f2704431371f4b84d43" -dependencies = [ - "smallvec", -] - [[package]] name = "nix" version = "0.26.4" @@ -5237,15 +5207,6 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" -[[package]] -name = "ordered-float" -version = "4.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a91171844676f8c7990ce64959210cd2eaef32c2612c50f9fae9f8aaa6065a6" -dependencies = [ - "num-traits", -] - [[package]] name = "overload" version = "0.1.1" @@ -5895,16 +5856,6 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" -[[package]] -name = "radix_trie" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c069c179fcdc6a2fe24d8d18305cf085fdbd4f922c041943e203685d6a1c58fd" -dependencies = [ - "endian-type", - "nibble_vec", -] - [[package]] name = "rand" version = "0.7.3" @@ -9812,8 +9763,6 @@ version = "3.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4b4b487fe2acf240a021cf57c6b2b4903b1e78ca0ecd862a71b71d2a51fed77d" dependencies = [ - "futures", - "log", "once_cell", "parking_lot 
0.12.3", "scc", @@ -9955,10 +9904,6 @@ name = "similar" version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1de1d4f81173b03af4c0cbed3c898f6bff5b870e4a7f5d6f4057d62a7a4b686e" -dependencies = [ - "bstr", - "unicode-segmentation", -] [[package]] name = "similar-asserts" diff --git a/Cargo.toml b/Cargo.toml index 7387029568d7e..5beca0009b8c5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -519,7 +519,7 @@ zstd = "0.13" metrics = "0.23.0" metrics-exporter-prometheus = { version = "0.15.0", default-features = false } metrics-process = "2.1.0" -metrics-util = "0.17.0" +metrics-util = { default-features = false, version = "0.17.0" } # proc-macros proc-macro2 = "1.0" @@ -581,8 +581,8 @@ iai-callgrind = "0.11" pprof = "0.13" proptest = "1.4" proptest-derive = "0.5" -serial_test = "3" -similar-asserts = "1.5.0" +serial_test = { default-features = false, version = "3" } +similar-asserts = { default-features = false, version = "1.5.0" } tempfile = "3.8" test-fuzz = "5" From d94462bfefb82f9aa96707659c2d0aa7f72ba23b Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 29 Sep 2024 09:54:20 +0000 Subject: [PATCH 67/84] chore(deps): weekly `cargo update` (#11319) Co-authored-by: github-merge-queue <118344674+github-merge-queue@users.noreply.github.com> Co-authored-by: Matthias Seitz --- Cargo.lock | 275 +++++++++++++++++++++++++++-------------------------- 1 file changed, 140 insertions(+), 135 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a9c3601f55f72..124b086497b76 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -97,9 +97,9 @@ checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" [[package]] name = "alloy-chains" -version = "0.1.33" +version = "0.1.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "805f7a974de5804f5c053edc6ca43b20883bdd3a733b3691200ae3a4b454a2db" +checksum = "8158b4878c67837e5413721cc44298e6a2d88d39203175ea025e51892a16ba4c" dependencies = [ "alloy-rlp", "arbitrary", @@ -126,9 +126,9 @@ dependencies = [ [[package]] name = "alloy-dyn-abi" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4004925bff5ba0a11739ae84dbb6601a981ea692f3bd45b626935ee90a6b8471" +checksum = "0b499852e1d0e9b8c6db0f24c48998e647c0d5762a01090f955106a7700e4611" dependencies = [ "alloy-json-abi", "alloy-primitives", @@ -201,9 +201,9 @@ dependencies = [ [[package]] name = "alloy-json-abi" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9996daf962fd0a90d3c93b388033228865953b92de7bb1959b891d78750a4091" +checksum = "a438d4486b5d525df3b3004188f9d5cd1d65cd30ecc41e5a3ccef6f6342e8af9" dependencies = [ "alloy-primitives", "alloy-sol-type-parser", @@ -382,7 +382,7 @@ checksum = "4d0f2d905ebd295e7effec65e5f6868d153936130ae718352771de3e7d03c75c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -593,23 +593,23 @@ dependencies = [ [[package]] name = "alloy-sol-macro" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0458ccb02a564228fcd76efb8eb5a520521a8347becde37b402afec9a1b83859" +checksum = "68e7f6e8fe5b443f82b3f1e15abfa191128f71569148428e49449d01f6f49e8b" dependencies = [ "alloy-sol-macro-expander", "alloy-sol-macro-input", "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] name = 
"alloy-sol-macro-expander" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bc65475025fc1e84bf86fc840f04f63fcccdcf3cf12053c99918e4054dfbc69" +checksum = "6b96ce28d2fde09abb6135f410c41fad670a3a770b6776869bd852f1df102e6f" dependencies = [ "alloy-sol-macro-input", "const-hex", @@ -618,31 +618,31 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", "syn-solidity", "tiny-keccak", ] [[package]] name = "alloy-sol-macro-input" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ed10f0715a0b69fde3236ff3b9ae5f6f7c97db5a387747100070d3016b9266b" +checksum = "906746396a8296537745711630d9185746c0b50c033d5e9d18b0a6eba3d53f90" dependencies = [ "const-hex", "dunce", "heck", "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", "syn-solidity", ] [[package]] name = "alloy-sol-type-parser" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3edae8ea1de519ccba896b6834dec874230f72fe695ff3c9c118e90ec7cff783" +checksum = "bc85178909a49c8827ffccfc9103a7ce1767ae66a801b69bdc326913870bf8e6" dependencies = [ "serde", "winnow", @@ -650,9 +650,9 @@ dependencies = [ [[package]] name = "alloy-sol-types" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1eb88e4da0a1b697ed6a9f811fdba223cf4d5c21410804fd1707836af73a462b" +checksum = "d86a533ce22525969661b25dfe296c112d35eb6861f188fd284f8bd4bb3842ae" dependencies = [ "alloy-json-abi", "alloy-primitives", @@ -838,7 +838,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -1058,18 +1058,18 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] name = "async-trait" -version = "0.1.82" +version = "0.1.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a27b8a3a6e1a44fa4c8baf1f653e4172e81486d4941f2237e20dc2d0cf4ddff1" +checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -1107,14 +1107,14 @@ checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] name = "autocfg" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "backon" @@ -1217,7 +1217,7 @@ dependencies = [ "regex", "rustc-hash 1.1.0", "shlex", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -1399,7 +1399,7 @@ checksum = "240f4126219a83519bad05c9a40bfc0303921eeb571fc2d7e44c17ffac99d3f1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", "synstructure", ] @@ -1510,7 +1510,7 @@ checksum = "0cc8b54b395f2fcfbb3d90c47b01c7f444d94d05bdeb775811dec868ac3bbc26" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -1598,9 +1598,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.1.21" +version = "1.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"07b1695e2c7e8fc85310cde85aeaab7e3097f593c91d209d3f9df76c928100f0" +checksum = "9540e661f81799159abee814118cc139a2004b3a3aa3ea37724a1b66530b90e0" dependencies = [ "jobserver", "libc", @@ -1722,7 +1722,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -2159,7 +2159,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -2183,7 +2183,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -2194,7 +2194,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -2314,7 +2314,7 @@ checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -2335,7 +2335,7 @@ dependencies = [ "convert_case", "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", "unicode-xid", ] @@ -2449,7 +2449,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -2595,7 +2595,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -2606,7 +2606,7 @@ checksum = "2f9ed6b3789237c8a0c1c505af1c7eb2c560df6186f01b098c3a1064ea532f38" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -3038,9 +3038,9 @@ dependencies = [ [[package]] name = "flate2" -version = "1.0.33" +version = "1.0.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "324a1be68054ef05ad64b861cc9eaf1d623d2d8cb25b4bf2cb9cdd902b4bf253" +checksum = "a1b589b4dc103969ad3cf85c950899926ec64300a1a46d76c03a6072957036f0" dependencies = [ "crc32fast", "miniz_oxide", @@ -3153,7 +3153,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -3626,9 +3626,9 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da62f120a8a37763efb0cf8fdf264b884c7b8b9ac8660b900c8661030c00e6ba" +checksum = "41296eb09f183ac68eec06e03cdbea2e759633d4067b2f6552fc2e009bcad08b" dependencies = [ "bytes", "futures-channel", @@ -3639,7 +3639,6 @@ dependencies = [ "pin-project-lite", "socket2 0.5.7", "tokio", - "tower 0.4.13", "tower-service", "tracing", ] @@ -3664,7 +3663,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -3814,7 +3813,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -4028,9 +4027,9 @@ checksum = "187674a687eed5fe42285b40c6291f9a01517d415fad1c3cbc6a9f778af7fcd4" [[package]] name = "iri-string" -version = "0.7.5" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c25163201be6ded9e686703e85532f8f852ea1f92ba625cb3c51f7fe6d07a4a" +checksum = "44bd7eced44cfe2cebc674adb2a7124a754a4b5269288d22e9f39f8fada3562d" dependencies = [ "memchr", "serde", @@ -4126,9 +4125,9 @@ dependencies = [ [[package]] name = "jsonrpsee" -version = "0.24.4" +version = "0.24.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fd1ead9fb95614e8dc5556d12a8681c2f6d352d0c1d3efc8708c7ccbba47bc6" +checksum = "126b48a5acc3c52fbd5381a77898cb60e145123179588a29e7ac48f9c06e401b" dependencies = [ "jsonrpsee-client-transport", "jsonrpsee-core", @@ -4144,9 +4143,9 @@ dependencies = [ [[package]] name = "jsonrpsee-client-transport" -version = "0.24.4" +version = "0.24.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89841d4f03a14c055eb41d4f41901819573ef948e8ee0d5c86466fd286b2ce7f" +checksum = "bf679a8e0e083c77997f7c4bb4ca826577105906027ae462aac70ff348d02c6a" dependencies = [ "base64 0.22.1", "futures-channel", @@ -4169,9 +4168,9 @@ dependencies = [ [[package]] name = "jsonrpsee-core" -version = "0.24.4" +version = "0.24.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff79651479f69ada7bda604ef2acf3f1aa50755d97cc36d25ff04c2664f9d96f" +checksum = "b0e503369a76e195b65af35058add0e6900b794a4e9a9316900ddd3a87a80477" dependencies = [ "async-trait", "bytes", @@ -4196,9 +4195,9 @@ dependencies = [ [[package]] name = "jsonrpsee-http-client" -version = "0.24.4" +version = "0.24.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68ed8b301b19f4dad8ddc66ed956a70fc227def5c19b3898e0a29ce8f0edee06" +checksum = "f2c0caba4a6a8efbafeec9baa986aa22a75a96c29d3e4b0091b0098d6470efb5" dependencies = [ "async-trait", "base64 0.22.1", @@ -4221,22 +4220,22 @@ dependencies = [ [[package]] name = "jsonrpsee-proc-macros" -version = "0.24.4" +version = "0.24.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0d4c6bec4909c966f59f52db3655c0e9d4685faae8b49185973d9d7389bb884" +checksum = "fc660a9389e2748e794a40673a4155d501f32db667757cdb80edeff0306b489b" dependencies = [ "heck", "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] name = "jsonrpsee-server" -version = "0.24.4" +version = "0.24.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebe2198e5fd96cf2153ecc123364f699b6e2151317ea09c7bf799c43c2fe1415" +checksum = "af6e6c9b6d975edcb443565d648b605f3e85a04ec63aa6941811a8894cc9cded" dependencies = [ "futures-util", "http", @@ -4261,9 +4260,9 @@ dependencies = [ [[package]] name = "jsonrpsee-types" -version = "0.24.4" +version = "0.24.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "531e386460425e49679587871a056f2895a47dade21457324ad1262cd78ef6d9" +checksum = "d8fb16314327cbc94fdf7965ef7e4422509cd5597f76d137bd104eb34aeede67" dependencies = [ "http", "serde", @@ -4273,9 +4272,9 @@ dependencies = [ [[package]] name = "jsonrpsee-wasm-client" -version = "0.24.4" +version = "0.24.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a2d2206c8f04c6b79a11bd1d92d6726b6f7fd3dec57c91e07fa53e867268bbb" +checksum = "e0da62b43702bd5640ea305d35df95da30abc878e79a7b4b01feda3beaf35d3c" dependencies = [ "jsonrpsee-client-transport", "jsonrpsee-core", @@ -4284,9 +4283,9 @@ dependencies = [ [[package]] name = "jsonrpsee-ws-client" -version = "0.24.4" +version = "0.24.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87bc869e143d430e748988261d19b630e8f1692074e68f1a7f0eb4c521d2fc58" +checksum = "39aabf5d6c6f22da8d5b808eea1fab0736059f11fb42f71f141b14f404e5046a" dependencies = [ "http", "jsonrpsee-client-transport", @@ -4395,9 +4394,9 @@ checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" [[package]] name = "libc" 
-version = "0.2.158" +version = "0.2.159" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439" +checksum = "561d97a539a36e26a9a5fad1ea11a3039a67714694aaa379433e580854bc3dc5" [[package]] name = "libloading" @@ -4453,7 +4452,7 @@ checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ "bitflags 2.6.0", "libc", - "redox_syscall 0.5.4", + "redox_syscall 0.5.6", ] [[package]] @@ -4785,7 +4784,7 @@ dependencies = [ "cfg-if", "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -4817,9 +4816,9 @@ checksum = "1fafa6961cabd9c63bcd77a45d7e3b7f3b552b70417831fb0f56db717e72407e" [[package]] name = "multiaddr" -version = "0.18.1" +version = "0.18.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b852bc02a2da5feed68cd14fa50d0774b92790a5bdbfa932a813926c8472070" +checksum = "fe6351f60b488e04c1d21bc69e56b89cb3f5e8f5d22557d6e8031bdfd79b6961" dependencies = [ "arrayref", "byteorder", @@ -4830,7 +4829,7 @@ dependencies = [ "percent-encoding", "serde", "static_assertions", - "unsigned-varint", + "unsigned-varint 0.8.0", "url", ] @@ -4852,7 +4851,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "076d548d76a0e2a0d4ab471d0b1c36c577786dfc4471242035d97a12a735c492" dependencies = [ "core2", - "unsigned-varint", + "unsigned-varint 0.7.2", ] [[package]] @@ -5032,7 +5031,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -5311,7 +5310,7 @@ checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.5.4", + "redox_syscall 0.5.6", "smallvec", "windows-targets 0.52.6", ] @@ -5399,7 +5398,7 @@ dependencies = [ "phf_shared", "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -5428,7 +5427,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -5455,9 +5454,9 @@ dependencies = [ [[package]] name = "pkg-config" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" +checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" [[package]] name = "plain_hasher" @@ -5516,9 +5515,9 @@ dependencies = [ [[package]] name = "portable-atomic" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d30538d42559de6b034bc76fd6dd4c38961b1ee5c6c56e3808c50128fdbc22ce" +checksum = "cc9c68a3f6da06753e9335d63e27f6b9754dd1920d941135b7ea8224f141adb2" [[package]] name = "powerfmt" @@ -5600,7 +5599,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "479cf940fbbb3426c32c5d5176f62ad57549a0bb84773423ba8be9d089f5faba" dependencies = [ "proc-macro2", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -5675,7 +5674,7 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -5751,7 +5750,7 @@ checksum = "6ff7ff745a347b87471d859a377a9a404361e7efc2a971d73424a6d183c0fc77" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -6004,9 +6003,9 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.4" +version = "0.5.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0884ad60e090bf1345b93da0a5de8923c93884cd03f40dfcfddd3b4bee661853" +checksum = "355ae415ccd3a04315d3f8246e86d67689ea74d88d915576e1589a351062a13b" dependencies = [ "bitflags 2.6.0", ] @@ -6567,7 +6566,7 @@ dependencies = [ "proc-macro2", "quote", "similar-asserts", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -7505,7 +7504,7 @@ dependencies = [ "quote", "regex", "serial_test", - "syn 2.0.77", + "syn 2.0.79", "trybuild", ] @@ -9124,9 +9123,9 @@ dependencies = [ [[package]] name = "revm-inspectors" -version = "0.7.6" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b57b33a24b5b8b8efa1da3f60d44f02d6e649f06ef925d7780723ff14ff55321" +checksum = "cd8e3bae0d5c824da0ac883e2521c5e83870d6521eeeccd4ee54266aa3cc1a51" dependencies = [ "alloy-primitives", "alloy-rpc-types-eth", @@ -9426,9 +9425,9 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc0a2ce646f8655401bb81e7927b812614bd5d91dbc968696be50603510fcaf0" +checksum = "0e696e35370c65c9c541198af4543ccd580cf17fc25d8e05c5a242b202488c55" [[package]] name = "rustls-platform-verifier" @@ -9668,7 +9667,7 @@ checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -9703,14 +9702,14 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] name = "serde_spanned" -version = "0.6.7" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb5b1b31579f3811bf615c144393417496f152e12ac8b7663bf664f4a815306d" +checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" dependencies = [ "serde", ] @@ -9754,7 +9753,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -9777,7 +9776,7 @@ checksum = "82fe9db325bcef1fbcde82e078a5cc4efdf787e96b3b9cf45b50b529f2083d67" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -10032,7 +10031,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d904e7009df136af5297832a3ace3370cd14ff1546a232f4f185036c2736fcac" dependencies = [ "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -10078,7 +10077,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -10136,9 +10135,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.77" +version = "2.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f35bcdf61fd8e7be6caf75f429fdca8beb3ed76584befb503b1569faee373ed" +checksum = "89132cd0bf050864e1d38dc3bbc07a0eb8e7530af26344d3d2bbbef83499f590" dependencies = [ "proc-macro2", "quote", @@ -10147,14 +10146,14 @@ dependencies = [ [[package]] name = "syn-solidity" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b95156f8b577cb59dc0b1df15c6f29a10afc5f8a7ac9786b0b5c68c19149278" +checksum = "0ab661c8148c2261222a4d641ad5477fd4bea79406a99056096a0b41b35617a5" dependencies = [ "paste", "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -10180,7 +10179,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" 
dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -10205,9 +10204,9 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tempfile" -version = "3.12.0" +version = "3.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64" +checksum = "f0f2c9fc62d0beef6951ccffd757e241266a2c833136efbe35af6cd2567dca5b" dependencies = [ "cfg-if", "fastrand 2.1.1", @@ -10266,7 +10265,7 @@ dependencies = [ "prettyplease", "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -10305,7 +10304,7 @@ checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -10482,7 +10481,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -10562,9 +10561,9 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.22.21" +version = "0.22.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b072cee73c449a636ffd6f32bd8de3a9f7119139aff882f44943ce2986dc5cf" +checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" dependencies = [ "indexmap 2.5.0", "serde", @@ -10683,7 +10682,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -10993,6 +10992,12 @@ version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6889a77d49f1f013504cec6bf97a2c730394adedaeb1deb5ea08949a50541105" +[[package]] +name = "unsigned-varint" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb066959b24b5196ae73cb057f45598450d2c5f71460e98c49b738086eff9c06" + [[package]] name = "untrusted" version = "0.9.0" @@ -11138,7 +11143,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", "wasm-bindgen-shared", ] @@ -11172,7 +11177,7 @@ checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -11185,9 +11190,9 @@ checksum = "c62a0a307cb4a311d3a07867860911ca130c3494e8c2719593806c08bc5d0484" [[package]] name = "wasm-streams" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b65dc4c90b63b118468cf747d8bf3566c1913ef60be765b5730ead9e0a3ba129" +checksum = "4e072d4e72f700fb3443d8fe94a39315df013eef1104903cdb0a2abd322bbecd" dependencies = [ "futures-util", "js-sys", @@ -11324,7 +11329,7 @@ checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -11335,7 +11340,7 @@ checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -11346,7 +11351,7 @@ checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -11357,7 +11362,7 @@ checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" dependencies = [ "proc-macro2", "quote", - 
"syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -11549,9 +11554,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.6.18" +version = "0.6.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f" +checksum = "36c1fec1a2bb5866f07c25f68c26e565c4c200aebb96d7e55710c19d3e8ac49b" dependencies = [ "memchr", ] @@ -11632,7 +11637,7 @@ checksum = "28cc31741b18cb6f1d5ff12f5b7523e3d6eb0852bbbad19d73905511d9849b95" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", "synstructure", ] @@ -11654,7 +11659,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -11674,7 +11679,7 @@ checksum = "0ea7b4a3637ea8669cedf0f1fd5c286a17f3de97b8dd5a70a6c167a1730e63a5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", "synstructure", ] @@ -11695,7 +11700,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -11717,7 +11722,7 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] From ce1f669b9bbef641e90d9275a712882513a36449 Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Sun, 29 Sep 2024 13:29:12 +0300 Subject: [PATCH 68/84] feat(exex): finalize WAL only when all ExExes are on the canonical chain (#11289) --- Cargo.lock | 2 + crates/exex/exex/Cargo.toml | 2 + crates/exex/exex/src/manager.rs | 254 +++++++++++++++++++------ crates/exex/exex/src/notifications.rs | 2 +- crates/exex/exex/src/wal/mod.rs | 6 - crates/node/builder/src/launch/exex.rs | 1 + 6 files changed, 203 insertions(+), 64 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 124b086497b76..9954a4c655f76 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7326,8 +7326,10 @@ dependencies = [ "dashmap 6.1.0", "eyre", "futures", + "itertools 0.13.0", "metrics", "parking_lot 0.12.3", + "rand 0.8.5", "reth-blockchain-tree", "reth-chain-state", "reth-chainspec", diff --git a/crates/exex/exex/Cargo.toml b/crates/exex/exex/Cargo.toml index f10775e245872..0e76ef3d40df9 100644 --- a/crates/exex/exex/Cargo.toml +++ b/crates/exex/exex/Cargo.toml @@ -44,6 +44,7 @@ tokio.workspace = true ## misc dashmap.workspace = true eyre.workspace = true +itertools.workspace = true metrics.workspace = true parking_lot.workspace = true serde_json.workspace = true @@ -62,6 +63,7 @@ reth-testing-utils.workspace = true alloy-genesis.workspace = true alloy-consensus.workspace = true +rand.workspace = true secp256k1.workspace = true tempfile.workspace = true diff --git a/crates/exex/exex/src/manager.rs b/crates/exex/exex/src/manager.rs index ada9e7a4b315b..8c52bc6590abc 100644 --- a/crates/exex/exex/src/manager.rs +++ b/crates/exex/exex/src/manager.rs @@ -2,16 +2,19 @@ use crate::{ wal::Wal, ExExEvent, ExExNotification, ExExNotifications, FinishedExExHeight, WalHandle, }; use futures::StreamExt; +use itertools::Itertools; use metrics::Gauge; use reth_chain_state::ForkChoiceStream; use reth_chainspec::Head; use reth_metrics::{metrics::Counter, Metrics}; use reth_primitives::{BlockNumHash, SealedHeader}; +use reth_provider::HeaderProvider; use reth_tracing::tracing::debug; use std::{ collections::VecDeque, fmt::Debug, future::{poll_fn, 
Future}, + ops::Not, pin::Pin, sync::{ atomic::{AtomicUsize, Ordering}, @@ -183,7 +186,10 @@ pub struct ExExManagerMetrics { /// - Error handling /// - Monitoring #[derive(Debug)] -pub struct ExExManager { +pub struct ExExManager
<P>
{ + /// Provider for querying headers. + provider: P, + /// Handles to communicate with the `ExEx`'s. exex_handles: Vec, @@ -223,7 +229,7 @@ pub struct ExExManager { metrics: ExExManagerMetrics, } -impl ExExManager { +impl

<P> ExExManager<P>

{ /// Create a new [`ExExManager`]. /// /// You must provide an [`ExExHandle`] for each `ExEx` and the maximum capacity of the @@ -232,6 +238,7 @@ impl ExExManager { /// When the capacity is exceeded (which can happen if an `ExEx` is slow) no one can send /// notifications over [`ExExManagerHandle`]s until there is capacity again. pub fn new( + provider: P, handles: Vec, max_capacity: usize, wal: Wal, @@ -254,6 +261,8 @@ impl ExExManager { metrics.num_exexs.set(num_exexs as f64); Self { + provider, + exex_handles: handles, handle_rx, @@ -309,83 +318,152 @@ impl ExExManager { } } -impl Future for ExExManager { +impl

<P> ExExManager<P>

+where + P: HeaderProvider, +{ + /// Finalizes the WAL according to the passed finalized header. + /// + /// This function checks if all ExExes are on the canonical chain and finalizes the WAL if + /// necessary. + fn finalize_wal(&self, finalized_header: SealedHeader) -> eyre::Result<()> { + debug!(header = ?finalized_header.num_hash(), "Received finalized header"); + + // Check if all ExExes are on the canonical chain + let exex_finished_heights = self + .exex_handles + .iter() + // Get ExEx ID and hash of the finished height for each ExEx + .map(|exex_handle| { + (&exex_handle.id, exex_handle.finished_height.map(|block| block.hash)) + }) + // Deduplicate all hashes + .unique_by(|(_, hash)| *hash) + // Check if hashes are canonical + .map(|(exex_id, hash)| { + hash.map_or(Ok((exex_id, hash, false)), |hash| { + self.provider + .is_known(&hash) + // Save the ExEx ID, hash of the finished height, and whether the hash + // is canonical + .map(|is_canonical| (exex_id, Some(hash), is_canonical)) + }) + }) + // We collect here to be able to log the unfinalized ExExes below + .collect::, _>>()?; + if exex_finished_heights.iter().all(|(_, _, is_canonical)| *is_canonical) { + // If there is a finalized header and all ExExs are on the canonical chain, finalize + // the WAL with the new finalized header + self.wal.finalize(finalized_header.num_hash())?; + } else { + let unfinalized_exexes = exex_finished_heights + .into_iter() + .filter_map(|(exex_id, hash, is_canonical)| { + is_canonical.not().then_some((exex_id, hash)) + }) + .format_with(", ", |(exex_id, hash), f| f(&format_args!("{exex_id:?} = {hash:?}"))); + debug!( + %unfinalized_exexes, + "Not all ExExes are on the canonical chain, can't finalize the WAL" + ); + } + + Ok(()) + } +} + +impl

<P> Future for ExExManager<P>

+where + P: HeaderProvider + Unpin + 'static, +{ type Output = eyre::Result<()>; - fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { - // Drain the finalized header stream and grab the last finalized header + /// Main loop of the [`ExExManager`]. The order of operations is as follows: + /// 1. Handle incoming ExEx events. We do it before finalizing the WAL, because it depends on + /// the latest state of [`ExExEvent::FinishedHeight`] events. + /// 2. Finalize the WAL with the finalized header, if necessary. + /// 3. Drain [`ExExManagerHandle`] notifications, push them to the internal buffer and update + /// the internal buffer capacity. + /// 5. Send notifications from the internal buffer to those ExExes that are ready to receive new + /// notifications. + /// 5. Remove notifications from the internal buffer that have been sent to **all** ExExes and + /// update the internal buffer capacity. + /// 6. Update the channel with the lowest [`FinishedExExHeight`] among all ExExes. + fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { + let this = self.get_mut(); + + // Handle incoming ExEx events + for exex in &mut this.exex_handles { + while let Poll::Ready(Some(event)) = exex.receiver.poll_recv(cx) { + debug!(exex_id = %exex.id, ?event, "Received event from ExEx"); + exex.metrics.events_sent_total.increment(1); + match event { + ExExEvent::FinishedHeight(height) => exex.finished_height = Some(height), + } + } + } + + // Drain the finalized header stream and finalize the WAL with the last header let mut last_finalized_header = None; - while let Poll::Ready(finalized_header) = self.finalized_header_stream.poll_next_unpin(cx) { + while let Poll::Ready(finalized_header) = this.finalized_header_stream.poll_next_unpin(cx) { last_finalized_header = finalized_header; } - // If there is a finalized header, finalize the WAL with it if let Some(header) = last_finalized_header { - self.wal.finalize((header.number, header.hash()).into())?; + this.finalize_wal(header)?; } - // drain handle notifications - while self.buffer.len() < self.max_capacity { - if let Poll::Ready(Some(notification)) = self.handle_rx.poll_recv(cx) { + // Drain handle notifications + while this.buffer.len() < this.max_capacity { + if let Poll::Ready(Some(notification)) = this.handle_rx.poll_recv(cx) { debug!( committed_tip = ?notification.committed_chain().map(|chain| chain.tip().number), reverted_tip = ?notification.reverted_chain().map(|chain| chain.tip().number), "Received new notification" ); - self.push_notification(notification); + this.push_notification(notification); continue } break } - // update capacity - self.update_capacity(); + // Update capacity + this.update_capacity(); - // advance all poll senders + // Advance all poll senders let mut min_id = usize::MAX; - for idx in (0..self.exex_handles.len()).rev() { - let mut exex = self.exex_handles.swap_remove(idx); + for idx in (0..this.exex_handles.len()).rev() { + let mut exex = this.exex_handles.swap_remove(idx); - // it is a logic error for this to ever underflow since the manager manages the + // It is a logic error for this to ever underflow since the manager manages the // notification IDs let notification_index = exex .next_notification_id - .checked_sub(self.min_id) + .checked_sub(this.min_id) .expect("exex expected notification ID outside the manager's range"); - if let Some(notification) = self.buffer.get(notification_index) { + if let Some(notification) = this.buffer.get(notification_index) { if let Poll::Ready(Err(err)) = exex.send(cx, 
notification) { - // the channel was closed, which is irrecoverable for the manager + // The channel was closed, which is irrecoverable for the manager return Poll::Ready(Err(err.into())) } } min_id = min_id.min(exex.next_notification_id); - self.exex_handles.push(exex); + this.exex_handles.push(exex); } - // remove processed buffered notifications + // Remove processed buffered notifications debug!(%min_id, "Updating lowest notification id in buffer"); - self.buffer.retain(|&(id, _)| id >= min_id); - self.min_id = min_id; + this.buffer.retain(|&(id, _)| id >= min_id); + this.min_id = min_id; - // update capacity - self.update_capacity(); - - // handle incoming exex events - for exex in &mut self.exex_handles { - while let Poll::Ready(Some(event)) = exex.receiver.poll_recv(cx) { - debug!(exex_id = %exex.id, ?event, "Received event from exex"); - exex.metrics.events_sent_total.increment(1); - match event { - ExExEvent::FinishedHeight(height) => exex.finished_height = Some(height), - } - } - } + // Update capacity + this.update_capacity(); - // update watch channel block number - let finished_height = self.exex_handles.iter_mut().try_fold(u64::MAX, |curr, exex| { + // Update watch channel block number + let finished_height = this.exex_handles.iter_mut().try_fold(u64::MAX, |curr, exex| { exex.finished_height.map_or(Err(()), |height| Ok(height.number.min(curr))) }); if let Ok(finished_height) = finished_height { - let _ = self.finished_height.send(FinishedExExHeight::Height(finished_height)); + let _ = this.finished_height.send(FinishedExExHeight::Height(finished_height)); } Poll::Pending @@ -517,8 +595,9 @@ mod tests { use alloy_primitives::B256; use eyre::OptionExt; use futures::StreamExt; + use rand::Rng; use reth_primitives::SealedBlockWithSenders; - use reth_provider::Chain; + use reth_provider::{test_utils::create_test_provider_factory, BlockWriter, Chain}; use reth_testing_utils::generators::{self, random_block}; fn empty_finalized_header_stream() -> ForkChoiceStream { @@ -551,11 +630,11 @@ mod tests { let (exex_handle_1, _, _) = ExExHandle::new("test_exex_1".to_string(), Head::default(), (), (), wal.handle()); - assert!(!ExExManager::new(vec![], 0, wal.clone(), empty_finalized_header_stream()) + assert!(!ExExManager::new((), vec![], 0, wal.clone(), empty_finalized_header_stream()) .handle .has_exexs()); - assert!(ExExManager::new(vec![exex_handle_1], 0, wal, empty_finalized_header_stream()) + assert!(ExExManager::new((), vec![exex_handle_1], 0, wal, empty_finalized_header_stream()) .handle .has_exexs()); } @@ -568,13 +647,19 @@ mod tests { let (exex_handle_1, _, _) = ExExHandle::new("test_exex_1".to_string(), Head::default(), (), (), wal.handle()); - assert!(!ExExManager::new(vec![], 0, wal.clone(), empty_finalized_header_stream()) + assert!(!ExExManager::new((), vec![], 0, wal.clone(), empty_finalized_header_stream()) .handle .has_capacity()); - assert!(ExExManager::new(vec![exex_handle_1], 10, wal, empty_finalized_header_stream()) - .handle - .has_capacity()); + assert!(ExExManager::new( + (), + vec![exex_handle_1], + 10, + wal, + empty_finalized_header_stream() + ) + .handle + .has_capacity()); } #[test] @@ -587,7 +672,7 @@ mod tests { // Create a mock ExExManager and add the exex_handle to it let mut exex_manager = - ExExManager::new(vec![exex_handle], 10, wal, empty_finalized_header_stream()); + ExExManager::new((), vec![exex_handle], 10, wal, empty_finalized_header_stream()); // Define the notification for testing let mut block1 = SealedBlockWithSenders::default(); @@ -637,8 
+722,13 @@ mod tests { // Create a mock ExExManager and add the exex_handle to it let max_capacity = 5; - let mut exex_manager = - ExExManager::new(vec![exex_handle], max_capacity, wal, empty_finalized_header_stream()); + let mut exex_manager = ExExManager::new( + (), + vec![exex_handle], + max_capacity, + wal, + empty_finalized_header_stream(), + ); // Push some notifications to fill part of the buffer let mut block1 = SealedBlockWithSenders::default(); @@ -671,6 +761,8 @@ mod tests { let temp_dir = tempfile::tempdir().unwrap(); let wal = Wal::new(temp_dir.path()).unwrap(); + let provider_factory = create_test_provider_factory(); + let (exex_handle, event_tx, mut _notification_rx) = ExExHandle::new("test_exex".to_string(), Head::default(), (), (), wal.handle()); @@ -683,6 +775,7 @@ mod tests { // Create a mock ExExManager and add the exex_handle to it let exex_manager = ExExManager::new( + provider_factory, vec![exex_handle], 10, Wal::new(temp_dir.path()).unwrap(), @@ -717,6 +810,8 @@ mod tests { let temp_dir = tempfile::tempdir().unwrap(); let wal = Wal::new(temp_dir.path()).unwrap(); + let provider_factory = create_test_provider_factory(); + // Create two `ExExHandle` instances let (exex_handle1, event_tx1, _) = ExExHandle::new("test_exex1".to_string(), Head::default(), (), (), wal.handle()); @@ -731,6 +826,7 @@ mod tests { event_tx2.send(ExExEvent::FinishedHeight(block2)).unwrap(); let exex_manager = ExExManager::new( + provider_factory, vec![exex_handle1, exex_handle2], 10, Wal::new(temp_dir.path()).unwrap(), @@ -761,6 +857,8 @@ mod tests { let temp_dir = tempfile::tempdir().unwrap(); let wal = Wal::new(temp_dir.path()).unwrap(); + let provider_factory = create_test_provider_factory(); + // Create two `ExExHandle` instances let (exex_handle1, event_tx1, _) = ExExHandle::new("test_exex1".to_string(), Head::default(), (), (), wal.handle()); @@ -778,6 +876,7 @@ mod tests { event_tx2.send(ExExEvent::FinishedHeight(block2)).unwrap(); let exex_manager = ExExManager::new( + provider_factory, vec![exex_handle1, exex_handle2], 10, Wal::new(temp_dir.path()).unwrap(), @@ -812,12 +911,15 @@ mod tests { let temp_dir = tempfile::tempdir().unwrap(); let wal = Wal::new(temp_dir.path()).unwrap(); + let provider_factory = create_test_provider_factory(); + let (exex_handle_1, _, _) = ExExHandle::new("test_exex_1".to_string(), Head::default(), (), (), wal.handle()); // Create an ExExManager with a small max capacity let max_capacity = 2; let mut exex_manager = ExExManager::new( + provider_factory, vec![exex_handle_1], max_capacity, Wal::new(temp_dir.path()).unwrap(), @@ -1014,37 +1116,75 @@ mod tests { async fn test_exex_wal_finalize() -> eyre::Result<()> { reth_tracing::init_test_tracing(); + let mut rng = generators::rng(); + let temp_dir = tempfile::tempdir().unwrap(); let mut wal = Wal::new(temp_dir.path()).unwrap(); - let block = random_block(&mut generators::rng(), 0, Default::default()) + let provider_factory = create_test_provider_factory(); + + let block = random_block(&mut rng, 0, Default::default()) .seal_with_senders() .ok_or_eyre("failed to recover senders")?; + let provider_rw = provider_factory.provider_rw()?; + provider_rw.insert_block(block.clone())?; + provider_rw.commit()?; + let notification = ExExNotification::ChainCommitted { new: Arc::new(Chain::new(vec![block.clone()], Default::default(), None)), }; wal.commit(¬ification)?; - let (tx, rx) = watch::channel(None); + let (finalized_headers_tx, rx) = watch::channel(None); let finalized_header_stream = ForkChoiceStream::new(rx); - let 
(exex_handle, _, _) = + let (exex_handle, events_tx, _) = ExExHandle::new("test_exex".to_string(), Head::default(), (), (), wal.handle()); - let mut exex_manager = - std::pin::pin!(ExExManager::new(vec![exex_handle], 1, wal, finalized_header_stream)); + let mut exex_manager = std::pin::pin!(ExExManager::new( + provider_factory, + vec![exex_handle], + 1, + wal, + finalized_header_stream + )); let mut cx = Context::from_waker(futures::task::noop_waker_ref()); assert!(exex_manager.as_mut().poll(&mut cx).is_pending()); + assert_eq!( + exex_manager.wal.iter_notifications()?.collect::>>()?, + [notification.clone()] + ); + + finalized_headers_tx.send(Some(block.header.clone()))?; + assert!(exex_manager.as_mut().poll(&mut cx).is_pending()); + // WAL isn't finalized because the ExEx didn't emit the `FinishedHeight` event + assert_eq!( + exex_manager.wal.iter_notifications()?.collect::>>()?, + [notification.clone()] + ); + + // Send a `FinishedHeight` event with a non-canonical block + events_tx + .send(ExExEvent::FinishedHeight((rng.gen::(), rng.gen::()).into())) + .unwrap(); + + finalized_headers_tx.send(Some(block.header.clone()))?; + assert!(exex_manager.as_mut().poll(&mut cx).is_pending()); + // WAL isn't finalized because the ExEx emitted a `FinishedHeight` event with a + // non-canonical block assert_eq!( exex_manager.wal.iter_notifications()?.collect::>>()?, [notification] ); - tx.send(Some(block.header.clone()))?; + // Send a `FinishedHeight` event with a canonical block + events_tx.send(ExExEvent::FinishedHeight(block.num_hash())).unwrap(); + finalized_headers_tx.send(Some(block.header.clone()))?; assert!(exex_manager.as_mut().poll(&mut cx).is_pending()); + // WAL is finalized assert!(exex_manager.wal.iter_notifications()?.next().is_none()); Ok(()) diff --git a/crates/exex/exex/src/notifications.rs b/crates/exex/exex/src/notifications.rs index 369a0586c0c53..9e9ee78e6cd71 100644 --- a/crates/exex/exex/src/notifications.rs +++ b/crates/exex/exex/src/notifications.rs @@ -179,7 +179,7 @@ where /// If the head block is not found in the database, it means we're not on the canonical chain /// and we need to revert the notification with the ExEx head block. fn check_canonical(&mut self) -> eyre::Result> { - if self.provider.header(&self.exex_head.block.hash)?.is_some() { + if self.provider.is_known(&self.exex_head.block.hash)? { debug!(target: "exex::notifications", "ExEx head is on the canonical chain"); return Ok(None) } diff --git a/crates/exex/exex/src/wal/mod.rs b/crates/exex/exex/src/wal/mod.rs index d7aea3aafdfaa..72e60fe1a3db3 100644 --- a/crates/exex/exex/src/wal/mod.rs +++ b/crates/exex/exex/src/wal/mod.rs @@ -117,12 +117,6 @@ impl WalInner { Ok(()) } - /// Finalizes the WAL to the given block, inclusive. - /// - /// 1. Finds a notification with first unfinalized block (first notification containing a - /// committed block higher than `to_block`). - /// 2. Removes the notifications from the beginning of WAL until the found notification. If this - /// notification includes both finalized and non-finalized blocks, it will not be removed. 
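As a worked example of the finalization rule described just above (illustration only, not part of the patch): if the WAL holds three notifications committing blocks 1-2, 3-4 and 5, then finalizing up to block 3 removes only the first notification; the one covering 3-4 is kept because it also contains the not-yet-finalized block 4, and everything after it stays untouched.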
#[instrument(target = "exex::wal", skip(self))] fn finalize(&self, to_block: BlockNumHash) -> eyre::Result<()> { // First, walk cache to find the file ID of the notification with the finalized block and diff --git a/crates/node/builder/src/launch/exex.rs b/crates/node/builder/src/launch/exex.rs index 816335d3dbdfd..233605ef867de 100644 --- a/crates/node/builder/src/launch/exex.rs +++ b/crates/node/builder/src/launch/exex.rs @@ -109,6 +109,7 @@ impl ExExLauncher { debug!(target: "reth::cli", "spawning exex manager"); // todo(onbjerg): rm magic number let exex_manager = ExExManager::new( + components.provider().clone(), exex_handles, DEFAULT_EXEX_MANAGER_CAPACITY, exex_wal, From b8aeecae6276b7b3475259e1e16383bbea803866 Mon Sep 17 00:00:00 2001 From: Dan Cline <6798349+Rjected@users.noreply.github.com> Date: Sun, 29 Sep 2024 06:32:52 -0400 Subject: [PATCH 69/84] chore: update default payload builder logs (#11315) --- crates/ethereum/payload/src/lib.rs | 6 +++--- crates/optimism/payload/src/builder.rs | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/crates/ethereum/payload/src/lib.rs b/crates/ethereum/payload/src/lib.rs index 2593e83366114..84301617a5939 100644 --- a/crates/ethereum/payload/src/lib.rs +++ b/crates/ethereum/payload/src/lib.rs @@ -180,7 +180,7 @@ where warn!(target: "payload_builder", parent_hash=%parent_block.hash(), %err, - "failed to apply beacon root contract call for empty payload" + "failed to apply beacon root contract call for payload" ); PayloadBuilderError::Internal(err.into()) })?; @@ -195,7 +195,7 @@ where parent_block.hash(), ) .map_err(|err| { - warn!(target: "payload_builder", parent_hash=%parent_block.hash(), %err, "failed to update blockhashes for empty payload"); + warn!(target: "payload_builder", parent_hash=%parent_block.hash(), %err, "failed to update blockhashes for payload"); PayloadBuilderError::Internal(err.into()) })?; @@ -367,7 +367,7 @@ where warn!(target: "payload_builder", parent_hash=%parent_block.hash(), %err, - "failed to calculate state root for empty payload" + "failed to calculate state root for payload" ); })? }; diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index 1c0876b2a726f..ecdbb7e7a3218 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -216,7 +216,7 @@ where warn!(target: "payload_builder", parent_hash=%parent_block.hash(), %err, - "failed to apply beacon root contract call for empty payload" + "failed to apply beacon root contract call for payload" ); PayloadBuilderError::Internal(err.into()) })?; @@ -459,7 +459,7 @@ where warn!(target: "payload_builder", parent_hash=%parent_block.hash(), %err, - "failed to calculate state root for empty payload" + "failed to calculate state root for payload" ); })? 
}; From 55bf29e6d2ee51c84a12a371c9f79d8e07b8d1db Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Sun, 29 Sep 2024 18:47:48 +0200 Subject: [PATCH 70/84] use `core::error::Error` (#11317) --- crates/blockchain-tree-api/src/error.rs | 12 +++---- crates/consensus/beacon/src/engine/error.rs | 8 ++--- .../consensus/beacon/src/engine/hooks/mod.rs | 2 +- crates/errors/src/error.rs | 4 +-- crates/evm/execution-errors/src/lib.rs | 36 +++++++++---------- crates/evm/execution-errors/src/trie.rs | 34 ++++++++---------- crates/fs-util/src/lib.rs | 2 +- crates/payload/primitives/src/error.rs | 8 ++--- crates/payload/primitives/src/traits.rs | 4 +-- crates/primitives/src/transaction/error.rs | 6 ++-- crates/rpc/ipc/src/client/mod.rs | 2 +- crates/rpc/ipc/src/server/connection.rs | 2 +- crates/rpc/ipc/src/server/mod.rs | 12 +++---- crates/rpc/rpc-engine-api/src/error.rs | 2 +- crates/rpc/rpc-eth-types/src/error.rs | 4 +-- crates/rpc/rpc-types/src/eth/error.rs | 2 +- crates/stages/api/src/error.rs | 4 +-- crates/stages/stages/src/test_utils/runner.rs | 2 +- crates/storage/errors/src/db.rs | 10 +++--- crates/storage/errors/src/lockfile.rs | 3 +- crates/storage/errors/src/provider.rs | 13 ++++--- crates/storage/nippy-jar/src/error.rs | 2 +- crates/transaction-pool/src/blobstore/mod.rs | 2 +- crates/transaction-pool/src/error.rs | 13 ++++--- crates/transaction-pool/src/validate/mod.rs | 2 +- 25 files changed, 90 insertions(+), 101 deletions(-) diff --git a/crates/blockchain-tree-api/src/error.rs b/crates/blockchain-tree-api/src/error.rs index 155e57c5eca05..4dd42c889a368 100644 --- a/crates/blockchain-tree-api/src/error.rs +++ b/crates/blockchain-tree-api/src/error.rs @@ -194,8 +194,8 @@ impl std::fmt::Debug for InsertBlockErrorData { } } -impl std::error::Error for InsertBlockErrorData { - fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { +impl core::error::Error for InsertBlockErrorData { + fn source(&self) -> Option<&(dyn core::error::Error + 'static)> { Some(&self.kind) } } @@ -240,8 +240,8 @@ impl std::fmt::Debug for InsertBlockErrorDataTwo { } } -impl std::error::Error for InsertBlockErrorDataTwo { - fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { +impl core::error::Error for InsertBlockErrorDataTwo { + fn source(&self) -> Option<&(dyn core::error::Error + 'static)> { Some(&self.kind) } } @@ -335,7 +335,7 @@ pub enum InsertBlockErrorKindTwo { Provider(#[from] ProviderError), /// Other errors. #[error(transparent)] - Other(#[from] Box), + Other(#[from] Box), } impl InsertBlockErrorKindTwo { @@ -425,7 +425,7 @@ pub enum InsertBlockErrorKind { Provider(#[from] ProviderError), /// An internal error occurred, like interacting with the database. #[error(transparent)] - Internal(#[from] Box), + Internal(#[from] Box), /// Canonical error. #[error(transparent)] Canonical(#[from] CanonicalError), diff --git a/crates/consensus/beacon/src/engine/error.rs b/crates/consensus/beacon/src/engine/error.rs index 4f58b7300d358..5fc6df2b884d9 100644 --- a/crates/consensus/beacon/src/engine/error.rs +++ b/crates/consensus/beacon/src/engine/error.rs @@ -57,12 +57,12 @@ pub enum BeaconForkChoiceUpdateError { EngineUnavailable, /// An internal error occurred, not necessarily related to the update. #[error(transparent)] - Internal(Box), + Internal(Box), } impl BeaconForkChoiceUpdateError { /// Create a new internal error. 
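// Illustrative sketch of the pattern this patch rolls out (commentary, not code from the
// diff): `core::error::Error` has been stable since Rust 1.81, so error types and their
// `source()` chains can be written without referencing `std` at all and therefore compile
// in `no_std` crates too. `ParseError` and `ConfigError` are made-up example types.
use core::error::Error;
use core::fmt;

#[derive(Debug)]
struct ParseError;

impl fmt::Display for ParseError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("invalid input")
    }
}

impl Error for ParseError {}

#[derive(Debug)]
struct ConfigError {
    source: ParseError,
}

impl fmt::Display for ConfigError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("failed to load config")
    }
}

impl Error for ConfigError {
    // Same shape as the `source()` impls elsewhere in this patch, with no `std::` paths.
    fn source(&self) -> Option<&(dyn Error + 'static)> {
        Some(&self.source)
    }
}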
- pub fn internal(e: E) -> Self { + pub fn internal(e: E) -> Self { Self::Internal(Box::new(e)) } } @@ -89,12 +89,12 @@ pub enum BeaconOnNewPayloadError { EngineUnavailable, /// An internal error occurred, not necessarily related to the payload. #[error(transparent)] - Internal(Box), + Internal(Box), } impl BeaconOnNewPayloadError { /// Create a new internal error. - pub fn internal(e: E) -> Self { + pub fn internal(e: E) -> Self { Self::Internal(Box::new(e)) } } diff --git a/crates/consensus/beacon/src/engine/hooks/mod.rs b/crates/consensus/beacon/src/engine/hooks/mod.rs index b5e6ea61e38a5..828a6f9685001 100644 --- a/crates/consensus/beacon/src/engine/hooks/mod.rs +++ b/crates/consensus/beacon/src/engine/hooks/mod.rs @@ -104,7 +104,7 @@ pub enum EngineHookError { Common(#[from] RethError), /// An internal error occurred. #[error(transparent)] - Internal(#[from] Box), + Internal(#[from] Box), } /// Level of database access the hook needs for execution. diff --git a/crates/errors/src/error.rs b/crates/errors/src/error.rs index e74d582759b79..869d5732746da 100644 --- a/crates/errors/src/error.rs +++ b/crates/errors/src/error.rs @@ -37,14 +37,14 @@ pub enum RethError { /// Any other error. #[error(transparent)] - Other(Box), + Other(Box), } impl RethError { /// Create a new `RethError` from a given error. pub fn other(error: E) -> Self where - E: std::error::Error + Send + Sync + 'static, + E: core::error::Error + Send + Sync + 'static, { Self::Other(Box::new(error)) } diff --git a/crates/evm/execution-errors/src/lib.rs b/crates/evm/execution-errors/src/lib.rs index 1113cc83d2ea0..4dbbfb7abdce8 100644 --- a/crates/evm/execution-errors/src/lib.rs +++ b/crates/evm/execution-errors/src/lib.rs @@ -125,12 +125,11 @@ impl From for BlockValidationError { } } -#[cfg(feature = "std")] -impl std::error::Error for BlockValidationError { - fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { +impl core::error::Error for BlockValidationError { + fn source(&self) -> Option<&(dyn core::error::Error + 'static)> { match self { - Self::EVM { error, .. } => std::error::Error::source(error), - Self::StateRoot(source) => std::error::Error::source(source), + Self::EVM { error, .. 
} => core::error::Error::source(error), + Self::StateRoot(source) => core::error::Error::source(source), _ => Option::None, } } @@ -153,7 +152,7 @@ impl BlockExecutionError { #[cfg(feature = "std")] pub fn other(error: E) -> Self where - E: std::error::Error + Send + Sync + 'static, + E: core::error::Error + Send + Sync + 'static, { Self::Internal(InternalBlockExecutionError::other(error)) } @@ -185,13 +184,12 @@ impl From for BlockExecutionError { } } -#[cfg(feature = "std")] -impl std::error::Error for BlockExecutionError { - fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { +impl core::error::Error for BlockExecutionError { + fn source(&self) -> Option<&(dyn core::error::Error + 'static)> { match self { - Self::Validation(source) => std::error::Error::source(source), - Self::Consensus(source) => std::error::Error::source(source), - Self::Internal(source) => std::error::Error::source(source), + Self::Validation(source) => core::error::Error::source(source), + Self::Consensus(source) => core::error::Error::source(source), + Self::Internal(source) => core::error::Error::source(source), } } } @@ -216,8 +214,7 @@ pub enum InternalBlockExecutionError { #[from] LatestBlock(ProviderError), /// Arbitrary Block Executor Errors - #[cfg(feature = "std")] - Other(Box), + Other(Box), } impl InternalBlockExecutionError { @@ -225,7 +222,7 @@ impl InternalBlockExecutionError { #[cfg(feature = "std")] pub fn other(error: E) -> Self where - E: std::error::Error + Send + Sync + 'static, + E: core::error::Error + Send + Sync + 'static, { Self::Other(Box::new(error)) } @@ -237,12 +234,11 @@ impl InternalBlockExecutionError { } } -#[cfg(feature = "std")] -impl std::error::Error for InternalBlockExecutionError { - fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { +impl core::error::Error for InternalBlockExecutionError { + fn source(&self) -> Option<&(dyn core::error::Error + 'static)> { match self { - Self::Pruning(source) => std::error::Error::source(source), - Self::LatestBlock(source) => std::error::Error::source(source), + Self::Pruning(source) => core::error::Error::source(source), + Self::LatestBlock(source) => core::error::Error::source(source), _ => Option::None, } } diff --git a/crates/evm/execution-errors/src/trie.rs b/crates/evm/execution-errors/src/trie.rs index c85819ee74def..9e4b16d8d0c27 100644 --- a/crates/evm/execution-errors/src/trie.rs +++ b/crates/evm/execution-errors/src/trie.rs @@ -15,12 +15,11 @@ pub enum StateRootError { StorageRootError(StorageRootError), } -#[cfg(feature = "std")] -impl std::error::Error for StateRootError { - fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { +impl core::error::Error for StateRootError { + fn source(&self) -> Option<&(dyn core::error::Error + 'static)> { match self { - Self::Database(source) => std::error::Error::source(source), - Self::StorageRootError(source) => std::error::Error::source(source), + Self::Database(source) => core::error::Error::source(source), + Self::StorageRootError(source) => core::error::Error::source(source), } } } @@ -49,11 +48,10 @@ impl From for DatabaseError { } } -#[cfg(feature = "std")] -impl std::error::Error for StorageRootError { - fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { +impl core::error::Error for StorageRootError { + fn source(&self) -> Option<&(dyn core::error::Error + 'static)> { match self { - Self::Database(source) => std::error::Error::source(source), + Self::Database(source) => core::error::Error::source(source), } } } @@ -76,12 +74,11 @@ 
impl From for ProviderError { } } -#[cfg(feature = "std")] -impl std::error::Error for StateProofError { - fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { +impl core::error::Error for StateProofError { + fn source(&self) -> Option<&(dyn core::error::Error + 'static)> { match self { - Self::Database(source) => std::error::Error::source(source), - Self::Rlp(source) => std::error::Error::source(source), + Self::Database(source) => core::error::Error::source(source), + Self::Rlp(source) => core::error::Error::source(source), } } } @@ -112,12 +109,11 @@ impl From for ProviderError { } } -#[cfg(feature = "std")] -impl std::error::Error for TrieWitnessError { - fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { +impl core::error::Error for TrieWitnessError { + fn source(&self) -> Option<&(dyn core::error::Error + 'static)> { match self { - Self::Proof(source) => std::error::Error::source(source), - Self::Rlp(source) => std::error::Error::source(source), + Self::Proof(source) => core::error::Error::source(source), + Self::Rlp(source) => core::error::Error::source(source), _ => Option::None, } } diff --git a/crates/fs-util/src/lib.rs b/crates/fs-util/src/lib.rs index f77632cc89196..0cfcf04539bdd 100644 --- a/crates/fs-util/src/lib.rs +++ b/crates/fs-util/src/lib.rs @@ -305,7 +305,7 @@ pub fn write_json_file(path: &Path, obj: &T) -> Result<()> { pub fn atomic_write_file(file_path: &Path, write_fn: F) -> Result<()> where F: FnOnce(&mut File) -> std::result::Result<(), E>, - E: Into>, + E: Into>, { let mut tmp_path = file_path.to_path_buf(); tmp_path.set_extension("tmp"); diff --git a/crates/payload/primitives/src/error.rs b/crates/payload/primitives/src/error.rs index 0b8113f67dc2d..00df9e8d290f5 100644 --- a/crates/payload/primitives/src/error.rs +++ b/crates/payload/primitives/src/error.rs @@ -35,14 +35,14 @@ pub enum PayloadBuilderError { WithdrawalsBeforeShanghai, /// Any other payload building errors. #[error(transparent)] - Other(Box), + Other(Box), } impl PayloadBuilderError { /// Create a new error from a boxed error. pub fn other(error: E) -> Self where - E: std::error::Error + Send + Sync + 'static, + E: core::error::Error + Send + Sync + 'static, { Self::Other(Box::new(error)) } @@ -84,7 +84,7 @@ pub enum EngineObjectValidationError { UnsupportedFork, /// Another type of error that is not covered by the above variants. #[error("Invalid params: {0}")] - InvalidParams(#[from] Box), + InvalidParams(#[from] Box), } /// Thrown when validating an execution payload OR payload attributes fails due to: @@ -117,7 +117,7 @@ impl EngineObjectValidationError { /// Creates an instance of the `InvalidParams` variant with the given error. pub fn invalid_params(error: E) -> Self where - E: std::error::Error + Send + Sync + 'static, + E: core::error::Error + Send + Sync + 'static, { Self::InvalidParams(Box::new(error)) } diff --git a/crates/payload/primitives/src/traits.rs b/crates/payload/primitives/src/traits.rs index 9551b75a7774f..b0647691f760b 100644 --- a/crates/payload/primitives/src/traits.rs +++ b/crates/payload/primitives/src/traits.rs @@ -80,7 +80,7 @@ pub trait PayloadBuilderAttributes: Send + Sync + std::fmt::Debug { /// [`PayloadBuilderAttributes::try_new`]. type RpcPayloadAttributes; /// The error type used in [`PayloadBuilderAttributes::try_new`]. - type Error: std::error::Error; + type Error: core::error::Error; /// Creates a new payload builder for the given parent block and the attributes. 
/// @@ -164,7 +164,7 @@ pub trait PayloadAttributesBuilder: std::fmt::Debug + Send + Sync + 'static { /// The payload attributes type returned by the builder. type PayloadAttributes: PayloadAttributes; /// The error type returned by [`PayloadAttributesBuilder::build`]. - type Error: std::error::Error + Send + Sync; + type Error: core::error::Error + Send + Sync; /// Return a new payload attribute from the builder. fn build(&self) -> Result; diff --git a/crates/primitives/src/transaction/error.rs b/crates/primitives/src/transaction/error.rs index de4efa4d8f0bd..0c53c2c8dcf95 100644 --- a/crates/primitives/src/transaction/error.rs +++ b/crates/primitives/src/transaction/error.rs @@ -61,8 +61,7 @@ pub enum InvalidTransactionError { SignerAccountHasBytecode, } -#[cfg(feature = "std")] -impl std::error::Error for InvalidTransactionError {} +impl core::error::Error for InvalidTransactionError {} /// Represents error variants that can happen when trying to convert a transaction to /// [`PooledTransactionsElement`](crate::PooledTransactionsElement) @@ -87,5 +86,4 @@ pub enum TryFromRecoveredTransactionError { BlobSidecarMissing, } -#[cfg(feature = "std")] -impl std::error::Error for TryFromRecoveredTransactionError {} +impl core::error::Error for TryFromRecoveredTransactionError {} diff --git a/crates/rpc/ipc/src/client/mod.rs b/crates/rpc/ipc/src/client/mod.rs index e8eff9c8f454f..8f2fe0255c7db 100644 --- a/crates/rpc/ipc/src/client/mod.rs +++ b/crates/rpc/ipc/src/client/mod.rs @@ -90,7 +90,7 @@ impl IpcClientBuilder { /// use jsonrpsee::{core::client::ClientT, rpc_params}; /// use reth_ipc::client::IpcClientBuilder; /// - /// # async fn run_client() -> Result<(), Box> { + /// # async fn run_client() -> Result<(), Box> { /// let client = IpcClientBuilder::default().build("/tmp/my-uds").await?; /// let response: String = client.request("say_hello", rpc_params![]).await?; /// # Ok(()) } diff --git a/crates/rpc/ipc/src/server/connection.rs b/crates/rpc/ipc/src/server/connection.rs index 5e7497cb9e594..aaf6731d045ec 100644 --- a/crates/rpc/ipc/src/server/connection.rs +++ b/crates/rpc/ipc/src/server/connection.rs @@ -82,7 +82,7 @@ impl IpcConnDriver { impl Future for IpcConnDriver where S: Service> + Send + 'static, - S::Error: Into>, + S::Error: Into>, S::Future: Send + Unpin, T: AsyncRead + AsyncWrite + Unpin + Send + 'static, { diff --git a/crates/rpc/ipc/src/server/mod.rs b/crates/rpc/ipc/src/server/mod.rs index 168819754f7cf..a02d3ca32b65f 100644 --- a/crates/rpc/ipc/src/server/mod.rs +++ b/crates/rpc/ipc/src/server/mod.rs @@ -72,7 +72,7 @@ where Service: Service< String, Response = Option, - Error = Box, + Error = Box, Future: Send + Unpin, > + Send, > + Send @@ -86,7 +86,7 @@ where /// ``` /// use jsonrpsee::RpcModule; /// use reth_ipc::server::Builder; - /// async fn run_server() -> Result<(), Box> { + /// async fn run_server() -> Result<(), Box> { /// let server = Builder::default().build("/tmp/my-uds".into()); /// let mut module = RpcModule::new(()); /// module.register_method("say_hello", |_, _, _| "lo")?; @@ -366,7 +366,7 @@ where /// response will be emitted via the `method_sink`. 
type Response = Option; - type Error = Box; + type Error = Box; type Future = Pin> + Send>>; @@ -441,7 +441,7 @@ fn process_connection<'b, RpcMiddleware, HttpMiddleware>( + Service< String, Response = Option, - Error = Box, + Error = Box, >, <>>::Service as Service>::Future: Send + Unpin, @@ -496,7 +496,7 @@ async fn to_ipc_service( rx: mpsc::Receiver, ) where S: Service> + Send + 'static, - S::Error: Into>, + S::Error: Into>, S::Future: Send + Unpin, T: AsyncRead + AsyncWrite + Unpin + Send + 'static, { @@ -823,7 +823,7 @@ mod tests { async fn pipe_from_stream_with_bounded_buffer( pending: PendingSubscriptionSink, stream: BroadcastStream, - ) -> Result<(), Box> { + ) -> Result<(), Box> { let sink = pending.accept().await.unwrap(); let closed = sink.closed(); diff --git a/crates/rpc/rpc-engine-api/src/error.rs b/crates/rpc/rpc-engine-api/src/error.rs index 56491916188d4..677bd2fb246d5 100644 --- a/crates/rpc/rpc-engine-api/src/error.rs +++ b/crates/rpc/rpc-engine-api/src/error.rs @@ -84,7 +84,7 @@ pub enum EngineApiError { NewPayload(#[from] BeaconOnNewPayloadError), /// Encountered an internal error. #[error(transparent)] - Internal(#[from] Box), + Internal(#[from] Box), /// Fetching the payload failed #[error(transparent)] GetPayloadError(#[from] PayloadBuilderError), diff --git a/crates/rpc/rpc-eth-types/src/error.rs b/crates/rpc/rpc-eth-types/src/error.rs index 90aec212730d4..8387439f62c63 100644 --- a/crates/rpc/rpc-eth-types/src/error.rs +++ b/crates/rpc/rpc-eth-types/src/error.rs @@ -591,7 +591,7 @@ impl std::fmt::Display for RevertError { } } -impl std::error::Error for RevertError {} +impl core::error::Error for RevertError {} /// A helper error type that's mainly used to mirror `geth` Txpool's error messages #[derive(Debug, thiserror::Error)] @@ -643,7 +643,7 @@ pub enum RpcPoolError { AddressAlreadyReserved, /// Other unspecified error #[error(transparent)] - Other(Box), + Other(Box), } impl From for jsonrpsee_types::error::ErrorObject<'static> { diff --git a/crates/rpc/rpc-types/src/eth/error.rs b/crates/rpc/rpc-types/src/eth/error.rs index 9212b2f17a751..71b71b04a0736 100644 --- a/crates/rpc/rpc-types/src/eth/error.rs +++ b/crates/rpc/rpc-types/src/eth/error.rs @@ -2,7 +2,7 @@ /// A trait to convert an error to an RPC error. #[cfg(feature = "jsonrpsee-types")] -pub trait ToRpcError: std::error::Error + Send + Sync + 'static { +pub trait ToRpcError: core::error::Error + Send + Sync + 'static { /// Converts the error to a JSON-RPC error object. fn to_rpc_error(&self) -> jsonrpsee_types::ErrorObject<'static>; } diff --git a/crates/stages/api/src/error.rs b/crates/stages/api/src/error.rs index 1941375d21e2a..4285b97208f54 100644 --- a/crates/stages/api/src/error.rs +++ b/crates/stages/api/src/error.rs @@ -133,12 +133,12 @@ pub enum StageError { /// These types of errors are caught by the [Pipeline][crate::Pipeline] and trigger a restart /// of the stage. #[error(transparent)] - Recoverable(Box), + Recoverable(Box), /// The stage encountered a fatal error. /// /// These types of errors stop the pipeline. 
#[error(transparent)] - Fatal(Box), + Fatal(Box), } impl StageError { diff --git a/crates/stages/stages/src/test_utils/runner.rs b/crates/stages/stages/src/test_utils/runner.rs index d2c4e68beb50e..26f245c1304d3 100644 --- a/crates/stages/stages/src/test_utils/runner.rs +++ b/crates/stages/stages/src/test_utils/runner.rs @@ -13,7 +13,7 @@ pub(crate) enum TestRunnerError { #[error(transparent)] Database(#[from] DatabaseError), #[error(transparent)] - Internal(#[from] Box), + Internal(#[from] Box), #[error(transparent)] Provider(#[from] ProviderError), } diff --git a/crates/storage/errors/src/db.rs b/crates/storage/errors/src/db.rs index f27dacdc309f2..22efbb1fb4f8c 100644 --- a/crates/storage/errors/src/db.rs +++ b/crates/storage/errors/src/db.rs @@ -50,11 +50,10 @@ pub enum DatabaseError { Other(String), } -#[cfg(feature = "std")] -impl std::error::Error for DatabaseError { - fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { +impl core::error::Error for DatabaseError { + fn source(&self) -> Option<&(dyn core::error::Error + 'static)> { match self { - Self::Write(err) => std::error::Error::source(err), + Self::Write(err) => core::error::Error::source(err), _ => Option::None, } } @@ -113,8 +112,7 @@ impl fmt::Display for DatabaseWriteError { } } -#[cfg(feature = "std")] -impl std::error::Error for DatabaseWriteError {} +impl core::error::Error for DatabaseWriteError {} /// Database write operation type. #[derive(Clone, Copy, Debug, PartialEq, Eq)] diff --git a/crates/storage/errors/src/lockfile.rs b/crates/storage/errors/src/lockfile.rs index 667197f571d7d..9a3af4ba325a6 100644 --- a/crates/storage/errors/src/lockfile.rs +++ b/crates/storage/errors/src/lockfile.rs @@ -12,8 +12,7 @@ pub enum StorageLockError { Other(String), } -#[cfg(feature = "std")] -impl std::error::Error for StorageLockError {} +impl core::error::Error for StorageLockError {} /// TODO: turn into variant once `ProviderError` impl From for StorageLockError { diff --git a/crates/storage/errors/src/provider.rs b/crates/storage/errors/src/provider.rs index f0ecfbea804da..d60a2adb92bb2 100644 --- a/crates/storage/errors/src/provider.rs +++ b/crates/storage/errors/src/provider.rs @@ -172,14 +172,13 @@ impl From for ProviderError { } } -#[cfg(feature = "std")] -impl std::error::Error for ProviderError { - fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { +impl core::error::Error for ProviderError { + fn source(&self) -> Option<&(dyn core::error::Error + 'static)> { match self { - Self::Database(source) => std::error::Error::source(source), - Self::Rlp(source) => std::error::Error::source(source), - Self::StorageLockError(source) => std::error::Error::source(source), - Self::UnifiedStorageWriterError(source) => std::error::Error::source(source), + Self::Database(source) => core::error::Error::source(source), + Self::Rlp(source) => core::error::Error::source(source), + Self::StorageLockError(source) => core::error::Error::source(source), + Self::UnifiedStorageWriterError(source) => core::error::Error::source(source), _ => Option::None, } } diff --git a/crates/storage/nippy-jar/src/error.rs b/crates/storage/nippy-jar/src/error.rs index ffeb5f3939ed4..fc096cf848c6b 100644 --- a/crates/storage/nippy-jar/src/error.rs +++ b/crates/storage/nippy-jar/src/error.rs @@ -5,7 +5,7 @@ use thiserror::Error; #[derive(Error, Debug)] pub enum NippyJarError { #[error(transparent)] - Internal(#[from] Box), + Internal(#[from] Box), #[error(transparent)] Disconnect(#[from] std::io::Error), #[error(transparent)] diff --git 
a/crates/transaction-pool/src/blobstore/mod.rs b/crates/transaction-pool/src/blobstore/mod.rs index d127b3e8e67d9..ee98e3eed85e5 100644 --- a/crates/transaction-pool/src/blobstore/mod.rs +++ b/crates/transaction-pool/src/blobstore/mod.rs @@ -90,7 +90,7 @@ pub enum BlobStoreError { DecodeError(#[from] alloy_rlp::Error), /// Other implementation specific error. #[error(transparent)] - Other(Box), + Other(Box), } /// Keeps track of the size of the blob store. diff --git a/crates/transaction-pool/src/error.rs b/crates/transaction-pool/src/error.rs index e5142e18a0d37..a4766a89d5c1a 100644 --- a/crates/transaction-pool/src/error.rs +++ b/crates/transaction-pool/src/error.rs @@ -10,7 +10,7 @@ pub type PoolResult = Result; /// /// For example during validation /// [`TransactionValidator::validate_transaction`](crate::validate::TransactionValidator::validate_transaction) -pub trait PoolTransactionError: std::error::Error + Send + Sync { +pub trait PoolTransactionError: core::error::Error + Send + Sync { /// Returns `true` if the error was caused by a transaction that is considered bad in the /// context of the transaction pool and warrants peer penalization. /// @@ -19,8 +19,8 @@ pub trait PoolTransactionError: std::error::Error + Send + Sync { } // Needed for `#[error(transparent)]` -impl std::error::Error for Box { - fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { +impl core::error::Error for Box { + fn source(&self) -> Option<&(dyn core::error::Error + 'static)> { (**self).source() } } @@ -63,7 +63,7 @@ pub enum PoolErrorKind { /// Any other error that occurred while inserting/validating a transaction. e.g. IO database /// error #[error(transparent)] - Other(#[from] Box), + Other(#[from] Box), } // === impl PoolError === @@ -75,7 +75,10 @@ impl PoolError { } /// Creates a new pool error with the `Other` kind. - pub fn other(hash: TxHash, error: impl Into>) -> Self { + pub fn other( + hash: TxHash, + error: impl Into>, + ) -> Self { Self { hash, kind: PoolErrorKind::Other(error.into()) } } diff --git a/crates/transaction-pool/src/validate/mod.rs b/crates/transaction-pool/src/validate/mod.rs index 80aee0afe9eb4..b8fe7cbb1de02 100644 --- a/crates/transaction-pool/src/validate/mod.rs +++ b/crates/transaction-pool/src/validate/mod.rs @@ -51,7 +51,7 @@ pub enum TransactionValidationOutcome { /// this transaction from ever becoming valid. 
Invalid(T, InvalidPoolTransactionError), /// An error occurred while trying to validate the transaction - Error(TxHash, Box), + Error(TxHash, Box), } impl TransactionValidationOutcome { From ea1d04aa75cbd8fcf680c79671290b108642de1a Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Sun, 29 Sep 2024 18:48:54 +0200 Subject: [PATCH 71/84] primitives: rm more `alloy_primitives` reexports (#11255) --- Cargo.lock | 8 +++++++- bin/reth-bench/Cargo.toml | 1 + bin/reth-bench/src/bench/new_payload_fcu.rs | 3 ++- bin/reth-bench/src/bench/new_payload_only.rs | 3 ++- bin/reth-bench/src/valid_payload.rs | 2 +- bin/reth/src/commands/debug_cmd/build_block.rs | 3 +-- bin/reth/src/commands/debug_cmd/execution.rs | 4 ++-- crates/consensus/debug-client/Cargo.toml | 2 +- crates/consensus/debug-client/src/client.rs | 2 +- crates/engine/tree/src/tree/persistence_state.rs | 2 +- crates/exex/exex/src/wal/mod.rs | 2 +- crates/net/downloaders/src/receipt_file_client.rs | 4 ++-- crates/net/network/src/protocol.rs | 2 +- crates/net/network/tests/it/multiplex.rs | 5 +++-- crates/node/builder/src/setup.rs | 3 +-- crates/node/core/src/args/gas_price_oracle.rs | 2 +- crates/optimism/cli/src/commands/init_state/bedrock.rs | 4 ++-- crates/optimism/primitives/src/bedrock.rs | 4 ++-- crates/payload/builder/src/lib.rs | 3 ++- crates/primitives/src/block.rs | 4 ++-- crates/primitives/src/lib.rs | 6 +----- crates/primitives/src/proofs.rs | 8 ++++---- crates/primitives/src/receipt.rs | 4 ++-- crates/primitives/src/transaction/access_list.rs | 3 +-- crates/primitives/src/transaction/compat.rs | 4 ++-- crates/primitives/src/transaction/error.rs | 3 ++- crates/primitives/src/transaction/meta.rs | 2 +- crates/primitives/src/transaction/mod.rs | 8 ++++---- crates/primitives/src/transaction/pooled.rs | 4 ++-- crates/primitives/src/transaction/signature.rs | 8 ++++---- crates/primitives/src/transaction/variant.rs | 4 ++-- crates/revm/src/database.rs | 4 ++-- crates/revm/src/state_change.rs | 4 ++-- crates/revm/src/test_utils.rs | 4 ++-- crates/stages/stages/benches/setup/mod.rs | 4 ++-- crates/stages/stages/src/lib.rs | 2 +- crates/stages/stages/src/sets.rs | 2 +- crates/stages/stages/src/stages/bodies.rs | 6 ++---- crates/stages/stages/src/stages/execution.rs | 4 ++-- crates/stages/stages/src/stages/hashing_account.rs | 9 +++++---- crates/stages/stages/src/stages/hashing_storage.rs | 8 ++++---- crates/stages/stages/src/stages/headers.rs | 7 ++++--- crates/stages/stages/src/stages/index_account_history.rs | 3 +-- crates/stages/stages/src/stages/index_storage_history.rs | 4 ++-- crates/stages/stages/src/stages/merkle.rs | 8 ++++---- crates/stages/stages/src/stages/mod.rs | 4 ++-- crates/stages/stages/src/stages/prune.rs | 3 ++- crates/stages/stages/src/stages/sender_recovery.rs | 4 ++-- crates/stages/stages/src/stages/tx_lookup.rs | 4 ++-- crates/stages/stages/src/test_utils/test_db.rs | 5 ++--- crates/storage/provider/src/providers/state/macros.rs | 2 +- crates/storage/provider/src/test_utils/blocks.rs | 7 ++++--- crates/trie/db/src/state.rs | 7 ++++--- examples/beacon-api-sidecar-fetcher/Cargo.toml | 1 + examples/beacon-api-sidecar-fetcher/src/main.rs | 3 ++- examples/beacon-api-sidecar-fetcher/src/mined_sidecar.rs | 3 ++- examples/bsc-p2p/Cargo.toml | 8 +++++++- examples/bsc-p2p/src/chainspec.rs | 2 +- examples/custom-dev-node/Cargo.toml | 1 + examples/custom-dev-node/src/main.rs | 2 +- examples/custom-engine-types/src/main.rs | 4 ++-- examples/custom-evm/src/main.rs | 5 ++--- 
examples/custom-rlpx-subprotocol/Cargo.toml | 1 + .../src/subprotocol/connection/mod.rs | 2 +- .../src/subprotocol/protocol/proto.rs | 2 +- examples/db-access/src/main.rs | 4 ++-- examples/polygon-p2p/Cargo.toml | 7 ++++++- examples/polygon-p2p/src/chain_cfg.rs | 3 ++- examples/stateful-precompile/src/main.rs | 4 ++-- testing/testing-utils/src/genesis_allocator.rs | 4 ++-- 70 files changed, 150 insertions(+), 128 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9954a4c655f76..7b7cc7f2ca8b7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2635,6 +2635,7 @@ checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" name = "example-beacon-api-sidecar-fetcher" version = "0.1.0" dependencies = [ + "alloy-primitives", "alloy-rpc-types-beacon", "clap", "eyre", @@ -2665,6 +2666,7 @@ dependencies = [ name = "example-bsc-p2p" version = "0.0.0" dependencies = [ + "alloy-primitives", "reth-chainspec", "reth-discv4", "reth-network", @@ -2683,6 +2685,7 @@ name = "example-custom-dev-node" version = "0.0.0" dependencies = [ "alloy-genesis", + "alloy-primitives", "eyre", "futures-util", "reth", @@ -2779,6 +2782,7 @@ dependencies = [ name = "example-custom-rlpx-subprotocol" version = "0.0.0" dependencies = [ + "alloy-primitives", "eyre", "futures", "rand 0.8.5", @@ -2874,6 +2878,7 @@ dependencies = [ name = "example-polygon-p2p" version = "0.0.0" dependencies = [ + "alloy-primitives", "reth-chainspec", "reth-discv4", "reth-network", @@ -6311,6 +6316,7 @@ version = "1.0.7" dependencies = [ "alloy-eips", "alloy-json-rpc", + "alloy-primitives", "alloy-provider", "alloy-pubsub", "alloy-rpc-client", @@ -6616,6 +6622,7 @@ version = "1.0.7" dependencies = [ "alloy-consensus", "alloy-eips", + "alloy-primitives", "alloy-provider", "alloy-rpc-types", "alloy-rpc-types-engine", @@ -6624,7 +6631,6 @@ dependencies = [ "futures", "reqwest", "reth-node-api", - "reth-node-core", "reth-rpc-api", "reth-rpc-builder", "reth-tracing", diff --git a/bin/reth-bench/Cargo.toml b/bin/reth-bench/Cargo.toml index d9e980f3b0b88..e4e40daeca914 100644 --- a/bin/reth-bench/Cargo.toml +++ b/bin/reth-bench/Cargo.toml @@ -36,6 +36,7 @@ alloy-pubsub.workspace = true alloy-json-rpc.workspace = true alloy-rpc-client.workspace = true alloy-eips.workspace = true +alloy-primitives.workspace = true # reqwest reqwest = { workspace = true, default-features = false, features = [ diff --git a/bin/reth-bench/src/bench/new_payload_fcu.rs b/bin/reth-bench/src/bench/new_payload_fcu.rs index 61780b3ab208a..b50b59db3c6e9 100644 --- a/bin/reth-bench/src/bench/new_payload_fcu.rs +++ b/bin/reth-bench/src/bench/new_payload_fcu.rs @@ -11,13 +11,14 @@ use crate::{ }, valid_payload::{call_forkchoice_updated, call_new_payload}, }; +use alloy_primitives::B256; use alloy_provider::Provider; use alloy_rpc_types_engine::ForkchoiceState; use clap::Parser; use csv::Writer; use reth_cli_runner::CliContext; use reth_node_core::args::BenchmarkArgs; -use reth_primitives::{Block, B256}; +use reth_primitives::Block; use reth_rpc_types_compat::engine::payload::block_to_payload; use std::time::Instant; use tracing::{debug, info}; diff --git a/bin/reth-bench/src/bench/new_payload_only.rs b/bin/reth-bench/src/bench/new_payload_only.rs index 866ffa5b46e8c..ad2d012108b0b 100644 --- a/bin/reth-bench/src/bench/new_payload_only.rs +++ b/bin/reth-bench/src/bench/new_payload_only.rs @@ -10,12 +10,13 @@ use crate::{ }, valid_payload::call_new_payload, }; +use alloy_primitives::B256; use alloy_provider::Provider; use clap::Parser; use csv::Writer; use 
reth_cli_runner::CliContext; use reth_node_core::args::BenchmarkArgs; -use reth_primitives::{Block, B256}; +use reth_primitives::Block; use reth_rpc_types_compat::engine::payload::block_to_payload; use std::time::Instant; use tracing::{debug, info}; diff --git a/bin/reth-bench/src/valid_payload.rs b/bin/reth-bench/src/valid_payload.rs index 51b48227cb2f1..6353aea71233d 100644 --- a/bin/reth-bench/src/valid_payload.rs +++ b/bin/reth-bench/src/valid_payload.rs @@ -2,6 +2,7 @@ //! response. This is useful for benchmarking, as it allows us to wait for a payload to be valid //! before sending additional calls. +use alloy_primitives::B256; use alloy_provider::{ext::EngineApi, Network}; use alloy_rpc_types_engine::{ ExecutionPayload, ExecutionPayloadInputV2, ExecutionPayloadV1, ExecutionPayloadV3, @@ -9,7 +10,6 @@ use alloy_rpc_types_engine::{ }; use alloy_transport::{Transport, TransportResult}; use reth_node_api::EngineApiMessageVersion; -use reth_primitives::B256; use tracing::error; /// An extension trait for providers that implement the engine API, to wait for a VALID response. diff --git a/bin/reth/src/commands/debug_cmd/build_block.rs b/bin/reth/src/commands/debug_cmd/build_block.rs index 57a8ee8a6b8db..3d2dfa62e8734 100644 --- a/bin/reth/src/commands/debug_cmd/build_block.rs +++ b/bin/reth/src/commands/debug_cmd/build_block.rs @@ -1,6 +1,6 @@ //! Command for debugging block building. use alloy_consensus::TxEip4844; -use alloy_primitives::Address; +use alloy_primitives::{Address, B256, U256}; use alloy_rlp::Decodable; use alloy_rpc_types::engine::{BlobsBundleV1, PayloadAttributes}; use clap::Parser; @@ -27,7 +27,6 @@ use reth_payload_builder::database::CachedReads; use reth_primitives::{ revm_primitives::KzgSettings, BlobTransaction, BlobTransactionSidecar, Bytes, PooledTransactionsElement, SealedBlock, SealedBlockWithSenders, Transaction, TransactionSigned, - B256, U256, }; use reth_provider::{ providers::BlockchainProvider, BlockHashReader, BlockReader, BlockWriter, ChainSpecProvider, diff --git a/bin/reth/src/commands/debug_cmd/execution.rs b/bin/reth/src/commands/debug_cmd/execution.rs index 46cc6064d25ea..215afacb583c4 100644 --- a/bin/reth/src/commands/debug_cmd/execution.rs +++ b/bin/reth/src/commands/debug_cmd/execution.rs @@ -1,7 +1,7 @@ //! Command for debugging execution. 
use crate::{args::NetworkArgs, utils::get_single_header}; -use alloy_primitives::BlockNumber; +use alloy_primitives::{BlockNumber, B256}; use clap::Parser; use futures::{stream::select as stream_select, StreamExt}; use reth_beacon_consensus::EthBeaconConsensus; @@ -23,7 +23,7 @@ use reth_network_api::NetworkInfo; use reth_network_p2p::{headers::client::HeadersClient, BlockClient}; use reth_node_api::{NodeTypesWithDB, NodeTypesWithDBAdapter, NodeTypesWithEngine}; use reth_node_ethereum::EthExecutorProvider; -use reth_primitives::{BlockHashOrNumber, B256}; +use reth_primitives::BlockHashOrNumber; use reth_provider::{ BlockExecutionWriter, ChainSpecProvider, ProviderFactory, StageCheckpointReader, }; diff --git a/crates/consensus/debug-client/Cargo.toml b/crates/consensus/debug-client/Cargo.toml index 74ea61da34514..c37beef10742e 100644 --- a/crates/consensus/debug-client/Cargo.toml +++ b/crates/consensus/debug-client/Cargo.toml @@ -13,7 +13,6 @@ workspace = true [dependencies] # reth reth-node-api.workspace = true -reth-node-core.workspace = true reth-rpc-api.workspace = true reth-rpc-builder.workspace = true reth-tracing.workspace = true @@ -24,6 +23,7 @@ alloy-eips.workspace = true alloy-provider = { workspace = true, features = ["ws"] } alloy-rpc-types.workspace = true alloy-rpc-types-engine.workspace = true +alloy-primitives.workspace = true auto_impl.workspace = true futures.workspace = true diff --git a/crates/consensus/debug-client/src/client.rs b/crates/consensus/debug-client/src/client.rs index 1acdd197bfa94..f95d02fbb2f2a 100644 --- a/crates/consensus/debug-client/src/client.rs +++ b/crates/consensus/debug-client/src/client.rs @@ -1,9 +1,9 @@ use alloy_consensus::TxEnvelope; use alloy_eips::eip2718::Encodable2718; +use alloy_primitives::B256; use alloy_rpc_types::{Block, BlockTransactions}; use alloy_rpc_types_engine::{ExecutionPayloadV1, ExecutionPayloadV2, ExecutionPayloadV3}; use reth_node_api::EngineTypes; -use reth_node_core::primitives::B256; use reth_rpc_builder::auth::AuthServerHandle; use reth_tracing::tracing::warn; use ringbuffer::{AllocRingBuffer, RingBuffer}; diff --git a/crates/engine/tree/src/tree/persistence_state.rs b/crates/engine/tree/src/tree/persistence_state.rs index fca51291bb5d6..b00b7175ff537 100644 --- a/crates/engine/tree/src/tree/persistence_state.rs +++ b/crates/engine/tree/src/tree/persistence_state.rs @@ -1,5 +1,5 @@ use alloy_eips::BlockNumHash; -use reth_primitives::B256; +use alloy_primitives::B256; use std::{collections::VecDeque, time::Instant}; use tokio::sync::oneshot; use tracing::{debug, trace}; diff --git a/crates/exex/exex/src/wal/mod.rs b/crates/exex/exex/src/wal/mod.rs index 72e60fe1a3db3..06c2f6485f000 100644 --- a/crates/exex/exex/src/wal/mod.rs +++ b/crates/exex/exex/src/wal/mod.rs @@ -9,8 +9,8 @@ pub use storage::Storage; use std::{path::Path, sync::Arc}; use alloy_eips::BlockNumHash; +use alloy_primitives::B256; use reth_exex_types::ExExNotification; -use reth_primitives::B256; use reth_tracing::tracing::{debug, instrument}; /// WAL is a write-ahead log (WAL) that stores the notifications sent to ExExes. 
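// Illustrative sketch, not part of the patch: the mechanical rule this commit applies
// across the tree. Primitive types such as `Address`, `B256` and `U256` are imported
// straight from `alloy_primitives`, while domain types such as `Account` stay under
// `reth_primitives`. Previously a single `use reth_primitives::{Account, Address, B256,
// U256};` pulled everything in through re-exports; the items themselves are unchanged.
use alloy_primitives::{Address, B256, U256};
use reth_primitives::Account;

// Made-up function, only to show that call sites compile the same way after the switch.
fn _example(account: &Account, owner: Address, slot: B256) -> U256 {
    let _ = (account, owner, slot);
    U256::ZERO
}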
diff --git a/crates/net/downloaders/src/receipt_file_client.rs b/crates/net/downloaders/src/receipt_file_client.rs index 7e66a4d876e24..980a72025407d 100644 --- a/crates/net/downloaders/src/receipt_file_client.rs +++ b/crates/net/downloaders/src/receipt_file_client.rs @@ -213,9 +213,9 @@ pub struct ReceiptWithBlockNumber { #[cfg(test)] mod test { - use alloy_primitives::{hex, Address, Bytes, Log, LogData, B256}; + use alloy_primitives::{bytes::BytesMut, hex, Address, Bytes, Log, LogData, B256}; use alloy_rlp::{Decodable, RlpDecodable}; - use reth_primitives::{Buf, BytesMut, Receipt, TxType}; + use reth_primitives::{Buf, Receipt, TxType}; use reth_tracing::init_test_tracing; use tokio_util::codec::Decoder; diff --git a/crates/net/network/src/protocol.rs b/crates/net/network/src/protocol.rs index 8ea34fff1ed6c..eeffd1c95f4f8 100644 --- a/crates/net/network/src/protocol.rs +++ b/crates/net/network/src/protocol.rs @@ -9,12 +9,12 @@ use std::{ pin::Pin, }; +use alloy_primitives::bytes::BytesMut; use futures::Stream; use reth_eth_wire::{ capability::SharedCapabilities, multiplex::ProtocolConnection, protocol::Protocol, }; use reth_network_api::{Direction, PeerId}; -use reth_primitives::BytesMut; /// A trait that allows to offer additional RLPx-based application-level protocols when establishing /// a peer-to-peer connection. diff --git a/crates/net/network/tests/it/multiplex.rs b/crates/net/network/tests/it/multiplex.rs index 9c3fce7c5ed39..3eaf0a5eb422d 100644 --- a/crates/net/network/tests/it/multiplex.rs +++ b/crates/net/network/tests/it/multiplex.rs @@ -7,6 +7,7 @@ use std::{ task::{ready, Context, Poll}, }; +use alloy_primitives::bytes::BytesMut; use futures::{Stream, StreamExt}; use reth_eth_wire::{ capability::SharedCapabilities, multiplex::ProtocolConnection, protocol::Protocol, @@ -16,7 +17,6 @@ use reth_network::{ test_utils::Testnet, }; use reth_network_api::{Direction, PeerId}; -use reth_primitives::BytesMut; use reth_provider::test_utils::MockEthProvider; use tokio::sync::{mpsc, oneshot}; use tokio_stream::wrappers::UnboundedReceiverStream; @@ -26,8 +26,9 @@ use crate::multiplex::proto::{PingPongProtoMessage, PingPongProtoMessageKind}; /// A simple Rlpx subprotocol that sends pings and pongs mod proto { use super::*; + use alloy_primitives::bytes::BufMut; use reth_eth_wire::Capability; - use reth_primitives::{Buf, BufMut}; + use reth_primitives::Buf; #[repr(u8)] #[derive(Clone, Copy, Debug, PartialEq, Eq)] diff --git a/crates/node/builder/src/setup.rs b/crates/node/builder/src/setup.rs index 44c9f19e84b3d..3591868ddad95 100644 --- a/crates/node/builder/src/setup.rs +++ b/crates/node/builder/src/setup.rs @@ -2,7 +2,7 @@ use std::sync::Arc; -use alloy_primitives::BlockNumber; +use alloy_primitives::{BlockNumber, B256}; use reth_config::{config::StageConfig, PruneConfig}; use reth_consensus::Consensus; use reth_downloaders::{ @@ -14,7 +14,6 @@ use reth_exex::ExExManagerHandle; use reth_network_p2p::{ bodies::downloader::BodyDownloader, headers::downloader::HeaderDownloader, BlockClient, }; -use reth_node_core::primitives::B256; use reth_provider::{providers::ProviderNodeTypes, ProviderFactory}; use reth_stages::{prelude::DefaultStages, stages::ExecutionStage, Pipeline, StageSet}; use reth_static_file::StaticFileProducer; diff --git a/crates/node/core/src/args/gas_price_oracle.rs b/crates/node/core/src/args/gas_price_oracle.rs index abdd8e14214f8..b7a704cdf55ef 100644 --- a/crates/node/core/src/args/gas_price_oracle.rs +++ b/crates/node/core/src/args/gas_price_oracle.rs @@ -1,4 +1,4 @@ 
-use crate::primitives::U256; +use alloy_primitives::U256; use clap::Args; use reth_rpc_eth_types::GasPriceOracleConfig; use reth_rpc_server_types::constants::gas_oracle::{ diff --git a/crates/optimism/cli/src/commands/init_state/bedrock.rs b/crates/optimism/cli/src/commands/init_state/bedrock.rs index 2426a195a8b62..efff065e50516 100644 --- a/crates/optimism/cli/src/commands/init_state/bedrock.rs +++ b/crates/optimism/cli/src/commands/init_state/bedrock.rs @@ -1,7 +1,7 @@ -use alloy_primitives::{BlockNumber, B256}; +use alloy_primitives::{BlockNumber, B256, U256}; use reth_optimism_primitives::bedrock::{BEDROCK_HEADER, BEDROCK_HEADER_HASH, BEDROCK_HEADER_TTD}; use reth_primitives::{ - BlockBody, Header, SealedBlock, SealedBlockWithSenders, SealedHeader, StaticFileSegment, U256, + BlockBody, Header, SealedBlock, SealedBlockWithSenders, SealedHeader, StaticFileSegment, }; use reth_provider::{ providers::StaticFileProvider, BlockWriter, StageCheckpointWriter, StaticFileWriter, diff --git a/crates/optimism/primitives/src/bedrock.rs b/crates/optimism/primitives/src/bedrock.rs index 00c8093ddef11..4ece12ad679ed 100644 --- a/crates/optimism/primitives/src/bedrock.rs +++ b/crates/optimism/primitives/src/bedrock.rs @@ -1,7 +1,7 @@ //! OP mainnet bedrock related data. -use alloy_primitives::{b256, bloom, bytes, B256, B64, U256}; -use reth_primitives::{address, Header}; +use alloy_primitives::{address, b256, bloom, bytes, B256, B64, U256}; +use reth_primitives::Header; use reth_primitives_traits::constants::EMPTY_OMMER_ROOT_HASH; /// Transaction 0x9ed8f713b2cc6439657db52dcd2fdb9cc944915428f3c6e2a7703e242b259cb9 in block 985, diff --git a/crates/payload/builder/src/lib.rs b/crates/payload/builder/src/lib.rs index 51f9efd301ef9..70b4296da4e67 100644 --- a/crates/payload/builder/src/lib.rs +++ b/crates/payload/builder/src/lib.rs @@ -27,8 +27,9 @@ //! use std::future::Future; //! use std::pin::Pin; //! use std::task::{Context, Poll}; +//! use alloy_primitives::U256; //! use reth_payload_builder::{EthBuiltPayload, PayloadBuilderError, KeepPayloadJobAlive, EthPayloadBuilderAttributes, PayloadJob, PayloadJobGenerator}; -//! use reth_primitives::{Block, Header, U256}; +//! use reth_primitives::{Block, Header}; //! //! /// The generator type that creates new jobs that builds empty blocks. //! 
pub struct EmptyBlockPayloadJobGenerator; diff --git a/crates/primitives/src/block.rs b/crates/primitives/src/block.rs index 78a98f78ab2ab..8c923dab7dd9d 100644 --- a/crates/primitives/src/block.rs +++ b/crates/primitives/src/block.rs @@ -1,12 +1,12 @@ use crate::{ Bytes, GotExpected, Header, SealedHeader, TransactionSigned, TransactionSignedEcRecovered, - Withdrawals, B256, + Withdrawals, }; use alloc::vec::Vec; pub use alloy_eips::eip1898::{ BlockHashOrNumber, BlockId, BlockNumHash, BlockNumberOrTag, ForkBlock, RpcBlockHash, }; -use alloy_primitives::{Address, Sealable}; +use alloy_primitives::{Address, Sealable, B256}; use alloy_rlp::{Decodable, Encodable, RlpDecodable, RlpEncodable}; use derive_more::{Deref, DerefMut}; #[cfg(any(test, feature = "arbitrary"))] diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index 778230f5f870a..390777422b015 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -69,11 +69,7 @@ pub use transaction::{ }; // Re-exports -pub use alloy_primitives::{ - self, address, b256, bloom, bytes, - bytes::{Buf, BufMut, BytesMut}, - hex, Bytes, TxHash, B256, U256, U64, -}; +pub use alloy_primitives::{self, bloom, bytes, bytes::Buf, hex, Bytes, TxHash}; pub use reth_ethereum_forks::*; pub use revm_primitives::{self, JumpTable}; diff --git a/crates/primitives/src/proofs.rs b/crates/primitives/src/proofs.rs index 454524081f0b4..2fe611db1d2dc 100644 --- a/crates/primitives/src/proofs.rs +++ b/crates/primitives/src/proofs.rs @@ -2,11 +2,11 @@ use crate::{ constants::EMPTY_OMMER_ROOT_HASH, Header, Receipt, ReceiptWithBloom, ReceiptWithBloomRef, - Request, TransactionSigned, Withdrawal, B256, + Request, TransactionSigned, Withdrawal, }; use alloc::vec::Vec; use alloy_eips::eip7685::Encodable7685; -use alloy_primitives::keccak256; +use alloy_primitives::{keccak256, B256}; use reth_trie_common::root::{ordered_trie_root, ordered_trie_root_with_encoder}; /// Calculate a transaction root. 
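// Hedged usage sketch for the root helpers imported above; the exact closure shape is an
// assumption based on how this file calls `ordered_trie_root_with_encoder`, not a spec.
// The helper computes the root of an index-keyed trie (key = rlp(position), value =
// whatever the closure writes into the buffer), which is how the transaction, receipt
// and withdrawal roots in this file are defined. `byte_items_root` is a made-up name.
use alloy_primitives::B256;
use alloy_rlp::Encodable;
use reth_trie_common::root::ordered_trie_root_with_encoder;

fn byte_items_root(items: &[Vec<u8>]) -> B256 {
    // Each item is RLP-encoded as a byte string; the helper supplies the index keys.
    ordered_trie_root_with_encoder(items, |item, buf| item.encode(buf))
}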
@@ -65,9 +65,9 @@ pub fn calculate_ommers_root(ommers: &[Header]) -> B256 { #[cfg(test)] mod tests { use super::*; - use crate::{constants::EMPTY_ROOT_HASH, Block, U256}; + use crate::{constants::EMPTY_ROOT_HASH, Block}; use alloy_genesis::GenesisAccount; - use alloy_primitives::{b256, hex_literal::hex, Address}; + use alloy_primitives::{b256, hex_literal::hex, Address, U256}; use alloy_rlp::Decodable; use reth_chainspec::{HOLESKY, MAINNET, SEPOLIA}; use reth_trie_common::root::{state_root_ref_unhashed, state_root_unhashed}; diff --git a/crates/primitives/src/receipt.rs b/crates/primitives/src/receipt.rs index 0b32e0899a126..59c819da9116f 100644 --- a/crates/primitives/src/receipt.rs +++ b/crates/primitives/src/receipt.rs @@ -1,11 +1,11 @@ #[cfg(feature = "reth-codec")] use crate::compression::{RECEIPT_COMPRESSOR, RECEIPT_DECOMPRESSOR}; use crate::{ - logs_bloom, Bytes, TxType, B256, EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, + logs_bloom, Bytes, TxType, EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID, }; use alloc::{vec, vec::Vec}; -use alloy_primitives::{Bloom, Log}; +use alloy_primitives::{Bloom, Log, B256}; use alloy_rlp::{length_of_length, Decodable, Encodable, RlpDecodable, RlpEncodable}; use bytes::{Buf, BufMut}; use core::{cmp::Ordering, ops::Deref}; diff --git a/crates/primitives/src/transaction/access_list.rs b/crates/primitives/src/transaction/access_list.rs index 32beb293f9c12..8406e5a5b481a 100644 --- a/crates/primitives/src/transaction/access_list.rs +++ b/crates/primitives/src/transaction/access_list.rs @@ -2,9 +2,8 @@ #[cfg(test)] mod tests { - use crate::B256; use alloy_eips::eip2930::{AccessList, AccessListItem}; - use alloy_primitives::Address; + use alloy_primitives::{Address, B256}; use alloy_rlp::{RlpDecodable, RlpDecodableWrapper, RlpEncodable, RlpEncodableWrapper}; use proptest::proptest; use proptest_arbitrary_interop::arb; diff --git a/crates/primitives/src/transaction/compat.rs b/crates/primitives/src/transaction/compat.rs index ac39671f34a8d..cb8a4a92d7d1e 100644 --- a/crates/primitives/src/transaction/compat.rs +++ b/crates/primitives/src/transaction/compat.rs @@ -1,5 +1,5 @@ -use crate::{Transaction, TransactionSigned, U256}; -use alloy_primitives::{Address, TxKind}; +use crate::{Transaction, TransactionSigned}; +use alloy_primitives::{Address, TxKind, U256}; use revm_primitives::{AuthorizationList, TxEnv}; /// Implements behaviour to fill a [`TxEnv`] from another transaction. diff --git a/crates/primitives/src/transaction/error.rs b/crates/primitives/src/transaction/error.rs index 0c53c2c8dcf95..790292cd82b74 100644 --- a/crates/primitives/src/transaction/error.rs +++ b/crates/primitives/src/transaction/error.rs @@ -1,4 +1,5 @@ -use crate::{GotExpectedBoxed, U256}; +use crate::GotExpectedBoxed; +use alloy_primitives::U256; /// Represents error variants that can happen when trying to validate a /// [Transaction](crate::Transaction) diff --git a/crates/primitives/src/transaction/meta.rs b/crates/primitives/src/transaction/meta.rs index 6fc752aad2feb..c7cb9d8b697d4 100644 --- a/crates/primitives/src/transaction/meta.rs +++ b/crates/primitives/src/transaction/meta.rs @@ -1,4 +1,4 @@ -use crate::B256; +use alloy_primitives::B256; /// Additional fields in the context of a block that contains this transaction. 
#[derive(Debug, Clone, Copy, Default, Eq, PartialEq)] diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index d7ab9a22bc2b5..425926254405c 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -1,8 +1,8 @@ //! Transaction types. -use crate::{BlockHashOrNumber, Bytes, TxHash, B256, U256}; +use crate::{BlockHashOrNumber, Bytes, TxHash}; use alloy_eips::eip7702::SignedAuthorization; -use alloy_primitives::{keccak256, Address, TxKind}; +use alloy_primitives::{keccak256, Address, TxKind, B256, U256}; use alloy_consensus::{SignableTransaction, TxEip1559, TxEip2930, TxEip4844, TxEip7702, TxLegacy}; use alloy_eips::eip2930::AccessList; @@ -1708,9 +1708,9 @@ mod tests { hex, transaction::{signature::Signature, TxEip1559, TxKind, TxLegacy}, Bytes, Transaction, TransactionSigned, TransactionSignedEcRecovered, - TransactionSignedNoHash, B256, U256, + TransactionSignedNoHash, }; - use alloy_primitives::{address, b256, bytes, Address, Parity}; + use alloy_primitives::{address, b256, bytes, Address, Parity, B256, U256}; use alloy_rlp::{Decodable, Encodable, Error as RlpError}; use reth_chainspec::MIN_TRANSACTION_GAS; use reth_codecs::Compact; diff --git a/crates/primitives/src/transaction/pooled.rs b/crates/primitives/src/transaction/pooled.rs index 3f19b29b36c19..69ca8f88188d2 100644 --- a/crates/primitives/src/transaction/pooled.rs +++ b/crates/primitives/src/transaction/pooled.rs @@ -8,14 +8,14 @@ use super::{ }; use crate::{ BlobTransaction, BlobTransactionSidecar, Bytes, Signature, Transaction, TransactionSigned, - TransactionSignedEcRecovered, TxHash, B256, EIP4844_TX_TYPE_ID, + TransactionSignedEcRecovered, TxHash, EIP4844_TX_TYPE_ID, }; use alloc::vec::Vec; use alloy_consensus::{ transaction::{TxEip1559, TxEip2930, TxEip4844, TxLegacy}, SignableTransaction, TxEip4844WithSidecar, }; -use alloy_primitives::Address; +use alloy_primitives::{Address, B256}; use alloy_rlp::{Decodable, Encodable, Error as RlpError, Header, EMPTY_LIST_CODE}; use bytes::Buf; use derive_more::{AsRef, Deref}; diff --git a/crates/primitives/src/transaction/signature.rs b/crates/primitives/src/transaction/signature.rs index 07a2d61957956..829ce24093779 100644 --- a/crates/primitives/src/transaction/signature.rs +++ b/crates/primitives/src/transaction/signature.rs @@ -1,5 +1,5 @@ -use crate::{transaction::util::secp256k1, B256, U256}; -use alloy_primitives::{Address, Parity}; +use crate::transaction::util::secp256k1; +use alloy_primitives::{Address, Parity, B256, U256}; use alloy_rlp::{Decodable, Error as RlpError}; pub use alloy_primitives::Signature; @@ -119,9 +119,9 @@ mod tests { transaction::signature::{ legacy_parity, recover_signer, recover_signer_unchecked, SECP256K1N_HALF, }, - Signature, B256, U256, + Signature, }; - use alloy_primitives::{Address, Parity}; + use alloy_primitives::{Address, Parity, B256, U256}; use std::str::FromStr; #[test] diff --git a/crates/primitives/src/transaction/variant.rs b/crates/primitives/src/transaction/variant.rs index c558e7ccb9baa..888c83946cab8 100644 --- a/crates/primitives/src/transaction/variant.rs +++ b/crates/primitives/src/transaction/variant.rs @@ -2,9 +2,9 @@ //! 
`TransactionSignedEcRecovered` use crate::{ - Transaction, TransactionSigned, TransactionSignedEcRecovered, TransactionSignedNoHash, B256, + Transaction, TransactionSigned, TransactionSignedEcRecovered, TransactionSignedNoHash, }; -use alloy_primitives::Address; +use alloy_primitives::{Address, B256}; use core::ops::Deref; /// Represents various different transaction formats used in reth. diff --git a/crates/revm/src/database.rs b/crates/revm/src/database.rs index e277d63e4bccb..8f40d2be8d91c 100644 --- a/crates/revm/src/database.rs +++ b/crates/revm/src/database.rs @@ -1,7 +1,7 @@ use crate::primitives::alloy_primitives::{BlockNumber, StorageKey, StorageValue}; -use alloy_primitives::Address; +use alloy_primitives::{Address, B256, U256}; use core::ops::{Deref, DerefMut}; -use reth_primitives::{Account, B256, U256}; +use reth_primitives::Account; use reth_storage_errors::provider::{ProviderError, ProviderResult}; use revm::{ db::DatabaseRef, diff --git a/crates/revm/src/state_change.rs b/crates/revm/src/state_change.rs index a4f5c6cb04a86..afe92561bcd30 100644 --- a/crates/revm/src/state_change.rs +++ b/crates/revm/src/state_change.rs @@ -1,7 +1,7 @@ -use alloy_primitives::{map::HashMap, Address}; +use alloy_primitives::{map::HashMap, Address, U256}; use reth_chainspec::EthereumHardforks; use reth_consensus_common::calc; -use reth_primitives::{Block, Withdrawal, Withdrawals, U256}; +use reth_primitives::{Block, Withdrawal, Withdrawals}; /// Collect all balance changes at the end of the block. /// diff --git a/crates/revm/src/test_utils.rs b/crates/revm/src/test_utils.rs index 3f89d99125bee..f1d1ce600e3b1 100644 --- a/crates/revm/src/test_utils.rs +++ b/crates/revm/src/test_utils.rs @@ -2,9 +2,9 @@ use alloc::vec::Vec; use alloy_primitives::{ keccak256, map::{HashMap, HashSet}, - Address, BlockNumber, StorageKey, + Address, BlockNumber, StorageKey, B256, U256, }; -use reth_primitives::{Account, Bytecode, Bytes, B256, U256}; +use reth_primitives::{Account, Bytecode, Bytes}; use reth_storage_api::{ AccountReader, BlockHashReader, StateProofProvider, StateProvider, StateRootProvider, StorageRootProvider, diff --git a/crates/stages/stages/benches/setup/mod.rs b/crates/stages/stages/benches/setup/mod.rs index 85e4e3e5a3189..84d96aac848f1 100644 --- a/crates/stages/stages/benches/setup/mod.rs +++ b/crates/stages/stages/benches/setup/mod.rs @@ -1,5 +1,5 @@ #![allow(unreachable_pub)] -use alloy_primitives::Address; +use alloy_primitives::{Address, B256, U256}; use itertools::concat; use reth_chainspec::ChainSpec; use reth_db::{tables, test_utils::TempDatabase, Database, DatabaseEnv}; @@ -7,7 +7,7 @@ use reth_db_api::{ cursor::DbCursorRO, transaction::{DbTx, DbTxMut}, }; -use reth_primitives::{alloy_primitives::Sealable, Account, SealedBlock, SealedHeader, B256, U256}; +use reth_primitives::{alloy_primitives::Sealable, Account, SealedBlock, SealedHeader}; use reth_provider::{DatabaseProvider, DatabaseProviderFactory, TrieWriter}; use reth_stages::{ stages::{AccountHashingStage, StorageHashingStage}, diff --git a/crates/stages/stages/src/lib.rs b/crates/stages/stages/src/lib.rs index e33b6e7c02028..38a0f209dbdd6 100644 --- a/crates/stages/stages/src/lib.rs +++ b/crates/stages/stages/src/lib.rs @@ -17,7 +17,7 @@ //! # use reth_downloaders::headers::reverse_headers::ReverseHeadersDownloaderBuilder; //! # use reth_network_p2p::test_utils::{TestBodiesClient, TestHeadersClient}; //! # use reth_evm_ethereum::execute::EthExecutorProvider; -//! # use reth_primitives::B256; +//! 
# use alloy_primitives::B256; //! # use reth_chainspec::MAINNET; //! # use reth_prune_types::PruneModes; //! # use reth_network_peers::PeerId; diff --git a/crates/stages/stages/src/sets.rs b/crates/stages/stages/src/sets.rs index 5a527fbc42c27..a25fcd4e1e579 100644 --- a/crates/stages/stages/src/sets.rs +++ b/crates/stages/stages/src/sets.rs @@ -41,11 +41,11 @@ use crate::{ }, StageSet, StageSetBuilder, }; +use alloy_primitives::B256; use reth_config::config::StageConfig; use reth_consensus::Consensus; use reth_evm::execute::BlockExecutorProvider; use reth_network_p2p::{bodies::downloader::BodyDownloader, headers::downloader::HeaderDownloader}; -use reth_primitives::B256; use reth_provider::HeaderSyncGapProvider; use reth_prune_types::PruneModes; use reth_stages_api::Stage; diff --git a/crates/stages/stages/src/stages/bodies.rs b/crates/stages/stages/src/stages/bodies.rs index 248e2ff52748d..2d441dee292a2 100644 --- a/crates/stages/stages/src/stages/bodies.rs +++ b/crates/stages/stages/src/stages/bodies.rs @@ -622,7 +622,7 @@ mod tests { UnwindStageTestRunner, }, }; - use alloy_primitives::{BlockHash, BlockNumber, TxNumber}; + use alloy_primitives::{BlockHash, BlockNumber, TxNumber, B256}; use futures_util::Stream; use reth_db::{static_file::HeaderMask, tables}; use reth_db_api::{ @@ -637,9 +637,7 @@ mod tests { }, error::DownloadResult, }; - use reth_primitives::{ - BlockBody, Header, SealedBlock, SealedHeader, StaticFileSegment, B256, - }; + use reth_primitives::{BlockBody, Header, SealedBlock, SealedHeader, StaticFileSegment}; use reth_provider::{ providers::StaticFileWriter, test_utils::MockNodeTypesWithDB, HeaderProvider, ProviderFactory, StaticFileProviderFactory, TransactionsProvider, diff --git a/crates/stages/stages/src/stages/execution.rs b/crates/stages/stages/src/stages/execution.rs index 1305e6825451e..24dff34228575 100644 --- a/crates/stages/stages/src/stages/execution.rs +++ b/crates/stages/stages/src/stages/execution.rs @@ -658,14 +658,14 @@ where mod tests { use super::*; use crate::test_utils::TestStageDB; - use alloy_primitives::{hex_literal::hex, keccak256, Address}; + use alloy_primitives::{address, hex_literal::hex, keccak256, Address, B256, U256}; use alloy_rlp::Decodable; use assert_matches::assert_matches; use reth_chainspec::ChainSpecBuilder; use reth_db_api::{models::AccountBeforeTx, transaction::DbTxMut}; use reth_evm_ethereum::execute::EthExecutorProvider; use reth_execution_errors::BlockValidationError; - use reth_primitives::{address, Account, Bytecode, SealedBlock, StorageEntry, B256, U256}; + use reth_primitives::{Account, Bytecode, SealedBlock, StorageEntry}; use reth_provider::{ test_utils::create_test_provider_factory, AccountReader, DatabaseProviderFactory, ReceiptProvider, StaticFileProviderFactory, diff --git a/crates/stages/stages/src/stages/hashing_account.rs b/crates/stages/stages/src/stages/hashing_account.rs index bbcb9b8da270d..14afb37d81db5 100644 --- a/crates/stages/stages/src/stages/hashing_account.rs +++ b/crates/stages/stages/src/stages/hashing_account.rs @@ -1,4 +1,4 @@ -use alloy_primitives::keccak256; +use alloy_primitives::{keccak256, B256}; use itertools::Itertools; use reth_config::config::{EtlConfig, HashingConfig}; use reth_db::{tables, RawKey, RawTable, RawValue}; @@ -7,7 +7,7 @@ use reth_db_api::{ transaction::{DbTx, DbTxMut}, }; use reth_etl::Collector; -use reth_primitives::{Account, B256}; +use reth_primitives::Account; use reth_provider::{AccountExtReader, DBProvider, HashingWriter, StatsReader}; use reth_stages_api::{ 
AccountHashingCheckpoint, EntitiesCheckpoint, ExecInput, ExecOutput, Stage, StageCheckpoint, @@ -65,8 +65,8 @@ impl AccountHashingStage { provider: &reth_provider::DatabaseProvider, opts: SeedOpts, ) -> Result, StageError> { + use alloy_primitives::U256; use reth_db_api::models::AccountBeforeTx; - use reth_primitives::U256; use reth_provider::{StaticFileProviderFactory, StaticFileWriter}; use reth_testing_utils::{ generators, @@ -298,8 +298,9 @@ mod tests { stage_test_suite_ext, ExecuteStageTestRunner, StageTestRunner, TestRunnerError, UnwindStageTestRunner, }; + use alloy_primitives::U256; use assert_matches::assert_matches; - use reth_primitives::{Account, U256}; + use reth_primitives::Account; use reth_provider::providers::StaticFileWriter; use reth_stages_api::StageUnitCheckpoint; use test_utils::*; diff --git a/crates/stages/stages/src/stages/hashing_storage.rs b/crates/stages/stages/src/stages/hashing_storage.rs index 1862a3248ded6..ef070d30c6d68 100644 --- a/crates/stages/stages/src/stages/hashing_storage.rs +++ b/crates/stages/stages/src/stages/hashing_storage.rs @@ -1,4 +1,4 @@ -use alloy_primitives::keccak256; +use alloy_primitives::{bytes::BufMut, keccak256, B256}; use itertools::Itertools; use reth_config::config::{EtlConfig, HashingConfig}; use reth_db::tables; @@ -9,7 +9,7 @@ use reth_db_api::{ transaction::{DbTx, DbTxMut}, }; use reth_etl::Collector; -use reth_primitives::{BufMut, StorageEntry, B256}; +use reth_primitives::StorageEntry; use reth_provider::{DBProvider, HashingWriter, StatsReader, StorageReader}; use reth_stages_api::{ EntitiesCheckpoint, ExecInput, ExecOutput, Stage, StageCheckpoint, StageError, StageId, @@ -212,14 +212,14 @@ mod tests { stage_test_suite_ext, ExecuteStageTestRunner, StageTestRunner, TestRunnerError, TestStageDB, UnwindStageTestRunner, }; - use alloy_primitives::Address; + use alloy_primitives::{Address, U256}; use assert_matches::assert_matches; use rand::Rng; use reth_db_api::{ cursor::{DbCursorRW, DbDupCursorRO}, models::StoredBlockBodyIndices, }; - use reth_primitives::{SealedBlock, U256}; + use reth_primitives::SealedBlock; use reth_provider::providers::StaticFileWriter; use reth_testing_utils::generators::{ self, random_block_range, random_contract_account_range, BlockRangeParams, diff --git a/crates/stages/stages/src/stages/headers.rs b/crates/stages/stages/src/stages/headers.rs index 25c7abaee48a3..02a360b65eee2 100644 --- a/crates/stages/stages/src/stages/headers.rs +++ b/crates/stages/stages/src/stages/headers.rs @@ -1,4 +1,4 @@ -use alloy_primitives::{BlockHash, BlockNumber}; +use alloy_primitives::{BlockHash, BlockNumber, B256}; use futures_util::StreamExt; use reth_codecs::Compact; use reth_config::config::EtlConfig; @@ -11,7 +11,7 @@ use reth_db_api::{ }; use reth_etl::Collector; use reth_network_p2p::headers::{downloader::HeaderDownloader, error::HeadersDownloaderError}; -use reth_primitives::{SealedHeader, StaticFileSegment, B256}; +use reth_primitives::{SealedHeader, StaticFileSegment}; use reth_provider::{ providers::{StaticFileProvider, StaticFileWriter}, BlockHashReader, DBProvider, HeaderProvider, HeaderSyncGap, HeaderSyncGapProvider, @@ -378,10 +378,11 @@ mod tests { use crate::test_utils::{ stage_test_suite, ExecuteStageTestRunner, StageTestRunner, UnwindStageTestRunner, }; + use alloy_primitives::B256; use assert_matches::assert_matches; use reth_execution_types::ExecutionOutcome; use reth_primitives::{ - alloy_primitives::Sealable, BlockBody, SealedBlock, SealedBlockWithSenders, B256, + alloy_primitives::Sealable, 
BlockBody, SealedBlock, SealedBlockWithSenders, }; use reth_provider::{BlockWriter, ProviderFactory, StaticFileProviderFactory}; use reth_stages_api::StageUnitCheckpoint; diff --git a/crates/stages/stages/src/stages/index_account_history.rs b/crates/stages/stages/src/stages/index_account_history.rs index 1e96419807714..8b10283fb4b7f 100644 --- a/crates/stages/stages/src/stages/index_account_history.rs +++ b/crates/stages/stages/src/stages/index_account_history.rs @@ -148,7 +148,7 @@ mod tests { stage_test_suite_ext, ExecuteStageTestRunner, StageTestRunner, TestRunnerError, TestStageDB, UnwindStageTestRunner, }; - use alloy_primitives::BlockNumber; + use alloy_primitives::{address, BlockNumber, B256}; use itertools::Itertools; use reth_db::BlockNumberList; use reth_db_api::{ @@ -159,7 +159,6 @@ mod tests { }, transaction::DbTx, }; - use reth_primitives::{address, B256}; use reth_provider::{providers::StaticFileWriter, DatabaseProviderFactory}; use reth_testing_utils::generators::{ self, random_block_range, random_changeset_range, random_contract_account_range, diff --git a/crates/stages/stages/src/stages/index_storage_history.rs b/crates/stages/stages/src/stages/index_storage_history.rs index ee68e934f4ed0..ac645b8dd7546 100644 --- a/crates/stages/stages/src/stages/index_storage_history.rs +++ b/crates/stages/stages/src/stages/index_storage_history.rs @@ -153,7 +153,7 @@ mod tests { stage_test_suite_ext, ExecuteStageTestRunner, StageTestRunner, TestRunnerError, TestStageDB, UnwindStageTestRunner, }; - use alloy_primitives::{Address, BlockNumber}; + use alloy_primitives::{address, b256, Address, BlockNumber, B256, U256}; use itertools::Itertools; use reth_db::BlockNumberList; use reth_db_api::{ @@ -164,7 +164,7 @@ mod tests { }, transaction::DbTx, }; - use reth_primitives::{address, b256, StorageEntry, B256, U256}; + use reth_primitives::StorageEntry; use reth_provider::{providers::StaticFileWriter, DatabaseProviderFactory}; use reth_testing_utils::generators::{ self, random_block_range, random_changeset_range, random_contract_account_range, diff --git a/crates/stages/stages/src/stages/merkle.rs b/crates/stages/stages/src/stages/merkle.rs index 7b94067e9926e..15f6e9702be3e 100644 --- a/crates/stages/stages/src/stages/merkle.rs +++ b/crates/stages/stages/src/stages/merkle.rs @@ -1,9 +1,9 @@ -use alloy_primitives::BlockNumber; +use alloy_primitives::{BlockNumber, B256}; use reth_codecs::Compact; use reth_consensus::ConsensusError; use reth_db::tables; use reth_db_api::transaction::{DbTx, DbTxMut}; -use reth_primitives::{alloy_primitives::Sealable, GotExpected, SealedHeader, B256}; +use reth_primitives::{alloy_primitives::Sealable, GotExpected, SealedHeader}; use reth_provider::{ DBProvider, HeaderProvider, ProviderError, StageCheckpointReader, StageCheckpointWriter, StatsReader, TrieWriter, @@ -374,10 +374,10 @@ mod tests { stage_test_suite_ext, ExecuteStageTestRunner, StageTestRunner, StorageKind, TestRunnerError, TestStageDB, UnwindStageTestRunner, }; - use alloy_primitives::keccak256; + use alloy_primitives::{keccak256, U256}; use assert_matches::assert_matches; use reth_db_api::cursor::{DbCursorRO, DbCursorRW, DbDupCursorRO}; - use reth_primitives::{SealedBlock, StaticFileSegment, StorageEntry, U256}; + use reth_primitives::{SealedBlock, StaticFileSegment, StorageEntry}; use reth_provider::{providers::StaticFileWriter, StaticFileProviderFactory}; use reth_stages_api::StageUnitCheckpoint; use reth_testing_utils::generators::{ diff --git a/crates/stages/stages/src/stages/mod.rs 
b/crates/stages/stages/src/stages/mod.rs index 6ae1b30e7e24d..17ffcf2e90ebd 100644 --- a/crates/stages/stages/src/stages/mod.rs +++ b/crates/stages/stages/src/stages/mod.rs @@ -42,7 +42,7 @@ use utils::*; mod tests { use super::*; use crate::test_utils::{StorageKind, TestStageDB}; - use alloy_primitives::{hex_literal::hex, keccak256, BlockNumber}; + use alloy_primitives::{address, hex_literal::hex, keccak256, BlockNumber, B256, U256}; use alloy_rlp::Decodable; use reth_chainspec::ChainSpecBuilder; use reth_db::{ @@ -56,7 +56,7 @@ mod tests { }; use reth_evm_ethereum::execute::EthExecutorProvider; use reth_exex::ExExManagerHandle; - use reth_primitives::{address, Account, Bytecode, SealedBlock, StaticFileSegment, B256, U256}; + use reth_primitives::{Account, Bytecode, SealedBlock, StaticFileSegment}; use reth_provider::{ providers::{StaticFileProvider, StaticFileWriter}, test_utils::MockNodeTypesWithDB, diff --git a/crates/stages/stages/src/stages/prune.rs b/crates/stages/stages/src/stages/prune.rs index 3512e6210183c..8adf2fcad5466 100644 --- a/crates/stages/stages/src/stages/prune.rs +++ b/crates/stages/stages/src/stages/prune.rs @@ -169,7 +169,8 @@ mod tests { stage_test_suite_ext, ExecuteStageTestRunner, StageTestRunner, StorageKind, TestRunnerError, TestStageDB, UnwindStageTestRunner, }; - use reth_primitives::{SealedBlock, B256}; + use alloy_primitives::B256; + use reth_primitives::SealedBlock; use reth_provider::{ providers::StaticFileWriter, TransactionsProvider, TransactionsProviderExt, }; diff --git a/crates/stages/stages/src/stages/sender_recovery.rs b/crates/stages/stages/src/stages/sender_recovery.rs index 8ffbb2070aa12..14b77ccbf9d08 100644 --- a/crates/stages/stages/src/stages/sender_recovery.rs +++ b/crates/stages/stages/src/stages/sender_recovery.rs @@ -334,10 +334,10 @@ struct FailedSenderRecoveryError { #[cfg(test)] mod tests { - use alloy_primitives::BlockNumber; + use alloy_primitives::{BlockNumber, B256}; use assert_matches::assert_matches; use reth_db_api::cursor::DbCursorRO; - use reth_primitives::{SealedBlock, TransactionSigned, B256}; + use reth_primitives::{SealedBlock, TransactionSigned}; use reth_provider::{ providers::StaticFileWriter, DatabaseProviderFactory, PruneCheckpointWriter, StaticFileProviderFactory, TransactionsProvider, diff --git a/crates/stages/stages/src/stages/tx_lookup.rs b/crates/stages/stages/src/stages/tx_lookup.rs index 1744b933c2d33..60c958abf8623 100644 --- a/crates/stages/stages/src/stages/tx_lookup.rs +++ b/crates/stages/stages/src/stages/tx_lookup.rs @@ -250,9 +250,9 @@ mod tests { stage_test_suite_ext, ExecuteStageTestRunner, StageTestRunner, StorageKind, TestRunnerError, TestStageDB, UnwindStageTestRunner, }; - use alloy_primitives::BlockNumber; + use alloy_primitives::{BlockNumber, B256}; use assert_matches::assert_matches; - use reth_primitives::{SealedBlock, B256}; + use reth_primitives::SealedBlock; use reth_provider::{ providers::StaticFileWriter, DatabaseProviderFactory, StaticFileProviderFactory, }; diff --git a/crates/stages/stages/src/test_utils/test_db.rs b/crates/stages/stages/src/test_utils/test_db.rs index 7607b1d3142c5..f062f15914fbf 100644 --- a/crates/stages/stages/src/test_utils/test_db.rs +++ b/crates/stages/stages/src/test_utils/test_db.rs @@ -1,4 +1,4 @@ -use alloy_primitives::{keccak256, Address, BlockNumber, TxNumber}; +use alloy_primitives::{keccak256, Address, BlockNumber, TxNumber, B256, U256}; use reth_chainspec::MAINNET; use reth_db::{ tables, @@ -15,8 +15,7 @@ use reth_db_api::{ DatabaseError as DbError, }; 
use reth_primitives::{ - Account, Receipt, SealedBlock, SealedHeader, StaticFileSegment, StorageEntry, TxHash, B256, - U256, + Account, Receipt, SealedBlock, SealedHeader, StaticFileSegment, StorageEntry, TxHash, }; use reth_provider::{ providers::{StaticFileProvider, StaticFileProviderRWRefMut, StaticFileWriter}, diff --git a/crates/storage/provider/src/providers/state/macros.rs b/crates/storage/provider/src/providers/state/macros.rs index e6ed31938cd82..388b59ab0a1e1 100644 --- a/crates/storage/provider/src/providers/state/macros.rs +++ b/crates/storage/provider/src/providers/state/macros.rs @@ -52,7 +52,7 @@ macro_rules! delegate_provider_impls { } StateProofProvider $(where [$($generics)*])? { fn proof(&self, input: reth_trie::TrieInput, address: alloy_primitives::Address, slots: &[alloy_primitives::B256]) -> reth_storage_errors::provider::ProviderResult; - fn multiproof(&self, input: reth_trie::TrieInput, targets: alloy_primitives::map::HashMap>) -> reth_storage_errors::provider::ProviderResult; + fn multiproof(&self, input: reth_trie::TrieInput, targets: alloy_primitives::map::HashMap>) -> reth_storage_errors::provider::ProviderResult; fn witness(&self, input: reth_trie::TrieInput, target: reth_trie::HashedPostState) -> reth_storage_errors::provider::ProviderResult>; } ); diff --git a/crates/storage/provider/src/test_utils/blocks.rs b/crates/storage/provider/src/test_utils/blocks.rs index 32033bcb2803e..0ad933fc7aee5 100644 --- a/crates/storage/provider/src/test_utils/blocks.rs +++ b/crates/storage/provider/src/test_utils/blocks.rs @@ -2,15 +2,16 @@ use crate::{DatabaseProviderRW, ExecutionOutcome}; use alloy_consensus::TxLegacy; use alloy_primitives::{ - hex_literal::hex, map::HashMap, Address, BlockNumber, Log, Parity, Sealable, TxKind, + b256, hex_literal::hex, map::HashMap, Address, BlockNumber, Log, Parity, Sealable, TxKind, + B256, U256, }; use once_cell::sync::Lazy; use reth_db::tables; use reth_db_api::{database::Database, models::StoredBlockBodyIndices}; use reth_primitives::{ - alloy_primitives, b256, Account, BlockBody, Bytes, Header, Receipt, SealedBlock, + alloy_primitives, Account, BlockBody, Bytes, Header, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, Signature, Transaction, TransactionSigned, TxType, - Withdrawal, Withdrawals, B256, U256, + Withdrawal, Withdrawals, }; use reth_trie::root::{state_root_unhashed, storage_root_unhashed}; use revm::{db::BundleState, primitives::AccountInfo}; diff --git a/crates/trie/db/src/state.rs b/crates/trie/db/src/state.rs index 48a676c85a5df..35ed064e5231e 100644 --- a/crates/trie/db/src/state.rs +++ b/crates/trie/db/src/state.rs @@ -77,9 +77,10 @@ pub trait DatabaseStateRoot<'a, TX>: Sized { /// # Example /// /// ``` + /// use alloy_primitives::U256; /// use reth_db::test_utils::create_test_rw_db; /// use reth_db_api::database::Database; - /// use reth_primitives::{Account, U256}; + /// use reth_primitives::Account; /// use reth_trie::{updates::TrieUpdates, HashedPostState, StateRoot}; /// use reth_trie_db::DatabaseStateRoot; /// @@ -267,10 +268,10 @@ impl DatabaseHashedPostState for HashedPostState { #[cfg(test)] mod tests { use super::*; - use alloy_primitives::{map::HashMap, Address}; + use alloy_primitives::{map::HashMap, Address, U256}; use reth_db::test_utils::create_test_rw_db; use reth_db_api::database::Database; - use reth_primitives::{hex, revm_primitives::AccountInfo, U256}; + use reth_primitives::{hex, revm_primitives::AccountInfo}; use revm::db::BundleState; #[test] diff --git 
a/examples/beacon-api-sidecar-fetcher/Cargo.toml b/examples/beacon-api-sidecar-fetcher/Cargo.toml index 80f5f726d96d4..47a2a181f7e5c 100644 --- a/examples/beacon-api-sidecar-fetcher/Cargo.toml +++ b/examples/beacon-api-sidecar-fetcher/Cargo.toml @@ -10,6 +10,7 @@ reth.workspace = true reth-node-ethereum.workspace = true alloy-rpc-types-beacon.workspace = true +alloy-primitives.workspace = true clap.workspace = true eyre.workspace = true diff --git a/examples/beacon-api-sidecar-fetcher/src/main.rs b/examples/beacon-api-sidecar-fetcher/src/main.rs index ff66596427054..631a17a095c6d 100644 --- a/examples/beacon-api-sidecar-fetcher/src/main.rs +++ b/examples/beacon-api-sidecar-fetcher/src/main.rs @@ -18,11 +18,12 @@ use std::{ net::{IpAddr, Ipv4Addr}, }; +use alloy_primitives::B256; use clap::Parser; use futures_util::{stream::FuturesUnordered, StreamExt}; use mined_sidecar::MinedSidecarStream; use reth::{ - args::utils::DefaultChainSpecParser, builder::NodeHandle, cli::Cli, primitives::B256, + args::utils::DefaultChainSpecParser, builder::NodeHandle, cli::Cli, providers::CanonStateSubscriptions, }; use reth_node_ethereum::EthereumNode; diff --git a/examples/beacon-api-sidecar-fetcher/src/mined_sidecar.rs b/examples/beacon-api-sidecar-fetcher/src/mined_sidecar.rs index 04fbd5fbfbc08..d2d9181592b5b 100644 --- a/examples/beacon-api-sidecar-fetcher/src/mined_sidecar.rs +++ b/examples/beacon-api-sidecar-fetcher/src/mined_sidecar.rs @@ -1,10 +1,11 @@ use crate::BeaconSidecarConfig; +use alloy_primitives::B256; use alloy_rpc_types_beacon::sidecar::{BeaconBlobBundle, SidecarIterator}; use eyre::Result; use futures_util::{stream::FuturesUnordered, Future, Stream, StreamExt}; use reqwest::{Error, StatusCode}; use reth::{ - primitives::{BlobTransaction, SealedBlockWithSenders, B256}, + primitives::{BlobTransaction, SealedBlockWithSenders}, providers::CanonStateNotification, transaction_pool::{BlobStoreError, TransactionPoolExt}, }; diff --git a/examples/bsc-p2p/Cargo.toml b/examples/bsc-p2p/Cargo.toml index dde02080d135b..fecbab7a8f93b 100644 --- a/examples/bsc-p2p/Cargo.toml +++ b/examples/bsc-p2p/Cargo.toml @@ -16,9 +16,15 @@ reth-network-peers.workspace = true reth-primitives.workspace = true reth-tracing.workspace = true -secp256k1 = { workspace = true, features = ["global-context", "rand-std", "recovery"] } +secp256k1 = { workspace = true, features = [ + "global-context", + "rand-std", + "recovery", +] } tokio.workspace = true tokio-stream.workspace = true serde_json.workspace = true + +alloy-primitives.workspace = true diff --git a/examples/bsc-p2p/src/chainspec.rs b/examples/bsc-p2p/src/chainspec.rs index 11c5702332633..8a47a604e7267 100644 --- a/examples/bsc-p2p/src/chainspec.rs +++ b/examples/bsc-p2p/src/chainspec.rs @@ -1,8 +1,8 @@ +use alloy_primitives::{b256, B256}; use reth_chainspec::{ once_cell_set, BaseFeeParams, Chain, ChainHardforks, ChainSpec, EthereumHardfork, ForkCondition, }; use reth_network_peers::NodeRecord; -use reth_primitives::{b256, B256}; use std::sync::Arc; diff --git a/examples/custom-dev-node/Cargo.toml b/examples/custom-dev-node/Cargo.toml index e4c5a9dab2edb..2586795b45b53 100644 --- a/examples/custom-dev-node/Cargo.toml +++ b/examples/custom-dev-node/Cargo.toml @@ -19,3 +19,4 @@ tokio.workspace = true serde_json.workspace = true alloy-genesis.workspace = true +alloy-primitives.workspace = true diff --git a/examples/custom-dev-node/src/main.rs b/examples/custom-dev-node/src/main.rs index 93e5df6287c4f..7fa44418c523e 100644 --- a/examples/custom-dev-node/src/main.rs +++ 
b/examples/custom-dev-node/src/main.rs @@ -6,6 +6,7 @@ use std::sync::Arc; use alloy_genesis::Genesis; +use alloy_primitives::{b256, hex}; use futures_util::StreamExt; use reth::{ builder::{NodeBuilder, NodeHandle}, @@ -16,7 +17,6 @@ use reth::{ use reth_chainspec::ChainSpec; use reth_node_core::{args::RpcServerArgs, node_config::NodeConfig}; use reth_node_ethereum::EthereumNode; -use reth_primitives::{b256, hex}; #[tokio::main] async fn main() -> eyre::Result<()> { diff --git a/examples/custom-engine-types/src/main.rs b/examples/custom-engine-types/src/main.rs index c4f640e9b87f3..213a156af8fd6 100644 --- a/examples/custom-engine-types/src/main.rs +++ b/examples/custom-engine-types/src/main.rs @@ -23,7 +23,7 @@ use serde::{Deserialize, Serialize}; use thiserror::Error; use alloy_genesis::Genesis; -use alloy_primitives::Address; +use alloy_primitives::{Address, B256}; use alloy_rpc_types::{ engine::{ ExecutionPayloadEnvelopeV2, ExecutionPayloadEnvelopeV3, ExecutionPayloadEnvelopeV4, @@ -64,7 +64,7 @@ use reth_payload_builder::{ EthBuiltPayload, EthPayloadBuilderAttributes, PayloadBuilderError, PayloadBuilderHandle, PayloadBuilderService, }; -use reth_primitives::{Withdrawals, B256}; +use reth_primitives::Withdrawals; use reth_tracing::{RethTracer, Tracer}; /// A custom payload attributes type. diff --git a/examples/custom-evm/src/main.rs b/examples/custom-evm/src/main.rs index 4f1c0c7eb2990..b3bf36ce6d5d4 100644 --- a/examples/custom-evm/src/main.rs +++ b/examples/custom-evm/src/main.rs @@ -3,7 +3,7 @@ #![cfg_attr(not(test), warn(unused_crate_dependencies))] use alloy_genesis::Genesis; -use alloy_primitives::Address; +use alloy_primitives::{address, Address, U256}; use reth::{ builder::{ components::{ExecutorBuilder, PayloadServiceBuilder}, @@ -11,7 +11,6 @@ use reth::{ }, payload::{EthBuiltPayload, EthPayloadBuilderAttributes}, primitives::{ - address, revm_primitives::{Env, PrecompileResult}, Bytes, }, @@ -39,7 +38,7 @@ use reth_node_ethereum::{ }; use reth_primitives::{ revm_primitives::{CfgEnvWithHandlerCfg, TxEnv}, - Header, TransactionSigned, U256, + Header, TransactionSigned, }; use reth_tracing::{RethTracer, Tracer}; use std::sync::Arc; diff --git a/examples/custom-rlpx-subprotocol/Cargo.toml b/examples/custom-rlpx-subprotocol/Cargo.toml index d2d1caab63555..886d83da48cdc 100644 --- a/examples/custom-rlpx-subprotocol/Cargo.toml +++ b/examples/custom-rlpx-subprotocol/Cargo.toml @@ -21,3 +21,4 @@ tokio-stream.workspace = true eyre.workspace = true rand.workspace = true tracing.workspace = true +alloy-primitives.workspace = true diff --git a/examples/custom-rlpx-subprotocol/src/subprotocol/connection/mod.rs b/examples/custom-rlpx-subprotocol/src/subprotocol/connection/mod.rs index a6d835b70c263..6017871d2f997 100644 --- a/examples/custom-rlpx-subprotocol/src/subprotocol/connection/mod.rs +++ b/examples/custom-rlpx-subprotocol/src/subprotocol/connection/mod.rs @@ -1,7 +1,7 @@ use super::protocol::proto::{CustomRlpxProtoMessage, CustomRlpxProtoMessageKind}; +use alloy_primitives::bytes::BytesMut; use futures::{Stream, StreamExt}; use reth_eth_wire::multiplex::ProtocolConnection; -use reth_primitives::BytesMut; use std::{ pin::Pin, task::{ready, Context, Poll}, diff --git a/examples/custom-rlpx-subprotocol/src/subprotocol/protocol/proto.rs b/examples/custom-rlpx-subprotocol/src/subprotocol/protocol/proto.rs index 8b179a447d9f8..043d37c4f6ae8 100644 --- a/examples/custom-rlpx-subprotocol/src/subprotocol/protocol/proto.rs +++ 
b/examples/custom-rlpx-subprotocol/src/subprotocol/protocol/proto.rs @@ -1,8 +1,8 @@ //! Simple RLPx Ping Pong protocol that also support sending messages, //! following [RLPx specs](https://github.com/ethereum/devp2p/blob/master/rlpx.md) +use alloy_primitives::bytes::{Buf, BufMut, BytesMut}; use reth_eth_wire::{protocol::Protocol, Capability}; -use reth_primitives::{Buf, BufMut, BytesMut}; #[repr(u8)] #[derive(Clone, Copy, Debug, PartialEq, Eq)] diff --git a/examples/db-access/src/main.rs b/examples/db-access/src/main.rs index 5730e67535229..7878ba3c8a89f 100644 --- a/examples/db-access/src/main.rs +++ b/examples/db-access/src/main.rs @@ -1,9 +1,9 @@ -use alloy_primitives::Address; +use alloy_primitives::{Address, B256}; use alloy_rpc_types::{Filter, FilteredParams}; use reth_chainspec::ChainSpecBuilder; use reth_node_ethereum::EthereumNode; use reth_node_types::NodeTypesWithDBAdapter; -use reth_primitives::{alloy_primitives::Sealable, SealedHeader, B256}; +use reth_primitives::{alloy_primitives::Sealable, SealedHeader}; use reth_provider::{ providers::StaticFileProvider, AccountReader, BlockReader, BlockSource, HeaderProvider, ProviderFactory, ReceiptProvider, StateProvider, TransactionsProvider, diff --git a/examples/polygon-p2p/Cargo.toml b/examples/polygon-p2p/Cargo.toml index b3a7af7506b49..bdf9a27ce5608 100644 --- a/examples/polygon-p2p/Cargo.toml +++ b/examples/polygon-p2p/Cargo.toml @@ -8,7 +8,11 @@ license.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -secp256k1 = { workspace = true, features = ["global-context", "rand-std", "recovery"] } +secp256k1 = { workspace = true, features = [ + "global-context", + "rand-std", + "recovery", +] } tokio.workspace = true reth-network.workspace = true reth-chainspec.workspace = true @@ -18,3 +22,4 @@ reth-tracing.workspace = true tokio-stream.workspace = true reth-provider = { workspace = true, features = ["test-utils"] } reth-discv4 = { workspace = true, features = ["test-utils"] } +alloy-primitives.workspace = true diff --git a/examples/polygon-p2p/src/chain_cfg.rs b/examples/polygon-p2p/src/chain_cfg.rs index eabcfb2e71db6..84bfac8f209f6 100644 --- a/examples/polygon-p2p/src/chain_cfg.rs +++ b/examples/polygon-p2p/src/chain_cfg.rs @@ -1,8 +1,9 @@ +use alloy_primitives::{b256, B256}; use reth_chainspec::{ once_cell_set, BaseFeeParams, Chain, ChainHardforks, ChainSpec, EthereumHardfork, ForkCondition, }; use reth_discv4::NodeRecord; -use reth_primitives::{b256, Head, B256}; +use reth_primitives::Head; use std::sync::Arc; diff --git a/examples/stateful-precompile/src/main.rs b/examples/stateful-precompile/src/main.rs index effece640d269..f58c7a557b8a8 100644 --- a/examples/stateful-precompile/src/main.rs +++ b/examples/stateful-precompile/src/main.rs @@ -3,14 +3,14 @@ #![cfg_attr(not(test), warn(unused_crate_dependencies))] use alloy_genesis::Genesis; -use alloy_primitives::Address; +use alloy_primitives::{Address, U256}; use parking_lot::RwLock; use reth::{ api::NextBlockEnvAttributes, builder::{components::ExecutorBuilder, BuilderContext, NodeBuilder}, primitives::{ revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg, Env, PrecompileResult, TxEnv}, - Bytes, U256, + Bytes, }, revm::{ handler::register::EvmHandler, diff --git a/testing/testing-utils/src/genesis_allocator.rs b/testing/testing-utils/src/genesis_allocator.rs index d2da3bbabd28c..ebdef9eb1932a 100644 --- a/testing/testing-utils/src/genesis_allocator.rs +++ 
b/testing/testing-utils/src/genesis_allocator.rs @@ -18,8 +18,8 @@ use std::{ /// /// # Example /// ``` -/// # use alloy_primitives::Address; -/// # use reth_primitives::{U256, hex, Bytes}; +/// # use alloy_primitives::{Address, U256}; +/// # use reth_primitives::{hex, Bytes}; /// # use reth_testing_utils::GenesisAllocator; /// # use std::str::FromStr; /// let mut allocator = GenesisAllocator::default(); From e21d75f9aa345342296fb531e7d6760cc4240b7b Mon Sep 17 00:00:00 2001 From: Thomas Coratger <60488569+tcoratger@users.noreply.github.com> Date: Mon, 30 Sep 2024 03:06:47 +0200 Subject: [PATCH 72/84] primitives: rm more `alloy_primitives` reexports (#11325) --- Cargo.lock | 1 + bin/reth/src/commands/debug_cmd/build_block.rs | 4 ++-- crates/chain-state/src/test_utils.rs | 3 +-- crates/engine/tree/src/backfill.rs | 4 ++-- crates/engine/tree/src/download.rs | 3 ++- crates/engine/tree/src/test_utils.rs | 4 ++-- crates/engine/tree/src/tree/mod.rs | 3 +-- crates/ethereum/consensus/src/validation.rs | 2 +- crates/evm/src/system_calls/eip7002.rs | 4 ++-- crates/evm/src/system_calls/eip7251.rs | 4 ++-- crates/net/downloaders/src/file_client.rs | 4 ++-- crates/net/downloaders/src/headers/reverse_headers.rs | 4 ++-- crates/net/downloaders/src/headers/test_utils.rs | 3 ++- crates/net/downloaders/src/receipt_file_client.rs | 7 +++++-- crates/net/eth-wire-types/src/blocks.rs | 7 ++----- crates/net/eth-wire-types/src/transactions.rs | 7 ++----- crates/net/network/tests/it/multiplex.rs | 3 +-- crates/net/network/tests/it/requests.rs | 6 ++---- crates/net/p2p/src/full_block.rs | 6 ++---- crates/net/p2p/src/test_utils/headers.rs | 3 ++- crates/optimism/cli/src/receipt_file_codec.rs | 11 +++++------ crates/primitives/src/block.rs | 5 ++--- crates/primitives/src/lib.rs | 1 - crates/primitives/src/receipt.rs | 4 ++-- crates/primitives/src/transaction/mod.rs | 10 ++++------ crates/primitives/src/transaction/pooled.rs | 6 +++--- crates/primitives/src/transaction/sidecar.rs | 7 ++++--- crates/primitives/src/transaction/signature.rs | 3 +-- crates/primitives/src/transaction/tx_type.rs | 2 +- crates/revm/src/test_utils.rs | 4 ++-- crates/rpc/rpc-engine-api/tests/it/payload.rs | 7 ++----- crates/stages/stages/benches/setup/mod.rs | 4 ++-- crates/stages/stages/src/stages/execution.rs | 4 ++-- crates/stages/stages/src/stages/headers.rs | 6 ++---- crates/stages/stages/src/stages/merkle.rs | 4 ++-- crates/stages/stages/src/test_utils/test_db.rs | 4 ++-- crates/storage/db-models/src/accounts.rs | 4 ++-- .../provider/src/providers/blockchain_provider.rs | 6 +++--- crates/storage/provider/src/providers/mod.rs | 8 ++++---- crates/storage/provider/src/test_utils/blocks.rs | 9 ++++----- crates/storage/provider/src/test_utils/mock.rs | 7 ++++--- crates/storage/storage-api/src/block.rs | 5 ++--- crates/transaction-pool/src/maintain.rs | 5 ++--- crates/transaction-pool/src/test_utils/gen.rs | 4 ++-- crates/trie/db/src/state.rs | 4 ++-- crates/trie/trie/src/updates.rs | 4 ++-- examples/custom-evm/src/main.rs | 7 ++----- examples/custom-payload-builder/Cargo.toml | 2 ++ examples/custom-payload-builder/src/generator.rs | 3 ++- examples/db-access/src/main.rs | 4 ++-- examples/stateful-precompile/src/main.rs | 7 ++----- testing/testing-utils/src/generators.rs | 4 ++-- testing/testing-utils/src/genesis_allocator.rs | 3 +-- 53 files changed, 114 insertions(+), 136 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7b7cc7f2ca8b7..5682674034c76 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2765,6 +2765,7 @@ dependencies = [ name = 
"example-custom-payload-builder" version = "0.0.0" dependencies = [ + "alloy-primitives", "eyre", "futures-util", "reth", diff --git a/bin/reth/src/commands/debug_cmd/build_block.rs b/bin/reth/src/commands/debug_cmd/build_block.rs index 3d2dfa62e8734..6f97276839b1e 100644 --- a/bin/reth/src/commands/debug_cmd/build_block.rs +++ b/bin/reth/src/commands/debug_cmd/build_block.rs @@ -1,6 +1,6 @@ //! Command for debugging block building. use alloy_consensus::TxEip4844; -use alloy_primitives::{Address, B256, U256}; +use alloy_primitives::{Address, Bytes, B256, U256}; use alloy_rlp::Decodable; use alloy_rpc_types::engine::{BlobsBundleV1, PayloadAttributes}; use clap::Parser; @@ -25,7 +25,7 @@ use reth_node_api::{NodeTypesWithDB, NodeTypesWithEngine, PayloadBuilderAttribut use reth_node_ethereum::{EthEvmConfig, EthExecutorProvider}; use reth_payload_builder::database::CachedReads; use reth_primitives::{ - revm_primitives::KzgSettings, BlobTransaction, BlobTransactionSidecar, Bytes, + revm_primitives::KzgSettings, BlobTransaction, BlobTransactionSidecar, PooledTransactionsElement, SealedBlock, SealedBlockWithSenders, Transaction, TransactionSigned, }; use reth_provider::{ diff --git a/crates/chain-state/src/test_utils.rs b/crates/chain-state/src/test_utils.rs index f2446eb151fce..9d5b14cf725f6 100644 --- a/crates/chain-state/src/test_utils.rs +++ b/crates/chain-state/src/test_utils.rs @@ -3,14 +3,13 @@ use crate::{ CanonStateSubscriptions, }; use alloy_consensus::TxEip1559; -use alloy_primitives::{Address, BlockNumber, B256, U256}; +use alloy_primitives::{Address, BlockNumber, Sealable, B256, U256}; use alloy_signer::SignerSync; use alloy_signer_local::PrivateKeySigner; use rand::{thread_rng, Rng}; use reth_chainspec::{ChainSpec, EthereumHardfork, MIN_TRANSACTION_GAS}; use reth_execution_types::{Chain, ExecutionOutcome}; use reth_primitives::{ - alloy_primitives::Sealable, constants::{EIP1559_INITIAL_BASE_FEE, EMPTY_ROOT_HASH}, proofs::{calculate_receipt_root, calculate_transaction_root, calculate_withdrawals_root}, BlockBody, Header, Receipt, Receipts, Requests, SealedBlock, SealedBlockWithSenders, diff --git a/crates/engine/tree/src/backfill.rs b/crates/engine/tree/src/backfill.rs index 440e86693c77c..f3ac904e83a34 100644 --- a/crates/engine/tree/src/backfill.rs +++ b/crates/engine/tree/src/backfill.rs @@ -230,12 +230,12 @@ impl PipelineState { mod tests { use super::*; use crate::test_utils::{insert_headers_into_client, TestPipelineBuilder}; - use alloy_primitives::{BlockNumber, B256}; + use alloy_primitives::{BlockNumber, Sealable, B256}; use assert_matches::assert_matches; use futures::poll; use reth_chainspec::{ChainSpecBuilder, MAINNET}; use reth_network_p2p::test_utils::TestFullBlockClient; - use reth_primitives::{alloy_primitives::Sealable, Header, SealedHeader}; + use reth_primitives::{Header, SealedHeader}; use reth_provider::test_utils::MockNodeTypesWithDB; use reth_stages::ExecOutput; use reth_stages_api::StageCheckpoint; diff --git a/crates/engine/tree/src/download.rs b/crates/engine/tree/src/download.rs index aff99c8840654..173de8b4b4678 100644 --- a/crates/engine/tree/src/download.rs +++ b/crates/engine/tree/src/download.rs @@ -305,11 +305,12 @@ impl BlockDownloader for NoopBlockDownloader { mod tests { use super::*; use crate::test_utils::insert_headers_into_client; + use alloy_primitives::Sealable; use assert_matches::assert_matches; use reth_beacon_consensus::EthBeaconConsensus; use reth_chainspec::{ChainSpecBuilder, MAINNET}; use 
reth_network_p2p::test_utils::TestFullBlockClient; - use reth_primitives::{alloy_primitives::Sealable, Header, SealedHeader}; + use reth_primitives::{Header, SealedHeader}; use std::{future::poll_fn, sync::Arc}; struct TestHarness { diff --git a/crates/engine/tree/src/test_utils.rs b/crates/engine/tree/src/test_utils.rs index 3d04e2db765fb..f17766a43ed70 100644 --- a/crates/engine/tree/src/test_utils.rs +++ b/crates/engine/tree/src/test_utils.rs @@ -1,7 +1,7 @@ -use alloy_primitives::B256; +use alloy_primitives::{Sealable, B256}; use reth_chainspec::ChainSpec; use reth_network_p2p::test_utils::TestFullBlockClient; -use reth_primitives::{alloy_primitives::Sealable, BlockBody, SealedHeader}; +use reth_primitives::{BlockBody, SealedHeader}; use reth_provider::{ test_utils::{create_test_provider_factory_with_chain_spec, MockNodeTypesWithDB}, ExecutionOutcome, diff --git a/crates/engine/tree/src/tree/mod.rs b/crates/engine/tree/src/tree/mod.rs index c6b6d6e1287ff..2ee7085dc6677 100644 --- a/crates/engine/tree/src/tree/mod.rs +++ b/crates/engine/tree/src/tree/mod.rs @@ -2558,14 +2558,13 @@ pub enum AdvancePersistenceError { mod tests { use super::*; use crate::persistence::PersistenceAction; - use alloy_primitives::Bytes; + use alloy_primitives::{Bytes, Sealable}; use alloy_rlp::Decodable; use reth_beacon_consensus::{EthBeaconConsensus, ForkchoiceStatus}; use reth_chain_state::{test_utils::TestBlockBuilder, BlockState}; use reth_chainspec::{ChainSpec, HOLESKY, MAINNET}; use reth_ethereum_engine_primitives::EthEngineTypes; use reth_evm::test_utils::MockExecutorProvider; - use reth_primitives::alloy_primitives::Sealable; use reth_provider::test_utils::MockEthProvider; use reth_rpc_types_compat::engine::{block_to_payload_v1, payload::block_to_payload_v3}; use reth_trie::updates::TrieUpdates; diff --git a/crates/ethereum/consensus/src/validation.rs b/crates/ethereum/consensus/src/validation.rs index 1da648cb3ca6b..98bc22b5bdff1 100644 --- a/crates/ethereum/consensus/src/validation.rs +++ b/crates/ethereum/consensus/src/validation.rs @@ -101,7 +101,7 @@ fn compare_receipts_root_and_logs_bloom( #[cfg(test)] mod tests { - use reth_primitives::hex; + use alloy_primitives::hex; use super::*; diff --git a/crates/evm/src/system_calls/eip7002.rs b/crates/evm/src/system_calls/eip7002.rs index f0149943e495e..aa64b8af0c1d4 100644 --- a/crates/evm/src/system_calls/eip7002.rs +++ b/crates/evm/src/system_calls/eip7002.rs @@ -4,9 +4,9 @@ use core::fmt::Display; use crate::ConfigureEvm; use alloy_eips::eip7002::{WithdrawalRequest, WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS}; -use alloy_primitives::{Address, Bytes, FixedBytes}; +use alloy_primitives::{bytes::Buf, Address, Bytes, FixedBytes}; use reth_execution_errors::{BlockExecutionError, BlockValidationError}; -use reth_primitives::{Buf, Header, Request}; +use reth_primitives::{Header, Request}; use revm::{interpreter::Host, Database, DatabaseCommit, Evm}; use revm_primitives::{ BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ExecutionResult, ResultAndState, diff --git a/crates/evm/src/system_calls/eip7251.rs b/crates/evm/src/system_calls/eip7251.rs index f5a7dca14bf91..a3d3aff2e2f63 100644 --- a/crates/evm/src/system_calls/eip7251.rs +++ b/crates/evm/src/system_calls/eip7251.rs @@ -4,9 +4,9 @@ use core::fmt::Display; use crate::ConfigureEvm; use alloy_eips::eip7251::{ConsolidationRequest, CONSOLIDATION_REQUEST_PREDEPLOY_ADDRESS}; -use alloy_primitives::{Address, Bytes, FixedBytes}; +use alloy_primitives::{bytes::Buf, Address, Bytes, FixedBytes}; use 
reth_execution_errors::{BlockExecutionError, BlockValidationError}; -use reth_primitives::{Buf, Header, Request}; +use reth_primitives::{Header, Request}; use revm::{interpreter::Host, Database, DatabaseCommit, Evm}; use revm_primitives::{ BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ExecutionResult, ResultAndState, diff --git a/crates/net/downloaders/src/file_client.rs b/crates/net/downloaders/src/file_client.rs index 6ac2058e4ec4e..5b21c82fb3f8d 100644 --- a/crates/net/downloaders/src/file_client.rs +++ b/crates/net/downloaders/src/file_client.rs @@ -1,7 +1,7 @@ use std::{collections::HashMap, io, path::Path}; use alloy_eips::BlockHashOrNumber; -use alloy_primitives::{BlockHash, BlockNumber, B256}; +use alloy_primitives::{BlockHash, BlockNumber, Sealable, B256}; use futures::Future; use itertools::Either; use reth_network_p2p::{ @@ -12,7 +12,7 @@ use reth_network_p2p::{ priority::Priority, }; use reth_network_peers::PeerId; -use reth_primitives::{alloy_primitives::Sealable, BlockBody, Header, SealedHeader}; +use reth_primitives::{BlockBody, Header, SealedHeader}; use thiserror::Error; use tokio::{fs::File, io::AsyncReadExt}; use tokio_stream::StreamExt; diff --git a/crates/net/downloaders/src/headers/reverse_headers.rs b/crates/net/downloaders/src/headers/reverse_headers.rs index f0bbe82952abe..3cca3dcd1b21b 100644 --- a/crates/net/downloaders/src/headers/reverse_headers.rs +++ b/crates/net/downloaders/src/headers/reverse_headers.rs @@ -3,7 +3,7 @@ use super::task::TaskDownloader; use crate::metrics::HeaderDownloaderMetrics; use alloy_eips::BlockHashOrNumber; -use alloy_primitives::{BlockNumber, B256}; +use alloy_primitives::{BlockNumber, Sealable, B256}; use futures::{stream::Stream, FutureExt}; use futures_util::{stream::FuturesUnordered, StreamExt}; use rayon::prelude::*; @@ -19,7 +19,7 @@ use reth_network_p2p::{ priority::Priority, }; use reth_network_peers::PeerId; -use reth_primitives::{alloy_primitives::Sealable, GotExpected, Header, SealedHeader}; +use reth_primitives::{GotExpected, Header, SealedHeader}; use reth_tasks::{TaskSpawner, TokioTaskExecutor}; use std::{ cmp::{Ordering, Reverse}, diff --git a/crates/net/downloaders/src/headers/test_utils.rs b/crates/net/downloaders/src/headers/test_utils.rs index 321b0a335e57b..923ad99693731 100644 --- a/crates/net/downloaders/src/headers/test_utils.rs +++ b/crates/net/downloaders/src/headers/test_utils.rs @@ -2,7 +2,8 @@ #![allow(dead_code)] -use reth_primitives::{alloy_primitives::Sealable, SealedHeader}; +use alloy_primitives::Sealable; +use reth_primitives::SealedHeader; /// Returns a new [`SealedHeader`] that's the child header of the given `parent`. 
pub(crate) fn child_header(parent: &SealedHeader) -> SealedHeader { diff --git a/crates/net/downloaders/src/receipt_file_client.rs b/crates/net/downloaders/src/receipt_file_client.rs index 980a72025407d..41d46be941049 100644 --- a/crates/net/downloaders/src/receipt_file_client.rs +++ b/crates/net/downloaders/src/receipt_file_client.rs @@ -213,9 +213,12 @@ pub struct ReceiptWithBlockNumber { #[cfg(test)] mod test { - use alloy_primitives::{bytes::BytesMut, hex, Address, Bytes, Log, LogData, B256}; + use alloy_primitives::{ + bytes::{Buf, BytesMut}, + hex, Address, Bytes, Log, LogData, B256, + }; use alloy_rlp::{Decodable, RlpDecodable}; - use reth_primitives::{Buf, Receipt, TxType}; + use reth_primitives::{Receipt, TxType}; use reth_tracing::init_test_tracing; use tokio_util::codec::Decoder; diff --git a/crates/net/eth-wire-types/src/blocks.rs b/crates/net/eth-wire-types/src/blocks.rs index 7cea39a276d21..bccb3a96733ee 100644 --- a/crates/net/eth-wire-types/src/blocks.rs +++ b/crates/net/eth-wire-types/src/blocks.rs @@ -113,12 +113,9 @@ mod tests { HeadersDirection, }; use alloy_consensus::TxLegacy; - use alloy_primitives::{hex, TxKind, U256}; + use alloy_primitives::{hex, Parity, TxKind, U256}; use alloy_rlp::{Decodable, Encodable}; - use reth_primitives::{ - alloy_primitives::Parity, BlockHashOrNumber, Header, Signature, Transaction, - TransactionSigned, - }; + use reth_primitives::{BlockHashOrNumber, Header, Signature, Transaction, TransactionSigned}; use std::str::FromStr; use super::BlockBody; diff --git a/crates/net/eth-wire-types/src/transactions.rs b/crates/net/eth-wire-types/src/transactions.rs index ae3975d1b799f..77072a31869e5 100644 --- a/crates/net/eth-wire-types/src/transactions.rs +++ b/crates/net/eth-wire-types/src/transactions.rs @@ -78,13 +78,10 @@ impl FromIterator for PooledTransactions { mod tests { use crate::{message::RequestPair, GetPooledTransactions, PooledTransactions}; use alloy_consensus::{TxEip1559, TxLegacy}; - use alloy_primitives::{hex, TxKind, U256}; + use alloy_primitives::{hex, Parity, TxKind, U256}; use alloy_rlp::{Decodable, Encodable}; use reth_chainspec::MIN_TRANSACTION_GAS; - use reth_primitives::{ - alloy_primitives::Parity, PooledTransactionsElement, Signature, Transaction, - TransactionSigned, - }; + use reth_primitives::{PooledTransactionsElement, Signature, Transaction, TransactionSigned}; use std::str::FromStr; #[test] diff --git a/crates/net/network/tests/it/multiplex.rs b/crates/net/network/tests/it/multiplex.rs index 3eaf0a5eb422d..ca35f24fa6f2c 100644 --- a/crates/net/network/tests/it/multiplex.rs +++ b/crates/net/network/tests/it/multiplex.rs @@ -26,9 +26,8 @@ use crate::multiplex::proto::{PingPongProtoMessage, PingPongProtoMessageKind}; /// A simple Rlpx subprotocol that sends pings and pongs mod proto { use super::*; - use alloy_primitives::bytes::BufMut; + use alloy_primitives::bytes::{Buf, BufMut}; use reth_eth_wire::Capability; - use reth_primitives::Buf; #[repr(u8)] #[derive(Clone, Copy, Debug, PartialEq, Eq)] diff --git a/crates/net/network/tests/it/requests.rs b/crates/net/network/tests/it/requests.rs index 42802046daa4a..61241f02d2ded 100644 --- a/crates/net/network/tests/it/requests.rs +++ b/crates/net/network/tests/it/requests.rs @@ -4,7 +4,7 @@ use std::sync::Arc; use alloy_consensus::TxEip2930; -use alloy_primitives::{Bytes, TxKind, U256}; +use alloy_primitives::{Bytes, Parity, TxKind, U256}; use rand::Rng; use reth_eth_wire::HeadersDirection; use reth_network::{ @@ -16,9 +16,7 @@ use reth_network_p2p::{ 
bodies::client::BodiesClient, headers::client::{HeadersClient, HeadersRequest}, }; -use reth_primitives::{ - alloy_primitives::Parity, Block, Header, Signature, Transaction, TransactionSigned, -}; +use reth_primitives::{Block, Header, Signature, Transaction, TransactionSigned}; use reth_provider::test_utils::MockEthProvider; /// Returns a new [`TransactionSigned`] with some random parameters diff --git a/crates/net/p2p/src/full_block.rs b/crates/net/p2p/src/full_block.rs index ce6f58fcd98bc..91b786e410ca2 100644 --- a/crates/net/p2p/src/full_block.rs +++ b/crates/net/p2p/src/full_block.rs @@ -5,13 +5,11 @@ use crate::{ headers::client::{HeadersClient, SingleHeaderRequest}, BlockClient, }; -use alloy_primitives::B256; +use alloy_primitives::{Sealable, B256}; use reth_consensus::{Consensus, ConsensusError}; use reth_eth_wire_types::HeadersDirection; use reth_network_peers::WithPeerId; -use reth_primitives::{ - alloy_primitives::Sealable, BlockBody, GotExpected, Header, SealedBlock, SealedHeader, -}; +use reth_primitives::{BlockBody, GotExpected, Header, SealedBlock, SealedHeader}; use std::{ cmp::Reverse, collections::{HashMap, VecDeque}, diff --git a/crates/net/p2p/src/test_utils/headers.rs b/crates/net/p2p/src/test_utils/headers.rs index 14a2e3fca5548..e61183d22e4bf 100644 --- a/crates/net/p2p/src/test_utils/headers.rs +++ b/crates/net/p2p/src/test_utils/headers.rs @@ -10,11 +10,12 @@ use crate::{ }, priority::Priority, }; +use alloy_primitives::Sealable; use futures::{Future, FutureExt, Stream, StreamExt}; use reth_consensus::{test_utils::TestConsensus, Consensus}; use reth_eth_wire_types::HeadersDirection; use reth_network_peers::{PeerId, WithPeerId}; -use reth_primitives::{alloy_primitives::Sealable, Header, SealedHeader}; +use reth_primitives::{Header, SealedHeader}; use std::{ fmt, pin::Pin, diff --git a/crates/optimism/cli/src/receipt_file_codec.rs b/crates/optimism/cli/src/receipt_file_codec.rs index c0416fa5f072a..05760a49aa0b1 100644 --- a/crates/optimism/cli/src/receipt_file_codec.rs +++ b/crates/optimism/cli/src/receipt_file_codec.rs @@ -1,11 +1,11 @@ //! Codec for reading raw receipts from a file. 
-use alloy_primitives::{Address, Bloom, B256}; -use alloy_rlp::{Decodable, RlpDecodable}; -use reth_primitives::{ +use alloy_primitives::{ bytes::{Buf, BytesMut}, - Bytes, Log, Receipt, TxType, + Address, Bloom, Bytes, B256, }; +use alloy_rlp::{Decodable, RlpDecodable}; +use reth_primitives::{Log, Receipt, TxType}; use tokio_util::codec::Decoder; use reth_downloaders::{file_client::FileClientError, receipt_file_client::ReceiptWithBlockNumber}; @@ -95,8 +95,7 @@ impl TryFrom for ReceiptWithBlockNumber { #[cfg(test)] pub(crate) mod test { - use alloy_primitives::hex; - use reth_primitives::alloy_primitives::LogData; + use alloy_primitives::{hex, LogData}; use super::*; diff --git a/crates/primitives/src/block.rs b/crates/primitives/src/block.rs index 8c923dab7dd9d..fdec440302344 100644 --- a/crates/primitives/src/block.rs +++ b/crates/primitives/src/block.rs @@ -1,12 +1,11 @@ use crate::{ - Bytes, GotExpected, Header, SealedHeader, TransactionSigned, TransactionSignedEcRecovered, - Withdrawals, + GotExpected, Header, SealedHeader, TransactionSigned, TransactionSignedEcRecovered, Withdrawals, }; use alloc::vec::Vec; pub use alloy_eips::eip1898::{ BlockHashOrNumber, BlockId, BlockNumHash, BlockNumberOrTag, ForkBlock, RpcBlockHash, }; -use alloy_primitives::{Address, Sealable, B256}; +use alloy_primitives::{Address, Bytes, Sealable, B256}; use alloy_rlp::{Decodable, Encodable, RlpDecodable, RlpEncodable}; use derive_more::{Deref, DerefMut}; #[cfg(any(test, feature = "arbitrary"))] diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index 390777422b015..bf45ce3ba0883 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -69,7 +69,6 @@ pub use transaction::{ }; // Re-exports -pub use alloy_primitives::{self, bloom, bytes, bytes::Buf, hex, Bytes, TxHash}; pub use reth_ethereum_forks::*; pub use revm_primitives::{self, JumpTable}; diff --git a/crates/primitives/src/receipt.rs b/crates/primitives/src/receipt.rs index 59c819da9116f..60ea42e94a434 100644 --- a/crates/primitives/src/receipt.rs +++ b/crates/primitives/src/receipt.rs @@ -1,11 +1,11 @@ #[cfg(feature = "reth-codec")] use crate::compression::{RECEIPT_COMPRESSOR, RECEIPT_DECOMPRESSOR}; use crate::{ - logs_bloom, Bytes, TxType, EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, + logs_bloom, TxType, EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID, }; use alloc::{vec, vec::Vec}; -use alloy_primitives::{Bloom, Log, B256}; +use alloy_primitives::{Bloom, Bytes, Log, B256}; use alloy_rlp::{length_of_length, Decodable, Encodable, RlpDecodable, RlpEncodable}; use bytes::{Buf, BufMut}; use core::{cmp::Ordering, ops::Deref}; diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index 425926254405c..7889ff641edf1 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -1,12 +1,12 @@ //! Transaction types. 
-use crate::{BlockHashOrNumber, Bytes, TxHash}; +use crate::BlockHashOrNumber; use alloy_eips::eip7702::SignedAuthorization; use alloy_primitives::{keccak256, Address, TxKind, B256, U256}; use alloy_consensus::{SignableTransaction, TxEip1559, TxEip2930, TxEip4844, TxEip7702, TxLegacy}; use alloy_eips::eip2930::AccessList; -use alloy_primitives::Parity; +use alloy_primitives::{Bytes, Parity, TxHash}; use alloy_rlp::{ Decodable, Encodable, Error as RlpError, Header, EMPTY_LIST_CODE, EMPTY_STRING_CODE, }; @@ -1705,12 +1705,10 @@ impl WithEncoded> { #[cfg(test)] mod tests { use crate::{ - hex, transaction::{signature::Signature, TxEip1559, TxKind, TxLegacy}, - Bytes, Transaction, TransactionSigned, TransactionSignedEcRecovered, - TransactionSignedNoHash, + Transaction, TransactionSigned, TransactionSignedEcRecovered, TransactionSignedNoHash, }; - use alloy_primitives::{address, b256, bytes, Address, Parity, B256, U256}; + use alloy_primitives::{address, b256, bytes, hex, Address, Bytes, Parity, B256, U256}; use alloy_rlp::{Decodable, Encodable, Error as RlpError}; use reth_chainspec::MIN_TRANSACTION_GAS; use reth_codecs::Compact; diff --git a/crates/primitives/src/transaction/pooled.rs b/crates/primitives/src/transaction/pooled.rs index 69ca8f88188d2..04026839538d2 100644 --- a/crates/primitives/src/transaction/pooled.rs +++ b/crates/primitives/src/transaction/pooled.rs @@ -7,15 +7,15 @@ use super::{ TxEip7702, }; use crate::{ - BlobTransaction, BlobTransactionSidecar, Bytes, Signature, Transaction, TransactionSigned, - TransactionSignedEcRecovered, TxHash, EIP4844_TX_TYPE_ID, + BlobTransaction, BlobTransactionSidecar, Signature, Transaction, TransactionSigned, + TransactionSignedEcRecovered, EIP4844_TX_TYPE_ID, }; use alloc::vec::Vec; use alloy_consensus::{ transaction::{TxEip1559, TxEip2930, TxEip4844, TxLegacy}, SignableTransaction, TxEip4844WithSidecar, }; -use alloy_primitives::{Address, B256}; +use alloy_primitives::{Address, Bytes, TxHash, B256}; use alloy_rlp::{Decodable, Encodable, Error as RlpError, Header, EMPTY_LIST_CODE}; use bytes::Buf; use derive_more::{AsRef, Deref}; diff --git a/crates/primitives/src/transaction/sidecar.rs b/crates/primitives/src/transaction/sidecar.rs index 94dfbe6da4b64..52c3c68ef9db7 100644 --- a/crates/primitives/src/transaction/sidecar.rs +++ b/crates/primitives/src/transaction/sidecar.rs @@ -1,8 +1,8 @@ #![cfg_attr(docsrs, doc(cfg(feature = "c-kzg")))] -use crate::{Signature, Transaction, TransactionSigned, TxHash, EIP4844_TX_TYPE_ID}; +use crate::{Signature, Transaction, TransactionSigned, EIP4844_TX_TYPE_ID}; use alloy_consensus::{transaction::TxEip4844, TxEip4844WithSidecar}; -use alloy_primitives::keccak256; +use alloy_primitives::{keccak256, TxHash}; use alloy_rlp::{Decodable, Error as RlpError, Header}; use serde::{Deserialize, Serialize}; @@ -282,8 +282,9 @@ pub fn generate_blob_sidecar(blobs: Vec) -> BlobTransactionSidecar #[cfg(all(test, feature = "c-kzg"))] mod tests { use super::*; - use crate::{hex, kzg::Blob, PooledTransactionsElement}; + use crate::{kzg::Blob, PooledTransactionsElement}; use alloy_eips::eip4844::Bytes48; + use alloy_primitives::hex; use alloy_rlp::Encodable; use std::{fs, path::PathBuf, str::FromStr}; diff --git a/crates/primitives/src/transaction/signature.rs b/crates/primitives/src/transaction/signature.rs index 829ce24093779..e99fc92324ff5 100644 --- a/crates/primitives/src/transaction/signature.rs +++ b/crates/primitives/src/transaction/signature.rs @@ -115,13 +115,12 @@ pub const fn extract_chain_id(v: u64) -> 
alloy_rlp::Result<(bool, Option)> #[cfg(test)] mod tests { use crate::{ - hex, transaction::signature::{ legacy_parity, recover_signer, recover_signer_unchecked, SECP256K1N_HALF, }, Signature, }; - use alloy_primitives::{Address, Parity, B256, U256}; + use alloy_primitives::{hex, Address, Parity, B256, U256}; use std::str::FromStr; #[test] diff --git a/crates/primitives/src/transaction/tx_type.rs b/crates/primitives/src/transaction/tx_type.rs index 14ac420e0a900..c55e0d3c6193c 100644 --- a/crates/primitives/src/transaction/tx_type.rs +++ b/crates/primitives/src/transaction/tx_type.rs @@ -246,7 +246,7 @@ impl From for TxType { #[cfg(test)] mod tests { - use crate::hex; + use alloy_primitives::hex; use rand::Rng; use reth_codecs::Compact; diff --git a/crates/revm/src/test_utils.rs b/crates/revm/src/test_utils.rs index f1d1ce600e3b1..d42ec49599076 100644 --- a/crates/revm/src/test_utils.rs +++ b/crates/revm/src/test_utils.rs @@ -2,9 +2,9 @@ use alloc::vec::Vec; use alloy_primitives::{ keccak256, map::{HashMap, HashSet}, - Address, BlockNumber, StorageKey, B256, U256, + Address, BlockNumber, Bytes, StorageKey, B256, U256, }; -use reth_primitives::{Account, Bytecode, Bytes}; +use reth_primitives::{Account, Bytecode}; use reth_storage_api::{ AccountReader, BlockHashReader, StateProofProvider, StateProvider, StateRootProvider, StorageRootProvider, diff --git a/crates/rpc/rpc-engine-api/tests/it/payload.rs b/crates/rpc/rpc-engine-api/tests/it/payload.rs index e98f585c002d1..c08c30c1de09f 100644 --- a/crates/rpc/rpc-engine-api/tests/it/payload.rs +++ b/crates/rpc/rpc-engine-api/tests/it/payload.rs @@ -1,15 +1,12 @@ //! Some payload tests -use alloy_primitives::{Bytes, U256}; +use alloy_primitives::{Bytes, Sealable, U256}; use alloy_rlp::{Decodable, Error as RlpError}; use alloy_rpc_types_engine::{ ExecutionPayload, ExecutionPayloadBodyV1, ExecutionPayloadV1, PayloadError, }; use assert_matches::assert_matches; -use reth_primitives::{ - alloy_primitives::Sealable, proofs, Block, SealedBlock, SealedHeader, TransactionSigned, - Withdrawals, -}; +use reth_primitives::{proofs, Block, SealedBlock, SealedHeader, TransactionSigned, Withdrawals}; use reth_rpc_types_compat::engine::payload::{ block_to_payload, block_to_payload_v1, convert_to_payload_body_v1, try_into_sealed_block, try_payload_v1_to_block, diff --git a/crates/stages/stages/benches/setup/mod.rs b/crates/stages/stages/benches/setup/mod.rs index 84d96aac848f1..4812fb13c39aa 100644 --- a/crates/stages/stages/benches/setup/mod.rs +++ b/crates/stages/stages/benches/setup/mod.rs @@ -1,5 +1,5 @@ #![allow(unreachable_pub)] -use alloy_primitives::{Address, B256, U256}; +use alloy_primitives::{Address, Sealable, B256, U256}; use itertools::concat; use reth_chainspec::ChainSpec; use reth_db::{tables, test_utils::TempDatabase, Database, DatabaseEnv}; @@ -7,7 +7,7 @@ use reth_db_api::{ cursor::DbCursorRO, transaction::{DbTx, DbTxMut}, }; -use reth_primitives::{alloy_primitives::Sealable, Account, SealedBlock, SealedHeader}; +use reth_primitives::{Account, SealedBlock, SealedHeader}; use reth_provider::{DatabaseProvider, DatabaseProviderFactory, TrieWriter}; use reth_stages::{ stages::{AccountHashingStage, StorageHashingStage}, diff --git a/crates/stages/stages/src/stages/execution.rs b/crates/stages/stages/src/stages/execution.rs index 24dff34228575..ea2849fb230f4 100644 --- a/crates/stages/stages/src/stages/execution.rs +++ b/crates/stages/stages/src/stages/execution.rs @@ -1,5 +1,5 @@ use crate::stages::MERKLE_STAGE_DEFAULT_CLEAN_THRESHOLD; -use 
alloy_primitives::BlockNumber; +use alloy_primitives::{BlockNumber, Sealable}; use num_traits::Zero; use reth_config::config::ExecutionConfig; use reth_db::{static_file::HeaderMask, tables}; @@ -10,7 +10,7 @@ use reth_evm::{ }; use reth_execution_types::{Chain, ExecutionOutcome}; use reth_exex::{ExExManagerHandle, ExExNotification}; -use reth_primitives::{alloy_primitives::Sealable, Header, SealedHeader, StaticFileSegment}; +use reth_primitives::{Header, SealedHeader, StaticFileSegment}; use reth_primitives_traits::format_gas_throughput; use reth_provider::{ providers::{StaticFileProvider, StaticFileProviderRWRefMut, StaticFileWriter}, diff --git a/crates/stages/stages/src/stages/headers.rs b/crates/stages/stages/src/stages/headers.rs index 02a360b65eee2..83771c4969d95 100644 --- a/crates/stages/stages/src/stages/headers.rs +++ b/crates/stages/stages/src/stages/headers.rs @@ -378,12 +378,10 @@ mod tests { use crate::test_utils::{ stage_test_suite, ExecuteStageTestRunner, StageTestRunner, UnwindStageTestRunner, }; - use alloy_primitives::B256; + use alloy_primitives::{Sealable, B256}; use assert_matches::assert_matches; use reth_execution_types::ExecutionOutcome; - use reth_primitives::{ - alloy_primitives::Sealable, BlockBody, SealedBlock, SealedBlockWithSenders, - }; + use reth_primitives::{BlockBody, SealedBlock, SealedBlockWithSenders}; use reth_provider::{BlockWriter, ProviderFactory, StaticFileProviderFactory}; use reth_stages_api::StageUnitCheckpoint; use reth_testing_utils::generators::{self, random_header, random_header_range}; diff --git a/crates/stages/stages/src/stages/merkle.rs b/crates/stages/stages/src/stages/merkle.rs index 15f6e9702be3e..d1d3496d917a9 100644 --- a/crates/stages/stages/src/stages/merkle.rs +++ b/crates/stages/stages/src/stages/merkle.rs @@ -1,9 +1,9 @@ -use alloy_primitives::{BlockNumber, B256}; +use alloy_primitives::{BlockNumber, Sealable, B256}; use reth_codecs::Compact; use reth_consensus::ConsensusError; use reth_db::tables; use reth_db_api::transaction::{DbTx, DbTxMut}; -use reth_primitives::{alloy_primitives::Sealable, GotExpected, SealedHeader}; +use reth_primitives::{GotExpected, SealedHeader}; use reth_provider::{ DBProvider, HeaderProvider, ProviderError, StageCheckpointReader, StageCheckpointWriter, StatsReader, TrieWriter, diff --git a/crates/stages/stages/src/test_utils/test_db.rs b/crates/stages/stages/src/test_utils/test_db.rs index f062f15914fbf..4c43d4cdcd1d9 100644 --- a/crates/stages/stages/src/test_utils/test_db.rs +++ b/crates/stages/stages/src/test_utils/test_db.rs @@ -1,4 +1,4 @@ -use alloy_primitives::{keccak256, Address, BlockNumber, TxNumber, B256, U256}; +use alloy_primitives::{keccak256, Address, BlockNumber, TxHash, TxNumber, B256, U256}; use reth_chainspec::MAINNET; use reth_db::{ tables, @@ -15,7 +15,7 @@ use reth_db_api::{ DatabaseError as DbError, }; use reth_primitives::{ - Account, Receipt, SealedBlock, SealedHeader, StaticFileSegment, StorageEntry, TxHash, + Account, Receipt, SealedBlock, SealedHeader, StaticFileSegment, StorageEntry, }; use reth_provider::{ providers::{StaticFileProvider, StaticFileProviderRWRefMut, StaticFileWriter}, diff --git a/crates/storage/db-models/src/accounts.rs b/crates/storage/db-models/src/accounts.rs index 74736247a6fb2..e1f4773960fac 100644 --- a/crates/storage/db-models/src/accounts.rs +++ b/crates/storage/db-models/src/accounts.rs @@ -1,8 +1,8 @@ use reth_codecs::{add_arbitrary_tests, Compact}; use serde::Serialize; -use alloy_primitives::Address; -use reth_primitives::{Account, Buf}; 
+use alloy_primitives::{bytes::Buf, Address}; +use reth_primitives::Account; /// Account as it is saved in the database. /// diff --git a/crates/storage/provider/src/providers/blockchain_provider.rs b/crates/storage/provider/src/providers/blockchain_provider.rs index cb90cc08a61c3..32829748361fe 100644 --- a/crates/storage/provider/src/providers/blockchain_provider.rs +++ b/crates/storage/provider/src/providers/blockchain_provider.rs @@ -8,7 +8,7 @@ use crate::{ StaticFileProviderFactory, TransactionVariant, TransactionsProvider, WithdrawalsProvider, }; use alloy_eips::{BlockHashOrNumber, BlockId, BlockNumHash, BlockNumberOrTag}; -use alloy_primitives::{Address, BlockHash, BlockNumber, TxHash, TxNumber, B256, U256}; +use alloy_primitives::{Address, BlockHash, BlockNumber, Sealable, TxHash, TxNumber, B256, U256}; use alloy_rpc_types_engine::ForkchoiceState; use reth_chain_state::{ BlockState, CanonicalInMemoryState, ForkChoiceNotifications, ForkChoiceSubscriptions, @@ -20,8 +20,8 @@ use reth_evm::ConfigureEvmEnv; use reth_execution_types::ExecutionOutcome; use reth_node_types::NodeTypesWithDB; use reth_primitives::{ - alloy_primitives::Sealable, Account, Block, BlockWithSenders, EthereumHardforks, Header, - Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, TransactionMeta, TransactionSigned, + Account, Block, BlockWithSenders, EthereumHardforks, Header, Receipt, SealedBlock, + SealedBlockWithSenders, SealedHeader, TransactionMeta, TransactionSigned, TransactionSignedNoHash, Withdrawal, Withdrawals, }; use reth_prune_types::{PruneCheckpoint, PruneSegment}; diff --git a/crates/storage/provider/src/providers/mod.rs b/crates/storage/provider/src/providers/mod.rs index 1a43b611bc318..50b914778db3d 100644 --- a/crates/storage/provider/src/providers/mod.rs +++ b/crates/storage/provider/src/providers/mod.rs @@ -8,7 +8,7 @@ use crate::{ TransactionVariant, TransactionsProvider, TreeViewer, WithdrawalsProvider, }; use alloy_eips::{BlockHashOrNumber, BlockId, BlockNumHash, BlockNumberOrTag}; -use alloy_primitives::{Address, BlockHash, BlockNumber, TxHash, TxNumber, B256, U256}; +use alloy_primitives::{Address, BlockHash, BlockNumber, Sealable, TxHash, TxNumber, B256, U256}; use reth_blockchain_tree_api::{ error::{CanonicalError, InsertBlockError}, BlockValidationKind, BlockchainTreeEngine, BlockchainTreeViewer, CanonicalOutcome, @@ -20,9 +20,9 @@ use reth_db_api::models::{AccountBeforeTx, StoredBlockBodyIndices}; use reth_evm::ConfigureEvmEnv; use reth_node_types::NodeTypesWithDB; use reth_primitives::{ - alloy_primitives::Sealable, Account, Block, BlockWithSenders, Header, Receipt, SealedBlock, - SealedBlockWithSenders, SealedHeader, TransactionMeta, TransactionSigned, - TransactionSignedNoHash, Withdrawal, Withdrawals, + Account, Block, BlockWithSenders, Header, Receipt, SealedBlock, SealedBlockWithSenders, + SealedHeader, TransactionMeta, TransactionSigned, TransactionSignedNoHash, Withdrawal, + Withdrawals, }; use reth_prune_types::{PruneCheckpoint, PruneSegment}; use reth_stages_types::{StageCheckpoint, StageId}; diff --git a/crates/storage/provider/src/test_utils/blocks.rs b/crates/storage/provider/src/test_utils/blocks.rs index 0ad933fc7aee5..daed906646d3b 100644 --- a/crates/storage/provider/src/test_utils/blocks.rs +++ b/crates/storage/provider/src/test_utils/blocks.rs @@ -2,16 +2,15 @@ use crate::{DatabaseProviderRW, ExecutionOutcome}; use alloy_consensus::TxLegacy; use alloy_primitives::{ - b256, hex_literal::hex, map::HashMap, Address, BlockNumber, Log, Parity, Sealable, TxKind, 
- B256, U256, + b256, hex_literal::hex, map::HashMap, Address, BlockNumber, Bytes, Log, Parity, Sealable, + TxKind, B256, U256, }; use once_cell::sync::Lazy; use reth_db::tables; use reth_db_api::{database::Database, models::StoredBlockBodyIndices}; use reth_primitives::{ - alloy_primitives, Account, BlockBody, Bytes, Header, Receipt, SealedBlock, - SealedBlockWithSenders, SealedHeader, Signature, Transaction, TransactionSigned, TxType, - Withdrawal, Withdrawals, + Account, BlockBody, Header, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, + Signature, Transaction, TransactionSigned, TxType, Withdrawal, Withdrawals, }; use reth_trie::root::{state_root_unhashed, storage_root_unhashed}; use revm::{db::BundleState, primitives::AccountInfo}; diff --git a/crates/storage/provider/src/test_utils/mock.rs b/crates/storage/provider/src/test_utils/mock.rs index b06ae6ec1ad2a..0be8f7708d3cd 100644 --- a/crates/storage/provider/src/test_utils/mock.rs +++ b/crates/storage/provider/src/test_utils/mock.rs @@ -10,7 +10,8 @@ use alloy_eips::{BlockHashOrNumber, BlockId, BlockNumberOrTag}; use alloy_primitives::{ keccak256, map::{HashMap, HashSet}, - Address, BlockHash, BlockNumber, Bytes, StorageKey, StorageValue, TxHash, TxNumber, B256, U256, + Address, BlockHash, BlockNumber, Bytes, Sealable, StorageKey, StorageValue, TxHash, TxNumber, + B256, U256, }; use parking_lot::Mutex; use reth_chainspec::{ChainInfo, ChainSpec}; @@ -19,8 +20,8 @@ use reth_db_api::models::{AccountBeforeTx, StoredBlockBodyIndices}; use reth_evm::ConfigureEvmEnv; use reth_execution_types::{Chain, ExecutionOutcome}; use reth_primitives::{ - alloy_primitives::Sealable, Account, Block, BlockWithSenders, Bytecode, GotExpected, Header, - Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, TransactionMeta, TransactionSigned, + Account, Block, BlockWithSenders, Bytecode, GotExpected, Header, Receipt, SealedBlock, + SealedBlockWithSenders, SealedHeader, TransactionMeta, TransactionSigned, TransactionSignedNoHash, Withdrawal, Withdrawals, }; use reth_stages_types::{StageCheckpoint, StageId}; diff --git a/crates/storage/storage-api/src/block.rs b/crates/storage/storage-api/src/block.rs index 6590b47b4524d..a3b0cc7438f36 100644 --- a/crates/storage/storage-api/src/block.rs +++ b/crates/storage/storage-api/src/block.rs @@ -3,11 +3,10 @@ use crate::{ TransactionVariant, TransactionsProvider, WithdrawalsProvider, }; use alloy_eips::{BlockHashOrNumber, BlockId, BlockNumberOrTag}; -use alloy_primitives::{BlockNumber, B256}; +use alloy_primitives::{BlockNumber, Sealable, B256}; use reth_db_models::StoredBlockBodyIndices; use reth_primitives::{ - alloy_primitives::Sealable, Block, BlockWithSenders, Header, Receipt, SealedBlock, - SealedBlockWithSenders, SealedHeader, + Block, BlockWithSenders, Header, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, }; use reth_storage_errors::provider::ProviderResult; use std::ops::RangeInclusive; diff --git a/crates/transaction-pool/src/maintain.rs b/crates/transaction-pool/src/maintain.rs index ce37cb2e7a8d2..8a6ce5d04a3a8 100644 --- a/crates/transaction-pool/src/maintain.rs +++ b/crates/transaction-pool/src/maintain.rs @@ -7,7 +7,7 @@ use crate::{ traits::{CanonicalStateUpdate, TransactionPool, TransactionPoolExt}, BlockInfo, PoolTransaction, }; -use alloy_primitives::{Address, BlockHash, BlockNumber}; +use alloy_primitives::{Address, BlockHash, BlockNumber, Sealable}; use futures_util::{ future::{BoxFuture, Fuse, FusedFuture}, FutureExt, Stream, StreamExt, @@ -17,8 +17,7 @@ use 
reth_chainspec::{ChainSpecProvider, EthChainSpec}; use reth_execution_types::ChangedAccount; use reth_fs_util::FsPathError; use reth_primitives::{ - alloy_primitives::Sealable, BlockNumberOrTag, PooledTransactionsElementEcRecovered, - SealedHeader, TransactionSigned, + BlockNumberOrTag, PooledTransactionsElementEcRecovered, SealedHeader, TransactionSigned, }; use reth_storage_api::{errors::provider::ProviderError, BlockReaderIdExt, StateProviderFactory}; use reth_tasks::TaskSpawner; diff --git a/crates/transaction-pool/src/test_utils/gen.rs b/crates/transaction-pool/src/test_utils/gen.rs index e5fceb9150ed7..6123260b1d9d4 100644 --- a/crates/transaction-pool/src/test_utils/gen.rs +++ b/crates/transaction-pool/src/test_utils/gen.rs @@ -1,11 +1,11 @@ use crate::EthPooledTransaction; use alloy_consensus::{TxEip1559, TxEip4844, TxLegacy}; use alloy_eips::eip2930::AccessList; -use alloy_primitives::{Address, TxKind, B256, U256}; +use alloy_primitives::{Address, Bytes, TxKind, B256, U256}; use rand::Rng; use reth_chainspec::MAINNET; use reth_primitives::{ - constants::MIN_PROTOCOL_BASE_FEE, sign_message, Bytes, Transaction, TransactionSigned, + constants::MIN_PROTOCOL_BASE_FEE, sign_message, Transaction, TransactionSigned, }; /// A generator for transactions for testing purposes. diff --git a/crates/trie/db/src/state.rs b/crates/trie/db/src/state.rs index 35ed064e5231e..5acb9e0d1b491 100644 --- a/crates/trie/db/src/state.rs +++ b/crates/trie/db/src/state.rs @@ -268,10 +268,10 @@ impl DatabaseHashedPostState for HashedPostState { #[cfg(test)] mod tests { use super::*; - use alloy_primitives::{map::HashMap, Address, U256}; + use alloy_primitives::{hex, map::HashMap, Address, U256}; use reth_db::test_utils::create_test_rw_db; use reth_db_api::database::Database; - use reth_primitives::{hex, revm_primitives::AccountInfo}; + use reth_primitives::revm_primitives::AccountInfo; use revm::db::BundleState; #[test] diff --git a/crates/trie/trie/src/updates.rs b/crates/trie/trie/src/updates.rs index c499d7eefae5b..f299a893d6c9e 100644 --- a/crates/trie/trie/src/updates.rs +++ b/crates/trie/trie/src/updates.rs @@ -232,7 +232,7 @@ where S: Serializer, { let mut storage_nodes = - Vec::from_iter(map.iter().map(|elem| reth_primitives::hex::encode(elem.pack()))); + Vec::from_iter(map.iter().map(|elem| alloy_primitives::hex::encode(elem.pack()))); storage_nodes.sort_unstable(); storage_nodes.serialize(serializer) } @@ -252,7 +252,7 @@ where storage_nodes.sort_unstable_by(|a, b| a.0.cmp(b.0)); for (k, v) in storage_nodes { // pack, then hex encode the Nibbles - let packed = reth_primitives::hex::encode(k.pack()); + let packed = alloy_primitives::hex::encode(k.pack()); map_serializer.serialize_entry(&packed, &v)?; } map_serializer.end() diff --git a/examples/custom-evm/src/main.rs b/examples/custom-evm/src/main.rs index b3bf36ce6d5d4..d931c3b275bf4 100644 --- a/examples/custom-evm/src/main.rs +++ b/examples/custom-evm/src/main.rs @@ -3,17 +3,14 @@ #![cfg_attr(not(test), warn(unused_crate_dependencies))] use alloy_genesis::Genesis; -use alloy_primitives::{address, Address, U256}; +use alloy_primitives::{address, Address, Bytes, U256}; use reth::{ builder::{ components::{ExecutorBuilder, PayloadServiceBuilder}, BuilderContext, NodeBuilder, }, payload::{EthBuiltPayload, EthPayloadBuilderAttributes}, - primitives::{ - revm_primitives::{Env, PrecompileResult}, - Bytes, - }, + primitives::revm_primitives::{Env, PrecompileResult}, revm::{ handler::register::EvmHandler, inspector_handle_register, diff --git 
a/examples/custom-payload-builder/Cargo.toml b/examples/custom-payload-builder/Cargo.toml index f10bd8058b64c..1c160fe5ec87f 100644 --- a/examples/custom-payload-builder/Cargo.toml +++ b/examples/custom-payload-builder/Cargo.toml @@ -15,6 +15,8 @@ reth-payload-builder.workspace = true reth-node-ethereum.workspace = true reth-ethereum-payload-builder.workspace = true +alloy-primitives.workspace = true + tracing.workspace = true futures-util.workspace = true eyre.workspace = true diff --git a/examples/custom-payload-builder/src/generator.rs b/examples/custom-payload-builder/src/generator.rs index 807cbf6a53bb4..f5d64e41cd095 100644 --- a/examples/custom-payload-builder/src/generator.rs +++ b/examples/custom-payload-builder/src/generator.rs @@ -1,4 +1,5 @@ use crate::job::EmptyBlockPayloadJob; +use alloy_primitives::Bytes; use reth::{ providers::{BlockReaderIdExt, BlockSource, StateProviderFactory}, tasks::TaskSpawner, @@ -7,7 +8,7 @@ use reth::{ use reth_basic_payload_builder::{BasicPayloadJobGeneratorConfig, PayloadBuilder, PayloadConfig}; use reth_node_api::PayloadBuilderAttributes; use reth_payload_builder::{PayloadBuilderError, PayloadJobGenerator}; -use reth_primitives::{BlockNumberOrTag, Bytes}; +use reth_primitives::BlockNumberOrTag; use std::sync::Arc; /// The generator type that creates new jobs that builds empty blocks. diff --git a/examples/db-access/src/main.rs b/examples/db-access/src/main.rs index 7878ba3c8a89f..ab018a0b07a69 100644 --- a/examples/db-access/src/main.rs +++ b/examples/db-access/src/main.rs @@ -1,9 +1,9 @@ -use alloy_primitives::{Address, B256}; +use alloy_primitives::{Address, Sealable, B256}; use alloy_rpc_types::{Filter, FilteredParams}; use reth_chainspec::ChainSpecBuilder; use reth_node_ethereum::EthereumNode; use reth_node_types::NodeTypesWithDBAdapter; -use reth_primitives::{alloy_primitives::Sealable, SealedHeader}; +use reth_primitives::SealedHeader; use reth_provider::{ providers::StaticFileProvider, AccountReader, BlockReader, BlockSource, HeaderProvider, ProviderFactory, ReceiptProvider, StateProvider, TransactionsProvider, diff --git a/examples/stateful-precompile/src/main.rs b/examples/stateful-precompile/src/main.rs index f58c7a557b8a8..05a6fd86c9350 100644 --- a/examples/stateful-precompile/src/main.rs +++ b/examples/stateful-precompile/src/main.rs @@ -3,15 +3,12 @@ #![cfg_attr(not(test), warn(unused_crate_dependencies))] use alloy_genesis::Genesis; -use alloy_primitives::{Address, U256}; +use alloy_primitives::{Address, Bytes, U256}; use parking_lot::RwLock; use reth::{ api::NextBlockEnvAttributes, builder::{components::ExecutorBuilder, BuilderContext, NodeBuilder}, - primitives::{ - revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg, Env, PrecompileResult, TxEnv}, - Bytes, - }, + primitives::revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg, Env, PrecompileResult, TxEnv}, revm::{ handler::register::EvmHandler, inspector_handle_register, diff --git a/testing/testing-utils/src/generators.rs b/testing/testing-utils/src/generators.rs index c1c23c9986706..85506f9d3a63c 100644 --- a/testing/testing-utils/src/generators.rs +++ b/testing/testing-utils/src/generators.rs @@ -500,8 +500,8 @@ mod tests { use super::*; use alloy_consensus::TxEip1559; use alloy_eips::eip2930::AccessList; - use alloy_primitives::Parity; - use reth_primitives::{hex, public_key_to_address, Signature}; + use alloy_primitives::{hex, Parity}; + use reth_primitives::{public_key_to_address, Signature}; use std::str::FromStr; #[test] diff --git 
a/testing/testing-utils/src/genesis_allocator.rs b/testing/testing-utils/src/genesis_allocator.rs index ebdef9eb1932a..acf5e091cba3b 100644 --- a/testing/testing-utils/src/genesis_allocator.rs +++ b/testing/testing-utils/src/genesis_allocator.rs @@ -18,8 +18,7 @@ use std::{ /// /// # Example /// ``` -/// # use alloy_primitives::{Address, U256}; -/// # use reth_primitives::{hex, Bytes}; +/// # use alloy_primitives::{Address, U256, hex, Bytes}; /// # use reth_testing_utils::GenesisAllocator; /// # use std::str::FromStr; /// let mut allocator = GenesisAllocator::default(); From e02a2de2d0d4746285de5ac0e75fd00e07369b9f Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Mon, 30 Sep 2024 11:06:40 +0300 Subject: [PATCH 73/84] feat(exex): finalize WAL with the lowest finished height (#11323) --- crates/exex/exex/src/manager.rs | 37 ++++++++++++++++++------------- crates/exex/exex/src/wal/cache.rs | 2 +- 2 files changed, 22 insertions(+), 17 deletions(-) diff --git a/crates/exex/exex/src/manager.rs b/crates/exex/exex/src/manager.rs index 8c52bc6590abc..44b08b097881c 100644 --- a/crates/exex/exex/src/manager.rs +++ b/crates/exex/exex/src/manager.rs @@ -333,35 +333,40 @@ where let exex_finished_heights = self .exex_handles .iter() - // Get ExEx ID and hash of the finished height for each ExEx - .map(|exex_handle| { - (&exex_handle.id, exex_handle.finished_height.map(|block| block.hash)) - }) + // Get ID and finished height for each ExEx + .map(|exex_handle| (&exex_handle.id, exex_handle.finished_height)) // Deduplicate all hashes - .unique_by(|(_, hash)| *hash) + .unique_by(|(_, num_hash)| num_hash.map(|num_hash| num_hash.hash)) // Check if hashes are canonical - .map(|(exex_id, hash)| { - hash.map_or(Ok((exex_id, hash, false)), |hash| { + .map(|(exex_id, num_hash)| { + num_hash.map_or(Ok((exex_id, num_hash, false)), |num_hash| { self.provider - .is_known(&hash) - // Save the ExEx ID, hash of the finished height, and whether the hash - // is canonical - .map(|is_canonical| (exex_id, Some(hash), is_canonical)) + .is_known(&num_hash.hash) + // Save the ExEx ID, finished height, and whether the hash is canonical + .map(|is_canonical| (exex_id, Some(num_hash), is_canonical)) }) }) // We collect here to be able to log the unfinalized ExExes below .collect::, _>>()?; if exex_finished_heights.iter().all(|(_, _, is_canonical)| *is_canonical) { // If there is a finalized header and all ExExs are on the canonical chain, finalize - // the WAL with the new finalized header - self.wal.finalize(finalized_header.num_hash())?; + // the WAL with the lowest finished height among all ExExes + let lowest_finished_height = exex_finished_heights + .iter() + .copied() + .filter_map(|(_, num_hash, _)| num_hash) + .min_by_key(|num_hash| num_hash.number); + self.wal + .finalize(lowest_finished_height.expect("ExExManager has at least one ExEx"))?; } else { let unfinalized_exexes = exex_finished_heights .into_iter() - .filter_map(|(exex_id, hash, is_canonical)| { - is_canonical.not().then_some((exex_id, hash)) + .filter_map(|(exex_id, num_hash, is_canonical)| { + is_canonical.not().then_some((exex_id, num_hash)) }) - .format_with(", ", |(exex_id, hash), f| f(&format_args!("{exex_id:?} = {hash:?}"))); + .format_with(", ", |(exex_id, num_hash), f| { + f(&format_args!("{exex_id:?} = {num_hash:?}")) + }); debug!( %unfinalized_exexes, "Not all ExExes are on the canonical chain, can't finalize the WAL" diff --git a/crates/exex/exex/src/wal/cache.rs b/crates/exex/exex/src/wal/cache.rs index cef27369eb63c..097a07c7a9f52 100644 --- 
a/crates/exex/exex/src/wal/cache.rs +++ b/crates/exex/exex/src/wal/cache.rs @@ -18,7 +18,7 @@ pub struct BlockCache { /// the cache with the same file ID. I.e. for each notification, there may be multiple blocks /// in the cache. files: RwLock>>, - /// A mapping of committed blocks `Block Hash -> Block`. + /// A mapping of committed blocks `Block Hash -> (File ID, Block)`. /// /// For each [`ExExNotification::ChainCommitted`] notification, there will be an entry per /// block. From 17aa3d61f629bd1556c9d244218b5ab27577fbf0 Mon Sep 17 00:00:00 2001 From: Roman Krasiuk Date: Mon, 30 Sep 2024 10:08:45 +0200 Subject: [PATCH 74/84] fix(trie): witness empty root node (#10972) --- crates/trie/common/src/proofs.rs | 23 +++++++++---- crates/trie/db/tests/proof.rs | 6 +++- crates/trie/db/tests/witness.rs | 57 ++++++++++++++++++++++++++++++++ crates/trie/trie/src/proof.rs | 2 +- crates/trie/trie/src/witness.rs | 17 +++++++--- 5 files changed, 92 insertions(+), 13 deletions(-) create mode 100644 crates/trie/db/tests/witness.rs diff --git a/crates/trie/common/src/proofs.rs b/crates/trie/common/src/proofs.rs index b35edd96d560c..8aca67f8d1ad3 100644 --- a/crates/trie/common/src/proofs.rs +++ b/crates/trie/common/src/proofs.rs @@ -2,7 +2,7 @@ use crate::{Nibbles, TrieAccount}; use alloy_primitives::{keccak256, Address, Bytes, B256, U256}; -use alloy_rlp::{encode_fixed_size, Decodable}; +use alloy_rlp::{encode_fixed_size, Decodable, EMPTY_STRING_CODE}; use alloy_trie::{ nodes::TrieNode, proof::{verify_proof, ProofNodes, ProofVerificationError}, @@ -86,13 +86,18 @@ pub struct StorageMultiProof { pub subtree: ProofNodes, } -impl Default for StorageMultiProof { - fn default() -> Self { - Self { root: EMPTY_ROOT_HASH, subtree: Default::default() } +impl StorageMultiProof { + /// Create new storage multiproof for empty trie. + pub fn empty() -> Self { + Self { + root: EMPTY_ROOT_HASH, + subtree: ProofNodes::from_iter([( + Nibbles::default(), + Bytes::from([EMPTY_STRING_CODE]), + )]), + } } -} -impl StorageMultiProof { /// Return storage proofs for the target storage slot (unhashed). pub fn storage_proof(&self, slot: B256) -> Result { let nibbles = Nibbles::unpack(keccak256(slot)); @@ -209,6 +214,12 @@ impl StorageProof { Self { key, nibbles, ..Default::default() } } + /// Set proof nodes on storage proof. + pub fn with_proof(mut self, proof: Vec) -> Self { + self.proof = proof; + self + } + /// Verify the proof against the provided storage root. 
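(Editorial note, not part of the diff above.) The reason `StorageMultiProof::empty()` and the new witness test carry `Bytes::from([EMPTY_STRING_CODE])` is that the root node of an empty trie is the RLP encoding of an empty string, the single byte 0x80, and `EMPTY_ROOT_HASH` is the keccak256 of that byte. The standalone sketch below illustrates that relationship; it assumes only the `alloy-primitives` and `alloy-rlp` crates and writes the well-known empty-root hash out inline instead of importing reth's constant.

use alloy_primitives::{b256, keccak256};
use alloy_rlp::EMPTY_STRING_CODE; // 0x80, the RLP encoding of an empty string

fn main() {
    // The root node of an empty trie is the RLP empty string, so hashing that single
    // byte yields the canonical empty-root hash. Shipping the byte as a proof node
    // gives witness consumers the preimage of EMPTY_ROOT_HASH.
    let empty_root_node = [EMPTY_STRING_CODE];
    let empty_root = keccak256(empty_root_node);
    assert_eq!(
        empty_root,
        b256!("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421"),
    );
    println!("keccak256(0x80) = {empty_root}");
}
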
pub fn verify(&self, root: B256) -> Result<(), ProofVerificationError> { let expected = diff --git a/crates/trie/db/tests/proof.rs b/crates/trie/db/tests/proof.rs index 33a19de38037c..5ffa6729b49a8 100644 --- a/crates/trie/db/tests/proof.rs +++ b/crates/trie/db/tests/proof.rs @@ -1,6 +1,7 @@ #![allow(missing_docs)] use alloy_primitives::{keccak256, Address, Bytes, B256, U256}; +use alloy_rlp::EMPTY_STRING_CODE; use reth_chainspec::{Chain, ChainSpec, HOLESKY, MAINNET}; use reth_primitives::{constants::EMPTY_ROOT_HASH, Account}; use reth_provider::test_utils::{create_test_provider_factory, insert_genesis}; @@ -111,7 +112,10 @@ fn testspec_empty_storage_proof() { assert_eq!(slots.len(), account_proof.storage_proofs.len()); for (idx, slot) in slots.into_iter().enumerate() { let proof = account_proof.storage_proofs.get(idx).unwrap(); - assert_eq!(proof, &StorageProof::new(slot)); + assert_eq!( + proof, + &StorageProof::new(slot).with_proof(vec![Bytes::from([EMPTY_STRING_CODE])]) + ); assert_eq!(proof.verify(account_proof.storage_root), Ok(())); } assert_eq!(account_proof.verify(root), Ok(())); diff --git a/crates/trie/db/tests/witness.rs b/crates/trie/db/tests/witness.rs new file mode 100644 index 0000000000000..cc921f6570877 --- /dev/null +++ b/crates/trie/db/tests/witness.rs @@ -0,0 +1,57 @@ +#![allow(missing_docs)] + +use alloy_primitives::{ + keccak256, + map::{HashMap, HashSet}, + Address, Bytes, B256, U256, +}; +use alloy_rlp::EMPTY_STRING_CODE; +use reth_primitives::{constants::EMPTY_ROOT_HASH, Account}; +use reth_provider::{test_utils::create_test_provider_factory, HashingWriter}; +use reth_trie::{proof::Proof, witness::TrieWitness, HashedPostState, HashedStorage, StateRoot}; +use reth_trie_db::{DatabaseProof, DatabaseStateRoot, DatabaseTrieWitness}; + +#[test] +fn includes_empty_node_preimage() { + let factory = create_test_provider_factory(); + let provider = factory.provider_rw().unwrap(); + + let address = Address::random(); + let hashed_address = keccak256(address); + let hashed_slot = B256::random(); + + // witness includes empty state trie root node + assert_eq!( + TrieWitness::from_tx(provider.tx_ref()) + .compute(HashedPostState { + accounts: HashMap::from([(hashed_address, Some(Account::default()))]), + storages: HashMap::default(), + }) + .unwrap(), + HashMap::from_iter([(EMPTY_ROOT_HASH, Bytes::from([EMPTY_STRING_CODE]))]) + ); + + // Insert account into database + provider.insert_account_for_hashing([(address, Some(Account::default()))]).unwrap(); + + let state_root = StateRoot::from_tx(provider.tx_ref()).root().unwrap(); + let multiproof = Proof::from_tx(provider.tx_ref()) + .multiproof(HashMap::from_iter([(hashed_address, HashSet::from_iter([hashed_slot]))])) + .unwrap(); + + let witness = TrieWitness::from_tx(provider.tx_ref()) + .compute(HashedPostState { + accounts: HashMap::from([(hashed_address, Some(Account::default()))]), + storages: HashMap::from([( + hashed_address, + HashedStorage::from_iter(false, [(hashed_slot, U256::from(1))]), + )]), + }) + .unwrap(); + assert!(witness.contains_key(&state_root)); + for node in multiproof.account_subtree.values() { + assert_eq!(witness.get(&keccak256(node)), Some(node)); + } + // witness includes empty state trie root node + assert_eq!(witness.get(&EMPTY_ROOT_HASH), Some(&Bytes::from([EMPTY_STRING_CODE]))); +} diff --git a/crates/trie/trie/src/proof.rs b/crates/trie/trie/src/proof.rs index 3e9ca5783814b..95d9505218bf5 100644 --- a/crates/trie/trie/src/proof.rs +++ b/crates/trie/trie/src/proof.rs @@ -192,7 +192,7 @@ where // 
short circuit on empty storage if hashed_storage_cursor.is_storage_empty()? { - return Ok(StorageMultiProof::default()) + return Ok(StorageMultiProof::empty()) } let target_nibbles = targets.into_iter().map(Nibbles::unpack).collect::>(); diff --git a/crates/trie/trie/src/witness.rs b/crates/trie/trie/src/witness.rs index 972afc10c3424..b0fcfb021ae1f 100644 --- a/crates/trie/trie/src/witness.rs +++ b/crates/trie/trie/src/witness.rs @@ -17,7 +17,7 @@ use itertools::{Either, Itertools}; use reth_execution_errors::TrieWitnessError; use reth_primitives::constants::EMPTY_ROOT_HASH; use reth_trie_common::{ - BranchNode, HashBuilder, Nibbles, TrieAccount, TrieNode, CHILD_INDEX_RANGE, + BranchNode, HashBuilder, Nibbles, StorageMultiProof, TrieAccount, TrieNode, CHILD_INDEX_RANGE, }; /// State transition witness for the trie. @@ -110,8 +110,10 @@ where let mut account_rlp = Vec::with_capacity(128); let mut account_trie_nodes = BTreeMap::default(); for (hashed_address, hashed_slots) in proof_targets { - let storage_multiproof = - account_multiproof.storages.remove(&hashed_address).unwrap_or_default(); + let storage_multiproof = account_multiproof + .storages + .remove(&hashed_address) + .unwrap_or_else(StorageMultiProof::empty); // Gather and record account trie nodes. let account = state @@ -215,7 +217,8 @@ where proof: impl IntoIterator, ) -> Result>>, TrieWitnessError> { let mut trie_nodes = BTreeMap::default(); - for (path, encoded) in proof { + let mut proof_iter = proof.into_iter().enumerate().peekable(); + while let Some((idx, (path, encoded))) = proof_iter.next() { // Record the node in witness. self.witness.insert(keccak256(encoded.as_ref()), encoded.clone()); @@ -239,7 +242,11 @@ where trie_nodes.insert(next_path.clone(), Either::Right(leaf.value.clone())); } } - TrieNode::EmptyRoot => return Err(TrieWitnessError::UnexpectedEmptyRoot(next_path)), + TrieNode::EmptyRoot => { + if idx != 0 || proof_iter.peek().is_some() { + return Err(TrieWitnessError::UnexpectedEmptyRoot(next_path)) + } + } }; } From 2ae8ea3b1463e18ab2d382183186b611df7acf6f Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 30 Sep 2024 11:59:23 +0200 Subject: [PATCH 75/84] feat: add SystemCaller helper type (#11068) Co-authored-by: Federico Gimenez Co-authored-by: Federico Gimenez --- crates/evm/src/system_calls/eip7002.rs | 6 + crates/evm/src/system_calls/eip7251.rs | 6 + crates/evm/src/system_calls/mod.rs | 318 +++++++++++++++++++++++++ 3 files changed, 330 insertions(+) diff --git a/crates/evm/src/system_calls/eip7002.rs b/crates/evm/src/system_calls/eip7002.rs index aa64b8af0c1d4..b84bcbc9a6319 100644 --- a/crates/evm/src/system_calls/eip7002.rs +++ b/crates/evm/src/system_calls/eip7002.rs @@ -118,6 +118,12 @@ where // commit the state evm.context.evm.db.commit(state); + post_commit(result) +} + +/// Parses the withdrawal requests from the execution output. +#[inline] +pub(crate) fn post_commit(result: ExecutionResult) -> Result, BlockExecutionError> { let mut data = match result { ExecutionResult::Success { output, .. } => Ok(output.into_data()), ExecutionResult::Revert { output, .. } => { diff --git a/crates/evm/src/system_calls/eip7251.rs b/crates/evm/src/system_calls/eip7251.rs index a3d3aff2e2f63..df122293be705 100644 --- a/crates/evm/src/system_calls/eip7251.rs +++ b/crates/evm/src/system_calls/eip7251.rs @@ -120,6 +120,12 @@ where // commit the state evm.context.evm.db.commit(state); + post_commit(result) +} + +/// Parses the consolidation requests from the execution output. 
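(Editorial note, not part of the diff above.) Splitting `post_commit` out of the transact-and-commit helpers lets the same output parsing run after a caller such as `SystemCaller` has committed the state itself. The sketch below shows only the shape of that parsing with a stand-in enum; the names are illustrative and are not revm's actual API.

// Stand-in for revm's ExecutionResult; variant and field names are illustrative.
enum ExecResult {
    Success { output: Vec<u8> },
    Revert { output: Vec<u8> },
    Halt { reason: String },
}

// Mirrors the post_commit shape: a successful call yields the raw request data,
// while reverts and halts surface as errors instead of being swallowed.
fn post_commit(result: ExecResult) -> Result<Vec<u8>, String> {
    match result {
        ExecResult::Success { output } => Ok(output),
        ExecResult::Revert { output } => Err(format!("system call reverted: {output:02x?}")),
        ExecResult::Halt { reason } => Err(format!("system call halted: {reason}")),
    }
}

fn main() {
    assert_eq!(post_commit(ExecResult::Success { output: vec![0x01] }), Ok(vec![0x01]));
    assert!(post_commit(ExecResult::Revert { output: vec![0xde, 0xad] }).is_err());
    assert!(post_commit(ExecResult::Halt { reason: "out of gas".into() }).is_err());
}
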
+#[inline] +pub(crate) fn post_commit(result: ExecutionResult) -> Result, BlockExecutionError> { let mut data = match result { ExecutionResult::Success { output, .. } => Ok(output.into_data()), ExecutionResult::Revert { output, .. } => { diff --git a/crates/evm/src/system_calls/mod.rs b/crates/evm/src/system_calls/mod.rs index 50d5c4c857ff8..2e2df10ad0493 100644 --- a/crates/evm/src/system_calls/mod.rs +++ b/crates/evm/src/system_calls/mod.rs @@ -1,5 +1,14 @@ //! System contract call functions. +use crate::ConfigureEvm; +use alloc::vec::Vec; +use core::fmt::Display; +use reth_chainspec::EthereumHardforks; +use reth_execution_errors::BlockExecutionError; +use reth_primitives::{Block, Header, Request}; +use revm::{Database, DatabaseCommit, Evm}; +use revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg, EnvWithHandlerCfg, ResultAndState, B256}; + mod eip2935; pub use eip2935::*; @@ -11,3 +20,312 @@ pub use eip7002::*; mod eip7251; pub use eip7251::*; + +/// A hook that is called after each state change. +pub trait OnStateHook { + /// Invoked with the result and state after each system call. + fn on_state(&mut self, state: &ResultAndState); +} + +impl OnStateHook for F +where + F: FnMut(&ResultAndState), +{ + fn on_state(&mut self, state: &ResultAndState) { + self(state) + } +} + +/// An [`OnStateHook`] that does nothing. +#[derive(Default, Debug, Clone)] +#[non_exhaustive] +pub struct NoopHook; + +impl OnStateHook for NoopHook { + fn on_state(&mut self, _state: &ResultAndState) {} +} + +/// An ephemeral helper type for executing system calls. +/// +/// This can be used to chain system transaction calls. +#[allow(missing_debug_implementations)] +pub struct SystemCaller<'a, EvmConfig, Chainspec, Hook = NoopHook> { + evm_config: &'a EvmConfig, + chain_spec: Chainspec, + /// Optional hook to be called after each state change. + hook: Option, +} + +impl<'a, EvmConfig, Chainspec> SystemCaller<'a, EvmConfig, Chainspec> { + /// Create a new system caller with the given EVM config, database, and chain spec, and creates + /// the EVM with the given initialized config and block environment. + pub const fn new(evm_config: &'a EvmConfig, chain_spec: Chainspec) -> Self { + Self { evm_config, chain_spec, hook: None } + } +} + +impl<'a, EvmConfig, Chainspec, Hook> SystemCaller<'a, EvmConfig, Chainspec, Hook> { + /// Installs a custom hook to be called after each state change. + pub fn with_state_hook( + self, + hook: H, + ) -> SystemCaller<'a, EvmConfig, Chainspec, H> { + let Self { evm_config, chain_spec, .. } = self; + SystemCaller { evm_config, chain_spec, hook: Some(hook) } + } + /// Convenience method to consume the type and drop borrowed fields + pub fn finish(self) {} +} + +fn initialize_evm<'a, DB>( + db: &'a mut DB, + initialized_cfg: &'a CfgEnvWithHandlerCfg, + initialized_block_env: &'a BlockEnv, +) -> Evm<'a, (), &'a mut DB> +where + DB: Database, +{ + Evm::builder() + .with_db(db) + .with_env_with_handler_cfg(EnvWithHandlerCfg::new_with_cfg_env( + initialized_cfg.clone(), + initialized_block_env.clone(), + Default::default(), + )) + .build() +} + +impl<'a, EvmConfig, Chainspec, Hook> SystemCaller<'a, EvmConfig, Chainspec, Hook> +where + EvmConfig: ConfigureEvm
<Header = Header>
, + Chainspec: EthereumHardforks, + Hook: OnStateHook, +{ + /// Apply pre execution changes. + pub fn apply_pre_execution_changes( + &mut self, + block: &Block, + evm: &mut Evm<'_, Ext, DB>, + ) -> Result<(), BlockExecutionError> + where + DB: Database + DatabaseCommit, + DB::Error: Display, + { + self.apply_blockhashes_contract_call( + block.timestamp, + block.number, + block.parent_hash, + evm, + )?; + self.apply_beacon_root_contract_call( + block.timestamp, + block.number, + block.parent_beacon_block_root, + evm, + )?; + + Ok(()) + } + + /// Apply post execution changes. + pub fn apply_post_execution_changes( + &mut self, + evm: &mut Evm<'_, Ext, DB>, + ) -> Result, BlockExecutionError> + where + DB: Database + DatabaseCommit, + DB::Error: Display, + { + // Collect all EIP-7685 requests + let withdrawal_requests = self.apply_withdrawal_requests_contract_call(evm)?; + + // Collect all EIP-7251 requests + let consolidation_requests = self.apply_consolidation_requests_contract_call(evm)?; + Ok([withdrawal_requests, consolidation_requests].concat()) + } + + /// Applies the pre-block call to the EIP-2935 blockhashes contract. + pub fn pre_block_blockhashes_contract_call( + &mut self, + db: &mut DB, + initialized_cfg: &CfgEnvWithHandlerCfg, + initialized_block_env: &BlockEnv, + parent_block_hash: B256, + ) -> Result<(), BlockExecutionError> + where + DB: Database + DatabaseCommit, + DB::Error: Display, + { + let mut evm = initialize_evm(db, initialized_cfg, initialized_block_env); + self.apply_blockhashes_contract_call( + initialized_block_env.timestamp.to(), + initialized_block_env.number.to(), + parent_block_hash, + &mut evm, + )?; + + Ok(()) + } + + /// Applies the pre-block call to the EIP-2935 blockhashes contract. + pub fn apply_blockhashes_contract_call( + &mut self, + timestamp: u64, + block_number: u64, + parent_block_hash: B256, + evm: &mut Evm<'_, Ext, DB>, + ) -> Result<(), BlockExecutionError> + where + DB: Database + DatabaseCommit, + DB::Error: Display, + { + let result_and_state = eip2935::transact_blockhashes_contract_call( + &self.evm_config.clone(), + &self.chain_spec, + timestamp, + block_number, + parent_block_hash, + evm, + )?; + + if let Some(res) = result_and_state { + if let Some(ref mut hook) = self.hook { + hook.on_state(&res); + } + evm.context.evm.db.commit(res.state); + } + + Ok(()) + } + + /// Applies the pre-block call to the EIP-4788 beacon root contract. + pub fn pre_block_beacon_root_contract_call( + &mut self, + db: &mut DB, + initialized_cfg: &CfgEnvWithHandlerCfg, + initialized_block_env: &BlockEnv, + parent_beacon_block_root: Option, + ) -> Result<(), BlockExecutionError> + where + DB: Database + DatabaseCommit, + DB::Error: Display, + { + let mut evm = initialize_evm(db, initialized_cfg, initialized_block_env); + + self.apply_beacon_root_contract_call( + initialized_block_env.timestamp.to(), + initialized_block_env.number.to(), + parent_beacon_block_root, + &mut evm, + )?; + + Ok(()) + } + + /// Applies the pre-block call to the EIP-4788 beacon root contract. 
+ pub fn apply_beacon_root_contract_call( + &mut self, + timestamp: u64, + block_number: u64, + parent_block_hash: Option, + evm: &mut Evm<'_, Ext, DB>, + ) -> Result<(), BlockExecutionError> + where + DB: Database + DatabaseCommit, + DB::Error: Display, + { + let result_and_state = eip4788::transact_beacon_root_contract_call( + &self.evm_config.clone(), + &self.chain_spec, + timestamp, + block_number, + parent_block_hash, + evm, + )?; + + if let Some(res) = result_and_state { + if let Some(ref mut hook) = self.hook { + hook.on_state(&res); + } + evm.context.evm.db.commit(res.state); + } + + Ok(()) + } + + /// Applies the post-block call to the EIP-7002 withdrawal request contract. + pub fn post_block_withdrawal_requests_contract_call( + &mut self, + db: &mut DB, + initialized_cfg: &CfgEnvWithHandlerCfg, + initialized_block_env: &BlockEnv, + ) -> Result, BlockExecutionError> + where + DB: Database + DatabaseCommit, + DB::Error: Display, + { + let mut evm = initialize_evm(db, initialized_cfg, initialized_block_env); + + let result = self.apply_withdrawal_requests_contract_call(&mut evm)?; + + Ok(result) + } + + /// Applies the post-block call to the EIP-7002 withdrawal request contract. + pub fn apply_withdrawal_requests_contract_call( + &mut self, + evm: &mut Evm<'_, Ext, DB>, + ) -> Result, BlockExecutionError> + where + DB: Database + DatabaseCommit, + DB::Error: Display, + { + let result_and_state = + eip7002::transact_withdrawal_requests_contract_call(&self.evm_config.clone(), evm)?; + + if let Some(ref mut hook) = self.hook { + hook.on_state(&result_and_state); + } + evm.context.evm.db.commit(result_and_state.state); + + eip7002::post_commit(result_and_state.result) + } + + /// Applies the post-block call to the EIP-7251 consolidation requests contract. + pub fn post_block_consolidation_requests_contract_call( + &mut self, + db: &mut DB, + initialized_cfg: &CfgEnvWithHandlerCfg, + initialized_block_env: &BlockEnv, + ) -> Result, BlockExecutionError> + where + DB: Database + DatabaseCommit, + DB::Error: Display, + { + let mut evm = initialize_evm(db, initialized_cfg, initialized_block_env); + + let res = self.apply_consolidation_requests_contract_call(&mut evm)?; + + Ok(res) + } + + /// Applies the post-block call to the EIP-7251 consolidation requests contract. 
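(Editorial note, not part of the diff above.) The hook wiring added here is a callback pattern: a trait with a blanket impl for `FnMut` closures plus the `NoopHook` default shown in the diff. The self-contained sketch below shows what a caller-side hook might look like; `StateChange` and `GasCounter` are stand-ins invented for the sketch, not types from revm or reth.

// Stand-in for revm's ResultAndState, invented for this sketch only.
struct StateChange {
    gas_used: u64,
}

// Same shape as the OnStateHook trait in the diff: invoked after every system
// call whose state the caller is about to commit.
trait OnStateHook {
    fn on_state(&mut self, state: &StateChange);
}

// Blanket impl so that any `FnMut(&StateChange)` closure can be installed as a
// hook, mirroring the blanket impl in the diff.
impl<F> OnStateHook for F
where
    F: FnMut(&StateChange),
{
    fn on_state(&mut self, state: &StateChange) {
        self(state)
    }
}

// A concrete hook that accumulates gas across all system calls it observes.
#[derive(Default)]
struct GasCounter {
    total: u64,
}

impl OnStateHook for GasCounter {
    fn on_state(&mut self, state: &StateChange) {
        self.total += state.gas_used;
    }
}

fn main() {
    let mut hook = GasCounter::default();
    hook.on_state(&StateChange { gas_used: 21_000 });
    hook.on_state(&StateChange { gas_used: 42_000 });
    assert_eq!(hook.total, 63_000);
}
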
+ pub fn apply_consolidation_requests_contract_call( + &mut self, + evm: &mut Evm<'_, Ext, DB>, + ) -> Result, BlockExecutionError> + where + DB: Database + DatabaseCommit, + DB::Error: Display, + { + let result_and_state = + eip7251::transact_consolidation_requests_contract_call(&self.evm_config.clone(), evm)?; + + if let Some(ref mut hook) = self.hook { + hook.on_state(&result_and_state); + } + evm.context.evm.db.commit(result_and_state.state); + + eip7251::post_commit(result_and_state.result) + } +} From ae4aeb44d9fc82f9f3e56b40b2966fef2d0780ca Mon Sep 17 00:00:00 2001 From: Dan Cline <6798349+Rjected@users.noreply.github.com> Date: Mon, 30 Sep 2024 06:04:47 -0400 Subject: [PATCH 76/84] chore(builder): reorder revm `State` import (#11316) --- crates/ethereum/payload/src/lib.rs | 4 ++-- crates/optimism/payload/src/builder.rs | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/ethereum/payload/src/lib.rs b/crates/ethereum/payload/src/lib.rs index 84301617a5939..655df7bbddc8a 100644 --- a/crates/ethereum/payload/src/lib.rs +++ b/crates/ethereum/payload/src/lib.rs @@ -42,9 +42,9 @@ use reth_transaction_pool::{ }; use reth_trie::HashedPostState; use revm::{ - db::states::bundle_state::BundleRetention, + db::{states::bundle_state::BundleRetention, State}, primitives::{EVMError, EnvWithHandlerCfg, InvalidTransaction, ResultAndState}, - DatabaseCommit, State, + DatabaseCommit, }; use revm_primitives::calc_excess_blob_gas; use std::sync::Arc; diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index ecdbb7e7a3218..c79dfc1cd4ccf 100644 --- a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -28,9 +28,9 @@ use reth_transaction_pool::{ }; use reth_trie::HashedPostState; use revm::{ - db::states::bundle_state::BundleRetention, + db::{states::bundle_state::BundleRetention, State}, primitives::{EVMError, EnvWithHandlerCfg, InvalidTransaction, ResultAndState}, - DatabaseCommit, State, + DatabaseCommit, }; use revm_primitives::calc_excess_blob_gas; use tracing::{debug, trace, warn}; From bf679b430ea535d2b1f4e68a7b45bb5e258efbde Mon Sep 17 00:00:00 2001 From: Alexey Shekhirin Date: Mon, 30 Sep 2024 13:25:17 +0300 Subject: [PATCH 77/84] feat(exex): finalize WAL below the given block (#11324) --- Cargo.lock | 2 - crates/exex/exex/Cargo.toml | 1 - crates/exex/exex/src/manager.rs | 2 +- crates/exex/exex/src/notifications.rs | 4 +- crates/exex/exex/src/wal/cache.rs | 154 ++++++---------- crates/exex/exex/src/wal/mod.rs | 247 +++++++++++++------------- crates/exex/exex/src/wal/storage.rs | 33 ++-- examples/custom-dev-node/Cargo.toml | 1 - 8 files changed, 200 insertions(+), 244 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5682674034c76..c39be60722927 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2692,7 +2692,6 @@ dependencies = [ "reth-chainspec", "reth-node-core", "reth-node-ethereum", - "reth-primitives", "serde_json", "tokio", ] @@ -7330,7 +7329,6 @@ dependencies = [ "alloy-eips", "alloy-genesis", "alloy-primitives", - "dashmap 6.1.0", "eyre", "futures", "itertools 0.13.0", diff --git a/crates/exex/exex/Cargo.toml b/crates/exex/exex/Cargo.toml index 0e76ef3d40df9..9c3d47365d19b 100644 --- a/crates/exex/exex/Cargo.toml +++ b/crates/exex/exex/Cargo.toml @@ -42,7 +42,6 @@ tokio-util.workspace = true tokio.workspace = true ## misc -dashmap.workspace = true eyre.workspace = true itertools.workspace = true metrics.workspace = true diff --git a/crates/exex/exex/src/manager.rs 
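(Editorial note, not part of the diffs above.) The reworked `BlockCache` keys WAL eviction on a min-heap of `(highest block in the file, file ID)` pairs, built by pushing `Reverse` tuples into a `BinaryHeap`. The dependency-free sketch below mirrors only that `remove_before` shape; the real cache additionally prunes its committed-block hash map, which is omitted here.

use std::cmp::Reverse;
use std::collections::{BinaryHeap, HashSet};

/// Minimal stand-in for the WAL block cache: a min-heap of
/// (highest block number in the notification, file ID) pairs.
#[derive(Default)]
struct Cache {
    blocks: BinaryHeap<Reverse<(u64, u64)>>,
}

impl Cache {
    fn insert(&mut self, max_block: u64, file_id: u64) {
        self.blocks.push(Reverse((max_block, file_id)));
    }

    /// Pops every file whose highest block is at or below `block_number` and
    /// returns the IDs of the files that can be deleted from disk.
    fn remove_before(&mut self, block_number: u64) -> HashSet<u64> {
        let mut file_ids = HashSet::new();
        while let Some(Reverse((max_block, file_id))) = self.blocks.peek().copied() {
            if max_block > block_number {
                break;
            }
            self.blocks.pop();
            file_ids.insert(file_id);
        }
        file_ids
    }
}

fn main() {
    let mut cache = Cache::default();
    cache.insert(2, 0); // file 0 tops out at block 2
    cache.insert(3, 1); // file 1 tops out at block 3
    cache.insert(5, 2); // file 2 tops out at block 5

    // Finalizing at block 3 releases files 0 and 1 but keeps file 2.
    let removed = cache.remove_before(3);
    assert_eq!(removed, HashSet::from([0, 1]));
    assert_eq!(cache.blocks.len(), 1);
}
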
b/crates/exex/exex/src/manager.rs index 44b08b097881c..e7c9a6504bf81 100644 --- a/crates/exex/exex/src/manager.rs +++ b/crates/exex/exex/src/manager.rs @@ -1124,7 +1124,7 @@ mod tests { let mut rng = generators::rng(); let temp_dir = tempfile::tempdir().unwrap(); - let mut wal = Wal::new(temp_dir.path()).unwrap(); + let wal = Wal::new(temp_dir.path()).unwrap(); let provider_factory = create_test_provider_factory(); diff --git a/crates/exex/exex/src/notifications.rs b/crates/exex/exex/src/notifications.rs index 9e9ee78e6cd71..5440e57b18fab 100644 --- a/crates/exex/exex/src/notifications.rs +++ b/crates/exex/exex/src/notifications.rs @@ -457,7 +457,7 @@ mod tests { let mut rng = generators::rng(); let temp_dir = tempfile::tempdir().unwrap(); - let mut wal = Wal::new(temp_dir.path()).unwrap(); + let wal = Wal::new(temp_dir.path()).unwrap(); let provider_factory = create_test_provider_factory(); let genesis_hash = init_genesis(&provider_factory)?; @@ -557,7 +557,7 @@ mod tests { let mut rng = generators::rng(); let temp_dir = tempfile::tempdir().unwrap(); - let mut wal = Wal::new(temp_dir.path()).unwrap(); + let wal = Wal::new(temp_dir.path()).unwrap(); let provider_factory = create_test_provider_factory(); let genesis_hash = init_genesis(&provider_factory)?; diff --git a/crates/exex/exex/src/wal/cache.rs b/crates/exex/exex/src/wal/cache.rs index 097a07c7a9f52..861ae9b506b1e 100644 --- a/crates/exex/exex/src/wal/cache.rs +++ b/crates/exex/exex/src/wal/cache.rs @@ -1,98 +1,55 @@ -use std::collections::{BTreeMap, VecDeque}; +use std::{ + cmp::Reverse, + collections::{BinaryHeap, HashSet}, +}; use alloy_eips::BlockNumHash; -use alloy_primitives::B256; -use dashmap::DashMap; -use parking_lot::RwLock; +use alloy_primitives::{map::FbHashMap, BlockNumber, B256}; use reth_exex_types::ExExNotification; /// The block cache of the WAL. /// /// This cache is needed to avoid walking the WAL directory every time we want to find a /// notification corresponding to a block or a block corresponding to a hash. -#[derive(Debug)] +#[derive(Debug, Default)] pub struct BlockCache { - /// A mapping of `File ID -> List of Blocks`. - /// - /// For each notification written to the WAL, there will be an entry per block written to - /// the cache with the same file ID. I.e. for each notification, there may be multiple blocks - /// in the cache. - files: RwLock>>, - /// A mapping of committed blocks `Block Hash -> (File ID, Block)`. + /// A min heap of `(Block Number, File ID)` tuples. + pub(super) blocks: BinaryHeap>, + /// A mapping of committed blocks `Block Hash -> Block`. /// /// For each [`ExExNotification::ChainCommitted`] notification, there will be an entry per /// block. - committed_blocks: DashMap, + pub(super) committed_blocks: FbHashMap<32, (u64, CachedBlock)>, } impl BlockCache { - /// Creates a new instance of [`BlockCache`]. - pub(super) fn new() -> Self { - Self { files: RwLock::new(BTreeMap::new()), committed_blocks: DashMap::new() } - } - /// Returns `true` if the cache is empty. pub(super) fn is_empty(&self) -> bool { - self.files.read().is_empty() - } - - /// Returns a front-to-back iterator. - pub(super) fn iter(&self) -> impl Iterator + '_ { - self.files - .read() - .iter() - .flat_map(|(k, v)| v.iter().map(move |b| (*k, *b))) - .collect::>() - .into_iter() - } - - /// Provides a reference to the first block from the cache, or `None` if the cache is - /// empty. 
- pub(super) fn front(&self) -> Option<(u64, CachedBlock)> { - self.files.read().first_key_value().and_then(|(k, v)| v.front().map(|b| (*k, *b))) - } - - /// Provides a reference to the last block from the cache, or `None` if the cache is - /// empty. - pub(super) fn back(&self) -> Option<(u64, CachedBlock)> { - self.files.read().last_key_value().and_then(|(k, v)| v.back().map(|b| (*k, *b))) + self.blocks.is_empty() } - /// Removes the notification with the given file ID. - pub(super) fn remove_notification(&self, key: u64) -> Option> { - self.files.write().remove(&key) - } - - /// Pops the first block from the cache. If it resulted in the whole file entry being empty, - /// it will also remove the file entry. - pub(super) fn pop_front(&self) -> Option<(u64, CachedBlock)> { - let mut files = self.files.write(); - - let first_entry = files.first_entry()?; - let key = *first_entry.key(); - let blocks = first_entry.into_mut(); - let first_block = blocks.pop_front().unwrap(); - if blocks.is_empty() { - files.remove(&key); + /// Removes all files from the cache that has notifications with a tip block less than or equal + /// to the given block number. + /// + /// # Returns + /// + /// A set of file IDs that were removed. + pub(super) fn remove_before(&mut self, block_number: BlockNumber) -> HashSet { + let mut file_ids = HashSet::default(); + + while let Some(block @ Reverse((max_block, file_id))) = self.blocks.peek().copied() { + if max_block <= block_number { + let popped_block = self.blocks.pop().unwrap(); + debug_assert_eq!(popped_block, block); + file_ids.insert(file_id); + } else { + break + } } - Some((key, first_block)) - } - - /// Pops the last block from the cache. If it resulted in the whole file entry being empty, - /// it will also remove the file entry. - pub(super) fn pop_back(&self) -> Option<(u64, CachedBlock)> { - let mut files = self.files.write(); + self.committed_blocks.retain(|_, (file_id, _)| !file_ids.contains(file_id)); - let last_entry = files.last_entry()?; - let key = *last_entry.key(); - let blocks = last_entry.into_mut(); - let last_block = blocks.pop_back().unwrap(); - if blocks.is_empty() { - files.remove(&key); - } - - Some((key, last_block)) + file_ids } /// Returns the file ID for the notification containing the given committed block hash, if it @@ -102,59 +59,52 @@ impl BlockCache { } /// Inserts the blocks from the notification into the cache with the given file ID. - /// - /// First, inserts the reverted blocks (if any), then the committed blocks (if any). 
pub(super) fn insert_notification_blocks_with_file_id( - &self, + &mut self, file_id: u64, notification: &ExExNotification, ) { - let mut files = self.files.write(); - let reverted_chain = notification.reverted_chain(); let committed_chain = notification.committed_chain(); - if let Some(reverted_chain) = reverted_chain { - for block in reverted_chain.blocks().values() { - files.entry(file_id).or_default().push_back(CachedBlock { - action: CachedBlockAction::Revert, - block: (block.number, block.hash()).into(), - parent_hash: block.parent_hash, - }); - } + let max_block = + reverted_chain.iter().chain(&committed_chain).map(|chain| chain.tip().number).max(); + if let Some(max_block) = max_block { + self.blocks.push(Reverse((max_block, file_id))); } - if let Some(committed_chain) = committed_chain { + if let Some(committed_chain) = &committed_chain { for block in committed_chain.blocks().values() { let cached_block = CachedBlock { - action: CachedBlockAction::Commit, block: (block.number, block.hash()).into(), parent_hash: block.parent_hash, }; - files.entry(file_id).or_default().push_back(cached_block); self.committed_blocks.insert(block.hash(), (file_id, cached_block)); } } } + + #[cfg(test)] + pub(super) fn blocks_sorted(&self) -> Vec<(BlockNumber, u64)> { + self.blocks.clone().into_sorted_vec().into_iter().map(|entry| entry.0).collect() + } + + #[cfg(test)] + pub(super) fn committed_blocks_sorted(&self) -> Vec<(B256, u64, CachedBlock)> { + use itertools::Itertools; + + self.committed_blocks + .iter() + .map(|(hash, (file_id, block))| (*hash, *file_id, *block)) + .sorted_by_key(|(_, _, block)| (block.block.number, block.block.hash)) + .collect() + } } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub(super) struct CachedBlock { - pub(super) action: CachedBlockAction, /// The block number and hash of the block. pub(super) block: BlockNumHash, /// The hash of the parent block. pub(super) parent_hash: B256, } - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub(super) enum CachedBlockAction { - Commit, - Revert, -} - -impl CachedBlockAction { - pub(super) const fn is_commit(&self) -> bool { - matches!(self, Self::Commit) - } -} diff --git a/crates/exex/exex/src/wal/mod.rs b/crates/exex/exex/src/wal/mod.rs index 06c2f6485f000..593b065daea71 100644 --- a/crates/exex/exex/src/wal/mod.rs +++ b/crates/exex/exex/src/wal/mod.rs @@ -3,10 +3,16 @@ mod cache; pub use cache::BlockCache; mod storage; -use eyre::OptionExt; +use parking_lot::{RwLock, RwLockReadGuard}; pub use storage::Storage; -use std::{path::Path, sync::Arc}; +use std::{ + path::Path, + sync::{ + atomic::{AtomicUsize, Ordering}, + Arc, + }, +}; use alloy_eips::BlockNumHash; use alloy_primitives::B256; @@ -40,16 +46,14 @@ impl Wal { } /// Commits the notification to WAL. - pub fn commit(&mut self, notification: &ExExNotification) -> eyre::Result<()> { + pub fn commit(&self, notification: &ExExNotification) -> eyre::Result<()> { self.inner.commit(notification) } - /// Finalizes the WAL to the given block, inclusive. + /// Finalizes the WAL up to the given canonical block, inclusive. /// - /// 1. Finds a notification with first unfinalized block (first notification containing a - /// committed block higher than `to_block`). - /// 2. Removes the notifications from the beginning of WAL until the found notification. If this - /// notification includes both finalized and non-finalized blocks, it will not be removed. 
+ /// The caller should check that all ExExes are on the canonical chain and will not need any + /// blocks from the WAL below the provided block, inclusive. pub fn finalize(&self, to_block: BlockNumHash) -> eyre::Result<()> { self.inner.finalize(to_block) } @@ -65,24 +69,35 @@ impl Wal { /// Inner type for the WAL. #[derive(Debug)] struct WalInner { + next_file_id: AtomicUsize, /// The underlying WAL storage backed by a file. storage: Storage, /// WAL block cache. See [`cache::BlockCache`] docs for more details. - block_cache: BlockCache, + block_cache: RwLock, } impl WalInner { fn new(directory: impl AsRef) -> eyre::Result { - let mut wal = Self { storage: Storage::new(directory)?, block_cache: BlockCache::new() }; + let mut wal = Self { + next_file_id: AtomicUsize::new(0), + storage: Storage::new(directory)?, + block_cache: RwLock::new(BlockCache::default()), + }; wal.fill_block_cache()?; Ok(wal) } + fn block_cache(&self) -> RwLockReadGuard<'_, BlockCache> { + self.block_cache.read() + } + /// Fills the block cache with the notifications from the storage. #[instrument(target = "exex::wal", skip(self))] fn fill_block_cache(&mut self) -> eyre::Result<()> { let Some(files_range) = self.storage.files_range()? else { return Ok(()) }; + let mut block_cache = self.block_cache.write(); + for entry in self.storage.iter_notifications(files_range) { let (file_id, notification) = entry?; @@ -97,7 +112,9 @@ impl WalInner { "Inserting block cache entries" ); - self.block_cache.insert_notification_blocks_with_file_id(file_id, ¬ification); + block_cache.insert_notification_blocks_with_file_id(file_id, ¬ification); + + self.next_file_id.fetch_max(1, Ordering::Relaxed); } Ok(()) @@ -108,83 +125,30 @@ impl WalInner { committed_block_range = ?notification.committed_chain().as_ref().map(|chain| chain.range()) ))] fn commit(&self, notification: &ExExNotification) -> eyre::Result<()> { - let file_id = self.block_cache.back().map_or(0, |block| block.0 + 1); + let mut block_cache = self.block_cache.write(); + + let file_id = self.next_file_id.fetch_add(1, Ordering::Relaxed) as u64; self.storage.write_notification(file_id, notification)?; debug!(?file_id, "Inserting notification blocks into the block cache"); - self.block_cache.insert_notification_blocks_with_file_id(file_id, notification); + block_cache.insert_notification_blocks_with_file_id(file_id, notification); Ok(()) } #[instrument(target = "exex::wal", skip(self))] fn finalize(&self, to_block: BlockNumHash) -> eyre::Result<()> { - // First, walk cache to find the file ID of the notification with the finalized block and - // save the file ID with the first unfinalized block. Do not remove any notifications - // yet. - let mut unfinalized_from_file_id = None; - { - let mut block_cache = self.block_cache.iter().peekable(); - while let Some((file_id, block)) = block_cache.next() { - debug!(?file_id, ?block, "Iterating over the block cache"); - if block.action.is_commit() && - block.block.number == to_block.number && - block.block.hash == to_block.hash - { - let notification = self - .storage - .read_notification(file_id)? 
- .ok_or_eyre("notification not found")?; - if notification.committed_chain().unwrap().blocks().len() == 1 { - unfinalized_from_file_id = Some( - block_cache.peek().map(|(file_id, _)| *file_id).unwrap_or(u64::MAX), - ); - } else { - unfinalized_from_file_id = Some(file_id); - } - - debug!( - ?file_id, - ?block, - ?unfinalized_from_file_id, - "Found the finalized block in the block cache" - ); - break - } - - unfinalized_from_file_id = Some(file_id); - } - } - - // If the finalized block is still not found, we can't do anything and just return. - let Some(remove_to_file_id) = unfinalized_from_file_id else { - debug!("Could not find the finalized block in WAL"); - return Ok(()) - }; - - // Remove notifications from the storage from the beginning up to the unfinalized block, not - // inclusive. - let (mut file_range_start, mut file_range_end) = (None, None); - while let Some((file_id, _)) = self.block_cache.front() { - if file_id == remove_to_file_id { - break - } - self.block_cache.pop_front(); - - file_range_start.get_or_insert(file_id); - file_range_end = Some(file_id); - } - debug!(?remove_to_file_id, "Block cache was finalized"); + let file_ids = self.block_cache.write().remove_before(to_block.number); // Remove notifications from the storage. - if let Some((file_range_start, file_range_end)) = file_range_start.zip(file_range_end) { - let removed_notifications = - self.storage.remove_notifications(file_range_start..=file_range_end)?; - debug!(?removed_notifications, "Storage was finalized"); - } else { + if file_ids.is_empty() { debug!("No notifications were finalized from the storage"); + return Ok(()) } + let removed_notifications = self.storage.remove_notifications(file_ids)?; + debug!(?removed_notifications, "Storage was finalized"); + Ok(()) } @@ -212,7 +176,7 @@ impl WalHandle { &self, block_hash: &B256, ) -> eyre::Result> { - let Some(file_id) = self.wal.block_cache.get_file_id_by_committed_block_hash(block_hash) + let Some(file_id) = self.wal.block_cache().get_file_id_by_committed_block_hash(block_hash) else { return Ok(None) }; @@ -225,17 +189,16 @@ impl WalHandle { mod tests { use std::sync::Arc; + use alloy_primitives::B256; use eyre::OptionExt; + use itertools::Itertools; use reth_exex_types::ExExNotification; use reth_provider::Chain; use reth_testing_utils::generators::{ self, random_block, random_block_range, BlockParams, BlockRangeParams, }; - use crate::wal::{ - cache::{CachedBlock, CachedBlockAction}, - Wal, - }; + use crate::wal::{cache::CachedBlock, Wal}; fn read_notifications(wal: &Wal) -> eyre::Result> { let Some(files_range) = wal.inner.storage.files_range()? 
else { return Ok(Vec::new()) }; @@ -247,6 +210,15 @@ mod tests { .collect::>() } + fn sort_committed_blocks( + committed_blocks: Vec<(B256, u64, CachedBlock)>, + ) -> Vec<(B256, u64, CachedBlock)> { + committed_blocks + .into_iter() + .sorted_by_key(|(_, _, block)| (block.block.number, block.block.hash)) + .collect() + } + #[test] fn test_wal() -> eyre::Result<()> { reth_tracing::init_test_tracing(); @@ -255,8 +227,8 @@ mod tests { // Create an instance of the WAL in a temporary directory let temp_dir = tempfile::tempdir()?; - let mut wal = Wal::new(&temp_dir)?; - assert!(wal.inner.block_cache.is_empty()); + let wal = Wal::new(&temp_dir)?; + assert!(wal.inner.block_cache().is_empty()); // Create 4 canonical blocks and one reorged block with number 2 let blocks = random_block_range(&mut rng, 0..=3, BlockRangeParams::default()) @@ -315,19 +287,20 @@ mod tests { // First notification (commit block 0, 1) let file_id = 0; - let committed_notification_1_cache = vec![ + let committed_notification_1_cache_blocks = (blocks[1].number, file_id); + let committed_notification_1_cache_committed_blocks = vec![ ( + blocks[0].hash(), file_id, CachedBlock { - action: CachedBlockAction::Commit, block: (blocks[0].number, blocks[0].hash()).into(), parent_hash: blocks[0].parent_hash, }, ), ( + blocks[1].hash(), file_id, CachedBlock { - action: CachedBlockAction::Commit, block: (blocks[1].number, blocks[1].hash()).into(), parent_hash: blocks[1].parent_hash, }, @@ -335,25 +308,26 @@ mod tests { ]; wal.commit(&committed_notification_1)?; assert_eq!( - wal.inner.block_cache.iter().collect::>(), - committed_notification_1_cache + wal.inner.block_cache().blocks_sorted(), + [committed_notification_1_cache_blocks] + ); + assert_eq!( + wal.inner.block_cache().committed_blocks_sorted(), + committed_notification_1_cache_committed_blocks ); assert_eq!(read_notifications(&wal)?, vec![committed_notification_1.clone()]); // Second notification (revert block 1) wal.commit(&reverted_notification)?; let file_id = 1; - let reverted_notification_cache = vec![( - file_id, - CachedBlock { - action: CachedBlockAction::Revert, - block: (blocks[1].number, blocks[1].hash()).into(), - parent_hash: blocks[1].parent_hash, - }, - )]; + let reverted_notification_cache_blocks = (blocks[1].number, file_id); + assert_eq!( + wal.inner.block_cache().blocks_sorted(), + [reverted_notification_cache_blocks, committed_notification_1_cache_blocks] + ); assert_eq!( - wal.inner.block_cache.iter().collect::>(), - [committed_notification_1_cache.clone(), reverted_notification_cache.clone()].concat() + wal.inner.block_cache().committed_blocks_sorted(), + committed_notification_1_cache_committed_blocks ); assert_eq!( read_notifications(&wal)?, @@ -363,32 +337,42 @@ mod tests { // Third notification (commit block 1, 2) wal.commit(&committed_notification_2)?; let file_id = 2; - let committed_notification_2_cache = vec![ + let committed_notification_2_cache_blocks = (blocks[2].number, file_id); + let committed_notification_2_cache_committed_blocks = vec![ ( + block_1_reorged.hash(), file_id, CachedBlock { - action: CachedBlockAction::Commit, block: (block_1_reorged.number, block_1_reorged.hash()).into(), parent_hash: block_1_reorged.parent_hash, }, ), ( + blocks[2].hash(), file_id, CachedBlock { - action: CachedBlockAction::Commit, block: (blocks[2].number, blocks[2].hash()).into(), parent_hash: blocks[2].parent_hash, }, ), ]; assert_eq!( - wal.inner.block_cache.iter().collect::>(), + wal.inner.block_cache().blocks_sorted(), [ - 
committed_notification_1_cache.clone(), - reverted_notification_cache.clone(), - committed_notification_2_cache.clone() + committed_notification_2_cache_blocks, + reverted_notification_cache_blocks, + committed_notification_1_cache_blocks, ] - .concat() + ); + assert_eq!( + wal.inner.block_cache().committed_blocks_sorted(), + sort_committed_blocks( + [ + committed_notification_1_cache_committed_blocks.clone(), + committed_notification_2_cache_committed_blocks.clone() + ] + .concat() + ) ); assert_eq!( read_notifications(&wal)?, @@ -402,47 +386,50 @@ mod tests { // Fourth notification (revert block 2, commit block 2, 3) wal.commit(&reorged_notification)?; let file_id = 3; - let reorged_notification_cache = vec![ - ( - file_id, - CachedBlock { - action: CachedBlockAction::Revert, - block: (blocks[2].number, blocks[2].hash()).into(), - parent_hash: blocks[2].parent_hash, - }, - ), + let reorged_notification_cache_blocks = (blocks[3].number, file_id); + let reorged_notification_cache_committed_blocks = vec![ ( + block_2_reorged.hash(), file_id, CachedBlock { - action: CachedBlockAction::Commit, block: (block_2_reorged.number, block_2_reorged.hash()).into(), parent_hash: block_2_reorged.parent_hash, }, ), ( + blocks[3].hash(), file_id, CachedBlock { - action: CachedBlockAction::Commit, block: (blocks[3].number, blocks[3].hash()).into(), parent_hash: blocks[3].parent_hash, }, ), ]; assert_eq!( - wal.inner.block_cache.iter().collect::>(), + wal.inner.block_cache().blocks_sorted(), [ - committed_notification_1_cache, - reverted_notification_cache, - committed_notification_2_cache.clone(), - reorged_notification_cache.clone() + reorged_notification_cache_blocks, + committed_notification_2_cache_blocks, + reverted_notification_cache_blocks, + committed_notification_1_cache_blocks, ] - .concat() + ); + assert_eq!( + wal.inner.block_cache().committed_blocks_sorted(), + sort_committed_blocks( + [ + committed_notification_1_cache_committed_blocks, + committed_notification_2_cache_committed_blocks.clone(), + reorged_notification_cache_committed_blocks.clone() + ] + .concat() + ) ); assert_eq!( read_notifications(&wal)?, vec![ - committed_notification_1.clone(), - reverted_notification.clone(), + committed_notification_1, + reverted_notification, committed_notification_2.clone(), reorged_notification.clone() ] @@ -454,8 +441,18 @@ mod tests { // the notifications before it. wal.finalize((block_1_reorged.number, block_1_reorged.hash()).into())?; assert_eq!( - wal.inner.block_cache.iter().collect::>(), - [committed_notification_2_cache, reorged_notification_cache].concat() + wal.inner.block_cache().blocks_sorted(), + [reorged_notification_cache_blocks, committed_notification_2_cache_blocks] + ); + assert_eq!( + wal.inner.block_cache().committed_blocks_sorted(), + sort_committed_blocks( + [ + committed_notification_2_cache_committed_blocks, + reorged_notification_cache_committed_blocks + ] + .concat() + ) ); assert_eq!(read_notifications(&wal)?, vec![committed_notification_2, reorged_notification]); diff --git a/crates/exex/exex/src/wal/storage.rs b/crates/exex/exex/src/wal/storage.rs index 817d57d193f04..7ae98077e63d5 100644 --- a/crates/exex/exex/src/wal/storage.rs +++ b/crates/exex/exex/src/wal/storage.rs @@ -41,10 +41,16 @@ impl Storage { /// Removes notification for the given file ID from the storage. 
#[instrument(target = "exex::wal::storage", skip(self))] - fn remove_notification(&self, file_id: u64) { + fn remove_notification(&self, file_id: u64) -> bool { match reth_fs_util::remove_file(self.file_path(file_id)) { - Ok(()) => debug!("Notification was removed from the storage"), - Err(err) => debug!(?err, "Failed to remove notification from the storage"), + Ok(()) => { + debug!("Notification was removed from the storage"); + true + } + Err(err) => { + debug!(?err, "Failed to remove notification from the storage"); + false + } } } @@ -67,17 +73,24 @@ impl Storage { Ok(min_id.zip(max_id).map(|(min_id, max_id)| min_id..=max_id)) } - /// Removes notifications from the storage according to the given range. + /// Removes notifications from the storage according to the given list of file IDs. /// /// # Returns /// /// Number of removed notifications. - pub(super) fn remove_notifications(&self, range: RangeInclusive) -> eyre::Result { - for id in range.clone() { - self.remove_notification(id); + pub(super) fn remove_notifications( + &self, + file_ids: impl IntoIterator, + ) -> eyre::Result { + let mut deleted = 0; + + for id in file_ids { + if self.remove_notification(id) { + deleted += 1; + } } - Ok(range.count()) + Ok(deleted) } pub(super) fn iter_notifications( @@ -91,7 +104,7 @@ impl Storage { }) } - /// Reads the notification from the file with the given id. + /// Reads the notification from the file with the given ID. #[instrument(target = "exex::wal::storage", skip(self))] pub(super) fn read_notification(&self, file_id: u64) -> eyre::Result> { let file_path = self.file_path(file_id); @@ -107,7 +120,7 @@ impl Storage { Ok(serde_json::from_reader(&mut file)?) } - /// Writes the notification to the file with the given id. + /// Writes the notification to the file with the given ID. 
#[instrument(target = "exex::wal::storage", skip(self, notification))] pub(super) fn write_notification( &self, diff --git a/examples/custom-dev-node/Cargo.toml b/examples/custom-dev-node/Cargo.toml index 2586795b45b53..8ed277686f4e5 100644 --- a/examples/custom-dev-node/Cargo.toml +++ b/examples/custom-dev-node/Cargo.toml @@ -10,7 +10,6 @@ license.workspace = true reth.workspace = true reth-chainspec.workspace = true reth-node-core.workspace = true -reth-primitives.workspace = true reth-node-ethereum = { workspace = true, features = ["test-utils"] } futures-util.workspace = true From e8153e5e2c80caf2f8a8b40a785bdb3044ac18ab Mon Sep 17 00:00:00 2001 From: Roman Krasiuk Date: Mon, 30 Sep 2024 14:13:56 +0200 Subject: [PATCH 78/84] deps: remove `reth-metrics-derive` for `metrics-derive` (#11335) --- Cargo.lock | 50 +- Cargo.toml | 3 +- crates/metrics/Cargo.toml | 4 +- crates/metrics/metrics-derive/Cargo.toml | 25 - crates/metrics/metrics-derive/src/expand.rs | 436 ------------------ crates/metrics/metrics-derive/src/lib.rs | 139 ------ crates/metrics/metrics-derive/src/metric.rs | 59 --- .../metrics/metrics-derive/src/with_attrs.rs | 17 - .../tests/compile-fail/metric_attr.rs | 62 --- .../tests/compile-fail/metric_attr.stderr | 48 -- .../tests/compile-fail/metrics_attr.rs | 56 --- .../tests/compile-fail/metrics_attr.stderr | 81 ---- .../metrics/metrics-derive/tests/metrics.rs | 351 -------------- .../metrics/metrics-derive/tests/trybuild.rs | 6 - crates/metrics/src/common/mpsc.rs | 2 +- crates/metrics/src/lib.rs | 2 +- 16 files changed, 17 insertions(+), 1324 deletions(-) delete mode 100644 crates/metrics/metrics-derive/Cargo.toml delete mode 100644 crates/metrics/metrics-derive/src/expand.rs delete mode 100644 crates/metrics/metrics-derive/src/lib.rs delete mode 100644 crates/metrics/metrics-derive/src/metric.rs delete mode 100644 crates/metrics/metrics-derive/src/with_attrs.rs delete mode 100644 crates/metrics/metrics-derive/tests/compile-fail/metric_attr.rs delete mode 100644 crates/metrics/metrics-derive/tests/compile-fail/metric_attr.stderr delete mode 100644 crates/metrics/metrics-derive/tests/compile-fail/metrics_attr.rs delete mode 100644 crates/metrics/metrics-derive/tests/compile-fail/metrics_attr.stderr delete mode 100644 crates/metrics/metrics-derive/tests/metrics.rs delete mode 100644 crates/metrics/metrics-derive/tests/trybuild.rs diff --git a/Cargo.lock b/Cargo.lock index c39be60722927..70cee889bee82 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4646,6 +4646,18 @@ dependencies = [ "portable-atomic", ] +[[package]] +name = "metrics-derive" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3dbdd96ed57d565ec744cba02862d707acf373c5772d152abae6ec5c4e24f6c" +dependencies = [ + "proc-macro2", + "quote", + "regex", + "syn 2.0.79", +] + [[package]] name = "metrics-exporter-prometheus" version = "0.15.3" @@ -7497,24 +7509,11 @@ version = "1.0.7" dependencies = [ "futures", "metrics", - "reth-metrics-derive", + "metrics-derive", "tokio", "tokio-util", ] -[[package]] -name = "reth-metrics-derive" -version = "1.0.7" -dependencies = [ - "metrics", - "proc-macro2", - "quote", - "regex", - "serial_test", - "syn 2.0.79", - "trybuild", -] - [[package]] name = "reth-net-banlist" version = "1.0.7" @@ -10222,15 +10221,6 @@ dependencies = [ "windows-sys 0.59.0", ] -[[package]] -name = "termcolor" -version = "1.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" -dependencies = [ - "winapi-util", -] - [[package]] name = "termtree" version = "0.4.1" @@ -10860,20 +10850,6 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" -[[package]] -name = "trybuild" -version = "1.0.99" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "207aa50d36c4be8d8c6ea829478be44a372c6a77669937bb39c698e52f1491e8" -dependencies = [ - "glob", - "serde", - "serde_derive", - "serde_json", - "termcolor", - "toml", -] - [[package]] name = "tungstenite" version = "0.23.0" diff --git a/Cargo.toml b/Cargo.toml index 5beca0009b8c5..aa360d9fa33f2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -48,7 +48,6 @@ members = [ "crates/exex/test-utils/", "crates/exex/types/", "crates/metrics/", - "crates/metrics/metrics-derive/", "crates/net/banlist/", "crates/net/discv4/", "crates/net/discv5/", @@ -345,7 +344,6 @@ reth-ipc = { path = "crates/rpc/ipc" } reth-libmdbx = { path = "crates/storage/libmdbx-rs" } reth-mdbx-sys = { path = "crates/storage/libmdbx-rs/mdbx-sys" } reth-metrics = { path = "crates/metrics" } -reth-metrics-derive = { path = "crates/metrics/metrics-derive" } reth-net-banlist = { path = "crates/net/banlist" } reth-net-nat = { path = "crates/net/nat" } reth-network = { path = "crates/net/network" } @@ -517,6 +515,7 @@ zstd = "0.13" # metrics metrics = "0.23.0" +metrics-derive = "0.1" metrics-exporter-prometheus = { version = "0.15.0", default-features = false } metrics-process = "2.1.0" metrics-util = { default-features = false, version = "0.17.0" } diff --git a/crates/metrics/Cargo.toml b/crates/metrics/Cargo.toml index 015f24d232f8c..df3c7fa2161a9 100644 --- a/crates/metrics/Cargo.toml +++ b/crates/metrics/Cargo.toml @@ -12,11 +12,9 @@ description = "reth metrics utilities" workspace = true [dependencies] -# reth -reth-metrics-derive.workspace = true - # metrics metrics.workspace = true +metrics-derive.workspace = true # async tokio = { workspace = true, features = ["full"], optional = true } diff --git a/crates/metrics/metrics-derive/Cargo.toml b/crates/metrics/metrics-derive/Cargo.toml deleted file mode 100644 index 509dec73057cd..0000000000000 --- a/crates/metrics/metrics-derive/Cargo.toml +++ /dev/null @@ -1,25 +0,0 @@ -[package] -name = "reth-metrics-derive" -version.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true - -[lints] -workspace = true - -[lib] -proc-macro = true - -[dependencies] -proc-macro2.workspace = true -syn = { workspace = true, features = ["extra-traits"] } -quote.workspace = true -regex = "1.6.0" - -[dev-dependencies] -metrics.workspace = true -serial_test.workspace = true -trybuild = "1.0" diff --git a/crates/metrics/metrics-derive/src/expand.rs b/crates/metrics/metrics-derive/src/expand.rs deleted file mode 100644 index 9f9148120e26c..0000000000000 --- a/crates/metrics/metrics-derive/src/expand.rs +++ /dev/null @@ -1,436 +0,0 @@ -use quote::{quote, ToTokens}; -use regex::Regex; -use std::sync::LazyLock; -use syn::{ - punctuated::Punctuated, Attribute, Data, DeriveInput, Error, Expr, Field, Lit, LitBool, LitStr, - Meta, MetaNameValue, Result, Token, -}; - -use crate::{metric::Metric, with_attrs::WithAttrs}; - -/// Metric name regex according to Prometheus data model -/// -/// See -static METRIC_NAME_RE: LazyLock = - LazyLock::new(|| 
Regex::new(r"^[a-zA-Z_:.][a-zA-Z0-9_:.]*$").unwrap()); - -/// Supported metrics separators -const SUPPORTED_SEPARATORS: &[&str] = &[".", "_", ":"]; - -enum MetricField<'a> { - Included(Metric<'a>), - Skipped(&'a Field), -} - -impl<'a> MetricField<'a> { - const fn field(&self) -> &'a Field { - match self { - MetricField::Included(Metric { field, .. }) | MetricField::Skipped(field) => field, - } - } -} - -pub(crate) fn derive(node: &DeriveInput) -> Result { - let ty = &node.ident; - let vis = &node.vis; - let ident_name = ty.to_string(); - - let metrics_attr = parse_metrics_attr(node)?; - let metric_fields = parse_metric_fields(node)?; - - let describe_doc = quote! { - /// Describe all exposed metrics. Internally calls `describe_*` macros from - /// the metrics crate according to the metric type. - /// - /// See - }; - let register_and_describe = match &metrics_attr.scope { - MetricsScope::Static(scope) => { - let (defaults, labeled_defaults, describes): (Vec<_>, Vec<_>, Vec<_>) = metric_fields - .iter() - .map(|metric| { - let field_name = &metric.field().ident; - match metric { - MetricField::Included(metric) => { - let metric_name = format!( - "{}{}{}", - scope.value(), - metrics_attr.separator(), - metric.name() - ); - let registrar = metric.register_stmt()?; - let describe = metric.describe_stmt()?; - let description = &metric.description; - Ok(( - quote! { - #field_name: #registrar(#metric_name), - }, - quote! { - #field_name: #registrar(#metric_name, labels.clone()), - }, - Some(quote! { - #describe(#metric_name, #description); - }), - )) - } - MetricField::Skipped(_) => Ok(( - quote! { - #field_name: Default::default(), - }, - quote! { - #field_name: Default::default(), - }, - None, - )), - } - }) - .collect::>>()? - .into_iter() - .fold((vec![], vec![], vec![]), |mut acc, x| { - acc.0.push(x.0); - acc.1.push(x.1); - if let Some(describe) = x.2 { - acc.2.push(describe); - } - acc - }); - - quote! { - impl Default for #ty { - fn default() -> Self { - #ty::describe(); - - Self { - #(#defaults)* - } - } - } - - impl #ty { - /// Create new instance of metrics with provided labels. - #vis fn new_with_labels(labels: impl metrics::IntoLabels + Clone) -> Self { - Self { - #(#labeled_defaults)* - } - } - - #describe_doc - #vis fn describe() { - #(#describes)* - } - } - } - } - MetricsScope::Dynamic => { - let (defaults, labeled_defaults, describes): (Vec<_>, Vec<_>, Vec<_>) = metric_fields - .iter() - .map(|metric| { - let field_name = &metric.field().ident; - match metric { - MetricField::Included(metric) => { - let name = metric.name(); - let separator = metrics_attr.separator(); - let metric_name = quote! { - format!("{}{}{}", scope, #separator, #name) - }; - - let registrar = metric.register_stmt()?; - let describe = metric.describe_stmt()?; - let description = &metric.description; - - Ok(( - quote! { - #field_name: #registrar(#metric_name), - }, - quote! { - #field_name: #registrar(#metric_name, labels.clone()), - }, - Some(quote! { - #describe(#metric_name, #description); - }), - )) - } - MetricField::Skipped(_) => Ok(( - quote! { - #field_name: Default::default(), - }, - quote! { - #field_name: Default::default(), - }, - None, - )), - } - }) - .collect::>>()? - .into_iter() - .fold((vec![], vec![], vec![]), |mut acc, x| { - acc.0.push(x.0); - acc.1.push(x.1); - if let Some(describe) = x.2 { - acc.2.push(describe); - } - acc - }); - - quote! { - impl #ty { - /// Create new instance of metrics with provided scope. 
- #vis fn new(scope: &str) -> Self { - #ty::describe(scope); - - Self { - #(#defaults)* - } - } - - /// Create new instance of metrics with provided labels. - #vis fn new_with_labels(scope: &str, labels: impl metrics::IntoLabels + Clone) -> Self { - Self { - #(#labeled_defaults)* - } - } - - #describe_doc - #vis fn describe(scope: &str) { - #(#describes)* - } - } - } - } - }; - Ok(quote! { - #register_and_describe - - impl std::fmt::Debug for #ty { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct(#ident_name).finish() - } - } - }) -} - -pub(crate) struct MetricsAttr { - pub(crate) scope: MetricsScope, - pub(crate) separator: Option, -} - -impl MetricsAttr { - const DEFAULT_SEPARATOR: &'static str = "."; - - fn separator(&self) -> String { - match &self.separator { - Some(sep) => sep.value(), - None => Self::DEFAULT_SEPARATOR.to_owned(), - } - } -} - -pub(crate) enum MetricsScope { - Static(LitStr), - Dynamic, -} - -fn parse_metrics_attr(node: &DeriveInput) -> Result { - let metrics_attr = parse_single_required_attr(node, "metrics")?; - let parsed = - metrics_attr.parse_args_with(Punctuated::::parse_terminated)?; - let (mut scope, mut separator, mut dynamic) = (None, None, None); - for kv in parsed { - let lit = match kv.value { - Expr::Lit(ref expr) => &expr.lit, - _ => continue, - }; - if kv.path.is_ident("scope") { - if scope.is_some() { - return Err(Error::new_spanned(kv, "Duplicate `scope` value provided.")) - } - let scope_lit = parse_str_lit(lit)?; - validate_metric_name(&scope_lit)?; - scope = Some(scope_lit); - } else if kv.path.is_ident("separator") { - if separator.is_some() { - return Err(Error::new_spanned(kv, "Duplicate `separator` value provided.")) - } - let separator_lit = parse_str_lit(lit)?; - if !SUPPORTED_SEPARATORS.contains(&&*separator_lit.value()) { - return Err(Error::new_spanned( - kv, - format!( - "Unsupported `separator` value. Supported: {}.", - SUPPORTED_SEPARATORS - .iter() - .map(|sep| format!("`{sep}`")) - .collect::>() - .join(", ") - ), - )) - } - separator = Some(separator_lit); - } else if kv.path.is_ident("dynamic") { - if dynamic.is_some() { - return Err(Error::new_spanned(kv, "Duplicate `dynamic` flag provided.")) - } - dynamic = Some(parse_bool_lit(lit)?.value); - } else { - return Err(Error::new_spanned(kv, "Unsupported attribute entry.")) - } - } - - let scope = match (scope, dynamic) { - (Some(scope), None | Some(false)) => MetricsScope::Static(scope), - (None, Some(true)) => MetricsScope::Dynamic, - (Some(_), Some(_)) => { - return Err(Error::new_spanned(node, "`scope = ..` conflicts with `dynamic = true`.")) - } - _ => { - return Err(Error::new_spanned( - node, - "Either `scope = ..` or `dynamic = true` must be set.", - )) - } - }; - - Ok(MetricsAttr { scope, separator }) -} - -fn parse_metric_fields(node: &DeriveInput) -> Result>> { - let Data::Struct(ref data) = node.data else { - return Err(Error::new_spanned(node, "Only structs are supported.")) - }; - - let mut metrics = Vec::with_capacity(data.fields.len()); - for field in &data.fields { - let (mut describe, mut rename, mut skip) = (None, None, false); - if let Some(metric_attr) = parse_single_attr(field, "metric")? 
{ - let parsed = - metric_attr.parse_args_with(Punctuated::::parse_terminated)?; - for meta in parsed { - match meta { - Meta::Path(path) if path.is_ident("skip") => skip = true, - Meta::NameValue(kv) => { - let lit = match kv.value { - Expr::Lit(ref expr) => &expr.lit, - _ => continue, - }; - if kv.path.is_ident("describe") { - if describe.is_some() { - return Err(Error::new_spanned( - kv, - "Duplicate `describe` value provided.", - )) - } - describe = Some(parse_str_lit(lit)?); - } else if kv.path.is_ident("rename") { - if rename.is_some() { - return Err(Error::new_spanned( - kv, - "Duplicate `rename` value provided.", - )) - } - let rename_lit = parse_str_lit(lit)?; - validate_metric_name(&rename_lit)?; - rename = Some(rename_lit) - } else { - return Err(Error::new_spanned(kv, "Unsupported attribute entry.")) - } - } - _ => return Err(Error::new_spanned(meta, "Unsupported attribute entry.")), - } - } - } - - if skip { - metrics.push(MetricField::Skipped(field)); - continue - } - - let description = match describe { - Some(lit_str) => lit_str.value(), - // Parse docs only if `describe` attribute was not provided - None => match parse_docs_to_string(field)? { - Some(docs_str) => docs_str, - None => { - return Err(Error::new_spanned( - field, - "Either doc comment or `describe = ..` must be set.", - )) - } - }, - }; - - metrics.push(MetricField::Included(Metric::new(field, description, rename))); - } - - Ok(metrics) -} - -fn validate_metric_name(name: &LitStr) -> Result<()> { - if METRIC_NAME_RE.is_match(&name.value()) { - Ok(()) - } else { - Err(Error::new_spanned(name, format!("Value must match regex {}", METRIC_NAME_RE.as_str()))) - } -} - -fn parse_single_attr<'a, T: WithAttrs + ToTokens>( - token: &'a T, - ident: &str, -) -> Result> { - let mut attr_iter = token.attrs().iter().filter(|a| a.path().is_ident(ident)); - if let Some(attr) = attr_iter.next() { - if let Some(next_attr) = attr_iter.next() { - Err(Error::new_spanned( - next_attr, - format!("Duplicate `#[{ident}(..)]` attribute provided."), - )) - } else { - Ok(Some(attr)) - } - } else { - Ok(None) - } -} - -fn parse_single_required_attr<'a, T: WithAttrs + ToTokens>( - token: &'a T, - ident: &str, -) -> Result<&'a Attribute> { - if let Some(attr) = parse_single_attr(token, ident)? { - Ok(attr) - } else { - Err(Error::new_spanned(token, format!("`#[{ident}(..)]` attribute must be provided."))) - } -} - -fn parse_docs_to_string(token: &T) -> Result> { - let mut doc_str = None; - for attr in token.attrs() { - if let syn::Meta::NameValue(ref meta) = attr.meta { - if let Expr::Lit(ref lit) = meta.value { - if let Lit::Str(ref doc) = lit.lit { - let doc_value = doc.value().trim().to_string(); - doc_str = Some( - doc_str - .map(|prev_doc_value| format!("{prev_doc_value} {doc_value}")) - .unwrap_or(doc_value), - ); - } - } - } - } - Ok(doc_str) -} - -fn parse_str_lit(lit: &Lit) -> Result { - match lit { - Lit::Str(lit_str) => Ok(lit_str.to_owned()), - _ => Err(Error::new_spanned(lit, "Value **must** be a string literal.")), - } -} - -fn parse_bool_lit(lit: &Lit) -> Result { - match lit { - Lit::Bool(lit_bool) => Ok(lit_bool.to_owned()), - _ => Err(Error::new_spanned(lit, "Value **must** be a string literal.")), - } -} diff --git a/crates/metrics/metrics-derive/src/lib.rs b/crates/metrics/metrics-derive/src/lib.rs deleted file mode 100644 index 48b1099f476e0..0000000000000 --- a/crates/metrics/metrics-derive/src/lib.rs +++ /dev/null @@ -1,139 +0,0 @@ -//! 
This crate provides [Metrics] derive macro - -#![doc( - html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", - html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" -)] -#![cfg_attr(not(test), warn(unused_crate_dependencies))] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] - -use proc_macro::TokenStream; -use syn::{parse_macro_input, DeriveInput}; - -mod expand; -mod metric; -mod with_attrs; - -/// The [Metrics] derive macro instruments all of the struct fields and -/// creates a [Default] implementation for the struct registering all of -/// the metrics. -/// -/// Additionally, it creates a `describe` method on the struct, which -/// internally calls the describe statements for all metric fields. -/// -/// Sample usage: -/// ``` -/// use metrics::{Counter, Gauge, Histogram}; -/// use reth_metrics_derive::Metrics; -/// -/// #[derive(Metrics)] -/// #[metrics(scope = "metrics_custom")] -/// pub struct CustomMetrics { -/// /// A gauge with doc comment description. -/// gauge: Gauge, -/// #[metric(rename = "second_gauge", describe = "A gauge with metric attribute description.")] -/// gauge2: Gauge, -/// /// Some doc comment -/// #[metric(describe = "Metric attribute description will be preferred over doc comment.")] -/// counter: Counter, -/// /// A renamed histogram. -/// #[metric(rename = "histogram")] -/// histo: Histogram, -/// } -/// ``` -/// -/// The example above will be expanded to: -/// ``` -/// pub struct CustomMetrics { -/// /// A gauge with doc comment description. -/// gauge: metrics::Gauge, -/// gauge2: metrics::Gauge, -/// /// Some doc comment -/// counter: metrics::Counter, -/// /// A renamed histogram. -/// histo: metrics::Histogram, -/// } -/// -/// impl Default for CustomMetrics { -/// fn default() -> Self { -/// Self { -/// gauge: metrics::gauge!("metrics_custom_gauge"), -/// gauge2: metrics::gauge!("metrics_custom_second_gauge"), -/// counter: metrics::counter!("metrics_custom_counter"), -/// histo: metrics::histogram!("metrics_custom_histogram"), -/// } -/// } -/// } -/// -/// impl CustomMetrics { -/// /// Describe all exposed metrics -/// pub fn describe() { -/// metrics::describe_gauge!( -/// "metrics_custom_gauge", -/// "A gauge with doc comment description." -/// ); -/// metrics::describe_gauge!( -/// "metrics_custom_second_gauge", -/// "A gauge with metric attribute description." -/// ); -/// metrics::describe_counter!( -/// "metrics_custom_counter", -/// "Metric attribute description will be preferred over doc comment." -/// ); -/// metrics::describe_histogram!("metrics_custom_histogram", "A renamed histogram."); -/// } -/// } -/// -/// impl std::fmt::Debug for CustomMetrics { -/// fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { -/// f.debug_struct("CustomMetrics").finish() -/// } -/// } -/// ``` -/// -/// Similarly, you can derive metrics with "dynamic" scope, -/// meaning their scope can be set at the time of instantiation. -/// For example: -/// ``` -/// use reth_metrics_derive::Metrics; -/// -/// #[derive(Metrics)] -/// #[metrics(dynamic = true)] -/// pub struct DynamicScopeMetrics { -/// /// A gauge with doc comment description. -/// gauge: metrics::Gauge, -/// } -/// ``` -/// -/// The example with dynamic scope will expand to -/// ``` -/// pub struct DynamicScopeMetrics { -/// /// A gauge with doc comment description. 
-/// gauge: metrics::Gauge, -/// } -/// -/// impl DynamicScopeMetrics { -/// pub fn new(scope: &str) -> Self { -/// Self { gauge: metrics::gauge!(format!("{}{}{}", scope, "_", "gauge")) } -/// } -/// -/// pub fn describe(scope: &str) { -/// metrics::describe_gauge!( -/// format!("{}{}{}", scope, "_", "gauge"), -/// "A gauge with doc comment description." -/// ); -/// } -/// } -/// -/// impl std::fmt::Debug for DynamicScopeMetrics { -/// fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { -/// f.debug_struct("DynamicScopeMetrics").finish() -/// } -/// } -/// ``` -#[proc_macro_derive(Metrics, attributes(metrics, metric))] -pub fn derive_metrics(input: TokenStream) -> TokenStream { - let input = parse_macro_input!(input as DeriveInput); - expand::derive(&input).unwrap_or_else(|err| err.to_compile_error()).into() -} diff --git a/crates/metrics/metrics-derive/src/metric.rs b/crates/metrics/metrics-derive/src/metric.rs deleted file mode 100644 index e8dfb24847a8e..0000000000000 --- a/crates/metrics/metrics-derive/src/metric.rs +++ /dev/null @@ -1,59 +0,0 @@ -use quote::quote; -use syn::{Error, Field, LitStr, Result, Type}; - -const COUNTER_TY: &str = "Counter"; -const HISTOGRAM_TY: &str = "Histogram"; -const GAUGE_TY: &str = "Gauge"; - -pub(crate) struct Metric<'a> { - pub(crate) field: &'a Field, - pub(crate) description: String, - rename: Option, -} - -impl<'a> Metric<'a> { - pub(crate) const fn new(field: &'a Field, description: String, rename: Option) -> Self { - Self { field, description, rename } - } - - pub(crate) fn name(&self) -> String { - match self.rename.as_ref() { - Some(name) => name.value(), - None => self.field.ident.as_ref().map(ToString::to_string).unwrap_or_default(), - } - } - - pub(crate) fn register_stmt(&self) -> Result { - if let Type::Path(ref path_ty) = self.field.ty { - if let Some(last) = path_ty.path.segments.last() { - let registrar = match last.ident.to_string().as_str() { - COUNTER_TY => quote! { metrics::counter! }, - HISTOGRAM_TY => quote! { metrics::histogram! }, - GAUGE_TY => quote! { metrics::gauge! }, - _ => return Err(Error::new_spanned(path_ty, "Unsupported metric type")), - }; - - return Ok(quote! { #registrar }) - } - } - - Err(Error::new_spanned(&self.field.ty, "Unsupported metric type")) - } - - pub(crate) fn describe_stmt(&self) -> Result { - if let Type::Path(ref path_ty) = self.field.ty { - if let Some(last) = path_ty.path.segments.last() { - let descriptor = match last.ident.to_string().as_str() { - COUNTER_TY => quote! { metrics::describe_counter! }, - HISTOGRAM_TY => quote! { metrics::describe_histogram! }, - GAUGE_TY => quote! { metrics::describe_gauge! }, - _ => return Err(Error::new_spanned(path_ty, "Unsupported metric type")), - }; - - return Ok(quote! 
{ #descriptor }) - } - } - - Err(Error::new_spanned(&self.field.ty, "Unsupported metric type")) - } -} diff --git a/crates/metrics/metrics-derive/src/with_attrs.rs b/crates/metrics/metrics-derive/src/with_attrs.rs deleted file mode 100644 index 9095d99609f26..0000000000000 --- a/crates/metrics/metrics-derive/src/with_attrs.rs +++ /dev/null @@ -1,17 +0,0 @@ -use syn::{Attribute, DeriveInput, Field}; - -pub(crate) trait WithAttrs { - fn attrs(&self) -> &[Attribute]; -} - -impl WithAttrs for DeriveInput { - fn attrs(&self) -> &[Attribute] { - &self.attrs - } -} - -impl WithAttrs for Field { - fn attrs(&self) -> &[Attribute] { - &self.attrs - } -} diff --git a/crates/metrics/metrics-derive/tests/compile-fail/metric_attr.rs b/crates/metrics/metrics-derive/tests/compile-fail/metric_attr.rs deleted file mode 100644 index 8a8b277baf0d9..0000000000000 --- a/crates/metrics/metrics-derive/tests/compile-fail/metric_attr.rs +++ /dev/null @@ -1,62 +0,0 @@ -extern crate metrics; -extern crate reth_metrics_derive; - -use metrics::Gauge; -use reth_metrics_derive::Metrics; - -fn main() {} - -#[derive(Metrics)] -#[metrics(scope = "some_scope")] -struct CustomMetrics { - gauge: Gauge, -} - -#[derive(Metrics)] -#[metrics(scope = "some_scope")] -struct CustomMetrics2 { - #[metric()] - gauge: Gauge, -} - -#[derive(Metrics)] -#[metrics(scope = "some_scope")] -struct CustomMetrics3 { - #[metric(random = "value")] - gauge: Gauge, -} - -#[derive(Metrics)] -#[metrics(scope = "some_scope")] -struct CustomMetrics4 { - #[metric(describe = 123)] - gauge: Gauge, -} - -#[derive(Metrics)] -#[metrics(scope = "some_scope")] -struct CustomMetrics5 { - #[metric(rename = 123)] - gauge: Gauge, -} - -#[derive(Metrics)] -#[metrics(scope = "some_scope")] -struct CustomMetrics6 { - #[metric(describe = "", describe = "")] - gauge: Gauge, -} - -#[derive(Metrics)] -#[metrics(scope = "some_scope")] -struct CustomMetrics7 { - #[metric(rename = "_gauge", rename = "_gauge")] - gauge: Gauge, -} - -#[derive(Metrics)] -#[metrics(scope = "some_scope")] -struct CustomMetrics8 { - #[metric(describe = "")] - gauge: String, -} diff --git a/crates/metrics/metrics-derive/tests/compile-fail/metric_attr.stderr b/crates/metrics/metrics-derive/tests/compile-fail/metric_attr.stderr deleted file mode 100644 index 96659e49f2227..0000000000000 --- a/crates/metrics/metrics-derive/tests/compile-fail/metric_attr.stderr +++ /dev/null @@ -1,48 +0,0 @@ -error: Either doc comment or `describe = ..` must be set. - --> tests/compile-fail/metric_attr.rs:12:5 - | -12 | gauge: Gauge, - | ^^^^^^^^^^^^ - -error: Either doc comment or `describe = ..` must be set. - --> tests/compile-fail/metric_attr.rs:18:5 - | -18 | / #[metric()] -19 | | gauge: Gauge, - | |________________^ - -error: Unsupported attribute entry. - --> tests/compile-fail/metric_attr.rs:25:14 - | -25 | #[metric(random = "value")] - | ^^^^^^^^^^^^^^^^ - -error: Value **must** be a string literal. - --> tests/compile-fail/metric_attr.rs:32:25 - | -32 | #[metric(describe = 123)] - | ^^^ - -error: Value **must** be a string literal. - --> tests/compile-fail/metric_attr.rs:39:23 - | -39 | #[metric(rename = 123)] - | ^^^ - -error: Duplicate `describe` value provided. - --> tests/compile-fail/metric_attr.rs:46:29 - | -46 | #[metric(describe = "", describe = "")] - | ^^^^^^^^^^^^^ - -error: Duplicate `rename` value provided. 
- --> tests/compile-fail/metric_attr.rs:53:33 - | -53 | #[metric(rename = "_gauge", rename = "_gauge")] - | ^^^^^^^^^^^^^^^^^ - -error: Unsupported metric type - --> tests/compile-fail/metric_attr.rs:61:12 - | -61 | gauge: String, - | ^^^^^^ diff --git a/crates/metrics/metrics-derive/tests/compile-fail/metrics_attr.rs b/crates/metrics/metrics-derive/tests/compile-fail/metrics_attr.rs deleted file mode 100644 index 6c8d3f129b103..0000000000000 --- a/crates/metrics/metrics-derive/tests/compile-fail/metrics_attr.rs +++ /dev/null @@ -1,56 +0,0 @@ -extern crate reth_metrics_derive; -use reth_metrics_derive::Metrics; - -fn main() {} - -#[derive(Metrics)] -struct CustomMetrics; - -#[derive(Metrics)] -#[metrics()] -#[metrics()] -struct CustomMetrics2; - -#[derive(Metrics)] -#[metrics()] -struct CustomMetrics3; - -#[derive(Metrics)] -#[metrics(scope = value)] -struct CustomMetrics4; - -#[derive(Metrics)] -#[metrics(scope = 123)] -struct CustomMetrics5; - -#[derive(Metrics)] -#[metrics(scope = "some-scope")] -struct CustomMetrics6; - -#[derive(Metrics)] -#[metrics(scope = "some_scope", scope = "another_scope")] -struct CustomMetrics7; - -#[derive(Metrics)] -#[metrics(separator = value)] -struct CustomMetrics8; - -#[derive(Metrics)] -#[metrics(separator = 123)] -struct CustomMetrics9; - -#[derive(Metrics)] -#[metrics(separator = "x")] -struct CustomMetrics10; - -#[derive(Metrics)] -#[metrics(separator = "_", separator = ":")] -struct CustomMetrics11; - -#[derive(Metrics)] -#[metrics(random = "value")] -struct CustomMetrics12; - -#[derive(Metrics)] -#[metrics(scope = "scope", dynamic = true)] -struct CustomMetrics13; diff --git a/crates/metrics/metrics-derive/tests/compile-fail/metrics_attr.stderr b/crates/metrics/metrics-derive/tests/compile-fail/metrics_attr.stderr deleted file mode 100644 index 5121258d5cb70..0000000000000 --- a/crates/metrics/metrics-derive/tests/compile-fail/metrics_attr.stderr +++ /dev/null @@ -1,81 +0,0 @@ -error: `#[metrics(..)]` attribute must be provided. - --> tests/compile-fail/metrics_attr.rs:7:1 - | -7 | struct CustomMetrics; - | ^^^^^^^^^^^^^^^^^^^^^ - -error: Duplicate `#[metrics(..)]` attribute provided. - --> tests/compile-fail/metrics_attr.rs:11:1 - | -11 | #[metrics()] - | ^^^^^^^^^^^^ - -error: Either `scope = ..` or `dynamic = true` must be set. - --> tests/compile-fail/metrics_attr.rs:15:1 - | -15 | / #[metrics()] -16 | | struct CustomMetrics3; - | |______________________^ - -error: Either `scope = ..` or `dynamic = true` must be set. - --> tests/compile-fail/metrics_attr.rs:19:1 - | -19 | / #[metrics(scope = value)] -20 | | struct CustomMetrics4; - | |______________________^ - -error: Value **must** be a string literal. - --> tests/compile-fail/metrics_attr.rs:23:19 - | -23 | #[metrics(scope = 123)] - | ^^^ - -error: Value must match regex ^[a-zA-Z_:.][a-zA-Z0-9_:.]*$ - --> tests/compile-fail/metrics_attr.rs:27:19 - | -27 | #[metrics(scope = "some-scope")] - | ^^^^^^^^^^^^ - -error: Duplicate `scope` value provided. - --> tests/compile-fail/metrics_attr.rs:31:33 - | -31 | #[metrics(scope = "some_scope", scope = "another_scope")] - | ^^^^^^^^^^^^^^^^^^^^^^^ - -error: Either `scope = ..` or `dynamic = true` must be set. - --> tests/compile-fail/metrics_attr.rs:35:1 - | -35 | / #[metrics(separator = value)] -36 | | struct CustomMetrics8; - | |______________________^ - -error: Value **must** be a string literal. - --> tests/compile-fail/metrics_attr.rs:39:23 - | -39 | #[metrics(separator = 123)] - | ^^^ - -error: Unsupported `separator` value. 
Supported: `.`, `_`, `:`. - --> tests/compile-fail/metrics_attr.rs:43:11 - | -43 | #[metrics(separator = "x")] - | ^^^^^^^^^^^^^^^ - -error: Duplicate `separator` value provided. - --> tests/compile-fail/metrics_attr.rs:47:28 - | -47 | #[metrics(separator = "_", separator = ":")] - | ^^^^^^^^^^^^^^^ - -error: Unsupported attribute entry. - --> tests/compile-fail/metrics_attr.rs:51:11 - | -51 | #[metrics(random = "value")] - | ^^^^^^^^^^^^^^^^ - -error: `scope = ..` conflicts with `dynamic = true`. - --> tests/compile-fail/metrics_attr.rs:55:1 - | -55 | / #[metrics(scope = "scope", dynamic = true)] -56 | | struct CustomMetrics13; - | |_______________________^ diff --git a/crates/metrics/metrics-derive/tests/metrics.rs b/crates/metrics/metrics-derive/tests/metrics.rs deleted file mode 100644 index a07ccc8a7ce13..0000000000000 --- a/crates/metrics/metrics-derive/tests/metrics.rs +++ /dev/null @@ -1,351 +0,0 @@ -#![allow(missing_docs)] -use metrics::{ - Counter, Gauge, Histogram, Key, KeyName, Label, Metadata, Recorder, SharedString, Unit, -}; -use reth_metrics_derive::Metrics; -use serial_test::serial; -use std::{ - collections::HashMap, - sync::{LazyLock, Mutex}, -}; - -#[allow(dead_code)] -#[derive(Metrics)] -#[metrics(scope = "metrics_custom")] -struct CustomMetrics { - #[metric(skip)] - skipped_field_a: u8, - /// A gauge with doc comment description. - gauge: Gauge, - #[metric(rename = "second_gauge", describe = "A gauge with metric attribute description.")] - gauge2: Gauge, - #[metric(skip)] - skipped_field_b: u16, - /// Some doc comment - #[metric(describe = "Metric attribute description will be preferred over doc comment.")] - counter: Counter, - #[metric(skip)] - skipped_field_c: u32, - #[metric(skip)] - skipped_field_d: u64, - /// A renamed histogram. - #[metric(rename = "histogram")] - histo: Histogram, - #[metric(skip)] - skipped_field_e: u128, -} - -#[allow(dead_code)] -#[derive(Metrics)] -#[metrics(dynamic = true)] -struct DynamicScopeMetrics { - #[metric(skip)] - skipped_field_a: u8, - /// A gauge with doc comment description. - gauge: Gauge, - #[metric(rename = "second_gauge", describe = "A gauge with metric attribute description.")] - gauge2: Gauge, - #[metric(skip)] - skipped_field_b: u16, - /// Some doc comment - #[metric(describe = "Metric attribute description will be preferred over doc comment.")] - counter: Counter, - #[metric(skip)] - skipped_field_c: u32, - #[metric(skip)] - skipped_field_d: u64, - /// A renamed histogram. 
- #[metric(rename = "histogram")] - histo: Histogram, - #[metric(skip)] - skipped_field_e: u128, -} - -static RECORDER: LazyLock = LazyLock::new(TestRecorder::new); - -fn test_describe(scope: &str) { - assert_eq!(RECORDER.metrics_len(), 4); - - let gauge = RECORDER.get_metric(&format!("{scope}.gauge")); - assert!(gauge.is_some()); - assert_eq!( - gauge.unwrap(), - TestMetric { - ty: TestMetricTy::Gauge, - description: Some("A gauge with doc comment description.".to_owned()), - labels: None, - } - ); - - let second_gauge = RECORDER.get_metric(&format!("{scope}.second_gauge")); - assert!(second_gauge.is_some()); - assert_eq!( - second_gauge.unwrap(), - TestMetric { - ty: TestMetricTy::Gauge, - description: Some("A gauge with metric attribute description.".to_owned()), - labels: None, - } - ); - - let counter = RECORDER.get_metric(&format!("{scope}.counter")); - assert!(counter.is_some()); - assert_eq!( - counter.unwrap(), - TestMetric { - ty: TestMetricTy::Counter, - description: Some( - "Metric attribute description will be preferred over doc comment.".to_owned() - ), - labels: None, - } - ); - - let histogram = RECORDER.get_metric(&format!("{scope}.histogram")); - assert!(histogram.is_some()); - assert_eq!( - histogram.unwrap(), - TestMetric { - ty: TestMetricTy::Histogram, - description: Some("A renamed histogram.".to_owned()), - labels: None, - } - ); -} - -#[test] -#[serial] -fn describe_metrics() { - let _guard = RECORDER.enter(); - - CustomMetrics::describe(); - - test_describe("metrics_custom"); -} - -#[test] -#[serial] -fn describe_dynamic_metrics() { - let _guard = RECORDER.enter(); - - let scope = "local_scope"; - - DynamicScopeMetrics::describe(scope); - - test_describe(scope); -} - -fn test_register(scope: &str) { - assert_eq!(RECORDER.metrics_len(), 4); - - let gauge = RECORDER.get_metric(&format!("{scope}.gauge")); - assert!(gauge.is_some()); - assert_eq!( - gauge.unwrap(), - TestMetric { ty: TestMetricTy::Gauge, description: None, labels: None } - ); - - let second_gauge = RECORDER.get_metric(&format!("{scope}.second_gauge")); - assert!(second_gauge.is_some()); - assert_eq!( - second_gauge.unwrap(), - TestMetric { ty: TestMetricTy::Gauge, description: None, labels: None } - ); - - let counter = RECORDER.get_metric(&format!("{scope}.counter")); - assert!(counter.is_some()); - assert_eq!( - counter.unwrap(), - TestMetric { ty: TestMetricTy::Counter, description: None, labels: None } - ); - - let histogram = RECORDER.get_metric(&format!("{scope}.histogram")); - assert!(histogram.is_some()); - assert_eq!( - histogram.unwrap(), - TestMetric { ty: TestMetricTy::Histogram, description: None, labels: None } - ); -} - -#[test] -#[serial] -fn register_metrics() { - let _guard = RECORDER.enter(); - - let _metrics = CustomMetrics::default(); - - test_register("metrics_custom"); -} - -#[test] -#[serial] -fn register_dynamic_metrics() { - let _guard = RECORDER.enter(); - - let scope = "local_scope"; - - let _metrics = DynamicScopeMetrics::new(scope); - - test_register(scope); -} - -fn test_labels(scope: &str) { - let test_labels = vec![Label::new("key", "value")]; - - let gauge = RECORDER.get_metric(&format!("{scope}.gauge")); - assert!(gauge.is_some()); - let labels = gauge.unwrap().labels; - assert!(labels.is_some()); - assert_eq!(labels.unwrap(), test_labels,); - - let second_gauge = RECORDER.get_metric(&format!("{scope}.second_gauge")); - assert!(second_gauge.is_some()); - let labels = second_gauge.unwrap().labels; - assert!(labels.is_some()); - assert_eq!(labels.unwrap(), 
test_labels,); - - let counter = RECORDER.get_metric(&format!("{scope}.counter")); - assert!(counter.is_some()); - let labels = counter.unwrap().labels; - assert!(labels.is_some()); - assert_eq!(labels.unwrap(), test_labels,); - - let histogram = RECORDER.get_metric(&format!("{scope}.histogram")); - assert!(histogram.is_some()); - let labels = histogram.unwrap().labels; - assert!(labels.is_some()); - assert_eq!(labels.unwrap(), test_labels,); -} - -#[test] -#[serial] -fn label_metrics() { - let _guard = RECORDER.enter(); - - let _metrics = CustomMetrics::new_with_labels(&[("key", "value")]); - - test_labels("metrics_custom"); -} - -#[test] -#[serial] -fn dynamic_label_metrics() { - let _guard = RECORDER.enter(); - - let scope = "local_scope"; - - let _metrics = DynamicScopeMetrics::new_with_labels(scope, &[("key", "value")]); - - test_labels(scope); -} - -struct TestRecorder { - // Metrics map: key => Option - metrics: Mutex>, -} - -#[derive(PartialEq, Clone, Debug)] -enum TestMetricTy { - Counter, - Gauge, - Histogram, -} - -#[derive(PartialEq, Clone, Debug)] -struct TestMetric { - ty: TestMetricTy, - description: Option, - labels: Option>, -} - -impl TestRecorder { - fn new() -> Self { - Self { metrics: Mutex::new(HashMap::default()) } - } - - /// Sets this recorder as the global recorder for the duration of the returned guard. - #[must_use] - fn enter(&'static self) -> impl Drop { - struct Reset { - recorder: &'static TestRecorder, - } - impl Drop for Reset { - fn drop(&mut self) { - self.recorder.clear(); - } - } - - let _ = metrics::set_global_recorder(self); - Reset { recorder: self } - } - - fn metrics_len(&self) -> usize { - self.metrics.lock().expect("failed to lock metrics").len() - } - - fn get_metric(&self, key: &str) -> Option { - self.metrics.lock().expect("failed to lock metrics").get(key).cloned() - } - - fn record_metric( - &self, - key: &str, - ty: TestMetricTy, - description: Option, - labels: Option>, - ) { - self.metrics - .lock() - .expect("failed to lock metrics") - .insert(key.to_owned(), TestMetric { ty, description, labels }); - } - - fn clear(&self) { - self.metrics.lock().expect("failed to lock metrics").clear(); - } -} - -impl Recorder for &'static TestRecorder { - fn describe_counter(&self, key: KeyName, _unit: Option, description: SharedString) { - self.record_metric( - key.as_str(), - TestMetricTy::Counter, - Some(description.into_owned()), - None, - ) - } - - fn describe_gauge(&self, key: KeyName, _unit: Option, description: SharedString) { - self.record_metric(key.as_str(), TestMetricTy::Gauge, Some(description.into_owned()), None) - } - - fn describe_histogram(&self, key: KeyName, _unit: Option, description: SharedString) { - self.record_metric( - key.as_str(), - TestMetricTy::Histogram, - Some(description.into_owned()), - None, - ) - } - - fn register_counter(&self, key: &Key, _metadata: &Metadata<'_>) -> Counter { - let labels_vec: Vec