
Commit 81e4e5c

feat: blob cache
1 parent 92628a9

File tree

8 files changed: +290 -134 lines changed


README.md

Lines changed: 26 additions & 18 deletions
@@ -3,7 +3,32 @@
 A collection of components for building the Signet node. These components
 implement core node functionality, but are potentially independently useful.
 
-### What's new in Signet?
+## What's in the Components?
+
+- **signet-node-types** - Shim types wrapping reth's internal node types
+  system to make it more usable in Signet.
+- **signet-blobber** - Blob retrieval and parsing, using blob explorers,
+  Signet's Pylon, and the local node transaction API.
+- **signet-rpc** - An Ethereum JSON-RPC Server for Signet nodes. Makes heavy
+  use of reth internals.
+- **signet-db** - An extension of reth's database, providing a Signet-specific
+  database schema and utilities for working with Signet blocks and transactions.
+
+### Contributing to the Node Components
+
+Please see [CONTRIBUTING.md](CONTRIBUTING.md).
+
+[Signet docs]: https://docs.signet.sh
+
+## Note on Semver
+
+This repo is UNPUBLISHED and may NOT respect semantic versioning between tagged
+versions. In general, it is versioned to match the signet-sdk version with
+which it is compatible. I.e. `[email protected]` is expected to be
+compatible with any signet-sdk `0.8.x` version. However, a release of
+`[email protected]` may have breaking changes from `[email protected]`.
+
+## What's new in Signet?
 
 Signet is a pragmatic Ethereum rollup that offers a new set of ideas and aims
 to radically modernize rollup technology.
@@ -22,20 +47,3 @@ knowledge. Signet does not have a native token.
 Signet is just a rollup.
 
 See the [Signet docs] for more info.
-
-### What's in the Components?
-
-- **signet-node-types** - Shim types wrapping reth's internal node types
-  system to make it more usable in Signet.
-- **signet-blobber** - Blob retrieval and parsing, using blob explorers,
-  Signet's Pylon, and the local node transaction API.
-- **signet-rpc** - An Ethereum JSON-RPC Server for Signet nodes. Makes heavy
-  use of reth internals.
-- **signet-db** - An extension of reth's database, providing a Signet-specific
-  database schema and utilities for working with Signet blocks and transactions.
-
-### Contributing to the Node Components
-
-Please see [CONTRIBUTING.md](CONTRIBUTING.md).
-
-[Signet docs]: https://docs.signet.sh

crates/blobber/README.md

Lines changed: 5 additions & 1 deletion
@@ -1,9 +1,13 @@
 # Block Extractor
 
-The [`BlockExtractor`] retrieves blobs from host chain blocks and parses them
+The [`BlobFetcher`] retrieves blobs from host chain blocks and parses them
 into [`ZenithBlock`]s. It is used by the node during notification processing
 when a [`Zenith::BlockSubmitted`] event is extracted from a host chain block.
 
+The [`BlobCacher`] is a wrapper around the [`BlobFetcher`] that caches
+blobs in an in-memory cache. It is used to avoid fetching the same blob
+repeatedly and to manage retry logic during fetching.
+
 ## Data Sources
 
 The following sources can be configured:
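For orientation, here is a minimal sketch of the lifecycle the README paragraph describes, using only items introduced or renamed in this commit (`BlobCacher::new`, `spawn`, `CacheHandle::fetch_blobs`). The `signet_blobber` crate paths and the surrounding async context are assumptions, not part of the diff.

```rust
use alloy::primitives::B256;
use reth::transaction_pool::TransactionPool;
// Assumed re-exports from the crate root; paths are illustrative.
use signet_blobber::{BlobCacher, BlobFetcher, Blobs, FetchResult};

async fn fetch_via_cache<Pool: TransactionPool + 'static>(
    fetcher: BlobFetcher<Pool>,
    slot: u64,
    tx_hash: B256,
    versioned_hashes: Vec<B256>,
) -> FetchResult<Blobs> {
    // Wrap the fetcher and spawn the background cache task.
    let handle = BlobCacher::new(fetcher).spawn();

    // A miss goes through the fetcher (with retries); repeat requests for the
    // same (slot, tx_hash) are served from the in-memory LRU cache.
    handle.fetch_blobs(slot, tx_hash, versioned_hashes).await
}
```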

crates/blobber/src/builder.rs

Lines changed: 26 additions & 19 deletions
@@ -1,10 +1,10 @@
-use crate::{block_data::BlockExtractor, BlockExtractorConfig};
+use crate::{fetch::BlobFetcher, BlobCacher, BlobFetcherConfig};
 use init4_bin_base::utils::calc::SlotCalculator;
 use reth::transaction_pool::TransactionPool;
 use url::Url;
 
-/// Errors that can occur while building the [`BlockExtractor`] with a
-/// [`BlockExtractorBuilder`].
+/// Errors that can occur while building the [`BlobFetcher`] with a
+/// [`BlobFetcherBuilder`].
 #[derive(Debug, thiserror::Error)]
 pub enum BuilderError {
     /// The transaction pool was not provided.
@@ -27,9 +27,9 @@ pub enum BuilderError {
     MissingSlotCalculator,
 }
 
-/// Builder for the [`BlockExtractor`].
+/// Builder for the [`BlobFetcher`].
 #[derive(Debug, Default, Clone)]
-pub struct BlockExtractorBuilder<Pool> {
+pub struct BlobFetcherBuilder<Pool> {
     pool: Option<Pool>,
     explorer_url: Option<String>,
     client: Option<reqwest::Client>,
@@ -38,10 +38,10 @@ pub struct BlockExtractorBuilder<Pool> {
     slot_calculator: Option<SlotCalculator>,
 }
 
-impl<Pool> BlockExtractorBuilder<Pool> {
+impl<Pool> BlobFetcherBuilder<Pool> {
     /// Set the transaction pool to use for the extractor.
-    pub fn with_pool<P2>(self, pool: P2) -> BlockExtractorBuilder<P2> {
-        BlockExtractorBuilder {
+    pub fn with_pool<P2>(self, pool: P2) -> BlobFetcherBuilder<P2> {
+        BlobFetcherBuilder {
             pool: Some(pool),
             explorer_url: self.explorer_url,
             client: self.client,
@@ -53,15 +53,13 @@ impl<Pool> BlockExtractorBuilder<Pool> {
 
     /// Set the transaction pool to use a mock test pool.
     #[cfg(feature = "test-utils")]
-    pub fn with_test_pool(
-        self,
-    ) -> BlockExtractorBuilder<reth_transaction_pool::test_utils::TestPool> {
+    pub fn with_test_pool(self) -> BlobFetcherBuilder<reth_transaction_pool::test_utils::TestPool> {
         self.with_pool(reth_transaction_pool::test_utils::testing_pool())
     }
 
     /// Set the configuration for the CL url, pylon url, from the provided
-    /// [`BlockExtractorConfig`].
-    pub fn with_config(self, config: &BlockExtractorConfig) -> Result<Self, BuilderError> {
+    /// [`BlobFetcherConfig`].
+    pub fn with_config(self, config: &BlobFetcherConfig) -> Result<Self, BuilderError> {
         let this = self.with_explorer_url(config.blob_explorer_url());
         let this =
             if let Some(cl_url) = config.cl_url() { this.with_cl_url(cl_url)? } else { this };
@@ -114,22 +112,22 @@ impl<Pool> BlockExtractorBuilder<Pool> {
     pub const fn with_slot_calculator(
         mut self,
         slot_calculator: SlotCalculator,
-    ) -> BlockExtractorBuilder<Pool> {
+    ) -> BlobFetcherBuilder<Pool> {
         self.slot_calculator = Some(slot_calculator);
         self
     }
 
     /// Set the slot calculator to use for the extractor, using the Pecornino
     /// host configuration.
-    pub const fn with_pecornino_slots(mut self) -> BlockExtractorBuilder<Pool> {
+    pub const fn with_pecornino_slots(mut self) -> BlobFetcherBuilder<Pool> {
         self.slot_calculator = Some(SlotCalculator::pecorino_host());
         self
     }
 }
 
-impl<Pool: TransactionPool> BlockExtractorBuilder<Pool> {
-    /// Build the [`BlockExtractor`] with the provided parameters.
-    pub fn build(self) -> Result<BlockExtractor<Pool>, BuilderError> {
+impl<Pool: TransactionPool> BlobFetcherBuilder<Pool> {
+    /// Build the [`BlobFetcher`] with the provided parameters.
+    pub fn build(self) -> Result<BlobFetcher<Pool>, BuilderError> {
         let pool = self.pool.ok_or(BuilderError::MissingPool)?;
 
         let explorer_url = self.explorer_url.ok_or(BuilderError::MissingExplorerUrl)?;
@@ -145,7 +143,16 @@ impl<Pool: TransactionPool> BlockExtractorBuilder<Pool> {
 
         let slot_calculator = self.slot_calculator.ok_or(BuilderError::MissingSlotCalculator)?;
 
-        Ok(BlockExtractor::new(pool, explorer, client, cl_url, pylon_url, slot_calculator))
+        Ok(BlobFetcher::new(pool, explorer, client, cl_url, pylon_url, slot_calculator))
+    }
+
+    /// Build a [`BlobCacher`] with the provided parameters.
+    pub fn build_cache(self) -> Result<BlobCacher<Pool>, BuilderError>
+    where
+        Pool: 'static,
+    {
+        let fetcher = self.build()?;
+        Ok(BlobCacher::new(fetcher))
     }
 }

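The builder chain reads end to end; the hedged sketch below wires a `BlobCacher` (defined in cache.rs, next) from a live pool and a `BlobFetcherConfig`, using only methods visible in this diff (`with_pool`, `with_config`, `with_slot_calculator`, `build_cache`, `spawn`). The helper-function name, the crate paths, and the placeholder `()` pool type used to start the chain are illustrative assumptions.

```rust
use init4_bin_base::utils::calc::SlotCalculator;
use reth::transaction_pool::TransactionPool;
use signet_blobber::{BlobFetcherBuilder, BlobFetcherConfig, BuilderError, CacheHandle};

// NOTE: `spawn` uses tokio::spawn internally, so call this inside a Tokio runtime.
fn spawn_blob_cache<Pool>(
    pool: Pool,
    config: &BlobFetcherConfig,
    slots: SlotCalculator,
) -> Result<CacheHandle, BuilderError>
where
    Pool: TransactionPool + 'static,
{
    let cacher = BlobFetcherBuilder::<()>::default() // empty builder; pool type is set below
        .with_pool(pool)             // swaps the type parameter to the real pool
        .with_config(config)?        // blob explorer / CL / Pylon URLs
        .with_slot_calculator(slots) // host-chain slot timing
        .build_cache()?;             // BlobFetcher wrapped in a BlobCacher
    Ok(cacher.spawn())               // spawn the cache task; returns a CacheHandle
}
```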
crates/blobber/src/cache.rs

Lines changed: 138 additions & 0 deletions
@@ -0,0 +1,138 @@
+use crate::{BlobFetcherError, Blobs, FetchResult};
+use alloy::primitives::B256;
+use reth::network::cache::LruMap;
+use reth::transaction_pool::TransactionPool;
+use std::{
+    sync::{Arc, Mutex},
+    time::Duration,
+};
+use tokio::sync::{mpsc, oneshot};
+use tracing::{error, info, instrument, warn};
+
+const BLOB_CACHE_SIZE: u32 = 144;
+const FETCH_RETRIES: usize = 3;
+const BETWEEN_RETRIES: Duration = Duration::from_millis(250);
+
+/// Instructions for the cache.
+///
+/// These instructions are sent to the cache handle to perform operations like
+/// retrieving blobs.
+#[derive(Debug)]
+enum CacheInst {
+    Retrieve { slot: u64, tx_hash: B256, version_hashes: Vec<B256>, resp: oneshot::Sender<Blobs> },
+}
+
+/// Handle for the cache.
+#[derive(Debug, Clone)]
+pub struct CacheHandle {
+    sender: mpsc::Sender<CacheInst>,
+}
+
+impl CacheHandle {
+    /// Sends a cache instruction.
+    async fn send(&self, inst: CacheInst) {
+        let _ = self.sender.send(inst).await;
+    }
+
+    /// Fetches blobs from the cache. This triggers a background task to
+    /// fetch blobs if they are not found in the cache.
+    pub async fn fetch_blobs(
+        &self,
+        slot: u64,
+        tx_hash: B256,
+        version_hashes: Vec<B256>,
+    ) -> FetchResult<Blobs> {
+        let (resp, receiver) = oneshot::channel();
+
+        self.send(CacheInst::Retrieve { slot, tx_hash, version_hashes, resp }).await;
+
+        receiver.await.map_err(|_| BlobFetcherError::missing_sidecar(tx_hash))
+    }
+}
+
+/// Retrieves blobs and stores them in a cache for later use.
+pub struct BlobCacher<Pool> {
+    fetcher: crate::BlobFetcher<Pool>,
+
+    cache: Mutex<LruMap<(u64, B256), Blobs>>,
+}
+
+impl<Pool: core::fmt::Debug> core::fmt::Debug for BlobCacher<Pool> {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        f.debug_struct("BlobCacher").field("fetcher", &self.fetcher).finish_non_exhaustive()
+    }
+}
+
+impl<Pool: TransactionPool + 'static> BlobCacher<Pool> {
+    /// Creates a new `BlobCacher` with the provided extractor and cache size.
+    pub fn new(fetcher: crate::BlobFetcher<Pool>) -> Self {
+        Self { fetcher, cache: LruMap::new(BLOB_CACHE_SIZE).into() }
+    }
+
+    /// Fetches blobs for a given slot and transaction hash.
+    #[instrument(skip(self), target = "signet_blobber::BlobCacher", fields(retries = FETCH_RETRIES))]
+    async fn fetch_blobs(
+        &self,
+        slot: u64,
+        tx_hash: B256,
+        versioned_hashes: Vec<B256>,
+    ) -> FetchResult<Blobs> {
+        // Cache hit
+        if let Some(blobs) = self.cache.lock().unwrap().get(&(slot, tx_hash)) {
+            info!(target: "signet_blobber::BlobCacher", "Cache hit");
+            return Ok(blobs.clone());
+        }
+
+        // Cache miss, use the fetcher to retrieve blobs
+        // Retry fetching blobs up to `FETCH_RETRIES` times
+        for attempt in 1..=FETCH_RETRIES {
+            let blobs = self.fetcher.fetch_blobs(slot, tx_hash, &versioned_hashes).await;
+
+            match blobs {
+                Ok(blobs) => {
+                    self.cache.lock().unwrap().insert((slot, tx_hash), blobs.clone());
+                    return Ok(blobs);
+                }
+                Err(BlobFetcherError::Ignorable(e)) => {
+                    warn!(target: "signet_blobber::BlobCacher", attempt, %e, "Blob fetch attempt failed.");
+                    tokio::time::sleep(BETWEEN_RETRIES).await;
+                    continue;
+                }
+                Err(e) => return Err(e), // unrecoverable error
+            }
+        }
+        error!(target: "signet_blobber::BlobCacher", "All fetch attempts failed");
+        Err(BlobFetcherError::missing_sidecar(tx_hash))
+    }
+
+    /// Processes the cache instructions.
+    async fn handle_inst(self: Arc<Self>, inst: CacheInst) {
+        match inst {
+            CacheInst::Retrieve { slot, tx_hash, version_hashes, resp } => {
+                if let Ok(blobs) = self.fetch_blobs(slot, tx_hash, version_hashes).await {
+                    // if listener has gone away, that's okay, we just won't send the response
+                    let _ = resp.send(blobs);
+                }
+            }
+        }
+    }
+
+    async fn task_future(self: Arc<Self>, mut inst: mpsc::Receiver<CacheInst>) {
+        while let Some(inst) = inst.recv().await {
+            let this = Arc::clone(&self);
+            tokio::spawn(async move {
+                this.handle_inst(inst).await;
+            });
+        }
+    }
+
+    /// Spawns the cache task to handle incoming instructions.
+    ///
+    /// # Panics
+    /// This function will panic if the cache task fails to spawn.
+    pub fn spawn(self) -> CacheHandle {
+        let (sender, inst) = mpsc::channel(12);
+        tokio::spawn(Arc::new(self).task_future(inst));
+        CacheHandle { sender }
+    }
+}

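Caller-side, the semantics of the new actor look roughly like the sketch below, assuming a handle obtained from `BlobCacher::spawn()`: on a cache miss the background task retries up to `FETCH_RETRIES` times with `BETWEEN_RETRIES` sleeps in between, and if every attempt fails the oneshot sender is dropped, so the awaited call resolves to a missing-sidecar error. The function name, crate paths, and logging here are illustrative.

```rust
use alloy::primitives::B256;
use signet_blobber::{Blobs, CacheHandle}; // assumed re-exports

async fn blobs_or_skip(
    handle: &CacheHandle,
    slot: u64,
    tx_hash: B256,
    versioned_hashes: Vec<B256>,
) -> Option<Blobs> {
    match handle.fetch_blobs(slot, tx_hash, versioned_hashes).await {
        Ok(blobs) => Some(blobs),
        Err(err) => {
            // All retries exhausted (or an unrecoverable fetch error): skip this blob.
            tracing::warn!(%tx_hash, %err, "blob sidecar unavailable");
            None
        }
    }
}
```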
crates/blobber/src/config.rs

Lines changed: 4 additions & 4 deletions
@@ -4,7 +4,7 @@ use std::borrow::Cow
 /// Configuration for the block extractor.
 #[derive(Debug, Clone, serde::Deserialize, FromEnv)]
 #[serde(rename_all = "camelCase")]
-pub struct BlockExtractorConfig {
+pub struct BlobFetcherConfig {
     /// URL of the blob explorer.
     #[from_env(var = "BLOB_EXPLORER_URL", desc = "URL of the blob explorer", infallible)]
     blob_explorer_url: Cow<'static, str>,
@@ -18,8 +18,8 @@ pub struct BlockExtractorConfig {
     pylon_url: Option<Cow<'static, str>>,
 }
 
-impl BlockExtractorConfig {
-    /// Create a new `BlockExtractorConfig` with default values.
+impl BlobFetcherConfig {
+    /// Create a new `BlobFetcherConfig` with default values.
     pub const fn new(blob_explorer_url: Cow<'static, str>) -> Self {
         Self { blob_explorer_url, cl_url: None, pylon_url: None }
     }
@@ -39,7 +39,7 @@ impl BlockExtractorConfig {
         self.pylon_url = Some(pylon_url);
     }
 
-    /// Create a new `BlockExtractorConfig` with the provided CL URL, Pylon URL,
+    /// Create a new `BlobFetcherConfig` with the provided CL URL, Pylon URL,
     pub fn cl_url(&self) -> Option<&str> {
         self.cl_url.as_deref()
     }
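Given the `serde(rename_all = "camelCase")` derive and the `new` constructor shown above, the config can plausibly be built either programmatically or from camelCase JSON. A hedged sketch follows; the use of serde_json as the deserializer, the URLs, and the crate path are assumptions, and environment-variable loading via the `FromEnv` derive is not shown here.

```rust
use signet_blobber::BlobFetcherConfig; // assumed re-export

fn config_examples() -> Result<(), serde_json::Error> {
    // Programmatic: only the blob explorer URL is required.
    let programmatic = BlobFetcherConfig::new("https://blobs.example".into());
    assert!(programmatic.cl_url().is_none());

    // From camelCase JSON, matching the serde derive on the struct.
    let from_json: BlobFetcherConfig = serde_json::from_str(
        r#"{
            "blobExplorerUrl": "https://blobs.example",
            "clUrl": "https://cl.example",
            "pylonUrl": "https://pylon.example"
        }"#,
    )?;
    assert_eq!(from_json.cl_url(), Some("https://cl.example"));
    Ok(())
}
```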
