
Commit 3c11964

feat: on store sierra contract class in madara backend db (keep-starknet-strange#1409)

Co-authored-by: Lucas @ StarkWare <[email protected]>
tdelabro and 0xLucqs authored Jan 29, 2024
1 parent 24f4cec commit 3c11964
Showing 21 changed files with 195 additions and 73 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -2,6 +2,9 @@

## Next release

- feat(client): on `add_declare_transaction` store sierra contract classes in
the madara backend
- chore: use struct error in client/db
- fix: don't ignore Sierra to CASM mapping in genesis config
- refacto: early exit txs fee estimation when one fails
- dev: fix linter warning in README.md
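For context, the first changelog entry above means that when the node handles a declare transaction over RPC, it now also persists the Sierra contract class in the Madara backend database. The RPC code itself is not part of this excerpt; the sketch below is purely hypothetical, and `store_sierra_class`, `mc_db`, and the exact types are assumptions rather than the committed API.

// Hypothetical sketch of the behaviour described in the changelog entry above.
// Names and signatures are assumptions, not the actual madara RPC code.
use starknet_api::core::ClassHash;
use starknet_api::state::ContractClass;

fn persist_declared_class<B: sp_runtime::traits::Block>(
    backend: &mc_db::Backend<B>,
    class_hash: ClassHash,
    sierra_class: ContractClass,
) -> Result<(), mc_db::DbError> {
    // Keep the Sierra class so it can later be served back over RPC,
    // instead of only keeping the compiled CASM class.
    backend.sierra_classes().store_sierra_class(class_hash, sierra_class)
}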
4 changes: 3 additions & 1 deletion Cargo.lock

Some generated files are not rendered by default.

1 change: 0 additions & 1 deletion Cargo.toml
@@ -236,7 +236,6 @@ starknet-core-contract-client = { git = "https://github.com/keep-starknet-strang
# Other third party dependencies
anyhow = "1.0.75"
flate2 = "1.0.28"
scale-codec = { package = "parity-scale-codec", version = "3.2.2", default-features = false }
parity-scale-codec = { version = "3.2.2", default-features = false }
scale-info = { version = "2.10.0", default-features = false }
lazy_static = { version = "1.4.0", default-features = false }
3 changes: 2 additions & 1 deletion crates/client/data-availability/src/lib.rs
@@ -236,9 +236,10 @@ pub async fn update_state<B: BlockT, H: HasherT>(
log::info!("validity da mode not implemented");
}
DaMode::Sovereign => match madara_backend.da().state_diff(&block_hash) {
Ok(state_diff) => {
Ok(Some(state_diff)) => {
da_client.publish_state_diff(state_diff).await.map_err(|e| anyhow!("DA PUBLISH ERROR: {e}"))?;
}
Ok(None) => Err(anyhow!("there is no state diff stored for block {}", block_hash))?,
Err(e) => Err(anyhow!("could not pull state diff for block {}: {}", block_hash, e))?,
},
DaMode::Volition => log::info!("volition da mode not implemented"),
8 changes: 5 additions & 3 deletions crates/client/db/Cargo.toml
@@ -20,14 +20,16 @@ ethers = { workspace = true }
kvdb-rocksdb = { version = "0.19.0", optional = true }
log = { workspace = true, default-features = true }
parity-db = { version = "0.4.12", optional = true }
sc-client-db = { workspace = true, default-features = true }
scale-codec = { workspace = true, default-features = true, features = [
parity-scale-codec = { workspace = true, default-features = true, features = [
"derive",
] }
sc-client-db = { workspace = true, default-features = true }
sp-core = { workspace = true, default-features = true }
sp-database = { workspace = true, default-features = true }
sp-runtime = { workspace = true, default-features = true }
starknet_api = { workspace = true, default-features = true }
starknet_api = { workspace = true, default-features = true, features = [
"parity-scale-codec",
] }
thiserror = { workspace = true }
uuid = "1.4.1"

40 changes: 22 additions & 18 deletions crates/client/db/src/da_db.rs
@@ -3,15 +3,15 @@ use std::sync::Arc;

use ethers::types::U256;
// Substrate
use scale_codec::{Decode, Encode};
use parity_scale_codec::{Decode, Encode};
use sp_database::Database;
use sp_runtime::traits::Block as BlockT;
// Starknet
use starknet_api::block::BlockHash;
use starknet_api::hash::StarkFelt;
use uuid::Uuid;

use crate::DbHash;
use crate::{DbError, DbHash};

// The fact db stores DA facts that need to be written to L1
pub struct DaDb<B: BlockT> {
@@ -21,56 +21,60 @@ pub struct DaDb<B: BlockT> {

// TODO: purge old cairo job keys
impl<B: BlockT> DaDb<B> {
pub fn state_diff(&self, block_hash: &BlockHash) -> Result<Vec<U256>, String> {
pub fn state_diff(&self, block_hash: &BlockHash) -> Result<Option<Vec<U256>>, DbError> {
match self.db.get(crate::columns::DA, block_hash.0.bytes()) {
Some(raw) => Ok(Vec::<U256>::decode(&mut &raw[..]).map_err(|e| format!("{:?}", e))?),
None => Err(String::from("can't write state diff")),
Some(raw) => Ok(Some(Vec::<U256>::decode(&mut &raw[..])?)),
None => Ok(None),
}
}

pub fn store_state_diff(&self, block_hash: &BlockHash, diff: Vec<U256>) -> Result<(), String> {
pub fn store_state_diff(&self, block_hash: &BlockHash, diff: Vec<U256>) -> Result<(), DbError> {
let mut transaction = sp_database::Transaction::new();

transaction.set(crate::columns::DA, block_hash.0.bytes(), &diff.encode());

self.db.commit(transaction).map_err(|e| format!("{:?}", e))?;
self.db.commit(transaction)?;

Ok(())
}

pub fn cairo_job(&self, block_hash: &BlockHash) -> Result<Uuid, String> {
pub fn cairo_job(&self, block_hash: &BlockHash) -> Result<Option<Uuid>, DbError> {
match self.db.get(crate::columns::DA, block_hash.0.bytes()) {
Some(raw) => Ok(Uuid::from_slice(&raw[..]).map_err(|e| format!("{:?}", e))?),
None => Err(String::from("can't locate cairo job")),
Some(raw) => Ok(Some(Uuid::from_slice(&raw[..])?)),
None => Ok(None),
}
}

pub fn update_cairo_job(&self, block_hash: &BlockHash, job_id: Uuid) -> Result<(), String> {
pub fn update_cairo_job(&self, block_hash: &BlockHash, job_id: Uuid) -> Result<(), DbError> {
let mut transaction = sp_database::Transaction::new();

transaction.set(crate::columns::DA, block_hash.0.bytes(), &job_id.into_bytes());

self.db.commit(transaction).map_err(|e| format!("{:?}", e))?;
self.db.commit(transaction)?;

Ok(())
}

pub fn last_proved_block(&self) -> Result<BlockHash, String> {
pub fn last_proved_block(&self) -> Result<BlockHash, DbError> {
match self.db.get(crate::columns::DA, crate::static_keys::LAST_PROVED_BLOCK) {
Some(raw) => {
let felt = StarkFelt::deserialize(&raw[..]).ok_or("Failed to deserialize block hash")?;
let felt = StarkFelt::decode(&mut &raw[..])?;
Ok(BlockHash(felt))
}
None => Err(String::from("can't locate last proved block")),
None => Err(DbError::ValueNotInitialized(
crate::columns::DA,
// Safe because `LAST_PROVED_BLOCK` is valid utf8
unsafe { std::str::from_utf8_unchecked(crate::static_keys::LAST_PROVED_BLOCK) }.to_string(),
)),
}
}

pub fn update_last_proved_block(&self, block_hash: &BlockHash) -> Result<(), String> {
pub fn update_last_proved_block(&self, block_hash: &BlockHash) -> Result<(), DbError> {
let mut transaction = sp_database::Transaction::new();

transaction.set(crate::columns::DA, crate::static_keys::LAST_PROVED_BLOCK, block_hash.0.bytes());
transaction.set(crate::columns::DA, crate::static_keys::LAST_PROVED_BLOCK, &block_hash.0.encode());

self.db.commit(transaction).map_err(|e| format!("{:?}", e))?;
self.db.commit(transaction)?;

Ok(())
}
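The accessors above now return Ok(None) when a key is simply absent and reserve Err(DbError) for actual database or decoding failures. A minimal caller sketch (illustrative only; `da_db: &DaDb<B>` and `block_hash: BlockHash` are assumed to be in scope, inside a function returning Result<_, DbError>):

// Illustrative only: the None arm means the key was never written.
match da_db.cairo_job(&block_hash)? {
    Some(job_id) => log::info!("cairo job for block {}: {}", block_hash, job_id),
    None => log::warn!("no cairo job recorded for block {}", block_hash),
}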
6 changes: 5 additions & 1 deletion crates/client/db/src/error.rs
@@ -3,5 +3,9 @@ pub enum DbError {
#[error("Failed to commit DB Update: `{0}`")]
CommitError(#[from] sp_database::error::DatabaseError),
#[error("Failed to deserialize DB Data: `{0}`")]
DeserializeError(#[from] scale_codec::Error),
DeserializeError(#[from] parity_scale_codec::Error),
#[error("Failed to build Uuid: `{0}`")]
Uuid(#[from] uuid::Error),
#[error("A value was queryied that was not initialized at column: `{0}` key: `{1}`")]
ValueNotInitialized(u32, String),
}
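With the `#[from]` attributes, the `?` operator converts `sp_database::error::DatabaseError`, `parity_scale_codec::Error`, and `uuid::Error` into `DbError` automatically, which is what removes the `map_err(|e| format!("{:?}", e))` calls throughout this commit. A minimal sketch of that propagation (hypothetical helper, not part of the commit):

use parity_scale_codec::Decode;

// Hypothetical helper: `?` turns the codec error into DbError::DeserializeError
// through the #[from] conversion declared above.
fn decode_flag(raw: &[u8]) -> Result<bool, DbError> {
    Ok(bool::decode(&mut &raw[..])?)
}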
15 changes: 14 additions & 1 deletion crates/client/db/src/lib.rs
@@ -16,10 +16,12 @@ pub use error::DbError;

mod mapping_db;
pub use mapping_db::MappingCommitment;
use sierra_classes_db::SierraClassesDb;
use starknet_api::hash::StarkHash;
mod da_db;
mod db_opening_utils;
mod messaging_db;
mod sierra_classes_db;
pub use messaging_db::LastSyncedEventBlock;
mod meta_db;

@@ -49,13 +51,14 @@ pub(crate) mod columns {
// ===== /!\ ===================================================================================
// MUST BE INCREMENTED WHEN A NEW COLUMN IS ADDED
// ===== /!\ ===================================================================================
pub const NUM_COLUMNS: u32 = 7;
pub const NUM_COLUMNS: u32 = 8;

pub const META: u32 = 0;
pub const BLOCK_MAPPING: u32 = 1;
pub const TRANSACTION_MAPPING: u32 = 2;
pub const SYNCED_MAPPING: u32 = 3;
pub const DA: u32 = 4;

/// This column is used to map starknet block hashes to a list of transaction hashes that are
/// contained in the block.
///
@@ -64,6 +67,9 @@

/// This column contains last synchronized L1 block.
pub const MESSAGING: u32 = 6;

/// This column contains the Sierra contract classes
pub const SIERRA_CONTRACT_CLASSES: u32 = 7;
}

pub mod static_keys {
Expand All @@ -82,6 +88,7 @@ pub struct Backend<B: BlockT> {
mapping: Arc<MappingDb<B>>,
da: Arc<DaDb<B>>,
messaging: Arc<MessagingDb<B>>,
sierra_classes: Arc<SierraClassesDb<B>>,
}

/// Returns the Starknet database directory.
@@ -123,6 +130,7 @@ impl<B: BlockT> Backend<B> {
meta: Arc::new(MetaDb { db: db.clone(), _marker: PhantomData }),
da: Arc::new(DaDb { db: db.clone(), _marker: PhantomData }),
messaging: Arc::new(MessagingDb { db: db.clone(), _marker: PhantomData }),
sierra_classes: Arc::new(SierraClassesDb { db: db.clone(), _marker: PhantomData }),
})
}

@@ -146,6 +154,11 @@ impl<B: BlockT> Backend<B> {
&self.messaging
}

/// Returns the Sierra classes database manager
pub fn sierra_classes(&self) -> &Arc<SierraClassesDb<B>> {
&self.sierra_classes
}

/// In the future, we will compute the block global state root asynchronously in the client,
/// using the Starknet-Bonzai-trie.
/// That's what replaces it for now :)
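The new crates/client/db/src/sierra_classes_db.rs module itself is not rendered in this excerpt (see the note at the end of this page). Judging from the other *_db.rs modules in this crate, it plausibly follows the same pattern; the sketch below is an assumption, not the committed file:

// Assumed shape, mirroring DaDb and MappingDb; the real sierra_classes_db.rs is
// not shown here, and the Encode/Decode impls on the starknet_api types are
// assumed to come from the "parity-scale-codec" feature enabled in Cargo.toml.
use std::marker::PhantomData;
use std::sync::Arc;

use parity_scale_codec::{Decode, Encode};
use sp_database::Database;
use sp_runtime::traits::Block as BlockT;
use starknet_api::core::ClassHash;
use starknet_api::state::ContractClass;

use crate::{DbError, DbHash};

pub struct SierraClassesDb<B: BlockT> {
    pub(crate) db: Arc<dyn Database<DbHash>>,
    pub(crate) _marker: PhantomData<B>,
}

impl<B: BlockT> SierraClassesDb<B> {
    pub fn store_sierra_class(&self, class_hash: ClassHash, class: ContractClass) -> Result<(), DbError> {
        let mut transaction = sp_database::Transaction::new();
        transaction.set(crate::columns::SIERRA_CONTRACT_CLASSES, &class_hash.encode(), &class.encode());
        self.db.commit(transaction)?;
        Ok(())
    }

    pub fn get_sierra_class(&self, class_hash: ClassHash) -> Result<Option<ContractClass>, DbError> {
        match self.db.get(crate::columns::SIERRA_CONTRACT_CLASSES, &class_hash.encode()) {
            Some(raw) => Ok(Some(ContractClass::decode(&mut &raw[..])?)),
            None => Ok(None),
        }
    }
}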
28 changes: 14 additions & 14 deletions crates/client/db/src/mapping_db.rs
@@ -2,12 +2,12 @@ use std::marker::PhantomData;
use std::sync::{Arc, Mutex};

// Substrate
use scale_codec::{Decode, Encode};
use parity_scale_codec::{Decode, Encode};
use sp_core::H256;
use sp_database::Database;
use sp_runtime::traits::Block as BlockT;

use crate::DbHash;
use crate::{DbError, DbHash};

/// The mapping to write in db
#[derive(Debug)]
@@ -33,9 +33,9 @@ impl<B: BlockT> MappingDb<B> {
}

/// Check if the given block hash has already been processed
pub fn is_synced(&self, block_hash: &B::Hash) -> Result<bool, String> {
pub fn is_synced(&self, block_hash: &B::Hash) -> Result<bool, DbError> {
match self.db.get(crate::columns::SYNCED_MAPPING, &block_hash.encode()) {
Some(raw) => Ok(bool::decode(&mut &raw[..]).map_err(|e| format!("{:?}", e))?),
Some(raw) => Ok(bool::decode(&mut &raw[..])?),
None => Ok(false),
}
}
@@ -44,28 +44,28 @@ impl<B: BlockT> MappingDb<B> {
///
/// Under some circumstances it can return multiple block hashes, meaning that the result has
/// to be checked against the actual blockchain state in order to find the right one.
pub fn block_hash(&self, starknet_block_hash: &H256) -> Result<Option<Vec<B::Hash>>, String> {
pub fn block_hash(&self, starknet_block_hash: &H256) -> Result<Option<Vec<B::Hash>>, DbError> {
match self.db.get(crate::columns::BLOCK_MAPPING, &starknet_block_hash.encode()) {
Some(raw) => Ok(Some(Vec::<B::Hash>::decode(&mut &raw[..]).map_err(|e| format!("{:?}", e))?)),
Some(raw) => Ok(Some(Vec::<B::Hash>::decode(&mut &raw[..])?)),
None => Ok(None),
}
}

/// Register that a Substrate block has been seen, without it containing a Starknet one
pub fn write_none(&self, block_hash: B::Hash) -> Result<(), String> {
pub fn write_none(&self, block_hash: B::Hash) -> Result<(), DbError> {
let _lock = self.write_lock.lock();

let mut transaction = sp_database::Transaction::new();

transaction.set(crate::columns::SYNCED_MAPPING, &block_hash.encode(), &true.encode());

self.db.commit(transaction).map_err(|e| format!("{:?}", e))?;
self.db.commit(transaction)?;

Ok(())
}

/// Register that a Substrate block has been seen and map it to the Starknet block it contains
pub fn write_hashes(&self, commitment: MappingCommitment<B>) -> Result<(), String> {
pub fn write_hashes(&self, commitment: MappingCommitment<B>) -> Result<(), DbError> {
let _lock = self.write_lock.lock();

let mut transaction = sp_database::Transaction::new();
@@ -108,7 +108,7 @@ impl<B: BlockT> MappingDb<B> {
);
}

self.db.commit(transaction).map_err(|e| format!("{:?}", e))?;
self.db.commit(transaction)?;

Ok(())
}
@@ -121,9 +121,9 @@ impl<B: BlockT> MappingDb<B> {
/// * `transaction_hash` - the transaction hash to search for. H256 is used here because it's a
/// native type of Substrate, and we are sure its SCALE encoding is optimized and will not
/// change.
pub fn block_hash_from_transaction_hash(&self, transaction_hash: H256) -> Result<Option<B::Hash>, String> {
pub fn block_hash_from_transaction_hash(&self, transaction_hash: H256) -> Result<Option<B::Hash>, DbError> {
match self.db.get(crate::columns::TRANSACTION_MAPPING, &transaction_hash.encode()) {
Some(raw) => Ok(Some(<B::Hash>::decode(&mut &raw[..]).map_err(|e| format!("{:?}", e))?)),
Some(raw) => Ok(Some(<B::Hash>::decode(&mut &raw[..])?)),
None => Ok(None),
}
}
@@ -142,14 +142,14 @@ impl<B: BlockT> MappingDb<B> {
///
/// - The cache is disabled.
/// - The provided `starknet_hash` is not present in the cache.
pub fn cached_transaction_hashes_from_block_hash(&self, starknet_hash: H256) -> Result<Option<Vec<H256>>, String> {
pub fn cached_transaction_hashes_from_block_hash(&self, starknet_hash: H256) -> Result<Option<Vec<H256>>, DbError> {
if !self.cache_more_things {
// The cache is not enabled, no need to even touch the database.
return Ok(None);
}

match self.db.get(crate::columns::STARKNET_TRANSACTION_HASHES_CACHE, &starknet_hash.encode()) {
Some(raw) => Ok(Some(Vec::<H256>::decode(&mut &raw[..]).map_err(|e| format!("{:?}", e))?)),
Some(raw) => Ok(Some(Vec::<H256>::decode(&mut &raw[..])?)),
None => Ok(None),
}
}
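A short caller sketch for the cache accessor documented above (illustrative only; `mapping: &MappingDb<B>` and a Starknet block hash `sn_hash: H256` are assumed to be in scope, inside a function returning Result<_, DbError>):

// Illustrative only: Ok(None) covers both "cache disabled" and "block not cached".
match mapping.cached_transaction_hashes_from_block_hash(sn_hash)? {
    Some(tx_hashes) => log::debug!("{} cached tx hashes for {:?}", tx_hashes.len(), sn_hash),
    None => log::debug!("no cached tx hashes for {:?}", sn_hash),
}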
2 changes: 1 addition & 1 deletion crates/client/db/src/messaging_db.rs
@@ -2,7 +2,7 @@ use std::marker::PhantomData;
use std::sync::Arc;

// Substrate
use scale_codec::{Decode, Encode};
use parity_scale_codec::{Decode, Encode};
use sp_database::Database;
use sp_runtime::traits::Block as BlockT;

12 changes: 6 additions & 6 deletions crates/client/db/src/meta_db.rs
@@ -2,11 +2,11 @@ use std::marker::PhantomData;
use std::sync::Arc;

// Substrate
use scale_codec::{Decode, Encode};
use parity_scale_codec::{Decode, Encode};
use sp_database::Database;
use sp_runtime::traits::Block as BlockT;

use crate::DbHash;
use crate::{DbError, DbHash};

/// Allow interaction with the meta db
///
@@ -19,20 +19,20 @@ pub struct MetaDb<B: BlockT> {

impl<B: BlockT> MetaDb<B> {
/// Retrieve the current tips of the synced chain
pub fn current_syncing_tips(&self) -> Result<Vec<B::Hash>, String> {
pub fn current_syncing_tips(&self) -> Result<Vec<B::Hash>, DbError> {
match self.db.get(crate::columns::META, crate::static_keys::CURRENT_SYNCING_TIPS) {
Some(raw) => Ok(Vec::<B::Hash>::decode(&mut &raw[..]).map_err(|e| format!("{:?}", e))?),
Some(raw) => Ok(Vec::<B::Hash>::decode(&mut &raw[..])?),
None => Ok(Vec::new()),
}
}

/// Store the current tips of the synced chain
pub fn write_current_syncing_tips(&self, tips: Vec<B::Hash>) -> Result<(), String> {
pub fn write_current_syncing_tips(&self, tips: Vec<B::Hash>) -> Result<(), DbError> {
let mut transaction = sp_database::Transaction::new();

transaction.set(crate::columns::META, crate::static_keys::CURRENT_SYNCING_TIPS, &tips.encode());

self.db.commit(transaction).map_err(|e| format!("{:?}", e))?;
self.db.commit(transaction)?;

Ok(())
}
(The remaining changed file diffs are not rendered in this view.)
