diff --git a/Cargo.lock b/Cargo.lock index a3a7a95d2..cc73b1c87 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2721,6 +2721,7 @@ dependencies = [ "miden-node-proto-build", "miden-node-store", "miden-node-test-macro", + "miden-node-tracing", "miden-node-utils", "miden-protocol", "miden-remote-prover-client", @@ -2761,6 +2762,7 @@ dependencies = [ "indexmap 2.12.1", "miden-node-proto", "miden-node-test-macro", + "miden-node-tracing", "miden-node-utils", "miden-protocol", "miden-remote-prover-client", @@ -2858,6 +2860,7 @@ dependencies = [ "miden-node-proto", "miden-node-proto-build", "miden-node-test-macro", + "miden-node-tracing", "miden-node-utils", "miden-protocol", "miden-standards", @@ -2907,6 +2910,25 @@ dependencies = [ "syn 2.0.111", ] +[[package]] +name = "miden-node-tracing" +version = "0.13.0" +dependencies = [ + "miden-node-tracing-macro", + "miden-node-utils", + "opentelemetry", + "tracing", +] + +[[package]] +name = "miden-node-tracing-macro" +version = "0.13.0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.111", +] + [[package]] name = "miden-node-utils" version = "0.13.0" diff --git a/Cargo.toml b/Cargo.toml index 53e5182bb..ec17911b4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,6 +12,8 @@ members = [ "crates/rpc", "crates/store", "crates/test-macro", + "crates/tracing", + "crates/tracing-macro", "crates/utils", "crates/validator", "proto", @@ -44,6 +46,8 @@ miden-node-proto-build = { path = "proto", version = "0.13" } miden-node-rpc = { path = "crates/rpc", version = "0.13" } miden-node-store = { path = "crates/store", version = "0.13" } miden-node-test-macro = { path = "crates/test-macro" } +miden-node-tracing = { path = "crates/tracing", version = "0.13" } +miden-node-tracing-macro = { path = "crates/tracing-macro", version = "0.13" } miden-node-utils = { path = "crates/utils", version = "0.13" } miden-node-validator = { path = "crates/validator", version = "0.13" } miden-remote-prover-client = { path = "crates/remote-prover-client", version = "0.13" } diff --git a/crates/block-producer/Cargo.toml b/crates/block-producer/Cargo.toml index e5e5511ad..d4f4e2745 100644 --- a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -25,6 +25,7 @@ itertools = { workspace = true } miden-block-prover = { workspace = true } miden-node-proto = { workspace = true } miden-node-proto-build = { features = ["internal"], workspace = true } +miden-node-tracing = { workspace = true } miden-node-utils = { features = ["testing"], workspace = true } miden-protocol = { default-features = true, workspace = true } miden-remote-prover-client = { features = ["batch-prover", "block-prover"], workspace = true } diff --git a/crates/block-producer/src/batch_builder/mod.rs b/crates/block-producer/src/batch_builder/mod.rs index e3cc714c2..c505fd214 100644 --- a/crates/block-producer/src/batch_builder/mod.rs +++ b/crates/block-producer/src/batch_builder/mod.rs @@ -6,6 +6,7 @@ use std::time::Duration; use futures::never::Never; use futures::{FutureExt, TryFutureExt}; use miden_node_proto::domain::batch::BatchInputs; +use miden_node_tracing::instrument_with_err_report; use miden_node_utils::tracing::OpenTelemetrySpanExt; use miden_protocol::MIN_PROOF_SECURITY_LEVEL; use miden_protocol::batch::{BatchId, ProposedBatch, ProvenBatch}; @@ -14,7 +15,7 @@ use miden_tx_batch_prover::LocalBatchProver; use rand::Rng; use tokio::task::JoinSet; use tokio::time; -use tracing::{Instrument, Span, instrument}; +use tracing::{Instrument, Span}; use url::Url; use 
crate::domain::batch::SelectedBatch; @@ -102,7 +103,7 @@ impl BatchBuilder { } } - #[instrument(parent = None, target = COMPONENT, name = "batch_builder.build_batch", skip_all)] + #[instrument_with_err_report(parent = None, target = COMPONENT, name = "batch_builder.build_batch", skip_all)] async fn build_batch(&mut self, mempool: SharedMempool) { Span::current().set_attribute("workers.count", self.worker_pool.len()); @@ -130,7 +131,7 @@ impl BatchBuilder { /// require the same logic as here to handle the case when the pool is at capacity. This /// design was chosen instead as it removes this branching logic by "always" having the pool /// at max capacity. Instead completed workers wait to be culled by this function. - #[instrument(target = COMPONENT, name = "batch_builder.wait_for_available_worker", skip_all)] + #[instrument_with_err_report(target = COMPONENT, name = "batch_builder.wait_for_available_worker", skip_all)] async fn wait_for_available_worker(&mut self) { // We must crash here because otherwise we have a batch that has been selected from the // mempool, but which is now in limbo. This effectively corrupts the mempool. @@ -189,12 +190,12 @@ impl BatchJob { .await; } - #[instrument(target = COMPONENT, name = "batch_builder.select_batch", skip_all)] + #[instrument_with_err_report(target = COMPONENT, name = "batch_builder.select_batch", skip_all)] async fn select_batch(&self) -> Option { self.mempool.lock().await.select_batch() } - #[instrument(target = COMPONENT, name = "batch_builder.get_batch_inputs", skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, name = "batch_builder.get_batch_inputs", skip_all, err)] async fn get_batch_inputs( &self, batch: SelectedBatch, @@ -217,7 +218,7 @@ impl BatchJob { .map(|inputs| (batch, inputs)) } - #[instrument(target = COMPONENT, name = "batch_builder.propose_batch", skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, name = "batch_builder.propose_batch", skip_all, err)] async fn propose_batch( selected: SelectedBatch, inputs: BatchInputs, @@ -237,7 +238,7 @@ impl BatchJob { .map_err(BuildBatchError::ProposeBatchError) } - #[instrument(target = COMPONENT, name = "batch_builder.prove_batch", skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, name = "batch_builder.prove_batch", skip_all, err)] async fn prove_batch( &self, proposed_batch: ProposedBatch, @@ -267,7 +268,7 @@ impl BatchJob { } } - #[instrument(target = COMPONENT, name = "batch_builder.inject_failure", skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, name = "batch_builder.inject_failure", skip_all, err)] async fn inject_failure(&self, value: T) -> Result { let roll = rand::rng().random::(); @@ -281,12 +282,12 @@ impl BatchJob { } } - #[instrument(target = COMPONENT, name = "batch_builder.commit_batch", skip_all)] + #[instrument_with_err_report(target = COMPONENT, name = "batch_builder.commit_batch", skip_all)] async fn commit_batch(&self, batch: Arc) { self.mempool.lock().await.commit_batch(batch); } - #[instrument(target = COMPONENT, name = "batch_builder.rollback_batch", skip_all)] + #[instrument_with_err_report(target = COMPONENT, name = "batch_builder.rollback_batch", skip_all)] async fn rollback_batch(&self, batch_id: BatchId) { self.mempool.lock().await.rollback_batch(batch_id); } diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index e63bc8184..d7e1c2f39 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ 
b/crates/block-producer/src/block_builder/mod.rs @@ -4,6 +4,7 @@ use std::sync::Arc; use futures::FutureExt; use futures::never::Never; use miden_block_prover::LocalBlockProver; +use miden_node_tracing::instrument_with_err_report; use miden_node_utils::tracing::OpenTelemetrySpanExt; use miden_protocol::MIN_PROOF_SECURITY_LEVEL; use miden_protocol::batch::{OrderedBatches, ProvenBatch}; @@ -22,7 +23,7 @@ use miden_protocol::transaction::{OrderedTransactionHeaders, TransactionHeader}; use miden_remote_prover_client::remote_prover::block_prover::RemoteBlockProver; use rand::Rng; use tokio::time::Duration; -use tracing::{Span, info, instrument}; +use tracing::{Span, info}; use url::Url; use crate::errors::BuildBlockError; @@ -116,7 +117,7 @@ impl BlockBuilder { /// - Each stage has its own child span and are free to add further field data. /// - A failed stage will emit an error event, and both its own span and the root span will be /// marked as errors. - #[instrument(parent = None, target = COMPONENT, name = "block_builder.build_block", skip_all)] + #[instrument_with_err_report(parent = None, target = COMPONENT, name = "block_builder.build_block", skip_all)] async fn build_block(&self, mempool: &SharedMempool) { use futures::TryFutureExt; @@ -145,7 +146,7 @@ impl BlockBuilder { .await } - #[instrument(target = COMPONENT, name = "block_builder.select_block", skip_all)] + #[instrument_with_err_report(target = COMPONENT, name = "block_builder.select_block", skip_all)] async fn select_block(mempool: &SharedMempool) -> SelectedBlock { let (block_number, batches) = mempool.lock().await.select_block(); SelectedBlock { block_number, batches } @@ -167,7 +168,7 @@ impl BlockBuilder { /// which nullifiers the block will actually create, we fetch witnesses for all nullifiers /// created by batches. If we knew that a certain note will be erased, we would not have to /// supply a nullifier witness for it. 
- #[instrument(target = COMPONENT, name = "block_builder.get_block_inputs", skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, name = "block_builder.get_block_inputs", skip_all, err)] async fn get_block_inputs( &self, selected_block: SelectedBlock, @@ -210,7 +211,7 @@ impl BlockBuilder { Ok(BlockBatchesAndInputs { batches, inputs }) } - #[instrument(target = COMPONENT, name = "block_builder.propose_block", skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, name = "block_builder.propose_block", skip_all, err)] async fn propose_block( &self, batches_inputs: BlockBatchesAndInputs, @@ -224,7 +225,7 @@ impl BlockBuilder { Ok((proposed_block, inputs)) } - #[instrument(target = COMPONENT, name = "block_builder.validate_block", skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, name = "block_builder.validate_block", skip_all, err)] async fn validate_block( &self, proposed_block: ProposedBlock, @@ -256,7 +257,7 @@ impl BlockBuilder { Ok((ordered_batches, block_inputs, header, signature, body)) } - #[instrument(target = COMPONENT, name = "block_builder.prove_block", skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, name = "block_builder.prove_block", skip_all, err)] async fn prove_block( &self, ordered_batches: OrderedBatches, @@ -288,7 +289,7 @@ impl BlockBuilder { Ok(proven_block) } - #[instrument(target = COMPONENT, name = "block_builder.commit_block", skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, name = "block_builder.commit_block", skip_all, err)] async fn commit_block( &self, mempool: &SharedMempool, @@ -304,12 +305,12 @@ impl BlockBuilder { Ok(()) } - #[instrument(target = COMPONENT, name = "block_builder.rollback_block", skip_all)] + #[instrument_with_err_report(target = COMPONENT, name = "block_builder.rollback_block", skip_all)] async fn rollback_block(&self, mempool: &SharedMempool, block: BlockNumber) { mempool.lock().await.rollback_block(block); } - #[instrument(target = COMPONENT, name = "block_builder.simulate_proving", skip_all)] + #[instrument_with_err_report(target = COMPONENT, name = "block_builder.simulate_proving", skip_all)] async fn simulate_proving(&self) { let proving_duration = rand::rng().random_range(self.simulated_proof_time.clone()); @@ -320,7 +321,7 @@ impl BlockBuilder { tokio::time::sleep(proving_duration).await; } - #[instrument(target = COMPONENT, name = "block_builder.inject_failure", skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, name = "block_builder.inject_failure", skip_all, err)] fn inject_failure(&self, value: T) -> Result { let roll = rand::rng().random::(); @@ -449,7 +450,7 @@ impl BlockProver { Self::Remote(RemoteBlockProver::new(endpoint)) } - #[instrument(target = COMPONENT, skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, skip_all, err)] async fn prove( &self, tx_batches: OrderedBatches, diff --git a/crates/block-producer/src/store/mod.rs b/crates/block-producer/src/store/mod.rs index a82a60582..b0c61ead6 100644 --- a/crates/block-producer/src/store/mod.rs +++ b/crates/block-producer/src/store/mod.rs @@ -7,6 +7,7 @@ use miden_node_proto::clients::{Builder, StoreBlockProducerClient}; use miden_node_proto::domain::batch::BatchInputs; use miden_node_proto::errors::{ConversionError, MissingFieldHelper}; use miden_node_proto::{AccountState, generated as proto}; +use miden_node_tracing::instrument_with_err_report; use miden_node_utils::formatting::format_opt; use miden_protocol::Word; use miden_protocol::account::AccountId; @@ -14,7 
+15,7 @@ use miden_protocol::block::{BlockHeader, BlockInputs, BlockNumber, ProvenBlock}; use miden_protocol::note::Nullifier; use miden_protocol::transaction::ProvenTransaction; use miden_protocol::utils::Serializable; -use tracing::{debug, info, instrument}; +use tracing::{debug, info}; use url::Url; use crate::COMPONENT; @@ -136,7 +137,7 @@ impl StoreClient { } /// Returns the latest block's header from the store. - #[instrument(target = COMPONENT, name = "store.client.latest_header", skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, name = "store.client.latest_header", skip_all, err)] pub async fn latest_header(&self) -> Result { let response = self .client @@ -154,7 +155,7 @@ impl StoreClient { BlockHeader::try_from(response).map_err(Into::into) } - #[instrument(target = COMPONENT, name = "store.client.get_tx_inputs", skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, name = "store.client.get_tx_inputs", skip_all, err)] pub async fn get_tx_inputs( &self, proven_tx: &ProvenTransaction, @@ -199,7 +200,7 @@ impl StoreClient { Ok(tx_inputs) } - #[instrument(target = COMPONENT, name = "store.client.get_block_inputs", skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, name = "store.client.get_block_inputs", skip_all, err)] pub async fn get_block_inputs( &self, updated_accounts: impl Iterator + Send, @@ -221,7 +222,7 @@ impl StoreClient { store_response.try_into().map_err(Into::into) } - #[instrument(target = COMPONENT, name = "store.client.get_batch_inputs", skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, name = "store.client.get_batch_inputs", skip_all, err)] pub async fn get_batch_inputs( &self, block_references: impl Iterator + Send, @@ -237,7 +238,7 @@ impl StoreClient { store_response.try_into().map_err(Into::into) } - #[instrument(target = COMPONENT, name = "store.client.apply_block", skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, name = "store.client.apply_block", skip_all, err)] pub async fn apply_block(&self, block: &ProvenBlock) -> Result<(), StoreError> { let request = tonic::Request::new(proto::blockchain::Block { block: block.to_bytes() }); diff --git a/crates/block-producer/src/validator/mod.rs b/crates/block-producer/src/validator/mod.rs index 9844e2d9b..5763f637a 100644 --- a/crates/block-producer/src/validator/mod.rs +++ b/crates/block-producer/src/validator/mod.rs @@ -1,10 +1,11 @@ use miden_node_proto::clients::{Builder, ValidatorClient}; use miden_node_proto::generated as proto; +use miden_node_tracing::instrument_with_err_report; use miden_protocol::block::ProposedBlock; use miden_protocol::crypto::dsa::ecdsa_k256_keccak::Signature; use miden_protocol::utils::{Deserializable, DeserializationError, Serializable}; use thiserror::Error; -use tracing::{info, instrument}; +use tracing::info; use url::Url; use crate::COMPONENT; @@ -47,7 +48,7 @@ impl BlockProducerValidatorClient { Self { client: validator } } - #[instrument(target = COMPONENT, name = "validator.client.validate_block", skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, name = "validator.client.validate_block", skip_all, err)] pub async fn sign_block( &self, proposed_block: ProposedBlock, diff --git a/crates/ntx-builder/Cargo.toml b/crates/ntx-builder/Cargo.toml index 06ed8eb3b..af2b21bd8 100644 --- a/crates/ntx-builder/Cargo.toml +++ b/crates/ntx-builder/Cargo.toml @@ -18,6 +18,7 @@ anyhow = { workspace = true } futures = { workspace = true } indexmap = { workspace = true } miden-node-proto = { workspace = 
true } +miden-node-tracing = { workspace = true } miden-node-utils = { workspace = true } miden-protocol = { default-features = true, workspace = true } miden-remote-prover-client = { features = ["tx-prover"], workspace = true } diff --git a/crates/ntx-builder/src/actor/execute.rs b/crates/ntx-builder/src/actor/execute.rs index 83c1d09c9..be7d3f6e4 100644 --- a/crates/ntx-builder/src/actor/execute.rs +++ b/crates/ntx-builder/src/actor/execute.rs @@ -1,5 +1,6 @@ use std::collections::BTreeSet; +use miden_node_tracing::instrument_with_err_report; use miden_node_utils::lru_cache::LruCache; use miden_node_utils::tracing::OpenTelemetrySpanExt; use miden_protocol::account::{ @@ -188,7 +189,7 @@ impl NtxContext { /// Returns an [`NtxError`] if: /// - The consumability check fails unexpectedly. /// - All notes fail the check (i.e., no note is consumable). - #[instrument(target = COMPONENT, name = "ntx.execute_transaction.filter_notes", skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, name = "ntx.execute_transaction.filter_notes", skip_all, err)] async fn filter_notes( &self, data_store: &NtxDataStore, @@ -223,7 +224,7 @@ impl NtxContext { } /// Creates an executes a transaction with the network account and the given set of notes. - #[instrument(target = COMPONENT, name = "ntx.execute_transaction.execute", skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, name = "ntx.execute_transaction.execute", skip_all, err)] async fn execute( &self, data_store: &NtxDataStore, @@ -244,7 +245,7 @@ impl NtxContext { /// Delegates the transaction proof to the remote prover if configured, otherwise performs the /// proof locally. - #[instrument(target = COMPONENT, name = "ntx.execute_transaction.prove", skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, name = "ntx.execute_transaction.prove", skip_all, err)] async fn prove(&self, tx_inputs: TransactionInputs) -> NtxResult { if let Some(remote) = &self.prover { remote.prove(tx_inputs).await @@ -257,7 +258,7 @@ impl NtxContext { } /// Submits the transaction to the block producer. 
- #[instrument(target = COMPONENT, name = "ntx.execute_transaction.submit", skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, name = "ntx.execute_transaction.submit", skip_all, err)] async fn submit(&self, tx: ProvenTransaction) -> NtxResult<()> { self.block_producer .submit_proven_transaction(tx) diff --git a/crates/ntx-builder/src/block_producer.rs b/crates/ntx-builder/src/block_producer.rs index 7c1af9d8f..9feaeb302 100644 --- a/crates/ntx-builder/src/block_producer.rs +++ b/crates/ntx-builder/src/block_producer.rs @@ -10,6 +10,7 @@ use miden_protocol::transaction::ProvenTransaction; use miden_tx::utils::Serializable; use tokio_stream::StreamExt; use tonic::Status; +// Use standard tracing for Status errors (which don't impl std::error::Error) use tracing::{info, instrument}; use url::Url; diff --git a/crates/ntx-builder/src/store.rs b/crates/ntx-builder/src/store.rs index 447571a5a..7d92aa0fc 100644 --- a/crates/ntx-builder/src/store.rs +++ b/crates/ntx-builder/src/store.rs @@ -7,6 +7,7 @@ use miden_node_proto::errors::ConversionError; use miden_node_proto::generated::rpc::BlockRange; use miden_node_proto::generated::{self as proto}; use miden_node_proto::try_convert; +use miden_node_tracing::instrument_with_err_report; use miden_protocol::Word; use miden_protocol::account::{Account, AccountId}; use miden_protocol::block::{BlockHeader, BlockNumber}; @@ -75,7 +76,7 @@ impl StoreClient { } } - #[instrument(target = COMPONENT, name = "store.client.get_latest_blockchain_data", skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, name = "store.client.get_latest_blockchain_data", skip_all, err)] async fn get_latest_blockchain_data( &self, ) -> Result, StoreError> { @@ -106,7 +107,7 @@ impl StoreClient { } } - #[instrument(target = COMPONENT, name = "store.client.get_network_account", skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, name = "store.client.get_network_account", skip_all, err)] pub async fn get_network_account( &self, prefix: NetworkAccountPrefix, @@ -137,7 +138,7 @@ impl StoreClient { /// Returns the list of unconsumed network notes for a specific network account up to a /// specified block. - #[instrument(target = COMPONENT, name = "store.client.get_unconsumed_network_notes", skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, name = "store.client.get_unconsumed_network_notes", skip_all, err)] pub async fn get_unconsumed_network_notes( &self, network_account_prefix: NetworkAccountPrefix, @@ -181,7 +182,7 @@ impl StoreClient { /// Each page can return up to `MAX_RESPONSE_PAYLOAD_BYTES / AccountId::SERIALIZED_SIZE` /// accounts (~289,000). With `100_000` iterations, which is assumed to be sufficient for the /// foreseeable future. 
- #[instrument(target = COMPONENT, name = "store.client.get_network_account_ids", skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, name = "store.client.get_network_account_ids", skip_all, err)] pub async fn get_network_account_ids(&self) -> Result, StoreError> { const MAX_ITERATIONS: u32 = 100_000; @@ -231,7 +232,7 @@ impl StoreClient { Ok(ids) } - #[instrument(target = COMPONENT, name = "store.client.get_note_script_by_root", skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, name = "store.client.get_note_script_by_root", skip_all, err)] pub async fn get_note_script_by_root( &self, root: Word, diff --git a/crates/rpc/src/server/api.rs b/crates/rpc/src/server/api.rs index 86bb35e59..8eecef973 100644 --- a/crates/rpc/src/server/api.rs +++ b/crates/rpc/src/server/api.rs @@ -25,7 +25,9 @@ use miden_protocol::utils::serde::{Deserializable, Serializable}; use miden_protocol::{MIN_PROOF_SECURITY_LEVEL, Word}; use miden_tx::TransactionVerifier; use tonic::{IntoRequest, Request, Response, Status}; -use tracing::{debug, info, instrument}; +// Using standard tracing::instrument because tonic::Status doesn't impl std::error::Error +use tracing::instrument; +use tracing::{debug, info}; use url::Url; use crate::COMPONENT; diff --git a/crates/store/Cargo.toml b/crates/store/Cargo.toml index 22037e4b9..02dfde41e 100644 --- a/crates/store/Cargo.toml +++ b/crates/store/Cargo.toml @@ -26,6 +26,7 @@ hex = { version = "0.4" } indexmap = { workspace = true } miden-node-proto = { workspace = true } miden-node-proto-build = { features = ["internal"], workspace = true } +miden-node-tracing = { workspace = true } miden-node-utils = { workspace = true } miden-standards = { workspace = true } # TODO remove `testing` from `miden-protocol`, required for `BlockProof::new_dummy` diff --git a/crates/store/src/db/migrations.rs b/crates/store/src/db/migrations.rs index 01521e578..26841a6e1 100644 --- a/crates/store/src/db/migrations.rs +++ b/crates/store/src/db/migrations.rs @@ -1,6 +1,6 @@ use diesel::SqliteConnection; use diesel_migrations::{EmbeddedMigrations, MigrationHarness, embed_migrations}; -use tracing::instrument; +use miden_node_tracing::instrument_with_err_report; use crate::COMPONENT; use crate::db::schema_hash::verify_schema; @@ -10,7 +10,7 @@ use crate::db::schema_hash::verify_schema; pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("src/db/migrations"); // TODO we have not tested this in practice! 
-#[instrument(level = "debug", target = COMPONENT, skip_all, err)] +#[instrument_with_err_report(level = "debug", target = COMPONENT, skip_all, err)] pub fn apply_migrations( conn: &mut SqliteConnection, ) -> std::result::Result<(), crate::errors::DatabaseError> { diff --git a/crates/store/src/db/mod.rs b/crates/store/src/db/mod.rs index 3d54a501f..5ca2327a3 100644 --- a/crates/store/src/db/mod.rs +++ b/crates/store/src/db/mod.rs @@ -6,6 +6,7 @@ use anyhow::Context; use diesel::{Connection, RunQueryDsl, SqliteConnection}; use miden_node_proto::domain::account::{AccountInfo, AccountSummary, NetworkAccountPrefix}; use miden_node_proto::generated as proto; +use miden_node_tracing::instrument_with_err_report; use miden_protocol::Word; use miden_protocol::account::{AccountHeader, AccountId, AccountStorage}; use miden_protocol::asset::{Asset, AssetVaultKey}; @@ -223,7 +224,7 @@ impl Db { target = COMPONENT, name = "store.database.bootstrap", skip_all, - fields(path=%database_filepath.display()) + fields(path=%database_filepath.display()), err, )] pub fn bootstrap(database_filepath: PathBuf, genesis: &GenesisBlock) -> anyhow::Result<()> { @@ -305,7 +306,7 @@ impl Db { } /// Open a connection to the DB and apply any pending migrations. - #[instrument(target = COMPONENT, skip_all)] + #[instrument_with_err_report(target = COMPONENT, skip_all)] pub async fn load(database_filepath: PathBuf) -> Result { let manager = ConnectionManager::new(database_filepath.to_str().unwrap()); let pool = deadpool_diesel::Pool::builder(manager).max_size(16).build()?; @@ -322,7 +323,7 @@ impl Db { } /// Loads all the nullifiers from the DB. - #[instrument(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] + #[instrument_with_err_report(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] pub(crate) async fn select_all_nullifiers(&self) -> Result> { self.transact("all nullifiers", move |conn| { let nullifiers = queries::select_all_nullifiers(conn)?; @@ -332,7 +333,7 @@ impl Db { } /// Loads the nullifiers that match the prefixes from the DB. - #[instrument(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] + #[instrument_with_err_report(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] pub async fn select_nullifiers_by_prefix( &self, prefix_len: u32, @@ -357,7 +358,7 @@ impl Db { /// Search for a [`BlockHeader`] from the database by its `block_num`. /// /// When `block_number` is [None], the latest block header is returned. - #[instrument(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] + #[instrument_with_err_report(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] pub async fn select_block_header_by_block_num( &self, maybe_block_number: Option, @@ -370,7 +371,7 @@ impl Db { } /// Loads multiple block headers from the DB. - #[instrument(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] + #[instrument_with_err_report(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] pub async fn select_block_headers( &self, blocks: impl Iterator + Send + 'static, @@ -383,7 +384,7 @@ impl Db { } /// Loads all the block headers from the DB. 
- #[instrument(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] + #[instrument_with_err_report(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] pub async fn select_all_block_headers(&self) -> Result> { self.transact("all block headers", |conn| { let raw = queries::select_all_block_headers(conn)?; @@ -393,7 +394,7 @@ impl Db { } /// TODO marked for removal, replace with paged version - #[instrument(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] + #[instrument_with_err_report(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] pub async fn select_all_account_commitments(&self) -> Result> { self.transact("read all account commitments", move |conn| { queries::select_all_account_commitments(conn) @@ -402,7 +403,7 @@ impl Db { } /// Returns all account IDs that have public state. - #[instrument(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] + #[instrument_with_err_report(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] pub async fn select_all_public_account_ids(&self) -> Result> { self.transact("read all public account IDs", move |conn| { queries::select_all_public_account_ids(conn) @@ -411,14 +412,14 @@ impl Db { } /// Loads public account details from the DB. - #[instrument(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] + #[instrument_with_err_report(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] pub async fn select_account(&self, id: AccountId) -> Result { self.transact("Get account details", move |conn| queries::select_account(conn, id)) .await } /// Loads public account details from the DB based on the account ID's prefix. - #[instrument(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] + #[instrument_with_err_report(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] pub async fn select_network_account_by_prefix( &self, id_prefix: u32, @@ -442,7 +443,7 @@ impl Db { /// - A vector of network account IDs. /// - The last block number that was fully included in the result. When truncated, this will be /// less than the requested range end. - #[instrument(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] + #[instrument_with_err_report(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] pub async fn select_all_network_account_ids( &self, block_range: RangeInclusive, @@ -458,7 +459,7 @@ impl Db { /// This method queries the decomposed storage tables and reconstructs the full /// `AccountStorage` with SMT backing for Map slots. 
// TODO split querying the header from the content - #[instrument(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] + #[instrument_with_err_report(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] pub async fn select_account_storage_at_block( &self, account_id: AccountId, @@ -471,7 +472,7 @@ impl Db { } /// Queries vault assets at a specific block - #[instrument(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] + #[instrument_with_err_report(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] pub async fn select_account_vault_at_block( &self, account_id: AccountId, @@ -511,7 +512,7 @@ impl Db { .await } - #[instrument(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] + #[instrument_with_err_report(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] pub async fn get_state_sync( &self, block_number: BlockNumber, @@ -524,7 +525,7 @@ impl Db { .await } - #[instrument(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] + #[instrument_with_err_report(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] pub async fn get_note_sync( &self, block_range: RangeInclusive, @@ -538,7 +539,7 @@ impl Db { /// Loads all the [`miden_protocol::note::Note`]s matching a certain [`NoteId`] from the /// database. - #[instrument(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] + #[instrument_with_err_report(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] pub async fn select_notes_by_id(&self, note_ids: Vec) -> Result> { self.transact("note by id", move |conn| { queries::select_notes_by_id(conn, note_ids.as_slice()) @@ -547,7 +548,7 @@ impl Db { } /// Returns all note commitments from the DB that match the provided ones. - #[instrument(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] + #[instrument_with_err_report(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] pub async fn select_existing_note_commitments( &self, note_commitments: Vec, @@ -559,7 +560,7 @@ impl Db { } /// Loads inclusion proofs for notes matching the given note commitments. - #[instrument(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] + #[instrument_with_err_report(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] pub async fn select_note_inclusion_proofs( &self, note_commitments: BTreeSet, @@ -575,7 +576,7 @@ impl Db { /// `allow_acquire` and `acquire_done` are used to synchronize writes to the DB with writes to /// the in-memory trees. Further details available on [`super::state::State::apply_block`]. // TODO: This span is logged in a root span, we should connect it to the parent one. 
- #[instrument(target = COMPONENT, skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, skip_all, err)] pub async fn apply_block( &self, allow_acquire: oneshot::Sender<()>, diff --git a/crates/store/src/db/schema_hash.rs b/crates/store/src/db/schema_hash.rs index 28e480fc0..26f2485ad 100644 --- a/crates/store/src/db/schema_hash.rs +++ b/crates/store/src/db/schema_hash.rs @@ -11,7 +11,7 @@ use diesel::{Connection, RunQueryDsl, SqliteConnection}; use diesel_migrations::MigrationHarness; -use tracing::instrument; +use miden_node_tracing::instrument_with_err_report; use crate::COMPONENT; use crate::db::migrations::MIGRATIONS; @@ -83,7 +83,7 @@ fn compute_expected_schema() -> Result, SchemaVerificationErro /// # Errors /// /// Returns `SchemaVerificationError::Mismatch` if schemas differ. -#[instrument(level = "info", target = COMPONENT, skip_all, err)] +#[instrument_with_err_report(level = "info", target = COMPONENT, skip_all, err)] pub fn verify_schema(conn: &mut SqliteConnection) -> Result<(), SchemaVerificationError> { let expected = compute_expected_schema()?; let actual = extract_schema(conn)?; diff --git a/crates/store/src/state.rs b/crates/store/src/state.rs index 43e653dcf..99c196457 100644 --- a/crates/store/src/state.rs +++ b/crates/store/src/state.rs @@ -21,6 +21,7 @@ use miden_node_proto::domain::account::{ StorageMapRequest, }; use miden_node_proto::domain::batch::BatchInputs; +use miden_node_tracing::instrument_with_err_report; use miden_node_utils::ErrorReport; use miden_node_utils::formatting::format_array; use miden_protocol::account::delta::AccountUpdateDetails; @@ -41,7 +42,7 @@ use miden_protocol::transaction::{OutputNote, PartialBlockchain}; use miden_protocol::utils::Serializable; use miden_protocol::{AccountError, Word}; use tokio::sync::{Mutex, RwLock, oneshot}; -use tracing::{info, info_span, instrument}; +use tracing::{info, info_span}; use crate::accounts::{AccountTreeWithHistory, HistoricalError}; use crate::blocks::BlockStore; @@ -133,7 +134,7 @@ impl State { // -------------------------------------------------------------------------------------------- /// Loads the state from the `db`. - #[instrument(target = COMPONENT, skip_all)] + #[instrument_with_err_report(target = COMPONENT, skip_all)] pub async fn load(data_path: &Path) -> Result { let data_directory = DataDirectory::load(data_path.to_path_buf()) .map_err(StateInitializationError::DataDirectoryLoadError)?; @@ -191,7 +192,7 @@ impl State { /// released. // TODO: This span is logged in a root span, we should connect it to the parent span. #[allow(clippy::too_many_lines)] - #[instrument(target = COMPONENT, skip_all, err)] + #[instrument_with_err_report(target = COMPONENT, skip_all, err)] pub async fn apply_block(&self, block: ProvenBlock) -> Result<(), ApplyBlockError> { let _lock = self.writer.try_lock().map_err(|_| ApplyBlockError::ConcurrentWrite)?; @@ -445,7 +446,7 @@ impl State { /// /// If [None] is given as the value of `block_num`, the data for the latest [BlockHeader] is /// returned. - #[instrument(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] + #[instrument_with_err_report(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] pub async fn get_block_header( &self, block_num: Option, @@ -481,7 +482,7 @@ impl State { /// tree. /// /// Note: these proofs are invalidated once the nullifier tree is modified, i.e. on a new block. 
- #[instrument(level = "debug", target = COMPONENT, skip_all, ret)] + #[instrument_with_err_report(level = "debug", target = COMPONENT, skip_all, ret)] pub async fn check_nullifiers(&self, nullifiers: &[Nullifier]) -> Vec { let inner = self.inner.read().await; nullifiers @@ -662,7 +663,7 @@ impl State { /// block range. /// - `note_tags`: The tags the client is interested in, result is restricted to the first block /// with any matches tags. - #[instrument(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] + #[instrument_with_err_report(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] pub async fn sync_state( &self, block_num: BlockNumber, @@ -712,7 +713,7 @@ impl State { /// - `note_tags`: The tags the client is interested in, resulting notes are restricted to the /// first block containing a matching note. /// - `block_range`: The range of blocks from which to synchronize notes. - #[instrument(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] + #[instrument_with_err_report(level = "debug", target = COMPONENT, skip_all, ret(level = "debug"), err)] pub async fn sync_notes( &self, note_tags: Vec, @@ -868,7 +869,7 @@ impl State { } /// Returns data needed by the block producer to verify transactions validity. - #[instrument(target = COMPONENT, skip_all, ret)] + #[instrument_with_err_report(target = COMPONENT, skip_all, ret)] pub async fn get_transaction_inputs( &self, account_id: AccountId, @@ -1176,7 +1177,7 @@ impl State { // INNER STATE LOADING // ================================================================================================ -#[instrument(level = "info", target = COMPONENT, skip_all)] +#[instrument_with_err_report(level = "info", target = COMPONENT, skip_all)] async fn load_mmr(db: &mut Db) -> Result { let block_commitments: Vec = db .select_all_block_headers() @@ -1192,7 +1193,7 @@ async fn load_mmr(db: &mut Db) -> Result { Ok(chain_mmr) } -#[instrument(level = "info", target = COMPONENT, skip_all)] +#[instrument_with_err_report(level = "info", target = COMPONENT, skip_all)] async fn load_nullifier_tree( db: &mut Db, ) -> Result>, StateInitializationError> { @@ -1206,7 +1207,7 @@ async fn load_nullifier_tree( .map_err(StateInitializationError::FailedToCreateNullifierTree) } -#[instrument(level = "info", target = COMPONENT, skip_all)] +#[instrument_with_err_report(level = "info", target = COMPONENT, skip_all)] async fn load_account_tree( db: &mut Db, block_number: BlockNumber, @@ -1236,7 +1237,7 @@ async fn load_account_tree( } /// Loads SMT forest with storage map and vault Merkle paths for all public accounts. 
-#[instrument(target = COMPONENT, skip_all, fields(block_num = %block_num))] +#[instrument_with_err_report(target = COMPONENT, skip_all, fields(block_num = %block_num))] async fn load_smt_forest( db: &mut Db, block_num: BlockNumber, diff --git a/crates/tracing-macro/Cargo.toml b/crates/tracing-macro/Cargo.toml new file mode 100644 index 000000000..950f77663 --- /dev/null +++ b/crates/tracing-macro/Cargo.toml @@ -0,0 +1,23 @@ +[package] +categories = ["development-tools::procedural-macro-helpers"] +description = "Procedural macro for tracing instrumentation with full error report context" +keywords = ["miden", "node", "tracing"] +name = "miden-node-tracing-macro" + +edition.workspace = true +license.workspace = true +readme.workspace = true +repository.workspace = true +rust-version.workspace = true +version.workspace = true + +[lints] +workspace = true + +[lib] +proc-macro = true + +[dependencies] +proc-macro2 = "1.0" +quote = "1.0" +syn = { features = ["extra-traits", "full", "parsing"], version = "2.0" } diff --git a/crates/tracing-macro/README.md b/crates/tracing-macro/README.md new file mode 100644 index 000000000..84f06e6f4 --- /dev/null +++ b/crates/tracing-macro/README.md @@ -0,0 +1,16 @@ +# Miden Node Tracing Macro + +Internal procedural macro crate for `miden-node-tracing`. + +**Do not use this crate directly.** Use `miden-node-tracing` instead, which re-exports this macro along with all required dependencies. + +## Usage + +```rust +use miden_node_tracing::instrument_with_err_report; + +#[instrument_with_err_report(target = COMPONENT, skip_all, err)] +pub async fn apply_block(&self, block: ProvenBlock) -> Result<(), ApplyBlockError> { + // Function body... +} +``` diff --git a/crates/tracing-macro/src/lib.rs b/crates/tracing-macro/src/lib.rs new file mode 100644 index 000000000..f0f08259c --- /dev/null +++ b/crates/tracing-macro/src/lib.rs @@ -0,0 +1,261 @@ +//! Procedural macro for tracing instrumentation with full error report context. +//! +//! This macro provides an `#[instrument]`-like attribute that uses `ErrorReport::as_report()` +//! to capture the full error chain in tracing spans, rather than just the `Display` output. +//! +//! **Note**: This crate should not be used directly. Use `miden-node-tracing` instead, +//! which re-exports this macro along with all required dependencies. +//! +//! # Problem +//! +//! The standard `#[instrument(err)]` from `tracing` uses `Display` or `Debug` to format errors +//! in span events. This loses the error chain context, showing only the top-level error message. +//! +//! # Solution +//! +//! This macro wraps functions that return `Result` and: +//! 1. Creates a tracing span with the configured attributes (delegating to `tracing::instrument`) +//! 2. On error, records the full error chain using `ErrorReport::as_report()` +//! 3. Sets the OpenTelemetry span status to error with the full report + +use proc_macro::TokenStream; +use proc_macro2::TokenStream as TokenStream2; +use quote::quote; +use syn::{ItemFn, ReturnType, parse_macro_input}; + +/// Instruments a function with tracing, using full error reports for context. +/// +/// This attribute macro wraps functions that return `Result` and creates +/// a tracing span. When the function returns an error, it records the full +/// error chain using `ErrorReport::as_report()` instead of just `Display`. +/// +/// This macro accepts all the same arguments as `tracing::instrument`, and +/// delegates to it for span creation. 
The added functionality is enhanced +/// error reporting when `err` is specified. +/// +/// # Arguments +/// +/// All arguments from `tracing::instrument` are supported: +/// - `target = "..."` - Sets the tracing target +/// - `level = "..."` - Sets the tracing level (default: "info") +/// - `name = "..."` - Sets a custom span name (default: function name) +/// - `err` - Record errors with full error chain (enhanced by this macro) +/// - `ret` / `ret(level = "...")` - Record return values +/// - `skip(arg1, arg2)` - Skip specific arguments from span fields +/// - `skip_all` - Skip all arguments from span fields +/// - `fields(key = value, ...)` - Add custom fields to the span +/// - `parent = None` - Create a root span (no parent) +/// +/// # Example +/// +/// ```rust,ignore +/// use miden_node_tracing::instrument_with_err_report; +/// +/// #[instrument_with_err_report(target = COMPONENT, skip_all, err)] +/// pub async fn apply_block(&self, block: ProvenBlock) -> Result<(), ApplyBlockError> { +/// // Function body... +/// } +/// ``` +#[proc_macro_attribute] +pub fn instrument_with_err_report(attr: TokenStream, item: TokenStream) -> TokenStream { + let attr2 = TokenStream2::from(attr.clone()); + let input = parse_macro_input!(item as ItemFn); + + // Check if 'err' is present in the attributes + let attr_str = attr2.to_string(); + let has_err = attr_str.contains("err"); + + // Check if the function returns a Result + let returns_result = match &input.sig.output { + ReturnType::Default => false, + ReturnType::Type(_, ty) => { + let ty_str = quote! { #ty }.to_string(); + ty_str.contains("Result") + }, + }; + + if has_err && returns_result { + generate_with_error_reporting(&attr2, input).into() + } else { + // Just delegate to standard tracing::instrument + let ItemFn { attrs, vis, sig, block } = input; + quote! { + #(#attrs)* + #[::tracing::instrument(#attr2)] + #vis #sig + #block + } + .into() + } +} + +fn generate_with_error_reporting(attr: &TokenStream2, input: ItemFn) -> TokenStream2 { + let ItemFn { attrs, vis, sig, block } = input; + + // Remove 'err' from the attributes we pass to tracing::instrument + // since we handle error reporting ourselves + let tracing_attr = remove_err_from_attr(attr); + + // Get the return type for type annotation + let result_type = match &sig.output { + ReturnType::Type(_, ty) => quote! { #ty }, + ReturnType::Default => quote! { () }, + }; + + // Use absolute paths via the miden_node_tracing crate + quote! 
{ + #(#attrs)* + #[::tracing::instrument(#tracing_attr)] + #vis #sig + { + let __result: #result_type = #block; + + if let ::core::result::Result::Err(ref __err) = __result { + // Use ErrorReport to get the full error chain + let __report = { + use ::miden_node_tracing::ErrorReport as _; + __err.as_report() + }; + + // Record the error event with the full report + ::miden_node_tracing::error!(error = %__report); + + // Set OpenTelemetry span status if available + { + use ::miden_node_tracing::OpenTelemetrySpanExt as _; + ::miden_node_tracing::Span::current().set_error(__err as &dyn ::std::error::Error); + } + } + + __result + } + } +} + +fn remove_err_from_attr(attr: &TokenStream2) -> TokenStream2 { + // Simple string-based removal of 'err' from the attribute + // This handles both 'err' and 'err,' patterns + let attr_str = attr.to_string(); + + // Remove 'err,' or ', err' or 'err' patterns + let cleaned = attr_str + .replace(", err,", ",") + .replace(", err", "") + .replace("err,", "") + .replace("err", ""); + + // Parse the cleaned string back into tokens + cleaned.parse().unwrap_or_else(|_| attr.clone()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_remove_err_from_attr_trailing() { + let attr: TokenStream2 = "target = COMPONENT, skip_all, err".parse().unwrap(); + let result = remove_err_from_attr(&attr); + let result_str = result.to_string(); + assert!(!result_str.contains("err"), "Should remove 'err': {result_str}"); + assert!(result_str.contains("target"), "Should keep 'target': {result_str}"); + assert!(result_str.contains("skip_all"), "Should keep 'skip_all': {result_str}"); + } + + #[test] + fn test_remove_err_from_attr_middle() { + let attr: TokenStream2 = "target = COMPONENT, err, skip_all".parse().unwrap(); + let result = remove_err_from_attr(&attr); + let result_str = result.to_string(); + assert!(!result_str.contains("err"), "Should remove 'err': {result_str}"); + assert!(result_str.contains("target"), "Should keep 'target': {result_str}"); + assert!(result_str.contains("skip_all"), "Should keep 'skip_all': {result_str}"); + } + + #[test] + fn test_remove_err_from_attr_only() { + let attr: TokenStream2 = "err".parse().unwrap(); + let result = remove_err_from_attr(&attr); + let result_str = result.to_string(); + assert!(!result_str.contains("err"), "Should remove 'err': {result_str}"); + } + + #[test] + fn test_remove_err_from_attr_with_ret() { + let attr: TokenStream2 = + "level = \"debug\", target = COMPONENT, err, ret(level = \"debug\")" + .parse() + .unwrap(); + let result = remove_err_from_attr(&attr); + let result_str = result.to_string(); + assert!(!result_str.contains(", err,"), "Should remove ', err,': {result_str}"); + assert!(result_str.contains("level"), "Should keep 'level': {result_str}"); + assert!(result_str.contains("ret"), "Should keep 'ret': {result_str}"); + } + + #[test] + fn test_returns_result_detection() { + // Test with Result type + let fn_with_result: syn::ItemFn = syn::parse_quote! { + fn test_fn() -> Result<(), Error> { + Ok(()) + } + }; + let returns_result = match &fn_with_result.sig.output { + ReturnType::Default => false, + ReturnType::Type(_, ty) => { + let ty_str = quote! { #ty }.to_string(); + ty_str.contains("Result") + }, + }; + assert!(returns_result, "Should detect Result return type"); + + // Test without Result type + let fn_without_result: syn::ItemFn = syn::parse_quote! 
{ + fn test_fn() -> i32 { + 42 + } + }; + let returns_result = match &fn_without_result.sig.output { + ReturnType::Default => false, + ReturnType::Type(_, ty) => { + let ty_str = quote! { #ty }.to_string(); + ty_str.contains("Result") + }, + }; + assert!(!returns_result, "Should not detect Result for i32 return type"); + + // Test with no return type + let fn_no_return: syn::ItemFn = syn::parse_quote! { + fn test_fn() { + println!("hello"); + } + }; + let returns_result = match &fn_no_return.sig.output { + ReturnType::Default => false, + ReturnType::Type(_, ty) => { + let ty_str = quote! { #ty }.to_string(); + ty_str.contains("Result") + }, + }; + assert!(!returns_result, "Should not detect Result for unit return type"); + } + + #[test] + fn test_err_detection_in_attrs() { + // Test with err present + let attr_with_err = "target = COMPONENT, skip_all, err"; + assert!(attr_with_err.contains("err"), "Should find 'err'"); + + // Test without err + let attr_without_err = "target = COMPONENT, skip_all"; + assert!(!attr_without_err.contains("err"), "Should not find 'err'"); + + // Test with err in field name (should still match, but that's acceptable) + let attr_with_error_field = "target = COMPONENT, fields(error = true)"; + // Note: This is a known limitation - "error" contains "err" + // In practice this doesn't cause issues because if someone uses fields(error=...) + // they likely don't intend to use our error reporting anyway + assert!(attr_with_error_field.contains("err"), "Contains 'err' substring"); + } +} diff --git a/crates/tracing/Cargo.toml b/crates/tracing/Cargo.toml new file mode 100644 index 000000000..f3e463716 --- /dev/null +++ b/crates/tracing/Cargo.toml @@ -0,0 +1,21 @@ +[package] +categories = ["development-tools::debugging"] +description = "Tracing utilities for Miden node with enhanced error reporting" +keywords = ["miden", "node", "tracing"] +name = "miden-node-tracing" + +edition.workspace = true +license.workspace = true +readme.workspace = true +repository.workspace = true +rust-version.workspace = true +version.workspace = true + +[lints] +workspace = true + +[dependencies] +miden-node-tracing-macro = { workspace = true } +miden-node-utils = { workspace = true } +opentelemetry = { version = "0.31" } +tracing = { workspace = true } diff --git a/crates/tracing/README.md b/crates/tracing/README.md new file mode 100644 index 000000000..dc0c39b93 --- /dev/null +++ b/crates/tracing/README.md @@ -0,0 +1,50 @@ +# Miden Node Tracing + +Tracing utilities for Miden node with enhanced error reporting. + +## Overview + +This crate provides an `#[instrument_with_err_report]` attribute macro that enhances the standard `tracing::instrument` with full error chain reporting. When a function returns an error, instead of just logging the top-level error message, it captures and logs the entire error chain. + +## Usage + +```rust +use miden_node_tracing::instrument_with_err_report; + +#[instrument_with_err_report(target = "my_component", skip_all, err)] +pub async fn apply_block(&self, block: ProvenBlock) -> Result<(), ApplyBlockError> { + // Function body... +} +``` + +## Enhanced Error Reporting + +When an error occurs, the full error chain is recorded (matching `tracing::instrument` style): + +```text +ERROR apply_block: error = "failed to apply block +caused by: database error +caused by: SQLite error: table 'blocks' has 10 columns but 9 values were supplied" +``` + +This is much more useful for debugging than just seeing "failed to apply block". 
+ +## Supported Arguments + +All arguments from `tracing::instrument` are supported: + +| Argument | Example | Description | +|----------|---------|-------------| +| `target` | `target = "my_component"` | Sets the tracing target | +| `level` | `level = "debug"` | Sets the tracing level | +| `name` | `name = "custom.span.name"` | Sets a custom span name | +| `err` | `err` | Record errors with full error chain | +| `ret` | `ret` or `ret(level = "debug")` | Record return values | +| `skip` | `skip(arg1, arg2)` | Skip specific arguments | +| `skip_all` | `skip_all` | Skip all arguments | +| `fields` | `fields(key = value)` | Add custom fields | +| `parent` | `parent = None` | Create a root span | + +## Requirements + +The enhanced error reporting requires the error type to implement `std::error::Error`. For error types that don't (like `tonic::Status` or `anyhow::Error`), use the standard `tracing::instrument` instead. diff --git a/crates/tracing/src/lib.rs b/crates/tracing/src/lib.rs new file mode 100644 index 000000000..a49b45e01 --- /dev/null +++ b/crates/tracing/src/lib.rs @@ -0,0 +1,34 @@ +//! Tracing utilities for Miden node with enhanced error reporting. +//! +//! This crate provides an `#[instrument_with_err_report]` attribute macro that enhances +//! the standard `tracing::instrument` with full error chain reporting. +//! +//! # Usage +//! +//! ```rust,ignore +//! use miden_node_tracing::instrument_with_err_report; +//! +//! #[instrument_with_err_report(target = "my_component", skip_all, err)] +//! async fn apply_block(&self, block: Block) -> Result<(), ApplyBlockError> { +//! // ... +//! } +//! ``` +//! +//! When an error occurs, the full error chain is recorded: +//! ```text +//! failed to apply block to store +//! caused by: database error +//! caused by: SQLite error: table 'blocks' has 10 columns but 9 values were supplied +//! ``` + +// Re-export the instrument macro +pub use miden_node_tracing_macro::instrument_with_err_report; +// Re-export ErrorReport from miden-node-utils +pub use miden_node_utils::ErrorReport; +// Re-export OpenTelemetrySpanExt from miden-node-utils +pub use miden_node_utils::tracing::OpenTelemetrySpanExt; +// Re-export OpenTelemetry types needed by the macro +pub use opentelemetry::trace::Status as OtelStatus; +pub use tracing::field::Empty as FieldEmpty; +// Re-export tracing types needed by the macro +pub use tracing::{Instrument, Level, Span, error, event};
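For illustration, here is a rough hand-written equivalent of what `#[instrument_with_err_report(..., err)]` produces for a `Result`-returning function, mirroring `generate_with_error_reporting` above. This is a sketch only: `load_header` and `StoreError` are hypothetical names, `thiserror` (already used elsewhere in the workspace) is assumed to be available, and `ErrorReport` is assumed to cover any `std::error::Error` type, as the macro itself relies on.

```rust
use miden_node_tracing::{ErrorReport as _, OpenTelemetrySpanExt as _};

// Hypothetical error type, used purely to illustrate the expansion.
#[derive(Debug, thiserror::Error)]
#[error("failed to load the latest header")]
struct StoreError(#[from] std::io::Error);

// `#[instrument_with_err_report(target = "store", skip_all, err)]` on `load_header`
// expands to roughly the following. Note that `err` is stripped before delegating to
// `tracing::instrument`, since the error event is emitted manually below.
#[tracing::instrument(target = "store", skip_all)]
fn load_header() -> Result<(), StoreError> {
    let __result: Result<(), StoreError> = {
        // ... original function body ...
        Err(std::io::Error::new(std::io::ErrorKind::Other, "disk unavailable").into())
    };

    if let Err(ref __err) = __result {
        // Record the full "caused by" chain via `ErrorReport`, not just the top-level `Display`.
        tracing::error!(error = %__err.as_report());
        // Also mark the current OpenTelemetry span as errored.
        tracing::Span::current().set_error(__err as &dyn std::error::Error);
    }

    __result
}
```

The real expansion keeps the original attributes and signature and uses fully-qualified paths through `miden-node-tracing`, which is why the crate re-exports `ErrorReport`, `OpenTelemetrySpanExt`, `Span`, and `error`.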