From 5f3c8a3682306f2d2d185a08834bd16984ddbfb4 Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Fri, 11 Apr 2025 17:06:03 -0400 Subject: [PATCH 01/56] v2: fumaole_v2.proto + yellowstone-fumarole-client first draft --- Cargo.lock | 6 +- Cargo.toml | 2 + crates/yellowstone-fumarole-client/Cargo.toml | 4 +- crates/yellowstone-fumarole-client/build.rs | 5 +- crates/yellowstone-fumarole-client/src/lib.rs | 271 ++---------- .../src/runtime/mod.rs | 329 ++++++++++++++ .../src/runtime/tokio.rs | 405 ++++++++++++++++++ .../src/util/collections.rs | 53 +++ .../src/util/mod.rs | 1 + proto/fumarole_v2.proto | 163 +++++++ 10 files changed, 1006 insertions(+), 233 deletions(-) create mode 100644 crates/yellowstone-fumarole-client/src/runtime/mod.rs create mode 100644 crates/yellowstone-fumarole-client/src/runtime/tokio.rs create mode 100644 crates/yellowstone-fumarole-client/src/util/collections.rs create mode 100644 crates/yellowstone-fumarole-client/src/util/mod.rs create mode 100644 proto/fumarole_v2.proto diff --git a/Cargo.lock b/Cargo.lock index 2c47cf0..854ab4d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -349,9 +349,9 @@ dependencies = [ [[package]] name = "async-trait" -version = "0.1.86" +version = "0.1.88" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "644dd749086bf3771a2fbc5f256fdb982d53f011c7d5d560304eafeecebce79d" +checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5" dependencies = [ "proc-macro2", "quote", @@ -5008,6 +5008,7 @@ checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" name = "yellowstone-fumarole-client" version = "0.1.1-pre.2+solana.2.1.11" dependencies = [ + "async-trait", "http 1.2.0", "hyper 1.6.0", "prost", @@ -5022,6 +5023,7 @@ dependencies = [ "tonic", "tonic-build", "tower 0.5.2", + "tracing", "yellowstone-grpc-proto", ] diff --git a/Cargo.toml b/Cargo.toml index 1d7666d..05122b1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,6 +17,7 @@ publish = false 
[workspace.dependencies] +async-trait = "0.1.88" clap = "4.5.7" http = "1.2.0" hyper = "1.3.1" @@ -32,6 +33,7 @@ tokio-stream = "0.1.11" tonic = "0.12.3" tonic-build = "0.12.3" tower = "0.5.2" +tracing = "0.1.41" yellowstone-fumarole-client = { path = "crates/yellowstone-fumarole-client" } yellowstone-grpc-client = "5.0.0" yellowstone-grpc-proto = "5.0.0" diff --git a/crates/yellowstone-fumarole-client/Cargo.toml b/crates/yellowstone-fumarole-client/Cargo.toml index 058d34e..0d0dc82 100644 --- a/crates/yellowstone-fumarole-client/Cargo.toml +++ b/crates/yellowstone-fumarole-client/Cargo.toml @@ -17,6 +17,7 @@ include = [ ] [dependencies] +async-trait = { workspace = true } solana-sdk = { workspace = true } http = { workspace = true } hyper = { workspace = true } @@ -25,10 +26,11 @@ serde = { workspace = true, features = ["derive"] } serde_with = { workspace = true } serde_yaml = { workspace = true } thiserror = { workspace = true } -tokio = { workspace = true } +tokio = { workspace = true, features = ["time"] } tokio-stream = { workspace = true } tonic = { workspace = true, features = ["tls", "tls-native-roots"] } tower = { workspace = true } +tracing = { workspace = true } yellowstone-grpc-proto = { workspace = true } [build-dependencies] diff --git a/crates/yellowstone-fumarole-client/build.rs b/crates/yellowstone-fumarole-client/build.rs index 915025a..c9687a8 100644 --- a/crates/yellowstone-fumarole-client/build.rs +++ b/crates/yellowstone-fumarole-client/build.rs @@ -13,7 +13,10 @@ fn main() { tonic_build::configure() .build_server(false) .compile_protos( - &[proto_dir.join("fumarole.proto")], + &[ + proto_dir.join("fumarole.proto"), + proto_dir.join("fumarole_v2.proto"), + ], &[proto_dir, yellowstone_grpc_proto_dir], ) .expect("Failed to compile protos"); diff --git a/crates/yellowstone-fumarole-client/src/lib.rs b/crates/yellowstone-fumarole-client/src/lib.rs index 562ac96..97cc055 100644 --- a/crates/yellowstone-fumarole-client/src/lib.rs +++ 
b/crates/yellowstone-fumarole-client/src/lib.rs @@ -3,13 +3,25 @@ /// pub mod config; +pub(crate) mod runtime; +pub(crate) mod util; + use { config::FumaroleConfig, - solana_sdk::pubkey::Pubkey, - std::collections::HashMap, - tokio::sync::mpsc, - tokio_stream::wrappers::ReceiverStream, + core::num, + proto::{BlockFilters, BlockchainEvent, ControlCommand, PollBlockchainHistory}, + solana_sdk::{clock::Slot, commitment_config::CommitmentLevel, pubkey::Pubkey}, + std::{ + cmp::Reverse, + collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, VecDeque}, + }, + tokio::{ + sync::mpsc, + task::{self, JoinError, JoinSet}, + }, + tokio_stream::{wrappers::ReceiverStream, StreamMap}, tonic::{ + async_trait, metadata::{ errors::{InvalidMetadataKey, InvalidMetadataValue}, Ascii, MetadataKey, MetadataValue, @@ -18,12 +30,13 @@ use { transport::{Channel, ClientTlsConfig}, }, tower::{util::BoxService, ServiceBuilder, ServiceExt}, + util::collections::KeyedVecDeque, yellowstone_grpc_proto::geyser::{ - SubscribeRequestFilterAccounts, SubscribeRequestFilterTransactions, + SubscribeRequest, SubscribeRequestFilterAccounts, SubscribeRequestFilterTransactions, }, }; -pub(crate) mod solana { +mod solana { #[allow(unused_imports)] pub use yellowstone_grpc_proto::solana::{ storage, @@ -31,12 +44,12 @@ pub(crate) mod solana { }; } -pub(crate) mod geyser { +mod geyser { pub use yellowstone_grpc_proto::geyser::*; } pub mod proto { - include!(concat!(env!("OUT_DIR"), "/fumarole.rs")); + include!(concat!(env!("OUT_DIR"), "/fumarole_v2.rs")); } use proto::fumarole_client::FumaroleClient as TonicFumaroleClient; @@ -105,32 +118,26 @@ pub struct FumaroleClient { inner: BoxedTonicFumaroleClient, } +#[async_trait::async_trait] +pub trait FumaroleSender { + // async fn send_request( + // &mut self, + // request: proto::SubscribeRequest, + // ) -> Result>, tonic::Status>; +} + impl FumaroleClient { - pub async fn subscribe_with_request( - &mut self, - request: proto::SubscribeRequest, - ) -> 
Result>, tonic::Status> + /// + /// Subscribe to a stream of updates from the Fumarole service + /// + pub async fn dragonsmouth_subscribe( + consumer_group_name: S, + request: geyser::SubscribeRequest, + ) -> mpsc::Receiver + where + S: AsRef, { - let (tx, rx) = mpsc::channel(100); - let rx = ReceiverStream::new(rx); - - // NOTE: Make sure send request before giving the stream to the service - // Otherwise, the service will not be able to send the response - // This is due to how fumarole works in the background for auto-commit offset management. - tx.send(request) - .await - .expect("Failed to send request to Fumarole service"); - self.inner.subscribe(rx).await - } - - pub async fn list_available_commitment_levels( - &mut self, - request: impl tonic::IntoRequest, - ) -> std::result::Result< - tonic::Response, - tonic::Status, - > { - self.inner.list_available_commitment_levels(request).await + todo!() } pub async fn list_consumer_groups( @@ -158,10 +165,10 @@ impl FumaroleClient { pub async fn create_consumer_group( &mut self, - request: impl tonic::IntoRequest, - ) -> std::result::Result, tonic::Status> + request: impl tonic::IntoRequest, + ) -> std::result::Result, tonic::Status> { - self.inner.create_static_consumer_group(request).await + self.inner.create_consumer_group(request).await } } @@ -279,197 +286,3 @@ impl FumaroleClientBuilder { }) } } - -/// -/// A builder for creating a SubscribeRequest. 
-/// -/// Example: -/// -/// ```rust -/// use yellowstone_fumarole_client::SubscribeRequestBuilder; -/// use solana_sdk::pubkey::Pubkey; -/// -/// let accounts = vec![Pubkey::new_keypair()]; -/// let owners = vec![Pubkey::new_keypair()]; -/// let tx_accounts = vec![Pubkey::new_keypair()]; -/// -/// let request = SubscribeRequestBuilder::default() -/// .with_accounts(Some(accounts)) -/// .with_owners(Some(owners)) -/// .with_tx_accounts(Some(tx_accounts)) -/// .build("my_consumer".to_string()); -/// ``` -#[derive(Clone)] -pub struct SubscribeRequestBuilder { - accounts: Option>, - owners: Option>, - tx_includes: Option>, - tx_excludes: Option>, - tx_requires: Option>, - tx_fail: Option, - tx_vote: Option, -} - -impl Default for SubscribeRequestBuilder { - fn default() -> Self { - Self::new() - } -} - -impl SubscribeRequestBuilder { - pub const fn new() -> Self { - Self { - accounts: None, - owners: None, - tx_includes: None, - tx_excludes: None, - tx_requires: None, - tx_fail: None, - tx_vote: None, - } - } - - /// - /// Sets the accounts to subscribe to. - /// - pub fn with_accounts(mut self, accounts: Option>) -> Self { - self.accounts = accounts; - self - } - - /// - /// Sets the owners of the accounts to subscribe to. - /// - pub fn with_owners(mut self, owners: Option>) -> Self { - self.owners = owners; - self - } - - /// - /// A transaction is included if it has at least one of the provided accounts in its list of instructions. - /// - pub fn with_tx_includes(mut self, tx_accounts: Option>) -> Self { - self.tx_includes = tx_accounts; - self - } - - /// - /// A transaction is excluded if it has at least one of the provided accounts in its list of instructions. - /// - pub fn with_tx_excludes(mut self, tx_excludes: Option>) -> Self { - self.tx_includes = tx_excludes; - self - } - - /// - /// A transaction is included if all of the provided accounts in its list of instructions. 
- /// - pub fn with_tx_requires(mut self, tx_requires: Option>) -> Self { - self.tx_requires = tx_requires; - self - } - - /// - /// Include failed transactions. - /// - pub const fn include_fail_tx(mut self) -> Self { - self.tx_fail = None; - self - } - - /// - /// Include vote transactions. - /// - pub const fn include_vote_tx(mut self) -> Self { - self.tx_vote = None; - self - } - - /// - /// Exclude failed transactions. - /// - pub const fn no_vote_tx(mut self) -> Self { - self.tx_vote = Some(false); - self - } - - /// - /// Exclude vote transactions. - /// - pub const fn no_fail_tx(mut self) -> Self { - self.tx_fail = Some(false); - self - } - - /// - /// Builds a SubscribeRequest. - /// - /// If the consumer index is not provided, it defaults to 0. - /// - pub fn build(self, consumer_group: String) -> proto::SubscribeRequest { - self.build_with_consumer_idx(consumer_group, 0) - } - - /// - /// Builds a vector of SubscribeRequests where each request has a different consumer index. - /// - pub fn build_vec(self, consumer_group: String, counts: u32) -> Vec { - (0..counts) - .map(|i| { - self.clone() - .build_with_consumer_idx(consumer_group.clone(), i) - }) - .collect() - } - - /// - /// Builds a SubscribeRequest with a consumer index. 
- /// - pub fn build_with_consumer_idx( - self, - consumer_group: String, - consumer_idx: u32, - ) -> proto::SubscribeRequest { - let account = self - .accounts - .map(|vec| vec.into_iter().map(|pubkey| pubkey.to_string()).collect()); - - let owner = self - .owners - .map(|vec| vec.into_iter().map(|pubkey| pubkey.to_string()).collect()); - - let tx_includes = self - .tx_includes - .map(|vec| vec.iter().map(|pubkey| pubkey.to_string()).collect()); - - let tx_excludes = self - .tx_excludes - .map(|vec| vec.iter().map(|pubkey| pubkey.to_string()).collect()); - - let tx_requires = self - .tx_requires - .map(|vec| vec.iter().map(|pubkey| pubkey.to_string()).collect()); - - let tx_filter = SubscribeRequestFilterTransactions { - vote: self.tx_vote, - failed: self.tx_fail, - account_exclude: tx_excludes.unwrap_or_default(), - account_include: tx_includes.unwrap_or_default(), - account_required: tx_requires.unwrap_or_default(), - signature: None, - }; - - let account_filter = SubscribeRequestFilterAccounts { - account: account.unwrap_or_default(), - owner: owner.unwrap_or_default(), - ..Default::default() - }; - - proto::SubscribeRequest { - consumer_group_label: consumer_group, - consumer_id: Some(consumer_idx), - accounts: HashMap::from([("default".to_string(), account_filter)]), - transactions: HashMap::from([("default".to_string(), tx_filter)]), - } - } -} diff --git a/crates/yellowstone-fumarole-client/src/runtime/mod.rs b/crates/yellowstone-fumarole-client/src/runtime/mod.rs new file mode 100644 index 0000000..9b9d181 --- /dev/null +++ b/crates/yellowstone-fumarole-client/src/runtime/mod.rs @@ -0,0 +1,329 @@ +pub(crate) mod tokio; + +use { + crate::{ + proto::{self, BlockchainEvent}, + util::collections::KeyedVecDeque, + }, + solana_sdk::clock::Slot, + std::{ + cmp::Reverse, + collections::{BTreeMap, BinaryHeap, HashMap, HashSet, VecDeque}, + convert::identity, + }, + yellowstone_grpc_proto::geyser, +}; + +type FumeBlockchainId = [u8; 16]; + +type FumeBlockUID = [u8; 
16]; + +type FumeNumShards = u32; + +type FumeShardIdx = u32; + +type FumeBlockShard = u32; + +type FumeDataBusId = u8; + +type FumeOffset = u64; + +#[derive(Debug, Clone)] +struct FumeDownloadRequest { + slot: Slot, + blockchain_id: FumeBlockchainId, + block_uid: FumeBlockUID, + num_shards: FumeNumShards, // First version of fumarole, it should always be 1 +} + +#[derive(Clone, Debug)] +struct FumeSlotStatus { + parent_offset: FumeOffset, + offset: FumeOffset, + slot: Slot, + parent_slot: Option, + block_uid: FumeBlockUID, + blockchain_id: FumeBlockchainId, + num_shards: FumeNumShards, + commitment_level: geyser::CommitmentLevel, +} + +#[derive(Debug, Default)] +struct SlotInfoProcessed { + processed_commitment_levels: HashSet, +} + +struct SlotDownloadProgress { + num_shards: FumeNumShards, + shard_remaining: Vec, +} + +enum SlotDownloadState { + Downloading, + Done, +} + +impl SlotDownloadProgress { + pub fn do_progress(&mut self, shard_idx: FumeShardIdx) -> SlotDownloadState { + self.shard_remaining[shard_idx as usize % self.num_shards as usize] = true; + + if self.shard_remaining.iter().all(|b| *b) { + SlotDownloadState::Done + } else { + SlotDownloadState::Downloading + } + } +} + +/// +/// Sans-IO Fumarole State Machine +/// +/// This state machine manages in-flight slot downloads and ensures correct ordering of slot statuses, +/// without performing any actual I/O operations itself. +/// +/// # Overview +/// +/// The state machine starts empty. To drive progress, feed it blockchain events using +/// [`FumaroleSM::queue_blockchain_event`]. This allows the state machine to advance and reveal "work" that +/// needs to be done. +/// +/// ## Type of Work: Slot Downloads +/// +/// To determine which slot should be downloaded, call [`FumaroleSM::pop_slot_to_download`]. +/// If it returns a [`FumeDownloadRequest`], it’s up to the runtime to interpret the request and handle the +/// actual I/O using the framework of your choice. 
+/// +/// **Note:** +/// Once [`pop_slot_to_download`] returns a [`FumeDownloadRequest`], the state machine considers the download +/// in progress. The runtime must report progress using [`FumaroleSM::make_slot_download_progress`] by +/// specifying the slot number and shard number that has been downloaded. +/// +/// As of now, the Fumarole backend does **not** support block-sharding. +/// Therefore, you can assume [`FumeDownloadRequest::num_shards`] will always be `1`. +/// However, the API is already shard-aware, allowing runtimes to opt into sharding support in the future. +/// +/// ## Type of Work: Slot Statuses +/// +/// Once a slot download is complete (via [`make_slot_download_progress`]), the state machine may release +/// corresponding slot statuses that were waiting on that download. These can be retrieved using +/// [`FumaroleSM::pop_next_slot_status`]. +/// +/// Each [`FumeSlotStatus`] has an offset. Once your runtime processes it, acknowledge it by calling +/// [`FumaroleSM::mark_offset_as_processed`]. This ensures that the [`FumaroleSM::committable_offset`] only +/// advances when there are no gaps in the slot status timeline. +/// +/// # Concurrency and Progress +/// +/// There is no strict call order for the `FumaroleSM` API. The state machine tracks all progress concurrently, +/// ensuring coherence. It automatically blocks operations that depend on unfinished work. +/// +/// # Suggested Runtime Loop +/// +/// A typical runtime loop using the state machine might look like: +/// +/// 1. Check if new blockchain events are needed with [`FumaroleSM::need_new_blockchain_events`]. +/// - If so, fetch some and call [`FumaroleSM::queue_blockchain_event`]. +/// 2. Check for any slots to download. +/// - If so, call [`FumaroleSM::pop_slot_to_download`] and handle the download. +/// 3. Check for completed downloads from the previous iteration. +/// - If any, report progress with [`FumaroleSM::make_slot_download_progress`]. +/// 4. 
Check for any available slot statuses to consume. +/// - Use [`FumaroleSM::pop_next_slot_status`] to retrieve them. +/// +/// [Safety] +/// +/// The state-machine manage deduping of slot-status, so is slot-download request. +/// You will never get [`FumeDownloadRequest`] twice for the same slot, even if multiple slot status happens for that given slot. +/// +pub(crate) struct FumaroleSM { + /// The last committed offset + pub last_committed_offset: FumeOffset, + /// Slot that have been downloaded in the current session along side slot status update + slot_downloaded: BTreeMap, + /// Inlfight slot download + inflight_slot_shard_download: HashMap, + /// Slot download queue + slot_download_queue: KeyedVecDeque, + /// Slot blocked by a slot download (inflight or in queue) + blocked_slot_status_update: HashMap>, + /// Slot status queue whose slot have been completely downloaded in the current session. + slot_status_update_queue: VecDeque, + /// Keeps track of each offset have been processed by the underlying runtime. + /// Fumarole State Machine emits slot status in disorder, but still requires ordering + /// when computing the `committable_offset` + processed_offset: BinaryHeap>, + + /// Represents the high-water mark fume offset that can be committed to the remote fumarole service. + /// It means the runtime processed everything <= committable offset. 
+ pub committable_offset: FumeOffset, +} + +impl FumaroleSM { + /// + /// Updates the committed offset + /// + pub(crate) fn update_committed_offset(&mut self, offset: FumeOffset) { + assert!( + offset > self.last_committed_offset, + "offset must be greater than last committed offset" + ); + self.last_committed_offset = offset; + } + + /// + /// Queues incoming **ordered** blockchain events + pub(crate) fn queue_blockchain_event(&mut self, events: IT) + where + IT: IntoIterator, + { + let mut last_offset = self.last_committed_offset; + for events in events { + let BlockchainEvent { + offset, + blockchain_id, + block_uid, + num_shards, + slot, + parent_slot, + commitment_level, + } = events; + + assert!( + offset > last_offset, + "offset must be greater than last offset" + ); + let blockchain_id: [u8; 16] = blockchain_id + .try_into() + .expect("blockchain_id must be 16 bytes"); + let block_uid: [u8; 16] = block_uid.try_into().expect("block_uid must be 16 bytes"); + + let cl = geyser::CommitmentLevel::try_from(commitment_level) + .expect("invalid commitment level"); + let fume_slot_status = FumeSlotStatus { + parent_offset: last_offset, + offset, + slot, + block_uid, + parent_slot, + blockchain_id, + num_shards, + commitment_level: cl, + }; + last_offset = offset; + // We don't download the same slot twice in the same session. + if !self.slot_downloaded.contains_key(&slot) { + // if the slot is already in-download, we don't need to schedule it for download again + if !self.inflight_slot_shard_download.contains_key(&slot) { + let download_request = FumeDownloadRequest { + slot, + blockchain_id, + block_uid, + num_shards, + }; + self.slot_download_queue.push_back(slot, download_request); + } + self.blocked_slot_status_update + .entry(slot) + .or_default() + .push_back(fume_slot_status); + } else { + self.slot_status_update_queue.push_back(fume_slot_status); + } + } + } + + /// + /// Returns true if there are slot to download, otherwise false. 
+ /// + pub(crate) fn has_any_slot_to_download(&self) -> bool { + !self.slot_download_queue.is_empty() + } + + /// + /// Returns the [`Some(FumeDownloadRequest)`] to download if any, otherwise `None`. + /// + pub(crate) fn pop_slot_to_download(&mut self) -> Option { + let download_req = self.slot_download_queue.pop_front()?; + let download_progress = SlotDownloadProgress { + num_shards: download_req.num_shards, + shard_remaining: vec![false; download_req.num_shards as usize], + }; + let old = self + .inflight_slot_shard_download + .insert(download_req.slot, download_progress); + assert!(old.is_none(), "slot already in download"); + Some(download_req) + } + + /// + /// Update download progression for a given `Slot` download + /// + pub(crate) fn make_slot_download_progress(&mut self, slot: Slot, shard_idx: FumeShardIdx) { + let download_progress = self + .inflight_slot_shard_download + .get_mut(&slot) + .expect("slot not in download"); + + let download_state = download_progress.do_progress(shard_idx); + + if matches!(download_state, SlotDownloadState::Done) { + // all shards downloaded + self.inflight_slot_shard_download.remove(&slot); + self.slot_downloaded.insert(slot, Default::default()); + + let blocked_slot_status = self + .blocked_slot_status_update + .remove(&slot) + .unwrap_or_default(); + self.slot_status_update_queue.extend(blocked_slot_status); + } + } + + /// + /// Pop next slot status to process + /// + pub(crate) fn pop_next_slot_status(&mut self) -> Option { + let slot_status = self.slot_status_update_queue.pop_front()?; + let info = self.slot_downloaded.get_mut(&slot_status.slot)?; + if info + .processed_commitment_levels + .insert(slot_status.commitment_level) + { + // We handle duplicate slot status event here. + Some(slot_status) + } else { + None + } + } + + #[inline] + fn missing_process_offset(&self) -> FumeOffset { + self.committable_offset + 1 + } + + /// + /// Marks this [`FumeOffset`] has processed by the runtime. 
+ /// + pub(crate) fn mark_offset_as_processed(&mut self, offset: FumeOffset) { + if offset == self.missing_process_offset() { + self.committable_offset = offset; + + while let Some(offset2) = self.processed_offset.peek().copied() { + let offset2 = offset2.0; + if offset2 == self.missing_process_offset() { + assert!(self.processed_offset.pop().is_some()); + self.committable_offset = offset2 + } + } + } else { + self.processed_offset.push(Reverse(offset)); + } + } + + /// + /// Returns true if there is no blockchain event history to track or progress on. + /// + pub(crate) fn need_new_blockchain_events(&self) -> bool { + self.slot_status_update_queue.is_empty() && self.blocked_slot_status_update.is_empty() + } +} diff --git a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs new file mode 100644 index 0000000..441ec66 --- /dev/null +++ b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs @@ -0,0 +1,405 @@ +use { + super::{FumaroleSM, FumeDownloadRequest, FumeOffset, FumeSlotStatus}, + crate::{ + proto::{ + self, data_command, BlockFilters, CommitOffset, ControlCommand, DataCommand, + DownloadBlockShard, PollBlockchainHistory, + }, + util::collections::KeyedVecDeque, + }, + solana_sdk::clock::Slot, + std::{ + cmp::Reverse, + collections::{BTreeMap, BinaryHeap, HashMap, HashSet, VecDeque}, + f32::consts::E, + sync::Arc, + time::{Duration, Instant}, + }, + tokio::{ + sync::mpsc, + task::{self, JoinError, JoinSet}, + }, + yellowstone_grpc_proto::geyser::{ + self, SubscribeRequest, SubscribeUpdate, SubscribeUpdateSlot, + }, +}; + +/// +/// Data-Plane bidirectional stream +struct DataPlaneBidi { + tx: mpsc::Sender, + rx: mpsc::Receiver, +} + +/// +/// Holds information about on-going data plane task. 
+/// +#[derive(Clone, Debug)] +struct DataPlaneTaskMeta { + download_request: FumeDownloadRequest, + scheduled_at: Instant, + download_attempt: u8, +} + +/// +/// Base trait for Data-plane bidirectional stream factories. +/// +#[async_trait::async_trait] +pub trait DataPlaneBidiFactory { + /// + /// Builds a [`DataPlaneBidi`] + /// + async fn build(&self) -> DataPlaneBidi; +} + +/// +/// Fumarole runtime based on Tokio outputting Dragonsmouth only events. +/// +pub(crate) struct TokioFumeDragonsmouthRuntime { + rt: tokio::runtime::Handle, + sm: FumaroleSM, + block_filters: BlockFilters, + data_plane_bidi_factory: Arc, + subscribe_request: SubscribeRequest, + consumer_group_name: String, + control_plane_tx: mpsc::Sender, + control_plane_rx: mpsc::Receiver, + data_plane_bidi_vec: VecDeque, + data_plane_tasks: JoinSet>, + data_plane_task_meta: HashMap, + dragonsmouth_outlet: mpsc::Sender, + download_to_retry: VecDeque, + download_attempts: HashMap, + max_slot_download_attempt: u8, + commit_interval: Duration, + last_commit: Instant, +} + +fn build_poll_history_cmd(from: Option) -> ControlCommand { + ControlCommand { + command: Some(proto::control_command::Command::PollHist( + // from None means poll the entire history from wherever we left off since last commit. 
+ PollBlockchainHistory { from }, + )), + } +} + +fn build_commit_offset_cmd(offset: FumeOffset) -> ControlCommand { + ControlCommand { + command: Some(proto::control_command::Command::CommitOffset( + CommitOffset { offset }, + )), + } +} + +struct DownloadBlockTask { + download_request: FumeDownloadRequest, + block_filter: BlockFilters, + bidi: DataPlaneBidi, + dragonsmouth_oulet: mpsc::Sender, +} + +struct DownloadBlockCompleted { + bidi: DataPlaneBidi, +} + +enum DownloadBlockError { + Disconnected, + OutletDisconnected, + BlockShardNotFound, +} + +impl DownloadBlockTask { + async fn run(mut self) -> Result { + let DataPlaneBidi { tx, mut rx } = self.bidi; + + // Make sure the stream is empty + loop { + match rx.try_recv() { + Err(mpsc::error::TryRecvError::Disconnected) => { + return Err(DownloadBlockError::Disconnected) + } + Err(mpsc::error::TryRecvError::Empty) => break, + Ok(_) => {} + } + } + let data_cmd = data_command::Command::DownloadBlockShard(DownloadBlockShard { + blockchain_id: self.download_request.blockchain_id.to_vec(), + block_uid: self.download_request.block_uid.to_vec(), + shard_idx: 0, // ONLY SUPPORTS 1 shard in V1. 
+ }); + let data_cmd = DataCommand { + command: Some(data_cmd), + }; + tx.send(data_cmd) + .await + .map_err(|_| DownloadBlockError::Disconnected)?; + + loop { + let Some(data) = rx.recv().await else { + return Err(DownloadBlockError::Disconnected); + }; + + let Some(resp) = data.response else { continue }; + + match resp { + proto::data_response::Response::Update(subscribe_update) => { + if self + .dragonsmouth_oulet + .send(subscribe_update) + .await + .is_err() + { + return Err(DownloadBlockError::OutletDisconnected); + } + } + proto::data_response::Response::BlockShardDownloadFinish( + _block_shard_download_finish, + ) => { + break; + } + proto::data_response::Response::Error(data_error) => { + let Some(e) = data_error.error else { continue }; + match e { + proto::data_error::Error::NotFound(block_not_found) => { + if block_not_found.block_uid.as_slice() + == self.download_request.block_uid.as_slice() + { + return Err(DownloadBlockError::BlockShardNotFound); + } else { + panic!("unexpected block uid") + } + } + } + } + } + } + + let bidi = DataPlaneBidi { tx, rx }; + Ok(DownloadBlockCompleted { bidi }) + } +} + +impl TokioFumeDragonsmouthRuntime { + fn handle_control_response(&mut self, control_response: proto::ControlResponse) { + let Some(response) = control_response.response else { + return; + }; + match response { + proto::control_response::Response::CommitOffset(commit_offset_result) => { + tracing::trace!("received commit offset : {commit_offset_result:?}"); + self.sm.update_committed_offset(commit_offset_result.offset); + } + proto::control_response::Response::PollNext(blockchain_history) => { + tracing::trace!( + "polled blockchain history : {} events", + blockchain_history.events.len() + ); + self.sm.queue_blockchain_event(blockchain_history.events); + } + proto::control_response::Response::Pong(_pong) => { + tracing::trace!("pong"); + } + } + } + + async fn poll_history_if_needed(&mut self) { + let cmd = 
build_poll_history_cmd(Some(self.sm.committable_offset)); + if self.sm.need_new_blockchain_events() { + self.control_plane_tx.send(cmd).await.expect("disconnected"); + } + } + + fn schedule_download_task_if_any(&mut self) { + // This loop drains as many download slot request as possible, + // limited to available [`DataPlaneBidi`]. + loop { + if self.data_plane_bidi_vec.is_empty() { + break; + } + + let maybe_download_request = self + .download_to_retry + .pop_front() + .or_else(|| self.sm.pop_slot_to_download()); + + let Some(download_request) = maybe_download_request else { + break; + }; + + assert!(download_request.num_shards == 1, "this client is incompatible with remote server since it does not support sharded block download"); + + let data_plane_bidi = self + .data_plane_bidi_vec + .pop_back() + .expect("should not be none"); + + let download_task = DownloadBlockTask { + download_request: download_request.clone(), + block_filter: self.block_filters.clone(), + bidi: data_plane_bidi, + dragonsmouth_oulet: self.dragonsmouth_outlet.clone(), + }; + + let download_attempts = self + .download_attempts + .entry(download_request.slot) + .or_default(); + + *download_attempts += 1; + + let ah = self + .data_plane_tasks + .spawn_on(download_task.run(), &self.rt); + self.data_plane_task_meta.insert( + ah.id(), + DataPlaneTaskMeta { + download_request, + scheduled_at: Instant::now(), + download_attempt: *download_attempts, + }, + ); + } + } + + async fn handle_data_plane_task_result( + &mut self, + task_id: task::Id, + result: Result, + ) { + let Some(task_meta) = self.data_plane_task_meta.remove(&task_id) else { + panic!("missing task meta") + }; + let slot = task_meta.download_request.slot; + tracing::trace!("download task result received for slot {}", slot); + match result { + Ok(completed) => { + let elapsed = task_meta.scheduled_at.elapsed(); + tracing::debug!("downloaded slot {slot} in {elapsed:?}"); + let _ = self.download_attempts.remove(&slot); + 
self.data_plane_bidi_vec.push_back(completed.bidi); + // TODO: Add support for sharded progress + self.sm.make_slot_download_progress(slot, 0); + } + Err(e) => { + match e { + DownloadBlockError::Disconnected => { + // We need to retry it + if task_meta.download_attempt >= self.max_slot_download_attempt { + panic!("Failed to download slot {slot}") + } + + let data_plane_bidi = self.data_plane_bidi_factory.build().await; + self.data_plane_bidi_vec.push_back(data_plane_bidi); + + tracing::debug!("Download slot {slot} failed, rescheduling for retry..."); + self.download_to_retry.push_back(task_meta.download_request); + } + DownloadBlockError::OutletDisconnected => { + // Will automatically be handled in the `run` main loop. + // so nothing to do. + } + DownloadBlockError::BlockShardNotFound => { + // TODO: I don't think it should ever happen, but lets panic first so we get notified by client if it ever happens. + panic!("Slot {slot} not found"); + } + } + } + } + } + + async fn commit_offset(&mut self) { + if self.sm.last_committed_offset < self.sm.committable_offset { + self.control_plane_tx + .send(build_commit_offset_cmd(self.sm.committable_offset)) + .await + .expect("failed to commit offset"); + } + + self.last_commit = Instant::now(); + } + + async fn drain_slot_status(&mut self) { + let commitment = self.subscribe_request.commitment(); + let mut slot_status_vec = VecDeque::with_capacity(10); + + while let Some(slot_status) = self.sm.pop_next_slot_status() { + slot_status_vec.push_back(slot_status); + } + + for slot_status in slot_status_vec { + let mut matched_filters = vec![]; + for (filter_name, filter) in &self.subscribe_request.slots { + if let Some(true) = filter.filter_by_commitment { + if slot_status.commitment_level == commitment { + matched_filters.push(filter_name.clone()); + } + } else { + matched_filters.push(filter_name.clone()); + } + } + + if !matched_filters.is_empty() { + let update = SubscribeUpdate { + filters: matched_filters, + created_at: 
None, + update_oneof: Some(geyser::subscribe_update::UpdateOneof::Slot( + SubscribeUpdateSlot { + slot: slot_status.slot, + parent: slot_status.parent_slot, + status: slot_status.commitment_level.into(), + // TODO: support dead slot + dead_error: None, + }, + )), + }; + + if self.dragonsmouth_outlet.send(update).await.is_err() { + return; + } + self.sm.mark_offset_as_processed(slot_status.offset); + } + } + } + + async fn run(mut self) { + let inital_load_history_cmd = build_poll_history_cmd(None); + self.control_plane_tx + .send(inital_load_history_cmd) + .await + .expect("disconnected"); + + loop { + if self.dragonsmouth_outlet.is_closed() { + tracing::trace!("Detected dragonsmouth outlet closed"); + break; + } + + let commit_deadline = self.last_commit + self.commit_interval; + + self.poll_history_if_needed().await; + self.schedule_download_task_if_any(); + tokio::select! { + control_response = self.control_plane_rx.recv() => { + if let Some(control_response) = control_response { + self.handle_control_response(control_response); + } else { + break; + } + } + Some(result) = self.data_plane_tasks.join_next_with_id() => { + let (task_id, download_result) = result.expect("data plane task set"); + + self.handle_data_plane_task_result(task_id, download_result); + } + + _ = tokio::time::sleep_until(commit_deadline.into()) => { + self.commit_offset().await; + } + } + self.drain_slot_status().await; + } + } +} + +pub struct TokioFumarolHandle {} diff --git a/crates/yellowstone-fumarole-client/src/util/collections.rs b/crates/yellowstone-fumarole-client/src/util/collections.rs new file mode 100644 index 0000000..948f6d5 --- /dev/null +++ b/crates/yellowstone-fumarole-client/src/util/collections.rs @@ -0,0 +1,53 @@ +use std::{ + collections::{HashSet, VecDeque}, + hash::Hash, +}; + +#[derive(Debug, Default)] +pub struct KeyedVecDeque { + vec: VecDeque<(K, V)>, + index: HashSet, +} + +impl KeyedVecDeque +where + K: Eq + Hash + Clone, +{ + pub fn new() -> Self { + 
KeyedVecDeque { + vec: VecDeque::new(), + index: HashSet::new(), + } + } + + pub fn push_back(&mut self, key: K, item: V) -> bool { + if self.index.insert(key.clone()) { + self.vec.push_back((key, item)); + true + } else { + false + } + } + + pub fn push_front(&mut self, key: K, item: V) -> bool { + if self.index.insert(key.clone()) { + self.vec.push_front((key, item)); + true + } else { + false + } + } + + pub fn pop_front(&mut self) -> Option { + if let Some((k, v)) = self.vec.pop_front() { + assert!(self.index.remove(&k)); + Some(v) + } else { + None + } + } + + pub fn is_empty(&self) -> bool { + self.vec.is_empty() + } +} diff --git a/crates/yellowstone-fumarole-client/src/util/mod.rs b/crates/yellowstone-fumarole-client/src/util/mod.rs new file mode 100644 index 0000000..2e4fe9b --- /dev/null +++ b/crates/yellowstone-fumarole-client/src/util/mod.rs @@ -0,0 +1 @@ +pub mod collections; diff --git a/proto/fumarole_v2.proto b/proto/fumarole_v2.proto new file mode 100644 index 0000000..8ef50c8 --- /dev/null +++ b/proto/fumarole_v2.proto @@ -0,0 +1,163 @@ +syntax = "proto3"; + +import public "geyser.proto"; + +package fumarole_v2; + +service Fumarole { + rpc GetConsumerGroupInfo(GetConsumerGroupInfoRequest) returns (ConsumerGroupInfo) {} + rpc ListConsumerGroups(ListConsumerGroupsRequest) returns (ListConsumerGroupsResponse) {} + rpc DeleteConsumerGroup(DeleteConsumerGroupRequest) returns (DeleteConsumerGroupResponse) {} + rpc CreateConsumerGroup(CreateConsumerGroupRequest) returns (CreateConsumerGroupResponse) {} + + // Represents subscription to the data plane + rpc SubscribeData(stream DataCommand) returns (stream DataResponse) {} + + // Represents subscription to the control plane + rpc Subscribe(stream ControlCommand) returns (stream ControlResponse) {} +} + +message GetConsumerGroupInfoRequest { + string consumer_group_label = 1; +} + +message DeleteConsumerGroupRequest { + string consumer_group_label = 1; +} + +message DeleteConsumerGroupResponse { + bool 
success = 1; +} + +message ListConsumerGroupsRequest {} + +message ListConsumerGroupsResponse { + repeated ConsumerGroupInfo consumer_groups = 1; +} + + +message ConsumerGroupInfo { + string id = 1; + string consumer_group_label = 2; + bool is_stale = 3; +} + +message GetSlotLagInfoRequest { + string consumer_group_label = 1; +} + +message BlockFilters { + map accounts = 1; + map transactions = 2; + map entries = 3; + map blocks_meta = 4; + geyser.CommitmentLevel commitment_level = 5; +} + +message DownloadBlockShard { + bytes blockchain_id = 1; + bytes block_uid = 2; + uint32 shard_idx = 3; +} + + +message Ping { + uint32 ping_id = 1; +} + +message Pong { + uint32 ping_id = 1; +} + +message DataCommand { + oneof command { + BlockFilters update_filter = 1; + DownloadBlockShard download_block_shard = 2; + Ping ping = 3; + } +} + +message BlockShardDownloadFinish { + bytes blockchain_id = 1; + bytes block_uid = 2; + uint32 shard_idx = 3; +} + +message BlockNotFound { + bytes blockchain_id = 1; + bytes block_uid = 2; + uint32 shard_idx = 3; +} + +message DataError { + oneof error { + BlockNotFound not_found = 1; + } +} + +message DataResponse { + oneof response { + geyser.SubscribeUpdate update = 1; + BlockShardDownloadFinish block_shard_download_finish = 2; + DataError error = 3; + } +} + +message CommitOffset { + uint64 offset = 1; +} + + +message PollBlockchainHistory { + optional uint64 from = 1; +} + +message BlockchainEvent { + uint64 offset = 1; + bytes blockchain_id = 2; + bytes block_uid = 3; + uint32 num_shards = 4; + uint64 slot = 5; + optional uint64 parent_slot = 6; + geyser.CommitmentLevel commitment_level = 7; +} + +message BlockchainHistory { + repeated BlockchainEvent events = 1; +} + +message ControlCommand { + oneof command { + CommitOffset commit_offset = 1; + PollBlockchainHistory poll_hist = 2; + Ping ping = 3; + } +} + +message CommitOffsetResult { + uint64 offset = 1; +} + + +message ControlResponse { + oneof response { + CommitOffsetResult 
commit_offset = 1; + BlockchainHistory poll_next = 2; + Pong pong = 3; + } +} + +message CreateConsumerGroupResponse { + string group_id = 1; +} + +enum InitialOffsetPolicy { + LATEST = 0; + //FROM_SLOT = 1; +} + +message CreateConsumerGroupRequest { + string consumer_group_label = 1; + InitialOffsetPolicy initial_offset_policy = 2; + //optional uint64 from_slot = 3; +} \ No newline at end of file From 3e12e2b901f088a0a161165ed5f4eae1a609c65c Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Mon, 14 Apr 2025 16:49:45 -0400 Subject: [PATCH 02/56] v2: added FumaroleSM test --- Cargo.lock | 10 ++ Cargo.toml | 1 + crates/yellowstone-fumarole-client/Cargo.toml | 1 + crates/yellowstone-fumarole-client/src/lib.rs | 1 + .../src/runtime/mod.rs | 160 ++++++++++++++---- .../src/runtime/tokio.rs | 21 +-- .../src/util/collections.rs | 11 +- 7 files changed, 162 insertions(+), 43 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 854ab4d..ea71d82 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4650,6 +4650,15 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" +[[package]] +name = "uuid" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "458f7a779bf54acc9f347480ac654f68407d3aab21269a6e3c9f922acd9e2da9" +dependencies = [ + "getrandom 0.3.1", +] + [[package]] name = "vcpkg" version = "0.2.15" @@ -5024,6 +5033,7 @@ dependencies = [ "tonic-build", "tower 0.5.2", "tracing", + "uuid", "yellowstone-grpc-proto", ] diff --git a/Cargo.toml b/Cargo.toml index 05122b1..50c7dae 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -34,6 +34,7 @@ tonic = "0.12.3" tonic-build = "0.12.3" tower = "0.5.2" tracing = "0.1.41" +uuid = { version = "1.16.0" } yellowstone-fumarole-client = { path = "crates/yellowstone-fumarole-client" } yellowstone-grpc-client = "5.0.0" yellowstone-grpc-proto = "5.0.0" diff --git 
a/crates/yellowstone-fumarole-client/Cargo.toml b/crates/yellowstone-fumarole-client/Cargo.toml index 0d0dc82..f328a46 100644 --- a/crates/yellowstone-fumarole-client/Cargo.toml +++ b/crates/yellowstone-fumarole-client/Cargo.toml @@ -31,6 +31,7 @@ tokio-stream = { workspace = true } tonic = { workspace = true, features = ["tls", "tls-native-roots"] } tower = { workspace = true } tracing = { workspace = true } +uuid = { workspace = true, features = ["v4"] } yellowstone-grpc-proto = { workspace = true } [build-dependencies] diff --git a/crates/yellowstone-fumarole-client/src/lib.rs b/crates/yellowstone-fumarole-client/src/lib.rs index 97cc055..c7f8c0d 100644 --- a/crates/yellowstone-fumarole-client/src/lib.rs +++ b/crates/yellowstone-fumarole-client/src/lib.rs @@ -48,6 +48,7 @@ mod geyser { pub use yellowstone_grpc_proto::geyser::*; } +#[allow(clippy::all)] pub mod proto { include!(concat!(env!("OUT_DIR"), "/fumarole_v2.rs")); } diff --git a/crates/yellowstone-fumarole-client/src/runtime/mod.rs b/crates/yellowstone-fumarole-client/src/runtime/mod.rs index 9b9d181..8d9d323 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/mod.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/mod.rs @@ -9,43 +9,39 @@ use { std::{ cmp::Reverse, collections::{BTreeMap, BinaryHeap, HashMap, HashSet, VecDeque}, - convert::identity, }, yellowstone_grpc_proto::geyser, }; -type FumeBlockchainId = [u8; 16]; +pub(crate) type FumeBlockchainId = [u8; 16]; -type FumeBlockUID = [u8; 16]; +pub(crate) type FumeBlockUID = [u8; 16]; -type FumeNumShards = u32; +pub(crate) type FumeNumShards = u32; -type FumeShardIdx = u32; +pub(crate) type FumeShardIdx = u32; -type FumeBlockShard = u32; +pub(crate) type FumeBlockShard = u32; -type FumeDataBusId = u8; +pub(crate) type FumeDataBusId = u8; -type FumeOffset = u64; +pub(crate) type FumeOffset = u64; #[derive(Debug, Clone)] -struct FumeDownloadRequest { - slot: Slot, - blockchain_id: FumeBlockchainId, - block_uid: FumeBlockUID, - num_shards: 
FumeNumShards, // First version of fumarole, it should always be 1 +pub(crate) struct FumeDownloadRequest { + pub(crate) slot: Slot, + pub(crate) blockchain_id: FumeBlockchainId, + pub(crate) block_uid: FumeBlockUID, + pub(crate) num_shards: FumeNumShards, // First version of fumarole, it should always be 1 } #[derive(Clone, Debug)] -struct FumeSlotStatus { - parent_offset: FumeOffset, - offset: FumeOffset, - slot: Slot, - parent_slot: Option, - block_uid: FumeBlockUID, - blockchain_id: FumeBlockchainId, - num_shards: FumeNumShards, - commitment_level: geyser::CommitmentLevel, +pub(crate) struct FumeSlotStatus { + pub(crate) parent_offset: FumeOffset, + pub(crate) offset: FumeOffset, + pub(crate) slot: Slot, + pub(crate) parent_slot: Option, + pub(crate) commitment_level: geyser::CommitmentLevel, } #[derive(Debug, Default)] @@ -159,6 +155,19 @@ pub(crate) struct FumaroleSM { } impl FumaroleSM { + pub fn new(last_committed_offset: FumeOffset) -> Self { + Self { + last_committed_offset, + slot_downloaded: Default::default(), + inflight_slot_shard_download: Default::default(), + slot_download_queue: Default::default(), + blocked_slot_status_update: Default::default(), + slot_status_update_queue: Default::default(), + processed_offset: Default::default(), + committable_offset: last_committed_offset, + } + } + /// /// Updates the committed offset /// @@ -188,10 +197,9 @@ impl FumaroleSM { commitment_level, } = events; - assert!( - offset > last_offset, - "offset must be greater than last offset" - ); + if offset < last_offset { + continue; + } let blockchain_id: [u8; 16] = blockchain_id .try_into() .expect("blockchain_id must be 16 bytes"); @@ -203,10 +211,7 @@ impl FumaroleSM { parent_offset: last_offset, offset, slot, - block_uid, parent_slot, - blockchain_id, - num_shards, commitment_level: cl, }; last_offset = offset; @@ -297,7 +302,7 @@ impl FumaroleSM { } #[inline] - fn missing_process_offset(&self) -> FumeOffset { + const fn missing_process_offset(&self) -> 
FumeOffset { self.committable_offset + 1 } @@ -327,3 +332,98 @@ impl FumaroleSM { self.slot_status_update_queue.is_empty() && self.blocked_slot_status_update.is_empty() } } + +#[cfg(test)] +mod tests { + + use {super::*, uuid::Uuid, yellowstone_grpc_proto::geyser::CommitmentLevel}; + + fn random_blockchain_event( + offset: FumeOffset, + slot: Slot, + commitment_level: CommitmentLevel, + ) -> BlockchainEvent { + let blockchain_id = Uuid::nil().as_bytes().to_vec(); + let block_uid = Uuid::new_v4().as_bytes().to_vec(); + BlockchainEvent { + offset: 1, + blockchain_id, + block_uid, + num_shards: 1, + slot, + parent_slot: None, + commitment_level: commitment_level.into(), + } + } + + #[test] + fn test_fumarole_sm_happy_path() { + let mut sm = FumaroleSM::new(0); + + let event = random_blockchain_event(1, 1, CommitmentLevel::Processed); + sm.queue_blockchain_event(vec![event.clone()]); + + // Slot status should not be available, since we didn't download it yet. + assert!(sm.pop_next_slot_status().is_none()); + + let download_req = sm.pop_slot_to_download().unwrap(); + + assert_eq!(download_req.slot, 1); + + assert!(sm.pop_slot_to_download().is_none()); + + sm.make_slot_download_progress(1, 0); + + let status = sm.pop_next_slot_status().unwrap(); + + assert_eq!(status.slot, 1); + assert_eq!(status.commitment_level, CommitmentLevel::Processed); + sm.mark_offset_as_processed(status.offset); + + // All subsequent commitment level should be available right away + let mut event2 = event.clone(); + event2.offset += 1; + event2.commitment_level = CommitmentLevel::Confirmed.into(); + sm.queue_blockchain_event(vec![event2.clone()]); + + // It should not cause new slot download request + assert!(sm.pop_slot_to_download().is_none()); + + let status = sm.pop_next_slot_status().unwrap(); + assert_eq!(status.slot, 1); + assert_eq!(status.commitment_level, CommitmentLevel::Confirmed); + sm.mark_offset_as_processed(status.offset); + + assert_eq!(sm.committable_offset, event2.offset); + } 
+ + #[test] + fn it_should_dedup_slot_status() { + let mut sm = FumaroleSM::new(0); + + let event = random_blockchain_event(1, 1, CommitmentLevel::Processed); + sm.queue_blockchain_event(vec![event.clone()]); + + // Slot status should not be available, since we didn't download it yet. + assert!(sm.pop_next_slot_status().is_none()); + + let download_req = sm.pop_slot_to_download().unwrap(); + + assert_eq!(download_req.slot, 1); + + assert!(sm.pop_slot_to_download().is_none()); + + sm.make_slot_download_progress(1, 0); + + let status = sm.pop_next_slot_status().unwrap(); + + assert_eq!(status.slot, 1); + assert_eq!(status.commitment_level, CommitmentLevel::Processed); + + // Putting the same event back should be ignored + sm.queue_blockchain_event(vec![event]); + + assert!(sm.pop_next_slot_status().is_none()); + assert!(sm.pop_slot_to_download().is_none()); + } +} diff --git a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs index 441ec66..1ecd163 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs @@ -1,23 +1,20 @@ use { - super::{FumaroleSM, FumeDownloadRequest, FumeOffset, FumeSlotStatus}, - crate::{ + super::{FumaroleSM, FumeDownloadRequest, FumeOffset}, + crate:: proto::{ self, data_command, BlockFilters, CommitOffset, ControlCommand, DataCommand, DownloadBlockShard, PollBlockchainHistory, - }, - util::collections::KeyedVecDeque, - }, + } + , solana_sdk::clock::Slot, std::{ - cmp::Reverse, - collections::{BTreeMap, BinaryHeap, HashMap, HashSet, VecDeque}, - f32::consts::E, + collections::{HashMap, VecDeque}, sync::Arc, time::{Duration, Instant}, }, tokio::{ sync::mpsc, - task::{self, JoinError, JoinSet}, + task::{self, JoinSet}, }, yellowstone_grpc_proto::geyser::{ self, SubscribeRequest, SubscribeUpdate, SubscribeUpdateSlot, @@ -75,7 +72,7 @@ pub(crate) struct TokioFumeDragonsmouthRuntime { last_commit: Instant, } -fn 
build_poll_history_cmd(from: Option) -> ControlCommand { +const fn build_poll_history_cmd(from: Option) -> ControlCommand { ControlCommand { command: Some(proto::control_command::Command::PollHist( // from None means poll the entire history from wherever we left off since last commit. @@ -84,7 +81,7 @@ fn build_poll_history_cmd(from: Option) -> ControlCommand { } } -fn build_commit_offset_cmd(offset: FumeOffset) -> ControlCommand { +const fn build_commit_offset_cmd(offset: FumeOffset) -> ControlCommand { ControlCommand { command: Some(proto::control_command::Command::CommitOffset( CommitOffset { offset }, @@ -390,7 +387,7 @@ impl TokioFumeDragonsmouthRuntime { Some(result) = self.data_plane_tasks.join_next_with_id() => { let (task_id, download_result) = result.expect("data plane task set"); - self.handle_data_plane_task_result(task_id, download_result); + self.handle_data_plane_task_result(task_id, download_result).await; } _ = tokio::time::sleep_until(commit_deadline.into()) => { diff --git a/crates/yellowstone-fumarole-client/src/util/collections.rs b/crates/yellowstone-fumarole-client/src/util/collections.rs index 948f6d5..c366228 100644 --- a/crates/yellowstone-fumarole-client/src/util/collections.rs +++ b/crates/yellowstone-fumarole-client/src/util/collections.rs @@ -3,12 +3,21 @@ use std::{ hash::Hash, }; -#[derive(Debug, Default)] +#[derive(Debug)] pub struct KeyedVecDeque { vec: VecDeque<(K, V)>, index: HashSet, } +impl Default for KeyedVecDeque { + fn default() -> Self { + Self { + vec: Default::default(), + index: Default::default(), + } + } +} + impl KeyedVecDeque where K: Eq + Hash + Clone, From 85b623a65a64c50233fefd6fb62a762149800d64 Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Tue, 15 Apr 2025 16:59:49 -0400 Subject: [PATCH 03/56] v2: Fumarole SDK --- Cargo.lock | 1 + Cargo.toml | 1 + crates/yellowstone-fumarole-client/Cargo.toml | 7 +- crates/yellowstone-fumarole-client/src/lib.rs | 451 ++++++++++++------ .../src/runtime/mod.rs | 16 +- 
.../src/runtime/tokio.rs | 168 +++++-- .../src/util/collections.rs | 3 + .../src/util/grpc.rs | 19 + .../src/util/mod.rs | 1 + examples/rust/src/bin/client.rs | 169 ++----- proto/fumarole_v2.proto | 19 +- 11 files changed, 502 insertions(+), 353 deletions(-) create mode 100644 crates/yellowstone-fumarole-client/src/util/grpc.rs diff --git a/Cargo.lock b/Cargo.lock index ea71d82..f956edb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5018,6 +5018,7 @@ name = "yellowstone-fumarole-client" version = "0.1.1-pre.2+solana.2.1.11" dependencies = [ "async-trait", + "futures", "http 1.2.0", "hyper 1.6.0", "prost", diff --git a/Cargo.toml b/Cargo.toml index 50c7dae..6aa540a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -19,6 +19,7 @@ publish = false [workspace.dependencies] async-trait = "0.1.88" clap = "4.5.7" +futures = "0.3.31" http = "1.2.0" hyper = "1.3.1" prost = "0.13.0" diff --git a/crates/yellowstone-fumarole-client/Cargo.toml b/crates/yellowstone-fumarole-client/Cargo.toml index f328a46..0edb103 100644 --- a/crates/yellowstone-fumarole-client/Cargo.toml +++ b/crates/yellowstone-fumarole-client/Cargo.toml @@ -10,6 +10,10 @@ license = { workspace = true } keywords = { workspace = true } publish = true +[features] +default = ["tokio"] +tokio = [] + [package.metadata] include = [ "proto/**", @@ -18,13 +22,14 @@ include = [ [dependencies] async-trait = { workspace = true } -solana-sdk = { workspace = true } +futures = { workspace = true } http = { workspace = true } hyper = { workspace = true } prost = { workspace = true } serde = { workspace = true, features = ["derive"] } serde_with = { workspace = true } serde_yaml = { workspace = true } +solana-sdk = { workspace = true } thiserror = { workspace = true } tokio = { workspace = true, features = ["time"] } tokio-stream = { workspace = true } diff --git a/crates/yellowstone-fumarole-client/src/lib.rs b/crates/yellowstone-fumarole-client/src/lib.rs index c7f8c0d..d9af359 100644 --- 
a/crates/yellowstone-fumarole-client/src/lib.rs +++ b/crates/yellowstone-fumarole-client/src/lib.rs @@ -8,32 +8,28 @@ pub(crate) mod util; use { config::FumaroleConfig, - core::num, - proto::{BlockFilters, BlockchainEvent, ControlCommand, PollBlockchainHistory}, - solana_sdk::{clock::Slot, commitment_config::CommitmentLevel, pubkey::Pubkey}, - std::{ - cmp::Reverse, - collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, VecDeque}, + proto::control_response::Response, + runtime::{ + tokio::{DataPlaneBidi, DataPlaneBidiFactory, TokioFumeDragonsmouthRuntime}, + FumaroleSM, }, - tokio::{ - sync::mpsc, - task::{self, JoinError, JoinSet}, + std::{ + collections::{HashMap, VecDeque}, + num::NonZeroU8, + sync::Arc, + time::{Duration, Instant}, }, - tokio_stream::{wrappers::ReceiverStream, StreamMap}, + tokio::sync::mpsc, + tokio_stream::wrappers::ReceiverStream, tonic::{ - async_trait, metadata::{ errors::{InvalidMetadataKey, InvalidMetadataValue}, Ascii, MetadataKey, MetadataValue, }, - service::Interceptor, + service::{interceptor::InterceptedService, Interceptor}, transport::{Channel, ClientTlsConfig}, }, - tower::{util::BoxService, ServiceBuilder, ServiceExt}, - util::collections::KeyedVecDeque, - yellowstone_grpc_proto::geyser::{ - SubscribeRequest, SubscribeRequestFilterAccounts, SubscribeRequestFilterTransactions, - }, + util::grpc::into_bounded_mpsc_rx, }; mod solana { @@ -48,6 +44,7 @@ mod geyser { pub use yellowstone_grpc_proto::geyser::*; } +#[allow(clippy::missing_const_for_fn)] #[allow(clippy::all)] pub mod proto { include!(concat!(env!("OUT_DIR"), "/fumarole_v2.rs")); @@ -56,28 +53,18 @@ pub mod proto { use proto::fumarole_client::FumaroleClient as TonicFumaroleClient; #[derive(Clone)] -struct TritonAuthInterceptor { - x_token: MetadataValue, -} - -#[derive(Clone)] -struct AsciiMetadataInterceptor { +struct FumeInterceptor { + x_token: Option>, metadata: HashMap, MetadataValue>, } -impl Interceptor for TritonAuthInterceptor { - fn call(&mut self, 
request: tonic::Request<()>) -> Result, tonic::Status> { - let mut request = request; - let metadata = request.metadata_mut(); - metadata.insert("x-token", self.x_token.clone()); - Ok(request) - } -} - -impl Interceptor for AsciiMetadataInterceptor { +impl Interceptor for FumeInterceptor { fn call(&mut self, request: tonic::Request<()>) -> Result, tonic::Status> { let mut request = request; let metadata = request.metadata_mut(); + if let Some(x_token) = &self.x_token { + metadata.insert("x-token", x_token.clone()); + } for (key, value) in &self.metadata { metadata.insert(key.clone(), value.clone()); } @@ -94,6 +81,14 @@ pub struct FumaroleClientBuilder { pub with_compression: bool, } +#[derive(Debug, thiserror::Error)] +pub enum InvalidMetadataHeader { + #[error(transparent)] + InvalidMetadataKey(#[from] InvalidMetadataKey), + #[error(transparent)] + InvalidMetadataValue(#[from] InvalidMetadataValue), +} + #[derive(Debug, thiserror::Error)] pub enum ConnectError { #[error(transparent)] @@ -102,43 +97,272 @@ pub enum ConnectError { TransportError(#[from] tonic::transport::Error), #[error(transparent)] InvalidXToken(#[from] tonic::metadata::errors::InvalidMetadataValue), + #[error(transparent)] + InvalidMetadataHeader(#[from] InvalidMetadataHeader), } -pub type FumaroleBoxedChannel = BoxService< - hyper::Request, - hyper::Response, - tonic::transport::Error, ->; +/// +/// Default gRPC buffer capacity +/// +pub const DEFAULT_DRAGONSMOUTH_CAPACITY: usize = 10000; + +/// +/// Default Fumarole commit offset interval +/// +pub const DEFAULT_COMMIT_INTERVAL: Duration = Duration::from_secs(60); -pub type BoxedTonicFumaroleClient = TonicFumaroleClient; +pub const DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT: u8 = 3; + +/// +/// Default number of parallel data streams +/// +pub const DEFAULT_PARA_DATA_STREAMS: u8 = 1; /// /// Yellowstone Fumarole gRPC Client /// +#[derive(Clone)] pub struct FumaroleClient { - inner: BoxedTonicFumaroleClient, + inner: TonicFumaroleClient>, } 
-#[async_trait::async_trait] -pub trait FumaroleSender { - // async fn send_request( - // &mut self, - // request: proto::SubscribeRequest, - // ) -> Result>, tonic::Status>; +#[derive(Debug, thiserror::Error)] +pub enum DragonsmouthSubscribeError { + #[error(transparent)] + GrpcStatus(#[from] tonic::Status), + #[error("grpc stream closed")] + StreamClosed, +} + +#[derive(Debug, thiserror::Error)] +pub enum FumaroleStreamError { + #[error(transparent)] + Custom(Box), + #[error("grpc stream closed")] + StreamClosed, +} + +/// +/// Configuration for the Fumarole subscription session +/// +pub struct FumaroleSubscribeConfig { + /// + /// Number of parallel data streams to open to fumarole + /// + pub num_data_streams: NonZeroU8, + /// + /// Commit interval for the fumarole client + /// + pub commit_interval: Duration, + /// + /// Maximum number of consecutive failed slot download attempts before failing the fumarole session. + /// + pub max_failed_slot_download_attempt: u8, + /// + /// Capacity of each data channel for the fumarole client + /// + pub data_channel_capacity: usize, +} + +impl Default for FumaroleSubscribeConfig { + fn default() -> Self { + Self { + num_data_streams: NonZeroU8::new(DEFAULT_PARA_DATA_STREAMS).unwrap(), + commit_interval: DEFAULT_COMMIT_INTERVAL, + max_failed_slot_download_attempt: DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT, + data_channel_capacity: DEFAULT_DRAGONSMOUTH_CAPACITY, + } + } +} + +pub enum FumeControlPlaneError { + Disconnected, +} + +pub enum FumeDataPlaneError { + Disconnected, +} + +pub enum FumaroleError { + ControlPlaneDisconnected, + DataPlaneDisconnected, + InvalidSubscribeRequest, +} + +impl From for FumaroleError { + fn from(status: tonic::Status) -> Self { + match status.code() { + tonic::Code::Unavailable => FumaroleError::ControlPlaneDisconnected, + tonic::Code::Internal => FumaroleError::DataPlaneDisconnected, + _ => FumaroleError::InvalidSubscribeRequest, + } + } +} + +/// +/// Dragonsmouth flavor fumarole session. 
+/// Mimics the same API as dragonsmouth but uses fumarole as the backend. +/// +pub struct DragonsmouthAdapterSession { + /// + /// Channel to send requests to the fumarole service. + /// If you don't need to change the subscribe request, you can drop this channel. + /// + pub sink: mpsc::Sender, + /// + /// Channel to receive updates from the fumarole service. + /// Dropping this channel will stop the fumarole session. + /// + pub source: mpsc::Receiver>, + /// + /// Handle to the fumarole session client runtime. + /// Dropping this handle does not stop the fumarole session. + /// + /// If you want to stop the fumarole session, you need to drop the [`DragonsmouthAdapterSession::source`] channel, + /// then you could wait for the handle to finish. + /// + pub runtime_handle: + tokio::task::JoinHandle>>, +} + +fn string_pairs_to_metadata_header( + headers: impl IntoIterator, impl AsRef)>, +) -> Result, MetadataValue>, InvalidMetadataHeader> { + headers + .into_iter() + .map(|(k, v)| { + let key = MetadataKey::from_bytes(k.as_ref().as_bytes())?; + let value: MetadataValue = v.as_ref().try_into()?; + Ok((key, value)) + }) + .collect() } impl FumaroleClient { + pub async fn connect(config: FumaroleConfig) -> Result { + let channel = Channel::from_shared(config.endpoint.clone())? + .tls_config(ClientTlsConfig::new().with_native_roots())? 
+ .connect() + .await?; + + Self::connect_with_channel(config, channel).await + } + + pub async fn connect_with_channel( + config: FumaroleConfig, + channel: tonic::transport::Channel, + ) -> Result { + let interceptor = FumeInterceptor { + x_token: config + .x_token + .map(|token: String| token.try_into()) + .transpose()?, + metadata: string_pairs_to_metadata_header(config.x_metadata)?, + }; + + let client = TonicFumaroleClient::with_interceptor(channel, interceptor) + .max_decoding_message_size(config.max_decoding_message_size_bytes); + + Ok(FumaroleClient { inner: client }) + } + /// /// Subscribe to a stream of updates from the Fumarole service /// + #[cfg(feature = "tokio")] pub async fn dragonsmouth_subscribe( + &mut self, consumer_group_name: S, request: geyser::SubscribeRequest, - ) -> mpsc::Receiver + config: FumaroleSubscribeConfig, + ) -> Result where S: AsRef, { - todo!() + let handle = tokio::runtime::Handle::current(); + self.dragonsmouth_subscribe_on(consumer_group_name, request, config, handle) + .await + } + + /// + /// Same as [`FumaroleClient::dragonsmouth_subscribe`] but allows you to specify a custom runtime handle + /// the underlying fumarole runtie will use + /// + #[cfg(feature = "tokio")] + pub async fn dragonsmouth_subscribe_on( + &mut self, + consumer_group_name: S, + request: geyser::SubscribeRequest, + config: FumaroleSubscribeConfig, + handle: tokio::runtime::Handle, + ) -> Result + where + S: AsRef, + { + use runtime::tokio::DragonsmouthSubscribeRequestBidi; + + let (dragonsmouth_outlet, dragonsmouth_inlet) = + mpsc::channel(DEFAULT_DRAGONSMOUTH_CAPACITY); + let (fume_control_plane_tx, fume_control_plane_rx) = mpsc::channel(100); + + let resp = self + .inner + .subscribe(ReceiverStream::new(fume_control_plane_rx)) + .await?; + let mut streaming = resp.into_inner(); + let fume_control_plane_tx = fume_control_plane_tx.clone(); + let control_response = streaming.message().await?.expect("none"); + let fume_control_plane_rx = 
into_bounded_mpsc_rx(100, streaming); + let response = control_response.response.expect("none"); + let Response::Init(initial_state) = response else { + panic!("unexpected initial response: {response:?}") + }; + + let sm = FumaroleSM::new(initial_state.last_committed_offset); + let data_bidi_factory = GrpcDataPlaneBidiFactory { + client: self.clone(), + channel_capacity: config.data_channel_capacity, + }; + + let mut data_bidi_vec = VecDeque::with_capacity(config.num_data_streams.get() as usize); + for _ in 0..config.num_data_streams.get() { + let data_bidi = data_bidi_factory.build().await; + data_bidi_vec.push_back(data_bidi); + } + + let (dm_tx, dm_rx) = mpsc::channel(100); + let dm_bidi = DragonsmouthSubscribeRequestBidi { + tx: dm_tx.clone(), + rx: dm_rx, + }; + + let tokio_rt = TokioFumeDragonsmouthRuntime { + rt: handle.clone(), + sm, + data_plane_bidi_factory: Arc::new(data_bidi_factory), + dragonsmouth_bidi: dm_bidi, + subscribe_request: request, + consumer_group_name: consumer_group_name.as_ref().to_string(), + control_plane_tx: fume_control_plane_tx, + control_plane_rx: fume_control_plane_rx, + data_plane_bidi_vec: data_bidi_vec, + data_plane_tasks: Default::default(), + data_plane_task_meta: Default::default(), + dragonsmouth_outlet, + download_to_retry: Default::default(), + download_attempts: Default::default(), + max_slot_download_attempt: config.max_failed_slot_download_attempt, + commit_interval: config.commit_interval, + last_commit: Instant::now(), + }; + + let jh = handle.spawn(tokio_rt.run()); + let dm_session = DragonsmouthAdapterSession { + sink: dm_tx, + source: dragonsmouth_inlet, + runtime_handle: jh, + }; + Ok(dm_session) } pub async fn list_consumer_groups( @@ -173,117 +397,28 @@ impl FumaroleClient { } } -#[derive(Debug, thiserror::Error)] -pub enum InvalidMetadataHeader { - #[error(transparent)] - InvalidMetadataKey(#[from] InvalidMetadataKey), - #[error(transparent)] - InvalidMetadataValue(#[from] InvalidMetadataValue), +#[cfg(feature 
= "tokio")] +pub(crate) struct GrpcDataPlaneBidiFactory { + client: FumaroleClient, + channel_capacity: usize, } -/// -/// A builder for creating a FumaroleClient. -/// -impl FumaroleClientBuilder { - /// - /// Add a metadata header to the client for each request. - /// - pub fn add_metadata_header( - mut self, - key: impl AsRef, - value: impl AsRef, - ) -> Result { - let key = MetadataKey::from_bytes(key.as_ref().as_bytes())?; - let value: MetadataValue = value.as_ref().try_into()?; - self.metadata.insert(key, value); - Ok(self) - } - - /// - /// Add multiple metadata headers to the client for each request. - /// - pub fn add_metadata_headers(self, headers: IT) -> Result - where - KV: AsRef, - IT: IntoIterator, - { - headers - .into_iter() - .try_fold(self, |this, (k, v)| this.add_metadata_header(k, v)) - } - - /// - /// Enable compression for the client. - /// - pub const fn enable_compression(mut self) -> Self { - self.with_compression = true; - self - } - - /// - /// Disable compression for the client. - /// - pub const fn disable_compression(mut self) -> Self { - self.with_compression = false; - self - } - - /// - /// Connect to a Fumarole service. - /// - pub async fn connect(self, config: FumaroleConfig) -> Result { - let tls_config = ClientTlsConfig::new().with_native_roots(); - let channel = Channel::from_shared(config.endpoint.clone())? - .tls_config(tls_config)? - .connect() - .await?; - self.connect_with_channel(config, channel).await - } - - /// - /// Connect to a Fumarole service with an existing channel. 
- /// - pub async fn connect_with_channel( - self, - config: FumaroleConfig, - channel: tonic::transport::Channel, - ) -> Result { - let x_token_layer = if let Some(x_token) = config.x_token { - let metadata = x_token.try_into()?; - let interceptor = TritonAuthInterceptor { x_token: metadata }; - Some(tonic::service::interceptor(interceptor)) - } else { - None - }; - - let metadata_layer = if self.metadata.is_empty() { - None - } else { - let interceptor = AsciiMetadataInterceptor { - metadata: self.metadata, - }; - Some(tonic::service::interceptor(interceptor)) - }; - - let svc = ServiceBuilder::new() - .option_layer(x_token_layer) - .option_layer(metadata_layer) - .service(channel) - .boxed(); - - let tonic_client = TonicFumaroleClient::new(svc) - .max_decoding_message_size(config.max_decoding_message_size_bytes); - - let tonic_client = if self.with_compression { - tonic_client - .accept_compressed(tonic::codec::CompressionEncoding::Gzip) - .send_compressed(tonic::codec::CompressionEncoding::Gzip) - } else { - tonic_client - }; - - Ok(FumaroleClient { - inner: tonic_client, - }) +#[cfg(feature = "tokio")] +#[async_trait::async_trait] +impl DataPlaneBidiFactory for GrpcDataPlaneBidiFactory { + async fn build(&self) -> DataPlaneBidi { + let mut client = self.client.clone(); + let (tx, rx) = mpsc::channel(self.channel_capacity); + let rx = ReceiverStream::new(rx); + let resp = client + .inner + .subscribe_data(rx) + .await + .expect("failed to subscribe"); + let streaming = resp.into_inner(); + + let rx = into_bounded_mpsc_rx(self.channel_capacity, streaming); + + DataPlaneBidi { tx, rx } } } diff --git a/crates/yellowstone-fumarole-client/src/runtime/mod.rs b/crates/yellowstone-fumarole-client/src/runtime/mod.rs index 8d9d323..736cf99 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/mod.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/mod.rs @@ -1,3 +1,4 @@ +#[cfg(feature = "tokio")] pub(crate) mod tokio; use { @@ -21,10 +22,6 @@ pub(crate) type 
FumeNumShards = u32; pub(crate) type FumeShardIdx = u32; -pub(crate) type FumeBlockShard = u32; - -pub(crate) type FumeDataBusId = u8; - pub(crate) type FumeOffset = u64; #[derive(Debug, Clone)] @@ -37,7 +34,6 @@ pub(crate) struct FumeDownloadRequest { #[derive(Clone, Debug)] pub(crate) struct FumeSlotStatus { - pub(crate) parent_offset: FumeOffset, pub(crate) offset: FumeOffset, pub(crate) slot: Slot, pub(crate) parent_slot: Option, @@ -208,7 +204,6 @@ impl FumaroleSM { let cl = geyser::CommitmentLevel::try_from(commitment_level) .expect("invalid commitment level"); let fume_slot_status = FumeSlotStatus { - parent_offset: last_offset, offset, slot, parent_slot, @@ -237,13 +232,6 @@ impl FumaroleSM { } } - /// - /// Returns true if there are slot to download, otherwise false. - /// - pub(crate) fn has_any_slot_to_download(&self) -> bool { - !self.slot_download_queue.is_empty() - } - /// /// Returns the [`Some(FumeDownloadRequest)`] to download if any, otherwise `None`. /// @@ -346,7 +334,7 @@ mod tests { let blockchain_id = Uuid::nil().as_bytes().to_vec(); let block_uid = Uuid::new_v4().as_bytes().to_vec(); BlockchainEvent { - offset: 1, + offset, blockchain_id, block_uid, num_shards: 1, diff --git a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs index 1ecd163..aa26627 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs @@ -1,11 +1,9 @@ use { super::{FumaroleSM, FumeDownloadRequest, FumeOffset}, - crate:: - proto::{ - self, data_command, BlockFilters, CommitOffset, ControlCommand, DataCommand, - DownloadBlockShard, PollBlockchainHistory, - } - , + crate::proto::{ + self, data_command, BlockFilters, CommitOffset, ControlCommand, DataCommand, + DownloadBlockShard, PollBlockchainHistory, + }, solana_sdk::clock::Slot, std::{ collections::{HashMap, VecDeque}, @@ -23,16 +21,16 @@ use { /// /// Data-Plane bidirectional stream 
-struct DataPlaneBidi { - tx: mpsc::Sender, - rx: mpsc::Receiver, +pub(crate) struct DataPlaneBidi { + pub tx: mpsc::Sender, + pub rx: mpsc::Receiver>, } /// /// Holds information about on-going data plane task. /// #[derive(Clone, Debug)] -struct DataPlaneTaskMeta { +pub(crate) struct DataPlaneTaskMeta { download_request: FumeDownloadRequest, scheduled_at: Instant, download_attempt: u8, @@ -42,34 +40,44 @@ struct DataPlaneTaskMeta { /// Base trait for Data-plane bidirectional stream factories. /// #[async_trait::async_trait] -pub trait DataPlaneBidiFactory { +pub(crate) trait DataPlaneBidiFactory { /// /// Builds a [`DataPlaneBidi`] /// async fn build(&self) -> DataPlaneBidi; } +/// +/// Mimics Dragonsmouth subscribe request bidirectional stream. +/// +pub struct DragonsmouthSubscribeRequestBidi { + #[allow(dead_code)] + pub tx: mpsc::Sender, + pub rx: mpsc::Receiver, +} + /// /// Fumarole runtime based on Tokio outputting Dragonsmouth only events. /// pub(crate) struct TokioFumeDragonsmouthRuntime { - rt: tokio::runtime::Handle, - sm: FumaroleSM, - block_filters: BlockFilters, - data_plane_bidi_factory: Arc, - subscribe_request: SubscribeRequest, - consumer_group_name: String, - control_plane_tx: mpsc::Sender, - control_plane_rx: mpsc::Receiver, - data_plane_bidi_vec: VecDeque, - data_plane_tasks: JoinSet>, - data_plane_task_meta: HashMap, - dragonsmouth_outlet: mpsc::Sender, - download_to_retry: VecDeque, - download_attempts: HashMap, - max_slot_download_attempt: u8, - commit_interval: Duration, - last_commit: Instant, + pub rt: tokio::runtime::Handle, + pub sm: FumaroleSM, + pub dragonsmouth_bidi: DragonsmouthSubscribeRequestBidi, + pub data_plane_bidi_factory: Arc, + pub subscribe_request: SubscribeRequest, + #[allow(dead_code)] + pub consumer_group_name: String, + pub control_plane_tx: mpsc::Sender, + pub control_plane_rx: mpsc::Receiver>, + pub data_plane_bidi_vec: VecDeque, + pub data_plane_tasks: JoinSet>, + pub data_plane_task_meta: HashMap, + pub 
dragonsmouth_outlet: mpsc::Sender>, + pub download_to_retry: VecDeque, + pub download_attempts: HashMap, + pub max_slot_download_attempt: u8, + pub commit_interval: Duration, + pub last_commit: Instant, } const fn build_poll_history_cmd(from: Option) -> ControlCommand { @@ -89,25 +97,31 @@ const fn build_commit_offset_cmd(offset: FumeOffset) -> ControlCommand { } } -struct DownloadBlockTask { +pub(crate) struct DownloadBlockTask { download_request: FumeDownloadRequest, - block_filter: BlockFilters, bidi: DataPlaneBidi, - dragonsmouth_oulet: mpsc::Sender, + filters: Option, + dragonsmouth_oulet: mpsc::Sender>, } -struct DownloadBlockCompleted { +pub(crate) struct DownloadBlockCompleted { bidi: DataPlaneBidi, } -enum DownloadBlockError { +#[derive(Debug, thiserror::Error)] +pub(crate) enum DownloadBlockError { + #[error("download block task disconnected")] Disconnected, + #[error("dragonsmouth outlet disconnected")] OutletDisconnected, + #[error("block shard not found")] BlockShardNotFound, + #[error(transparent)] + GrpcError(#[from] tonic::Status), } impl DownloadBlockTask { - async fn run(mut self) -> Result { + async fn run(self) -> Result { let DataPlaneBidi { tx, mut rx } = self.bidi; // Make sure the stream is empty @@ -123,7 +137,8 @@ impl DownloadBlockTask { let data_cmd = data_command::Command::DownloadBlockShard(DownloadBlockShard { blockchain_id: self.download_request.blockchain_id.to_vec(), block_uid: self.download_request.block_uid.to_vec(), - shard_idx: 0, // ONLY SUPPORTS 1 shard in V1. 
+ shard_idx: 0, + block_filters: self.filters, }); let data_cmd = DataCommand { command: Some(data_cmd), @@ -133,17 +148,19 @@ impl DownloadBlockTask { .map_err(|_| DownloadBlockError::Disconnected)?; loop { - let Some(data) = rx.recv().await else { + let Some(result) = rx.recv().await else { return Err(DownloadBlockError::Disconnected); }; + let data = result?; + let Some(resp) = data.response else { continue }; match resp { proto::data_response::Response::Update(subscribe_update) => { if self .dragonsmouth_oulet - .send(subscribe_update) + .send(Ok(subscribe_update)) .await .is_err() { @@ -177,6 +194,24 @@ impl DownloadBlockTask { } } +#[derive(Debug, thiserror::Error)] +pub enum RuntimeError { + #[error(transparent)] + GrpcError(#[from] tonic::Status), +} + +impl From for BlockFilters { + fn from(val: SubscribeRequest) -> Self { + BlockFilters { + accounts: val.accounts, + transactions: val.transactions, + entries: val.entry, + blocks_meta: val.blocks_meta, + commitment_level: val.commitment, + } + } +} + impl TokioFumeDragonsmouthRuntime { fn handle_control_response(&mut self, control_response: proto::ControlResponse) { let Some(response) = control_response.response else { @@ -197,6 +232,9 @@ impl TokioFumeDragonsmouthRuntime { proto::control_response::Response::Pong(_pong) => { tracing::trace!("pong"); } + proto::control_response::Response::Init(_init) => { + unreachable!("init should not be received here"); + } } } @@ -233,8 +271,8 @@ impl TokioFumeDragonsmouthRuntime { let download_task = DownloadBlockTask { download_request: download_request.clone(), - block_filter: self.block_filters.clone(), bidi: data_plane_bidi, + filters: Some(self.subscribe_request.clone().into()), dragonsmouth_oulet: self.dragonsmouth_outlet.clone(), }; @@ -263,7 +301,7 @@ impl TokioFumeDragonsmouthRuntime { &mut self, task_id: task::Id, result: Result, - ) { + ) -> Result<(), DownloadBlockError> { let Some(task_meta) = self.data_plane_task_meta.remove(&task_id) else { 
panic!("missing task meta") }; @@ -280,10 +318,10 @@ impl TokioFumeDragonsmouthRuntime { } Err(e) => { match e { - DownloadBlockError::Disconnected => { + x @ (DownloadBlockError::Disconnected | DownloadBlockError::GrpcError(_)) => { // We need to retry it if task_meta.download_attempt >= self.max_slot_download_attempt { - panic!("Failed to download slot {slot}") + return Err(x); } let data_plane_bidi = self.data_plane_bidi_factory.build().await; @@ -303,6 +341,7 @@ impl TokioFumeDragonsmouthRuntime { } } } + Ok(()) } async fn commit_offset(&mut self) { @@ -351,7 +390,7 @@ impl TokioFumeDragonsmouthRuntime { )), }; - if self.dragonsmouth_outlet.send(update).await.is_err() { + if self.dragonsmouth_outlet.send(Ok(update)).await.is_err() { return; } self.sm.mark_offset_as_processed(slot_status.offset); @@ -359,7 +398,17 @@ impl TokioFumeDragonsmouthRuntime { } } - async fn run(mut self) { + async fn unsafe_cancel_all_tasks(&mut self) { + self.data_plane_tasks.abort_all(); + self.data_plane_task_meta.clear(); + self.download_attempts.clear(); + + while (self.data_plane_tasks.join_next().await).is_some() { + // Drain all tasks + } + } + + pub(crate) async fn run(mut self) -> Result<(), Box> { let inital_load_history_cmd = build_poll_history_cmd(None); self.control_plane_tx .send(inital_load_history_cmd) @@ -377,17 +426,35 @@ impl TokioFumeDragonsmouthRuntime { self.poll_history_if_needed().await; self.schedule_download_task_if_any(); tokio::select! 
{ + Some(subscribe_request) = self.dragonsmouth_bidi.rx.recv() => { + self.subscribe_request = subscribe_request + } control_response = self.control_plane_rx.recv() => { - if let Some(control_response) = control_response { - self.handle_control_response(control_response); - } else { - break; + match control_response { + Some(Ok(control_response)) => { + tracing::trace!("control response received"); + self.handle_control_response(control_response); + } + Some(Err(e)) => { + tracing::error!("control plane error: {e}"); + return Err(Box::new(RuntimeError::GrpcError(e))); + } + None => { + tracing::trace!("control plane disconnected"); + break; + } } } Some(result) = self.data_plane_tasks.join_next_with_id() => { let (task_id, download_result) = result.expect("data plane task set"); - - self.handle_data_plane_task_result(task_id, download_result).await; + let result = self.handle_data_plane_task_result(task_id, download_result).await; + if let Err(e) = result { + self.unsafe_cancel_all_tasks().await; + if let DownloadBlockError::GrpcError(e) = e { + let _ = self.dragonsmouth_outlet.send(Err(e)).await; + } + break; + } } _ = tokio::time::sleep_until(commit_deadline.into()) => { @@ -396,7 +463,6 @@ impl TokioFumeDragonsmouthRuntime { } self.drain_slot_status().await; } + Ok(()) } } - -pub struct TokioFumarolHandle {} diff --git a/crates/yellowstone-fumarole-client/src/util/collections.rs b/crates/yellowstone-fumarole-client/src/util/collections.rs index c366228..3254dcd 100644 --- a/crates/yellowstone-fumarole-client/src/util/collections.rs +++ b/crates/yellowstone-fumarole-client/src/util/collections.rs @@ -22,6 +22,7 @@ impl KeyedVecDeque where K: Eq + Hash + Clone, { + #[allow(dead_code)] pub fn new() -> Self { KeyedVecDeque { vec: VecDeque::new(), @@ -38,6 +39,7 @@ where } } + #[allow(dead_code)] pub fn push_front(&mut self, key: K, item: V) -> bool { if self.index.insert(key.clone()) { self.vec.push_front((key, item)); @@ -56,6 +58,7 @@ where } } + 
#[allow(dead_code)] pub fn is_empty(&self) -> bool { self.vec.is_empty() } diff --git a/crates/yellowstone-fumarole-client/src/util/grpc.rs b/crates/yellowstone-fumarole-client/src/util/grpc.rs new file mode 100644 index 0000000..4373c04 --- /dev/null +++ b/crates/yellowstone-fumarole-client/src/util/grpc.rs @@ -0,0 +1,19 @@ +use {tokio::sync::mpsc, tonic::Streaming}; + +pub fn into_bounded_mpsc_rx( + capacity: usize, + mut streaming: Streaming, +) -> mpsc::Receiver> +where + T: Send + 'static, +{ + let (tx, rx) = mpsc::channel(capacity); + tokio::spawn(async move { + while let Some(result) = streaming.message().await.transpose() { + if tx.send(result).await.is_err() { + break; + } + } + }); + rx +} diff --git a/crates/yellowstone-fumarole-client/src/util/mod.rs b/crates/yellowstone-fumarole-client/src/util/mod.rs index 2e4fe9b..7777d52 100644 --- a/crates/yellowstone-fumarole-client/src/util/mod.rs +++ b/crates/yellowstone-fumarole-client/src/util/mod.rs @@ -1 +1,2 @@ pub mod collections; +pub mod grpc; diff --git a/examples/rust/src/bin/client.rs b/examples/rust/src/bin/client.rs index 98cb309..6267c1e 100644 --- a/examples/rust/src/bin/client.rs +++ b/examples/rust/src/bin/client.rs @@ -1,12 +1,14 @@ use { clap::Parser, solana_sdk::{bs58, pubkey::Pubkey}, - std::path::PathBuf, - tokio::{sync::mpsc, task::JoinSet}, + std::{collections::HashMap, path::PathBuf}, yellowstone_fumarole_client::{ - config::FumaroleConfig, proto::SubscribeRequest, FumaroleClient, FumaroleClientBuilder, + config::FumaroleConfig, DragonsmouthAdapterSession, FumaroleClient, FumaroleSubscribeConfig, + }, + yellowstone_grpc_proto::geyser::{ + subscribe_update::UpdateOneof, SubscribeRequest, SubscribeRequestFilterAccounts, + SubscribeRequestFilterTransactions, SubscribeUpdateAccount, SubscribeUpdateTransaction, }, - yellowstone_grpc_proto::geyser::{SubscribeUpdateAccount, SubscribeUpdateTransaction}, }; #[derive(Debug, Clone, Parser)] @@ -32,32 +34,6 @@ struct SubscribeArgs { #[clap(long)] 
cg_name: String, - /// List of account pubkeys that we want to suscribe to account updates - #[clap(long, required = false)] - accounts: Option>, - - /// List of owner pubkeys that we want to suscribe to account updates - #[clap(long, required = false)] - owners: Option>, - - /// A transaction is included if it has at least one of the provided accounts in its list of instructions - #[clap(long, required = false)] - tx_includes: Option>, - - /// A transaction is excluded if it has at least one of the provided accounts in its list of instructions - #[clap(long, required = false)] - tx_excludes: Option>, - - /// A transaction is included if all of the provided accounts in its list of instructions - #[clap(long, required = false)] - tx_requires: Option>, - - #[clap(long, required = false)] - include_vote_tx: Option, - - #[clap(long, required = false)] - include_failed_tx: Option, - /// Number of parallel streams to open: must be lower or equal to the size of your consumer group, otherwise the program will return an error #[clap(long)] par: Option, @@ -78,101 +54,50 @@ fn summarize_tx(tx: SubscribeUpdateTransaction) -> Option { Some(format!("tx,{slot},{sig}")) } -async fn subscribe_with_request( - mut fumarole: FumaroleClient, - request: SubscribeRequest, - out_tx: mpsc::Sender, -) { - // NOTE: Make sure send request before giving the stream to the service - // Otherwise, the service will not be able to send the response - // This is due to how fumarole works in the background for auto-commit offset management. 
- let rx = fumarole - .subscribe_with_request(request) - .await - .expect("Failed to subscribe to Fumarole service"); - println!("Subscribed to Fumarole service"); - println!("Request sent"); - let mut rx = rx.into_inner(); - - loop { - match rx.message().await { - Ok(Some(event)) => { - let message = if let Some(oneof) = event.update_oneof { - match oneof { - yellowstone_grpc_proto::geyser::subscribe_update::UpdateOneof::Account(account_update) => { - summarize_account(account_update) - } - yellowstone_grpc_proto::geyser::subscribe_update::UpdateOneof::Transaction(tx) => { - summarize_tx(tx) - } - _ => None, - } - } else { - None - }; - if let Some(message) = message { - if out_tx.send(message).await.is_err() { - break; - } - } - } - Ok(None) => println!("Stream finished!"), - Err(e) => { - eprintln!("Error receiving event: {:?}", e); - break; - } - } - } -} - async fn subscribe(args: SubscribeArgs, config: FumaroleConfig) { - let accounts = args.accounts; - let owners = args.owners; - let tx_includes = args.tx_includes; - let tx_requires = args.tx_requires; - let tx_excludes = args.tx_excludes; - let requests = yellowstone_fumarole_client::SubscribeRequestBuilder::default() - .with_accounts(accounts) - .with_owners(owners) - .with_tx_includes(tx_includes) - .with_tx_requires(tx_requires) - .with_tx_excludes(tx_excludes) - .build_vec(args.cg_name, args.par.unwrap_or(1)); - - let mut task_set = JoinSet::new(); - - let (shared_tx, mut rx) = mpsc::channel(1000); - for request in requests { - let fumarole = FumaroleClientBuilder::default() - .add_metadata_headers(&config.x_metadata) - .expect("Failed to add metadata headers") - .enable_compression() - .connect(config.clone()) - .await - .expect("Failed to connect to Fumarole service"); - let tx = shared_tx.clone(); - task_set.spawn(subscribe_with_request(fumarole, request, tx)); - } + // This request listen for all account updates and transaction updates + let request = SubscribeRequest { + accounts: 
HashMap::from([("f1".to_owned(), SubscribeRequestFilterAccounts::default())]), + transactions: HashMap::from([( + "f1".to_owned(), + SubscribeRequestFilterTransactions::default(), + )]), + ..Default::default() + }; + + let mut fumarole_client = FumaroleClient::connect(config) + .await + .expect("Failed to connect to fumarole"); - loop { - tokio::select! { - maybe = task_set.join_next() => { - let result = maybe.expect("no task"); - if let Err(e) = result { - eprintln!("Task failed: {:?}", e); - } - break - } - maybe = rx.recv() => { - match maybe { - Some(message) => { - println!("{}", message); - } - None => { - break; - } - } + let subscribe_config = FumaroleSubscribeConfig { + ..Default::default() + }; + let dragonsmouth_session = fumarole_client + .dragonsmouth_subscribe(args.cg_name, request, subscribe_config) + .await + .expect("Failed to subscribe"); + + let DragonsmouthAdapterSession { + sink: _, + mut source, + runtime_handle: _, + } = dragonsmouth_session; + + while let Some(result) = source.recv().await { + let event = result.expect("Failed to receive event"); + + let message = if let Some(oneof) = event.update_oneof { + match oneof { + UpdateOneof::Account(account_update) => summarize_account(account_update), + UpdateOneof::Transaction(tx) => summarize_tx(tx), + _ => None, } + } else { + None + }; + + if let Some(message) = message { + println!("{}", message); } } } diff --git a/proto/fumarole_v2.proto b/proto/fumarole_v2.proto index 8ef50c8..7b0fa13 100644 --- a/proto/fumarole_v2.proto +++ b/proto/fumarole_v2.proto @@ -51,13 +51,14 @@ message BlockFilters { map transactions = 2; map entries = 3; map blocks_meta = 4; - geyser.CommitmentLevel commitment_level = 5; + optional geyser.CommitmentLevel commitment_level = 5; } message DownloadBlockShard { bytes blockchain_id = 1; bytes block_uid = 2; uint32 shard_idx = 3; + optional BlockFilters blockFilters = 4; } @@ -71,9 +72,8 @@ message Pong { message DataCommand { oneof command { - BlockFilters 
update_filter = 1; - DownloadBlockShard download_block_shard = 2; - Ping ping = 3; + DownloadBlockShard download_block_shard = 1; + Ping ping = 2; } } @@ -138,12 +138,17 @@ message CommitOffsetResult { uint64 offset = 1; } +message InitialConsumerGroupState { + uint64 last_committed_offset = 1; + bytes blockchain_id = 2; +} message ControlResponse { oneof response { - CommitOffsetResult commit_offset = 1; - BlockchainHistory poll_next = 2; - Pong pong = 3; + InitialConsumerGroupState init = 1; + CommitOffsetResult commit_offset = 2; + BlockchainHistory poll_next = 3; + Pong pong = 4; } } From d8a462d4982325e1372d87d3c857d3a57a1dfe32 Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Wed, 16 Apr 2025 10:51:48 -0400 Subject: [PATCH 04/56] added shard_id to blockchain event --- crates/yellowstone-fumarole-client/src/runtime/mod.rs | 2 ++ crates/yellowstone-fumarole-client/src/runtime/tokio.rs | 5 ++++- proto/fumarole_v2.proto | 3 +++ 3 files changed, 9 insertions(+), 1 deletion(-) diff --git a/crates/yellowstone-fumarole-client/src/runtime/mod.rs b/crates/yellowstone-fumarole-client/src/runtime/mod.rs index 736cf99..ab8c977 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/mod.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/mod.rs @@ -191,6 +191,7 @@ impl FumaroleSM { slot, parent_slot, commitment_level, + blockchain_shard_id: _, /*First version this is value does not mean nothing */ } = events; if offset < last_offset { @@ -341,6 +342,7 @@ mod tests { slot, parent_slot: None, commitment_level: commitment_level.into(), + blockchain_shard_id: 0, } } diff --git a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs index aa26627..dec7a48 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs @@ -92,7 +92,10 @@ const fn build_poll_history_cmd(from: Option) -> ControlCommand { const fn build_commit_offset_cmd(offset: 
FumeOffset) -> ControlCommand { ControlCommand { command: Some(proto::control_command::Command::CommitOffset( - CommitOffset { offset }, + CommitOffset { + offset, + shard_id: 0, /*ALWAYS 0-FOR FIRST VERSION OF FUMAROLE */ + }, )), } } diff --git a/proto/fumarole_v2.proto b/proto/fumarole_v2.proto index 7b0fa13..db5d15b 100644 --- a/proto/fumarole_v2.proto +++ b/proto/fumarole_v2.proto @@ -105,6 +105,7 @@ message DataResponse { message CommitOffset { uint64 offset = 1; + int32 shard_id = 2; } @@ -120,6 +121,7 @@ message BlockchainEvent { uint64 slot = 5; optional uint64 parent_slot = 6; geyser.CommitmentLevel commitment_level = 7; + int32 blockchain_shard_id = 8; } message BlockchainHistory { @@ -136,6 +138,7 @@ message ControlCommand { message CommitOffsetResult { uint64 offset = 1; + int32 shard_id = 2; } message InitialConsumerGroupState { From 40f962261ea592321e572b87df52f94c28814aa2 Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Thu, 17 Apr 2025 16:27:10 -0400 Subject: [PATCH 05/56] v2: fume rust cli --- Cargo.lock | 232 +++++++++- Cargo.toml | 10 +- apps/fume/Cargo.toml | 29 ++ apps/fume/src/main.rs | 405 ++++++++++++++++++ crates/yellowstone-fumarole-client/Cargo.toml | 7 +- crates/yellowstone-fumarole-client/src/lib.rs | 71 ++- .../src/metrics.rs | 153 +++++++ .../src/runtime/mod.rs | 54 ++- .../src/runtime/tokio.rs | 89 +++- .../src/util/collections.rs | 4 + examples/rust/src/bin/client.rs | 7 +- proto/fumarole_v2.proto | 45 +- 12 files changed, 1039 insertions(+), 67 deletions(-) create mode 100644 apps/fume/Cargo.toml create mode 100644 apps/fume/src/main.rs create mode 100644 crates/yellowstone-fumarole-client/src/metrics.rs diff --git a/Cargo.lock b/Cargo.lock index f956edb..ae69166 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -629,6 +629,12 @@ dependencies = [ "serde", ] +[[package]] +name = "bytecount" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5ce89b21cab1437276d2650d57e971f9d548a2d9037cc231abdc0562b97498ce" + [[package]] name = "bytemuck" version = "1.21.0" @@ -730,6 +736,16 @@ dependencies = [ "clap_derive", ] +[[package]] +name = "clap-verbosity-flag" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2678fade3b77aa3a8ff3aae87e9c008d3fb00473a41c71fbf74e91c8c7b37e84" +dependencies = [ + "clap", + "log", +] + [[package]] name = "clap_builder" version = "4.5.29" @@ -1206,6 +1222,26 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "fume" +version = "0.1.1+solana.2.1.11" +dependencies = [ + "clap", + "clap-verbosity-flag", + "futures", + "serde_yaml", + "solana-sdk", + "tabled", + "tokio", + "tokio-stream", + "tonic", + "tracing", + "tracing-subscriber", + "yellowstone-fumarole-client", + "yellowstone-grpc-client", + "yellowstone-grpc-proto", +] + [[package]] name = "futures" version = "0.3.31" @@ -1989,6 +2025,15 @@ version = "0.4.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f" +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + [[package]] name = "matchit" version = "0.7.3" @@ -2063,6 +2108,16 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "defc4c55412d89136f966bbb339008b474350e5e6e78d2714439c386b3137a03" +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + [[package]] name = "num" version = "0.2.1" @@ -2252,6 +2307,23 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "overload" +version = "0.1.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + +[[package]] +name = "papergrid" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b915f831b85d984193fdc3d3611505871dc139b2534530fa01c1a6a6707b6723" +dependencies = [ + "bytecount", + "fnv", + "unicode-width", +] + [[package]] name = "parking_lot" version = "0.12.3" @@ -2408,6 +2480,28 @@ dependencies = [ "toml_edit", ] +[[package]] +name = "proc-macro-error-attr2" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5" +dependencies = [ + "proc-macro2", + "quote", +] + +[[package]] +name = "proc-macro-error2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802" +dependencies = [ + "proc-macro-error-attr2", + "proc-macro2", + "quote", + "syn 2.0.98", +] + [[package]] name = "proc-macro2" version = "1.0.93" @@ -2417,6 +2511,21 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "prometheus" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d33c28a30771f7f96db69893f78b857f7450d7e0237e9c8fc6427a81bae7ed1" +dependencies = [ + "cfg-if", + "fnv", + "lazy_static", + "memchr", + "parking_lot", + "protobuf", + "thiserror", +] + [[package]] name = "prost" version = "0.13.4" @@ -2469,6 +2578,12 @@ dependencies = [ "prost", ] +[[package]] +name = "protobuf" +version = "2.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "106dd99e98437432fed6519dedecfade6a06a73bb7b2a1e019fdd2bee5778d94" + [[package]] name = "protobuf-src" version = "1.1.0+21.5" @@ -2584,8 +2699,17 @@ checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", - 
"regex-automata", - "regex-syntax", + "regex-automata 0.4.9", + "regex-syntax 0.8.5", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", ] [[package]] @@ -2596,9 +2720,15 @@ checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", - "regex-syntax", + "regex-syntax 0.8.5", ] +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + [[package]] name = "regex-syntax" version = "0.8.5" @@ -2977,12 +3107,30 @@ dependencies = [ "keccak", ] +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + [[package]] name = "shlex" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" +[[package]] +name = "signal-hook-registry" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" +dependencies = [ + "libc", +] + [[package]] name = "signature" version = "1.6.4" @@ -4236,6 +4384,29 @@ dependencies = [ "libc", ] +[[package]] +name = "tabled" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121d8171ee5687a4978d1b244f7d99c43e7385a272185a2f1e1fa4dc0979d444" +dependencies = [ + "papergrid", + "tabled_derive", +] + +[[package]] +name = "tabled_derive" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "52d9946811baad81710ec921809e2af67ad77719418673b2a3794932d57b7538" +dependencies = [ + "heck", + "proc-macro-error2", + "proc-macro2", + "quote", + "syn 2.0.98", +] + [[package]] name = "tempfile" version = "3.16.0" @@ -4270,6 +4441,16 @@ dependencies = [ "syn 2.0.98", ] +[[package]] +name = "thread_local" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +dependencies = [ + "cfg-if", + "once_cell", +] + [[package]] name = "time" version = "0.3.37" @@ -4337,6 +4518,7 @@ dependencies = [ "libc", "mio", "pin-project-lite", + "signal-hook-registry", "socket2", "tokio-macros", "windows-sys 0.52.0", @@ -4560,6 +4742,36 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" dependencies = [ "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", ] [[package]] @@ -4580,6 +4792,12 @@ version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a210d160f08b701c8721ba1c726c11662f877ea6b7094007e1ca9a1041945034" +[[package]] +name = "unicode-width" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" + [[package]] name = 
"universal-hash" version = "0.5.1" @@ -4659,6 +4877,12 @@ dependencies = [ "getrandom 0.3.1", ] +[[package]] +name = "valuable" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" + [[package]] name = "vcpkg" version = "0.2.15" @@ -5021,6 +5245,8 @@ dependencies = [ "futures", "http 1.2.0", "hyper 1.6.0", + "lazy_static", + "prometheus", "prost", "protobuf-src", "serde", diff --git a/Cargo.toml b/Cargo.toml index 6aa540a..4051b37 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,5 +1,6 @@ [workspace] -members = [ +members = [ + "apps/fume", "crates/yellowstone-fumarole-client", "examples/rust", ] @@ -19,15 +20,19 @@ publish = false [workspace.dependencies] async-trait = "0.1.88" clap = "4.5.7" +clap-verbosity-flag = "3.0.2" futures = "0.3.31" http = "1.2.0" hyper = "1.3.1" +lazy_static = "~1.5.0" +prometheus = "~0.13.0" prost = "0.13.0" protobuf-src = "1.1.0" serde = "1.0.203" serde_with = "3.8.1" serde_yaml = "0.9.34" solana-sdk = "=2.1.11" +tabled = "0.18.0" thiserror = "1.0.61" tokio = "1.38.0" tokio-stream = "0.1.11" @@ -35,6 +40,7 @@ tonic = "0.12.3" tonic-build = "0.12.3" tower = "0.5.2" tracing = "0.1.41" +tracing-subscriber = { version = "0.3.18", features = ["env-filter"] } uuid = { version = "1.16.0" } yellowstone-fumarole-client = { path = "crates/yellowstone-fumarole-client" } yellowstone-grpc-client = "5.0.0" @@ -43,4 +49,4 @@ yellowstone-grpc-proto = "5.0.0" [workspace.lints.clippy] clone_on_ref_ptr = "deny" missing_const_for_fn = "deny" -trivially_copy_pass_by_ref = "deny" \ No newline at end of file +trivially_copy_pass_by_ref = "deny" diff --git a/apps/fume/Cargo.toml b/apps/fume/Cargo.toml new file mode 100644 index 0000000..1e0bddb --- /dev/null +++ b/apps/fume/Cargo.toml @@ -0,0 +1,29 @@ +[package] +name = "fume" +authors.workspace = true +version.workspace = true +edition.workspace = true +homepage.workspace = true +repository.workspace 
= true +license.workspace = true +keywords.workspace = true +publish.workspace = true + +[dependencies] +clap = { workspace = true, features = ["derive"] } +clap-verbosity-flag = { workspace = true } +futures = { workspace = true } +solana-sdk = { workspace = true } +serde_yaml = { workspace = true } +tabled = { workspace = true } +tokio = { workspace = true, features = ["rt-multi-thread", "signal"] } +tokio-stream = { workspace = true } +tonic = { workspace = true } +yellowstone-fumarole-client = { workspace = true } +yellowstone-grpc-client = { workspace = true } +yellowstone-grpc-proto = { workspace = true } +tracing = { workspace = true } +tracing-subscriber = { workspace = true, features = ["env-filter"] } + +[lints] +workspace = true diff --git a/apps/fume/src/main.rs b/apps/fume/src/main.rs new file mode 100644 index 0000000..70077ac --- /dev/null +++ b/apps/fume/src/main.rs @@ -0,0 +1,405 @@ +use { + clap::Parser, + futures::{future::BoxFuture, FutureExt}, + solana_sdk::{bs58, pubkey::Pubkey}, + std::{ + collections::HashMap, + io::{stderr, stdout, IsTerminal}, + path::PathBuf, + }, + tabled::{builder::Builder, Table}, + tokio::{ + io::{self, AsyncBufReadExt, BufReader}, + signal::unix::{signal, SignalKind}, + }, + tonic::Code, + tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter}, + yellowstone_fumarole_client::{ + config::FumaroleConfig, + proto::{ + ConsumerGroupInfo, CreateConsumerGroupRequest, DeleteConsumerGroupRequest, + GetConsumerGroupInfoRequest, InitialOffsetPolicy, ListConsumerGroupsRequest, + }, + DragonsmouthAdapterSession, FumaroleClient, + }, + yellowstone_grpc_proto::geyser::{ + subscribe_update::UpdateOneof, CommitmentLevel, SubscribeRequest, + SubscribeRequestFilterAccounts, SubscribeRequestFilterSlots, + SubscribeRequestFilterTransactions, SubscribeUpdateAccount, SubscribeUpdateSlot, + SubscribeUpdateTransaction, + }, +}; + +#[derive(Debug, Clone, Parser)] +#[clap(author, version, about = "Yellowstone gRPC 
ScyllaDB Tool")] +struct Args { + /// Path to static config file + #[clap(long)] + config: PathBuf, + + #[clap(flatten)] + verbose: clap_verbosity_flag::Verbosity, + + #[clap(subcommand)] + action: Action, +} + +#[derive(Debug, Clone, Parser)] +enum Action { + /// Get Consumer Group Info + GetCgInfo(GetCgInfoArgs), + /// Create a consumer group + CreateCg(CreateCgArgs), + /// Delete a consumer group + DeleteCg(DeleteCgArgs), + /// List all consumer groups + ListCg, + /// Delete all consumer groups + DeleteAllCg, + /// Subscribe to fumarole events + Subscribe(SubscribeArgs), +} + +#[derive(Debug, Clone, Parser)] +pub struct GetCgInfoArgs { + /// Name of the consumer group to get info for + #[clap(long)] + name: String, +} + +#[derive(Debug, Clone, Parser)] +pub struct CreateCgArgs { + /// Name of the consumer group to create + #[clap(long)] + name: String, +} + +#[derive(Debug, Clone, Parser)] +pub struct DeleteCgArgs { + /// Name of the consumer group to delete + #[clap(long)] + name: String, +} + +#[derive(Debug, Clone, Parser)] +struct SubscribeArgs { + /// Name of the consumer group to subscribe to + #[clap(long)] + cg_name: String, +} + +fn summarize_account(account: SubscribeUpdateAccount) -> Option { + let slot = account.slot; + let account = account.account?; + let pubkey = Pubkey::try_from(account.pubkey).expect("Failed to parse pubkey"); + let owner = Pubkey::try_from(account.owner).expect("Failed to parse owner"); + Some(format!("account,{},{},{}", slot, pubkey, owner)) +} + +fn summarize_tx(tx: SubscribeUpdateTransaction) -> Option { + let slot = tx.slot; + let tx = tx.transaction?; + let sig = bs58::encode(tx.signature).into_string(); + Some(format!("tx,{slot},{sig}")) +} + +fn build_consumer_group_table(infos: IT) -> Table +where + IT: IntoIterator, +{ + let mut b = Builder::default(); + + b.push_record(vec!["Uid", "Name", "Stale"]); + for info in infos { + let uid = info.id; + let name = info.consumer_group_name; + let stale = info.is_stale; + 
b.push_record(vec![uid, name, stale.to_string()]); + } + + b.build() +} + +async fn get_cg_info(args: GetCgInfoArgs, mut client: FumaroleClient) { + let GetCgInfoArgs { name } = args; + + let request = GetConsumerGroupInfoRequest { + consumer_group_name: name.clone(), + }; + + let response = client.get_consumer_group_info(request).await; + + match response { + Ok(response) => { + let info = response.into_inner(); + let table = build_consumer_group_table(vec![info.clone()]); + println!("{}", table); + } + Err(e) => { + if e.code() == Code::NotFound { + eprintln!("Consumer group {name} not found"); + return; + } + eprintln!( + "Failed to get consumer group info: {} {}", + e.code(), + e.message() + ); + } + } +} + +async fn create_cg(args: CreateCgArgs, mut client: FumaroleClient) { + let CreateCgArgs { name } = args; + let request = CreateConsumerGroupRequest { + consumer_group_name: name.clone(), + initial_offset_policy: InitialOffsetPolicy::Latest.into(), + }; + + let result = client.create_consumer_group(request).await; + // .expect("Failed to create consumer group"); + + match result { + Ok(_) => { + println!("Consumer group {name} created!"); + } + Err(e) => { + if e.code() == Code::AlreadyExists { + eprintln!("Consumer group {name} already exists"); + return; + } + eprintln!( + "Failed to create consumer group: {} {}", + e.code(), + e.message() + ); + } + } +} + +async fn list_all_cg(mut client: FumaroleClient) { + let request = ListConsumerGroupsRequest {}; + let response = client + .list_consumer_groups(request) + .await + .expect("Failed to list consumer groups"); + + let infos = response.into_inner().consumer_groups; + if infos.is_empty() { + println!("No consumer groups found"); + return; + } + let table = build_consumer_group_table(infos); + println!("{}", table); +} + +async fn delete_cg(args: DeleteCgArgs, mut client: FumaroleClient) { + let DeleteCgArgs { name } = args; + let request = DeleteConsumerGroupRequest { + consumer_group_name: name.clone(), + 
}; + let response = client + .delete_consumer_group(request) + .await + .expect("Failed to list consumer groups"); + + if response.into_inner().success { + println!("Consumer group {name} deleted"); + } else { + eprintln!("Failed to delete consumer group {name}"); + } +} + +async fn prompt_yes_no(question: &str) -> io::Result { + let stdin = io::stdin(); + let mut reader = BufReader::new(stdin).lines(); + + println!("{question} [y/n]"); + + let Some(line) = reader.next_line().await? else { + return Ok(false); + }; + + match line.trim().to_lowercase().as_str() { + "y" | "yes" => Ok(true), + _ => Ok(false), + } +} + +async fn delete_all_cg(mut client: FumaroleClient) { + let request = ListConsumerGroupsRequest {}; + let response = client + .list_consumer_groups(request) + .await + .expect("Failed to list consumer groups"); + + let infos = response.into_inner().consumer_groups; + + if infos.is_empty() { + println!("No consumer groups found"); + return; + } + + let table = build_consumer_group_table(infos.clone()); + + println!("{}", table); + + let yes = prompt_yes_no("Are you sure you want to delete all consumer groups?") + .await + .expect("Failed to read input"); + + if !yes { + println!("Aborting delete operation"); + return; + } + + for info in infos { + let name = info.consumer_group_name; + let request = DeleteConsumerGroupRequest { + consumer_group_name: name.clone(), + }; + client + .delete_consumer_group(request) + .await + .expect("Failed to delete consumer group"); + println!("Consumer group {name} deleted"); + } +} + +pub fn create_shutdown() -> BoxFuture<'static, ()> { + let mut sigint = signal(SignalKind::interrupt()).expect("Failed to create signal"); + let mut sigterm = signal(SignalKind::terminate()).expect("Failed to create signal"); + async move { + tokio::select! 
{ + _ = sigint.recv() => {}, + _ = sigterm.recv() => {} + }; + } + .boxed() +} + +async fn subscribe(mut client: FumaroleClient, args: SubscribeArgs) { + let SubscribeArgs { cg_name } = args; + + // This request listen for all account updates and transaction updates + let request = SubscribeRequest { + accounts: HashMap::from([("f1".to_owned(), SubscribeRequestFilterAccounts::default())]), + transactions: HashMap::from([( + "f1".to_owned(), + SubscribeRequestFilterTransactions::default(), + )]), + slots: HashMap::from([("f1".to_owned(), SubscribeRequestFilterSlots::default())]), + ..Default::default() + }; + + println!("Subscribing to consumer group {}", cg_name); + let dragonsmouth_session = client + .dragonsmouth_subscribe(cg_name.clone(), request) + .await + .expect("Failed to subscribe"); + println!("Subscribed to consumer group {}", cg_name); + let DragonsmouthAdapterSession { + sink: _, + mut source, + runtime_handle: _, + } = dragonsmouth_session; + + let mut shutdown = create_shutdown(); + + loop { + tokio::select! 
{ + _ = &mut shutdown => { + println!("Shutting down..."); + break; + } + result = source.recv() => { + let Some(result) = result else { + break; + }; + + let event = result.expect("Failed to receive event"); + + let message = if let Some(oneof) = event.update_oneof { + match oneof { + UpdateOneof::Account(account_update) => summarize_account(account_update), + UpdateOneof::Transaction(tx) => summarize_tx(tx), + UpdateOneof::Slot(slot) => { + let SubscribeUpdateSlot { + slot, + parent, + status, + dead_error: _ + } = slot; + let cl = CommitmentLevel::try_from(status).unwrap(); + Some(format!("slot={slot}, parent={parent:?}, status={cl:?}")) + } + _ => None, + } + } else { + None + }; + + if let Some(message) = message { + println!("{}", message); + } + } + } + } +} + +#[allow(dead_code)] +fn setup_tracing_test_many( + modules: impl IntoIterator, +) -> Result<(), tracing_subscriber::util::TryInitError> { + let is_atty = stdout().is_terminal() && stderr().is_terminal(); + let io_layer = tracing_subscriber::fmt::layer() + .with_ansi(is_atty) + .with_line_number(true); + + let directives = modules + .into_iter() + .fold(EnvFilter::default(), |filter, module| { + filter.add_directive(format!("{module}=debug").parse().expect("invalid module")) + }); + + tracing_subscriber::registry() + .with(io_layer) + .with(directives) + .try_init() +} + +#[tokio::main] +async fn main() { + let args = Args::parse(); + + // setup_tracing_test_many(["yellowstone_fumarole_client"]); + let config = std::fs::read_to_string(&args.config).expect("Failed to read config file"); + + let config = serde_yaml::from_str::(config.as_str()) + .expect("failed to parse fumarole config"); + + let fumarole_client = FumaroleClient::connect(config.clone()) + .await + .expect("Failed to connect to fumarole"); + + match args.action { + Action::GetCgInfo(get_cg_info_args) => { + get_cg_info(get_cg_info_args, fumarole_client).await; + } + Action::CreateCg(create_cg_args) => { + create_cg(create_cg_args, 
fumarole_client).await; + } + Action::DeleteCg(delete_cg_args) => { + delete_cg(delete_cg_args, fumarole_client).await; + } + Action::ListCg => { + list_all_cg(fumarole_client).await; + } + Action::DeleteAllCg => { + delete_all_cg(fumarole_client).await; + } + Action::Subscribe(subscribe_args) => { + subscribe(fumarole_client, subscribe_args).await; + } + } +} diff --git a/crates/yellowstone-fumarole-client/Cargo.toml b/crates/yellowstone-fumarole-client/Cargo.toml index 0edb103..bacc585 100644 --- a/crates/yellowstone-fumarole-client/Cargo.toml +++ b/crates/yellowstone-fumarole-client/Cargo.toml @@ -11,8 +11,8 @@ keywords = { workspace = true } publish = true [features] -default = ["tokio"] -tokio = [] +default = ["prometheus"] +prometheus = ["dep:prometheus"] [package.metadata] include = [ @@ -20,11 +20,14 @@ include = [ "yellowstone-grpc-proto/**", ] + [dependencies] async-trait = { workspace = true } futures = { workspace = true } http = { workspace = true } hyper = { workspace = true } +lazy_static = { workspace = true } +prometheus = { workspace = true, optional = true } prost = { workspace = true } serde = { workspace = true, features = ["derive"] } serde_with = { workspace = true } diff --git a/crates/yellowstone-fumarole-client/src/lib.rs b/crates/yellowstone-fumarole-client/src/lib.rs index d9af359..4e53ab4 100644 --- a/crates/yellowstone-fumarole-client/src/lib.rs +++ b/crates/yellowstone-fumarole-client/src/lib.rs @@ -3,6 +3,9 @@ /// pub mod config; +#[cfg(feature = "prometheus")] +pub mod metrics; + pub(crate) mod runtime; pub(crate) mod util; @@ -50,7 +53,7 @@ pub mod proto { include!(concat!(env!("OUT_DIR"), "/fumarole_v2.rs")); } -use proto::fumarole_client::FumaroleClient as TonicFumaroleClient; +use proto::{fumarole_client::FumaroleClient as TonicFumaroleClient, JoinControlPlane}; #[derive(Clone)] struct FumeInterceptor { @@ -109,14 +112,17 @@ pub const DEFAULT_DRAGONSMOUTH_CAPACITY: usize = 10000; /// /// Default Fumarole commit offset interval 
/// -pub const DEFAULT_COMMIT_INTERVAL: Duration = Duration::from_secs(60); +pub const DEFAULT_COMMIT_INTERVAL: Duration = Duration::from_secs(5); +/// +/// Default maximum number of consecutive failed slot download attempts before failing the fumarole session. +/// pub const DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT: u8 = 3; /// -/// Default number of parallel data streams +/// Default number of parallel data streams (TCP connections) to open to fumarole. /// -pub const DEFAULT_PARA_DATA_STREAMS: u8 = 1; +pub const DEFAULT_PARA_DATA_STREAMS: u8 = 3; /// /// Yellowstone Fumarole gRPC Client @@ -147,7 +153,7 @@ pub enum FumaroleStreamError { /// pub struct FumaroleSubscribeConfig { /// - /// Number of parallel data streams to open to fumarole + /// Number of parallel data streams (TCP connections) to open to fumarole /// pub num_data_streams: NonZeroU8, /// @@ -269,18 +275,35 @@ impl FumaroleClient { /// /// Subscribe to a stream of updates from the Fumarole service /// - #[cfg(feature = "tokio")] pub async fn dragonsmouth_subscribe( &mut self, consumer_group_name: S, request: geyser::SubscribeRequest, + ) -> Result + where + S: AsRef, + { + let handle = tokio::runtime::Handle::current(); + self.dragonsmouth_subscribe_with_config_on( + consumer_group_name, + request, + Default::default(), + handle, + ) + .await + } + + pub async fn dragonsmouth_subscribe_with_config( + &mut self, + consumer_group_name: S, + request: geyser::SubscribeRequest, config: FumaroleSubscribeConfig, ) -> Result where S: AsRef, { let handle = tokio::runtime::Handle::current(); - self.dragonsmouth_subscribe_on(consumer_group_name, request, config, handle) + self.dragonsmouth_subscribe_with_config_on(consumer_group_name, request, config, handle) .await } @@ -288,8 +311,7 @@ impl FumaroleClient { /// Same as [`FumaroleClient::dragonsmouth_subscribe`] but allows you to specify a custom runtime handle /// the underlying fumarole runtie will use /// - #[cfg(feature = "tokio")] - pub async fn 
dragonsmouth_subscribe_on( + pub async fn dragonsmouth_subscribe_with_config_on( &mut self, consumer_group_name: S, request: geyser::SubscribeRequest, @@ -299,16 +321,31 @@ impl FumaroleClient { where S: AsRef, { - use runtime::tokio::DragonsmouthSubscribeRequestBidi; + use {proto::ControlCommand, runtime::tokio::DragonsmouthSubscribeRequestBidi}; let (dragonsmouth_outlet, dragonsmouth_inlet) = mpsc::channel(DEFAULT_DRAGONSMOUTH_CAPACITY); let (fume_control_plane_tx, fume_control_plane_rx) = mpsc::channel(100); + let initial_join = JoinControlPlane { + consumer_group_name: Some(consumer_group_name.as_ref().to_string()), + }; + let initial_join_command = ControlCommand { + command: Some(proto::control_command::Command::InitialJoin(initial_join)), + }; + + // IMPORTANT: Make sure we send the request here before we subscribe to the stream + // Otherwise this will block until timeout by remote server. + fume_control_plane_tx + .send(initial_join_command) + .await + .expect("failed to send initial join"); + let resp = self .inner .subscribe(ReceiverStream::new(fume_control_plane_rx)) .await?; + let mut streaming = resp.into_inner(); let fume_control_plane_tx = fume_control_plane_tx.clone(); let control_response = streaming.message().await?.expect("none"); @@ -318,7 +355,17 @@ impl FumaroleClient { panic!("unexpected initial response: {response:?}") }; - let sm = FumaroleSM::new(initial_state.last_committed_offset); + /* WE DON'T SUPPORT SHARDING YET */ + assert!( + initial_state.last_committed_offsets.len() == 1, + "sharding not supported" + ); + let last_committed_offset = initial_state + .last_committed_offsets + .get(&0) + .expect("no last committed offset"); + + let sm = FumaroleSM::new(*last_committed_offset); let data_bidi_factory = GrpcDataPlaneBidiFactory { client: self.clone(), channel_capacity: config.data_channel_capacity, @@ -397,13 +444,11 @@ impl FumaroleClient { } } -#[cfg(feature = "tokio")] pub(crate) struct GrpcDataPlaneBidiFactory { client: 
FumaroleClient, channel_capacity: usize, } -#[cfg(feature = "tokio")] #[async_trait::async_trait] impl DataPlaneBidiFactory for GrpcDataPlaneBidiFactory { async fn build(&self) -> DataPlaneBidi { diff --git a/crates/yellowstone-fumarole-client/src/metrics.rs b/crates/yellowstone-fumarole-client/src/metrics.rs new file mode 100644 index 0000000..0fcf14a --- /dev/null +++ b/crates/yellowstone-fumarole-client/src/metrics.rs @@ -0,0 +1,153 @@ +use { + lazy_static::lazy_static, + prometheus::{HistogramOpts, HistogramVec, IntCounterVec, IntGaugeVec, Opts}, + std::time::Duration, +}; + +lazy_static! { + pub(crate) static ref FAILED_SLOT_DOWNLOAD_ATTEMPT: IntCounterVec = IntCounterVec::new( + Opts::new( + "fumarole_failed_slot_download_attempt", + "Number of failed slot download attempts from Fumarole", + ), + &["runtime"], + ) + .unwrap(); + pub(crate) static ref SLOT_DOWNLOAD_COUNT: IntCounterVec = IntCounterVec::new( + Opts::new( + "fumarole_slot_download_count", + "Number of slots downloaded from Fumarole", + ), + &["runtime"], + ) + .unwrap(); + pub(crate) static ref INFLIGHT_SLOT_DOWNLOAD: IntGaugeVec = IntGaugeVec::new( + Opts::new( + "fumarole_inflight_slot_download", + "Number of parallel inflight slots downloaded from Fumarole", + ), + &["runtime"], + ) + .unwrap(); + pub(crate) static ref SLOT_DOWNLOAD_QUEUE_SIZE: IntGaugeVec = IntGaugeVec::new( + Opts::new( + "fumarole_slot_download_queue", + "Number slot download requests in the queue, waiting to be downloaded", + ), + &["runtime"], + ) + .unwrap(); + pub(crate) static ref SLOT_DOWNLOAD_DURATION: HistogramVec = HistogramVec::new( + HistogramOpts::new( + "fumarole_slot_download_duration_ms", + "Slot download duration distribution from Fumarole in milliseconds", + ) + .buckets(vec![ + 1.0, + 10.0, + 20.0, + 40.0, + 80.0, + 160.0, + 320.0, + 400.0, + 800.0, + 1000.0, + 2000.0, + f64::INFINITY + ]), + &["runtime"], + ) + .unwrap(); + pub(crate) static ref MAX_SLOT_DETECTED: IntGaugeVec = IntGaugeVec::new( + 
Opts::new( + "fumarole_max_slot_detected", + "Max slot detected from Fumarole SDK runtime, can be used to detect rough slot lag", + ), + &["runtime"], + ) + .unwrap(); + pub(crate) static ref OFFSET_COMMITMENT_COUNT: IntCounterVec = IntCounterVec::new( + Opts::new( + "fumarole_offset_commitment_count", + "Number of offset commitment done to remote Fumarole service", + ), + &["runtime"], + ) + .unwrap(); +} + +pub(crate) fn set_max_slot_detected(name: impl AsRef, slot: u64) { + MAX_SLOT_DETECTED + .with_label_values(&[name.as_ref()]) + .set(slot as i64); +} + +pub(crate) fn inc_slot_download_count(name: impl AsRef) { + SLOT_DOWNLOAD_COUNT + .with_label_values(&[name.as_ref()]) + .inc(); +} + +pub(crate) fn inc_inflight_slot_download(name: impl AsRef) { + INFLIGHT_SLOT_DOWNLOAD + .with_label_values(&[name.as_ref()]) + .inc(); +} + +pub(crate) fn dec_inflight_slot_download(name: impl AsRef) { + INFLIGHT_SLOT_DOWNLOAD + .with_label_values(&[name.as_ref()]) + .dec(); +} + +pub(crate) fn set_slot_download_queue_size(name: impl AsRef, size: usize) { + SLOT_DOWNLOAD_QUEUE_SIZE + .with_label_values(&[name.as_ref()]) + .set(size as i64); +} + +pub(crate) fn inc_offset_commitment_count(name: impl AsRef) { + OFFSET_COMMITMENT_COUNT + .with_label_values(&[name.as_ref()]) + .inc(); +} + +pub(crate) fn observe_slot_download_duration(name: impl AsRef, duration: Duration) { + SLOT_DOWNLOAD_DURATION + .with_label_values(&[name.as_ref()]) + .observe(duration.as_millis() as f64); +} + +pub(crate) fn inc_failed_slot_download_attempt(name: impl AsRef) { + FAILED_SLOT_DOWNLOAD_ATTEMPT + .with_label_values(&[name.as_ref()]) + .inc(); +} + +/// +/// Register Fumarole metrics to the given registry. 
+/// +pub fn register_metrics(registry: &prometheus::Registry) { + registry + .register(Box::new(SLOT_DOWNLOAD_COUNT.clone())) + .unwrap(); + registry + .register(Box::new(INFLIGHT_SLOT_DOWNLOAD.clone())) + .unwrap(); + registry + .register(Box::new(SLOT_DOWNLOAD_QUEUE_SIZE.clone())) + .unwrap(); + registry + .register(Box::new(SLOT_DOWNLOAD_DURATION.clone())) + .unwrap(); + registry + .register(Box::new(MAX_SLOT_DETECTED.clone())) + .unwrap(); + registry + .register(Box::new(OFFSET_COMMITMENT_COUNT.clone())) + .unwrap(); + registry + .register(Box::new(FAILED_SLOT_DOWNLOAD_ATTEMPT.clone())) + .unwrap(); +} diff --git a/crates/yellowstone-fumarole-client/src/runtime/mod.rs b/crates/yellowstone-fumarole-client/src/runtime/mod.rs index ab8c977..5f9aec7 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/mod.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/mod.rs @@ -1,4 +1,6 @@ -#[cfg(feature = "tokio")] +/// +/// Fumarole runtime based of tokio Async I/O. +/// pub(crate) mod tokio; use { @@ -22,7 +24,7 @@ pub(crate) type FumeNumShards = u32; pub(crate) type FumeShardIdx = u32; -pub(crate) type FumeOffset = u64; +pub(crate) type FumeOffset = i64; #[derive(Debug, Clone)] pub(crate) struct FumeDownloadRequest { @@ -38,6 +40,7 @@ pub(crate) struct FumeSlotStatus { pub(crate) slot: Slot, pub(crate) parent_slot: Option, pub(crate) commitment_level: geyser::CommitmentLevel, + pub(crate) dead_error: Option, } #[derive(Debug, Default)] @@ -148,6 +151,11 @@ pub(crate) struct FumaroleSM { /// Represents the high-water mark fume offset that can be committed to the remote fumarole service. /// It means the runtime processed everything <= committable offset. pub committable_offset: FumeOffset, + + /// Represents the max slot detected in the current session. + /// This is used to detect rough slot lag. + /// this slot is not necessarily processed by the underlying runtime yet. 
+ pub max_slot_detected: Slot, } impl FumaroleSM { @@ -161,13 +169,14 @@ impl FumaroleSM { slot_status_update_queue: Default::default(), processed_offset: Default::default(), committable_offset: last_committed_offset, + max_slot_detected: 0, } } /// /// Updates the committed offset /// - pub(crate) fn update_committed_offset(&mut self, offset: FumeOffset) { + pub fn update_committed_offset(&mut self, offset: FumeOffset) { assert!( offset > self.last_committed_offset, "offset must be greater than last committed offset" @@ -177,7 +186,7 @@ impl FumaroleSM { /// /// Queues incoming **ordered** blockchain events - pub(crate) fn queue_blockchain_event(&mut self, events: IT) + pub fn queue_blockchain_event(&mut self, events: IT) where IT: IntoIterator, { @@ -192,6 +201,7 @@ impl FumaroleSM { parent_slot, commitment_level, blockchain_shard_id: _, /*First version this is value does not mean nothing */ + dead_error, } = events; if offset < last_offset { @@ -209,8 +219,12 @@ impl FumaroleSM { slot, parent_slot, commitment_level: cl, + dead_error, }; last_offset = offset; + if slot > self.max_slot_detected { + self.max_slot_detected = slot; + } // We don't download the same slot twice in the same session. 
if !self.slot_downloaded.contains_key(&slot) { // if the slot is already in-download, we don't need to schedule it for download again @@ -249,10 +263,17 @@ impl FumaroleSM { Some(download_req) } + /// + /// Returns the number of slots in the download queue + /// + pub fn slot_download_queue_size(&self) -> usize { + self.slot_download_queue.len() + } + /// /// Update download progression for a given `Slot` download /// - pub(crate) fn make_slot_download_progress(&mut self, slot: Slot, shard_idx: FumeShardIdx) { + pub fn make_slot_download_progress(&mut self, slot: Slot, shard_idx: FumeShardIdx) { let download_progress = self .inflight_slot_shard_download .get_mut(&slot) @@ -276,7 +297,7 @@ impl FumaroleSM { /// /// Pop next slot status to process /// - pub(crate) fn pop_next_slot_status(&mut self) -> Option { + pub fn pop_next_slot_status(&mut self) -> Option { let slot_status = self.slot_status_update_queue.pop_front()?; let info = self.slot_downloaded.get_mut(&slot_status.slot)?; if info @@ -298,16 +319,22 @@ impl FumaroleSM { /// /// Marks this [`FumeOffset`] has processed by the runtime. 
/// - pub(crate) fn mark_offset_as_processed(&mut self, offset: FumeOffset) { + pub fn mark_offset_as_processed(&mut self, offset: FumeOffset) { if offset == self.missing_process_offset() { self.committable_offset = offset; - while let Some(offset2) = self.processed_offset.peek().copied() { - let offset2 = offset2.0; - if offset2 == self.missing_process_offset() { - assert!(self.processed_offset.pop().is_some()); - self.committable_offset = offset2 + loop { + let Some(offset2) = self.processed_offset.peek().copied() else { + break; + }; + + if offset2.0 != self.missing_process_offset() { + break; } + + let offset2 = self.processed_offset.pop().unwrap().0; + assert_eq!(offset2, self.missing_process_offset()); + self.committable_offset = offset2; } } else { self.processed_offset.push(Reverse(offset)); @@ -317,7 +344,7 @@ impl FumaroleSM { /// /// Returns true if there is no blockchain event history to track or progress on. /// - pub(crate) fn need_new_blockchain_events(&self) -> bool { + pub fn need_new_blockchain_events(&self) -> bool { self.slot_status_update_queue.is_empty() && self.blocked_slot_status_update.is_empty() } } @@ -343,6 +370,7 @@ mod tests { parent_slot: None, commitment_level: commitment_level.into(), blockchain_shard_id: 0, + dead_error: None, } } diff --git a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs index dec7a48..bce5410 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs @@ -1,3 +1,9 @@ +#[cfg(feature = "prometheus")] +use crate::metrics::{ + dec_inflight_slot_download, inc_failed_slot_download_attempt, inc_inflight_slot_download, + inc_offset_commitment_count, inc_slot_download_count, observe_slot_download_duration, + set_max_slot_detected, set_slot_download_queue_size, +}; use { super::{FumaroleSM, FumeDownloadRequest, FumeOffset}, crate::proto::{ @@ -21,6 +27,7 @@ use { /// /// Data-Plane 
bidirectional stream +/// pub(crate) struct DataPlaneBidi { pub tx: mpsc::Sender, pub rx: mpsc::Receiver>, @@ -84,7 +91,11 @@ const fn build_poll_history_cmd(from: Option) -> ControlCommand { ControlCommand { command: Some(proto::control_command::Command::PollHist( // from None means poll the entire history from wherever we left off since last commit. - PollBlockchainHistory { from }, + PollBlockchainHistory { + shard_id: 0, /*ALWAYS 0-FOR FIRST VERSION OF FUMAROLE */ + from, + limit: None, + }, )), } } @@ -189,6 +200,9 @@ impl DownloadBlockTask { } } } + proto::data_response::Response::Pong(_pong) => { + tracing::debug!("pong"); + } } } @@ -216,24 +230,30 @@ impl From for BlockFilters { } impl TokioFumeDragonsmouthRuntime { + const RUNTIME_NAME: &'static str = "tokio"; + fn handle_control_response(&mut self, control_response: proto::ControlResponse) { let Some(response) = control_response.response else { return; }; match response { proto::control_response::Response::CommitOffset(commit_offset_result) => { - tracing::trace!("received commit offset : {commit_offset_result:?}"); + tracing::debug!("received commit offset : {commit_offset_result:?}"); self.sm.update_committed_offset(commit_offset_result.offset); } proto::control_response::Response::PollNext(blockchain_history) => { - tracing::trace!( + tracing::debug!( "polled blockchain history : {} events", blockchain_history.events.len() ); self.sm.queue_blockchain_event(blockchain_history.events); + #[cfg(feature = "prometheus")] + { + set_max_slot_detected(Self::RUNTIME_NAME, self.sm.max_slot_detected); + } } proto::control_response::Response::Pong(_pong) => { - tracing::trace!("pong"); + tracing::debug!("pong"); } proto::control_response::Response::Init(_init) => { unreachable!("init should not be received here"); @@ -242,8 +262,9 @@ impl TokioFumeDragonsmouthRuntime { } async fn poll_history_if_needed(&mut self) { - let cmd = build_poll_history_cmd(Some(self.sm.committable_offset)); if 
self.sm.need_new_blockchain_events() { + let cmd = build_poll_history_cmd(Some(self.sm.committable_offset)); + tracing::debug!("polling history..."); self.control_plane_tx.send(cmd).await.expect("disconnected"); } } @@ -289,6 +310,7 @@ impl TokioFumeDragonsmouthRuntime { let ah = self .data_plane_tasks .spawn_on(download_task.run(), &self.rt); + tracing::debug!("download task scheduled for slot {}", download_request.slot); self.data_plane_task_meta.insert( ah.id(), DataPlaneTaskMeta { @@ -297,6 +319,17 @@ impl TokioFumeDragonsmouthRuntime { download_attempt: *download_attempts, }, ); + + #[cfg(feature = "prometheus")] + { + inc_inflight_slot_download(Self::RUNTIME_NAME); + } + } + + #[cfg(feature = "prometheus")] + { + let size = self.sm.slot_download_queue_size() + self.download_to_retry.len(); + set_slot_download_queue_size(Self::RUNTIME_NAME, size); } } @@ -308,11 +341,24 @@ impl TokioFumeDragonsmouthRuntime { let Some(task_meta) = self.data_plane_task_meta.remove(&task_id) else { panic!("missing task meta") }; + + #[cfg(feature = "prometheus")] + { + dec_inflight_slot_download(Self::RUNTIME_NAME); + } + let slot = task_meta.download_request.slot; - tracing::trace!("download task result received for slot {}", slot); + tracing::debug!("download task result received for slot {}", slot); match result { Ok(completed) => { let elapsed = task_meta.scheduled_at.elapsed(); + + #[cfg(feature = "prometheus")] + { + observe_slot_download_duration(Self::RUNTIME_NAME, elapsed); + inc_slot_download_count(Self::RUNTIME_NAME); + } + tracing::debug!("downloaded slot {slot} in {elapsed:?}"); let _ = self.download_attempts.remove(&slot); self.data_plane_bidi_vec.push_back(completed.bidi); @@ -320,6 +366,11 @@ impl TokioFumeDragonsmouthRuntime { self.sm.make_slot_download_progress(slot, 0); } Err(e) => { + #[cfg(feature = "prometheus")] + { + inc_failed_slot_download_attempt(Self::RUNTIME_NAME); + } + match e { x @ (DownloadBlockError::Disconnected | 
DownloadBlockError::GrpcError(_)) => { // We need to retry it @@ -327,7 +378,13 @@ impl TokioFumeDragonsmouthRuntime { return Err(x); } + tracing::debug!( + "download slot {slot} failed: {x:?}, rebuilding data plane bidi..." + ); + // Recreate the data plane bidi + let t = Instant::now(); let data_plane_bidi = self.data_plane_bidi_factory.build().await; + tracing::debug!("data plane bidi rebuilt in {:?}", t.elapsed()); self.data_plane_bidi_vec.push_back(data_plane_bidi); tracing::debug!("Download slot {slot} failed, rescheduling for retry..."); @@ -336,6 +393,7 @@ impl TokioFumeDragonsmouthRuntime { DownloadBlockError::OutletDisconnected => { // Will automatically be handled in the `run` main loop. // so nothing to do. + tracing::debug!("dragonsmouth outlet disconnected"); } DownloadBlockError::BlockShardNotFound => { // TODO: I don't think it should ever happen, but lets panic first so we get notified by client if it ever happens. @@ -349,10 +407,15 @@ impl TokioFumeDragonsmouthRuntime { async fn commit_offset(&mut self) { if self.sm.last_committed_offset < self.sm.committable_offset { + tracing::debug!("committing offset {}", self.sm.committable_offset); self.control_plane_tx .send(build_commit_offset_cmd(self.sm.committable_offset)) .await .expect("failed to commit offset"); + #[cfg(feature = "prometheus")] + { + inc_offset_commitment_count(Self::RUNTIME_NAME); + } } self.last_commit = Instant::now(); @@ -366,6 +429,8 @@ impl TokioFumeDragonsmouthRuntime { slot_status_vec.push_back(slot_status); } + tracing::debug!("draining slot status: {} events", slot_status_vec.len()); + for slot_status in slot_status_vec { let mut matched_filters = vec![]; for (filter_name, filter) in &self.subscribe_request.slots { @@ -388,11 +453,10 @@ impl TokioFumeDragonsmouthRuntime { parent: slot_status.parent_slot, status: slot_status.commitment_level.into(), // TODO: support dead slot - dead_error: None, + dead_error: slot_status.dead_error, }, )), }; - if 
self.dragonsmouth_outlet.send(Ok(update)).await.is_err() { return; } @@ -402,6 +466,7 @@ impl TokioFumeDragonsmouthRuntime { } async fn unsafe_cancel_all_tasks(&mut self) { + tracing::debug!("aborting all data plane tasks"); self.data_plane_tasks.abort_all(); self.data_plane_task_meta.clear(); self.download_attempts.clear(); @@ -420,7 +485,7 @@ impl TokioFumeDragonsmouthRuntime { loop { if self.dragonsmouth_outlet.is_closed() { - tracing::trace!("Detected dragonsmouth outlet closed"); + tracing::debug!("Detected dragonsmouth outlet closed"); break; } @@ -430,12 +495,12 @@ impl TokioFumeDragonsmouthRuntime { self.schedule_download_task_if_any(); tokio::select! { Some(subscribe_request) = self.dragonsmouth_bidi.rx.recv() => { + tracing::debug!("dragonsmouth subscribe request received"); self.subscribe_request = subscribe_request } control_response = self.control_plane_rx.recv() => { match control_response { Some(Ok(control_response)) => { - tracing::trace!("control response received"); self.handle_control_response(control_response); } Some(Err(e)) => { @@ -443,7 +508,7 @@ impl TokioFumeDragonsmouthRuntime { return Err(Box::new(RuntimeError::GrpcError(e))); } None => { - tracing::trace!("control plane disconnected"); + tracing::debug!("control plane disconnected"); break; } } @@ -461,11 +526,13 @@ impl TokioFumeDragonsmouthRuntime { } _ = tokio::time::sleep_until(commit_deadline.into()) => { + tracing::debug!("commit deadline reached"); self.commit_offset().await; } } self.drain_slot_status().await; } + tracing::debug!("fumarole runtime exiting"); Ok(()) } } diff --git a/crates/yellowstone-fumarole-client/src/util/collections.rs b/crates/yellowstone-fumarole-client/src/util/collections.rs index 3254dcd..d3713d6 100644 --- a/crates/yellowstone-fumarole-client/src/util/collections.rs +++ b/crates/yellowstone-fumarole-client/src/util/collections.rs @@ -30,6 +30,10 @@ where } } + pub fn len(&self) -> usize { + self.vec.len() + } + pub fn push_back(&mut self, key: K, item: 
V) -> bool { if self.index.insert(key.clone()) { self.vec.push_back((key, item)); diff --git a/examples/rust/src/bin/client.rs b/examples/rust/src/bin/client.rs index 6267c1e..ab8ed62 100644 --- a/examples/rust/src/bin/client.rs +++ b/examples/rust/src/bin/client.rs @@ -3,7 +3,7 @@ use { solana_sdk::{bs58, pubkey::Pubkey}, std::{collections::HashMap, path::PathBuf}, yellowstone_fumarole_client::{ - config::FumaroleConfig, DragonsmouthAdapterSession, FumaroleClient, FumaroleSubscribeConfig, + config::FumaroleConfig, DragonsmouthAdapterSession, FumaroleClient, }, yellowstone_grpc_proto::geyser::{ subscribe_update::UpdateOneof, SubscribeRequest, SubscribeRequestFilterAccounts, @@ -69,11 +69,8 @@ async fn subscribe(args: SubscribeArgs, config: FumaroleConfig) { .await .expect("Failed to connect to fumarole"); - let subscribe_config = FumaroleSubscribeConfig { - ..Default::default() - }; let dragonsmouth_session = fumarole_client - .dragonsmouth_subscribe(args.cg_name, request, subscribe_config) + .dragonsmouth_subscribe(args.cg_name, request) .await .expect("Failed to subscribe"); diff --git a/proto/fumarole_v2.proto b/proto/fumarole_v2.proto index db5d15b..459d5e6 100644 --- a/proto/fumarole_v2.proto +++ b/proto/fumarole_v2.proto @@ -18,11 +18,11 @@ service Fumarole { } message GetConsumerGroupInfoRequest { - string consumer_group_label = 1; + string consumer_group_name = 1; } message DeleteConsumerGroupRequest { - string consumer_group_label = 1; + string consumer_group_name = 1; } message DeleteConsumerGroupResponse { @@ -38,12 +38,12 @@ message ListConsumerGroupsResponse { message ConsumerGroupInfo { string id = 1; - string consumer_group_label = 2; + string consumer_group_name = 2; bool is_stale = 3; } message GetSlotLagInfoRequest { - string consumer_group_label = 1; + string consumer_group_name = 1; } message BlockFilters { @@ -57,7 +57,7 @@ message BlockFilters { message DownloadBlockShard { bytes blockchain_id = 1; bytes block_uid = 2; - uint32 shard_idx = 3; 
+ int32 shard_idx = 3; optional BlockFilters blockFilters = 4; } @@ -80,13 +80,13 @@ message DataCommand { message BlockShardDownloadFinish { bytes blockchain_id = 1; bytes block_uid = 2; - uint32 shard_idx = 3; + int32 shard_idx = 3; } message BlockNotFound { bytes blockchain_id = 1; bytes block_uid = 2; - uint32 shard_idx = 3; + int32 shard_idx = 3; } message DataError { @@ -100,21 +100,24 @@ message DataResponse { geyser.SubscribeUpdate update = 1; BlockShardDownloadFinish block_shard_download_finish = 2; DataError error = 3; + Pong pong = 4; } } message CommitOffset { - uint64 offset = 1; + int64 offset = 1; int32 shard_id = 2; } message PollBlockchainHistory { - optional uint64 from = 1; + int32 shard_id = 1; + optional int64 from = 2; + optional int64 limit = 3; } message BlockchainEvent { - uint64 offset = 1; + int64 offset = 1; bytes blockchain_id = 2; bytes block_uid = 3; uint32 num_shards = 4; @@ -122,28 +125,34 @@ message BlockchainEvent { optional uint64 parent_slot = 6; geyser.CommitmentLevel commitment_level = 7; int32 blockchain_shard_id = 8; + optional string dead_error = 9; } message BlockchainHistory { repeated BlockchainEvent events = 1; } +message JoinControlPlane { + optional string consumer_group_name = 1; +} + message ControlCommand { oneof command { - CommitOffset commit_offset = 1; - PollBlockchainHistory poll_hist = 2; - Ping ping = 3; + JoinControlPlane initial_join = 1; + CommitOffset commit_offset = 2; + PollBlockchainHistory poll_hist = 3; + Ping ping = 4; } } message CommitOffsetResult { - uint64 offset = 1; + int64 offset = 1; int32 shard_id = 2; } message InitialConsumerGroupState { - uint64 last_committed_offset = 1; - bytes blockchain_id = 2; + bytes blockchain_id = 1; + map last_committed_offsets = 2; } message ControlResponse { @@ -156,7 +165,7 @@ message ControlResponse { } message CreateConsumerGroupResponse { - string group_id = 1; + string consumer_group_id = 1; } enum InitialOffsetPolicy { @@ -165,7 +174,7 @@ enum 
InitialOffsetPolicy { } message CreateConsumerGroupRequest { - string consumer_group_label = 1; + string consumer_group_name = 1; InitialOffsetPolicy initial_offset_policy = 2; //optional uint64 from_slot = 3; } \ No newline at end of file From 27552fe8bcf83c7ba2b391b44bd56de87751e857 Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Thu, 24 Apr 2025 11:46:34 -0400 Subject: [PATCH 06/56] v2: fumarole-client added crate doc --- .../yellowstone-fumarole-client/src/config.rs | 2 +- crates/yellowstone-fumarole-client/src/lib.rs | 113 +++++++++++++++++- 2 files changed, 111 insertions(+), 4 deletions(-) diff --git a/crates/yellowstone-fumarole-client/src/config.rs b/crates/yellowstone-fumarole-client/src/config.rs index c5206d3..4851f06 100644 --- a/crates/yellowstone-fumarole-client/src/config.rs +++ b/crates/yellowstone-fumarole-client/src/config.rs @@ -26,7 +26,7 @@ impl FumaroleConfig { /// /// Returns the default maximum size of a message that can be decoded /// - const fn default_max_decoding_message_size_bytes() -> usize { + pub const fn default_max_decoding_message_size_bytes() -> usize { 512_000_000 } } diff --git a/crates/yellowstone-fumarole-client/src/lib.rs b/crates/yellowstone-fumarole-client/src/lib.rs index 4e53ab4..eafd8c9 100644 --- a/crates/yellowstone-fumarole-client/src/lib.rs +++ b/crates/yellowstone-fumarole-client/src/lib.rs @@ -1,6 +1,113 @@ -/// -/// Fumarole's client library. -/// +//! +//! A Rust implementation of the Yellowstone Fumarole Client using Tokio and Tonic. +//! +//! Fumarole Client uses gRPC connections to communicate with the Fumarole service. +//! +//! # Yellowstone-GRPC vs Yellowstone-Fumarole +//! +//! For the most part, the API is similar to the original [`yellowstone-grpc`] client. +//! +//! However, there are some differences: +//! +//! - The `yellowstone-fumarole` client uses multiple gRPC connections to communicate with the Fumarole service : avoids [`HoL`] blocking. +//! 
- The `yellowstone-fumarole` subscribers are persistent and can be reused across multiple sessions (not concurrently). +//! - The `yellowstone-fumarole` can reconnect to the Fumarole service if the connection is lost. +//! +//! # Examples +//! +//! Examples can be found in the [`examples`] directory. +//! +//! ## Create a `FumaroleClient` +//! +//! ```ignore +//! use yellowstone_fumarole_client::FumaroleClient; +//! use yellowstone_fumarole_client::config::FumaroleConfig; +//! +//! #[tokio::main] +//! async fn main() { +//! let config = FumaroleConfig { +//! endpoint: "https://example.com".to_string(), +//! x_token: Some("00000000-0000-0000-0000-000000000000".to_string()), +//! max_decoding_message_size_bytes: FumaroleConfig::default_max_decoding_message_size_bytes(), +//! x_metadata: Default::default(), +//! }; +//! let fumarole_client = FumaroleClient::connect(config) +//! .await +//! .expect("Failed to connect to fumarole"); +//! } +//! ``` +//! +//! **NOTE**: The struct `FumaroleConfig` supports deserialization from a YAML file. +//! +//! Here's an example of a YAML file: +//! +//! ```yaml +//! endpoint: https://example.com +//! x-token: 00000000-0000-0000-0000-000000000000 +//! ``` +//! ## Dragonsmouth-like Subscribe +//! +//! ```ignore +//! use yellowstone_fumarole_client::FumaroleClient; +//! +//! +//! let mut client = FumaroleClient::connect(config).await.unwrap(); +//! +//! let request = geyser::SubscribeRequest { +//! accounts: HashMap::from([("f1".to_owned(), SubscribeRequestFilterAccounts::default())]), +//! transactions: HashMap::from([("f1".to_owned(), SubscribeRequestFilterTransactions::default())]), +//! ..Default::default() +//! }; +//! +//! +//! let dragonsmouth_adapter = client.dragonsmouth_subscribe("my-consumer-group", request).await.unwrap(); +//! +//! let DragonsmouthAdapterSession { +//! sink: _, // Channel to update [`SubscribeRequest`] requests to the fumarole service +//! 
mut source, // Channel to receive updates from the fumarole service +//! runtime_handle: _, // Handle to the fumarole session client runtime +//! } = dragonsmouth_adapter; +//! +//! while let Some(result) = source.recv().await { +//! let event = result.expect("Failed to receive event"); +//! // ... do something with the event +//! } +//! ``` +//! +//! ## Enable Prometheus Metrics +//! +//! To enable Prometheus metrics, add the `features = [prometheus]` to your `Cargo.toml` file: +//! ```toml +//! [dependencies] +//! yellowstone-fumarole-client = { version = "x.y.z", features = ["prometheus"] } +//! ``` +//! +//! Then, you can use the `metrics` module to register and expose metrics: +//! +//! ```rust +//! use yellowstone_fumarole_client::metrics; +//! use prometheus::{Registry}; +//! +//! let r = Registry::new(); +//! +//! metrics::register_metrics(&r); +//! +//! // After registering, you should see `fumarole_` prefixed metrics in the registry. +//! ``` +//! +//! # Getting Started +//! +//! Follow the instructions in the [`README`] file to get started. +//! +//! # Feature Flags +//! +//! - `prometheus`: Enables Prometheus metrics for the Fumarole client. +//! +//! [`examples`]: https://github.com/rpcpool/yellowstone-fumarole/tree/main/examples +//! [`README`]: https://github.com/rpcpool/yellowstone-fumarole/tree/main/README.md +//! [`yellowstone-grpc`]: https://github.com/rpcpool/yellowstone-grpc +//! 
[`HoL`]: https://en.wikipedia.org/wiki/Head-of-line_blocking + pub mod config; #[cfg(feature = "prometheus")] From e69c78768fe729e52d3e209a535b2f751cd7101d Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Thu, 24 Apr 2025 11:48:08 -0400 Subject: [PATCH 07/56] v2: set version to 0.2.0-pre.1 for yellowstone-fumarole-client --- Cargo.lock | 2 +- crates/yellowstone-fumarole-client/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ae69166..77e6b99 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5239,7 +5239,7 @@ checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" [[package]] name = "yellowstone-fumarole-client" -version = "0.1.1-pre.2+solana.2.1.11" +version = "0.2.0-pre.1+solana.2.1.11" dependencies = [ "async-trait", "futures", diff --git a/crates/yellowstone-fumarole-client/Cargo.toml b/crates/yellowstone-fumarole-client/Cargo.toml index bacc585..dc7afd1 100644 --- a/crates/yellowstone-fumarole-client/Cargo.toml +++ b/crates/yellowstone-fumarole-client/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "yellowstone-fumarole-client" description = "Yellowstone Fumarole Client" -version = "0.1.1-pre.2+solana.2.1.11" +version = "0.2.0-pre.1+solana.2.1.11" authors = { workspace = true } edition = { workspace = true } homepage = { workspace = true } From fc16f65040e53e639f48d3b3fa36a36fe129f63a Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Fri, 25 Apr 2025 17:37:22 -0400 Subject: [PATCH 08/56] v2: support data plane --- apps/fume/src/main.rs | 10 +- crates/yellowstone-fumarole-client/Cargo.toml | 1 - crates/yellowstone-fumarole-client/src/lib.rs | 148 +++++----- .../src/metrics.rs | 17 ++ .../src/runtime/mod.rs | 12 +- .../src/runtime/tokio.rs | 276 +++++++++++------- proto/fumarole_v2.proto | 29 +- 7 files changed, 290 insertions(+), 203 deletions(-) diff --git a/apps/fume/src/main.rs b/apps/fume/src/main.rs index 70077ac..2a3f778 100644 --- a/apps/fume/src/main.rs +++ 
b/apps/fume/src/main.rs @@ -5,6 +5,7 @@ use { std::{ collections::HashMap, io::{stderr, stdout, IsTerminal}, + num::{NonZeroU8, NonZeroUsize}, path::PathBuf, }, tabled::{builder::Builder, Table}, @@ -20,7 +21,7 @@ use { ConsumerGroupInfo, CreateConsumerGroupRequest, DeleteConsumerGroupRequest, GetConsumerGroupInfoRequest, InitialOffsetPolicy, ListConsumerGroupsRequest, }, - DragonsmouthAdapterSession, FumaroleClient, + DragonsmouthAdapterSession, FumaroleClient, FumaroleSubscribeConfig, }, yellowstone_grpc_proto::geyser::{ subscribe_update::UpdateOneof, CommitmentLevel, SubscribeRequest, @@ -293,8 +294,13 @@ async fn subscribe(mut client: FumaroleClient, args: SubscribeArgs) { }; println!("Subscribing to consumer group {}", cg_name); + let subscribe_config = FumaroleSubscribeConfig { + num_data_plane_tcp_connections: NonZeroU8::new(1).unwrap(), + concurrent_download_limit_per_tcp: NonZeroUsize::new(1).unwrap(), + ..Default::default() + }; let dragonsmouth_session = client - .dragonsmouth_subscribe(cg_name.clone(), request) + .dragonsmouth_subscribe_with_config(cg_name.clone(), request, subscribe_config) .await .expect("Failed to subscribe"); println!("Subscribed to consumer group {}", cg_name); diff --git a/crates/yellowstone-fumarole-client/Cargo.toml b/crates/yellowstone-fumarole-client/Cargo.toml index dc7afd1..f783bf6 100644 --- a/crates/yellowstone-fumarole-client/Cargo.toml +++ b/crates/yellowstone-fumarole-client/Cargo.toml @@ -20,7 +20,6 @@ include = [ "yellowstone-grpc-proto/**", ] - [dependencies] async-trait = { workspace = true } futures = { workspace = true } diff --git a/crates/yellowstone-fumarole-client/src/lib.rs b/crates/yellowstone-fumarole-client/src/lib.rs index eafd8c9..f92371e 100644 --- a/crates/yellowstone-fumarole-client/src/lib.rs +++ b/crates/yellowstone-fumarole-client/src/lib.rs @@ -119,14 +119,10 @@ pub(crate) mod util; use { config::FumaroleConfig, proto::control_response::Response, - runtime::{ - tokio::{DataPlaneBidi, 
DataPlaneBidiFactory, TokioFumeDragonsmouthRuntime}, - FumaroleSM, - }, + runtime::{tokio::TokioFumeDragonsmouthRuntime, FumaroleSM}, std::{ - collections::{HashMap, VecDeque}, - num::NonZeroU8, - sync::Arc, + collections::HashMap, + num::{NonZeroU8, NonZeroUsize}, time::{Duration, Instant}, }, tokio::sync::mpsc, @@ -160,7 +156,11 @@ pub mod proto { include!(concat!(env!("OUT_DIR"), "/fumarole_v2.rs")); } -use proto::{fumarole_client::FumaroleClient as TonicFumaroleClient, JoinControlPlane}; +use { + proto::{fumarole_client::FumaroleClient as TonicFumaroleClient, JoinControlPlane}, + runtime::tokio::DataPlaneConn, + tonic::transport::Endpoint, +}; #[derive(Clone)] struct FumeInterceptor { @@ -232,11 +232,19 @@ pub const DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT: u8 = 3; pub const DEFAULT_PARA_DATA_STREAMS: u8 = 3; /// -/// Yellowstone Fumarole gRPC Client +/// Default maximum number of concurrent download requests to the fumarole service inside a single data plane TCP connection. +/// +pub const DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP: usize = 10; + +pub(crate) type GrpcFumaroleClient = + TonicFumaroleClient>; +/// +/// Yellowstone Fumarole SDK. /// #[derive(Clone)] pub struct FumaroleClient { - inner: TonicFumaroleClient>, + connector: FumaroleGrpcConnector, + inner: GrpcFumaroleClient, } #[derive(Debug, thiserror::Error)] @@ -262,28 +270,40 @@ pub struct FumaroleSubscribeConfig { /// /// Number of parallel data streams (TCP connections) to open to fumarole /// - pub num_data_streams: NonZeroU8, + pub num_data_plane_tcp_connections: NonZeroU8, + + /// + /// Maximum number of concurrent download requests to the fumarole service inside a single data plane TCP connection. + /// + pub concurrent_download_limit_per_tcp: NonZeroUsize, + /// /// Commit interval for the fumarole client /// pub commit_interval: Duration, + /// /// Maximum number of consecutive failed slot download attempts before failing the fumarole session. 
/// pub max_failed_slot_download_attempt: u8, + /// /// Capacity of each data channel for the fumarole client /// - pub data_channel_capacity: usize, + pub data_channel_capacity: NonZeroUsize, } impl Default for FumaroleSubscribeConfig { fn default() -> Self { Self { - num_data_streams: NonZeroU8::new(DEFAULT_PARA_DATA_STREAMS).unwrap(), + num_data_plane_tcp_connections: NonZeroU8::new(DEFAULT_PARA_DATA_STREAMS).unwrap(), + concurrent_download_limit_per_tcp: NonZeroUsize::new( + DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP, + ) + .unwrap(), commit_interval: DEFAULT_COMMIT_INTERVAL, max_failed_slot_download_attempt: DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT, - data_channel_capacity: DEFAULT_DRAGONSMOUTH_CAPACITY, + data_channel_capacity: NonZeroUsize::new(DEFAULT_DRAGONSMOUTH_CAPACITY).unwrap(), } } } @@ -353,30 +373,19 @@ fn string_pairs_to_metadata_header( impl FumaroleClient { pub async fn connect(config: FumaroleConfig) -> Result { - let channel = Channel::from_shared(config.endpoint.clone())? - .tls_config(ClientTlsConfig::new().with_native_roots())? - .connect() - .await?; - - Self::connect_with_channel(config, channel).await - } + let endpoint = Endpoint::from_shared(config.endpoint.clone())? 
+ .tls_config(ClientTlsConfig::new().with_native_roots())?; - pub async fn connect_with_channel( - config: FumaroleConfig, - channel: tonic::transport::Channel, - ) -> Result { - let interceptor = FumeInterceptor { - x_token: config - .x_token - .map(|token: String| token.try_into()) - .transpose()?, - metadata: string_pairs_to_metadata_header(config.x_metadata)?, + let connector = FumaroleGrpcConnector { + config: config.clone(), + endpoint: endpoint.clone(), }; - let client = TonicFumaroleClient::with_interceptor(channel, interceptor) - .max_decoding_message_size(config.max_decoding_message_size_bytes); - - Ok(FumaroleClient { inner: client }) + let client = connector.connect().await?; + Ok(FumaroleClient { + connector, + inner: client, + }) } /// @@ -473,16 +482,6 @@ impl FumaroleClient { .expect("no last committed offset"); let sm = FumaroleSM::new(*last_committed_offset); - let data_bidi_factory = GrpcDataPlaneBidiFactory { - client: self.clone(), - channel_capacity: config.data_channel_capacity, - }; - - let mut data_bidi_vec = VecDeque::with_capacity(config.num_data_streams.get() as usize); - for _ in 0..config.num_data_streams.get() { - let data_bidi = data_bidi_factory.build().await; - data_bidi_vec.push_back(data_bidi); - } let (dm_tx, dm_rx) = mpsc::channel(100); let dm_bidi = DragonsmouthSubscribeRequestBidi { @@ -490,16 +489,28 @@ impl FumaroleClient { rx: dm_rx, }; + let mut data_plane_channel_vec = + Vec::with_capacity(config.num_data_plane_tcp_connections.get() as usize); + for _ in 0..config.num_data_plane_tcp_connections.get() { + let client = self + .connector + .connect() + .await + .expect("failed to connect to fumarole"); + let conn = DataPlaneConn::new(client, config.concurrent_download_limit_per_tcp.get()); + data_plane_channel_vec.push(conn); + } + let tokio_rt = TokioFumeDragonsmouthRuntime { rt: handle.clone(), sm, - data_plane_bidi_factory: Arc::new(data_bidi_factory), dragonsmouth_bidi: dm_bidi, subscribe_request: request, + 
fumarole_connector: self.connector.clone(), consumer_group_name: consumer_group_name.as_ref().to_string(), control_plane_tx: fume_control_plane_tx, control_plane_rx: fume_control_plane_rx, - data_plane_bidi_vec: data_bidi_vec, + data_plane_channel_vec, data_plane_tasks: Default::default(), data_plane_task_meta: Default::default(), dragonsmouth_outlet, @@ -551,26 +562,31 @@ impl FumaroleClient { } } -pub(crate) struct GrpcDataPlaneBidiFactory { - client: FumaroleClient, - channel_capacity: usize, +#[derive(Clone)] +pub(crate) struct FumaroleGrpcConnector { + config: FumaroleConfig, + endpoint: Endpoint, } -#[async_trait::async_trait] -impl DataPlaneBidiFactory for GrpcDataPlaneBidiFactory { - async fn build(&self) -> DataPlaneBidi { - let mut client = self.client.clone(); - let (tx, rx) = mpsc::channel(self.channel_capacity); - let rx = ReceiverStream::new(rx); - let resp = client - .inner - .subscribe_data(rx) - .await - .expect("failed to subscribe"); - let streaming = resp.into_inner(); - - let rx = into_bounded_mpsc_rx(self.channel_capacity, streaming); - - DataPlaneBidi { tx, rx } +impl FumaroleGrpcConnector { + async fn connect( + &self, + ) -> Result< + TonicFumaroleClient>, + tonic::transport::Error, + > { + let channel = self.endpoint.connect().await?; + let interceptor = FumeInterceptor { + x_token: self + .config + .x_token + .as_ref() + .map(|token| token.try_into()) + .transpose() + .unwrap(), + metadata: string_pairs_to_metadata_header(self.config.x_metadata.clone()).unwrap(), + }; + Ok(TonicFumaroleClient::with_interceptor(channel, interceptor) + .max_decoding_message_size(self.config.max_decoding_message_size_bytes)) } } diff --git a/crates/yellowstone-fumarole-client/src/metrics.rs b/crates/yellowstone-fumarole-client/src/metrics.rs index 0fcf14a..a5d26a0 100644 --- a/crates/yellowstone-fumarole-client/src/metrics.rs +++ b/crates/yellowstone-fumarole-client/src/metrics.rs @@ -75,6 +75,20 @@ lazy_static! 
{ &["runtime"], ) .unwrap(); + pub(crate) static ref TOTAL_EVENT_DOWNLOADED: IntCounterVec = IntCounterVec::new( + Opts::new( + "fumarole_total_event_downloaded", + "Total number of events downloaded from Fumarole", + ), + &["runtime"], + ) + .unwrap(); +} + +pub(crate) fn inc_total_event_downloaded(name: impl AsRef, amount: usize) { + TOTAL_EVENT_DOWNLOADED + .with_label_values(&[name.as_ref()]) + .inc_by(amount as u64); } pub(crate) fn set_max_slot_detected(name: impl AsRef, slot: u64) { @@ -150,4 +164,7 @@ pub fn register_metrics(registry: &prometheus::Registry) { registry .register(Box::new(FAILED_SLOT_DOWNLOAD_ATTEMPT.clone())) .unwrap(); + registry + .register(Box::new(TOTAL_EVENT_DOWNLOADED.clone())) + .unwrap(); } diff --git a/crates/yellowstone-fumarole-client/src/runtime/mod.rs b/crates/yellowstone-fumarole-client/src/runtime/mod.rs index 5f9aec7..751116d 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/mod.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/mod.rs @@ -133,8 +133,8 @@ impl SlotDownloadProgress { pub(crate) struct FumaroleSM { /// The last committed offset pub last_committed_offset: FumeOffset, - /// Slot that have been downloaded in the current session along side slot status update - slot_downloaded: BTreeMap, + /// As we download and process slot status, we keep track of the progression of each slot here. + slot_progression: BTreeMap, /// Inlfight slot download inflight_slot_shard_download: HashMap, /// Slot download queue @@ -162,7 +162,7 @@ impl FumaroleSM { pub fn new(last_committed_offset: FumeOffset) -> Self { Self { last_committed_offset, - slot_downloaded: Default::default(), + slot_progression: Default::default(), inflight_slot_shard_download: Default::default(), slot_download_queue: Default::default(), blocked_slot_status_update: Default::default(), @@ -226,7 +226,7 @@ impl FumaroleSM { self.max_slot_detected = slot; } // We don't download the same slot twice in the same session. 
- if !self.slot_downloaded.contains_key(&slot) { + if !self.slot_progression.contains_key(&slot) { // if the slot is already in-download, we don't need to schedule it for download again if !self.inflight_slot_shard_download.contains_key(&slot) { let download_request = FumeDownloadRequest { @@ -284,7 +284,7 @@ impl FumaroleSM { if matches!(download_state, SlotDownloadState::Done) { // all shards downloaded self.inflight_slot_shard_download.remove(&slot); - self.slot_downloaded.insert(slot, Default::default()); + self.slot_progression.insert(slot, Default::default()); let blocked_slot_status = self .blocked_slot_status_update @@ -299,7 +299,7 @@ impl FumaroleSM { /// pub fn pop_next_slot_status(&mut self) -> Option { let slot_status = self.slot_status_update_queue.pop_front()?; - let info = self.slot_downloaded.get_mut(&slot_status.slot)?; + let info = self.slot_progression.get_mut(&slot_status.slot)?; if info .processed_commitment_levels .insert(slot_status.commitment_level) diff --git a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs index bce5410..eee76b7 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs @@ -6,10 +6,15 @@ use crate::metrics::{ }; use { super::{FumaroleSM, FumeDownloadRequest, FumeOffset}, - crate::proto::{ - self, data_command, BlockFilters, CommitOffset, ControlCommand, DataCommand, - DownloadBlockShard, PollBlockchainHistory, + crate::{ + metrics::inc_total_event_downloaded, + proto::{ + self, data_response, BlockFilters, CommitOffset, ControlCommand, DownloadBlockShard, + PollBlockchainHistory, + }, + FumaroleGrpcConnector, GrpcFumaroleClient, }, + futures::StreamExt, solana_sdk::clock::Slot, std::{ collections::{HashMap, VecDeque}, @@ -17,22 +22,15 @@ use { time::{Duration, Instant}, }, tokio::{ - sync::mpsc, + sync::{mpsc, Semaphore}, task::{self, JoinSet}, }, + tonic::Code, 
yellowstone_grpc_proto::geyser::{ self, SubscribeRequest, SubscribeUpdate, SubscribeUpdateSlot, }, }; -/// -/// Data-Plane bidirectional stream -/// -pub(crate) struct DataPlaneBidi { - pub tx: mpsc::Sender, - pub rx: mpsc::Receiver>, -} - /// /// Holds information about on-going data plane task. /// @@ -41,17 +39,8 @@ pub(crate) struct DataPlaneTaskMeta { download_request: FumeDownloadRequest, scheduled_at: Instant, download_attempt: u8, -} - -/// -/// Base trait for Data-plane bidirectional stream factories. -/// -#[async_trait::async_trait] -pub(crate) trait DataPlaneBidiFactory { - /// - /// Builds a [`DataPlaneBidi`] - /// - async fn build(&self) -> DataPlaneBidi; + client_rev: u64, + client_idx: usize, } /// @@ -63,6 +52,41 @@ pub struct DragonsmouthSubscribeRequestBidi { pub rx: mpsc::Receiver, } +pub(crate) struct DataPlaneConn { + sem: Arc, + client: GrpcFumaroleClient, + rev: u64, +} + +struct ProtectedGrpcFumaroleClient { + client: GrpcFumaroleClient, + _permit: tokio::sync::OwnedSemaphorePermit, +} + +impl DataPlaneConn { + pub fn new(client: GrpcFumaroleClient, concurrency_limit: usize) -> Self { + Self { + sem: Arc::new(Semaphore::new(concurrency_limit)), + client, + rev: 0, + } + } + + fn has_permit(&self) -> bool { + self.sem.available_permits() > 0 + } + + fn acquire(&mut self) -> ProtectedGrpcFumaroleClient { + let permit = Arc::clone(&self.sem) + .try_acquire_owned() + .expect("failed to acquire semaphore permit"); + ProtectedGrpcFumaroleClient { + client: self.client.clone(), + _permit: permit, + } + } +} + /// /// Fumarole runtime based on Tokio outputting Dragonsmouth only events. 
/// @@ -70,14 +94,14 @@ pub(crate) struct TokioFumeDragonsmouthRuntime { pub rt: tokio::runtime::Handle, pub sm: FumaroleSM, pub dragonsmouth_bidi: DragonsmouthSubscribeRequestBidi, - pub data_plane_bidi_factory: Arc, pub subscribe_request: SubscribeRequest, + pub fumarole_connector: FumaroleGrpcConnector, #[allow(dead_code)] pub consumer_group_name: String, pub control_plane_tx: mpsc::Sender, pub control_plane_rx: mpsc::Receiver>, - pub data_plane_bidi_vec: VecDeque, - pub data_plane_tasks: JoinSet>, + pub data_plane_channel_vec: Vec, + pub data_plane_tasks: JoinSet>, pub data_plane_task_meta: HashMap, pub dragonsmouth_outlet: mpsc::Sender>, pub download_to_retry: VecDeque, @@ -113,15 +137,11 @@ const fn build_commit_offset_cmd(offset: FumeOffset) -> ControlCommand { pub(crate) struct DownloadBlockTask { download_request: FumeDownloadRequest, - bidi: DataPlaneBidi, + protected: ProtectedGrpcFumaroleClient, filters: Option, dragonsmouth_oulet: mpsc::Sender>, } -pub(crate) struct DownloadBlockCompleted { - bidi: DataPlaneBidi, -} - #[derive(Debug, thiserror::Error)] pub(crate) enum DownloadBlockError { #[error("download block task disconnected")] @@ -130,84 +150,79 @@ pub(crate) enum DownloadBlockError { OutletDisconnected, #[error("block shard not found")] BlockShardNotFound, - #[error(transparent)] - GrpcError(#[from] tonic::Status), + #[error("error during transportation or processing")] + FailedDownload, + #[error("unknown error: {0}")] + Fatal(#[from] tonic::Status), } -impl DownloadBlockTask { - async fn run(self) -> Result { - let DataPlaneBidi { tx, mut rx } = self.bidi; - - // Make sure the stream is empty - loop { - match rx.try_recv() { - Err(mpsc::error::TryRecvError::Disconnected) => { - return Err(DownloadBlockError::Disconnected) - } - Err(mpsc::error::TryRecvError::Empty) => break, - Ok(_) => {} - } +fn map_tonic_error_code_to_download_block_error(code: Code) -> DownloadBlockError { + match code { + Code::NotFound => 
DownloadBlockError::BlockShardNotFound, + Code::Unavailable => DownloadBlockError::Disconnected, + Code::Internal + | Code::Aborted + | Code::DataLoss + | Code::ResourceExhausted + | Code::Unknown + | Code::Cancelled => DownloadBlockError::FailedDownload, + Code::Ok => { + unreachable!("ok") } - let data_cmd = data_command::Command::DownloadBlockShard(DownloadBlockShard { + Code::InvalidArgument => { + panic!("invalid argument"); + } + Code::DeadlineExceeded => DownloadBlockError::FailedDownload, + rest => DownloadBlockError::Fatal(tonic::Status::new(rest, "unknown error")), + } +} + +pub(crate) struct CompletedDownloadBlockTask { + total_event_downloaded: usize, +} + +impl DownloadBlockTask { + async fn run(mut self) -> Result { + let request = DownloadBlockShard { blockchain_id: self.download_request.blockchain_id.to_vec(), block_uid: self.download_request.block_uid.to_vec(), shard_idx: 0, block_filters: self.filters, - }); - let data_cmd = DataCommand { - command: Some(data_cmd), }; - tx.send(data_cmd) - .await - .map_err(|_| DownloadBlockError::Disconnected)?; - - loop { - let Some(result) = rx.recv().await else { - return Err(DownloadBlockError::Disconnected); - }; - - let data = result?; + let resp = self.protected.client.download_block(request).await; - let Some(resp) = data.response else { continue }; + let mut rx = match resp { + Ok(resp) => resp.into_inner(), + Err(e) => { + return Err(map_tonic_error_code_to_download_block_error(e.code())); + } + }; + let mut total_event_downloaded = 0; + while let Some(data) = rx.next().await { + let resp = data + .map_err(|e| { + let code = e.code(); + tracing::error!("download block error: {code:?}"); + map_tonic_error_code_to_download_block_error(code) + })? 
+ .response + .expect("missing response"); match resp { - proto::data_response::Response::Update(subscribe_update) => { - if self - .dragonsmouth_oulet - .send(Ok(subscribe_update)) - .await - .is_err() - { + data_response::Response::Update(update) => { + total_event_downloaded += 1; + if self.dragonsmouth_oulet.send(Ok(update)).await.is_err() { return Err(DownloadBlockError::OutletDisconnected); } } - proto::data_response::Response::BlockShardDownloadFinish( - _block_shard_download_finish, - ) => { - break; - } - proto::data_response::Response::Error(data_error) => { - let Some(e) = data_error.error else { continue }; - match e { - proto::data_error::Error::NotFound(block_not_found) => { - if block_not_found.block_uid.as_slice() - == self.download_request.block_uid.as_slice() - { - return Err(DownloadBlockError::BlockShardNotFound); - } else { - panic!("unexpected block uid") - } - } - } - } - proto::data_response::Response::Pong(_pong) => { - tracing::debug!("pong"); + data_response::Response::BlockShardDownloadFinish(_) => { + return Ok(CompletedDownloadBlockTask { + total_event_downloaded, + }); } } } - - let bidi = DataPlaneBidi { tx, rx }; - Ok(DownloadBlockCompleted { bidi }) + Err(DownloadBlockError::FailedDownload) } } @@ -224,7 +239,6 @@ impl From for BlockFilters { transactions: val.transactions, entries: val.entry, blocks_meta: val.blocks_meta, - commitment_level: val.commitment, } } } @@ -241,7 +255,7 @@ impl TokioFumeDragonsmouthRuntime { tracing::debug!("received commit offset : {commit_offset_result:?}"); self.sm.update_committed_offset(commit_offset_result.offset); } - proto::control_response::Response::PollNext(blockchain_history) => { + proto::control_response::Response::PollHist(blockchain_history) => { tracing::debug!( "polled blockchain history : {} events", blockchain_history.events.len() @@ -269,13 +283,22 @@ impl TokioFumeDragonsmouthRuntime { } } + fn find_most_under_utilized_data_plane_client(&self) -> Option { + 
self.data_plane_channel_vec + .iter() + .enumerate() + .filter(|(_, conn)| conn.has_permit()) + .max_by_key(|(_, conn)| conn.sem.available_permits()) + .map(|(idx, _)| idx) + } + fn schedule_download_task_if_any(&mut self) { // This loop drains as many download slot request as possible, // limited to available [`DataPlaneBidi`]. loop { - if self.data_plane_bidi_vec.is_empty() { + let Some(client_idx) = self.find_most_under_utilized_data_plane_client() else { break; - } + }; let maybe_download_request = self .download_to_retry @@ -287,15 +310,15 @@ impl TokioFumeDragonsmouthRuntime { }; assert!(download_request.num_shards == 1, "this client is incompatible with remote server since it does not support sharded block download"); - - let data_plane_bidi = self - .data_plane_bidi_vec - .pop_back() + let client = self + .data_plane_channel_vec + .get_mut(client_idx) .expect("should not be none"); - + let permit = client.acquire(); + let client_rev = client.rev; let download_task = DownloadBlockTask { download_request: download_request.clone(), - bidi: data_plane_bidi, + protected: permit, filters: Some(self.subscribe_request.clone().into()), dragonsmouth_oulet: self.dragonsmouth_outlet.clone(), }; @@ -317,6 +340,8 @@ impl TokioFumeDragonsmouthRuntime { download_request, scheduled_at: Instant::now(), download_attempt: *download_attempts, + client_rev, + client_idx, }, ); @@ -336,7 +361,7 @@ impl TokioFumeDragonsmouthRuntime { async fn handle_data_plane_task_result( &mut self, task_id: task::Id, - result: Result, + result: Result, ) -> Result<(), DownloadBlockError> { let Some(task_meta) = self.data_plane_task_meta.remove(&task_id) else { panic!("missing task meta") @@ -351,17 +376,22 @@ impl TokioFumeDragonsmouthRuntime { tracing::debug!("download task result received for slot {}", slot); match result { Ok(completed) => { + let CompletedDownloadBlockTask { + total_event_downloaded, + } = completed; let elapsed = task_meta.scheduled_at.elapsed(); #[cfg(feature = 
"prometheus")] { observe_slot_download_duration(Self::RUNTIME_NAME, elapsed); inc_slot_download_count(Self::RUNTIME_NAME); + inc_total_event_downloaded(Self::RUNTIME_NAME, total_event_downloaded); } - tracing::debug!("downloaded slot {slot} in {elapsed:?}"); + tracing::debug!( + "downloaded slot {slot} in {elapsed:?}, total events: {total_event_downloaded}" + ); let _ = self.download_attempts.remove(&slot); - self.data_plane_bidi_vec.push_back(completed.bidi); // TODO: Add support for sharded progress self.sm.make_slot_download_progress(slot, 0); } @@ -372,20 +402,38 @@ impl TokioFumeDragonsmouthRuntime { } match e { - x @ (DownloadBlockError::Disconnected | DownloadBlockError::GrpcError(_)) => { + x @ (DownloadBlockError::Disconnected | DownloadBlockError::FailedDownload) => { // We need to retry it if task_meta.download_attempt >= self.max_slot_download_attempt { + tracing::error!( + "download slot {slot} failed: {x:?}, max attempts reached" + ); return Err(x); } - + let remaining_attempt = self + .max_slot_download_attempt + .saturating_sub(task_meta.download_attempt); tracing::debug!( - "download slot {slot} failed: {x:?}, rebuilding data plane bidi..." 
+ "download slot {slot} failed: {x:?}, remaining attempts: {remaining_attempt}" ); // Recreate the data plane bidi let t = Instant::now(); - let data_plane_bidi = self.data_plane_bidi_factory.build().await; + tracing::debug!("data plane bidi rebuilt in {:?}", t.elapsed()); - self.data_plane_bidi_vec.push_back(data_plane_bidi); + + let conn = self + .data_plane_channel_vec + .get_mut(task_meta.client_idx) + .expect("should not be none"); + + if task_meta.client_rev == conn.rev { + let new_client = self + .fumarole_connector + .connect() + .await + .expect("failed to reconnect data plane client"); + conn.client = new_client; + } tracing::debug!("Download slot {slot} failed, rescheduling for retry..."); self.download_to_retry.push_back(task_meta.download_request); @@ -397,7 +445,11 @@ impl TokioFumeDragonsmouthRuntime { } DownloadBlockError::BlockShardNotFound => { // TODO: I don't think it should ever happen, but lets panic first so we get notified by client if it ever happens. - panic!("Slot {slot} not found"); + tracing::error!("Slot {slot} not found"); + panic!("slot {slot} not found"); + } + DownloadBlockError::Fatal(e) => { + panic!("fatal error: {e}"); } } } @@ -518,7 +570,7 @@ impl TokioFumeDragonsmouthRuntime { let result = self.handle_data_plane_task_result(task_id, download_result).await; if let Err(e) = result { self.unsafe_cancel_all_tasks().await; - if let DownloadBlockError::GrpcError(e) = e { + if let DownloadBlockError::Fatal(e) = e { let _ = self.dragonsmouth_outlet.send(Err(e)).await; } break; diff --git a/proto/fumarole_v2.proto b/proto/fumarole_v2.proto index 459d5e6..03b076c 100644 --- a/proto/fumarole_v2.proto +++ b/proto/fumarole_v2.proto @@ -10,6 +10,8 @@ service Fumarole { rpc DeleteConsumerGroup(DeleteConsumerGroupRequest) returns (DeleteConsumerGroupResponse) {} rpc CreateConsumerGroup(CreateConsumerGroupRequest) returns (CreateConsumerGroupResponse) {} + rpc DownloadBlock(DownloadBlockShard) returns (stream DataResponse) {} + // Represents 
subscription to the data plane rpc SubscribeData(stream DataCommand) returns (stream DataResponse) {} @@ -51,7 +53,6 @@ message BlockFilters { map transactions = 2; map entries = 3; map blocks_meta = 4; - optional geyser.CommitmentLevel commitment_level = 5; } message DownloadBlockShard { @@ -77,11 +78,7 @@ message DataCommand { } } -message BlockShardDownloadFinish { - bytes blockchain_id = 1; - bytes block_uid = 2; - int32 shard_idx = 3; -} +message BlockShardDownloadFinish {} message BlockNotFound { bytes blockchain_id = 1; @@ -99,8 +96,6 @@ message DataResponse { oneof response { geyser.SubscribeUpdate update = 1; BlockShardDownloadFinish block_shard_download_finish = 2; - DataError error = 3; - Pong pong = 4; } } @@ -145,6 +140,16 @@ message ControlCommand { } } +message ControlResponse { + oneof response { + InitialConsumerGroupState init = 1; + CommitOffsetResult commit_offset = 2; + BlockchainHistory poll_hist = 3; + Pong pong = 4; + } +} + + message CommitOffsetResult { int64 offset = 1; int32 shard_id = 2; @@ -155,14 +160,6 @@ message InitialConsumerGroupState { map last_committed_offsets = 2; } -message ControlResponse { - oneof response { - InitialConsumerGroupState init = 1; - CommitOffsetResult commit_offset = 2; - BlockchainHistory poll_next = 3; - Pong pong = 4; - } -} message CreateConsumerGroupResponse { string consumer_group_id = 1; From 0e90c55a927c9915cc2d1784a7e1c53bb3447dc3 Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Sat, 26 Apr 2025 16:40:30 -0400 Subject: [PATCH 09/56] v2: added support for commitment level filtering --- apps/fume/src/main.rs | 1 + .../src/runtime/mod.rs | 325 +++++++++++------- .../src/runtime/tokio.rs | 19 +- 3 files changed, 218 insertions(+), 127 deletions(-) diff --git a/apps/fume/src/main.rs b/apps/fume/src/main.rs index 2a3f778..f5b1e0f 100644 --- a/apps/fume/src/main.rs +++ b/apps/fume/src/main.rs @@ -290,6 +290,7 @@ async fn subscribe(mut client: FumaroleClient, args: SubscribeArgs) { 
SubscribeRequestFilterTransactions::default(), )]), slots: HashMap::from([("f1".to_owned(), SubscribeRequestFilterSlots::default())]), + commitment: Some(CommitmentLevel::Finalized.into()), ..Default::default() }; diff --git a/crates/yellowstone-fumarole-client/src/runtime/mod.rs b/crates/yellowstone-fumarole-client/src/runtime/mod.rs index 751116d..efee3ae 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/mod.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/mod.rs @@ -4,16 +4,13 @@ pub(crate) mod tokio; use { - crate::{ - proto::{self, BlockchainEvent}, - util::collections::KeyedVecDeque, - }, + crate::proto::{self, BlockchainEvent}, solana_sdk::clock::Slot, std::{ cmp::Reverse, - collections::{BTreeMap, BinaryHeap, HashMap, HashSet, VecDeque}, + collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, VecDeque}, }, - yellowstone_grpc_proto::geyser, + yellowstone_grpc_proto::geyser::{self, CommitmentLevel}, }; pub(crate) type FumeBlockchainId = [u8; 16]; @@ -28,23 +25,25 @@ pub(crate) type FumeOffset = i64; #[derive(Debug, Clone)] pub(crate) struct FumeDownloadRequest { - pub(crate) slot: Slot, - pub(crate) blockchain_id: FumeBlockchainId, - pub(crate) block_uid: FumeBlockUID, - pub(crate) num_shards: FumeNumShards, // First version of fumarole, it should always be 1 + pub slot: Slot, + pub blockchain_id: FumeBlockchainId, + pub block_uid: FumeBlockUID, + pub num_shards: FumeNumShards, // First version of fumarole, it should always be 1 + #[allow(dead_code)] + pub commitment_level: geyser::CommitmentLevel, } #[derive(Clone, Debug)] pub(crate) struct FumeSlotStatus { - pub(crate) offset: FumeOffset, - pub(crate) slot: Slot, - pub(crate) parent_slot: Option, - pub(crate) commitment_level: geyser::CommitmentLevel, - pub(crate) dead_error: Option, + pub offset: FumeOffset, + pub slot: Slot, + pub parent_slot: Option, + pub commitment_level: geyser::CommitmentLevel, + pub dead_error: Option, } #[derive(Debug, Default)] -struct SlotInfoProcessed 
{ +struct SlotCommitmentProgression { processed_commitment_levels: HashSet, } @@ -53,7 +52,8 @@ struct SlotDownloadProgress { shard_remaining: Vec, } -enum SlotDownloadState { +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum SlotDownloadState { Downloading, Done, } @@ -133,12 +133,11 @@ impl SlotDownloadProgress { pub(crate) struct FumaroleSM { /// The last committed offset pub last_committed_offset: FumeOffset, + slot_commitment_progression: BTreeMap, /// As we download and process slot status, we keep track of the progression of each slot here. - slot_progression: BTreeMap, + downloaded_slot: BTreeSet, /// Inlfight slot download inflight_slot_shard_download: HashMap, - /// Slot download queue - slot_download_queue: KeyedVecDeque, /// Slot blocked by a slot download (inflight or in queue) blocked_slot_status_update: HashMap>, /// Slot status queue whose slot have been completely downloaded in the current session. @@ -156,20 +155,24 @@ pub(crate) struct FumaroleSM { /// This is used to detect rough slot lag. /// this slot is not necessarily processed by the underlying runtime yet. 
pub max_slot_detected: Slot, + + /// Unprocessed blockchain events + unprocessed_blockchain_event: VecDeque, } impl FumaroleSM { pub fn new(last_committed_offset: FumeOffset) -> Self { Self { last_committed_offset, - slot_progression: Default::default(), + slot_commitment_progression: Default::default(), + downloaded_slot: Default::default(), inflight_slot_shard_download: Default::default(), - slot_download_queue: Default::default(), blocked_slot_status_update: Default::default(), slot_status_update_queue: Default::default(), processed_offset: Default::default(), committable_offset: last_committed_offset, max_slot_detected: 0, + unprocessed_blockchain_event: Default::default(), } } @@ -184,96 +187,48 @@ impl FumaroleSM { self.last_committed_offset = offset; } - /// - /// Queues incoming **ordered** blockchain events pub fn queue_blockchain_event(&mut self, events: IT) where IT: IntoIterator, { - let mut last_offset = self.last_committed_offset; - for events in events { - let BlockchainEvent { - offset, - blockchain_id, - block_uid, - num_shards, - slot, - parent_slot, - commitment_level, - blockchain_shard_id: _, /*First version this is value does not mean nothing */ - dead_error, - } = events; - - if offset < last_offset { + for event in events { + if event.offset < self.last_committed_offset { continue; } - let blockchain_id: [u8; 16] = blockchain_id - .try_into() - .expect("blockchain_id must be 16 bytes"); - let block_uid: [u8; 16] = block_uid.try_into().expect("block_uid must be 16 bytes"); - - let cl = geyser::CommitmentLevel::try_from(commitment_level) - .expect("invalid commitment level"); - let fume_slot_status = FumeSlotStatus { - offset, - slot, - parent_slot, - commitment_level: cl, - dead_error, - }; - last_offset = offset; - if slot > self.max_slot_detected { - self.max_slot_detected = slot; - } - // We don't download the same slot twice in the same session. 
- if !self.slot_progression.contains_key(&slot) { - // if the slot is already in-download, we don't need to schedule it for download again - if !self.inflight_slot_shard_download.contains_key(&slot) { - let download_request = FumeDownloadRequest { - slot, - blockchain_id, - block_uid, - num_shards, - }; - self.slot_download_queue.push_back(slot, download_request); + if self.downloaded_slot.contains(&event.slot) { + let fume_status = FumeSlotStatus { + offset: event.offset, + slot: event.slot, + parent_slot: event.parent_slot, + commitment_level: geyser::CommitmentLevel::try_from(event.commitment_level) + .expect("invalid commitment level"), + dead_error: event.dead_error, + }; + if self.inflight_slot_shard_download.contains_key(&event.slot) { + // This event is blocked by a slot download currently in progress + self.blocked_slot_status_update + .entry(event.slot) + .or_default() + .push_back(fume_status); + } else { + // Fast track this event, since the slot has been downloaded in the current session + // and we are not waiting for any shard to be downloaded. + self.slot_status_update_queue.push_back(fume_status); } - self.blocked_slot_status_update - .entry(slot) - .or_default() - .push_back(fume_slot_status); } else { - self.slot_status_update_queue.push_back(fume_slot_status); + self.unprocessed_blockchain_event.push_back(event); } } } - /// - /// Returns the [`Some(FumeDownloadRequest)`] to download if any, otherwise `None`. 
- /// - pub(crate) fn pop_slot_to_download(&mut self) -> Option { - let download_req = self.slot_download_queue.pop_front()?; - let download_progress = SlotDownloadProgress { - num_shards: download_req.num_shards, - shard_remaining: vec![false; download_req.num_shards as usize], - }; - let old = self - .inflight_slot_shard_download - .insert(download_req.slot, download_progress); - assert!(old.is_none(), "slot already in download"); - Some(download_req) - } - - /// - /// Returns the number of slots in the download queue - /// - pub fn slot_download_queue_size(&self) -> usize { - self.slot_download_queue.len() - } - /// /// Update download progression for a given `Slot` download /// - pub fn make_slot_download_progress(&mut self, slot: Slot, shard_idx: FumeShardIdx) { + pub fn make_slot_download_progress( + &mut self, + slot: Slot, + shard_idx: FumeShardIdx, + ) -> SlotDownloadState { let download_progress = self .inflight_slot_shard_download .get_mut(&slot) @@ -284,7 +239,8 @@ impl FumaroleSM { if matches!(download_state, SlotDownloadState::Done) { // all shards downloaded self.inflight_slot_shard_download.remove(&slot); - self.slot_progression.insert(slot, Default::default()); + self.downloaded_slot.insert(slot); + self.slot_commitment_progression.entry(slot).or_default(); let blocked_slot_status = self .blocked_slot_status_update @@ -292,22 +248,132 @@ impl FumaroleSM { .unwrap_or_default(); self.slot_status_update_queue.extend(blocked_slot_status); } + download_state + } + + pub fn pop_next_slot_status(&mut self) -> Option { + loop { + let slot_status = self.slot_status_update_queue.pop_front()?; + if let Some(commitment_history) = + self.slot_commitment_progression.get_mut(&slot_status.slot) + { + if commitment_history + .processed_commitment_levels + .insert(slot_status.commitment_level) + { + return Some(slot_status); + } else { + // We already processed this commitment level + continue; + } + } else { + // This slot has not been downloaded yet, but still has 
a status to process + unreachable!("slot status should not be available here"); + } + } + } + + fn make_sure_slot_commitment_progression_exists( + &mut self, + slot: Slot, + ) -> &mut SlotCommitmentProgression { + self.slot_commitment_progression.entry(slot).or_default() } /// /// Pop next slot status to process /// - pub fn pop_next_slot_status(&mut self) -> Option { - let slot_status = self.slot_status_update_queue.pop_front()?; - let info = self.slot_progression.get_mut(&slot_status.slot)?; - if info - .processed_commitment_levels - .insert(slot_status.commitment_level) - { - // We handle duplicate slot status event here. - Some(slot_status) - } else { - None + pub fn pop_slot_to_download( + &mut self, + commitment: Option, + ) -> Option { + loop { + let min_commitment = commitment.unwrap_or(CommitmentLevel::Processed); + let BlockchainEvent { + offset, + blockchain_id, + block_uid, + num_shards, + slot, + parent_slot, + commitment_level, + blockchain_shard_id: _, + dead_error, + } = self.unprocessed_blockchain_event.pop_front()?; + + let event_cl = geyser::CommitmentLevel::try_from(commitment_level) + .expect("invalid commitment level"); + + if event_cl < min_commitment { + self.slot_status_update_queue.push_back(FumeSlotStatus { + offset, + slot, + parent_slot, + commitment_level: event_cl, + dead_error, + }); + self.make_sure_slot_commitment_progression_exists(slot); + continue; + } + + if self.downloaded_slot.contains(&slot) { + // This slot has been fully downloaded by the runtime + self.make_sure_slot_commitment_progression_exists(slot); + let Some(progression) = self.slot_commitment_progression.get_mut(&slot) else { + unreachable!("slot status should not be available here"); + }; + + if progression.processed_commitment_levels.contains(&event_cl) { + // We already processed this commitment level + self.mark_offset_as_processed(offset); + continue; + } + + // We have a new commitment level for this slot and slot has been downloaded in the current session. 
+ self.slot_status_update_queue.push_back(FumeSlotStatus { + offset, + slot, + parent_slot, + commitment_level: event_cl, + dead_error, + }); + } else { + // This slot has not been downloaded yet + let blockchain_id: [u8; 16] = blockchain_id + .try_into() + .expect("blockchain_id must be 16 bytes"); + let block_uid: [u8; 16] = block_uid.try_into().expect("block_uid must be 16 bytes"); + + // We have a new commitment level for this slot and slot has not been downloaded in the current session. + self.blocked_slot_status_update + .entry(slot) + .or_default() + .push_back(FumeSlotStatus { + offset, + slot, + parent_slot, + commitment_level: event_cl, + dead_error, + }); + + if !self.inflight_slot_shard_download.contains_key(&slot) { + // This slot has not been schedule for download yet + let download_request = FumeDownloadRequest { + slot, + blockchain_id, + block_uid, + num_shards, + commitment_level: event_cl, + }; + let download_progress = SlotDownloadProgress { + num_shards, + shard_remaining: vec![false; num_shards as usize], + }; + self.inflight_slot_shard_download + .insert(slot, download_progress); + return Some(download_request); + } + } } } @@ -382,15 +448,15 @@ mod tests { sm.queue_blockchain_event(vec![event.clone()]); // Slot status should not be available, since we didn't download it yet. 
- assert!(sm.pop_next_slot_status().is_none()); - - let download_req = sm.pop_slot_to_download().unwrap(); + let download_req = sm.pop_slot_to_download(None).unwrap(); assert_eq!(download_req.slot, 1); - assert!(sm.pop_slot_to_download().is_none()); + assert!(sm.pop_slot_to_download(None).is_none()); + assert!(sm.pop_next_slot_status().is_none()); - sm.make_slot_download_progress(1, 0); + let download_state = sm.make_slot_download_progress(1, 0); + assert_eq!(download_state, SlotDownloadState::Done); let status = sm.pop_next_slot_status().unwrap(); @@ -405,7 +471,7 @@ mod tests { sm.queue_blockchain_event(vec![event2.clone()]); // It should not cause new slot download request - assert!(sm.pop_slot_to_download().is_none()); + assert!(sm.pop_slot_to_download(None).is_none()); let status = sm.pop_next_slot_status().unwrap(); assert_eq!(status.slot, 1); @@ -425,11 +491,11 @@ mod tests { // Slot status should not be available, since we didn't download it yet. assert!(sm.pop_next_slot_status().is_none()); - let download_req = sm.pop_slot_to_download().unwrap(); + let download_req = sm.pop_slot_to_download(None).unwrap(); assert_eq!(download_req.slot, 1); - assert!(sm.pop_slot_to_download().is_none()); + assert!(sm.pop_slot_to_download(None).is_none()); sm.make_slot_download_progress(1, 0); @@ -441,7 +507,30 @@ mod tests { // Putting the same event back should be ignored sm.queue_blockchain_event(vec![event]); + assert!(sm.pop_slot_to_download(None).is_none()); assert!(sm.pop_next_slot_status().is_none()); - assert!(sm.pop_slot_to_download().is_none()); + } + + #[test] + fn it_should_handle_min_commitment_level() { + let mut sm = FumaroleSM::new(0); + + let event = random_blockchain_event(1, 1, CommitmentLevel::Processed); + sm.queue_blockchain_event(vec![event.clone()]); + + // Slot status should not be available, since we didn't download it yet. 
+ assert!(sm.pop_next_slot_status().is_none()); + + // Use finalized commitment level here + let download_req = sm.pop_slot_to_download(Some(CommitmentLevel::Finalized)); + assert!(download_req.is_none()); + + assert!(sm.pop_slot_to_download(None).is_none()); + + // It should not cause the slot status to be available here even if we have a finalized commitment level filtered out before + let status = sm.pop_next_slot_status().unwrap(); + + assert_eq!(status.slot, 1); + assert_eq!(status.commitment_level, CommitmentLevel::Processed); } } diff --git a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs index eee76b7..da85381 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs @@ -27,7 +27,7 @@ use { }, tonic::Code, yellowstone_grpc_proto::geyser::{ - self, SubscribeRequest, SubscribeUpdate, SubscribeUpdateSlot, + self, CommitmentLevel, SubscribeRequest, SubscribeUpdate, SubscribeUpdateSlot, }, }; @@ -292,6 +292,12 @@ impl TokioFumeDragonsmouthRuntime { .map(|(idx, _)| idx) } + fn commitment_level(&self) -> Option { + self.subscribe_request + .commitment + .map(|cl| CommitmentLevel::try_from(cl).expect("invalid commitment level")) + } + fn schedule_download_task_if_any(&mut self) { // This loop drains as many download slot request as possible, // limited to available [`DataPlaneBidi`]. 
@@ -303,7 +309,7 @@ impl TokioFumeDragonsmouthRuntime { let maybe_download_request = self .download_to_retry .pop_front() - .or_else(|| self.sm.pop_slot_to_download()); + .or_else(|| self.sm.pop_slot_to_download(self.commitment_level())); let Some(download_request) = maybe_download_request else { break; @@ -350,12 +356,6 @@ impl TokioFumeDragonsmouthRuntime { inc_inflight_slot_download(Self::RUNTIME_NAME); } } - - #[cfg(feature = "prometheus")] - { - let size = self.sm.slot_download_queue_size() + self.download_to_retry.len(); - set_slot_download_queue_size(Self::RUNTIME_NAME, size); - } } async fn handle_data_plane_task_result( @@ -512,8 +512,9 @@ impl TokioFumeDragonsmouthRuntime { if self.dragonsmouth_outlet.send(Ok(update)).await.is_err() { return; } - self.sm.mark_offset_as_processed(slot_status.offset); } + + self.sm.mark_offset_as_processed(slot_status.offset); } } From 0af6e1696a2d89fd39e9d6eaeeb2bc0456cd048e Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Mon, 28 Apr 2025 09:44:52 -0400 Subject: [PATCH 10/56] v2: decouple fumarole runtime from slot download --- apps/fume/src/main.rs | 6 +- crates/yellowstone-fumarole-client/src/lib.rs | 59 +- .../src/runtime/mod.rs | 1 + .../src/runtime/tokio.rs | 788 +++++++++++------- examples/rust/src/bin/client.rs | 2 +- proto/fumarole_v2.proto | 4 +- 6 files changed, 547 insertions(+), 313 deletions(-) diff --git a/apps/fume/src/main.rs b/apps/fume/src/main.rs index f5b1e0f..5aa02c9 100644 --- a/apps/fume/src/main.rs +++ b/apps/fume/src/main.rs @@ -284,13 +284,13 @@ async fn subscribe(mut client: FumaroleClient, args: SubscribeArgs) { // This request listen for all account updates and transaction updates let request = SubscribeRequest { - accounts: HashMap::from([("f1".to_owned(), SubscribeRequestFilterAccounts::default())]), + // accounts: HashMap::from([("f1".to_owned(), SubscribeRequestFilterAccounts::default())]), transactions: HashMap::from([( "f1".to_owned(), SubscribeRequestFilterTransactions::default(), 
)]), slots: HashMap::from([("f1".to_owned(), SubscribeRequestFilterSlots::default())]), - commitment: Some(CommitmentLevel::Finalized.into()), + // commitment: Some(CommitmentLevel::Finalized.into()), ..Default::default() }; @@ -308,7 +308,7 @@ async fn subscribe(mut client: FumaroleClient, args: SubscribeArgs) { let DragonsmouthAdapterSession { sink: _, mut source, - runtime_handle: _, + fumarole_handle: _, } = dragonsmouth_session; let mut shutdown = create_shutdown(); diff --git a/crates/yellowstone-fumarole-client/src/lib.rs b/crates/yellowstone-fumarole-client/src/lib.rs index f92371e..f880164 100644 --- a/crates/yellowstone-fumarole-client/src/lib.rs +++ b/crates/yellowstone-fumarole-client/src/lib.rs @@ -118,8 +118,12 @@ pub(crate) mod util; use { config::FumaroleConfig, + futures::future::{select, Either}, proto::control_response::Response, - runtime::{tokio::TokioFumeDragonsmouthRuntime, FumaroleSM}, + runtime::{ + tokio::{DownloadTaskRunnerChannels, GrpcDownloadTaskRunner, TokioFumeDragonsmouthRuntime}, + FumaroleSM, + }, std::{ collections::HashMap, num::{NonZeroU8, NonZeroUsize}, @@ -224,7 +228,7 @@ pub const DEFAULT_COMMIT_INTERVAL: Duration = Duration::from_secs(5); /// /// Default maximum number of consecutive failed slot download attempts before failing the fumarole session. /// -pub const DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT: u8 = 3; +pub const DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT: usize = 3; /// /// Default number of parallel data streams (TCP connections) to open to fumarole. @@ -285,7 +289,7 @@ pub struct FumaroleSubscribeConfig { /// /// Maximum number of consecutive failed slot download attempts before failing the fumarole session. 
/// - pub max_failed_slot_download_attempt: u8, + pub max_failed_slot_download_attempt: usize, /// /// Capacity of each data channel for the fumarole client @@ -354,8 +358,7 @@ pub struct DragonsmouthAdapterSession { /// If you want to stop the fumarole session, you need to drop the [`DragonsmouthAdapterSession::source`] channel, /// then you could wait for the handle to finish. /// - pub runtime_handle: - tokio::task::JoinHandle>>, + pub fumarole_handle: tokio::task::JoinHandle<()>, } fn string_pairs_to_metadata_header( @@ -501,31 +504,57 @@ impl FumaroleClient { data_plane_channel_vec.push(conn); } + let (download_task_runner_cnc_tx, download_task_runner_cnc_rx) = mpsc::channel(10); + // Make sure the channel capacity is really low, since the grpc runner already implements its own concurrency control + let (download_task_queue_tx, download_task_queue_rx) = mpsc::channel(10); + let (download_result_tx, download_result_rx) = mpsc::channel(10); + let grpc_download_task_runner = GrpcDownloadTaskRunner::new( + handle.clone(), + data_plane_channel_vec, + self.connector.clone(), + download_task_runner_cnc_rx, + download_task_queue_rx, + download_result_tx, + config.max_failed_slot_download_attempt, + ); + + let download_task_runner_chans = DownloadTaskRunnerChannels { + download_task_queue_tx, + cnc_tx: download_task_runner_cnc_tx, + download_result_rx, + }; + let tokio_rt = TokioFumeDragonsmouthRuntime { rt: handle.clone(), sm, dragonsmouth_bidi: dm_bidi, subscribe_request: request, - fumarole_connector: self.connector.clone(), + download_task_runner_chans, consumer_group_name: consumer_group_name.as_ref().to_string(), control_plane_tx: fume_control_plane_tx, control_plane_rx: fume_control_plane_rx, - data_plane_channel_vec, - data_plane_tasks: Default::default(), - data_plane_task_meta: Default::default(), dragonsmouth_outlet, - download_to_retry: Default::default(), - download_attempts: Default::default(), - max_slot_download_attempt: 
config.max_failed_slot_download_attempt, commit_interval: config.commit_interval, last_commit: Instant::now(), }; - - let jh = handle.spawn(tokio_rt.run()); + let download_task_runner_jh = handle.spawn(grpc_download_task_runner.run()); + let fumarole_rt_jh = handle.spawn(tokio_rt.run()); + let fut = async move { + let either = select(download_task_runner_jh, fumarole_rt_jh).await; + match either { + Either::Left((result, _)) => { + let _ = result.expect("fumarole download task runner failed"); + } + Either::Right((result, _)) => { + let _ = result.expect("fumarole runtime failed"); + } + } + }; + let fumarole_handle = handle.spawn(fut); let dm_session = DragonsmouthAdapterSession { sink: dm_tx, source: dragonsmouth_inlet, - runtime_handle: jh, + fumarole_handle, }; Ok(dm_session) } diff --git a/crates/yellowstone-fumarole-client/src/runtime/mod.rs b/crates/yellowstone-fumarole-client/src/runtime/mod.rs index efee3ae..2d83b71 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/mod.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/mod.rs @@ -28,6 +28,7 @@ pub(crate) struct FumeDownloadRequest { pub slot: Slot, pub blockchain_id: FumeBlockchainId, pub block_uid: FumeBlockUID, + #[allow(dead_code)] pub num_shards: FumeNumShards, // First version of fumarole, it should always be 1 #[allow(dead_code)] pub commitment_level: geyser::CommitmentLevel, diff --git a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs index da85381..2ff3046 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs @@ -5,7 +5,7 @@ use crate::metrics::{ set_max_slot_detected, set_slot_download_queue_size, }; use { - super::{FumaroleSM, FumeDownloadRequest, FumeOffset}, + super::{FumaroleSM, FumeDownloadRequest, FumeOffset, FumeShardIdx}, crate::{ metrics::inc_total_event_downloaded, proto::{ @@ -18,11 +18,10 @@ use { solana_sdk::clock::Slot, std::{ 
collections::{HashMap, VecDeque}, - sync::Arc, time::{Duration, Instant}, }, tokio::{ - sync::{mpsc, Semaphore}, + sync::mpsc::{self, error::TrySendError}, task::{self, JoinSet}, }, tonic::Code, @@ -34,13 +33,14 @@ use { /// /// Holds information about on-going data plane task. /// -#[derive(Clone, Debug)] +#[derive(Debug, Clone)] pub(crate) struct DataPlaneTaskMeta { - download_request: FumeDownloadRequest, + client_idx: usize, + request: FumeDownloadRequest, + filters: Option, + dragonsmouth_outlet: mpsc::Sender>, scheduled_at: Instant, - download_attempt: u8, client_rev: u64, - client_idx: usize, } /// @@ -52,61 +52,56 @@ pub struct DragonsmouthSubscribeRequestBidi { pub rx: mpsc::Receiver, } -pub(crate) struct DataPlaneConn { - sem: Arc, - client: GrpcFumaroleClient, - rev: u64, -} - -struct ProtectedGrpcFumaroleClient { - client: GrpcFumaroleClient, - _permit: tokio::sync::OwnedSemaphorePermit, -} - impl DataPlaneConn { pub fn new(client: GrpcFumaroleClient, concurrency_limit: usize) -> Self { Self { - sem: Arc::new(Semaphore::new(concurrency_limit)), + permits: concurrency_limit, client, rev: 0, } } fn has_permit(&self) -> bool { - self.sem.available_permits() > 0 + self.permits > 0 } +} + +pub enum DownloadTaskSenderError { + NoPermit, + Disconnected, +} + +pub struct DownloadPermit { + drop: Option>, +} - fn acquire(&mut self) -> ProtectedGrpcFumaroleClient { - let permit = Arc::clone(&self.sem) - .try_acquire_owned() - .expect("failed to acquire semaphore permit"); - ProtectedGrpcFumaroleClient { - client: self.client.clone(), - _permit: permit, +impl Drop for DownloadPermit { + fn drop(&mut self) { + if let Some(drop) = self.drop.take() { + drop(); } } } +pub enum DownloadTaskResult { + Ok(CompletedDownloadBlockTask), + Err { slot: Slot, err: DownloadBlockError }, +} + /// /// Fumarole runtime based on Tokio outputting Dragonsmouth only events. 
/// pub(crate) struct TokioFumeDragonsmouthRuntime { pub rt: tokio::runtime::Handle, pub sm: FumaroleSM, + pub download_task_runner_chans: DownloadTaskRunnerChannels, pub dragonsmouth_bidi: DragonsmouthSubscribeRequestBidi, pub subscribe_request: SubscribeRequest, - pub fumarole_connector: FumaroleGrpcConnector, #[allow(dead_code)] pub consumer_group_name: String, pub control_plane_tx: mpsc::Sender, pub control_plane_rx: mpsc::Receiver>, - pub data_plane_channel_vec: Vec, - pub data_plane_tasks: JoinSet>, - pub data_plane_task_meta: HashMap, pub dragonsmouth_outlet: mpsc::Sender>, - pub download_to_retry: VecDeque, - pub download_attempts: HashMap, - pub max_slot_download_attempt: u8, pub commit_interval: Duration, pub last_commit: Instant, } @@ -135,97 +130,6 @@ const fn build_commit_offset_cmd(offset: FumeOffset) -> ControlCommand { } } -pub(crate) struct DownloadBlockTask { - download_request: FumeDownloadRequest, - protected: ProtectedGrpcFumaroleClient, - filters: Option, - dragonsmouth_oulet: mpsc::Sender>, -} - -#[derive(Debug, thiserror::Error)] -pub(crate) enum DownloadBlockError { - #[error("download block task disconnected")] - Disconnected, - #[error("dragonsmouth outlet disconnected")] - OutletDisconnected, - #[error("block shard not found")] - BlockShardNotFound, - #[error("error during transportation or processing")] - FailedDownload, - #[error("unknown error: {0}")] - Fatal(#[from] tonic::Status), -} - -fn map_tonic_error_code_to_download_block_error(code: Code) -> DownloadBlockError { - match code { - Code::NotFound => DownloadBlockError::BlockShardNotFound, - Code::Unavailable => DownloadBlockError::Disconnected, - Code::Internal - | Code::Aborted - | Code::DataLoss - | Code::ResourceExhausted - | Code::Unknown - | Code::Cancelled => DownloadBlockError::FailedDownload, - Code::Ok => { - unreachable!("ok") - } - Code::InvalidArgument => { - panic!("invalid argument"); - } - Code::DeadlineExceeded => DownloadBlockError::FailedDownload, - rest => 
DownloadBlockError::Fatal(tonic::Status::new(rest, "unknown error")), - } -} - -pub(crate) struct CompletedDownloadBlockTask { - total_event_downloaded: usize, -} - -impl DownloadBlockTask { - async fn run(mut self) -> Result { - let request = DownloadBlockShard { - blockchain_id: self.download_request.blockchain_id.to_vec(), - block_uid: self.download_request.block_uid.to_vec(), - shard_idx: 0, - block_filters: self.filters, - }; - let resp = self.protected.client.download_block(request).await; - - let mut rx = match resp { - Ok(resp) => resp.into_inner(), - Err(e) => { - return Err(map_tonic_error_code_to_download_block_error(e.code())); - } - }; - let mut total_event_downloaded = 0; - while let Some(data) = rx.next().await { - let resp = data - .map_err(|e| { - let code = e.code(); - tracing::error!("download block error: {code:?}"); - map_tonic_error_code_to_download_block_error(code) - })? - .response - .expect("missing response"); - - match resp { - data_response::Response::Update(update) => { - total_event_downloaded += 1; - if self.dragonsmouth_oulet.send(Ok(update)).await.is_err() { - return Err(DownloadBlockError::OutletDisconnected); - } - } - data_response::Response::BlockShardDownloadFinish(_) => { - return Ok(CompletedDownloadBlockTask { - total_event_downloaded, - }); - } - } - } - Err(DownloadBlockError::FailedDownload) - } -} - #[derive(Debug, thiserror::Error)] pub enum RuntimeError { #[error(transparent)] @@ -283,15 +187,6 @@ impl TokioFumeDragonsmouthRuntime { } } - fn find_most_under_utilized_data_plane_client(&self) -> Option { - self.data_plane_channel_vec - .iter() - .enumerate() - .filter(|(_, conn)| conn.has_permit()) - .max_by_key(|(_, conn)| conn.sem.available_permits()) - .map(|(idx, _)| idx) - } - fn commitment_level(&self) -> Option { self.subscribe_request .commitment @@ -302,159 +197,49 @@ impl TokioFumeDragonsmouthRuntime { // This loop drains as many download slot request as possible, // limited to available [`DataPlaneBidi`]. 
loop { - let Some(client_idx) = self.find_most_under_utilized_data_plane_client() else { - break; + let result = self + .download_task_runner_chans + .download_task_queue_tx + .try_reserve(); + let permit = match result { + Ok(permit) => permit, + Err(TrySendError::Full(_)) => { + break; + } + Err(TrySendError::Closed(_)) => { + panic!("download task runner closed unexpectedly") + } }; - let maybe_download_request = self - .download_to_retry - .pop_front() - .or_else(|| self.sm.pop_slot_to_download(self.commitment_level())); - - let Some(download_request) = maybe_download_request else { + let Some(download_request) = self.sm.pop_slot_to_download(self.commitment_level()) + else { break; }; - - assert!(download_request.num_shards == 1, "this client is incompatible with remote server since it does not support sharded block download"); - let client = self - .data_plane_channel_vec - .get_mut(client_idx) - .expect("should not be none"); - let permit = client.acquire(); - let client_rev = client.rev; - let download_task = DownloadBlockTask { - download_request: download_request.clone(), - protected: permit, + let download_task_args = DownloadTaskArgs { + download_request, filters: Some(self.subscribe_request.clone().into()), - dragonsmouth_oulet: self.dragonsmouth_outlet.clone(), + dragonsmouth_outlet: self.dragonsmouth_outlet.clone(), }; - - let download_attempts = self - .download_attempts - .entry(download_request.slot) - .or_default(); - - *download_attempts += 1; - - let ah = self - .data_plane_tasks - .spawn_on(download_task.run(), &self.rt); - tracing::debug!("download task scheduled for slot {}", download_request.slot); - self.data_plane_task_meta.insert( - ah.id(), - DataPlaneTaskMeta { - download_request, - scheduled_at: Instant::now(), - download_attempt: *download_attempts, - client_rev, - client_idx, - }, - ); - - #[cfg(feature = "prometheus")] - { - inc_inflight_slot_download(Self::RUNTIME_NAME); - } + permit.send(download_task_args); } } - async fn 
handle_data_plane_task_result( - &mut self, - task_id: task::Id, - result: Result, - ) -> Result<(), DownloadBlockError> { - let Some(task_meta) = self.data_plane_task_meta.remove(&task_id) else { - panic!("missing task meta") - }; - - #[cfg(feature = "prometheus")] - { - dec_inflight_slot_download(Self::RUNTIME_NAME); - } - - let slot = task_meta.download_request.slot; - tracing::debug!("download task result received for slot {}", slot); - match result { - Ok(completed) => { + fn handle_download_result(&mut self, download_result: DownloadTaskResult) { + match download_result { + DownloadTaskResult::Ok(completed) => { let CompletedDownloadBlockTask { - total_event_downloaded, + slot, + block_uid: _, + shard_idx, + total_event_downloaded: _, } = completed; - let elapsed = task_meta.scheduled_at.elapsed(); - - #[cfg(feature = "prometheus")] - { - observe_slot_download_duration(Self::RUNTIME_NAME, elapsed); - inc_slot_download_count(Self::RUNTIME_NAME); - inc_total_event_downloaded(Self::RUNTIME_NAME, total_event_downloaded); - } - - tracing::debug!( - "downloaded slot {slot} in {elapsed:?}, total events: {total_event_downloaded}" - ); - let _ = self.download_attempts.remove(&slot); - // TODO: Add support for sharded progress - self.sm.make_slot_download_progress(slot, 0); + self.sm.make_slot_download_progress(slot, shard_idx); } - Err(e) => { - #[cfg(feature = "prometheus")] - { - inc_failed_slot_download_attempt(Self::RUNTIME_NAME); - } - - match e { - x @ (DownloadBlockError::Disconnected | DownloadBlockError::FailedDownload) => { - // We need to retry it - if task_meta.download_attempt >= self.max_slot_download_attempt { - tracing::error!( - "download slot {slot} failed: {x:?}, max attempts reached" - ); - return Err(x); - } - let remaining_attempt = self - .max_slot_download_attempt - .saturating_sub(task_meta.download_attempt); - tracing::debug!( - "download slot {slot} failed: {x:?}, remaining attempts: {remaining_attempt}" - ); - // Recreate the data plane 
bidi - let t = Instant::now(); - - tracing::debug!("data plane bidi rebuilt in {:?}", t.elapsed()); - - let conn = self - .data_plane_channel_vec - .get_mut(task_meta.client_idx) - .expect("should not be none"); - - if task_meta.client_rev == conn.rev { - let new_client = self - .fumarole_connector - .connect() - .await - .expect("failed to reconnect data plane client"); - conn.client = new_client; - } - - tracing::debug!("Download slot {slot} failed, rescheduling for retry..."); - self.download_to_retry.push_back(task_meta.download_request); - } - DownloadBlockError::OutletDisconnected => { - // Will automatically be handled in the `run` main loop. - // so nothing to do. - tracing::debug!("dragonsmouth outlet disconnected"); - } - DownloadBlockError::BlockShardNotFound => { - // TODO: I don't think it should ever happen, but lets panic first so we get notified by client if it ever happens. - tracing::error!("Slot {slot} not found"); - panic!("slot {slot} not found"); - } - DownloadBlockError::Fatal(e) => { - panic!("fatal error: {e}"); - } - } + DownloadTaskResult::Err { slot, err } => { + // TODO add option to let user decide what to do, by default let it crash + panic!("Failed to download slot {slot}: {err:?}") } } - Ok(()) } async fn commit_offset(&mut self) { @@ -518,17 +303,6 @@ impl TokioFumeDragonsmouthRuntime { } } - async fn unsafe_cancel_all_tasks(&mut self) { - tracing::debug!("aborting all data plane tasks"); - self.data_plane_tasks.abort_all(); - self.data_plane_task_meta.clear(); - self.download_attempts.clear(); - - while (self.data_plane_tasks.join_next().await).is_some() { - // Drain all tasks - } - } - pub(crate) async fn run(mut self) -> Result<(), Box> { let inital_load_history_cmd = build_poll_history_cmd(None); self.control_plane_tx @@ -566,15 +340,14 @@ impl TokioFumeDragonsmouthRuntime { } } } - Some(result) = self.data_plane_tasks.join_next_with_id() => { - let (task_id, download_result) = result.expect("data plane task set"); - let result 
= self.handle_data_plane_task_result(task_id, download_result).await; - if let Err(e) = result { - self.unsafe_cancel_all_tasks().await; - if let DownloadBlockError::Fatal(e) = e { - let _ = self.dragonsmouth_outlet.send(Err(e)).await; + maybe = self.download_task_runner_chans.download_result_rx.recv() => { + match maybe { + Some(result) => { + self.handle_download_result(result); + }, + None => { + panic!("download task runner channel closed") } - break; } } @@ -589,3 +362,434 @@ impl TokioFumeDragonsmouthRuntime { Ok(()) } } + +/// +/// Channels to interact with a "download task runner". +/// +/// Instead of using Trait which does not work very well for Asynchronous Specs, +/// we use channels to create polymorphic behaviour (indirection-through-channels), +/// similar to how actor-based programming works. +/// +pub struct DownloadTaskRunnerChannels { + /// + /// Where you send download task request to. + /// + pub download_task_queue_tx: mpsc::Sender, + + /// + /// Sends command the download task runner. + /// + pub cnc_tx: mpsc::Sender, + + /// + /// Where you get back feedback from download task result. + pub download_result_rx: mpsc::Receiver, +} + +pub enum DownloadTaskRunnerCommand {} + +/// +/// Download task runner that use gRPC protocol to download slot content. +/// +/// It manages concurrent [`GrpcDownloadBlockTaskRun`] instance and route back +/// download result to the requestor. +/// +pub struct GrpcDownloadTaskRunner { + /// Tokio runtime handle to spawn download task on. 
+ rt: tokio::runtime::Handle, + + /// + /// Pool of gRPC channels + /// + data_plane_channel_vec: Vec, + + /// + /// gRPC channel connector + /// + connector: FumaroleGrpcConnector, + + /// + /// Sets of inflight download tasks + /// + tasks: JoinSet>, + /// + /// Inflight download task metadata index + /// + task_meta: HashMap, + + /// + /// Command-and-Control channel to send command to the runner + /// + cnc_rx: mpsc::Receiver, + + /// + /// Download task queue + /// + download_task_queue: mpsc::Receiver, + + /// + /// Current inflight slow download attempt + /// + download_attempts: HashMap, + + /// + /// The sink to send download task result to. + /// + outlet: mpsc::Sender, + + /// + /// The maximum download attempt per slot (how many download failure do we allow) + /// + max_download_attempt_per_slot: usize, +} + +/// +/// The download task specification to use by the runner. +/// +#[derive(Debug, Clone)] +pub struct DownloadTaskArgs { + pub download_request: FumeDownloadRequest, + pub filters: Option, + pub dragonsmouth_outlet: mpsc::Sender>, +} + +pub(crate) struct DataPlaneConn { + permits: usize, + client: GrpcFumaroleClient, + rev: u64, +} + +impl GrpcDownloadTaskRunner { + const RUNTIME_NAME: &'static str = "tokio_grpc_task_runner"; + + pub fn new( + rt: tokio::runtime::Handle, + data_plane_channel_vec: Vec, + connector: FumaroleGrpcConnector, + cnc_rx: mpsc::Receiver, + download_task_queue: mpsc::Receiver, + outlet: mpsc::Sender, + max_download_attempt_by_slot: usize, + ) -> Self { + Self { + rt, + data_plane_channel_vec, + connector, + tasks: JoinSet::new(), + task_meta: HashMap::new(), + cnc_rx, + download_task_queue, + download_attempts: HashMap::new(), + outlet, + max_download_attempt_per_slot: max_download_attempt_by_slot, + } + } + + /// + /// Always pick the client with the highest permit limit (least used) + /// + fn find_most_underloaded_client(&self) -> Option { + self.data_plane_channel_vec + .iter() + .enumerate() + .max_by_key(|(_, conn)| 
conn.permits) + .filter(|(_, conn)| conn.has_permit()) + .map(|(idx, _)| idx) + } + + async fn handle_data_plane_task_result( + &mut self, + task_id: task::Id, + result: Result, + ) -> Result<(), DownloadBlockError> { + let Some(task_meta) = self.task_meta.remove(&task_id) else { + panic!("missing task meta") + }; + + #[cfg(feature = "prometheus")] + { + dec_inflight_slot_download(Self::RUNTIME_NAME); + } + + let slot = task_meta.request.slot; + tracing::debug!("download task result received for slot {}", slot); + + let state = self + .data_plane_channel_vec + .get_mut(task_meta.client_idx) + .expect("should not be none"); + state.permits += 1; + + match result { + Ok(completed) => { + let CompletedDownloadBlockTask { + total_event_downloaded, + slot, + block_uid: _, + shard_idx: _, + } = completed; + let elapsed = task_meta.scheduled_at.elapsed(); + + #[cfg(feature = "prometheus")] + { + observe_slot_download_duration(Self::RUNTIME_NAME, elapsed); + inc_slot_download_count(Self::RUNTIME_NAME); + inc_total_event_downloaded(Self::RUNTIME_NAME, total_event_downloaded); + } + + tracing::debug!( + "downloaded slot {slot} in {elapsed:?}, total events: {total_event_downloaded}" + ); + let _ = self.download_attempts.remove(&slot); + let _ = self.outlet.send(DownloadTaskResult::Ok(completed)).await; + } + Err(e) => { + #[cfg(feature = "prometheus")] + { + inc_failed_slot_download_attempt(Self::RUNTIME_NAME); + } + let download_attempt = self + .download_attempts + .get(&slot) + .expect("should track download attempt"); + + match e { + x @ (DownloadBlockError::Disconnected | DownloadBlockError::FailedDownload) => { + // We need to retry it + if *download_attempt >= self.max_download_attempt_per_slot { + tracing::error!( + "download slot {slot} failed: {x:?}, max attempts reached" + ); + return Err(x); + } + let remaining_attempt = self + .max_download_attempt_per_slot + .saturating_sub(*download_attempt); + tracing::debug!( + "download slot {slot} failed: {x:?}, remaining 
attempts: {remaining_attempt}" + ); + // Recreate the data plane bidi + let t = Instant::now(); + + tracing::debug!("data plane bidi rebuilt in {:?}", t.elapsed()); + let conn = self + .data_plane_channel_vec + .get_mut(task_meta.client_idx) + .expect("should not be none"); + + if task_meta.client_rev == conn.rev { + let new_client = self + .connector + .connect() + .await + .expect("failed to reconnect data plane client"); + conn.client = new_client; + conn.rev += 1; + } + + tracing::debug!("Download slot {slot} failed, rescheduling for retry..."); + let task_spec = DownloadTaskArgs { + download_request: task_meta.request, + filters: task_meta.filters, + dragonsmouth_outlet: task_meta.dragonsmouth_outlet, + }; + // Reschedule download immediately + self.spawn_grpc_download_task(task_meta.client_idx, task_spec); + } + DownloadBlockError::OutletDisconnected => { + // Will automatically be handled in the `run` main loop. + // so nothing to do. + tracing::debug!("dragonsmouth outlet disconnected"); + } + DownloadBlockError::BlockShardNotFound => { + // TODO: I don't think it should ever happen, but lets panic first so we get notified by client if it ever happens. 
+ tracing::error!("Slot {slot} not found"); + let _ = self + .outlet + .send(DownloadTaskResult::Err { + slot: slot, + err: DownloadBlockError::BlockShardNotFound, + }) + .await; + } + DownloadBlockError::Fatal(e) => return Err(DownloadBlockError::Fatal(e)), + } + } + } + Ok(()) + } + + fn spawn_grpc_download_task(&mut self, client_idx: usize, task_spec: DownloadTaskArgs) { + let conn = self + .data_plane_channel_vec + .get_mut(client_idx) + .expect("should not be none"); + + let client = conn.client.clone(); + let client_rev = conn.rev; + + let DownloadTaskArgs { + download_request, + filters, + dragonsmouth_outlet, + } = task_spec; + let slot = download_request.slot; + let task = GrpcDownloadBlockTaskRun { + download_request: download_request.clone(), + client, + filters: filters.clone(), + dragonsmouth_oulet: dragonsmouth_outlet.clone(), + }; + let ah = self.tasks.spawn_on(task.run(), &self.rt); + let task_meta = DataPlaneTaskMeta { + client_idx, + request: download_request.clone(), + filters, + dragonsmouth_outlet, + scheduled_at: Instant::now(), + client_rev, + }; + self.download_attempts + .entry(slot) + .and_modify(|e| *e += 1) + .or_insert(1); + conn.permits.checked_sub(1).expect("underflow"); + self.task_meta.insert(ah.id(), task_meta); + } + + pub(crate) async fn run(mut self) -> Result<(), DownloadBlockError> { + while !self.outlet.is_closed() { + let maybe_available_client_idx = self.find_most_underloaded_client(); + tokio::select! 
{ + maybe = self.cnc_rx.recv() => { + match maybe { + Some(cmd) => { + todo!() + }, + None => { + tracing::debug!("command channel disconnected"); + break; + } + } + } + maybe_download_task = self.download_task_queue.recv(), if maybe_available_client_idx.is_some() => { + match maybe_download_task { + Some(download_task) => { + let Some(client_idx) = maybe_available_client_idx else { + unreachable!("client idx is some") + }; + self.spawn_grpc_download_task(client_idx, download_task); + } + None => { + tracing::debug!("download task queue disconnected"); + break; + } + } + } + Some(result) = self.tasks.join_next_with_id() => { + let (task_id, result) = result.expect("should never panic"); + self.handle_data_plane_task_result(task_id, result).await?; + } + } + } + tracing::debug!("Closing GrpcDownloadTaskRunner loop"); + Ok(()) + } +} + +pub(crate) struct GrpcDownloadBlockTaskRun { + download_request: FumeDownloadRequest, + client: GrpcFumaroleClient, + filters: Option, + dragonsmouth_oulet: mpsc::Sender>, +} + +#[derive(Debug, thiserror::Error)] +pub(crate) enum DownloadBlockError { + #[error("download block task disconnected")] + Disconnected, + #[error("dragonsmouth outlet disconnected")] + OutletDisconnected, + #[error("block shard not found")] + BlockShardNotFound, + #[error("error during transportation or processing")] + FailedDownload, + #[error("unknown error: {0}")] + Fatal(#[from] tonic::Status), +} + +fn map_tonic_error_code_to_download_block_error(code: Code) -> DownloadBlockError { + match code { + Code::NotFound => DownloadBlockError::BlockShardNotFound, + Code::Unavailable => DownloadBlockError::Disconnected, + Code::Internal + | Code::Aborted + | Code::DataLoss + | Code::ResourceExhausted + | Code::Unknown + | Code::Cancelled => DownloadBlockError::FailedDownload, + Code::Ok => { + unreachable!("ok") + } + Code::InvalidArgument => { + panic!("invalid argument"); + } + Code::DeadlineExceeded => DownloadBlockError::FailedDownload, + rest => 
DownloadBlockError::Fatal(tonic::Status::new(rest, "unknown error")), + } +} + +pub(crate) struct CompletedDownloadBlockTask { + slot: u64, + block_uid: [u8; 16], + shard_idx: FumeShardIdx, + total_event_downloaded: usize, +} + +impl GrpcDownloadBlockTaskRun { + async fn run(mut self) -> Result { + let request = DownloadBlockShard { + blockchain_id: self.download_request.blockchain_id.to_vec(), + block_uid: self.download_request.block_uid.to_vec(), + shard_idx: 0, + block_filters: self.filters, + }; + let resp = self.client.download_block(request).await; + + let mut rx = match resp { + Ok(resp) => resp.into_inner(), + Err(e) => { + return Err(map_tonic_error_code_to_download_block_error(e.code())); + } + }; + let mut total_event_downloaded = 0; + while let Some(data) = rx.next().await { + let resp = data + .map_err(|e| { + let code = e.code(); + tracing::error!("download block error: {code:?}"); + map_tonic_error_code_to_download_block_error(code) + })? + .response + .expect("missing response"); + + match resp { + data_response::Response::Update(update) => { + total_event_downloaded += 1; + if self.dragonsmouth_oulet.send(Ok(update)).await.is_err() { + return Err(DownloadBlockError::OutletDisconnected); + } + } + data_response::Response::BlockShardDownloadFinish(_) => { + return Ok(CompletedDownloadBlockTask { + slot: self.download_request.slot, + block_uid: self.download_request.block_uid, + shard_idx: 0, + total_event_downloaded, + }); + } + } + } + Err(DownloadBlockError::FailedDownload) + } +} diff --git a/examples/rust/src/bin/client.rs b/examples/rust/src/bin/client.rs index ab8ed62..5db8ec6 100644 --- a/examples/rust/src/bin/client.rs +++ b/examples/rust/src/bin/client.rs @@ -77,7 +77,7 @@ async fn subscribe(args: SubscribeArgs, config: FumaroleConfig) { let DragonsmouthAdapterSession { sink: _, mut source, - runtime_handle: _, + fumarole_handle: _, } = dragonsmouth_session; while let Some(result) = source.recv().await { diff --git a/proto/fumarole_v2.proto 
b/proto/fumarole_v2.proto index 03b076c..d728b13 100644 --- a/proto/fumarole_v2.proto +++ b/proto/fumarole_v2.proto @@ -73,8 +73,8 @@ message Pong { message DataCommand { oneof command { - DownloadBlockShard download_block_shard = 1; - Ping ping = 2; + BlockFilters filters = 1; + DownloadBlockShard download_block_shard = 2; } } From 4df44297459ed3eb0368e527af02d89c2b153f05 Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Mon, 28 Apr 2025 09:48:49 -0400 Subject: [PATCH 11/56] v2: reorganized code --- apps/fume/src/main.rs | 2 +- crates/yellowstone-fumarole-client/src/lib.rs | 1 - .../src/runtime/tokio.rs | 47 +++++++------------ 3 files changed, 17 insertions(+), 33 deletions(-) diff --git a/apps/fume/src/main.rs b/apps/fume/src/main.rs index 5aa02c9..bd4517d 100644 --- a/apps/fume/src/main.rs +++ b/apps/fume/src/main.rs @@ -284,7 +284,7 @@ async fn subscribe(mut client: FumaroleClient, args: SubscribeArgs) { // This request listen for all account updates and transaction updates let request = SubscribeRequest { - // accounts: HashMap::from([("f1".to_owned(), SubscribeRequestFilterAccounts::default())]), + accounts: HashMap::from([("f1".to_owned(), SubscribeRequestFilterAccounts::default())]), transactions: HashMap::from([( "f1".to_owned(), SubscribeRequestFilterTransactions::default(), diff --git a/crates/yellowstone-fumarole-client/src/lib.rs b/crates/yellowstone-fumarole-client/src/lib.rs index f880164..1725ab3 100644 --- a/crates/yellowstone-fumarole-client/src/lib.rs +++ b/crates/yellowstone-fumarole-client/src/lib.rs @@ -525,7 +525,6 @@ impl FumaroleClient { }; let tokio_rt = TokioFumeDragonsmouthRuntime { - rt: handle.clone(), sm, dragonsmouth_bidi: dm_bidi, subscribe_request: request, diff --git a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs index 2ff3046..9fe583e 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs +++ 
b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs @@ -30,19 +30,6 @@ use { }, }; -/// -/// Holds information about on-going data plane task. -/// -#[derive(Debug, Clone)] -pub(crate) struct DataPlaneTaskMeta { - client_idx: usize, - request: FumeDownloadRequest, - filters: Option, - dragonsmouth_outlet: mpsc::Sender>, - scheduled_at: Instant, - client_rev: u64, -} - /// /// Mimics Dragonsmouth subscribe request bidirectional stream. /// @@ -66,23 +53,6 @@ impl DataPlaneConn { } } -pub enum DownloadTaskSenderError { - NoPermit, - Disconnected, -} - -pub struct DownloadPermit { - drop: Option>, -} - -impl Drop for DownloadPermit { - fn drop(&mut self) { - if let Some(drop) = self.drop.take() { - drop(); - } - } -} - pub enum DownloadTaskResult { Ok(CompletedDownloadBlockTask), Err { slot: Slot, err: DownloadBlockError }, @@ -91,8 +61,9 @@ pub enum DownloadTaskResult { /// /// Fumarole runtime based on Tokio outputting Dragonsmouth only events. /// +/// Drives the Fumarole State-Machine ([`FumaroleSM`]) using Async I/O. +/// pub(crate) struct TokioFumeDragonsmouthRuntime { - pub rt: tokio::runtime::Handle, pub sm: FumaroleSM, pub download_task_runner_chans: DownloadTaskRunnerChannels, pub dragonsmouth_bidi: DragonsmouthSubscribeRequestBidi, @@ -388,6 +359,19 @@ pub struct DownloadTaskRunnerChannels { pub enum DownloadTaskRunnerCommand {} +/// +/// Holds information about on-going data plane task. +/// +#[derive(Debug, Clone)] +pub(crate) struct DataPlaneTaskMeta { + client_idx: usize, + request: FumeDownloadRequest, + filters: Option, + dragonsmouth_outlet: mpsc::Sender>, + scheduled_at: Instant, + client_rev: u64, +} + /// /// Download task runner that use gRPC protocol to download slot content. 
/// @@ -741,6 +725,7 @@ fn map_tonic_error_code_to_download_block_error(code: Code) -> DownloadBlockErro pub(crate) struct CompletedDownloadBlockTask { slot: u64, + #[allow(dead_code)] block_uid: [u8; 16], shard_idx: FumeShardIdx, total_event_downloaded: usize, From f8c2ca144e2a6c2ffacd209bff92fe4adf6cec99 Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Tue, 29 Apr 2025 17:03:00 -0400 Subject: [PATCH 12/56] v2: deprecated revision detection --- Cargo.lock | 1 + apps/fume/Cargo.toml | 1 + apps/fume/src/main.rs | 234 +++++++++++++++++- crates/yellowstone-fumarole-client/src/lib.rs | 2 +- .../src/runtime/mod.rs | 11 +- .../src/runtime/tokio.rs | 72 ++++-- .../src/util/collections.rs | 69 ------ .../src/util/mod.rs | 1 - proto/fumarole_v2.proto | 4 +- 9 files changed, 286 insertions(+), 109 deletions(-) delete mode 100644 crates/yellowstone-fumarole-client/src/util/collections.rs diff --git a/Cargo.lock b/Cargo.lock index 77e6b99..b507975 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1232,6 +1232,7 @@ dependencies = [ "serde_yaml", "solana-sdk", "tabled", + "thiserror", "tokio", "tokio-stream", "tonic", diff --git a/apps/fume/Cargo.toml b/apps/fume/Cargo.toml index 1e0bddb..fd7a6b1 100644 --- a/apps/fume/Cargo.toml +++ b/apps/fume/Cargo.toml @@ -16,6 +16,7 @@ futures = { workspace = true } solana-sdk = { workspace = true } serde_yaml = { workspace = true } tabled = { workspace = true } +thiserror = { workspace = true } tokio = { workspace = true, features = ["rt-multi-thread", "signal"] } tokio-stream = { workspace = true } tonic = { workspace = true } diff --git a/apps/fume/src/main.rs b/apps/fume/src/main.rs index bd4517d..14f2247 100644 --- a/apps/fume/src/main.rs +++ b/apps/fume/src/main.rs @@ -1,12 +1,14 @@ use { clap::Parser, futures::{future::BoxFuture, FutureExt}, - solana_sdk::{bs58, pubkey::Pubkey}, + solana_sdk::{bs58, clock::Slot, pubkey::Pubkey}, std::{ - collections::HashMap, + collections::{HashMap, HashSet}, io::{stderr, stdout, IsTerminal}, 
num::{NonZeroU8, NonZeroUsize}, path::PathBuf, + str::FromStr, + time::Duration, }, tabled::{builder::Builder, Table}, tokio::{ @@ -25,9 +27,9 @@ use { }, yellowstone_grpc_proto::geyser::{ subscribe_update::UpdateOneof, CommitmentLevel, SubscribeRequest, - SubscribeRequestFilterAccounts, SubscribeRequestFilterSlots, - SubscribeRequestFilterTransactions, SubscribeUpdateAccount, SubscribeUpdateSlot, - SubscribeUpdateTransaction, + SubscribeRequestFilterAccounts, SubscribeRequestFilterBlocksMeta, + SubscribeRequestFilterSlots, SubscribeRequestFilterTransactions, SubscribeUpdateAccount, + SubscribeUpdateBlockMeta, SubscribeUpdateSlot, SubscribeUpdateTransaction, }, }; @@ -59,6 +61,8 @@ enum Action { DeleteAllCg, /// Subscribe to fumarole events Subscribe(SubscribeArgs), + /// Subscribe to fumarole block stats + SubscribeBlocks(SubscribeArgs), } #[derive(Debug, Clone, Parser)] @@ -82,19 +86,73 @@ pub struct DeleteCgArgs { name: String, } +#[derive(Debug, Clone, Parser, Default)] +pub enum CommitmentOption { + Finalized, + Confirmed, + #[default] + Processed, +} + +#[derive(Debug, thiserror::Error)] +#[error("Invalid commitment option {0}")] +pub struct FromStrCommitmentOptionErr(String); + +impl FromStr for CommitmentOption { + type Err = FromStrCommitmentOptionErr; + + fn from_str(s: &str) -> Result { + match s { + "finalized" => Ok(CommitmentOption::Finalized), + "confirmed" => Ok(CommitmentOption::Confirmed), + "processed" => Ok(CommitmentOption::Processed), + whatever => Err(FromStrCommitmentOptionErr(whatever.to_owned())), + } + } +} + +impl ToString for CommitmentOption { + fn to_string(&self) -> String { + match self { + CommitmentOption::Finalized => "finalized".to_string(), + CommitmentOption::Confirmed => "confirmed".to_string(), + CommitmentOption::Processed => "processed".to_string(), + } + } +} + +impl From for CommitmentLevel { + fn from(commitment: CommitmentOption) -> Self { + match commitment { + CommitmentOption::Finalized => 
CommitmentLevel::Finalized, + CommitmentOption::Confirmed => CommitmentLevel::Confirmed, + CommitmentOption::Processed => CommitmentLevel::Processed, + } + } +} + #[derive(Debug, Clone, Parser)] struct SubscribeArgs { /// Name of the consumer group to subscribe to #[clap(long)] cg_name: String, + + #[clap(long, default_value = "processed")] + commitment: CommitmentOption, } fn summarize_account(account: SubscribeUpdateAccount) -> Option { let slot = account.slot; let account = account.account?; - let pubkey = Pubkey::try_from(account.pubkey).expect("Failed to parse pubkey"); - let owner = Pubkey::try_from(account.owner).expect("Failed to parse owner"); - Some(format!("account,{},{},{}", slot, pubkey, owner)) + // let pubkey = Pubkey::try_from(account.pubkey).expect("Failed to parse pubkey"); + // let owner = Pubkey::try_from(account.owner).expect("Failed to parse owner"); + let tx_sig = account.txn_signature; + let tx_sig = if tx_sig.is_none() { + "None".to_string() + } else { + bs58::encode(tx_sig.unwrap()).into_string() + }; + Some(format!("account,{slot},{tx_sig}")) } fn summarize_tx(tx: SubscribeUpdateTransaction) -> Option { @@ -279,8 +337,138 @@ pub fn create_shutdown() -> BoxFuture<'static, ()> { .boxed() } +async fn subscribe_block_stats(mut client: FumaroleClient, args: SubscribeArgs) { + let SubscribeArgs { + cg_name, + commitment, + } = args; + let commitment_level: CommitmentLevel = commitment.into(); + // This request listen for all account updates and transaction updates + let request = SubscribeRequest { + // accounts: HashMap::from([("f1".to_owned(), SubscribeRequestFilterAccounts::default())]), + // transactions: HashMap::from([( + // "f1".to_owned(), + // SubscribeRequestFilterTransactions::default(), + // )]), + blocks_meta: HashMap::from([( + "f1".to_owned(), + SubscribeRequestFilterBlocksMeta::default(), + )]), + slots: HashMap::from([("f1".to_owned(), SubscribeRequestFilterSlots::default())]), + commitment: Some(commitment_level.into()), + 
..Default::default() + }; + + println!("Subscribing to consumer group {}", cg_name); + let subscribe_config = FumaroleSubscribeConfig { + num_data_plane_tcp_connections: NonZeroU8::new(1).unwrap(), + concurrent_download_limit_per_tcp: NonZeroUsize::new(1).unwrap(), + commit_interval: Duration::from_secs(1), + ..Default::default() + }; + let dragonsmouth_session = client + .dragonsmouth_subscribe_with_config(cg_name.clone(), request, subscribe_config) + .await + .expect("Failed to subscribe"); + let DragonsmouthAdapterSession { + sink: _, + mut source, + fumarole_handle: _, + } = dragonsmouth_session; + + let mut shutdown = create_shutdown(); + + #[allow(dead_code)] + enum BlockRow { + AccountUpdate(SubscribeUpdateAccount), + TransactionUpdate(SubscribeUpdateTransaction), + BlockMetaUpdate(SubscribeUpdateBlockMeta), + } + + let mut blocks: HashMap> = HashMap::new(); + let mut block_status: HashMap> = HashMap::new(); + loop { + tokio::select! { + _ = &mut shutdown => { + println!("Shutting down..."); + break; + } + result = source.recv() => { + let Some(result) = result else { + println!("grpc stream closed!"); + break; + }; + + let event = result.expect("Failed to receive event"); + + if let Some(oneof) = event.update_oneof { + match oneof { + UpdateOneof::Account(account_update) => { + blocks.entry( + account_update.slot + ).or_default().push(BlockRow::AccountUpdate(account_update)); + }, + UpdateOneof::Transaction(tx) => { + blocks.entry( + tx.slot + ).or_default().push(BlockRow::TransactionUpdate(tx)); + }, + UpdateOneof::Slot(slot) => { + let SubscribeUpdateSlot { + slot, + parent: _, + status, + dead_error: _ + } = slot; + let cl = CommitmentLevel::try_from(status).unwrap(); + block_status.entry(slot).or_default().push(cl); + let block_data = blocks.remove(&slot); + let msg = if let Some(block_data) = block_data { + let mut block_status = block_status.remove(&slot).unwrap(); + let _status = block_status.pop().unwrap(); + let mut account_updates = 0; + let mut 
tx_cnt = 0; + for row in block_data { + match row { + BlockRow::AccountUpdate(_) => { + account_updates += 1; + } + BlockRow::TransactionUpdate(_) => { + tx_cnt += 1; + } + BlockRow::BlockMetaUpdate(_) => { + } + } + } + format!( + "block {slot}, status {cl:?} account_updates {account_updates} tx_cnt {tx_cnt}" + ) + } else { + format!("block {slot} status {cl:?}") + }; + println!("{}", msg); + } + UpdateOneof::BlockMeta(block_meta) => { + + blocks.entry( + block_meta.slot + ).or_default().push(BlockRow::BlockMetaUpdate(block_meta)); + } + _ => {}, + } + } + } + } + } + println!("Exiting subscribe loop"); +} + async fn subscribe(mut client: FumaroleClient, args: SubscribeArgs) { - let SubscribeArgs { cg_name } = args; + let SubscribeArgs { + cg_name, + commitment, + } = args; + let commitment_level: CommitmentLevel = commitment.into(); // This request listen for all account updates and transaction updates let request = SubscribeRequest { @@ -289,8 +477,12 @@ async fn subscribe(mut client: FumaroleClient, args: SubscribeArgs) { "f1".to_owned(), SubscribeRequestFilterTransactions::default(), )]), + blocks_meta: HashMap::from([( + "f1".to_owned(), + SubscribeRequestFilterBlocksMeta::default(), + )]), slots: HashMap::from([("f1".to_owned(), SubscribeRequestFilterSlots::default())]), - // commitment: Some(CommitmentLevel::Finalized.into()), + commitment: Some(commitment_level.into()), ..Default::default() }; @@ -298,13 +490,13 @@ async fn subscribe(mut client: FumaroleClient, args: SubscribeArgs) { let subscribe_config = FumaroleSubscribeConfig { num_data_plane_tcp_connections: NonZeroU8::new(1).unwrap(), concurrent_download_limit_per_tcp: NonZeroUsize::new(1).unwrap(), + commit_interval: Duration::from_secs(1), ..Default::default() }; let dragonsmouth_session = client .dragonsmouth_subscribe_with_config(cg_name.clone(), request, subscribe_config) .await .expect("Failed to subscribe"); - println!("Subscribed to consumer group {}", cg_name); let DragonsmouthAdapterSession 
{ sink: _, mut source, @@ -321,6 +513,7 @@ async fn subscribe(mut client: FumaroleClient, args: SubscribeArgs) { } result = source.recv() => { let Some(result) = result else { + println!("grpc stream closed!"); break; }; @@ -328,8 +521,12 @@ async fn subscribe(mut client: FumaroleClient, args: SubscribeArgs) { let message = if let Some(oneof) = event.update_oneof { match oneof { - UpdateOneof::Account(account_update) => summarize_account(account_update), - UpdateOneof::Transaction(tx) => summarize_tx(tx), + UpdateOneof::Account(account_update) => { + summarize_account(account_update) + }, + UpdateOneof::Transaction(tx) => { + summarize_tx(tx) + }, UpdateOneof::Slot(slot) => { let SubscribeUpdateSlot { slot, @@ -340,6 +537,13 @@ async fn subscribe(mut client: FumaroleClient, args: SubscribeArgs) { let cl = CommitmentLevel::try_from(status).unwrap(); Some(format!("slot={slot}, parent={parent:?}, status={cl:?}")) } + UpdateOneof::BlockMeta(block_meta) => { + let SubscribeUpdateBlockMeta { + slot, + .. 
+ } = block_meta; + Some(format!("block_meta={slot}")) + } _ => None, } } else { @@ -352,6 +556,7 @@ async fn subscribe(mut client: FumaroleClient, args: SubscribeArgs) { } } } + println!("Exiting subscribe loop"); } #[allow(dead_code)] @@ -408,5 +613,8 @@ async fn main() { Action::Subscribe(subscribe_args) => { subscribe(fumarole_client, subscribe_args).await; } + Action::SubscribeBlocks(subscribe_args) => { + subscribe_block_stats(fumarole_client, subscribe_args).await; + } } } diff --git a/crates/yellowstone-fumarole-client/src/lib.rs b/crates/yellowstone-fumarole-client/src/lib.rs index 1725ab3..377e556 100644 --- a/crates/yellowstone-fumarole-client/src/lib.rs +++ b/crates/yellowstone-fumarole-client/src/lib.rs @@ -223,7 +223,7 @@ pub const DEFAULT_DRAGONSMOUTH_CAPACITY: usize = 10000; /// /// Default Fumarole commit offset interval /// -pub const DEFAULT_COMMIT_INTERVAL: Duration = Duration::from_secs(5); +pub const DEFAULT_COMMIT_INTERVAL: Duration = Duration::from_secs(60); /// /// Default maximum number of consecutive failed slot download attempts before failing the fumarole session. 
diff --git a/crates/yellowstone-fumarole-client/src/runtime/mod.rs b/crates/yellowstone-fumarole-client/src/runtime/mod.rs index 2d83b71..446f6d8 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/mod.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/mod.rs @@ -8,7 +8,7 @@ use { solana_sdk::clock::Slot, std::{ cmp::Reverse, - collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, VecDeque}, + collections::{hash_map, BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, VecDeque}, }, yellowstone_grpc_proto::geyser::{self, CommitmentLevel}, }; @@ -182,8 +182,8 @@ impl FumaroleSM { /// pub fn update_committed_offset(&mut self, offset: FumeOffset) { assert!( - offset > self.last_committed_offset, - "offset must be greater than last committed offset" + offset >= self.last_committed_offset, + "offset must be greater than or equal to last committed offset" ); self.last_committed_offset = offset; } @@ -357,7 +357,7 @@ impl FumaroleSM { dead_error, }); - if !self.inflight_slot_shard_download.contains_key(&slot) { + if let hash_map::Entry::Vacant(e) = self.inflight_slot_shard_download.entry(slot) { // This slot has not been schedule for download yet let download_request = FumeDownloadRequest { slot, @@ -370,8 +370,7 @@ impl FumaroleSM { num_shards, shard_remaining: vec![false; num_shards as usize], }; - self.inflight_slot_shard_download - .insert(slot, download_progress); + e.insert(download_progress); return Some(download_request); } } diff --git a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs index 9fe583e..ab967a2 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs @@ -40,7 +40,7 @@ pub struct DragonsmouthSubscribeRequestBidi { } impl DataPlaneConn { - pub fn new(client: GrpcFumaroleClient, concurrency_limit: usize) -> Self { + pub const fn new(client: GrpcFumaroleClient, concurrency_limit: usize) -> Self { 
Self { permits: concurrency_limit, client, @@ -48,7 +48,7 @@ impl DataPlaneConn { } } - fn has_permit(&self) -> bool { + const fn has_permit(&self) -> bool { self.permits > 0 } } @@ -118,6 +118,11 @@ impl From for BlockFilters { } } +enum LoopInstruction { + Continue, + ErrorStop, +} + impl TokioFumeDragonsmouthRuntime { const RUNTIME_NAME: &'static str = "tokio"; @@ -213,16 +218,22 @@ impl TokioFumeDragonsmouthRuntime { } } + async unsafe fn force_commit_offset(&mut self) { + tracing::debug!("committing offset {}", self.sm.committable_offset); + self.control_plane_tx + .send(build_commit_offset_cmd(self.sm.committable_offset)) + .await + .expect("failed to commit offset"); + #[cfg(feature = "prometheus")] + { + inc_offset_commitment_count(Self::RUNTIME_NAME); + } + } + async fn commit_offset(&mut self) { if self.sm.last_committed_offset < self.sm.committable_offset { - tracing::debug!("committing offset {}", self.sm.committable_offset); - self.control_plane_tx - .send(build_commit_offset_cmd(self.sm.committable_offset)) - .await - .expect("failed to commit offset"); - #[cfg(feature = "prometheus")] - { - inc_offset_commitment_count(Self::RUNTIME_NAME); + unsafe { + self.force_commit_offset().await; } } @@ -274,13 +285,36 @@ impl TokioFumeDragonsmouthRuntime { } } + async fn handle_control_plane_resp( + &mut self, + result: Result, + ) -> LoopInstruction { + match result { + Ok(control_response) => { + self.handle_control_response(control_response); + LoopInstruction::Continue + } + Err(e) => { + // TODO implement auto-reconnect on Unavailable + let _ = self.dragonsmouth_outlet.send(Err(e)).await; + LoopInstruction::ErrorStop + } + } + } + pub(crate) async fn run(mut self) -> Result<(), Box> { let inital_load_history_cmd = build_poll_history_cmd(None); + self.control_plane_tx .send(inital_load_history_cmd) .await .expect("disconnected"); + // Always start to commit offset, to make sure not another instance is committing to the same offset. 
+ unsafe { + self.force_commit_offset().await; + } + loop { if self.dragonsmouth_outlet.is_closed() { tracing::debug!("Detected dragonsmouth outlet closed"); @@ -298,12 +332,16 @@ impl TokioFumeDragonsmouthRuntime { } control_response = self.control_plane_rx.recv() => { match control_response { - Some(Ok(control_response)) => { - self.handle_control_response(control_response); - } - Some(Err(e)) => { - tracing::error!("control plane error: {e}"); - return Err(Box::new(RuntimeError::GrpcError(e))); + Some(result) => { + match self.handle_control_plane_resp(result).await { + LoopInstruction::Continue => { + // continue + } + LoopInstruction::ErrorStop => { + tracing::debug!("control plane error"); + break; + } + } } None => { tracing::debug!("control plane disconnected"); @@ -591,7 +629,7 @@ impl GrpcDownloadTaskRunner { let _ = self .outlet .send(DownloadTaskResult::Err { - slot: slot, + slot, err: DownloadBlockError::BlockShardNotFound, }) .await; diff --git a/crates/yellowstone-fumarole-client/src/util/collections.rs b/crates/yellowstone-fumarole-client/src/util/collections.rs deleted file mode 100644 index d3713d6..0000000 --- a/crates/yellowstone-fumarole-client/src/util/collections.rs +++ /dev/null @@ -1,69 +0,0 @@ -use std::{ - collections::{HashSet, VecDeque}, - hash::Hash, -}; - -#[derive(Debug)] -pub struct KeyedVecDeque { - vec: VecDeque<(K, V)>, - index: HashSet, -} - -impl Default for KeyedVecDeque { - fn default() -> Self { - Self { - vec: Default::default(), - index: Default::default(), - } - } -} - -impl KeyedVecDeque -where - K: Eq + Hash + Clone, -{ - #[allow(dead_code)] - pub fn new() -> Self { - KeyedVecDeque { - vec: VecDeque::new(), - index: HashSet::new(), - } - } - - pub fn len(&self) -> usize { - self.vec.len() - } - - pub fn push_back(&mut self, key: K, item: V) -> bool { - if self.index.insert(key.clone()) { - self.vec.push_back((key, item)); - true - } else { - false - } - } - - #[allow(dead_code)] - pub fn push_front(&mut self, key: K, 
item: V) -> bool { - if self.index.insert(key.clone()) { - self.vec.push_front((key, item)); - true - } else { - false - } - } - - pub fn pop_front(&mut self) -> Option { - if let Some((k, v)) = self.vec.pop_front() { - assert!(self.index.remove(&k)); - Some(v) - } else { - None - } - } - - #[allow(dead_code)] - pub fn is_empty(&self) -> bool { - self.vec.is_empty() - } -} diff --git a/crates/yellowstone-fumarole-client/src/util/mod.rs b/crates/yellowstone-fumarole-client/src/util/mod.rs index 7777d52..773d491 100644 --- a/crates/yellowstone-fumarole-client/src/util/mod.rs +++ b/crates/yellowstone-fumarole-client/src/util/mod.rs @@ -1,2 +1 @@ -pub mod collections; pub mod grpc; diff --git a/proto/fumarole_v2.proto b/proto/fumarole_v2.proto index d728b13..34b18f3 100644 --- a/proto/fumarole_v2.proto +++ b/proto/fumarole_v2.proto @@ -73,8 +73,8 @@ message Pong { message DataCommand { oneof command { - BlockFilters filters = 1; - DownloadBlockShard download_block_shard = 2; + DownloadBlockShard download_block_shard = 1; + BlockFilters filter_update = 2; } } From fc51550e336ba2c641cda825532766eb14db0114 Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Mon, 5 May 2025 14:33:00 -0400 Subject: [PATCH 13/56] v2: added version endpoint --- Cargo.lock | 43 ++-- Cargo.toml | 2 +- .../Cargo.toml | 9 +- apps/yellowstone-fumarole-cli/README.md | 184 +++++++++++++++ .../src/bin/fume.rs} | 213 +++++------------- crates/yellowstone-fumarole-client/src/lib.rs | 9 + proto/fumarole_v2.proto | 9 + 7 files changed, 287 insertions(+), 182 deletions(-) rename apps/{fume => yellowstone-fumarole-cli}/Cargo.toml (86%) create mode 100644 apps/yellowstone-fumarole-cli/README.md rename apps/{fume/src/main.rs => yellowstone-fumarole-cli/src/bin/fume.rs} (68%) diff --git a/Cargo.lock b/Cargo.lock index b507975..d5d078b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -744,6 +744,7 @@ checksum = "2678fade3b77aa3a8ff3aae87e9c008d3fb00473a41c71fbf74e91c8c7b37e84" dependencies = [ "clap", "log", + 
"tracing-core", ] [[package]] @@ -1222,27 +1223,6 @@ dependencies = [ "percent-encoding", ] -[[package]] -name = "fume" -version = "0.1.1+solana.2.1.11" -dependencies = [ - "clap", - "clap-verbosity-flag", - "futures", - "serde_yaml", - "solana-sdk", - "tabled", - "thiserror", - "tokio", - "tokio-stream", - "tonic", - "tracing", - "tracing-subscriber", - "yellowstone-fumarole-client", - "yellowstone-grpc-client", - "yellowstone-grpc-proto", -] - [[package]] name = "futures" version = "0.3.31" @@ -5238,6 +5218,27 @@ version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" +[[package]] +name = "yellowstone-fumarole-cli" +version = "0.1.0-pre1" +dependencies = [ + "clap", + "clap-verbosity-flag", + "futures", + "serde_yaml", + "solana-sdk", + "tabled", + "thiserror", + "tokio", + "tokio-stream", + "tonic", + "tracing", + "tracing-subscriber", + "yellowstone-fumarole-client", + "yellowstone-grpc-client", + "yellowstone-grpc-proto", +] + [[package]] name = "yellowstone-fumarole-client" version = "0.2.0-pre.1+solana.2.1.11" diff --git a/Cargo.toml b/Cargo.toml index 4051b37..331f3da 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [workspace] members = [ - "apps/fume", + "apps/yellowstone-fumarole-cli", "crates/yellowstone-fumarole-client", "examples/rust", ] diff --git a/apps/fume/Cargo.toml b/apps/yellowstone-fumarole-cli/Cargo.toml similarity index 86% rename from apps/fume/Cargo.toml rename to apps/yellowstone-fumarole-cli/Cargo.toml index fd7a6b1..7b65f02 100644 --- a/apps/fume/Cargo.toml +++ b/apps/yellowstone-fumarole-cli/Cargo.toml @@ -1,7 +1,7 @@ [package] -name = "fume" +name = "yellowstone-fumarole-cli" authors.workspace = true -version.workspace = true +version = "0.1.0-pre1" edition.workspace = true homepage.workspace = true repository.workspace = true @@ -9,9 +9,12 @@ license.workspace = true keywords.workspace = true publish.workspace = true 
+[[bin]] +name = "fume" + [dependencies] clap = { workspace = true, features = ["derive"] } -clap-verbosity-flag = { workspace = true } +clap-verbosity-flag = { workspace = true, features = ["tracing"] } futures = { workspace = true } solana-sdk = { workspace = true } serde_yaml = { workspace = true } diff --git a/apps/yellowstone-fumarole-cli/README.md b/apps/yellowstone-fumarole-cli/README.md new file mode 100644 index 0000000..3f659d2 --- /dev/null +++ b/apps/yellowstone-fumarole-cli/README.md @@ -0,0 +1,184 @@ + +# Yellowstone Fumarole CLI + +Fumarole CLI tool + +## Install + +```sh +$ cargo install yellowstone-fumarole-cli +``` + +## Usage + +### Configuration file + +Fumarole CLI look for a file in `~/.config/fume/config.toml` by default, you can change the path location by using `fume --config `. + +Here's how to configure your config file: + +```toml +[fumarole] +endpoints = ["https://fumarole.endpoint.rpcpool.com"] +x-token = "" +``` + +You can test your configuration file with `test-config` subcommand: + +```sh +$ fume test-config +``` + +or with custom config path: + +```sh +$ fume --config path/to/config.toml test-config +``` + +### Create consumer group + +To create a consumer group that at the end of the log, that stream only "confirmed" commitment level transaction: + +```sh +$ fume create-cg --name helloworld-1 \ +--commitment confirmed \ +--seek latest \ +--include tx +``` + +To do the same but for account update + +```sh +$ fume create-cg --name helloworld-2 \ +--commitment confirmed \ +--seek latest \ +--include account +``` + +More usage can be find using the `--help` options: + +```sh +$ fume create-cg --help +Creates a consumer group + +Options: + --name TEXT Consumer group name to subscribe to, if none + provided a random name will be generated + following the pattern + 'fume-'. 
+ --size INTEGER Size of the consumer group + --commitment [processed|confirmed|finalized] + Commitment level [default: confirmed] + --include [all|account|tx] Include option [default: all] + --seek [earliest|latest|slot] Seek option [default: latest] + --help Show this message and exit. +``` + +### Consumer Group Staleness + +Consumer groups can become stale if you are ingesting too slowly. + +Fumarole is a distributed log of blockchain event where each new blockchain event gets appended to. + +As Solana emits a lot of event in one hour, we cannot keep every blockchain event forever. + +Fumarole evicts fragment of the log as they age old enough. + +Depending of the Fumarole cluster you are connected to this time may vary. Connect Triton-One team to learn more. + +When creating a Consumer Group, you must ingest what you are capable of. Otherwise your consumer group is destined to become stale. + +A stale consumer group is a consumer group that haven't yet ingested blockchain event that had already been evicted by Fumarole vacuum process. + + +### Consumer Group Size and performance guideline + +Consumer group size allow you to shard a fumarole stream into multiple consumer group member. +Sharded consumer group follow similar semantics as [Kafka Static Consumer membership](https://cwiki.apache.org/confluence/display/KAFKA/KIP-345%3A+Introduce+static+membership+protocol+to+reduce+consumer+rebalances). + +Here's a quick-recap of static group membership: + +- The Fumarole log is already sharded in multiple partitions. +- When you create a consumer group with `--size N`, it creates `N` member with each `# total fumarole partition / N` partitions. +- Each member of the cnsumer group advance **at its own pace**. +- Your consumer group becomes stale as soon as **one membership is stale**. + + +As of this writing the maximum size of a consumer group is `6`. + +Each member can have their own dedicated TCP connection which offer better performance. 
+ +The processing you do in reception, your internet speed, network bandwidth and location will impact the size of the consumer group. + +Ingesting everything Fumarole can output requires you to be in the same region as your assigned Fumarole cluster and multiple Gbits for internet Bandwidth, otherwise you will fall behind and become stale. + +Limit your subscription feed by using the various filters over the accounts and transactions we offer. + +As for the consumer group size goes, starting with a size of `1` is the simplest approach. + +If you are falling behind because your receiving code adds too much processing overhead, you can try +`2`, `3` and so forth. + +Fumarole is already redundant and load balanced inside our Data centers, increasing `--size` does not inherently add more redundancy. It is a tool for you to scale your read operation in case on instance is not sufficient. + +To create a consumer group with `2` members you just have to provided `--size` options: + +```sh +$ fume create-cg --name example --size 2 +``` + +### List all consumer groups + +```sh +$ fume list-cg +``` + +### Delete a consumer groups + +```sh +$ fume delete-cg --name helloworld +``` + +### Delete all consumer groups + +```sh +$ fume delete-all-cg +``` + +### Stream summary on terminal + +To stream out from the CLI, you can use the `stream` command and its various features! + +```sh +$ fume subscribe --cg-name helloworld +``` + +You can filter the stream content by adding one or multiple occurrence of the following options: + +- `--tx-account ` : filter transaction by account keys. +- `--owner ` : filter account update based on its owner +- `--account ` : filter account update based on accout key. 
+ +Here is an example to get all account updates owned by Token SPL program: + +```sh +$ fume subscribe --name helloworld \ +--owner TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA +``` + +Here is how to chain multiple filters together: + +```sh +$ fume subscribe --cg-name helloworld \ +--owner metaqbxxUerdq28cj1RbAWkYQm3ybzjb6a8bt518x1s \ +--owner TokenzQdBNbLqP5VEhdkAS6EPFLC1PHnBqCXEpPxuEb \ +--owner TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA \ +--owner ATokenGPvbdGVxr1b2hvZbsiqW5xWH25efTNsLJA8knL \ +--owner BGUMAp9Gq7iTEuizy4pqaxsTyUCBK68MDfK752saRPUY \ +--owner CoREENxT6tW1HoK8ypY1SxRMZTcVPm7R94rH4PZNhX7d \ +--tx-account BGUMAp9Gq7iTEuizy4pqaxsTyUCBK68MDfK752saRPUY +``` + +The above command stream all data required by [DAS](https://github.com/rpcpool/digital-asset-validator-plugin). + +**Note**: This command serves more as a testing tool/playground for you to try it out as it only prints summarized data. \ No newline at end of file diff --git a/apps/fume/src/main.rs b/apps/yellowstone-fumarole-cli/src/bin/fume.rs similarity index 68% rename from apps/fume/src/main.rs rename to apps/yellowstone-fumarole-cli/src/bin/fume.rs index 14f2247..827b2ee 100644 --- a/apps/fume/src/main.rs +++ b/apps/yellowstone-fumarole-cli/src/bin/fume.rs @@ -1,9 +1,9 @@ use { clap::Parser, futures::{future::BoxFuture, FutureExt}, - solana_sdk::{bs58, clock::Slot, pubkey::Pubkey}, + solana_sdk::{bs58, pubkey::Pubkey}, std::{ - collections::{HashMap, HashSet}, + collections::HashMap, io::{stderr, stdout, IsTerminal}, num::{NonZeroU8, NonZeroUsize}, path::PathBuf, @@ -49,39 +49,37 @@ struct Args { #[derive(Debug, Clone, Parser)] enum Action { - /// Get Consumer Group Info - GetCgInfo(GetCgInfoArgs), - /// Create a consumer group - CreateCg(CreateCgArgs), - /// Delete a consumer group - DeleteCg(DeleteCgArgs), - /// List all consumer groups - ListCg, - /// Delete all consumer groups - DeleteAllCg, + /// Get Persistent Subscriber Info + GetInfo(GetCgInfoArgs), + /// Create Persistent 
Subscriber + Create(CreateCgArgs), + /// Delete a Persistent Subscriber + Delete(DeleteCgArgs), + /// List all persistent subscribers + List, + /// Delete all persistent subscribers + DeleteAll, /// Subscribe to fumarole events Subscribe(SubscribeArgs), - /// Subscribe to fumarole block stats - SubscribeBlocks(SubscribeArgs), } #[derive(Debug, Clone, Parser)] pub struct GetCgInfoArgs { - /// Name of the consumer group to get info for + /// Name of the persistent subscriber to get info for #[clap(long)] name: String, } #[derive(Debug, Clone, Parser)] pub struct CreateCgArgs { - /// Name of the consumer group to create + /// Name of the persistent subscriber to create #[clap(long)] name: String, } #[derive(Debug, Clone, Parser)] pub struct DeleteCgArgs { - /// Name of the consumer group to delete + /// Name of the persistent subscriber to delete #[clap(long)] name: String, } @@ -133,12 +131,24 @@ impl From for CommitmentLevel { #[derive(Debug, Clone, Parser)] struct SubscribeArgs { - /// Name of the consumer group to subscribe to + /// Name of the persistent subscriber #[clap(long)] - cg_name: String, + name: String, #[clap(long, default_value = "processed")] commitment: CommitmentOption, + + /// List of pubkeys to subscribe to + #[clap(short, long)] + pubkey: Vec, + + /// List of owners to subscribe to + #[clap(short, long)] + owner: Vec, + + /// List of pubkeys to must be in the transaction to subscribe to + #[clap(long, short)] + tx_pubkey: Vec, } fn summarize_account(account: SubscribeUpdateAccount) -> Option { @@ -228,7 +238,7 @@ async fn create_cg(args: CreateCgArgs, mut client: FumaroleClient) { return; } eprintln!( - "Failed to create consumer group: {} {}", + "Failed to create consumer group: {}, {}", e.code(), e.message() ); @@ -337,145 +347,31 @@ pub fn create_shutdown() -> BoxFuture<'static, ()> { .boxed() } -async fn subscribe_block_stats(mut client: FumaroleClient, args: SubscribeArgs) { +async fn subscribe(mut client: FumaroleClient, args: 
SubscribeArgs) { let SubscribeArgs { - cg_name, + name: cg_name, commitment, + pubkey, + owner, + tx_pubkey, } = args; let commitment_level: CommitmentLevel = commitment.into(); // This request listen for all account updates and transaction updates let request = SubscribeRequest { - // accounts: HashMap::from([("f1".to_owned(), SubscribeRequestFilterAccounts::default())]), - // transactions: HashMap::from([( - // "f1".to_owned(), - // SubscribeRequestFilterTransactions::default(), - // )]), - blocks_meta: HashMap::from([( + accounts: HashMap::from([( "f1".to_owned(), - SubscribeRequestFilterBlocksMeta::default(), + SubscribeRequestFilterAccounts { + account: pubkey.iter().map(|p| p.to_string()).collect(), + owner: owner.iter().map(|p| p.to_string()).collect(), + ..Default::default() + }, )]), - slots: HashMap::from([("f1".to_owned(), SubscribeRequestFilterSlots::default())]), - commitment: Some(commitment_level.into()), - ..Default::default() - }; - - println!("Subscribing to consumer group {}", cg_name); - let subscribe_config = FumaroleSubscribeConfig { - num_data_plane_tcp_connections: NonZeroU8::new(1).unwrap(), - concurrent_download_limit_per_tcp: NonZeroUsize::new(1).unwrap(), - commit_interval: Duration::from_secs(1), - ..Default::default() - }; - let dragonsmouth_session = client - .dragonsmouth_subscribe_with_config(cg_name.clone(), request, subscribe_config) - .await - .expect("Failed to subscribe"); - let DragonsmouthAdapterSession { - sink: _, - mut source, - fumarole_handle: _, - } = dragonsmouth_session; - - let mut shutdown = create_shutdown(); - - #[allow(dead_code)] - enum BlockRow { - AccountUpdate(SubscribeUpdateAccount), - TransactionUpdate(SubscribeUpdateTransaction), - BlockMetaUpdate(SubscribeUpdateBlockMeta), - } - - let mut blocks: HashMap> = HashMap::new(); - let mut block_status: HashMap> = HashMap::new(); - loop { - tokio::select! 
{ - _ = &mut shutdown => { - println!("Shutting down..."); - break; - } - result = source.recv() => { - let Some(result) = result else { - println!("grpc stream closed!"); - break; - }; - - let event = result.expect("Failed to receive event"); - - if let Some(oneof) = event.update_oneof { - match oneof { - UpdateOneof::Account(account_update) => { - blocks.entry( - account_update.slot - ).or_default().push(BlockRow::AccountUpdate(account_update)); - }, - UpdateOneof::Transaction(tx) => { - blocks.entry( - tx.slot - ).or_default().push(BlockRow::TransactionUpdate(tx)); - }, - UpdateOneof::Slot(slot) => { - let SubscribeUpdateSlot { - slot, - parent: _, - status, - dead_error: _ - } = slot; - let cl = CommitmentLevel::try_from(status).unwrap(); - block_status.entry(slot).or_default().push(cl); - let block_data = blocks.remove(&slot); - let msg = if let Some(block_data) = block_data { - let mut block_status = block_status.remove(&slot).unwrap(); - let _status = block_status.pop().unwrap(); - let mut account_updates = 0; - let mut tx_cnt = 0; - for row in block_data { - match row { - BlockRow::AccountUpdate(_) => { - account_updates += 1; - } - BlockRow::TransactionUpdate(_) => { - tx_cnt += 1; - } - BlockRow::BlockMetaUpdate(_) => { - } - } - } - format!( - "block {slot}, status {cl:?} account_updates {account_updates} tx_cnt {tx_cnt}" - ) - } else { - format!("block {slot} status {cl:?}") - }; - println!("{}", msg); - } - UpdateOneof::BlockMeta(block_meta) => { - - blocks.entry( - block_meta.slot - ).or_default().push(BlockRow::BlockMetaUpdate(block_meta)); - } - _ => {}, - } - } - } - } - } - println!("Exiting subscribe loop"); -} - -async fn subscribe(mut client: FumaroleClient, args: SubscribeArgs) { - let SubscribeArgs { - cg_name, - commitment, - } = args; - let commitment_level: CommitmentLevel = commitment.into(); - - // This request listen for all account updates and transaction updates - let request = SubscribeRequest { - accounts: 
HashMap::from([("f1".to_owned(), SubscribeRequestFilterAccounts::default())]), transactions: HashMap::from([( "f1".to_owned(), - SubscribeRequestFilterTransactions::default(), + SubscribeRequestFilterTransactions { + account_include: tx_pubkey.iter().map(|p| p.to_string()).collect(), + ..Default::default() + }, )]), blocks_meta: HashMap::from([( "f1".to_owned(), @@ -583,6 +479,12 @@ fn setup_tracing_test_many( #[tokio::main] async fn main() { let args = Args::parse(); + let verbosity = args.verbose.tracing_level_filter(); + let curr_crate = env!("CARGO_PKG_NAME"); + + let filter = format!("{curr_crate}={verbosity},yellowstone_fumarole_client={verbosity}"); + let env_filter = EnvFilter::new(filter); + tracing_subscriber::fmt().with_env_filter(env_filter).init(); // setup_tracing_test_many(["yellowstone_fumarole_client"]); let config = std::fs::read_to_string(&args.config).expect("Failed to read config file"); @@ -595,26 +497,23 @@ async fn main() { .expect("Failed to connect to fumarole"); match args.action { - Action::GetCgInfo(get_cg_info_args) => { + Action::GetInfo(get_cg_info_args) => { get_cg_info(get_cg_info_args, fumarole_client).await; } - Action::CreateCg(create_cg_args) => { + Action::Create(create_cg_args) => { create_cg(create_cg_args, fumarole_client).await; } - Action::DeleteCg(delete_cg_args) => { + Action::Delete(delete_cg_args) => { delete_cg(delete_cg_args, fumarole_client).await; } - Action::ListCg => { + Action::List => { list_all_cg(fumarole_client).await; } - Action::DeleteAllCg => { + Action::DeleteAll => { delete_all_cg(fumarole_client).await; } Action::Subscribe(subscribe_args) => { subscribe(fumarole_client, subscribe_args).await; } - Action::SubscribeBlocks(subscribe_args) => { - subscribe_block_stats(fumarole_client, subscribe_args).await; - } } } diff --git a/crates/yellowstone-fumarole-client/src/lib.rs b/crates/yellowstone-fumarole-client/src/lib.rs index 377e556..fc343fe 100644 --- a/crates/yellowstone-fumarole-client/src/lib.rs +++ 
b/crates/yellowstone-fumarole-client/src/lib.rs @@ -391,6 +391,15 @@ impl FumaroleClient { }) } + /// + /// Returns the current version of the Fumarole service. + /// + pub async fn version(&mut self) -> Result { + let request = tonic::Request::new(proto::VersionRequest {}); + let response = self.inner.version(request).await?; + Ok(response.into_inner()) + } + /// /// Subscribe to a stream of updates from the Fumarole service /// diff --git a/proto/fumarole_v2.proto b/proto/fumarole_v2.proto index 34b18f3..e424005 100644 --- a/proto/fumarole_v2.proto +++ b/proto/fumarole_v2.proto @@ -17,8 +17,17 @@ service Fumarole { // Represents subscription to the control plane rpc Subscribe(stream ControlCommand) returns (stream ControlResponse) {} + + rpc Version(VersionRequest) returns (VersionResponse) {} } +message VersionRequest {} + +message VersionResponse { + string version = 1; +} + + message GetConsumerGroupInfoRequest { string consumer_group_name = 1; } From a1a850e7ed1faa196f13f3fad73d00e5386b7a21 Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Mon, 5 May 2025 14:40:12 -0400 Subject: [PATCH 14/56] v2: support test-config in fume cli --- apps/yellowstone-fumarole-cli/src/bin/fume.rs | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/apps/yellowstone-fumarole-cli/src/bin/fume.rs b/apps/yellowstone-fumarole-cli/src/bin/fume.rs index 827b2ee..ccd9aaf 100644 --- a/apps/yellowstone-fumarole-cli/src/bin/fume.rs +++ b/apps/yellowstone-fumarole-cli/src/bin/fume.rs @@ -49,6 +49,8 @@ struct Args { #[derive(Debug, Clone, Parser)] enum Action { + /// Test the connection to the fumarole service + TestConfig, /// Get Persistent Subscriber Info GetInfo(GetCgInfoArgs), /// Create Persistent Subscriber @@ -476,6 +478,31 @@ fn setup_tracing_test_many( .try_init() } +async fn test_config(mut fumarole_client: FumaroleClient) { + let result = fumarole_client.version().await; + match result { + Ok(version) => { + println!( + "Successfully connected to Fumarole 
Service -- version: {}", + version.version + ); + } + Err(e) => { + match e.code() { + Code::Unauthenticated => { + eprintln!( + "Missing authentication token or invalid token in configuration file" + ); + } + _ => { + eprintln!("Failed to connect to fumarole: {}", e); + } + } + return; + } + } +} + #[tokio::main] async fn main() { let args = Args::parse(); @@ -497,6 +524,9 @@ async fn main() { .expect("Failed to connect to fumarole"); match args.action { + Action::TestConfig => { + test_config(fumarole_client).await; + } Action::GetInfo(get_cg_info_args) => { get_cg_info(get_cg_info_args, fumarole_client).await; } From e4c84307a551cbf51534f4add184dd8b8223efa5 Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Mon, 5 May 2025 14:48:56 -0400 Subject: [PATCH 15/56] v2: added fume CLI readme --- apps/yellowstone-fumarole-cli/README.md | 137 +++++------------- .../config_example.toml | 3 + apps/yellowstone-fumarole-cli/src/bin/fume.rs | 16 +- 3 files changed, 49 insertions(+), 107 deletions(-) create mode 100644 apps/yellowstone-fumarole-cli/config_example.toml diff --git a/apps/yellowstone-fumarole-cli/README.md b/apps/yellowstone-fumarole-cli/README.md index 3f659d2..fac26c1 100644 --- a/apps/yellowstone-fumarole-cli/README.md +++ b/apps/yellowstone-fumarole-cli/README.md @@ -11,22 +11,46 @@ $ cargo install yellowstone-fumarole-cli ## Usage -### Configuration file +```sh +fume --help + +Yellowstone Fumarole CLI + +Usage: fume [OPTIONS] --config + +Commands: + test-config Test the connection to the fumarole service + get-info Get Persistent Subscriber Info + create Create Persistent Subscriber + delete Delete a Persistent Subscriber + list List all persistent subscribers + delete-all Delete all persistent subscribers + subscribe Subscribe to fumarole events + help Print this message or the help of the given subcommand(s) + +Options: + --config Path to static config file + -v, --verbose... Increase logging verbosity + -q, --quiet... 
Decrease logging verbosity + -h, --help Print help + -V, --version Print version +``` + -Fumarole CLI look for a file in `~/.config/fume/config.toml` by default, you can change the path location by using `fume --config `. +### Configuration file Here's how to configure your config file: ```toml [fumarole] endpoints = ["https://fumarole.endpoint.rpcpool.com"] -x-token = "" +x-token = "00000000-0000-0000-0000-000000000000" ``` You can test your configuration file with `test-config` subcommand: ```sh -$ fume test-config +$ fume --config path/to/config.toml test-config ``` or with custom config path: @@ -35,114 +59,29 @@ or with custom config path: $ fume --config path/to/config.toml test-config ``` -### Create consumer group - -To create a consumer group that at the end of the log, that stream only "confirmed" commitment level transaction: - -```sh -$ fume create-cg --name helloworld-1 \ ---commitment confirmed \ ---seek latest \ ---include tx -``` - -To do the same but for account update - -```sh -$ fume create-cg --name helloworld-2 \ ---commitment confirmed \ ---seek latest \ ---include account -``` - -More usage can be find using the `--help` options: - -```sh -$ fume create-cg --help -Creates a consumer group - -Options: - --name TEXT Consumer group name to subscribe to, if none - provided a random name will be generated - following the pattern - 'fume-'. - --size INTEGER Size of the consumer group - --commitment [processed|confirmed|finalized] - Commitment level [default: confirmed] - --include [all|account|tx] Include option [default: all] - --seek [earliest|latest|slot] Seek option [default: latest] - --help Show this message and exit. -``` - -### Consumer Group Staleness - -Consumer groups can become stale if you are ingesting too slowly. - -Fumarole is a distributed log of blockchain event where each new blockchain event gets appended to. - -As Solana emits a lot of event in one hour, we cannot keep every blockchain event forever. 
- -Fumarole evicts fragment of the log as they age old enough. - -Depending of the Fumarole cluster you are connected to this time may vary. Connect Triton-One team to learn more. - -When creating a Consumer Group, you must ingest what you are capable of. Otherwise your consumer group is destined to become stale. - -A stale consumer group is a consumer group that haven't yet ingested blockchain event that had already been evicted by Fumarole vacuum process. - - -### Consumer Group Size and performance guideline - -Consumer group size allow you to shard a fumarole stream into multiple consumer group member. -Sharded consumer group follow similar semantics as [Kafka Static Consumer membership](https://cwiki.apache.org/confluence/display/KAFKA/KIP-345%3A+Introduce+static+membership+protocol+to+reduce+consumer+rebalances). - -Here's a quick-recap of static group membership: - -- The Fumarole log is already sharded in multiple partitions. -- When you create a consumer group with `--size N`, it creates `N` member with each `# total fumarole partition / N` partitions. -- Each member of the cnsumer group advance **at its own pace**. -- Your consumer group becomes stale as soon as **one membership is stale**. - - -As of this writing the maximum size of a consumer group is `6`. - -Each member can have their own dedicated TCP connection which offer better performance. - -The processing you do in reception, your internet speed, network bandwidth and location will impact the size of the consumer group. - -Ingesting everything Fumarole can output requires you to be in the same region as your assigned Fumarole cluster and multiple Gbits for internet Bandwidth, otherwise you will fall behind and become stale. - -Limit your subscription feed by using the various filters over the accounts and transactions we offer. - -As for the consumer group size goes, starting with a size of `1` is the simplest approach. 
- -If you are falling behind because your receiving code adds too much processing overhead, you can try -`2`, `3` and so forth. - -Fumarole is already redundant and load balanced inside our Data centers, increasing `--size` does not inherently add more redundancy. It is a tool for you to scale your read operation in case on instance is not sufficient. +### Create a Persistent Subscriber -To create a consumer group with `2` members you just have to provided `--size` options: ```sh -$ fume create-cg --name example --size 2 +$ fume create --name helloworld-1 \ ``` -### List all consumer groups +### List all persistent subscribers ```sh -$ fume list-cg +$ fume list ``` -### Delete a consumer groups +### Delete a persistent subscribers ```sh -$ fume delete-cg --name helloworld +$ fume delete --name helloworld ``` -### Delete all consumer groups +### Delete all persistent subscribers ```sh -$ fume delete-all-cg +$ fume delete-all ``` ### Stream summary on terminal @@ -150,12 +89,12 @@ $ fume delete-all-cg To stream out from the CLI, you can use the `stream` command and its various features! ```sh -$ fume subscribe --cg-name helloworld +$ fume subscribe --name helloworld ``` You can filter the stream content by adding one or multiple occurrence of the following options: -- `--tx-account ` : filter transaction by account keys. +- `--tx-pubkey ` : filter transaction by account keys. - `--owner ` : filter account update based on its owner - `--account ` : filter account update based on accout key. 
diff --git a/apps/yellowstone-fumarole-cli/config_example.toml b/apps/yellowstone-fumarole-cli/config_example.toml new file mode 100644 index 0000000..0b254f9 --- /dev/null +++ b/apps/yellowstone-fumarole-cli/config_example.toml @@ -0,0 +1,3 @@ +[fumarole] +endpoints = ["https://fumarole.endpoint.rpcpool.com"] +x-token = "00000000-0000-0000-0000-000000000000" \ No newline at end of file diff --git a/apps/yellowstone-fumarole-cli/src/bin/fume.rs b/apps/yellowstone-fumarole-cli/src/bin/fume.rs index ccd9aaf..32808cc 100644 --- a/apps/yellowstone-fumarole-cli/src/bin/fume.rs +++ b/apps/yellowstone-fumarole-cli/src/bin/fume.rs @@ -34,7 +34,7 @@ use { }; #[derive(Debug, Clone, Parser)] -#[clap(author, version, about = "Yellowstone gRPC ScyllaDB Tool")] +#[clap(author, version, about = "Yellowstone Fumarole CLI")] struct Args { /// Path to static config file #[clap(long)] @@ -140,17 +140,17 @@ struct SubscribeArgs { #[clap(long, default_value = "processed")] commitment: CommitmentOption, - /// List of pubkeys to subscribe to + /// List of account public keys to subscribe to #[clap(short, long)] - pubkey: Vec, + account: Vec, - /// List of owners to subscribe to + /// List of account owners to subscribe to #[clap(short, long)] owner: Vec, - /// List of pubkeys to must be in the transaction to subscribe to + /// List of account public keys that must be included in the transaction #[clap(long, short)] - tx_pubkey: Vec, + tx_account: Vec, } fn summarize_account(account: SubscribeUpdateAccount) -> Option { @@ -353,9 +353,9 @@ async fn subscribe(mut client: FumaroleClient, args: SubscribeArgs) { let SubscribeArgs { name: cg_name, commitment, - pubkey, + account: pubkey, owner, - tx_pubkey, + tx_account: tx_pubkey, } = args; let commitment_level: CommitmentLevel = commitment.into(); // This request listen for all account updates and transaction updates From f81dec557db64bae6a417ec39e9ed2a30639bc72 Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Mon, 5 May 2025 15:42:02 -0400 
Subject: [PATCH 16/56] v2: support subscribe request update --- apps/yellowstone-fumarole-cli/src/bin/fume.rs | 39 ++++++------- crates/yellowstone-fumarole-client/src/lib.rs | 15 ++--- .../src/metrics.rs | 17 ------ .../src/runtime/tokio.rs | 55 ++++++++++++++----- 4 files changed, 67 insertions(+), 59 deletions(-) diff --git a/apps/yellowstone-fumarole-cli/src/bin/fume.rs b/apps/yellowstone-fumarole-cli/src/bin/fume.rs index 32808cc..e6869de 100644 --- a/apps/yellowstone-fumarole-cli/src/bin/fume.rs +++ b/apps/yellowstone-fumarole-cli/src/bin/fume.rs @@ -4,8 +4,9 @@ use { solana_sdk::{bs58, pubkey::Pubkey}, std::{ collections::HashMap, + fmt, io::{stderr, stdout, IsTerminal}, - num::{NonZeroU8, NonZeroUsize}, + num::NonZeroUsize, path::PathBuf, str::FromStr, time::Duration, @@ -111,12 +112,12 @@ impl FromStr for CommitmentOption { } } -impl ToString for CommitmentOption { - fn to_string(&self) -> String { +impl fmt::Display for CommitmentOption { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - CommitmentOption::Finalized => "finalized".to_string(), - CommitmentOption::Confirmed => "confirmed".to_string(), - CommitmentOption::Processed => "processed".to_string(), + CommitmentOption::Finalized => write!(f, "finalized"), + CommitmentOption::Confirmed => write!(f, "confirmed"), + CommitmentOption::Processed => write!(f, "processed"), } } } @@ -159,10 +160,10 @@ fn summarize_account(account: SubscribeUpdateAccount) -> Option { // let pubkey = Pubkey::try_from(account.pubkey).expect("Failed to parse pubkey"); // let owner = Pubkey::try_from(account.owner).expect("Failed to parse owner"); let tx_sig = account.txn_signature; - let tx_sig = if tx_sig.is_none() { - "None".to_string() + let tx_sig = if let Some(tx_sig_bytes) = tx_sig { + bs58::encode(tx_sig_bytes).into_string() } else { - bs58::encode(tx_sig.unwrap()).into_string() + "None".to_string() }; Some(format!("account,{slot},{tx_sig}")) } @@ -386,7 +387,6 @@ async fn subscribe(mut 
client: FumaroleClient, args: SubscribeArgs) { println!("Subscribing to consumer group {}", cg_name); let subscribe_config = FumaroleSubscribeConfig { - num_data_plane_tcp_connections: NonZeroU8::new(1).unwrap(), concurrent_download_limit_per_tcp: NonZeroUsize::new(1).unwrap(), commit_interval: Duration::from_secs(1), ..Default::default() @@ -487,19 +487,14 @@ async fn test_config(mut fumarole_client: FumaroleClient) { version.version ); } - Err(e) => { - match e.code() { - Code::Unauthenticated => { - eprintln!( - "Missing authentication token or invalid token in configuration file" - ); - } - _ => { - eprintln!("Failed to connect to fumarole: {}", e); - } + Err(e) => match e.code() { + Code::Unauthenticated => { + eprintln!("Missing authentication token or invalid token in configuration file"); } - return; - } + _ => { + eprintln!("Failed to connect to fumarole: {}", e); + } + }, } } diff --git a/crates/yellowstone-fumarole-client/src/lib.rs b/crates/yellowstone-fumarole-client/src/lib.rs index fc343fe..f5ede32 100644 --- a/crates/yellowstone-fumarole-client/src/lib.rs +++ b/crates/yellowstone-fumarole-client/src/lib.rs @@ -126,7 +126,7 @@ use { }, std::{ collections::HashMap, - num::{NonZeroU8, NonZeroUsize}, + num::NonZeroUsize, time::{Duration, Instant}, }, tokio::sync::mpsc, @@ -233,7 +233,7 @@ pub const DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT: usize = 3; /// /// Default number of parallel data streams (TCP connections) to open to fumarole. /// -pub const DEFAULT_PARA_DATA_STREAMS: u8 = 3; +// const _DEFAULT_PARA_DATA_STREAMS: u8 = 3; /**TODO: enable this after beta*/ /// /// Default maximum number of concurrent download requests to the fumarole service inside a single data plane TCP connection. 
@@ -274,7 +274,7 @@ pub struct FumaroleSubscribeConfig { /// /// Number of parallel data streams (TCP connections) to open to fumarole /// - pub num_data_plane_tcp_connections: NonZeroU8, + // pub num_data_plane_tcp_connections: NonZeroU8, /*TODO: enable this after beta */ /// /// Maximum number of concurrent download requests to the fumarole service inside a single data plane TCP connection. @@ -300,7 +300,7 @@ pub struct FumaroleSubscribeConfig { impl Default for FumaroleSubscribeConfig { fn default() -> Self { Self { - num_data_plane_tcp_connections: NonZeroU8::new(DEFAULT_PARA_DATA_STREAMS).unwrap(), + // num_data_plane_tcp_connections: NonZeroU8::new(DEFAULT_PARA_DATA_STREAMS).unwrap(), /**THIS FEATURE WILL BE DONE AFTER BETA */ concurrent_download_limit_per_tcp: NonZeroUsize::new( DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP, ) @@ -501,9 +501,9 @@ impl FumaroleClient { rx: dm_rx, }; - let mut data_plane_channel_vec = - Vec::with_capacity(config.num_data_plane_tcp_connections.get() as usize); - for _ in 0..config.num_data_plane_tcp_connections.get() { + let mut data_plane_channel_vec = Vec::with_capacity(1); + // TODO: support config.num_data_plane_tcp_connections + for _ in 0..1 { let client = self .connector .connect() @@ -525,6 +525,7 @@ impl FumaroleClient { download_task_queue_rx, download_result_tx, config.max_failed_slot_download_attempt, + request.clone(), ); let download_task_runner_chans = DownloadTaskRunnerChannels { diff --git a/crates/yellowstone-fumarole-client/src/metrics.rs b/crates/yellowstone-fumarole-client/src/metrics.rs index a5d26a0..a4fb159 100644 --- a/crates/yellowstone-fumarole-client/src/metrics.rs +++ b/crates/yellowstone-fumarole-client/src/metrics.rs @@ -29,14 +29,6 @@ lazy_static! 
{ &["runtime"], ) .unwrap(); - pub(crate) static ref SLOT_DOWNLOAD_QUEUE_SIZE: IntGaugeVec = IntGaugeVec::new( - Opts::new( - "fumarole_slot_download_queue", - "Number slot download requests in the queue, waiting to be downloaded", - ), - &["runtime"], - ) - .unwrap(); pub(crate) static ref SLOT_DOWNLOAD_DURATION: HistogramVec = HistogramVec::new( HistogramOpts::new( "fumarole_slot_download_duration_ms", @@ -115,12 +107,6 @@ pub(crate) fn dec_inflight_slot_download(name: impl AsRef) { .dec(); } -pub(crate) fn set_slot_download_queue_size(name: impl AsRef, size: usize) { - SLOT_DOWNLOAD_QUEUE_SIZE - .with_label_values(&[name.as_ref()]) - .set(size as i64); -} - pub(crate) fn inc_offset_commitment_count(name: impl AsRef) { OFFSET_COMMITMENT_COUNT .with_label_values(&[name.as_ref()]) @@ -149,9 +135,6 @@ pub fn register_metrics(registry: &prometheus::Registry) { registry .register(Box::new(INFLIGHT_SLOT_DOWNLOAD.clone())) .unwrap(); - registry - .register(Box::new(SLOT_DOWNLOAD_QUEUE_SIZE.clone())) - .unwrap(); registry .register(Box::new(SLOT_DOWNLOAD_DURATION.clone())) .unwrap(); diff --git a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs index ab967a2..78cc708 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs @@ -2,7 +2,7 @@ use crate::metrics::{ dec_inflight_slot_download, inc_failed_slot_download_attempt, inc_inflight_slot_download, inc_offset_commitment_count, inc_slot_download_count, observe_slot_download_duration, - set_max_slot_detected, set_slot_download_queue_size, + set_max_slot_detected, }; use { super::{FumaroleSM, FumeDownloadRequest, FumeOffset, FumeShardIdx}, @@ -193,7 +193,6 @@ impl TokioFumeDragonsmouthRuntime { }; let download_task_args = DownloadTaskArgs { download_request, - filters: Some(self.subscribe_request.clone().into()), dragonsmouth_outlet: self.dragonsmouth_outlet.clone(), }; 
permit.send(download_task_args); @@ -302,6 +301,17 @@ impl TokioFumeDragonsmouthRuntime { } } + async fn handle_new_subscribe_request(&mut self, subscribe_request: SubscribeRequest) { + self.subscribe_request = subscribe_request; + self.download_task_runner_chans + .cnc_tx + .send(DownloadTaskRunnerCommand::UpdateSubscribeRequest( + self.subscribe_request.clone(), + )) + .await + .expect("failed to send subscribe request"); + } + pub(crate) async fn run(mut self) -> Result<(), Box> { let inital_load_history_cmd = build_poll_history_cmd(None); @@ -328,7 +338,8 @@ impl TokioFumeDragonsmouthRuntime { tokio::select! { Some(subscribe_request) = self.dragonsmouth_bidi.rx.recv() => { tracing::debug!("dragonsmouth subscribe request received"); - self.subscribe_request = subscribe_request + // self.subscribe_request = subscribe_request + self.handle_new_subscribe_request(subscribe_request).await; } control_response = self.control_plane_rx.recv() => { match control_response { @@ -395,7 +406,9 @@ pub struct DownloadTaskRunnerChannels { pub download_result_rx: mpsc::Receiver, } -pub enum DownloadTaskRunnerCommand {} +pub enum DownloadTaskRunnerCommand { + UpdateSubscribeRequest(SubscribeRequest), +} /// /// Holds information about on-going data plane task. 
@@ -404,7 +417,6 @@ pub enum DownloadTaskRunnerCommand {} pub(crate) struct DataPlaneTaskMeta { client_idx: usize, request: FumeDownloadRequest, - filters: Option, dragonsmouth_outlet: mpsc::Sender>, scheduled_at: Instant, client_rev: u64, @@ -463,6 +475,9 @@ pub struct GrpcDownloadTaskRunner { /// The maximum download attempt per slot (how many download failure do we allow) /// max_download_attempt_per_slot: usize, + + /// The subscribe request to use for the download task + subscribe_request: SubscribeRequest, } /// @@ -471,7 +486,6 @@ pub struct GrpcDownloadTaskRunner { #[derive(Debug, Clone)] pub struct DownloadTaskArgs { pub download_request: FumeDownloadRequest, - pub filters: Option, pub dragonsmouth_outlet: mpsc::Sender>, } @@ -484,6 +498,7 @@ pub(crate) struct DataPlaneConn { impl GrpcDownloadTaskRunner { const RUNTIME_NAME: &'static str = "tokio_grpc_task_runner"; + #[allow(clippy::too_many_arguments)] pub fn new( rt: tokio::runtime::Handle, data_plane_channel_vec: Vec, @@ -492,6 +507,7 @@ impl GrpcDownloadTaskRunner { download_task_queue: mpsc::Receiver, outlet: mpsc::Sender, max_download_attempt_by_slot: usize, + subscribe_request: SubscribeRequest, ) -> Self { Self { rt, @@ -504,13 +520,14 @@ impl GrpcDownloadTaskRunner { download_attempts: HashMap::new(), outlet, max_download_attempt_per_slot: max_download_attempt_by_slot, + subscribe_request, } } /// /// Always pick the client with the highest permit limit (least used) /// - fn find_most_underloaded_client(&self) -> Option { + fn find_least_use_client(&self) -> Option { self.data_plane_channel_vec .iter() .enumerate() @@ -612,7 +629,6 @@ impl GrpcDownloadTaskRunner { tracing::debug!("Download slot {slot} failed, rescheduling for retry..."); let task_spec = DownloadTaskArgs { download_request: task_meta.request, - filters: task_meta.filters, dragonsmouth_outlet: task_meta.dragonsmouth_outlet, }; // Reschedule download immediately @@ -652,21 +668,20 @@ impl GrpcDownloadTaskRunner { let DownloadTaskArgs 
{ download_request, - filters, + // filters, dragonsmouth_outlet, } = task_spec; let slot = download_request.slot; let task = GrpcDownloadBlockTaskRun { download_request: download_request.clone(), client, - filters: filters.clone(), + filters: Some(self.subscribe_request.clone().into()), dragonsmouth_oulet: dragonsmouth_outlet.clone(), }; let ah = self.tasks.spawn_on(task.run(), &self.rt); let task_meta = DataPlaneTaskMeta { client_idx, request: download_request.clone(), - filters, dragonsmouth_outlet, scheduled_at: Instant::now(), client_rev, @@ -676,17 +691,31 @@ impl GrpcDownloadTaskRunner { .and_modify(|e| *e += 1) .or_insert(1); conn.permits.checked_sub(1).expect("underflow"); + + #[cfg(feature = "prometheus")] + { + inc_inflight_slot_download(Self::RUNTIME_NAME); + } + self.task_meta.insert(ah.id(), task_meta); } + fn handle_control_command(&mut self, cmd: DownloadTaskRunnerCommand) { + match cmd { + DownloadTaskRunnerCommand::UpdateSubscribeRequest(subscribe_request) => { + self.subscribe_request = subscribe_request; + } + } + } + pub(crate) async fn run(mut self) -> Result<(), DownloadBlockError> { while !self.outlet.is_closed() { - let maybe_available_client_idx = self.find_most_underloaded_client(); + let maybe_available_client_idx = self.find_least_use_client(); tokio::select! 
{ maybe = self.cnc_rx.recv() => { match maybe { Some(cmd) => { - todo!() + self.handle_control_command(cmd); }, None => { tracing::debug!("command channel disconnected"); From 15756c63a6ea0a474673ffc7ff1b36eaeeccd628 Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Tue, 6 May 2025 14:52:31 -0400 Subject: [PATCH 17/56] v2: fume cli + prometheus intg --- Cargo.lock | 14 +++ Cargo.toml | 3 + apps/yellowstone-fumarole-cli/Cargo.toml | 10 +- apps/yellowstone-fumarole-cli/src/bin/fume.rs | 106 +++++++++++++----- apps/yellowstone-fumarole-cli/src/lib.rs | 1 + apps/yellowstone-fumarole-cli/src/prom.rs | 62 ++++++++++ crates/yellowstone-fumarole-client/Cargo.toml | 3 +- .../src/metrics.rs | 66 +++++++++++ .../src/runtime/mod.rs | 94 +++++++++++----- .../src/runtime/tokio.rs | 52 +++++++-- rust-toolchain.toml | 2 +- 11 files changed, 346 insertions(+), 67 deletions(-) create mode 100644 apps/yellowstone-fumarole-cli/src/lib.rs create mode 100644 apps/yellowstone-fumarole-cli/src/prom.rs diff --git a/Cargo.lock b/Cargo.lock index d5d078b..0965579 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1312,6 +1312,15 @@ dependencies = [ "slab", ] +[[package]] +name = "fxhash" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" +dependencies = [ + "byteorder", +] + [[package]] name = "generic-array" version = "0.14.7" @@ -5225,6 +5234,10 @@ dependencies = [ "clap", "clap-verbosity-flag", "futures", + "http-body-util", + "hyper 1.6.0", + "hyper-util", + "prometheus", "serde_yaml", "solana-sdk", "tabled", @@ -5245,6 +5258,7 @@ version = "0.2.0-pre.1+solana.2.1.11" dependencies = [ "async-trait", "futures", + "fxhash", "http 1.2.0", "hyper 1.6.0", "lazy_static", diff --git a/Cargo.toml b/Cargo.toml index 331f3da..8b3e5ce 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -22,8 +22,11 @@ async-trait = "0.1.88" clap = "4.5.7" clap-verbosity-flag = "3.0.2" futures = "0.3.31" +fxhash = 
"0.2.1" http = "1.2.0" +http-body-util = "0.1" hyper = "1.3.1" +hyper-util = "0.1" lazy_static = "~1.5.0" prometheus = "~0.13.0" prost = "0.13.0" diff --git a/apps/yellowstone-fumarole-cli/Cargo.toml b/apps/yellowstone-fumarole-cli/Cargo.toml index 7b65f02..fde4f3f 100644 --- a/apps/yellowstone-fumarole-cli/Cargo.toml +++ b/apps/yellowstone-fumarole-cli/Cargo.toml @@ -16,6 +16,10 @@ name = "fume" clap = { workspace = true, features = ["derive"] } clap-verbosity-flag = { workspace = true, features = ["tracing"] } futures = { workspace = true } +http-body-util = { workspace = true } +hyper = { workspace = true, features = ["http1"] } +hyper-util = { workspace = true, features = ["http1"] } +prometheus = { workspace = true } solana-sdk = { workspace = true } serde_yaml = { workspace = true } tabled = { workspace = true } @@ -23,11 +27,11 @@ thiserror = { workspace = true } tokio = { workspace = true, features = ["rt-multi-thread", "signal"] } tokio-stream = { workspace = true } tonic = { workspace = true } -yellowstone-fumarole-client = { workspace = true } -yellowstone-grpc-client = { workspace = true } -yellowstone-grpc-proto = { workspace = true } tracing = { workspace = true } tracing-subscriber = { workspace = true, features = ["env-filter"] } +yellowstone-fumarole-client = { workspace = true, features = ["prometheus"] } +yellowstone-grpc-client = { workspace = true } +yellowstone-grpc-proto = { workspace = true } [lints] workspace = true diff --git a/apps/yellowstone-fumarole-cli/src/bin/fume.rs b/apps/yellowstone-fumarole-cli/src/bin/fume.rs index e6869de..f104877 100644 --- a/apps/yellowstone-fumarole-cli/src/bin/fume.rs +++ b/apps/yellowstone-fumarole-cli/src/bin/fume.rs @@ -4,8 +4,10 @@ use { solana_sdk::{bs58, pubkey::Pubkey}, std::{ collections::HashMap, - fmt, - io::{stderr, stdout, IsTerminal}, + fmt::{self, Debug}, + fs::File, + io::{stdout, Write}, + net::{AddrParseError, SocketAddr}, num::NonZeroUsize, path::PathBuf, str::FromStr, @@ -17,7 +19,8 @@ 
use { signal::unix::{signal, SignalKind}, }, tonic::Code, - tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter}, + tracing_subscriber::EnvFilter, + yellowstone_fumarole_cli::prom::prometheus_server, yellowstone_fumarole_client::{ config::FumaroleConfig, proto::{ @@ -34,6 +37,44 @@ use { }, }; +#[derive(Debug, Clone)] +pub struct PrometheusBindAddr(SocketAddr); + +impl From for SocketAddr { + fn from(addr: PrometheusBindAddr) -> Self { + addr.0 + } +} + +impl fmt::Display for PrometheusBindAddr { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +impl From for PrometheusBindAddrParseError { + fn from(err: AddrParseError) -> Self { + PrometheusBindAddrParseError(err.to_string()) + } +} + +#[derive(Debug, thiserror::Error)] +#[error("Invalid prometheus bind address {0}")] +pub struct PrometheusBindAddrParseError(String); + +impl FromStr for PrometheusBindAddr { + type Err = PrometheusBindAddrParseError; + + fn from_str(s: &str) -> Result { + if s == "0" { + Ok(PrometheusBindAddr("127.0.0.1:0".parse()?)) + } else { + let ip_addr = s.parse()?; + Ok(PrometheusBindAddr(ip_addr)) + } + } +} + #[derive(Debug, Clone, Parser)] #[clap(author, version, about = "Yellowstone Fumarole CLI")] struct Args { @@ -134,6 +175,15 @@ impl From for CommitmentLevel { #[derive(Debug, Clone, Parser)] struct SubscribeArgs { + /// bind address for prometheus HTTP server endpoint, or "0" to bind to a random localhost port. + #[clap(long)] + prometheus: Option, + + /// Output to write geyser events to. 
+ /// If not specified, output will be written to stdout + #[clap(long)] + out: Option, + /// Name of the persistent subscriber #[clap(long)] name: String, @@ -352,12 +402,34 @@ pub fn create_shutdown() -> BoxFuture<'static, ()> { async fn subscribe(mut client: FumaroleClient, args: SubscribeArgs) { let SubscribeArgs { + prometheus, name: cg_name, commitment, account: pubkey, owner, tx_account: tx_pubkey, + out, } = args; + + let mut out: Box = if let Some(out) = out { + Box::new( + File::options() + .write(true) + .open(PathBuf::from(out)) + .expect("Failed to open output file"), + ) + } else { + Box::new(stdout()) + }; + + let registry = prometheus::Registry::new(); + yellowstone_fumarole_client::metrics::register_metrics(®istry); + + if let Some(bind_addr) = prometheus { + let socket_addr: SocketAddr = bind_addr.into(); + tokio::spawn(prometheus_server(socket_addr, registry)); + } + let commitment_level: CommitmentLevel = commitment.into(); // This request listen for all account updates and transaction updates let request = SubscribeRequest { @@ -449,7 +521,7 @@ async fn subscribe(mut client: FumaroleClient, args: SubscribeArgs) { }; if let Some(message) = message { - println!("{}", message); + writeln!(out, "{}", message).expect("Failed to write to output file"); } } } @@ -457,27 +529,6 @@ async fn subscribe(mut client: FumaroleClient, args: SubscribeArgs) { println!("Exiting subscribe loop"); } -#[allow(dead_code)] -fn setup_tracing_test_many( - modules: impl IntoIterator, -) -> Result<(), tracing_subscriber::util::TryInitError> { - let is_atty = stdout().is_terminal() && stderr().is_terminal(); - let io_layer = tracing_subscriber::fmt::layer() - .with_ansi(is_atty) - .with_line_number(true); - - let directives = modules - .into_iter() - .fold(EnvFilter::default(), |filter, module| { - filter.add_directive(format!("{module}=debug").parse().expect("invalid module")) - }); - - tracing_subscriber::registry() - .with(io_layer) - .with(directives) - .try_init() -} 
- async fn test_config(mut fumarole_client: FumaroleClient) { let result = fumarole_client.version().await; match result { @@ -506,7 +557,10 @@ async fn main() { let filter = format!("{curr_crate}={verbosity},yellowstone_fumarole_client={verbosity}"); let env_filter = EnvFilter::new(filter); - tracing_subscriber::fmt().with_env_filter(env_filter).init(); + tracing_subscriber::fmt() + .with_env_filter(env_filter) + .with_line_number(true) + .init(); // setup_tracing_test_many(["yellowstone_fumarole_client"]); let config = std::fs::read_to_string(&args.config).expect("Failed to read config file"); diff --git a/apps/yellowstone-fumarole-cli/src/lib.rs b/apps/yellowstone-fumarole-cli/src/lib.rs new file mode 100644 index 0000000..90576bf --- /dev/null +++ b/apps/yellowstone-fumarole-cli/src/lib.rs @@ -0,0 +1 @@ +pub mod prom; diff --git a/apps/yellowstone-fumarole-cli/src/prom.rs b/apps/yellowstone-fumarole-cli/src/prom.rs new file mode 100644 index 0000000..f68f761 --- /dev/null +++ b/apps/yellowstone-fumarole-cli/src/prom.rs @@ -0,0 +1,62 @@ +use { + http_body_util::Full, + hyper::{body::Bytes, server::conn::http1, service::service_fn, Request, Response}, + hyper_util::rt::TokioIo, + prometheus::{Registry, TextEncoder}, + std::{convert::Infallible, net::SocketAddr}, + tokio::net::TcpListener, +}; + +pub async fn prometheus_service_fn( + registry: &Registry, + _: Request, +) -> Result>, Infallible> { + let metrics = TextEncoder::new().encode_to_string(®istry.gather()); + + match metrics { + Ok(metrics) => Ok(Response::new(Full::new(Bytes::from(metrics)))), + Err(e) => { + Ok(Response::new(Full::new(Bytes::from(format!( + "Failed to encode metrics: {}", + e + ))))) + } + } +} + +pub async fn prometheus_server(bind_addr: SocketAddr, registry: Registry) { + // We create a TcpListener and bind it to 127.0.0.1:3000 + let listener = TcpListener::bind(bind_addr) + .await + .expect("Failed to bind TCP listener"); + let addr = listener.local_addr().expect("Failed to get local 
address"); + println!("Prometheus listening on http://{}", addr); + // We start a loop to continuously accept incoming connections + loop { + let (stream, _) = listener + .accept() + .await + .expect("Failed to accept connection"); + + // Use an adapter to access something implementing `tokio::io` traits as if they implement + // `hyper::rt` IO traits. + let io = TokioIo::new(stream); + let registry2 = registry.clone(); + // Spawn a tokio task to serve multiple connections concurrently + tokio::task::spawn(async move { + let svc_fn = |req| { + let registry = registry2.clone(); + async move { prometheus_service_fn(®istry, req).await } + }; + + let result = http1::Builder::new() + // `service_fn` converts our function in a `Service` + .serve_connection(io, service_fn(svc_fn)) + .await; + + if let Err(err) = result { + eprintln!("Error serving connection: {:?}", err); + } + }); + } +} diff --git a/crates/yellowstone-fumarole-client/Cargo.toml b/crates/yellowstone-fumarole-client/Cargo.toml index f783bf6..624e448 100644 --- a/crates/yellowstone-fumarole-client/Cargo.toml +++ b/crates/yellowstone-fumarole-client/Cargo.toml @@ -11,7 +11,7 @@ keywords = { workspace = true } publish = true [features] -default = ["prometheus"] +default = [] prometheus = ["dep:prometheus"] [package.metadata] @@ -23,6 +23,7 @@ include = [ [dependencies] async-trait = { workspace = true } futures = { workspace = true } +fxhash = { workspace = true } http = { workspace = true } hyper = { workspace = true } lazy_static = { workspace = true } diff --git a/crates/yellowstone-fumarole-client/src/metrics.rs b/crates/yellowstone-fumarole-client/src/metrics.rs index a4fb159..39d45fb 100644 --- a/crates/yellowstone-fumarole-client/src/metrics.rs +++ b/crates/yellowstone-fumarole-client/src/metrics.rs @@ -67,6 +67,14 @@ lazy_static! 
{ &["runtime"], ) .unwrap(); + pub(crate) static ref SKIP_OFFSET_COMMITMENT_COUNT: IntCounterVec = IntCounterVec::new( + Opts::new( + "fumarole_skip_offset_commitment_count", + "Number of skipped offset commitment done to remote Fumarole service", + ), + &["runtime"], + ) + .unwrap(); pub(crate) static ref TOTAL_EVENT_DOWNLOADED: IntCounterVec = IntCounterVec::new( Opts::new( "fumarole_total_event_downloaded", @@ -75,6 +83,28 @@ lazy_static! { &["runtime"], ) .unwrap(); + pub(crate) static ref SLOT_STATUS_OFFSET_PROCESSED_CNT: IntCounterVec = IntCounterVec::new( + Opts::new( + "fumarole_slot_status_offset_processed_count", + "Number of offset processed from Fumarole runtime", + ), + &["runtime"], + ) + .unwrap(); + pub(crate) static ref PROCESSED_SLOT_STATUS_OFFSET_QUEUE: IntGaugeVec = IntGaugeVec::new( + Opts::new( + "fumarole_processed_slot_status_offset_queue", + "The number of slot status offset that is blocked from commitment, waiting for missing offset to be acknowledged", + ), + &["runtime"], + ).unwrap(); + pub(crate) static ref SLOT_STATUS_UPDATE_QUEUE_LEN: IntGaugeVec = IntGaugeVec::new( + Opts::new( + "fumarole_slot_status_update_queue_len", + "The number of slot status update that is waiting to be ack", + ), + &["runtime"], + ).unwrap(); } pub(crate) fn inc_total_event_downloaded(name: impl AsRef, amount: usize) { @@ -125,6 +155,30 @@ pub(crate) fn inc_failed_slot_download_attempt(name: impl AsRef) { .inc(); } +pub(crate) fn inc_skip_offset_commitment_count(name: impl AsRef) { + SKIP_OFFSET_COMMITMENT_COUNT + .with_label_values(&[name.as_ref()]) + .inc(); +} + +pub(crate) fn inc_slot_status_offset_processed_count(name: impl AsRef) { + SLOT_STATUS_OFFSET_PROCESSED_CNT + .with_label_values(&[name.as_ref()]) + .inc(); +} + +pub(crate) fn set_processed_slot_status_offset_queue_len(name: impl AsRef, len: usize) { + PROCESSED_SLOT_STATUS_OFFSET_QUEUE + .with_label_values(&[name.as_ref()]) + .set(len as i64); +} + +pub(crate) fn 
set_slot_status_update_queue_len(name: impl AsRef, len: usize) { + SLOT_STATUS_UPDATE_QUEUE_LEN + .with_label_values(&[name.as_ref()]) + .set(len as i64); +} + /// /// Register Fumarole metrics to the given registry. /// @@ -150,4 +204,16 @@ pub fn register_metrics(registry: &prometheus::Registry) { registry .register(Box::new(TOTAL_EVENT_DOWNLOADED.clone())) .unwrap(); + registry + .register(Box::new(SKIP_OFFSET_COMMITMENT_COUNT.clone())) + .unwrap(); + registry + .register(Box::new(SLOT_STATUS_OFFSET_PROCESSED_CNT.clone())) + .unwrap(); + registry + .register(Box::new(PROCESSED_SLOT_STATUS_OFFSET_QUEUE.clone())) + .unwrap(); + registry + .register(Box::new(SLOT_STATUS_UPDATE_QUEUE_LEN.clone())) + .unwrap(); } diff --git a/crates/yellowstone-fumarole-client/src/runtime/mod.rs b/crates/yellowstone-fumarole-client/src/runtime/mod.rs index 446f6d8..b761edc 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/mod.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/mod.rs @@ -5,6 +5,7 @@ pub(crate) mod tokio; use { crate::proto::{self, BlockchainEvent}, + fxhash::FxHashMap, solana_sdk::clock::Slot, std::{ cmp::Reverse, @@ -23,6 +24,8 @@ pub(crate) type FumeShardIdx = u32; pub(crate) type FumeOffset = i64; +pub(crate) type FumeSessionSequence = u64; + #[derive(Debug, Clone)] pub(crate) struct FumeDownloadRequest { pub slot: Slot, @@ -36,6 +39,9 @@ pub(crate) struct FumeDownloadRequest { #[derive(Clone, Debug)] pub(crate) struct FumeSlotStatus { + /// Rough unit of time inside fumarole state machine. + pub session_sequence: FumeSessionSequence, + #[allow(dead_code)] pub offset: FumeOffset, pub slot: Slot, pub parent_slot: Option, @@ -146,19 +152,25 @@ pub(crate) struct FumaroleSM { /// Keeps track of each offset have been processed by the underlying runtime. 
/// Fumarole State Machine emits slot status in disorder, but still requires ordering /// when computing the `committable_offset` - processed_offset: BinaryHeap>, + processed_offset: BinaryHeap>, /// Represents the high-water mark fume offset that can be committed to the remote fumarole service. /// It means the runtime processed everything <= committable offset. pub committable_offset: FumeOffset, + last_processed_fume_sequence: FumeSessionSequence, + /// Represents the max slot detected in the current session. /// This is used to detect rough slot lag. /// this slot is not necessarily processed by the underlying runtime yet. pub max_slot_detected: Slot, /// Unprocessed blockchain events - unprocessed_blockchain_event: VecDeque, + unprocessed_blockchain_event: VecDeque<(u64, proto::BlockchainEvent)>, + + sequence: u64, + + sequence_to_offset: FxHashMap, } impl FumaroleSM { @@ -174,6 +186,9 @@ impl FumaroleSM { committable_offset: last_committed_offset, max_slot_detected: 0, unprocessed_blockchain_event: Default::default(), + sequence: 1, + last_processed_fume_sequence: 0, + sequence_to_offset: Default::default(), } } @@ -188,6 +203,12 @@ impl FumaroleSM { self.last_committed_offset = offset; } + fn next_sequence(&mut self) -> u64 { + let ret = self.sequence; + self.sequence += 1; + ret + } + pub fn queue_blockchain_event(&mut self, events: IT) where IT: IntoIterator, @@ -196,8 +217,17 @@ impl FumaroleSM { if event.offset < self.last_committed_offset { continue; } + + if event.slot > self.max_slot_detected { + self.max_slot_detected = event.slot; + } + let sequence = self.next_sequence(); + + self.sequence_to_offset.insert(sequence, event.offset); + if self.downloaded_slot.contains(&event.slot) { let fume_status = FumeSlotStatus { + session_sequence: sequence, offset: event.offset, slot: event.slot, parent_slot: event.parent_slot, @@ -217,7 +247,8 @@ impl FumaroleSM { self.slot_status_update_queue.push_back(fume_status); } } else { - 
self.unprocessed_blockchain_event.push_back(event); + self.unprocessed_blockchain_event + .push_back((sequence, event)); } } } @@ -290,6 +321,8 @@ impl FumaroleSM { ) -> Option { loop { let min_commitment = commitment.unwrap_or(CommitmentLevel::Processed); + let (session_sequence, blockchain_event) = + self.unprocessed_blockchain_event.pop_front()?; let BlockchainEvent { offset, blockchain_id, @@ -300,13 +333,14 @@ impl FumaroleSM { commitment_level, blockchain_shard_id: _, dead_error, - } = self.unprocessed_blockchain_event.pop_front()?; + } = blockchain_event; let event_cl = geyser::CommitmentLevel::try_from(commitment_level) .expect("invalid commitment level"); if event_cl < min_commitment { self.slot_status_update_queue.push_back(FumeSlotStatus { + session_sequence, offset, slot, parent_slot, @@ -326,12 +360,13 @@ impl FumaroleSM { if progression.processed_commitment_levels.contains(&event_cl) { // We already processed this commitment level - self.mark_offset_as_processed(offset); + self.mark_event_as_processed(session_sequence); continue; } // We have a new commitment level for this slot and slot has been downloaded in the current session. self.slot_status_update_queue.push_back(FumeSlotStatus { + session_sequence, offset, slot, parent_slot, @@ -350,6 +385,7 @@ impl FumaroleSM { .entry(slot) .or_default() .push_back(FumeSlotStatus { + session_sequence, offset, slot, parent_slot, @@ -377,36 +413,40 @@ impl FumaroleSM { } } - #[inline] - const fn missing_process_offset(&self) -> FumeOffset { - self.committable_offset + 1 + pub fn slot_status_update_queue_len(&self) -> usize { + self.slot_status_update_queue.len() } /// /// Marks this [`FumeOffset`] has processed by the runtime. 
/// - pub fn mark_offset_as_processed(&mut self, offset: FumeOffset) { - if offset == self.missing_process_offset() { - self.committable_offset = offset; - - loop { - let Some(offset2) = self.processed_offset.peek().copied() else { - break; - }; - - if offset2.0 != self.missing_process_offset() { - break; - } + pub fn mark_event_as_processed(&mut self, event_seq_number: FumeSessionSequence) { + let fume_offset = self + .sequence_to_offset + .remove(&event_seq_number) + .expect("event sequence number not found"); + self.processed_offset + .push(Reverse((event_seq_number, fume_offset))); - let offset2 = self.processed_offset.pop().unwrap().0; - assert_eq!(offset2, self.missing_process_offset()); - self.committable_offset = offset2; + loop { + let Some(tuple) = self.processed_offset.peek().copied() else { + break; + }; + let (blocked_event_seq_number2, fume_offset2) = tuple.0; + if blocked_event_seq_number2 != self.last_processed_fume_sequence + 1 { + break; } - } else { - self.processed_offset.push(Reverse(offset)); + + let _ = self.processed_offset.pop().unwrap(); + self.committable_offset = fume_offset2; + self.last_processed_fume_sequence = blocked_event_seq_number2; } } + pub fn processed_offset_queue_len(&self) -> usize { + self.processed_offset.len() + } + /// /// Returns true if there is no blockchain event history to track or progress on. 
/// @@ -462,7 +502,7 @@ mod tests { assert_eq!(status.slot, 1); assert_eq!(status.commitment_level, CommitmentLevel::Processed); - sm.mark_offset_as_processed(status.offset); + sm.mark_event_as_processed(status.session_sequence); // All subsequent commitment level should be available right away let mut event2 = event.clone(); @@ -476,7 +516,7 @@ mod tests { let status = sm.pop_next_slot_status().unwrap(); assert_eq!(status.slot, 1); assert_eq!(status.commitment_level, CommitmentLevel::Confirmed); - sm.mark_offset_as_processed(status.offset); + sm.mark_event_as_processed(status.session_sequence); assert_eq!(sm.committable_offset, event2.offset); } diff --git a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs index 78cc708..1f96326 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs @@ -1,13 +1,14 @@ #[cfg(feature = "prometheus")] use crate::metrics::{ dec_inflight_slot_download, inc_failed_slot_download_attempt, inc_inflight_slot_download, - inc_offset_commitment_count, inc_slot_download_count, observe_slot_download_duration, - set_max_slot_detected, + inc_offset_commitment_count, inc_skip_offset_commitment_count, inc_slot_download_count, + inc_slot_status_offset_processed_count, inc_total_event_downloaded, + observe_slot_download_duration, set_max_slot_detected, + set_processed_slot_status_offset_queue_len, set_slot_status_update_queue_len, }; use { super::{FumaroleSM, FumeDownloadRequest, FumeOffset, FumeShardIdx}, crate::{ - metrics::inc_total_event_downloaded, proto::{ self, data_response, BlockFilters, CommitOffset, ControlCommand, DownloadBlockShard, PollBlockchainHistory, @@ -234,6 +235,11 @@ impl TokioFumeDragonsmouthRuntime { unsafe { self.force_commit_offset().await; } + } else { + #[cfg(feature = "prometheus")] + { + inc_skip_offset_commitment_count(Self::RUNTIME_NAME); + } } self.last_commit = Instant::now(); @@ 
-247,7 +253,11 @@ impl TokioFumeDragonsmouthRuntime { slot_status_vec.push_back(slot_status); } - tracing::debug!("draining slot status: {} events", slot_status_vec.len()); + if slot_status_vec.is_empty() { + return; + } + + tracing::debug!("draining {} slot status", slot_status_vec.len()); for slot_status in slot_status_vec { let mut matched_filters = vec![]; @@ -270,7 +280,6 @@ impl TokioFumeDragonsmouthRuntime { slot: slot_status.slot, parent: slot_status.parent_slot, status: slot_status.commitment_level.into(), - // TODO: support dead slot dead_error: slot_status.dead_error, }, )), @@ -280,7 +289,20 @@ impl TokioFumeDragonsmouthRuntime { } } - self.sm.mark_offset_as_processed(slot_status.offset); + self.sm + .mark_event_as_processed(slot_status.session_sequence); + #[cfg(feature = "prometheus")] + { + inc_slot_status_offset_processed_count(Self::RUNTIME_NAME); + } + } + + #[cfg(feature = "prometheus")] + { + set_processed_slot_status_offset_queue_len( + Self::RUNTIME_NAME, + self.sm.processed_offset_queue_len(), + ); } } @@ -331,6 +353,12 @@ impl TokioFumeDragonsmouthRuntime { break; } + #[cfg(feature = "prometheus")] + { + let slot_status_update_queue_len = self.sm.slot_status_update_queue_len(); + set_slot_status_update_queue_len(Self::RUNTIME_NAME, slot_status_update_queue_len); + } + let commit_deadline = self.last_commit + self.commit_interval; self.poll_history_if_needed().await; @@ -372,7 +400,7 @@ impl TokioFumeDragonsmouthRuntime { } _ = tokio::time::sleep_until(commit_deadline.into()) => { - tracing::debug!("commit deadline reached"); + tracing::trace!("commit deadline reached"); self.commit_offset().await; } } @@ -551,7 +579,6 @@ impl GrpcDownloadTaskRunner { } let slot = task_meta.request.slot; - tracing::debug!("download task result received for slot {}", slot); let state = self .data_plane_channel_vec @@ -573,7 +600,6 @@ impl GrpcDownloadTaskRunner { { observe_slot_download_duration(Self::RUNTIME_NAME, elapsed); 
inc_slot_download_count(Self::RUNTIME_NAME); - inc_total_event_downloaded(Self::RUNTIME_NAME, total_event_downloaded); } tracing::debug!( @@ -799,6 +825,8 @@ pub(crate) struct CompletedDownloadBlockTask { } impl GrpcDownloadBlockTaskRun { + const RUNTIME_NAME: &'static str = "tokio_grpc_task_run"; + async fn run(mut self) -> Result { let request = DownloadBlockShard { blockchain_id: self.download_request.blockchain_id.to_vec(), @@ -828,6 +856,12 @@ impl GrpcDownloadBlockTaskRun { match resp { data_response::Response::Update(update) => { total_event_downloaded += 1; + + #[cfg(feature = "prometheus")] + { + inc_total_event_downloaded(Self::RUNTIME_NAME, 1); + } + if self.dragonsmouth_oulet.send(Ok(update)).await.is_err() { return Err(DownloadBlockError::OutletDisconnected); } diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 402b068..071ff5b 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,5 +1,5 @@ [toolchain] -channel = "1.79.0" +channel = "1.81.0" components = ["clippy", "rustfmt"] targets = [] profile = "minimal" From 768050bc0ef085933944fb89cf2df3dbf04f1efa Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Tue, 6 May 2025 15:23:54 -0400 Subject: [PATCH 18/56] v2: renamed consumer group to subscriber name --- Cargo.lock | 966 ++++++++++-------- Cargo.toml | 2 +- apps/yellowstone-fumarole-cli/Cargo.toml | 5 +- apps/yellowstone-fumarole-cli/src/prom.rs | 10 +- crates/yellowstone-fumarole-client/Cargo.toml | 2 +- crates/yellowstone-fumarole-client/build.rs | 2 - crates/yellowstone-fumarole-client/src/lib.rs | 12 +- 7 files changed, 533 insertions(+), 466 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0965579..ff9988b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -72,7 +72,7 @@ dependencies = [ "cfg-if", "once_cell", "version_check", - "zerocopy", + "zerocopy 0.7.35", ] [[package]] @@ -166,9 +166,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.95" +version = "1.0.98" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" +checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" [[package]] name = "ark-bn254" @@ -313,9 +313,9 @@ checksum = "9b34d609dfbaf33d6889b2b7106d3ca345eacad44200913df5ba02bfd31d2ba9" [[package]] name = "async-compression" -version = "0.4.18" +version = "0.4.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df895a515f70646414f4b45c0b79082783b80552b373a68283012928df56f522" +checksum = "b37fc50485c4f3f736a4fb14199f6d5f5ba008d7f28fe710306c92780f004c07" dependencies = [ "brotli", "flate2", @@ -344,7 +344,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -355,7 +355,7 @@ checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -389,7 +389,7 @@ dependencies = [ "axum-core", "bytes", "futures-util", - "http 1.2.0", + "http 1.3.1", "http-body 1.0.1", "http-body-util", "itoa", @@ -415,7 +415,7 @@ dependencies = [ "async-trait", "bytes", "futures-util", - "http 1.2.0", + "http 1.3.1", "http-body 1.0.1", "http-body-util", "mime", @@ -428,9 +428,9 @@ dependencies = [ [[package]] name = "backtrace" -version = "0.3.74" +version = "0.3.75" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" +checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002" dependencies = [ "addr2line", "cfg-if", @@ -476,18 +476,18 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.8.0" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8f68f53c83ab957f72c32642f3868eec03eb974d1fb82e453128456482613d36" +checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" dependencies = [ "serde", ] [[package]] name = "blake3" -version = "1.5.5" +version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8ee0c1824c4dea5b5f81736aff91bae041d2c07ee1192bec91054e10e3e601e" +checksum = "3888aaa89e4b2a40fca9848e400f6a658a5a3978de7be858e209cafa8be9a4a0" dependencies = [ "arrayref", "arrayvec", @@ -527,11 +527,11 @@ dependencies = [ [[package]] name = "borsh" -version = "1.5.5" +version = "1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5430e3be710b68d984d1391c854eb431a9d548640711faa54eecb1df93db91cc" +checksum = "ad8646f98db542e39fc66e68a20b2144f6a732636df7c2354e74645faaa433ce" dependencies = [ - "borsh-derive 1.5.5", + "borsh-derive 1.5.7", "cfg_aliases", ] @@ -550,15 +550,15 @@ dependencies = [ [[package]] name = "borsh-derive" -version = "1.5.5" +version = "1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8b668d39970baad5356d7c83a86fee3a539e6f93bf6764c97368243e17a0487" +checksum = "fdd1d3c0c2f5833f22386f252fe8ed005c7f59fdcddeef025c01b4c3b9fd9ac3" dependencies = [ "once_cell", - "proc-macro-crate 3.2.0", + "proc-macro-crate 3.3.0", "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -585,9 +585,9 @@ dependencies = [ [[package]] name = "brotli" -version = "7.0.0" +version = "8.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc97b8f16f944bba54f0433f07e30be199b6dc2bd25937444bbad560bcea29bd" +checksum = "9991eea70ea4f293524138648e41ee89b0b2b12ddef3b255effa43c8056e0e0d" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -596,9 +596,9 @@ dependencies = [ [[package]] name = "brotli-decompressor" -version = "4.0.2" +version = "5.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"74fa05ad7d803d413eb8380983b092cbbaf9a85f151b871360e7b00cd7060b37" +checksum = "874bb8112abecc98cbd6d81ea4fa7e94fb9449648c93cc89aa40c81c24d7de03" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -637,9 +637,9 @@ checksum = "5ce89b21cab1437276d2650d57e971f9d548a2d9037cc231abdc0562b97498ce" [[package]] name = "bytemuck" -version = "1.21.0" +version = "1.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef657dfab802224e671f5818e9a4935f9b1957ed18e58292690cc39e7a4092a3" +checksum = "9134a6ef01ce4b366b50689c94f82c14bc72bc5d0386829828a2e2752ef7958c" dependencies = [ "bytemuck_derive", ] @@ -652,7 +652,7 @@ checksum = "3fa76293b4f7bb636ab88fd78228235b5248b4d05cc589aed610f954af5d7c7a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -663,15 +663,15 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.10.0" +version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f61dac84819c6588b558454b194026eb1f09c293b9036ae9b159e74e73ab6cf9" +checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" [[package]] name = "cc" -version = "1.2.13" +version = "1.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7777341816418c02e033934a09f20dc0ccaf65a5201ef8a450ae0105a573fda" +checksum = "8691782945451c1c383942c4874dbe63814f61cb57ef773cda2972682b7bb3c0" dependencies = [ "jobserver", "libc", @@ -698,14 +698,14 @@ checksum = "45565fc9416b9896014f5732ac776f810ee53a66730c17e4020c3ec064a8f88f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] name = "chrono" -version = "0.4.39" +version = "0.4.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825" +checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d" 
dependencies = [ "android-tzdata", "iana-time-zone", @@ -713,7 +713,7 @@ dependencies = [ "num-traits", "serde", "wasm-bindgen", - "windows-targets 0.52.6", + "windows-link", ] [[package]] @@ -728,9 +728,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.29" +version = "4.5.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8acebd8ad879283633b343856142139f2da2317c96b05b4dd6181c61e2480184" +checksum = "eccb054f56cbd38340b380d4a8e69ef1f02f1af43db2f0cc817a4774d80ae071" dependencies = [ "clap_builder", "clap_derive", @@ -749,9 +749,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.29" +version = "4.5.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6ba32cbda51c7e1dfd49acc1457ba1a7dec5b64fe360e828acb13ca8dc9c2f9" +checksum = "efd9466fac8543255d3b1fcad4762c5e116ffe808c8a3043d4263cd4fd4862a2" dependencies = [ "anstream", "anstyle", @@ -761,14 +761,14 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.28" +version = "4.5.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf4ced95c6f4a675af3da73304b9ac4ed991640c36374e4b46795c49e17cf1ed" +checksum = "09176aae279615badda0765c0c0b3f6ed53f4709118af73cf4655d85d1530cd7" dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -868,9 +868,9 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.5.14" +version = "0.5.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06ba6d68e24814cb8de6bb986db8222d3a027d15872cabc0d18817bc3c0e4471" +checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" dependencies = [ "crossbeam-utils", ] @@ -956,14 +956,14 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] name = "darling" -version = "0.20.10" +version = "0.20.11" source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" +checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" dependencies = [ "darling_core", "darling_macro", @@ -971,34 +971,34 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.20.10" +version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" +checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", "strsim", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] name = "darling_macro" -version = "0.20.10" +version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" +checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" dependencies = [ "darling_core", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] name = "deranged" -version = "0.3.11" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e" dependencies = [ "powerfmt", "serde", @@ -1049,7 +1049,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -1090,14 +1090,14 @@ dependencies = [ "derivation-path", "ed25519-dalek", "hmac 0.12.1", - "sha2 0.10.8", + "sha2 0.10.9", ] [[package]] name = "either" -version = "1.13.0" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" +checksum = 
"48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" [[package]] name = "encoding_rs" @@ -1125,20 +1125,20 @@ checksum = "a1ab991c1362ac86c61ab6f556cff143daa22e5a15e4e189df818b2fd19fe65b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] name = "equivalent" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "errno" -version = "0.3.10" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" +checksum = "976dd42dc7e85965fe702eb8164f21f450704bdde31faefd6471dba214cb594e" dependencies = [ "libc", "windows-sys 0.59.0", @@ -1164,30 +1164,30 @@ checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" [[package]] name = "five8_const" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b4f62f0f8ca357f93ae90c8c2dd1041a1f665fde2f889ea9b1787903829015" +checksum = "26dec3da8bc3ef08f2c04f61eab298c3ab334523e55f076354d6d6f613799a7b" dependencies = [ "five8_core", ] [[package]] name = "five8_core" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94474d15a76982be62ca8a39570dccce148d98c238ebb7408b0a21b2c4bdddc4" +checksum = "2551bf44bc5f776c15044b9b94153a00198be06743e262afaaa61f11ac7523a5" [[package]] name = "fixedbitset" -version = "0.4.2" +version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" +checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99" [[package]] name = "flate2" -version = "1.0.35" +version = "1.1.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" +checksum = "7ced92e76e966ca2fd84c8f7aa01a4aea65b0eb6648d72f7c8f3e2764a67fece" dependencies = [ "crc32fast", "miniz_oxide", @@ -1279,7 +1279,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -1357,9 +1357,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" dependencies = [ "cfg-if", "js-sys", @@ -1370,14 +1370,14 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8" +checksum = "73fea8450eea4bac3940448fb7ae50d91f034f941199fcd9d909a5a07aa455f0" dependencies = [ "cfg-if", "libc", - "wasi 0.13.3+wasi-0.2.2", - "windows-targets 0.52.6", + "r-efi", + "wasi 0.14.2+wasi-0.2.4", ] [[package]] @@ -1398,7 +1398,7 @@ dependencies = [ "futures-sink", "futures-util", "http 0.2.12", - "indexmap 2.7.1", + "indexmap 2.9.0", "slab", "tokio", "tokio-util", @@ -1407,17 +1407,17 @@ dependencies = [ [[package]] name = "h2" -version = "0.4.7" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e" +checksum = "a9421a676d1b147b16b82c9225157dc629087ef8ec4d5e2960f9437a90dac0a5" dependencies = [ "atomic-waker", "bytes", "fnv", "futures-core", "futures-sink", - "http 1.2.0", - "indexmap 2.7.1", + "http 1.3.1", + "indexmap 2.9.0", "slab", "tokio", "tokio-util", @@ -1450,9 +1450,9 @@ dependencies = 
[ [[package]] name = "hashbrown" -version = "0.15.2" +version = "0.15.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" +checksum = "84b26c544d002229e640969970a2e74021aadf6e2f96372b9c58eff97de08eb3" [[package]] name = "heck" @@ -1509,9 +1509,9 @@ dependencies = [ [[package]] name = "http" -version = "1.2.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea" +checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565" dependencies = [ "bytes", "fnv", @@ -1536,27 +1536,27 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http 1.2.0", + "http 1.3.1", ] [[package]] name = "http-body-util" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" +checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" dependencies = [ "bytes", - "futures-util", - "http 1.2.0", + "futures-core", + "http 1.3.1", "http-body 1.0.1", "pin-project-lite", ] [[package]] name = "httparse" -version = "1.10.0" +version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2d708df4e7140240a16cd6ab0ab65c972d7433ab77819ea693fde9c43811e2a" +checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" [[package]] name = "httpdate" @@ -1597,8 +1597,8 @@ dependencies = [ "bytes", "futures-channel", "futures-util", - "h2 0.4.7", - "http 1.2.0", + "h2 0.4.10", + "http 1.3.1", "http-body 1.0.1", "httparse", "httpdate", @@ -1638,16 +1638,17 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.10" +version = "0.1.11" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" +checksum = "497bbc33a26fdd4af9ed9c70d63f61cf56a938375fbb32df34db9b1cd6d643f2" dependencies = [ "bytes", "futures-channel", "futures-util", - "http 1.2.0", + "http 1.3.1", "http-body 1.0.1", "hyper 1.6.0", + "libc", "pin-project-lite", "socket2", "tokio", @@ -1657,14 +1658,15 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.61" +version = "0.1.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220" +checksum = "b0c919e5debc312ad217002b8048a17b7d83f80703865bbfcfebb0458b0b27d8" dependencies = [ "android_system_properties", "core-foundation-sys", "iana-time-zone-haiku", "js-sys", + "log", "wasm-bindgen", "windows-core", ] @@ -1719,9 +1721,9 @@ dependencies = [ [[package]] name = "icu_locid_transform_data" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" +checksum = "7515e6d781098bf9f7205ab3fc7e9709d34554ae0b21ddbcb5febfa4bc7df11d" [[package]] name = "icu_normalizer" @@ -1743,9 +1745,9 @@ dependencies = [ [[package]] name = "icu_normalizer_data" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" +checksum = "c5e8338228bdc8ab83303f16b797e177953730f601a96c25d10cb3ab0daa0cb7" [[package]] name = "icu_properties" @@ -1764,9 +1766,9 @@ dependencies = [ [[package]] name = "icu_properties_data" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" +checksum = "85fb8799753b75aee8d2a21d7c14d9f38921b54b3dbda10f5a3c7a7b82dba5e2" [[package]] name = 
"icu_provider" @@ -1793,7 +1795,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -1836,20 +1838,20 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.7.1" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c9c992b02b5b4c94ea26e32fe5bccb7aa7d9f390ab5c1221ff895bc7ea8b652" +checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e" dependencies = [ "equivalent", - "hashbrown 0.15.2", + "hashbrown 0.15.3", "serde", ] [[package]] name = "inout" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" +checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" dependencies = [ "generic-array", ] @@ -1886,25 +1888,26 @@ dependencies = [ [[package]] name = "itertools" -version = "0.13.0" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" +checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" dependencies = [ "either", ] [[package]] name = "itoa" -version = "1.0.14" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" +checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" [[package]] name = "jobserver" -version = "0.1.32" +version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" +checksum = "38f262f097c174adebe41eb73d66ae9c06b2844fb0da69969647bbddd9b0538a" dependencies = [ + "getrandom 0.3.2", "libc", ] @@ -1935,9 +1938,9 @@ checksum = 
"bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" -version = "0.2.169" +version = "0.2.172" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" +checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa" [[package]] name = "libsecp256k1" @@ -1989,15 +1992,15 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.4.15" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" +checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" [[package]] name = "litemap" -version = "0.7.4" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" +checksum = "23fb14cb19457329c82206317a5663005a4d404783dc74f4252769b0d5f42856" [[package]] name = "lock_api" @@ -2011,9 +2014,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.25" +version = "0.4.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f" +checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" [[package]] name = "matchers" @@ -2074,9 +2077,9 @@ checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" [[package]] name = "miniz_oxide" -version = "0.8.4" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3b1c9bd4fe1f0f8b387f6eb9eb3b4a1aa26185e5750efb9140301703f62cd1b" +checksum = "3be647b768db090acb35d5ec5db2b0e1f1de11133ca123b9eacf5137868f892a" dependencies = [ "adler2", ] @@ -2167,7 +2170,7 @@ checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 
2.0.101", ] [[package]] @@ -2226,10 +2229,10 @@ version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af1844ef2428cc3e1cb900be36181049ef3d3193c63e43026cfe202983b27a56" dependencies = [ - "proc-macro-crate 3.2.0", + "proc-macro-crate 3.3.0", "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -2243,9 +2246,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.20.3" +version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "945462a4b81e43c4e3ba96bd7b49d834c6f61198356aa858733bc4acf3cbe62e" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" [[package]] name = "opaque-debug" @@ -2255,11 +2258,11 @@ checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" [[package]] name = "openssl" -version = "0.10.70" +version = "0.10.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61cfb4e166a8bb8c9b55c500bc2308550148ece889be90f609377e58140f42c6" +checksum = "fedfea7d58a1f73118430a55da6a286e7b044961736ce96a16a17068ea25e5da" dependencies = [ - "bitflags 2.8.0", + "bitflags 2.9.0", "cfg-if", "foreign-types", "libc", @@ -2276,7 +2279,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -2287,9 +2290,9 @@ checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" [[package]] name = "openssl-sys" -version = "0.9.105" +version = "0.9.108" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b22d5b84be05a8d6947c7cb71f7c849aa0f112acd4bf51c2a7c1c988ac0a9dc" +checksum = "e145e1651e858e820e4860f7b9c5e169bc1d8ce1c86043be79fa7b7634821847" dependencies = [ "cc", "libc", @@ -2369,32 +2372,32 @@ dependencies = [ [[package]] name = "petgraph" -version = "0.6.5" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" +checksum = "3672b37090dbd86368a4145bc067582552b29c27377cad4e0a306c97f9bd7772" dependencies = [ "fixedbitset", - "indexmap 2.7.1", + "indexmap 2.9.0", ] [[package]] name = "pin-project" -version = "1.1.9" +version = "1.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfe2e71e1471fe07709406bf725f710b02927c9c54b2b5b2ec0e8087d97c327d" +checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.9" +version = "1.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6e859e6e5bd50440ab63c47e3ebabc90f26251f7c73c3d3e837b74a1cc3fa67" +checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -2411,9 +2414,9 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "pkg-config" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" +checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" [[package]] name = "polyval" @@ -2435,21 +2438,21 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "ppv-lite86" -version = "0.2.20" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" dependencies = [ - "zerocopy", + "zerocopy 0.8.25", ] [[package]] name = "prettyplease" -version = "0.2.29" +version = "0.2.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6924ced06e1f7dfe3fa48d57b9f74f55d8915f5036121bef647ef4b204895fac" +checksum = "664ec5419c51e34154eec046ebcba56312d5a2fc3b09a06da188e1ad21afadf6" dependencies = [ "proc-macro2", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -2463,9 +2466,9 @@ dependencies = [ [[package]] name = "proc-macro-crate" -version = "3.2.0" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b" +checksum = "edce586971a4dfaa28950c6f18ed55e0406c1ab88bbce2c6f6293a7aaba73d35" dependencies = [ "toml_edit", ] @@ -2489,14 +2492,14 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] name = "proc-macro2" -version = "1.0.93" +version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" +checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" dependencies = [ "unicode-ident", ] @@ -2518,9 +2521,9 @@ dependencies = [ [[package]] name = "prost" -version = "0.13.4" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c0fef6c4230e4ccf618a35c59d7ede15dea37de8427500f50aff708806e42ec" +checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5" dependencies = [ "bytes", "prost-derive", @@ -2528,12 +2531,12 @@ dependencies = [ [[package]] name = "prost-build" -version = "0.13.4" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0f3e5beed80eb580c68e2c600937ac2c4eedabdfd5ef1e5b7ea4f3fba84497b" +checksum = "be769465445e8c1474e9c5dac2018218498557af32d9ed057325ec9a41ae81bf" dependencies = [ "heck", - "itertools 0.13.0", + "itertools 0.14.0", "log", "multimap", "once_cell", @@ -2542,28 +2545,28 @@ dependencies = [ "prost", "prost-types", "regex", - "syn 2.0.98", + "syn 2.0.101", "tempfile", ] [[package]] name 
= "prost-derive" -version = "0.13.4" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "157c5a9d7ea5c2ed2d9fb8f495b64759f7816c7eaea54ba3978f0d63000162e3" +checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" dependencies = [ "anyhow", - "itertools 0.13.0", + "itertools 0.14.0", "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] name = "prost-types" -version = "0.13.4" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc2f1e56baa61e93533aebc21af4d2134b70f66275e0fcdf3cbe43d77ff7e8fc" +checksum = "52c2c1bf36ddb1a1c396b3601a3cec27c2462e45f07c386894ec3ccf5332bd16" dependencies = [ "prost", ] @@ -2594,13 +2597,19 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.38" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" dependencies = [ "proc-macro2", ] +[[package]] +name = "r-efi" +version = "5.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" + [[package]] name = "rand" version = "0.7.3" @@ -2660,7 +2669,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", ] [[package]] @@ -2674,11 +2683,11 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.8" +version = "0.5.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834" +checksum = "928fca9cf2aa042393a8325b9ead81d2f0df4cb12e1e24cef072922ccd99c5af" dependencies = [ - "bitflags 2.8.0", + "bitflags 2.9.0", ] 
[[package]] @@ -2770,15 +2779,14 @@ dependencies = [ [[package]] name = "ring" -version = "0.17.8" +version = "0.17.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" dependencies = [ "cc", "cfg-if", - "getrandom 0.2.15", + "getrandom 0.2.16", "libc", - "spin", "untrusted", "windows-sys 0.52.0", ] @@ -2800,11 +2808,11 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.44" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" +checksum = "c71e83d6afe7ff64890ec6b71d6a69bb8a610ab78ce364b3352876bb4c801266" dependencies = [ - "bitflags 2.8.0", + "bitflags 2.9.0", "errno", "libc", "linux-raw-sys", @@ -2825,15 +2833,15 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.22" +version = "0.23.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fb9263ab4eb695e42321db096e3b8fbd715a59b154d5c88d82db2175b681ba7" +checksum = "730944ca083c1c233a75c09f199e973ca499344a2b7ba9e755c457e86fb4a321" dependencies = [ "log", "once_cell", "ring", "rustls-pki-types", - "rustls-webpki 0.102.8", + "rustls-webpki 0.103.2", "subtle", "zeroize", ] @@ -2886,9 +2894,9 @@ dependencies = [ [[package]] name = "rustls-webpki" -version = "0.102.8" +version = "0.103.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" +checksum = "7149975849f1abb3832b246010ef62ccc80d3a76169517ada7188252b9cfb437" dependencies = [ "ring", "rustls-pki-types", @@ -2897,15 +2905,15 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.19" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4" +checksum = "eded382c5f5f786b989652c49544c4877d9f015cc22e145a5ea8ea66c2921cd2" [[package]] name = "ryu" -version = "1.0.19" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ea1a2d0a644769cc99faa24c3ad26b379b786fe7c36fd3c546254801650e6dd" +checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" [[package]] name = "schannel" @@ -2944,7 +2952,7 @@ version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "271720403f46ca04f7ba6f55d438f8bd878d6b8ca0a1046e8228c4145bcbb316" dependencies = [ - "bitflags 2.8.0", + "bitflags 2.9.0", "core-foundation 0.10.0", "core-foundation-sys", "libc", @@ -2963,44 +2971,44 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.25" +version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f79dfe2d285b0488816f30e700a7438c5a73d816b5b7d3ac72fbc48b0d185e03" +checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0" [[package]] name = "serde" -version = "1.0.217" +version = "1.0.219" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" +checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" dependencies = [ "serde_derive", ] [[package]] name = "serde_bytes" -version = "0.11.15" +version = "0.11.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "387cc504cb06bb40a96c8e04e951fe01854cf6bc921053c954e4a606d9675c6a" +checksum = "8437fd221bde2d4ca316d61b90e337e9e702b3820b87d63caa9ba6c02bd06d96" dependencies = [ "serde", ] [[package]] name = "serde_derive" -version = "1.0.217" +version = "1.0.219" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" +checksum = 
"5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] name = "serde_json" -version = "1.0.138" +version = "1.0.140" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d434192e7da787e94a6ea7e9670b26a036d0ca41e0b7efb2676dd32bae872949" +checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" dependencies = [ "itoa", "memchr", @@ -3030,7 +3038,7 @@ dependencies = [ "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.7.1", + "indexmap 2.9.0", "serde", "serde_derive", "serde_json", @@ -3047,7 +3055,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -3056,7 +3064,7 @@ version = "0.9.34+deprecated" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" dependencies = [ - "indexmap 2.7.1", + "indexmap 2.9.0", "itoa", "ryu", "serde", @@ -3078,9 +3086,9 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.8" +version = "0.10.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" dependencies = [ "cfg-if", "cpufeatures", @@ -3114,9 +3122,9 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "signal-hook-registry" -version = "1.4.2" +version = "1.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" +checksum = "9203b8055f63a2a00e2f593bb0510367fe707d7ff1e5c872de2f537b339e5410" dependencies = [ "libc", ] @@ -3144,15 +3152,15 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.13.2" +version = "1.15.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" +checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9" [[package]] name = "socket2" -version = "0.5.8" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" +checksum = "4f5fd57c80058a56cf5c777ab8a126398ece8e442983605d280a44ce79d0edef" dependencies = [ "libc", "windows-sys 0.52.0", @@ -3160,9 +3168,9 @@ dependencies = [ [[package]] name = "solana-account" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2af97266ee346ef1cd1649ba462d08bd3d254e50c06c45d3e70a21871a1da6a" +checksum = "d9a495abef137c65f58282720384262503172cddb937c94c1a01f0a6c553a0dc" dependencies = [ "bincode", "serde", @@ -3174,9 +3182,9 @@ dependencies = [ [[package]] name = "solana-account-decoder" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "717a67d421f9ba91a7b845af1d5b0039fcb44b86f4b294b28ae447a0f2bf48c8" +checksum = "86e83b9f421857e9aee51df52aab53d03a9f5860a57c0adcda8a480392f2f85a" dependencies = [ "Inflector", "base64 0.22.1", @@ -3200,9 +3208,9 @@ dependencies = [ [[package]] name = "solana-account-decoder-client-types" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e42008241f82751639daee861f1ecaf9c3342d3aa6ce96c063290379a5f9f7c0" +checksum = "4af8ffcad184f2486e5e677fe25250f8f0d77b2d5eb24045fb720963525272b7" dependencies = [ "base64 0.22.1", "bs58", @@ -3216,9 +3224,9 @@ dependencies = [ [[package]] name = "solana-account-info" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ed2417317f26f0941dd8e552ac1f9768eb2aa3b7f16ec992a6833f058295bea" +checksum = 
"9b43b59c9659eb61504c4cc73a92a5995a117fa4ffc04bb0da002848cfdd7fcd" dependencies = [ "bincode", "serde", @@ -3229,18 +3237,18 @@ dependencies = [ [[package]] name = "solana-atomic-u64" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0cd0453d46a62ed36ce234be9153a3c4d433711f1cec6943345d1637d6a0908" +checksum = "9f9c33447056f11c1c486ffc2d803366847ba712463d9640891e50490faafd56" dependencies = [ "parking_lot", ] [[package]] name = "solana-bincode" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a97957d987dc85bbfa90cb7e919ee0b071206affc0209e7221d7ea4844e7be31" +checksum = "f7c4ce8ddc2f5343e64346f9f8a05e9f27579d848f059485290d5b9c962c352c" dependencies = [ "bincode", "serde", @@ -3249,9 +3257,9 @@ dependencies = [ [[package]] name = "solana-bn254" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "957ce0d8b021f78f7b3c99d82b21a8dae617cf016377647c4d43a6e3141e8f2f" +checksum = "8e485c027635dd7c6e558949d13bcc052340d704eef4219438593afea5632f42" dependencies = [ "ark-bn254", "ark-ec", @@ -3264,19 +3272,19 @@ dependencies = [ [[package]] name = "solana-borsh" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99affe31b10c1cd4a6438d307b92c1b17c89c974aebf2c2aa15cd790d0ba672b" +checksum = "ad79f227829e9b3fa1227acf21b02877f1a0d99d1b753a8085254b706fbddfee" dependencies = [ "borsh 0.10.4", - "borsh 1.5.5", + "borsh 1.5.7", ] [[package]] name = "solana-clock" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97864f28abd43d03e7ca7242059e340bb6e637e0ce99fd66f6420c43fa359898" +checksum = "7dd1a3f42e823861b812f388d4007bfb2d23aa316d999a2f2ca124fa33c72a40" dependencies = [ "serde", "serde_derive", @@ -3286,18 +3294,18 @@ dependencies = [ [[package]] name = 
"solana-compute-budget" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f92a2ba8c5ed92fc805f8a92a3bfbbaca05da80d87f180aea4e9f28b9e0fa22" +checksum = "61ac55f874d43496b1e1d091576dd99e74e5863911ca55ac36912fe8fe3aa155" dependencies = [ "solana-sdk", ] [[package]] name = "solana-config-program" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08a78fbed4792e4cd029a0dd95d14ec42ea602fd7247b40e8fbc4c96b3404da1" +checksum = "dbb08509a0969a3929fc48de314af650ce77d678ec742e49b6f60535c57eccb2" dependencies = [ "bincode", "chrono", @@ -3311,9 +3319,9 @@ dependencies = [ [[package]] name = "solana-cpi" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d54c3b096dc77222b9c19ffe9cf6c1c32bd1e9882ceb955d213be4315bbe3b95" +checksum = "f43b1391eecec7c15ae83477d0bfebc9a92ebe69f793683fd497ac02d180fcd5" dependencies = [ "solana-account-info", "solana-define-syscall", @@ -3325,9 +3333,9 @@ dependencies = [ [[package]] name = "solana-curve25519" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b2ed697e82c44b0833550501e3fab428c07cc2865c788307fad4c98a64d27d0" +checksum = "bdcc0923e1fbfe614d4a5675c3e4398b521c9d128c2114aaf3e404ea81ad08ee" dependencies = [ "bytemuck", "bytemuck_derive", @@ -3338,24 +3346,24 @@ dependencies = [ [[package]] name = "solana-decode-error" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c92852914fe0cfec234576a30b1de4b11516dd729226d5de04e4c67d80447a7" +checksum = "750a2c40fd97c96f7464ccf11142c598d5d35094d5988944f7ca5714e014737c" dependencies = [ "num-traits", ] [[package]] name = "solana-define-syscall" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"44015e77f6f321bf526f7d026b08d8f34b57b1ea6e46038fd13e59f43a53a475" +checksum = "592a9b501608cb642af6ad9d07c8be8827772c20d1afa5173c85c561da7942a3" [[package]] name = "solana-derivation-path" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2cd4b95383d8926cc22d4a33417aa2e38897475f259cff4eb319c8cf0f7ac02" +checksum = "c59883e489a9f19caef8af9a198f791f2b62348e14e5279f87df950c233c7880" dependencies = [ "derivation-path", "qstring", @@ -3364,9 +3372,9 @@ dependencies = [ [[package]] name = "solana-epoch-schedule" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3409f250234ec4bbd999de3eac727ca21dfbfd39a831906f6ec112a66d2e1a2" +checksum = "80ec39611020935101afd9a57ac57ea6961796b7d8705974408601e97d4dfb30" dependencies = [ "serde", "serde_derive", @@ -3376,9 +3384,9 @@ dependencies = [ [[package]] name = "solana-feature-set" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61ddda14ac5f2da82da4df043eabca2f2c00ac0d59f10295b8c8c3404fcc5f67" +checksum = "286cb8e4d888f36026bcb1603ff8ab52565ff12705ed24ed44dc68047ef2f779" dependencies = [ "lazy_static", "solana-clock", @@ -3390,9 +3398,9 @@ dependencies = [ [[package]] name = "solana-fee-calculator" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8db8c4be5e012215ed1e3394cd3c188e217dd4f0c821045e5d2c1262aac8b4e" +checksum = "bb464dcd65c2737f25e85b4f40e11ae90b106fa5ed6127acc7e3f5d70a7128a2" dependencies = [ "log", "serde", @@ -3401,11 +3409,11 @@ dependencies = [ [[package]] name = "solana-hash" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c25925816be2f57992c4c5af7dff31713bc95696c2fbc4bca911e290ba2f330" +checksum = "8591e9192a4575792bb6a175f82b6bcfb301fc1113c4342bce7789f00726e98a" 
dependencies = [ - "borsh 1.5.5", + "borsh 1.5.7", "bs58", "bytemuck", "bytemuck_derive", @@ -3419,9 +3427,9 @@ dependencies = [ [[package]] name = "solana-inflation" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e91a53086a0f0cc093ffce9e5be4399785f05a0d49f0ff2cd6d5f3f4d593e2e9" +checksum = "5c79ffef2dcf3c9361e8333276c08bd84f112f0760e2fe7273fd089d9bfd2c3b" dependencies = [ "serde", "serde_derive", @@ -3429,13 +3437,13 @@ dependencies = [ [[package]] name = "solana-instruction" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eab8c46b6f76857222ee1adeb7031b8eb0eb5134920614e9fd1bd710052b96a9" +checksum = "6cfdcaf08849c1828899c5f9ac8da6077bd3b339e2b0648b2d99d3fd780c3c6f" dependencies = [ "bincode", - "borsh 1.5.5", - "getrandom 0.2.15", + "borsh 1.5.7", + "getrandom 0.2.16", "js-sys", "num-traits", "serde", @@ -3447,9 +3455,9 @@ dependencies = [ [[package]] name = "solana-last-restart-slot" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "633f272467f3e1a28dfcfb1a7df55129752524a18938a84fd67086e205f0bd88" +checksum = "9b026280da05ff5a5fecd37057f85029b135b65df442467eeb15a8824b902294" dependencies = [ "serde", "serde_derive", @@ -3459,24 +3467,24 @@ dependencies = [ [[package]] name = "solana-log-collector" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "034627f9849eeafcbfa24f0e4ad0da50eb68422ceab5c605b7d87755af77b201" +checksum = "6ca24a27f90bc5b3e27a97f0c7dd062a7ad068df240fd8df1267e4840f5d1991" dependencies = [ "log", ] [[package]] name = "solana-measure" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24fa953c2b49a131492b5927e714ab60b7b927610c7ed3355b9ad28909622b5e" +checksum = 
"d1520b3d2e1271adc263807fb6bea9d1ded1aaf4b3dffc4e8c1d51c4444417db" [[package]] name = "solana-metrics" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cce8eeecdde1cfed801d0d8683856e0e0cc731119894a7ae77a966915cf84964" +checksum = "2e5546d4505ca1d7c07fd4bedf5f20d256929c4bf614e031c0e84c94cc8d1a94" dependencies = [ "crossbeam-channel", "gethostname", @@ -3489,27 +3497,27 @@ dependencies = [ [[package]] name = "solana-msg" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80dde3316c6ee6e8d57bf105139ec93f8c32a42fe3ec42a3cda2ca9efb72c0e6" +checksum = "ddbd7c6efaea83a2bd85a14a0a062ccc77039070311d2ca1af4c887be500192f" dependencies = [ "solana-define-syscall", ] [[package]] name = "solana-native-token" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e0721f46122a2f1837f571d5a6c1478c962ebefd6d65d02694b3a267b58dbf2" +checksum = "59b58d96fb3ff6d29e0afdb933888f65bcdf0775b334eee4f1f6f72b475cb531" [[package]] name = "solana-packet" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39fcc5cf0ef0ac6a62dd09fae772672c2d6865ee1d1ba5fbfbcc94b2c37b2be8" +checksum = "a46ca92cd3303aa3a225b4b3b4d9b2d29e42927545f1c1ff4042ca516b4decbd" dependencies = [ "bincode", - "bitflags 2.8.0", + "bitflags 2.9.0", "cfg_eval", "serde", "serde_derive", @@ -3518,9 +3526,9 @@ dependencies = [ [[package]] name = "solana-precompile-error" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54310052930124b78392b03d802aa465afe6fded96d97f2e6ca6b1dead85d8d9" +checksum = "09c58acffc2369dd3965e666a69015a978ef639e7e11cfe950b80d3ccfb44115" dependencies = [ "num-traits", "solana-decode-error", @@ -3528,16 +3536,16 @@ dependencies = [ [[package]] name = "solana-program" -version = 
"2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12511916a9658664921ca12dd6214910de655ac9955159c1e9871bd516936cac" +checksum = "1c3aa133068171f46e9583dc9c20221b9a67459e7b8aecd3be5b49af60b2887f" dependencies = [ "base64 0.22.1", "bincode", - "bitflags 2.8.0", + "bitflags 2.9.0", "blake3", "borsh 0.10.4", - "borsh 1.5.5", + "borsh 1.5.7", "bs58", "bv", "bytemuck", @@ -3546,7 +3554,7 @@ dependencies = [ "console_log", "curve25519-dalek 4.1.3", "five8_const", - "getrandom 0.2.15", + "getrandom 0.2.16", "js-sys", "lazy_static", "log", @@ -3559,7 +3567,7 @@ dependencies = [ "serde", "serde_bytes", "serde_derive", - "sha2 0.10.8", + "sha2 0.10.9", "sha3", "solana-account-info", "solana-atomic-u64", @@ -3601,9 +3609,9 @@ dependencies = [ [[package]] name = "solana-program-entrypoint" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3422fa98d2ac5b20df9c9feb9f638e1170341b3c4259c26cd91a6a7098f6830" +checksum = "42577056d9910b5e3badfb4ae8e234a814ff854e645e679b4286e1b4338c0f03" dependencies = [ "solana-account-info", "solana-msg", @@ -3613,11 +3621,11 @@ dependencies = [ [[package]] name = "solana-program-error" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a2ea6d8e88767586e6d547e5afb00cda08cee79c986443b2d47236aac50a755" +checksum = "a268d99b9f7a2ebfee0e8aa03be2f926626575d2e3c33ef02245c1e99477202e" dependencies = [ - "borsh 1.5.5", + "borsh 1.5.7", "num-traits", "serde", "serde_derive", @@ -3629,9 +3637,9 @@ dependencies = [ [[package]] name = "solana-program-memory" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "716e1c9cbd3c5e9d9147ffb7e74815cfb34ff7a3196127da64aa8d1866beab52" +checksum = "46d5f1d48635ce777d931ba85a4be23d9da2443cb24bcd9cc380636444a3e234" dependencies = [ "num-traits", "solana-define-syscall", 
@@ -3639,24 +3647,24 @@ dependencies = [ [[package]] name = "solana-program-option" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15c8ffad2c86e5de375ae5f0a46f64eb5897a63c514e958e908c1a98059c57d4" +checksum = "21f8d02965a1f8382b55c834fc9388c95112e8b238625f5c1e6289f20573b91a" [[package]] name = "solana-program-pack" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c185f9170ac85a93d5caaaaf5fe7bf0d49febdb329506bd7ea13716e4eb0189" +checksum = "019fc8c5e8698918bb0675afce683a17248e1c38b178c59f23577fbfd2374aa5" dependencies = [ "solana-program-error", ] [[package]] name = "solana-program-runtime" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5cf60b4b2d8d70b082d03973b8e646ca1c65351eb12ab33427c2df40cd178cf" +checksum = "7702436e95dadea0553a5b29b42271b81da61c2ae78e4425c74fadd7ce2252b5" dependencies = [ "base64 0.22.1", "bincode", @@ -3684,18 +3692,18 @@ dependencies = [ [[package]] name = "solana-pubkey" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdb80787769457f022a39a55cf439d1996aeecc2364c99483c97318d80f15436" +checksum = "16bcff57fc2a096f6c57851e22587a7a36b6fdaa567eab75bb0bcd06f449fcbc" dependencies = [ "borsh 0.10.4", - "borsh 1.5.5", + "borsh 1.5.7", "bs58", "bytemuck", "bytemuck_derive", "curve25519-dalek 4.1.3", "five8_const", - "getrandom 0.2.15", + "getrandom 0.2.16", "js-sys", "num-traits", "rand 0.8.5", @@ -3711,9 +3719,9 @@ dependencies = [ [[package]] name = "solana-rent" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88b4cd58602eb0c2250cd83a8cc8287ca6271b99af95d2a33250e6592c04e286" +checksum = "abb867706bd6e7bbbdf303416f44a5560bcef6f9a0fc5610a725c1a92103abc7" dependencies = [ "serde", "serde_derive", @@ 
-3723,19 +3731,19 @@ dependencies = [ [[package]] name = "solana-sanitize" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74c557ff8937946d24c4f188f3029c1fdba4e23a15ed11cc8b31a72017e911d5" +checksum = "ac599243d068ed88b40e781e02c92c8ed3edd0170337b1b4cee995e4fbe84af0" [[package]] name = "solana-sdk" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d755acdf62b367c1c4ca7ac1069c34a090d281b6425d11dd9410d4a147d99d3" +checksum = "cef4d9a579ff99aa5109921f729ab9cba07b207486b2c1eab8240c97777102ba" dependencies = [ "bincode", - "bitflags 2.8.0", - "borsh 1.5.5", + "bitflags 2.9.0", + "borsh 1.5.7", "bs58", "bytemuck", "bytemuck_derive", @@ -3763,7 +3771,7 @@ dependencies = [ "serde_derive", "serde_json", "serde_with", - "sha2 0.10.8", + "sha2 0.10.9", "sha3", "siphasher", "solana-account", @@ -3793,23 +3801,23 @@ dependencies = [ [[package]] name = "solana-sdk-macro" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9055600bc70a91936458b3a43a4173f8b8cd4ee64a0dc83cbb00737cadc519a5" +checksum = "b2ea6cfa40c712e5de92ffa2d62a2b296379d09411e0f1d7fcd9fecf5fcc5a30" dependencies = [ "bs58", "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] name = "solana-secp256k1-recover" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b904576bfc5b72172aed9c133fe54840625ab9d510bd429d453c54bd6e4245c3" +checksum = "1e9d4483cde845bb0f70374d2905014650057eafdcc546a3e50059e7643318e8" dependencies = [ - "borsh 1.5.5", + "borsh 1.5.7", "libsecp256k1", "solana-define-syscall", "thiserror", @@ -3817,9 +3825,9 @@ dependencies = [ [[package]] name = "solana-secp256r1-program" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c3c1329b7faa66f80bb3dadcece042589d22881120b6c0d0f712f742ad002f26" +checksum = "6feaf48cad9bf5ca1a04c8cd1fb45d4fa507ff86dae77802b8e8f47b89fb0eed" dependencies = [ "bytemuck", "openssl", @@ -3837,18 +3845,18 @@ checksum = "468aa43b7edb1f9b7b7b686d5c3aeb6630dc1708e86e31343499dd5c4d775183" [[package]] name = "solana-serde-varint" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e741efbc732c2e33fd600d39a5a5e63cbab18fc75fc84a98df68c2aa2b373b64" +checksum = "e3778f75e718af3c3e4b42ca67ec3a4197658855eaa5372a2f41e2378290b4fc" dependencies = [ "serde", ] [[package]] name = "solana-serialize-utils" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2a6511f5147f992239415bd4bb297ad593da57b4ab634ed9bc10f81a560bc90" +checksum = "4ccf458c0aaa5d517fa358c70a6322e2cb7c18c659464eaa7135de5b3c1b2837" dependencies = [ "solana-instruction", "solana-pubkey", @@ -3857,29 +3865,29 @@ dependencies = [ [[package]] name = "solana-sha256-hasher" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3456f5d3868b9ae8e7bc53529bbbd8bee48b0d9cf3783f918269e71e4ee5268d" +checksum = "4028550a372b5ce9514941fb1a04cfac9aa66f76fd9684a7b11ef37ac586e492" dependencies = [ - "sha2 0.10.8", + "sha2 0.10.9", "solana-define-syscall", "solana-hash", ] [[package]] name = "solana-short-vec" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01771c84475e25352169e3fc901cae565f75ff8c9b40a4fa858f776211f20cbc" +checksum = "be97defacda69848b33aa1fec3571435e5a3bfb55cdd2afd66867604eee3ff84" dependencies = [ "serde", ] [[package]] name = "solana-signature" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f89b547c800c3541d4d5d71de8c82f37a0050f361626213a425ad4f767da27b" +checksum = 
"97fdb44ae08fa08bcaf5a3c01cf0d1b9c363ab2e3e2602e9b7806f653d08b4d0" dependencies = [ "bs58", "ed25519-dalek", @@ -3892,9 +3900,9 @@ dependencies = [ [[package]] name = "solana-slot-hashes" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3012c024a81d591d02a10648d5f4256d6fc3c9d93bc5421cadba224794940f6c" +checksum = "8c7bb06f75cb9d4c44f71dbc1cf46b1e67aaa9f737b66389b18471a8db1d9a09" dependencies = [ "serde", "serde_derive", @@ -3904,9 +3912,9 @@ dependencies = [ [[package]] name = "solana-slot-history" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "817a68e2aae8fbcf00adef67eba05c513b0a461b5ed1fd0bd2cb1299a394a650" +checksum = "ff10530199def6788f8750cc274629e49e2c4baf181ddfb7c754813fd0bc252e" dependencies = [ "bv", "serde", @@ -3916,9 +3924,9 @@ dependencies = [ [[package]] name = "solana-stable-layout" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec316bf731aeb8e9e8a55634efb938eaf5c979d71a9e7d3de54f5848da4994a2" +checksum = "e36af09027fa5a658210d7add0c7661934a879439b68336dbe680263255d3f62" dependencies = [ "solana-instruction", "solana-pubkey", @@ -3926,18 +3934,18 @@ dependencies = [ [[package]] name = "solana-sysvar-id" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a6ca7b6e6bf9f8c0de74e90546426190385a1c0b8e4d4f1975165f2335f9dc0" +checksum = "a5d72e2ff3e0a3b14d0fd4010d1fe7e4fcc9c4d8e5d43826c75c5629477e8b1a" dependencies = [ "solana-pubkey", ] [[package]] name = "solana-timings" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39f948e963c99cee7d2a14da5faf864cb4ae298f8cb679fc088ec581d9d76aed" +checksum = "7a4cb26e92fe54cff708393ea63cb4b5c983cd1c86c5b5d4962523f57e95239e" dependencies = [ "eager", "enum-iterator", @@ -3946,9 
+3954,9 @@ dependencies = [ [[package]] name = "solana-transaction-error" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec8a6d17d8de8549df56d64b9af314eec3c4b705372790aa8dde7196e1c5f005" +checksum = "ccf8715c2acb247f4592eb7dc1d616454836f10c2d0e397a557fae4192337618" dependencies = [ "serde", "serde_derive", @@ -3958,14 +3966,14 @@ dependencies = [ [[package]] name = "solana-transaction-status" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eadee373e77a84b180d955847e7f013195b43498e1a8c526c27771e8d0140915" +checksum = "d7c62ad1caf3e15f05cf40dc82e765319750b4f6a48e1ad3ae2e8a4f6be6e7de" dependencies = [ "Inflector", "base64 0.22.1", "bincode", - "borsh 1.5.5", + "borsh 1.5.7", "bs58", "lazy_static", "log", @@ -3986,9 +3994,9 @@ dependencies = [ [[package]] name = "solana-transaction-status-client-types" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfc4018fa5363ccf0932b26821abf47c226f52fb865b2944d1f29db83a11774a" +checksum = "836a0e47c3d688676b37c1e3ff6a1fbb6a8b3bfaf050ee6343b6b993e3cc91cd" dependencies = [ "base64 0.22.1", "bincode", @@ -4004,9 +4012,9 @@ dependencies = [ [[package]] name = "solana-type-overrides" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9673b27fb01b5479a25bfdd83d5ea1112433c8cf81a0aa8616c829587b285ecd" +checksum = "ecb3bf3b629b5730bbc12ae75ef23b4665f3940fa98ac9bb4e251509952ae863" dependencies = [ "lazy_static", "rand 0.8.5", @@ -4014,9 +4022,9 @@ dependencies = [ [[package]] name = "solana-vote" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b1a73fa59c07095599091dd9f026aceb478109d61d41720883a22ead9c18f8e" +checksum = "76360d4e0e639a8f593ea7a314b2d930fc473419479fc3ce1f3a633bf8ea159f" dependencies = 
[ "itertools 0.12.1", "log", @@ -4028,9 +4036,9 @@ dependencies = [ [[package]] name = "solana-zk-token-sdk" -version = "2.1.11" +version = "2.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b9c007da85c5be2273c96647e4070bf69b2fda7ba43cddf5eab84c14b2fad67" +checksum = "8c2cc3254bc3d6f09fcf7677a4606d8bb5d7719860065cde3dd8e7e41022740a" dependencies = [ "aes-gcm-siv", "base64 0.22.1", @@ -4076,12 +4084,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "spin" -version = "0.9.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" - [[package]] name = "spl-associated-token-account" version = "4.0.0" @@ -4089,7 +4091,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68034596cf4804880d265f834af1ff2f821ad5293e41fa0f8f59086c181fc38e" dependencies = [ "assert_matches", - "borsh 1.5.5", + "borsh 1.5.7", "num-derive", "num-traits", "solana-program", @@ -4117,7 +4119,7 @@ checksum = "d9e8418ea6269dcfb01c712f0444d2c75542c04448b480e87de59d2865edc750" dependencies = [ "quote", "spl-discriminator-syn", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -4128,8 +4130,8 @@ checksum = "8c1f05593b7ca9eac7caca309720f2eafb96355e037e6d373b909a80fe7b69b9" dependencies = [ "proc-macro2", "quote", - "sha2 0.10.8", - "syn 2.0.98", + "sha2 0.10.9", + "syn 2.0.101", "thiserror", ] @@ -4148,7 +4150,7 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c704c88fc457fa649ba3aabe195c79d885c3f26709efaddc453c8de352c90b87" dependencies = [ - "borsh 1.5.5", + "borsh 1.5.7", "bytemuck", "bytemuck_derive", "solana-program", @@ -4177,8 +4179,8 @@ checksum = "e6d375dd76c517836353e093c2dbb490938ff72821ab568b545fd30ab3256b3e" dependencies = [ "proc-macro2", "quote", - "sha2 0.10.8", - "syn 2.0.98", + "sha2 0.10.9", + "syn 2.0.101", ] [[package]] @@ -4253,7 +4255,7 @@ version = "0.4.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "c6c2318ddff97e006ed9b1291ebec0750a78547f870f62a69c56fe3b46a5d8fc" dependencies = [ - "borsh 1.5.5", + "borsh 1.5.7", "solana-program", "spl-discriminator", "spl-pod", @@ -4321,9 +4323,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.98" +version = "2.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36147f1a48ae0ec2b5b3bc5b537d267457555a10dc06f3dbc8cb11ba3006d3b1" +checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf" dependencies = [ "proc-macro2", "quote", @@ -4344,13 +4346,13 @@ checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" [[package]] name = "synstructure" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -4394,18 +4396,17 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] name = "tempfile" -version = "3.16.0" +version = "3.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38c246215d7d24f48ae091a2902398798e05d978b24315d6efbc00ede9a8bb91" +checksum = "7437ac7763b9b123ccf33c338a5cc1bac6f69b45a136c19bdd8a65e3916435bf" dependencies = [ - "cfg-if", "fastrand", - "getrandom 0.3.1", + "getrandom 0.3.2", "once_cell", "rustix", "windows-sys 0.59.0", @@ -4428,7 +4429,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -4443,9 +4444,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.37" +version = "0.3.41" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21" +checksum = "8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40" dependencies = [ "deranged", "itoa", @@ -4458,15 +4459,15 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.2" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" +checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c" [[package]] name = "time-macros" -version = "0.2.19" +version = "0.2.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2834e6017e3e5e4b9834939793b282bc03b37a3336245fa820e35e233e2a85de" +checksum = "3526739392ec93fd8b359c8e98514cb3e8e021beb4e5f597b00a0221f8ed8a49" dependencies = [ "num-conv", "time-core", @@ -4484,9 +4485,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.8.1" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "022db8904dfa342efe721985167e9fcd16c29b226db4397ed752a761cfce81e8" +checksum = "09b3661f17e86524eccd4371ab0429194e0d7c008abb45f7a7495b1719463c71" dependencies = [ "tinyvec_macros", ] @@ -4499,9 +4500,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.43.0" +version = "1.45.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d61fa4ffa3de412bfea335c6ecff681de2b609ba3c77ef3e00e521813a9ed9e" +checksum = "2513ca694ef9ede0fb23fe71a4ee4107cb102b9dc1930f6d0fd77aae068ae165" dependencies = [ "backtrace", "bytes", @@ -4522,7 +4523,7 @@ checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -4537,11 +4538,11 @@ dependencies = [ [[package]] name = "tokio-rustls" -version = "0.26.1" +version = "0.26.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f6d0975eaace0cf0fcadee4e4aaa5da15b5c079146f2cffb67c113be122bf37" +checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b" dependencies = [ - "rustls 0.23.22", + "rustls 0.23.27", "tokio", ] @@ -4558,9 +4559,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.13" +version = "0.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7fcaa8d55a2bdd6b83ace262b016eca0d79ee02818c5c1bcdf0305114081078" +checksum = "66a539a9ad6d5d281510d5bd368c973d636c02dbf8a67300bfb6b950696ad7df" dependencies = [ "bytes", "futures-core", @@ -4580,17 +4581,17 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.6.8" +version = "0.6.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" +checksum = "3da5db5a963e24bc68be8b17b6fa82814bb22ee8660f192bb182771d498f09a3" [[package]] name = "toml_edit" -version = "0.22.24" +version = "0.22.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17b4795ff5edd201c7cd6dca065ae59972ce77d1b80fa0a84d94950ece7d1474" +checksum = "310068873db2c5b3e7659d2cc35d21855dbafa50d1ce336397c666e3cb08137e" dependencies = [ - "indexmap 2.7.1", + "indexmap 2.9.0", "toml_datetime", "winnow", ] @@ -4607,8 +4608,8 @@ dependencies = [ "base64 0.22.1", "bytes", "flate2", - "h2 0.4.7", - "http 1.2.0", + "h2 0.4.10", + "http 1.3.1", "http-body 1.0.1", "http-body-util", "hyper 1.6.0", @@ -4621,7 +4622,7 @@ dependencies = [ "rustls-pemfile 2.2.0", "socket2", "tokio", - "tokio-rustls 0.26.1", + "tokio-rustls 0.26.2", "tokio-stream", "tower 0.4.13", "tower-layer", @@ -4641,7 +4642,7 @@ dependencies = [ "prost-build", "prost-types", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -4722,7 +4723,7 @@ checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ 
"proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -4772,15 +4773,15 @@ checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "typenum" -version = "1.17.0" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" +checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" [[package]] name = "unicode-ident" -version = "1.0.16" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a210d160f08b701c8721ba1c726c11662f877ea6b7094007e1ca9a1041945034" +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" [[package]] name = "unicode-width" @@ -4864,7 +4865,7 @@ version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "458f7a779bf54acc9f347480ac654f68407d3aab21269a6e3c9f922acd9e2da9" dependencies = [ - "getrandom 0.3.1", + "getrandom 0.3.2", ] [[package]] @@ -4914,9 +4915,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasi" -version = "0.13.3+wasi-0.2.2" +version = "0.14.2+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2" +checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" dependencies = [ "wit-bindgen-rt", ] @@ -4943,7 +4944,7 @@ dependencies = [ "log", "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", "wasm-bindgen-shared", ] @@ -4978,7 +4979,7 @@ checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -5032,11 +5033,61 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows-core" -version 
= "0.52.0" +version = "0.61.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" +checksum = "4763c1de310c86d75a878046489e2e5ba02c649d185f21c67d4cf8a56d098980" dependencies = [ - "windows-targets 0.52.6", + "windows-implement", + "windows-interface", + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-implement" +version = "0.60.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.101", +] + +[[package]] +name = "windows-interface" +version = "0.59.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.101", +] + +[[package]] +name = "windows-link" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38" + +[[package]] +name = "windows-result" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c64fd11a4fd95df68efcfee5f44a294fe71b8bc6a91993e2791938abcc712252" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2ba9642430ee452d5a7aa78d72907ebe8cfda358e8cb7918a2050581322f97" +dependencies = [ + "windows-link", ] [[package]] @@ -5189,9 +5240,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.7.2" +version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59690dea168f2198d1a3b0cac23b8063efcd11012f10ae4698f284808c8ef603" +checksum = 
"d9fb597c990f03753e08d3c29efbfcf2019a003b4bf4ba19225c158e1549f0f3" dependencies = [ "memchr", ] @@ -5208,11 +5259,11 @@ dependencies = [ [[package]] name = "wit-bindgen-rt" -version = "0.33.0" +version = "0.39.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c" +checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" dependencies = [ - "bitflags 2.8.0", + "bitflags 2.9.0", ] [[package]] @@ -5229,7 +5280,7 @@ checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" [[package]] name = "yellowstone-fumarole-cli" -version = "0.1.0-pre1" +version = "0.1.0-pre.2+solana.2.1" dependencies = [ "clap", "clap-verbosity-flag", @@ -5254,12 +5305,12 @@ dependencies = [ [[package]] name = "yellowstone-fumarole-client" -version = "0.2.0-pre.1+solana.2.1.11" +version = "0.2.0-pre.2+solana.2.1" dependencies = [ "async-trait", "futures", "fxhash", - "http 1.2.0", + "http 1.3.1", "hyper 1.6.0", "lazy_static", "prometheus", @@ -5347,7 +5398,7 @@ checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", "synstructure", ] @@ -5357,8 +5408,16 @@ version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" dependencies = [ - "byteorder", - "zerocopy-derive", + "zerocopy-derive 0.7.35", +] + +[[package]] +name = "zerocopy" +version = "0.8.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1702d9583232ddb9174e01bb7c15a2ab8fb1bc6f227aa1233858c351a3ba0cb" +dependencies = [ + "zerocopy-derive 0.8.25", ] [[package]] @@ -5369,27 +5428,38 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", +] + +[[package]] +name = "zerocopy-derive" 
+version = "0.8.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28a6e20d751156648aa063f3800b706ee209a32c0b4d9f24be3d980b01be55ef" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.101", ] [[package]] name = "zerofrom" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" dependencies = [ "zerofrom-derive", ] [[package]] name = "zerofrom-derive" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", "synstructure", ] @@ -5410,7 +5480,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] @@ -5432,32 +5502,32 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.98", + "syn 2.0.101", ] [[package]] name = "zstd" -version = "0.13.2" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcf2b778a664581e31e389454a7072dab1647606d44f7feea22cd5abb9c9f3f9" +checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a" dependencies = [ "zstd-safe", ] [[package]] name = "zstd-safe" -version = "7.2.1" +version = "7.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54a3ab4db68cea366acc5c897c7b4d4d1b8994a9cd6e6f841f8964566a419059" +checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d" dependencies = [ "zstd-sys", ] [[package]] name = "zstd-sys" -version = 
"2.0.13+zstd.1.5.6" +version = "2.0.15+zstd.1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38ff0f21cfee8f97d94cef41359e0c89aa6113028ab0291aa8ca0038995a95aa" +checksum = "eb81183ddd97d0c74cedf1d50d85c8d08c1b8b68ee863bdee9e706eedba1a237" dependencies = [ "cc", "pkg-config", diff --git a/Cargo.toml b/Cargo.toml index 8b3e5ce..3de426c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -34,7 +34,7 @@ protobuf-src = "1.1.0" serde = "1.0.203" serde_with = "3.8.1" serde_yaml = "0.9.34" -solana-sdk = "=2.1.11" +solana-sdk = "~2.1.11" tabled = "0.18.0" thiserror = "1.0.61" tokio = "1.38.0" diff --git a/apps/yellowstone-fumarole-cli/Cargo.toml b/apps/yellowstone-fumarole-cli/Cargo.toml index fde4f3f..68e6f34 100644 --- a/apps/yellowstone-fumarole-cli/Cargo.toml +++ b/apps/yellowstone-fumarole-cli/Cargo.toml @@ -1,13 +1,14 @@ [package] name = "yellowstone-fumarole-cli" +description = "Yellowstone Fumarole CLI" +version = "0.1.0-pre.2+solana.2.1" authors.workspace = true -version = "0.1.0-pre1" edition.workspace = true homepage.workspace = true repository.workspace = true license.workspace = true keywords.workspace = true -publish.workspace = true +publish = true [[bin]] name = "fume" diff --git a/apps/yellowstone-fumarole-cli/src/prom.rs b/apps/yellowstone-fumarole-cli/src/prom.rs index f68f761..c3efb33 100644 --- a/apps/yellowstone-fumarole-cli/src/prom.rs +++ b/apps/yellowstone-fumarole-cli/src/prom.rs @@ -15,12 +15,10 @@ pub async fn prometheus_service_fn( match metrics { Ok(metrics) => Ok(Response::new(Full::new(Bytes::from(metrics)))), - Err(e) => { - Ok(Response::new(Full::new(Bytes::from(format!( - "Failed to encode metrics: {}", - e - ))))) - } + Err(e) => Ok(Response::new(Full::new(Bytes::from(format!( + "Failed to encode metrics: {}", + e + ))))), } } diff --git a/crates/yellowstone-fumarole-client/Cargo.toml b/crates/yellowstone-fumarole-client/Cargo.toml index 624e448..55c5c09 100644 --- a/crates/yellowstone-fumarole-client/Cargo.toml 
+++ b/crates/yellowstone-fumarole-client/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "yellowstone-fumarole-client" description = "Yellowstone Fumarole Client" -version = "0.2.0-pre.1+solana.2.1.11" +version = "0.2.0-pre.2+solana.2.1" authors = { workspace = true } edition = { workspace = true } homepage = { workspace = true } diff --git a/crates/yellowstone-fumarole-client/build.rs b/crates/yellowstone-fumarole-client/build.rs index c9687a8..02c5df0 100644 --- a/crates/yellowstone-fumarole-client/build.rs +++ b/crates/yellowstone-fumarole-client/build.rs @@ -4,10 +4,8 @@ fn main() { let package_root = std::env::var("CARGO_MANIFEST_DIR").unwrap(); let path = Path::new(&package_root); - // let yellowstone_api_proto_dir = path.join("yellowstone-api/proto"); let yellowstone_grpc_proto_dir = path.join("yellowstone-grpc-proto"); let proto_dir = path.join("proto"); - // let proto_dir = yellowstone_api_proto_dir.to_str().unwrap(); env::set_var("PROTOC", protobuf_src::protoc()); tonic_build::configure() diff --git a/crates/yellowstone-fumarole-client/src/lib.rs b/crates/yellowstone-fumarole-client/src/lib.rs index f5ede32..d4a7c3a 100644 --- a/crates/yellowstone-fumarole-client/src/lib.rs +++ b/crates/yellowstone-fumarole-client/src/lib.rs @@ -9,7 +9,7 @@ //! //! However, there are some differences: //! -//! - The `yellowstone-fumarole` client uses multiple gRPC connections to communicate with the Fumarole service : avoids [`HoL`] blocking. +//! - The `yellowstone-fumarole` (Coming soon) client uses multiple gRPC connections to communicate with the Fumarole service : avoids [`HoL`] blocking. //! - The `yellowstone-fumarole` subscribers are persistent and can be reused across multiple sessions (not computer). //! - The `yellowstone-fumarole` can reconnect to the Fumarole service if the connection is lost. //! 
@@ -405,7 +405,7 @@ impl FumaroleClient { /// pub async fn dragonsmouth_subscribe( &mut self, - consumer_group_name: S, + subscriber_name: S, request: geyser::SubscribeRequest, ) -> Result where @@ -413,7 +413,7 @@ impl FumaroleClient { { let handle = tokio::runtime::Handle::current(); self.dragonsmouth_subscribe_with_config_on( - consumer_group_name, + subscriber_name, request, Default::default(), handle, @@ -441,7 +441,7 @@ impl FumaroleClient { /// pub async fn dragonsmouth_subscribe_with_config_on( &mut self, - consumer_group_name: S, + subscriber_name: S, request: geyser::SubscribeRequest, config: FumaroleSubscribeConfig, handle: tokio::runtime::Handle, @@ -456,7 +456,7 @@ impl FumaroleClient { let (fume_control_plane_tx, fume_control_plane_rx) = mpsc::channel(100); let initial_join = JoinControlPlane { - consumer_group_name: Some(consumer_group_name.as_ref().to_string()), + consumer_group_name: Some(subscriber_name.as_ref().to_string()), }; let initial_join_command = ControlCommand { command: Some(proto::control_command::Command::InitialJoin(initial_join)), @@ -539,7 +539,7 @@ impl FumaroleClient { dragonsmouth_bidi: dm_bidi, subscribe_request: request, download_task_runner_chans, - consumer_group_name: consumer_group_name.as_ref().to_string(), + consumer_group_name: subscriber_name.as_ref().to_string(), control_plane_tx: fume_control_plane_tx, control_plane_rx: fume_control_plane_rx, dragonsmouth_outlet, From 092a99db5a1689a37b29346522e9bb58b0c8f3d1 Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Wed, 7 May 2025 14:26:08 -0400 Subject: [PATCH 19/56] v2: better subscribe options for fume cli --- apps/yellowstone-fumarole-cli/src/bin/fume.rs | 115 ++++++++++++++---- .../src/runtime/tokio.rs | 15 +-- 2 files changed, 100 insertions(+), 30 deletions(-) diff --git a/apps/yellowstone-fumarole-cli/src/bin/fume.rs b/apps/yellowstone-fumarole-cli/src/bin/fume.rs index f104877..e6569bb 100644 --- a/apps/yellowstone-fumarole-cli/src/bin/fume.rs +++ 
b/apps/yellowstone-fumarole-cli/src/bin/fume.rs @@ -3,7 +3,7 @@ use { futures::{future::BoxFuture, FutureExt}, solana_sdk::{bs58, pubkey::Pubkey}, std::{ - collections::HashMap, + collections::{HashMap, HashSet}, fmt::{self, Debug}, fs::File, io::{stdout, Write}, @@ -173,6 +173,50 @@ impl From for CommitmentLevel { } } + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum SubscribeDataType { + Account, + Transaction, + Slot, + BlockMeta, +} + +#[derive(Debug, Clone)] +pub struct SubscribeInclude { + set: HashSet, +} + +#[derive(Debug, thiserror::Error)] +#[error("Invalid include type {0}")] +pub struct FromStrSubscribeIncludeErr(String); + +impl FromStr for SubscribeInclude { + type Err = String; + + fn from_str(s: &str) -> Result { + let include = s + .split(',') + .map(|s| s.trim()) + .map(|s| match s { + "account" => Ok(vec![SubscribeDataType::Account]), + "tx" => Ok(vec![SubscribeDataType::Transaction]), + "meta" => Ok(vec![SubscribeDataType::BlockMeta]), + "slot" => Ok(vec![SubscribeDataType::Slot]), + "all" => Ok(vec![ + SubscribeDataType::Account, + SubscribeDataType::Transaction, + SubscribeDataType::Slot, + SubscribeDataType::BlockMeta, + ]), + unknown => Err(format!("Invalid include type: {unknown}")), + }) + .collect::, _>>()?; + let include = include.into_iter().flatten().collect::>(); + Ok(SubscribeInclude { set: include }) + } +} + #[derive(Debug, Clone, Parser)] struct SubscribeArgs { /// bind address for prometheus HTTP server endpoint, or "0" to bind to a random localhost port. @@ -184,6 +228,14 @@ struct SubscribeArgs { #[clap(long)] out: Option, + /// + /// Comma separate list of Geyser event types you want to subscribe to. + /// Valid values are: [account, tx, slot, block_meta, all] + /// If not specified, all event types will be subscribed to. 
+ /// Examples: account,tx, all, slot,meta,tx, tx + #[clap(long, default_value = "all")] + include: SubscribeInclude, + /// Name of the persistent subscriber #[clap(long)] name: String, @@ -404,6 +456,7 @@ async fn subscribe(mut client: FumaroleClient, args: SubscribeArgs) { let SubscribeArgs { prometheus, name: cg_name, + include, commitment, account: pubkey, owner, @@ -432,31 +485,47 @@ async fn subscribe(mut client: FumaroleClient, args: SubscribeArgs) { let commitment_level: CommitmentLevel = commitment.into(); // This request listen for all account updates and transaction updates - let request = SubscribeRequest { - accounts: HashMap::from([( - "f1".to_owned(), - SubscribeRequestFilterAccounts { - account: pubkey.iter().map(|p| p.to_string()).collect(), - owner: owner.iter().map(|p| p.to_string()).collect(), - ..Default::default() - }, - )]), - transactions: HashMap::from([( - "f1".to_owned(), - SubscribeRequestFilterTransactions { - account_include: tx_pubkey.iter().map(|p| p.to_string()).collect(), - ..Default::default() - }, - )]), - blocks_meta: HashMap::from([( - "f1".to_owned(), - SubscribeRequestFilterBlocksMeta::default(), - )]), - slots: HashMap::from([("f1".to_owned(), SubscribeRequestFilterSlots::default())]), + let mut request = SubscribeRequest { commitment: Some(commitment_level.into()), ..Default::default() }; + for to_include in include.set { + match to_include { + SubscribeDataType::Account => { + request.accounts = HashMap::from([( + "fumarole".to_owned(), + SubscribeRequestFilterAccounts { + account: pubkey.iter().map(|p| p.to_string()).collect(), + owner: owner.iter().map(|p| p.to_string()).collect(), + ..Default::default() + }, + )]); + }, + SubscribeDataType::Transaction => { + request.transactions = HashMap::from([( + "fumarole".to_owned(), + SubscribeRequestFilterTransactions { + account_include: tx_pubkey.iter().map(|p| p.to_string()).collect(), + ..Default::default() + }, + )]); + } + SubscribeDataType::Slot => { + request.slots = 
HashMap::from([( + "fumarole".to_owned(), + SubscribeRequestFilterSlots::default(), + )]); + }, + SubscribeDataType::BlockMeta => { + request.blocks_meta = HashMap::from([( + "fumarole".to_owned(), + SubscribeRequestFilterBlocksMeta::default(), + )]); + } + } + } + println!("Subscribing to consumer group {}", cg_name); let subscribe_config = FumaroleSubscribeConfig { concurrent_download_limit_per_tcp: NonZeroUsize::new(1).unwrap(), @@ -512,7 +581,7 @@ async fn subscribe(mut client: FumaroleClient, args: SubscribeArgs) { slot, .. } = block_meta; - Some(format!("block_meta={slot}")) + Some(format!("block={slot}, tx_count={}, entry_count={}", block_meta.executed_transaction_count, block_meta.entries_count)) } _ => None, } diff --git a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs index 1f96326..487adb6 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs @@ -137,10 +137,13 @@ impl TokioFumeDragonsmouthRuntime { self.sm.update_committed_offset(commit_offset_result.offset); } proto::control_response::Response::PollHist(blockchain_history) => { - tracing::debug!( - "polled blockchain history : {} events", - blockchain_history.events.len() - ); + if !blockchain_history.events.is_empty() { + tracing::debug!( + "polled blockchain history : {} events", + blockchain_history.events.len() + ); + } + self.sm.queue_blockchain_event(blockchain_history.events); #[cfg(feature = "prometheus")] { @@ -159,7 +162,6 @@ impl TokioFumeDragonsmouthRuntime { async fn poll_history_if_needed(&mut self) { if self.sm.need_new_blockchain_events() { let cmd = build_poll_history_cmd(Some(self.sm.committable_offset)); - tracing::debug!("polling history..."); self.control_plane_tx.send(cmd).await.expect("disconnected"); } } @@ -219,7 +221,6 @@ impl TokioFumeDragonsmouthRuntime { } async unsafe fn force_commit_offset(&mut self) { - 
tracing::debug!("committing offset {}", self.sm.committable_offset); self.control_plane_tx .send(build_commit_offset_cmd(self.sm.committable_offset)) .await @@ -258,7 +259,6 @@ impl TokioFumeDragonsmouthRuntime { } tracing::debug!("draining {} slot status", slot_status_vec.len()); - for slot_status in slot_status_vec { let mut matched_filters = vec![]; for (filter_name, filter) in &self.subscribe_request.slots { @@ -284,6 +284,7 @@ impl TokioFumeDragonsmouthRuntime { }, )), }; + tracing::trace!("sending dragonsmouth update: {:?}", update); if self.dragonsmouth_outlet.send(Ok(update)).await.is_err() { return; } From bf7b5cee7681e7c21d96dc2cd078b0148fab012b Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Wed, 7 May 2025 14:54:47 -0400 Subject: [PATCH 20/56] v2: better fume cli config loading --- apps/yellowstone-fumarole-cli/README.md | 61 +++++++++++-------- apps/yellowstone-fumarole-cli/src/bin/fume.rs | 41 ++++++++++--- crates/yellowstone-fumarole-client/src/lib.rs | 2 +- .../src/runtime/tokio.rs | 2 +- 4 files changed, 70 insertions(+), 36 deletions(-) diff --git a/apps/yellowstone-fumarole-cli/README.md b/apps/yellowstone-fumarole-cli/README.md index fac26c1..782fec6 100644 --- a/apps/yellowstone-fumarole-cli/README.md +++ b/apps/yellowstone-fumarole-cli/README.md @@ -12,11 +12,10 @@ $ cargo install yellowstone-fumarole-cli ## Usage ```sh -fume --help - +fume help Yellowstone Fumarole CLI -Usage: fume [OPTIONS] --config +Usage: fume [OPTIONS] Commands: test-config Test the connection to the fumarole service @@ -29,7 +28,7 @@ Commands: help Print this message or the help of the given subcommand(s) Options: - --config Path to static config file + --config Path to the config file. If not specified, the default config file will be used. The default config file is ~/.fumarole/config.yaml. You can also set the FUMAROLE_CONFIG environment variable to specify the config file. If the config file is not found, the program will exit with an error -v, --verbose... 
Increase logging verbosity -q, --quiet... Decrease logging verbosity -h, --help Print help @@ -37,7 +36,7 @@ Options: ``` -### Configuration file +## Configuration file Here's how to configure your config file: @@ -50,65 +49,75 @@ x-token = "00000000-0000-0000-0000-000000000000" You can test your configuration file with `test-config` subcommand: ```sh -$ fume --config path/to/config.toml test-config +fume --config path/to/config.toml test-config ``` -or with custom config path: +By default, if you don't provide `--config`, fumarole CLI will use the value at `FUMAROLE_CONFIG` environment variable if set, +otherwise fallback to `~/.fumarole/config.yaml`. + + +## Create a Persistent Subscriber ```sh -$ fume --config path/to/config.toml test-config +fume create --name helloworld-1 \ ``` -### Create a Persistent Subscriber - +## List all persistent subscribers ```sh -$ fume create --name helloworld-1 \ +fume list ``` -### List all persistent subscribers +## Delete a persistent subscribers ```sh -$ fume list +fume delete --name helloworld ``` -### Delete a persistent subscribers +## Delete all persistent subscribers ```sh -$ fume delete --name helloworld +fume delete-all ``` -### Delete all persistent subscribers +## Stream summary on terminal + +To stream out from the CLI, you can use the `stream` command and its various features! ```sh -$ fume delete-all +fume subscribe --name helloworld ``` -### Stream summary on terminal +### Filters -To stream out from the CLI, you can use the `stream` command and its various features! 
+You can filter by event type : account, transaction, slot status and block meta: + +```sh +fume subscribe --name --include tx # This will only stream out transaction +``` ```sh -$ fume subscribe --name helloworld +fume subscribe --name --include account,slot # This will only stream out account update and slot status ``` -You can filter the stream content by adding one or multiple occurrence of the following options: +```sh + fume subscribe --name --include all # This will stream everything : account update, transactions, slot status and block meta update. +``` -- `--tx-pubkey ` : filter transaction by account keys. -- `--owner ` : filter account update based on its owner -- `--account ` : filter account update based on accout key. +You can also filter incoming Geyser events by account pubkeys, account owners and transaction account inclusion. Here is an example to get all account updates owned by Token SPL program: ```sh -$ fume subscribe --name helloworld \ +fume subscribe --name helloworld \ --owner TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA ``` Here is how to chain multiple filters together: ```sh -$ fume subscribe --cg-name helloworld \ +fume -- subscribe --name test1 \ +--include tx,account --owner metaqbxxUerdq28cj1RbAWkYQm3ybzjb6a8bt518x1s \ --owner TokenzQdBNbLqP5VEhdkAS6EPFLC1PHnBqCXEpPxuEb \ --owner TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA \ diff --git a/apps/yellowstone-fumarole-cli/src/bin/fume.rs b/apps/yellowstone-fumarole-cli/src/bin/fume.rs index e6569bb..958611e 100644 --- a/apps/yellowstone-fumarole-cli/src/bin/fume.rs +++ b/apps/yellowstone-fumarole-cli/src/bin/fume.rs @@ -4,6 +4,7 @@ use { solana_sdk::{bs58, pubkey::Pubkey}, std::{ collections::{HashMap, HashSet}, + env, fmt::{self, Debug}, fs::File, io::{stdout, Write}, @@ -37,6 +38,8 @@ use { }, }; +const FUMAROLE_CONFIG_ENV: &str = "FUMAROLE_CONFIG"; + #[derive(Debug, Clone)] pub struct PrometheusBindAddr(SocketAddr); @@ -78,9 +81,13 @@ impl FromStr for PrometheusBindAddr { 
#[derive(Debug, Clone, Parser)] #[clap(author, version, about = "Yellowstone Fumarole CLI")] struct Args { - /// Path to static config file + /// Path to the config file. + /// If not specified, the default config file will be used. + /// The default config file is ~/.fumarole/config.yaml. + /// You can also set the FUMAROLE_CONFIG environment variable to specify the config file. + /// If the config file is not found, the program will exit with an error. #[clap(long)] - config: PathBuf, + config: Option, #[clap(flatten)] verbose: clap_verbosity_flag::Verbosity, @@ -173,7 +180,6 @@ impl From for CommitmentLevel { } } - #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum SubscribeDataType { Account, @@ -501,7 +507,7 @@ async fn subscribe(mut client: FumaroleClient, args: SubscribeArgs) { ..Default::default() }, )]); - }, + } SubscribeDataType::Transaction => { request.transactions = HashMap::from([( "fumarole".to_owned(), @@ -516,7 +522,7 @@ async fn subscribe(mut client: FumaroleClient, args: SubscribeArgs) { "fumarole".to_owned(), SubscribeRequestFilterSlots::default(), )]); - }, + } SubscribeDataType::BlockMeta => { request.blocks_meta = HashMap::from([( "fumarole".to_owned(), @@ -618,6 +624,14 @@ async fn test_config(mut fumarole_client: FumaroleClient) { } } +fn home_dir() -> Option { + if cfg!(target_os = "windows") { + env::var("USERPROFILE").ok().map(PathBuf::from) + } else { + env::var("HOME").ok().map(PathBuf::from) + } +} + #[tokio::main] async fn main() { let args = Args::parse(); @@ -631,9 +645,20 @@ async fn main() { .with_line_number(true) .init(); - // setup_tracing_test_many(["yellowstone_fumarole_client"]); - let config = std::fs::read_to_string(&args.config).expect("Failed to read config file"); - + let maybe_config = args.config; + let config = if let Some(config_path) = maybe_config { + std::fs::read_to_string(&config_path).expect("Failed to read config file") + } else { + let mut default_config_path = home_dir().expect("Failed to get home 
directory"); + default_config_path.push(".fumarole"); + default_config_path.push("config.yaml"); + + let config_path = std::env::var(FUMAROLE_CONFIG_ENV) + .map(PathBuf::from) + .unwrap_or(default_config_path); + std::fs::read_to_string(&config_path) + .unwrap_or_else(|_| panic!("Failed to read config file at {config_path:?}")) + }; let config = serde_yaml::from_str::(config.as_str()) .expect("failed to parse fumarole config"); diff --git a/crates/yellowstone-fumarole-client/src/lib.rs b/crates/yellowstone-fumarole-client/src/lib.rs index d4a7c3a..a5c69d7 100644 --- a/crates/yellowstone-fumarole-client/src/lib.rs +++ b/crates/yellowstone-fumarole-client/src/lib.rs @@ -223,7 +223,7 @@ pub const DEFAULT_DRAGONSMOUTH_CAPACITY: usize = 10000; /// /// Default Fumarole commit offset interval /// -pub const DEFAULT_COMMIT_INTERVAL: Duration = Duration::from_secs(60); +pub const DEFAULT_COMMIT_INTERVAL: Duration = Duration::from_secs(10); /// /// Default maximum number of consecutive failed slot download attempts before failing the fumarole session. 
diff --git a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs index 487adb6..baa61ce 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs @@ -143,7 +143,7 @@ impl TokioFumeDragonsmouthRuntime { blockchain_history.events.len() ); } - + self.sm.queue_blockchain_event(blockchain_history.events); #[cfg(feature = "prometheus")] { From 0e0c25f04e3e37d48c5c939dd9cdf85239bcd4d9 Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Thu, 8 May 2025 16:58:13 -0400 Subject: [PATCH 21/56] v2: added gc and slot retention for fumarole sm --- crates/yellowstone-fumarole-client/src/lib.rs | 17 ++++++++++- .../src/runtime/mod.rs | 28 ++++++++++++++++--- .../src/runtime/tokio.rs | 10 ++++++- 3 files changed, 49 insertions(+), 6 deletions(-) diff --git a/crates/yellowstone-fumarole-client/src/lib.rs b/crates/yellowstone-fumarole-client/src/lib.rs index a5c69d7..d84b04c 100644 --- a/crates/yellowstone-fumarole-client/src/lib.rs +++ b/crates/yellowstone-fumarole-client/src/lib.rs @@ -116,6 +116,8 @@ pub mod metrics; pub(crate) mod runtime; pub(crate) mod util; +use runtime::{tokio::DEFAULT_GC_INTERVAL, DEFAULT_SLOT_MEMORY_RETENTION}; + use { config::FumaroleConfig, futures::future::{select, Either}, @@ -295,6 +297,16 @@ pub struct FumaroleSubscribeConfig { /// Capacity of each data channel for the fumarole client /// pub data_channel_capacity: NonZeroUsize, + + /// + /// Garbage collection interval for the fumarole client in ticks (loop iteration of the fumarole runtime) + /// + pub gc_interval: usize, + + /// + /// How far back in time the fumarole client should retain slot memory. + /// This is used to avoid downloading the same slot multiple times. 
+ pub slot_memory_retention: usize, } impl Default for FumaroleSubscribeConfig { @@ -308,6 +320,8 @@ impl Default for FumaroleSubscribeConfig { commit_interval: DEFAULT_COMMIT_INTERVAL, max_failed_slot_download_attempt: DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT, data_channel_capacity: NonZeroUsize::new(DEFAULT_DRAGONSMOUTH_CAPACITY).unwrap(), + gc_interval: DEFAULT_GC_INTERVAL, + slot_memory_retention: DEFAULT_SLOT_MEMORY_RETENTION, } } } @@ -493,7 +507,7 @@ impl FumaroleClient { .get(&0) .expect("no last committed offset"); - let sm = FumaroleSM::new(*last_committed_offset); + let sm = FumaroleSM::new(*last_committed_offset, config.slot_memory_retention); let (dm_tx, dm_rx) = mpsc::channel(100); let dm_bidi = DragonsmouthSubscribeRequestBidi { @@ -545,6 +559,7 @@ impl FumaroleClient { dragonsmouth_outlet, commit_interval: config.commit_interval, last_commit: Instant::now(), + gc_interval: config.gc_interval, }; let download_task_runner_jh = handle.spawn(grpc_download_task_runner.run()); let fumarole_rt_jh = handle.spawn(tokio_rt.run()); diff --git a/crates/yellowstone-fumarole-client/src/runtime/mod.rs b/crates/yellowstone-fumarole-client/src/runtime/mod.rs index b761edc..26fcb80 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/mod.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/mod.rs @@ -26,6 +26,8 @@ pub(crate) type FumeOffset = i64; pub(crate) type FumeSessionSequence = u64; +pub(crate) const DEFAULT_SLOT_MEMORY_RETENTION: usize = 10000; + #[derive(Debug, Clone)] pub(crate) struct FumeDownloadRequest { pub slot: Slot, @@ -171,10 +173,15 @@ pub(crate) struct FumaroleSM { sequence: u64, sequence_to_offset: FxHashMap, + + slot_memory_retention: usize, } impl FumaroleSM { - pub fn new(last_committed_offset: FumeOffset) -> Self { + pub fn new( + last_committed_offset: FumeOffset, + slot_memory_retention: usize, + ) -> Self { Self { last_committed_offset, slot_commitment_progression: Default::default(), @@ -189,6 +196,7 @@ impl FumaroleSM { sequence: 1, 
last_processed_fume_sequence: 0, sequence_to_offset: Default::default(), + slot_memory_retention, } } @@ -209,6 +217,18 @@ impl FumaroleSM { ret } + pub fn gc(&mut self) { + while self.downloaded_slot.len() > self.slot_memory_retention { + let Some(slot) = self.downloaded_slot.pop_first() else { + break; + }; + + self.slot_commitment_progression.remove(&slot); + self.inflight_slot_shard_download.remove(&slot); + self.blocked_slot_status_update.remove(&slot); + } + } + pub fn queue_blockchain_event(&mut self, events: IT) where IT: IntoIterator, @@ -482,7 +502,7 @@ mod tests { #[test] fn test_fumarole_sm_happy_path() { - let mut sm = FumaroleSM::new(0); + let mut sm = FumaroleSM::new(0, DEFAULT_SLOT_MEMORY_RETENTION); let event = random_blockchain_event(1, 1, CommitmentLevel::Processed); sm.queue_blockchain_event(vec![event.clone()]); @@ -523,7 +543,7 @@ mod tests { #[test] fn it_should_dedup_slot_status() { - let mut sm = FumaroleSM::new(0); + let mut sm = FumaroleSM::new(0, DEFAULT_SLOT_MEMORY_RETENTION); let event = random_blockchain_event(1, 1, CommitmentLevel::Processed); sm.queue_blockchain_event(vec![event.clone()]); @@ -553,7 +573,7 @@ mod tests { #[test] fn it_should_handle_min_commitment_level() { - let mut sm = FumaroleSM::new(0); + let mut sm = FumaroleSM::new(0, DEFAULT_SLOT_MEMORY_RETENTION); let event = random_blockchain_event(1, 1, CommitmentLevel::Processed); sm.queue_blockchain_event(vec![event.clone()]); diff --git a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs index baa61ce..39868f9 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs @@ -31,6 +31,8 @@ use { }, }; +pub const DEFAULT_GC_INTERVAL: usize = 100; + /// /// Mimics Dragonsmouth subscribe request bidirectional stream. 
/// @@ -76,6 +78,7 @@ pub(crate) struct TokioFumeDragonsmouthRuntime { pub dragonsmouth_outlet: mpsc::Sender>, pub commit_interval: Duration, pub last_commit: Instant, + pub gc_interval: usize, // in ticks } const fn build_poll_history_cmd(from: Option) -> ControlCommand { @@ -347,8 +350,13 @@ impl TokioFumeDragonsmouthRuntime { unsafe { self.force_commit_offset().await; } - + let mut ticks = 0; loop { + ticks += 1; + if ticks % self.gc_interval == 0 { + self.sm.gc(); + ticks = 0; + } if self.dragonsmouth_outlet.is_closed() { tracing::debug!("Detected dragonsmouth outlet closed"); break; From 6712bc329a0ac1c776ccd9c73045eec35113f719 Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Fri, 9 May 2025 10:19:49 -0400 Subject: [PATCH 22/56] v2: updated cargo deps --- Cargo.lock | 193 +++++++----------- Cargo.toml | 12 +- crates/yellowstone-fumarole-client/src/lib.rs | 65 +++--- .../src/runtime/mod.rs | 5 +- .../src/runtime/tokio.rs | 2 +- rust-toolchain.toml | 2 +- 6 files changed, 126 insertions(+), 153 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ff9988b..bdec349 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -65,14 +65,14 @@ dependencies = [ [[package]] name = "ahash" -version = "0.8.11" +version = "0.8.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" +checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75" dependencies = [ "cfg-if", "once_cell", "version_check", - "zerocopy 0.7.35", + "zerocopy", ] [[package]] @@ -669,9 +669,9 @@ checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" [[package]] name = "cc" -version = "1.2.21" +version = "1.2.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8691782945451c1c383942c4874dbe63814f61cb57ef773cda2972682b7bb3c0" +checksum = "32db95edf998450acc7881c932f94cd9b05c87b4b2599e8bab064753da4acfd1" dependencies = [ "jobserver", "libc", @@ -1370,9 
+1370,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73fea8450eea4bac3940448fb7ae50d91f034f941199fcd9d909a5a07aa455f0" +checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" dependencies = [ "cfg-if", "libc", @@ -1682,21 +1682,22 @@ dependencies = [ [[package]] name = "icu_collections" -version = "1.5.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47" dependencies = [ "displaydoc", + "potential_utf", "yoke", "zerofrom", "zerovec", ] [[package]] -name = "icu_locid" -version = "1.5.0" +name = "icu_locale_core" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a" dependencies = [ "displaydoc", "litemap", @@ -1705,31 +1706,11 @@ dependencies = [ "zerovec", ] -[[package]] -name = "icu_locid_transform" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" -dependencies = [ - "displaydoc", - "icu_locid", - "icu_locid_transform_data", - "icu_provider", - "tinystr", - "zerovec", -] - -[[package]] -name = "icu_locid_transform_data" -version = "1.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7515e6d781098bf9f7205ab3fc7e9709d34554ae0b21ddbcb5febfa4bc7df11d" - [[package]] name = "icu_normalizer" -version = "1.5.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +checksum = 
"436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979" dependencies = [ "displaydoc", "icu_collections", @@ -1737,67 +1718,54 @@ dependencies = [ "icu_properties", "icu_provider", "smallvec", - "utf16_iter", - "utf8_iter", - "write16", "zerovec", ] [[package]] name = "icu_normalizer_data" -version = "1.5.1" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5e8338228bdc8ab83303f16b797e177953730f601a96c25d10cb3ab0daa0cb7" +checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3" [[package]] name = "icu_properties" -version = "1.5.1" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +checksum = "2549ca8c7241c82f59c80ba2a6f415d931c5b58d24fb8412caa1a1f02c49139a" dependencies = [ "displaydoc", "icu_collections", - "icu_locid_transform", + "icu_locale_core", "icu_properties_data", "icu_provider", - "tinystr", + "potential_utf", + "zerotrie", "zerovec", ] [[package]] name = "icu_properties_data" -version = "1.5.1" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85fb8799753b75aee8d2a21d7c14d9f38921b54b3dbda10f5a3c7a7b82dba5e2" +checksum = "8197e866e47b68f8f7d95249e172903bec06004b18b2937f1095d40a0c57de04" [[package]] name = "icu_provider" -version = "1.5.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af" dependencies = [ "displaydoc", - "icu_locid", - "icu_provider_macros", + "icu_locale_core", "stable_deref_trait", "tinystr", "writeable", "yoke", "zerofrom", + "zerotrie", "zerovec", ] -[[package]] -name = "icu_provider_macros" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", -] - [[package]] name = "ident_case" version = "1.0.1" @@ -1817,9 +1785,9 @@ dependencies = [ [[package]] name = "idna_adapter" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" dependencies = [ "icu_normalizer", "icu_properties", @@ -1907,7 +1875,7 @@ version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38f262f097c174adebe41eb73d66ae9c06b2844fb0da69969647bbddd9b0538a" dependencies = [ - "getrandom 0.3.2", + "getrandom 0.3.3", "libc", ] @@ -1998,9 +1966,9 @@ checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" [[package]] name = "litemap" -version = "0.7.5" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23fb14cb19457329c82206317a5663005a4d404783dc74f4252769b0d5f42856" +checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" [[package]] name = "lock_api" @@ -2430,6 +2398,15 @@ dependencies = [ "universal-hash", ] +[[package]] +name = "potential_utf" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585" +dependencies = [ + "zerovec", +] + [[package]] name = "powerfmt" version = "0.2.0" @@ -2442,7 +2419,7 @@ version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" dependencies = [ - "zerocopy 0.8.25", + "zerocopy", ] [[package]] @@ -2878,9 +2855,12 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.11.0" +version = "1.12.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "917ce264624a4b4db1c364dcc35bfca9ded014d0a958cd47ad3e960e988ea51c" +checksum = "229a4a4c221013e7e1f1a043678c5cc39fe5171437c88fb47151a21e6f5b5c79" +dependencies = [ + "zeroize", +] [[package]] name = "rustls-webpki" @@ -4406,7 +4386,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7437ac7763b9b123ccf33c338a5cc1bac6f69b45a136c19bdd8a65e3916435bf" dependencies = [ "fastrand", - "getrandom 0.3.2", + "getrandom 0.3.3", "once_cell", "rustix", "windows-sys 0.59.0", @@ -4475,9 +4455,9 @@ dependencies = [ [[package]] name = "tinystr" -version = "0.7.6" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b" dependencies = [ "displaydoc", "zerovec", @@ -4841,12 +4821,6 @@ dependencies = [ "percent-encoding", ] -[[package]] -name = "utf16_iter" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" - [[package]] name = "utf8_iter" version = "1.0.4" @@ -4865,7 +4839,7 @@ version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "458f7a779bf54acc9f347480ac654f68407d3aab21269a6e3c9f922acd9e2da9" dependencies = [ - "getrandom 0.3.2", + "getrandom 0.3.3", ] [[package]] @@ -5240,9 +5214,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.7.9" +version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9fb597c990f03753e08d3c29efbfcf2019a003b4bf4ba19225c158e1549f0f3" +checksum = "c06928c8748d81b05c9be96aad92e1b6ff01833332f281e8cfca3be4b35fc9ec" dependencies = [ "memchr", ] @@ -5266,17 +5240,11 @@ dependencies = [ "bitflags 2.9.0", ] -[[package]] 
-name = "write16" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" - [[package]] name = "writeable" -version = "0.5.5" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" +checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" [[package]] name = "yellowstone-fumarole-cli" @@ -5380,9 +5348,9 @@ dependencies = [ [[package]] name = "yoke" -version = "0.7.5" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc" dependencies = [ "serde", "stable_deref_trait", @@ -5392,9 +5360,9 @@ dependencies = [ [[package]] name = "yoke-derive" -version = "0.7.5" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" dependencies = [ "proc-macro2", "quote", @@ -5402,33 +5370,13 @@ dependencies = [ "synstructure", ] -[[package]] -name = "zerocopy" -version = "0.7.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" -dependencies = [ - "zerocopy-derive 0.7.35", -] - [[package]] name = "zerocopy" version = "0.8.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1702d9583232ddb9174e01bb7c15a2ab8fb1bc6f227aa1233858c351a3ba0cb" dependencies = [ - "zerocopy-derive 0.8.25", -] - -[[package]] -name = "zerocopy-derive" -version = "0.7.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", + "zerocopy-derive", ] [[package]] @@ -5483,11 +5431,22 @@ dependencies = [ "syn 2.0.101", ] +[[package]] +name = "zerotrie" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + [[package]] name = "zerovec" -version = "0.10.4" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +checksum = "4a05eb080e015ba39cc9e23bbe5e7fb04d5fb040350f99f34e338d5fdd294428" dependencies = [ "yoke", "zerofrom", @@ -5496,9 +5455,9 @@ dependencies = [ [[package]] name = "zerovec-derive" -version = "0.10.3" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" dependencies = [ "proc-macro2", "quote", diff --git a/Cargo.toml b/Cargo.toml index 3de426c..09fd3e5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -29,18 +29,18 @@ hyper = "1.3.1" hyper-util = "0.1" lazy_static = "~1.5.0" prometheus = "~0.13.0" -prost = "0.13.0" -protobuf-src = "1.1.0" +prost = "~0.13.0" +protobuf-src = "~1.1.0" serde = "1.0.203" serde_with = "3.8.1" serde_yaml = "0.9.34" solana-sdk = "~2.1.11" tabled = "0.18.0" thiserror = "1.0.61" -tokio = "1.38.0" -tokio-stream = "0.1.11" -tonic = "0.12.3" -tonic-build = "0.12.3" +tokio = "^1" +tokio-stream = "~0.1.11" +tonic = "~0.12.3" +tonic-build = "~0.12.3" tower = "0.5.2" tracing = "0.1.41" tracing-subscriber = { version = "0.3.18", features = ["env-filter"] } diff --git a/crates/yellowstone-fumarole-client/src/lib.rs b/crates/yellowstone-fumarole-client/src/lib.rs index 
d84b04c..b11d274 100644 --- a/crates/yellowstone-fumarole-client/src/lib.rs +++ b/crates/yellowstone-fumarole-client/src/lib.rs @@ -19,7 +19,7 @@ //! //! ## Create a `FumaroleClient` //! -//! ```ignore +//! ```rust //! use yellowstone_fumarole_client::FumaroleClient; //! use yellowstone_fumarole_client::config::FumaroleConfig; //! @@ -47,30 +47,46 @@ //! ``` //! ## Dragonsmouth-like Subscribe //! -//! ```ignore -//! use yellowstone_fumarole_client::FumaroleClient; -//! +//! ```rust +//! use { +//! yellowstone_fumarole_client::{ +//! FumaroleClient, config::FumaroleConfig, DragonsmouthAdapterSession +//! }, +//! yellowstone_grpc_proto::geyser::{ +//! SubscribeRequest, SubscribeRequestFilterAccounts, +//! SubscribeRequestFilterTransactions, +//! }, +//! }; //! -//! let mut client = FumaroleClient::connect(config).await.unwrap(); +//! #[tokio::main] +//! async fn main() { +//! let config = FumaroleConfig { +//! endpoint: "https://example.com".to_string(), +//! x_token: Some("00000000-0000-0000-0000-000000000000".to_string()), +//! max_decoding_message_size_bytes: FumaroleConfig::default_max_decoding_message_size_bytes(), +//! x_metadata: Default::default(), +//! }; //! -//! let request = geyser::SubscribeRequest { -//! accounts: HashMap::from([("f1".to_owned(), SubscribeRequestFilterAccounts::default())]), -//! transactions: HashMap::from([("f1".to_owned(), SubscribeRequestFilterTransactions::default())]), -//! ..Default::default() -//! }; +//! let mut client = FumaroleClient::connect(config).await.unwrap(); //! +//! let request = SubscribeRequest { +//! accounts: HashMap::from([("f1".to_owned(), SubscribeRequestFilterAccounts::default())]), +//! transactions: HashMap::from([("f1".to_owned(), SubscribeRequestFilterTransactions::default())]), +//! ..Default::default() +//! }; //! -//! let dragonsmouth_adapter = client.dragonsmouth_subscribe("my-consumer-group", request).await.unwrap(); +//! 
let dragonsmouth_adapter = client.dragonsmouth_subscribe("my-consumer-group", request).await.unwrap(); //! -//! let DragonsmouthAdapterSession { -//! sink: _, // Channel to update [`SubscribeRequest`] requests to the fumarole service -//! mut source, // Channel to receive updates from the fumarole service -//! runtime_handle: _, // Handle to the fumarole session client runtime -//! } = dragonsmouth_adapter; +//! let DragonsmouthAdapterSession { +//! sink: _, // Channel to update [`SubscribeRequest`] requests to the fumarole service +//! mut source, // Channel to receive updates from the fumarole service +//! runtime_handle: _, // Handle to the fumarole session client runtime +//! } = dragonsmouth_adapter; //! -//! while let Some(result) = source.recv().await { -//! let event = result.expect("Failed to receive event"); -//! // ... do something with the event +//! while let Some(result) = source.recv().await { +//! let event = result.expect("Failed to receive event"); +//! println!("{:?}", event); +//! } //! } //! ``` //! 
@@ -116,15 +132,16 @@ pub mod metrics; pub(crate) mod runtime; pub(crate) mod util; -use runtime::{tokio::DEFAULT_GC_INTERVAL, DEFAULT_SLOT_MEMORY_RETENTION}; - use { config::FumaroleConfig, futures::future::{select, Either}, proto::control_response::Response, runtime::{ - tokio::{DownloadTaskRunnerChannels, GrpcDownloadTaskRunner, TokioFumeDragonsmouthRuntime}, - FumaroleSM, + tokio::{ + DownloadTaskRunnerChannels, GrpcDownloadTaskRunner, TokioFumeDragonsmouthRuntime, + DEFAULT_GC_INTERVAL, + }, + FumaroleSM, DEFAULT_SLOT_MEMORY_RETENTION, }, std::{ collections::HashMap, @@ -300,7 +317,7 @@ pub struct FumaroleSubscribeConfig { /// /// Garbage collection interval for the fumarole client in ticks (loop iteration of the fumarole runtime) - /// + /// pub gc_interval: usize, /// diff --git a/crates/yellowstone-fumarole-client/src/runtime/mod.rs b/crates/yellowstone-fumarole-client/src/runtime/mod.rs index 26fcb80..e8cc88f 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/mod.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/mod.rs @@ -178,10 +178,7 @@ pub(crate) struct FumaroleSM { } impl FumaroleSM { - pub fn new( - last_committed_offset: FumeOffset, - slot_memory_retention: usize, - ) -> Self { + pub fn new(last_committed_offset: FumeOffset, slot_memory_retention: usize) -> Self { Self { last_committed_offset, slot_commitment_progression: Default::default(), diff --git a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs index 39868f9..3acdaf3 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs @@ -353,7 +353,7 @@ impl TokioFumeDragonsmouthRuntime { let mut ticks = 0; loop { ticks += 1; - if ticks % self.gc_interval == 0 { + if ticks % self.gc_interval == 0 { self.sm.gc(); ticks = 0; } diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 071ff5b..86cda94 100644 --- a/rust-toolchain.toml +++ 
b/rust-toolchain.toml @@ -1,5 +1,5 @@ [toolchain] -channel = "1.81.0" +channel = "1.82.0" components = ["clippy", "rustfmt"] targets = [] profile = "minimal" From 138e7fbf741287bba6d0da554fe77514acaa3a5e Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Fri, 16 May 2025 14:17:09 -0400 Subject: [PATCH 23/56] v2: unpinned deps --- Cargo.lock | 70 +++++++++++++++++++++++++++--------------------------- Cargo.toml | 44 +++++++++++++++++----------------- 2 files changed, 57 insertions(+), 57 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index bdec349..180ce16 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -476,9 +476,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.9.0" +version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" +checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967" dependencies = [ "serde", ] @@ -669,9 +669,9 @@ checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" [[package]] name = "cc" -version = "1.2.22" +version = "1.2.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32db95edf998450acc7881c932f94cd9b05c87b4b2599e8bab064753da4acfd1" +checksum = "5f4ac86a9e5bc1e2b3449ab9d7d3a6a405e3d1bb28d7b9be8614f55846ae3766" dependencies = [ "jobserver", "libc", @@ -728,9 +728,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.37" +version = "4.5.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eccb054f56cbd38340b380d4a8e69ef1f02f1af43db2f0cc817a4774d80ae071" +checksum = "ed93b9805f8ba930df42c2590f05453d5ec36cbb85d018868a5b24d31f6ac000" dependencies = [ "clap_builder", "clap_derive", @@ -749,9 +749,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.37" +version = "4.5.38" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "efd9466fac8543255d3b1fcad4762c5e116ffe808c8a3043d4263cd4fd4862a2" +checksum = "379026ff283facf611b0ea629334361c4211d1b12ee01024eec1591133b04120" dependencies = [ "anstream", "anstyle", @@ -1136,9 +1136,9 @@ checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "errno" -version = "0.3.11" +version = "0.3.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "976dd42dc7e85965fe702eb8164f21f450704bdde31faefd6471dba214cb594e" +checksum = "cea14ef9355e3beab063703aa9dab15afd25f0667c341310c1e5274bb1d0da18" dependencies = [ "libc", "windows-sys 0.59.0", @@ -2065,9 +2065,9 @@ dependencies = [ [[package]] name = "multimap" -version = "0.10.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "defc4c55412d89136f966bbb339008b474350e5e6e78d2714439c386b3137a03" +checksum = "1d87ecb2933e8aeadb3e3a02b828fed80a7528047e68b4f424523a0981a3a084" [[package]] name = "nu-ansi-term" @@ -2230,7 +2230,7 @@ version = "0.10.72" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fedfea7d58a1f73118430a55da6a286e7b044961736ce96a16a17068ea25e5da" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", "cfg-if", "foreign-types", "libc", @@ -2664,7 +2664,7 @@ version = "0.5.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "928fca9cf2aa042393a8325b9ead81d2f0df4cb12e1e24cef072922ccd99c5af" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", ] [[package]] @@ -2789,7 +2789,7 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c71e83d6afe7ff64890ec6b71d6a69bb8a610ab78ce364b3352876bb4c801266" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", "errno", "libc", "linux-raw-sys", @@ -2818,7 +2818,7 @@ dependencies = [ "once_cell", "ring", "rustls-pki-types", - "rustls-webpki 0.103.2", + "rustls-webpki 0.103.3", "subtle", "zeroize", ] @@ -2874,9 +2874,9 @@ dependencies 
= [ [[package]] name = "rustls-webpki" -version = "0.103.2" +version = "0.103.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7149975849f1abb3832b246010ef62ccc80d3a76169517ada7188252b9cfb437" +checksum = "e4a72fe2bcf7a6ac6fd7d0b9e5cb68aeb7d4c0a0271730218b3e92d43b4eb435" dependencies = [ "ring", "rustls-pki-types", @@ -2932,7 +2932,7 @@ version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "271720403f46ca04f7ba6f55d438f8bd878d6b8ca0a1046e8228c4145bcbb316" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", "core-foundation 0.10.0", "core-foundation-sys", "libc", @@ -3497,7 +3497,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a46ca92cd3303aa3a225b4b3b4d9b2d29e42927545f1c1ff4042ca516b4decbd" dependencies = [ "bincode", - "bitflags 2.9.0", + "bitflags 2.9.1", "cfg_eval", "serde", "serde_derive", @@ -3522,7 +3522,7 @@ checksum = "1c3aa133068171f46e9583dc9c20221b9a67459e7b8aecd3be5b49af60b2887f" dependencies = [ "base64 0.22.1", "bincode", - "bitflags 2.9.0", + "bitflags 2.9.1", "blake3", "borsh 0.10.4", "borsh 1.5.7", @@ -3722,7 +3722,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cef4d9a579ff99aa5109921f729ab9cba07b207486b2c1eab8240c97777102ba" dependencies = [ "bincode", - "bitflags 2.9.0", + "bitflags 2.9.1", "borsh 1.5.7", "bs58", "bytemuck", @@ -4381,9 +4381,9 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.19.1" +version = "3.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7437ac7763b9b123ccf33c338a5cc1bac6f69b45a136c19bdd8a65e3916435bf" +checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1" dependencies = [ "fastrand", "getrandom 0.3.3", @@ -5007,9 +5007,9 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows-core" -version = "0.61.0" +version = "0.61.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "4763c1de310c86d75a878046489e2e5ba02c649d185f21c67d4cf8a56d098980" +checksum = "46ec44dc15085cea82cf9c78f85a9114c463a369786585ad2882d1ff0b0acf40" dependencies = [ "windows-implement", "windows-interface", @@ -5048,18 +5048,18 @@ checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38" [[package]] name = "windows-result" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c64fd11a4fd95df68efcfee5f44a294fe71b8bc6a91993e2791938abcc712252" +checksum = "4b895b5356fc36103d0f64dd1e94dfa7ac5633f1c9dd6e80fe9ec4adef69e09d" dependencies = [ "windows-link", ] [[package]] name = "windows-strings" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2ba9642430ee452d5a7aa78d72907ebe8cfda358e8cb7918a2050581322f97" +checksum = "2a7ab927b2637c19b3dbe0965e75d8f2d30bdd697a1516191cad2ec4df8fb28a" dependencies = [ "windows-link", ] @@ -5237,7 +5237,7 @@ version = "0.39.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.9.1", ] [[package]] @@ -5316,9 +5316,9 @@ dependencies = [ [[package]] name = "yellowstone-grpc-client" -version = "5.0.0" +version = "5.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02edc8a1f2049482c96c397452e27e46036322d7e179c0709b2d54aac13722bf" +checksum = "036a48ab3619ae9353e7b1797fbffc100c9b075196b32308ac7d3364d6a62460" dependencies = [ "bytes", "futures", @@ -5330,9 +5330,9 @@ dependencies = [ [[package]] name = "yellowstone-grpc-proto" -version = "5.0.0" +version = "5.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b5c9aed52d059ee34dab8118fe0f115a3a1e52b2e6cc01d099dfd92e1c73e7a" +checksum = 
"aa8694ec221758f065002e93c2aa93ccad56989e884185b3e3e483d031fb3d61" dependencies = [ "anyhow", "bincode", diff --git a/Cargo.toml b/Cargo.toml index 09fd3e5..9869bce 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,36 +18,36 @@ publish = false [workspace.dependencies] -async-trait = "0.1.88" -clap = "4.5.7" -clap-verbosity-flag = "3.0.2" -futures = "0.3.31" -fxhash = "0.2.1" -http = "1.2.0" -http-body-util = "0.1" -hyper = "1.3.1" -hyper-util = "0.1" +async-trait = "~0.1.88" +clap = "^4.5.7" +clap-verbosity-flag = "^3.0.2" +futures = "~0.3.31" +fxhash = "~0.2.1" +http = "^1.2.0" +http-body-util = "~0.1" +hyper = "^1.3.1" +hyper-util = "~0.1.11" lazy_static = "~1.5.0" prometheus = "~0.13.0" prost = "~0.13.0" protobuf-src = "~1.1.0" -serde = "1.0.203" -serde_with = "3.8.1" -serde_yaml = "0.9.34" -solana-sdk = "~2.1.11" -tabled = "0.18.0" -thiserror = "1.0.61" -tokio = "^1" +serde = "~1.0.203" +serde_with = "^3.8.1" +serde_yaml = "~0.9.34" +solana-sdk = "~2.1.21" +tabled = "~0.18.0" +thiserror = "^1.0.61" +tokio = "1" tokio-stream = "~0.1.11" tonic = "~0.12.3" tonic-build = "~0.12.3" -tower = "0.5.2" -tracing = "0.1.41" -tracing-subscriber = { version = "0.3.18", features = ["env-filter"] } -uuid = { version = "1.16.0" } +tower = "~0.5.2" +tracing = "~0.1.41" +tracing-subscriber = { version = "~0.3.18", features = ["env-filter"] } +uuid = { version = "1" } yellowstone-fumarole-client = { path = "crates/yellowstone-fumarole-client" } -yellowstone-grpc-client = "5.0.0" -yellowstone-grpc-proto = "5.0.0" +yellowstone-grpc-client = "5" +yellowstone-grpc-proto = "5" [workspace.lints.clippy] clone_on_ref_ptr = "deny" From 03c6c0e19cff31445a0f405ea3044fba59829c90 Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Fri, 16 May 2025 14:40:37 -0400 Subject: [PATCH 24/56] v2: added deny.toml + updated README --- Cargo.lock | 92 ++++--- Cargo.toml | 2 +- README.md | 237 ++---------------- apps/yellowstone-fumarole-cli/Cargo.toml | 2 +- crates/yellowstone-fumarole-client/Cargo.toml | 
2 +- deny.toml | 30 +++ 6 files changed, 115 insertions(+), 250 deletions(-) create mode 100644 deny.toml diff --git a/Cargo.lock b/Cargo.lock index 180ce16..4ca6f06 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2483,9 +2483,9 @@ dependencies = [ [[package]] name = "prometheus" -version = "0.13.4" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d33c28a30771f7f96db69893f78b857f7450d7e0237e9c8fc6427a81bae7ed1" +checksum = "3ca5326d8d0b950a9acd87e6a3f94745394f62e4dae1b1ee22b2bc0c394af43a" dependencies = [ "cfg-if", "fnv", @@ -2493,7 +2493,7 @@ dependencies = [ "memchr", "parking_lot", "protobuf", - "thiserror", + "thiserror 2.0.12", ] [[package]] @@ -2550,9 +2550,14 @@ dependencies = [ [[package]] name = "protobuf" -version = "2.28.0" +version = "3.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "106dd99e98437432fed6519dedecfade6a06a73bb7b2a1e019fdd2bee5778d94" +checksum = "d65a1d4ddae7d8b5de68153b48f6aa3bba8cb002b243dbdbc55a5afbc98f99f4" +dependencies = [ + "once_cell", + "protobuf-support", + "thiserror 1.0.69", +] [[package]] name = "protobuf-src" @@ -2563,6 +2568,15 @@ dependencies = [ "autotools", ] +[[package]] +name = "protobuf-support" +version = "3.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e36c2f31e0a47f9280fb347ef5e461ffcd2c52dd520d8e216b52f93b0b0d7d6" +dependencies = [ + "thiserror 1.0.69", +] + [[package]] name = "qstring" version = "0.7.2" @@ -3182,7 +3196,7 @@ dependencies = [ "spl-token-2022", "spl-token-group-interface", "spl-token-metadata-interface", - "thiserror", + "thiserror 1.0.69", "zstd", ] @@ -3247,7 +3261,7 @@ dependencies = [ "ark-serialize", "bytemuck", "solana-program", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -3321,7 +3335,7 @@ dependencies = [ "bytemuck_derive", "curve25519-dalek 4.1.3", "solana-program", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -3472,7 +3486,7 @@ dependencies = [ "log", 
"reqwest", "solana-sdk", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -3583,7 +3597,7 @@ dependencies = [ "solana-stable-layout", "solana-sysvar-id", "solana-transaction-error", - "thiserror", + "thiserror 1.0.69", "wasm-bindgen", ] @@ -3667,7 +3681,7 @@ dependencies = [ "solana-type-overrides", "solana-vote", "solana_rbpf", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -3775,7 +3789,7 @@ dependencies = [ "solana-short-vec", "solana-signature", "solana-transaction-error", - "thiserror", + "thiserror 1.0.69", "wasm-bindgen", ] @@ -3800,7 +3814,7 @@ dependencies = [ "borsh 1.5.7", "libsecp256k1", "solana-define-syscall", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -3969,7 +3983,7 @@ dependencies = [ "spl-token-2022", "spl-token-group-interface", "spl-token-metadata-interface", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -3987,7 +4001,7 @@ dependencies = [ "solana-account-decoder-client-types", "solana-sdk", "solana-signature", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -4011,7 +4025,7 @@ dependencies = [ "serde", "serde_derive", "solana-sdk", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -4042,7 +4056,7 @@ dependencies = [ "solana-program", "solana-sdk", "subtle", - "thiserror", + "thiserror 1.0.69", "zeroize", ] @@ -4060,7 +4074,7 @@ dependencies = [ "rand 0.8.5", "rustc-demangle", "scroll", - "thiserror", + "thiserror 1.0.69", "winapi", ] @@ -4077,7 +4091,7 @@ dependencies = [ "solana-program", "spl-token", "spl-token-2022", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -4112,7 +4126,7 @@ dependencies = [ "quote", "sha2 0.10.9", "syn 2.0.101", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -4148,7 +4162,7 @@ dependencies = [ "num-traits", "solana-program", "spl-program-error-derive", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -4189,7 +4203,7 @@ dependencies = [ "num-traits", "num_enum", "solana-program", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -4213,7 +4227,7 @@ 
dependencies = [ "spl-token-metadata-interface", "spl-transfer-hook-interface", "spl-type-length-value", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -4398,7 +4412,16 @@ version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" dependencies = [ - "thiserror-impl", + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" +dependencies = [ + "thiserror-impl 2.0.12", ] [[package]] @@ -4412,6 +4435,17 @@ dependencies = [ "syn 2.0.101", ] +[[package]] +name = "thiserror-impl" +version = "2.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.101", +] + [[package]] name = "thread_local" version = "1.1.8" @@ -5248,7 +5282,7 @@ checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" [[package]] name = "yellowstone-fumarole-cli" -version = "0.1.0-pre.2+solana.2.1" +version = "0.1.0-2+solana.2.1" dependencies = [ "clap", "clap-verbosity-flag", @@ -5260,7 +5294,7 @@ dependencies = [ "serde_yaml", "solana-sdk", "tabled", - "thiserror", + "thiserror 1.0.69", "tokio", "tokio-stream", "tonic", @@ -5273,7 +5307,7 @@ dependencies = [ [[package]] name = "yellowstone-fumarole-client" -version = "0.2.0-pre.2+solana.2.1" +version = "0.2.0-2+solana.2.1" dependencies = [ "async-trait", "futures", @@ -5288,7 +5322,7 @@ dependencies = [ "serde_with", "serde_yaml", "solana-sdk", - "thiserror", + "thiserror 1.0.69", "tokio", "tokio-stream", "tonic", @@ -5322,7 +5356,7 @@ checksum = "036a48ab3619ae9353e7b1797fbffc100c9b075196b32308ac7d3364d6a62460" dependencies = [ "bytes", "futures", - "thiserror", + "thiserror 1.0.69", "tonic", 
"tonic-health", "yellowstone-grpc-proto", diff --git a/Cargo.toml b/Cargo.toml index 9869bce..85da71f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,7 +28,7 @@ http-body-util = "~0.1" hyper = "^1.3.1" hyper-util = "~0.1.11" lazy_static = "~1.5.0" -prometheus = "~0.13.0" +prometheus = "~0.14.0" prost = "~0.13.0" protobuf-src = "~1.1.0" serde = "~1.0.203" diff --git a/README.md b/README.md index 324aadc..195592b 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,15 @@ -# yellowstone-fumarole +# Yellowstone-Fumarole Public repo for Yellowstone-Fumarole ## Fume CLI -See more details in fume [README](fume/README.md) + +```bash +cargo install yellowstone-fumarole-cli +``` + +See [yellowstone fumarole cli documentation](apps/yellowstone-fumarole-cli/README.md) ## Rust client @@ -13,233 +18,29 @@ We offer a simple fumarole Rust client crate in `crates/yellowstone-fumarole-cli An example can be found in `examples/rust/client.rs`. See rust example [README](examples/rust/README.md) for more details. +## Examples -## How Fumarole Works - -Fumarole collects and stores geyser events from multiple RPC nodes, creating a unified log where each geyser event is stored with a unique ever increasing offset. - -To enhance scalability for both reads and writes, Fumarole distributes data across multiple partitions. - -Every shard has its private sequence generator that assigned unique offset to -each geyser event. - -### Consumer groups - -Fumarole supports consumer groups, designed to scale reads similarly to Kafka consumer groups. However, Fumarole specifically implements static consumer groups. - -When creating a consumer group, you can define the number of parallel readers, with a maximum limit of six. In a group with six members, each member is assigned a subset of blockchain event partitions. Every member maintains its own offset tracker, which records its position in the Fumarole log. - -To ensure proper operation, all members must remain active. 
If any member becomes stale, the entire consumer group is considered stale. - -Consumer group are bound to a commitment level: you must decide if you want to listen on -`PROCESSED`, `CONFIRMED` or `FINALIZED` data. - -### Parallel streaming - -Consumer group with size greater than `1` allows you to streaming geyser event in paralle on two distinct TCP connection. - -Each TCP connections returns different geyser event. - -Each member of your consumer group gets exclusive access on a equal amount of partitions in Fumarole. - -**Note**: If you create a consumer group of size > `1` you need to make sure -to consume from every member, otherwise you will eventually make your consumer group stale. - -### Stale consumer group - -Fumarole has a retention policy of two (`2`) hours. - -If you create a consumer group and one of the member still points to stale offset, the whole consumer group will become stale. - -Once a consumer group is stale you cannot use it anymore and must delete it using [fume](fume). - -### Consumer group offset commitment - -Fumarole uses automatic offset commitment and stores your offset every `500ms`. - -Later version of Fumarole will support manual offset commit which better precision and -removes the risk of skipping data during transmission failure. - -### Creating a consumer group - -When you crate a consumer group you must provide the following information: - -1. The name of the group -2. The size of the group: maximum of `6` -3. What you want to listen to: `account`, `tx` or `both` -4. Initial offset: - - LATEST: start at the peek of the log - - EARLIEST: start at the beginning of the log - - SLOT: You provide a slot number where you want to start at. Fumarole will clip you closest to that slot. - -As of right now, every customer account is limited to 15 consumer groups. 
- -To create a consumer group, use [fume create-cg](fume) command: - -```sh -fume --config create-cg --help -Usage: fume create-cg [OPTIONS] - - Creates a consumer group - -Options: - --name TEXT Consumer group name to subscribe to, if none - provided a random name will be generated - following the pattern - 'fume-'. - --size INTEGER Size of the consumer group - --commitment [processed|confirmed|finalized] - Commitment level [default: confirmed] - --include [all|account|tx] Include option [default: all] - --seek [earliest|latest|slot] Seek option [default: latest] - --help Show this message and exit. -``` - -### Consumer group size recommandation - -You don't have to over provisionned your consumer group. -Bigger consumer group can be more complex to manager and higher risk of having staleness. - -Whatever you want to consume always start with a consumer group of size `1` -and increase as you need. - -The most important criteria to consume data is still your bandwidth capacity and network latency with our datacenters. - - -### Consumer groups limitations - -- Maximum group size : 6 -- Number of consumer groups per customer account: 15 -- Event you can subscribe too: account updates and transactions -- Consumer group can not change commitment level once created. -- If one member of the consumer group become stale, the entire consumer group become stale. -- Stale consumer group cannot be recuperate -- Time before stale : TBD -- One TCP connection per member -- Because of partitionning, streaming geyser event are not sorted by slot. -- Fumarole deliver at-least one semantics +See [examples/rust folder](examples/rust). 
## Dragonsmouth vs Fumarole -|| gRPC | Persisted | Redundant | +Here's a comparison table between the Two + +|| gRPC | Persisted | Stateful | |-------|------|-----------|-----------| | Fumarole | ✅ | ✅ | ✅ | | Dragonsmouth | ✅ | ❌ | ❌ | -**Persisted** : If you drop your connection with Fumarole and reconnect within a reasonnable amount of -time, you won't loose any data. You restart right where you left off. +## Target audience -**Redundant** : Fumarole backend is fed by multiple RPC Nodes and data is stored across multiple servers -allowing redundancy and better read/write scalability. +Wallet Apps, dApps, indexer. -**gRPC**: Fumarole subscribe stream outs _Dragonsmouth_ `SubscribeUpdate` object so the learning curve -for fumarole stays low and can integrate easily into your code without too much changes. +Fumarole puts more emphasis on reliability and availability. +Compare to [yellowstone-grpc](https://github.com/rpcpool/yellowstone-grpc), slot latency will be higher. -**Note**: You don't have to do anything to benefits from redundancy and persistence. It is done -in the backend for you. - -### Filtering compatibility - -Fumarole supports the exact same accounts and transactions filter as _Dragonsmouth_. 
- -Here's a reminder of _Dragonsmouth_ gRPC `SubscribeRequest`: - -```proto -message SubscribeRequest { - map accounts = 1; - map slots = 2; - map transactions = 3; - map transactions_status = 10; - map blocks = 4; - map blocks_meta = 5; - map entry = 8; - optional CommitmentLevel commitment = 6; - repeated SubscribeRequestAccountsDataSlice accounts_data_slice = 7; - optional SubscribeRequestPing ping = 9; - optional uint64 from_slot = 11; -} -``` - -Here's Fumarole `SubscribeRequest` - -```proto -message SubscribeRequest { - string consumer_group_label = 1; // name of the consumer group - optional uint32 consumer_id = 2; // #num consumer group member, 0 by default - map accounts = 3; // Same as Dragonsmouth - map transactions = 4; // Same as Dragonsmouth -} -``` - -### Coding examples - -To see the difference between Dragonsmouth and fumarole compare two files [dragonsmouth.rs](examples/rust/src/bin/dragonsmouth.rs) and -[client.rs](examples/rust/src/bin/client.rs). - -More precisely the only difference between the two is how you subscribe. 
- - -Here is Dragonsmouth: - -```rust -let endpoint = config.endpoint.clone(); - -let mut geyser = GeyserGrpcBuilder::from_shared(endpoint) - .expect("Failed to parse endpoint") - .x_token(config.x_token) - .expect("x_token") - .tls_config(ClientTlsConfig::new().with_native_roots()) - .expect("tls_config") - .connect() - .await - .expect("Failed to connect to geyser"); - -// This request listen for all account updates and transaction updates -let request = SubscribeRequest { - accounts: HashMap::from( - [("f1".to_owned(), SubscribeRequestFilterAccounts::default())] - ), - transactions: HashMap::from( - [("f1".to_owned(), SubscribeRequestFilterTransactions::default())] - ), - ..Default::default() -}; -let (_sink, mut rx) = geyser.subscribe_with_request(Some(request)).await.expect("Failed to subscribe"); -``` - - -And here's the more concise Fumarole version: - - -```rust -let requests = yellowstone_fumarole_client::SubscribeRequestBuilder::default() - .build(args.cg_name); - -let fumarole = FumaroleClientBuilder::default().connect(config); -let rx = fumarole - .subscribe_with_request(request) - .await - .expect("Failed to subscribe to Fumarole service"); -``` +We are aiming at three (3) slot behind on avg for Fumarole as its main purpose is to provide more reliable and forgiving geyser data source. 
-If you want better control of your `SubscribeRequest` build process, you can fallback to the its _de-sugar_ form, without the builder pattern: - -```rust -let request = yellowstone_fumarole_client::proto::SubscribeRequest { - consumer_group_label: "my_group".to_string(), - accounts: HashMap::from( - [("f1".to_owned(), SubscribeRequestFilterAccounts::default())] - ), - transactions: HashMap::from( - [("f1".to_owned(), SubscribeRequestFilterTransactions::default())] - ), -} - -let fumarole = FumaroleClientBuilder::default().connect(config); -let rx = fumarole - .subscribe_with_request(request) - .await - .expect("Failed to subscribe to Fumarole service"); -``` \ No newline at end of file +**NOTE**: slot latency exclude client side latency to download and process the whole slot. +slot latency refers to the difference between the chain-tip and what Fumarole has register internally so-far. \ No newline at end of file diff --git a/apps/yellowstone-fumarole-cli/Cargo.toml b/apps/yellowstone-fumarole-cli/Cargo.toml index 68e6f34..64e96db 100644 --- a/apps/yellowstone-fumarole-cli/Cargo.toml +++ b/apps/yellowstone-fumarole-cli/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "yellowstone-fumarole-cli" description = "Yellowstone Fumarole CLI" -version = "0.1.0-pre.2+solana.2.1" +version = "0.1.0-2+solana.2.1" authors.workspace = true edition.workspace = true homepage.workspace = true diff --git a/crates/yellowstone-fumarole-client/Cargo.toml b/crates/yellowstone-fumarole-client/Cargo.toml index 55c5c09..b733bbe 100644 --- a/crates/yellowstone-fumarole-client/Cargo.toml +++ b/crates/yellowstone-fumarole-client/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "yellowstone-fumarole-client" description = "Yellowstone Fumarole Client" -version = "0.2.0-pre.2+solana.2.1" +version = "0.2.0-2+solana.2.1" authors = { workspace = true } edition = { workspace = true } homepage = { workspace = true } diff --git a/deny.toml b/deny.toml new file mode 100644 index 0000000..fbd4a4b --- /dev/null +++ 
b/deny.toml @@ -0,0 +1,30 @@ +[graph] +all-features = true + +[advisories] +ignore = [ + # Advisory: https://rustsec.org/advisories/RUSTSEC-2022-0093 + # Versions of `ed25519-dalek` prior to v2.0 model private and public keys as + # separate types which can be assembled into a `Keypair`, and also provide APIs + # for serializing and deserializing 64-byte private/public keypairs. + # + # Such APIs and serializations are inherently unsafe as the public key is one of + # the inputs used in the deterministic computation of the `S` part of the signature, + # but not in the `R` value. An adversary could somehow use the signing function as + # an oracle that allows arbitrary public keys as input can obtain two signatures + # for the same message sharing the same `R` and only differ on the `S` part. + # + # Unfortunately, when this happens, one can easily extract the private key. + "RUSTSEC-2022-0093", + + # Timing variability of any kind is problematic when working with potentially secret values such as + # elliptic curve scalars, and such issues can potentially leak private keys and other secrets. Such a + # problem was recently discovered in `curve25519-dalek`. 
+ "RUSTSEC-2024-0344", + + # derivative 2.2.0 registry+https://github.com/rust-lang/crates.io-index + "RUSTSEC-2024-0388", + + # paste 1.0.15 + "RUSTSEC-2024-0436", +] From 8da954eb138de7ec1b27e472534634458fbf463e Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Fri, 16 May 2025 14:43:04 -0400 Subject: [PATCH 25/56] v2: pre-2 cargo versionning --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- apps/yellowstone-fumarole-cli/Cargo.toml | 2 +- crates/yellowstone-fumarole-client/Cargo.toml | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4ca6f06..a32256e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5282,7 +5282,7 @@ checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" [[package]] name = "yellowstone-fumarole-cli" -version = "0.1.0-2+solana.2.1" +version = "0.1.0-pre.2+solana.2.1" dependencies = [ "clap", "clap-verbosity-flag", @@ -5307,7 +5307,7 @@ dependencies = [ [[package]] name = "yellowstone-fumarole-client" -version = "0.2.0-2+solana.2.1" +version = "0.2.0-pre.2+solana.2.1" dependencies = [ "async-trait", "futures", diff --git a/Cargo.toml b/Cargo.toml index 85da71f..4c5f068 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -45,7 +45,7 @@ tower = "~0.5.2" tracing = "~0.1.41" tracing-subscriber = { version = "~0.3.18", features = ["env-filter"] } uuid = { version = "1" } -yellowstone-fumarole-client = { path = "crates/yellowstone-fumarole-client" } +yellowstone-fumarole-client = { path = "crates/yellowstone-fumarole-client", version = "0.2.0-pre.2+solana.2.1" } yellowstone-grpc-client = "5" yellowstone-grpc-proto = "5" diff --git a/apps/yellowstone-fumarole-cli/Cargo.toml b/apps/yellowstone-fumarole-cli/Cargo.toml index 64e96db..68e6f34 100644 --- a/apps/yellowstone-fumarole-cli/Cargo.toml +++ b/apps/yellowstone-fumarole-cli/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "yellowstone-fumarole-cli" description = "Yellowstone Fumarole CLI" -version = "0.1.0-2+solana.2.1" +version = "0.1.0-pre.2+solana.2.1" 
authors.workspace = true edition.workspace = true homepage.workspace = true diff --git a/crates/yellowstone-fumarole-client/Cargo.toml b/crates/yellowstone-fumarole-client/Cargo.toml index b733bbe..55c5c09 100644 --- a/crates/yellowstone-fumarole-client/Cargo.toml +++ b/crates/yellowstone-fumarole-client/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "yellowstone-fumarole-client" description = "Yellowstone Fumarole Client" -version = "0.2.0-2+solana.2.1" +version = "0.2.0-pre.2+solana.2.1" authors = { workspace = true } edition = { workspace = true } homepage = { workspace = true } From ebfa98a81a24d7c1829b08bc5a80d174ea9f8071 Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Fri, 16 May 2025 15:10:16 -0400 Subject: [PATCH 26/56] better doc in yellowstone-fumarole-client --- apps/yellowstone-fumarole-cli/src/bin/fume.rs | 11 +- crates/yellowstone-fumarole-client/src/lib.rs | 150 +++++++++++++++--- .../src/runtime/mod.rs | 2 + .../src/runtime/tokio.rs | 3 + examples/rust/Cargo.toml | 2 +- .../bin/{client.rs => example-fumarole.rs} | 64 +++++--- 6 files changed, 175 insertions(+), 57 deletions(-) rename examples/rust/src/bin/{client.rs => example-fumarole.rs} (61%) diff --git a/apps/yellowstone-fumarole-cli/src/bin/fume.rs b/apps/yellowstone-fumarole-cli/src/bin/fume.rs index 958611e..67a742c 100644 --- a/apps/yellowstone-fumarole-cli/src/bin/fume.rs +++ b/apps/yellowstone-fumarole-cli/src/bin/fume.rs @@ -646,8 +646,9 @@ async fn main() { .init(); let maybe_config = args.config; - let config = if let Some(config_path) = maybe_config { - std::fs::read_to_string(&config_path).expect("Failed to read config file") + let config_file = if let Some(config_path) = maybe_config { + std::fs::File::open(config_path.clone()) + .unwrap_or_else(|_| panic!("Failed to read config file at {config_path:?}")) } else { let mut default_config_path = home_dir().expect("Failed to get home directory"); default_config_path.push(".fumarole"); @@ -656,11 +657,11 @@ async fn main() { let 
config_path = std::env::var(FUMAROLE_CONFIG_ENV) .map(PathBuf::from) .unwrap_or(default_config_path); - std::fs::read_to_string(&config_path) + std::fs::File::open(config_path.clone()) .unwrap_or_else(|_| panic!("Failed to read config file at {config_path:?}")) }; - let config = serde_yaml::from_str::(config.as_str()) - .expect("failed to parse fumarole config"); + let config: FumaroleConfig = + serde_yaml::from_reader(config_file).expect("failed to parse fumarole config"); let fumarole_client = FumaroleClient::connect(config.clone()) .await diff --git a/crates/yellowstone-fumarole-client/src/lib.rs b/crates/yellowstone-fumarole-client/src/lib.rs index b11d274..9b29a73 100644 --- a/crates/yellowstone-fumarole-client/src/lib.rs +++ b/crates/yellowstone-fumarole-client/src/lib.rs @@ -19,7 +19,9 @@ //! //! ## Create a `FumaroleClient` //! -//! ```rust +//! To create a `FumaroleClient`, you need to provide a configuration object. +//! +//! ```ignore //! use yellowstone_fumarole_client::FumaroleClient; //! use yellowstone_fumarole_client::config::FumaroleConfig; //! @@ -37,7 +39,12 @@ //! } //! ``` //! -//! **NOTE**: The struct `FumaroleConfig` supports deserialization from a YAML file. +//! The prefered way to create `FumaroleConfig` is use `serde_yaml` to deserialize from a YAML file. +//! +//! ```ignore +//! let config_file = std::fs::File::open("path/to/config.yaml").unwrap(); +//! let config: FumaroleConfig = serde_yaml::from_reader(config_file).unwrap(); +//! ``` //! //! Here's an example of a YAML file: //! @@ -45,47 +52,138 @@ //! endpoint: https://example.com //! x-token: 00000000-0000-0000-0000-000000000000 //! ``` +//! +//! //! ## Dragonsmouth-like Subscribe //! //! ```rust //! use { +//! clap::Parser, +//! solana_sdk::{bs58, pubkey::Pubkey}, +//! std::{collections::HashMap, path::PathBuf}, //! yellowstone_fumarole_client::{ -//! FumaroleClient, config::FumaroleConfig, DragonsmouthAdapterSession +//! 
config::FumaroleConfig, DragonsmouthAdapterSession, FumaroleClient, //! }, //! yellowstone_grpc_proto::geyser::{ -//! SubscribeRequest, SubscribeRequestFilterAccounts, -//! SubscribeRequestFilterTransactions, -//! }, +//! subscribe_update::UpdateOneof, SubscribeRequest, +//! SubscribeRequestFilterTransactions, SubscribeUpdateAccount, SubscribeUpdateTransaction, +//! }, //! }; //! -//! #[tokio::main] -//! async fn main() { -//! let config = FumaroleConfig { -//! endpoint: "https://example.com".to_string(), -//! x_token: Some("00000000-0000-0000-0000-000000000000".to_string()), -//! max_decoding_message_size_bytes: FumaroleConfig::default_max_decoding_message_size_bytes(), -//! x_metadata: Default::default(), -//! }; +//! #[derive(Debug, Clone, Parser)] +//! #[clap(author, version, about = "Yellowstone gRPC ScyllaDB Tool")] +//! struct Args { +//! /// Path to static config file +//! #[clap(long)] +//! config: PathBuf, //! -//! let mut client = FumaroleClient::connect(config).await.unwrap(); +//! #[clap(subcommand)] +//! action: Action, +//! } //! +//! #[derive(Debug, Clone, Parser)] +//! enum Action { +//! /// Subscribe to fumarole events +//! Subscribe(SubscribeArgs), +//! } +//! +//! #[derive(Debug, Clone, Parser)] +//! struct SubscribeArgs { +//! /// Name of the persistent subscriber to use +//! #[clap(long)] +//! name: String, +//! } +//! +//! fn summarize_account(account: SubscribeUpdateAccount) -> Option { +//! let slot = account.slot; +//! let account = account.account?; +//! let pubkey = Pubkey::try_from(account.pubkey).expect("Failed to parse pubkey"); +//! let owner = Pubkey::try_from(account.owner).expect("Failed to parse owner"); +//! Some(format!("account,{},{},{}", slot, pubkey, owner)) +//! } +//! +//! fn summarize_tx(tx: SubscribeUpdateTransaction) -> Option { +//! let slot = tx.slot; +//! let tx = tx.transaction?; +//! let sig = bs58::encode(tx.signature).into_string(); +//! Some(format!("tx,{slot},{sig}")) +//! } +//! +//! 
async fn subscribe(args: SubscribeArgs, config: FumaroleConfig) { +//! // This request listen for all account updates and transaction updates //! let request = SubscribeRequest { -//! accounts: HashMap::from([("f1".to_owned(), SubscribeRequestFilterAccounts::default())]), -//! transactions: HashMap::from([("f1".to_owned(), SubscribeRequestFilterTransactions::default())]), +//! transactions: HashMap::from([( +//! "f1".to_owned(), +//! SubscribeRequestFilterTransactions::default(), +//! )]), //! ..Default::default() //! }; //! -//! let dragonsmouth_adapter = client.dragonsmouth_subscribe("my-consumer-group", request).await.unwrap(); +//! let mut fumarole_client = FumaroleClient::connect(config) +//! .await +//! .expect("Failed to connect to fumarole"); +//! +//! let dragonsmouth_session = fumarole_client +//! .dragonsmouth_subscribe(args.name, request) +//! .await +//! .expect("Failed to subscribe"); //! //! let DragonsmouthAdapterSession { -//! sink: _, // Channel to update [`SubscribeRequest`] requests to the fumarole service -//! mut source, // Channel to receive updates from the fumarole service -//! runtime_handle: _, // Handle to the fumarole session client runtime -//! } = dragonsmouth_adapter; -//! -//! while let Some(result) = source.recv().await { -//! let event = result.expect("Failed to receive event"); -//! println!("{:?}", event); +//! sink: _, +//! mut source, +//! mut fumarole_handle, +//! } = dragonsmouth_session; +//! +//! loop { +//! +//! tokio::select! { +//! result = &mut fumarole_handle => { +//! eprintln!("Fumarole handle closed: {:?}", result); +//! break; +//! } +//! maybe = source.recv() => { +//! match maybe { +//! None => { +//! eprintln!("Source closed"); +//! break; +//! } +//! Some(result) => { +//! let event = result.expect("Failed to receive event"); +//! let message = if let Some(oneof) = event.update_oneof { +//! match oneof { +//! UpdateOneof::Account(account_update) => { +//! summarize_account(account_update) +//! } +//! 
UpdateOneof::Transaction(tx) => { +//! summarize_tx(tx) +//! } +//! _ => None, +//! } +//! } else { +//! None +//! }; +//! +//! if let Some(message) = message { +//! println!("{}", message); +//! } +//! } +//! } +//! } +//! } +//! } +//! } +//! +//! #[tokio::main] +//! async fn main() { +//! let args: Args = Args::parse(); +//! let config = std::fs::read_to_string(&args.config).expect("Failed to read config file"); +//! let config: FumaroleConfig = +//! serde_yaml::from_str(&config).expect("Failed to parse config file"); +//! +//! match args.action { +//! Action::Subscribe(sub_args) => { +//! subscribe(sub_args, config).await; +//! } //! } //! } //! ``` diff --git a/crates/yellowstone-fumarole-client/src/runtime/mod.rs b/crates/yellowstone-fumarole-client/src/runtime/mod.rs index e8cc88f..4487980 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/mod.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/mod.rs @@ -430,6 +430,7 @@ impl FumaroleSM { } } + #[allow(dead_code)] pub fn slot_status_update_queue_len(&self) -> usize { self.slot_status_update_queue.len() } @@ -460,6 +461,7 @@ impl FumaroleSM { } } + #[allow(dead_code)] pub fn processed_offset_queue_len(&self) -> usize { self.processed_offset.len() } diff --git a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs index 3acdaf3..eb53235 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs @@ -128,6 +128,7 @@ enum LoopInstruction { } impl TokioFumeDragonsmouthRuntime { + #[allow(dead_code)] const RUNTIME_NAME: &'static str = "tokio"; fn handle_control_response(&mut self, control_response: proto::ControlResponse) { @@ -533,6 +534,7 @@ pub(crate) struct DataPlaneConn { } impl GrpcDownloadTaskRunner { + #[allow(dead_code)] const RUNTIME_NAME: &'static str = "tokio_grpc_task_runner"; #[allow(clippy::too_many_arguments)] @@ -834,6 +836,7 @@ pub(crate) struct 
CompletedDownloadBlockTask { } impl GrpcDownloadBlockTaskRun { + #[allow(dead_code)] const RUNTIME_NAME: &'static str = "tokio_grpc_task_run"; async fn run(mut self) -> Result { diff --git a/examples/rust/Cargo.toml b/examples/rust/Cargo.toml index 32072e0..33f1983 100644 --- a/examples/rust/Cargo.toml +++ b/examples/rust/Cargo.toml @@ -11,7 +11,7 @@ keywords = { workspace = true } publish = { workspace = true } [[bin]] -name = "client" +name = "example-fumarole" [[bin]] name = "dragonsmouth" diff --git a/examples/rust/src/bin/client.rs b/examples/rust/src/bin/example-fumarole.rs similarity index 61% rename from examples/rust/src/bin/client.rs rename to examples/rust/src/bin/example-fumarole.rs index 5db8ec6..6fe6c5c 100644 --- a/examples/rust/src/bin/client.rs +++ b/examples/rust/src/bin/example-fumarole.rs @@ -6,8 +6,8 @@ use { config::FumaroleConfig, DragonsmouthAdapterSession, FumaroleClient, }, yellowstone_grpc_proto::geyser::{ - subscribe_update::UpdateOneof, SubscribeRequest, SubscribeRequestFilterAccounts, - SubscribeRequestFilterTransactions, SubscribeUpdateAccount, SubscribeUpdateTransaction, + subscribe_update::UpdateOneof, SubscribeRequest, SubscribeRequestFilterTransactions, + SubscribeUpdateAccount, SubscribeUpdateTransaction, }, }; @@ -30,13 +30,9 @@ enum Action { #[derive(Debug, Clone, Parser)] struct SubscribeArgs { - /// Name of the consumer group to subscribe to + /// Name of the persistent subscriber to use #[clap(long)] - cg_name: String, - - /// Number of parallel streams to open: must be lower or equal to the size of your consumer group, otherwise the program will return an error - #[clap(long)] - par: Option, + name: String, } fn summarize_account(account: SubscribeUpdateAccount) -> Option { @@ -57,7 +53,6 @@ fn summarize_tx(tx: SubscribeUpdateTransaction) -> Option { async fn subscribe(args: SubscribeArgs, config: FumaroleConfig) { // This request listen for all account updates and transaction updates let request = SubscribeRequest { - 
accounts: HashMap::from([("f1".to_owned(), SubscribeRequestFilterAccounts::default())]), transactions: HashMap::from([( "f1".to_owned(), SubscribeRequestFilterTransactions::default(), @@ -70,31 +65,50 @@ async fn subscribe(args: SubscribeArgs, config: FumaroleConfig) { .expect("Failed to connect to fumarole"); let dragonsmouth_session = fumarole_client - .dragonsmouth_subscribe(args.cg_name, request) + .dragonsmouth_subscribe(args.name, request) .await .expect("Failed to subscribe"); let DragonsmouthAdapterSession { sink: _, mut source, - fumarole_handle: _, + mut fumarole_handle, } = dragonsmouth_session; - while let Some(result) = source.recv().await { - let event = result.expect("Failed to receive event"); - - let message = if let Some(oneof) = event.update_oneof { - match oneof { - UpdateOneof::Account(account_update) => summarize_account(account_update), - UpdateOneof::Transaction(tx) => summarize_tx(tx), - _ => None, + loop { + tokio::select! { + result = &mut fumarole_handle => { + eprintln!("Fumarole handle closed: {:?}", result); + break; + } + maybe = source.recv() => { + match maybe { + None => { + eprintln!("Source closed"); + break; + } + Some(result) => { + let event = result.expect("Failed to receive event"); + let message = if let Some(oneof) = event.update_oneof { + match oneof { + UpdateOneof::Account(account_update) => { + summarize_account(account_update) + } + UpdateOneof::Transaction(tx) => { + summarize_tx(tx) + } + _ => None, + } + } else { + None + }; + + if let Some(message) = message { + println!("{}", message); + } + } + } } - } else { - None - }; - - if let Some(message) = message { - println!("{}", message); } } } From 3939678f84327eadebbdeda65db6a61d78a9068c Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Fri, 16 May 2025 15:14:34 -0400 Subject: [PATCH 27/56] v2: README --- README.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/README.md b/README.md index 195592b..c5a7877 100644 --- a/README.md +++ b/README.md @@ 
-18,6 +18,16 @@ We offer a simple fumarole Rust client crate in `crates/yellowstone-fumarole-cli An example can be found in `examples/rust/client.rs`. See rust example [README](examples/rust/README.md) for more details. +## Must know: Staleness + + +Unlike _Dragonsmouth_, you can consume data at your own pace, since Fumarole uses a polling approach at its core. + +However, it is important you don't over commit to the amount of data you want to subcribe to. + +Fumarole does not store data forever and if you are too slow to make progress in your subscriber session, your +TCP connection will be interrupt because of staleness. + ## Examples See [examples/rust folder](examples/rust). From f25da572ffdac0167af5fb4b1c6ab7e706f1b23d Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Wed, 21 May 2025 10:29:30 -0400 Subject: [PATCH 28/56] first draft of python sdk --- .../src/runtime/mod.rs | 2 +- python/yellowstone-fumarole-client/README.md | 0 .../yellowstone-fumarole-client/poetry.lock | 450 ++++++++++++++++ .../pyproject.toml | 33 ++ .../scripts/build.sh | 50 ++ .../tests/__init__.py | 0 .../tests/test_fumarole_sm.py | 122 +++++ .../yellowstone_api/__init__.py | 0 .../yellowstone_api/fumarole_v2_pb2.py | 122 +++++ .../yellowstone_api/fumarole_v2_pb2.pyi | 328 ++++++++++++ .../yellowstone_api/fumarole_v2_pb2_grpc.py | 400 ++++++++++++++ .../yellowstone_api/geyser_pb2.py | 144 +++++ .../yellowstone_api/geyser_pb2.pyi | 501 ++++++++++++++++++ .../yellowstone_api/geyser_pb2_grpc.py | 355 +++++++++++++ .../yellowstone_api/solana_storage_pb2.py | 75 +++ .../yellowstone_api/solana_storage_pb2.pyi | 238 +++++++++ .../solana_storage_pb2_grpc.py | 24 + .../yellowstone_fumarole_client/__init__.py | 1 + .../runtime/__init__.py | 2 + .../runtime/state_machine.py | 321 +++++++++++ 20 files changed, 3167 insertions(+), 1 deletion(-) create mode 100644 python/yellowstone-fumarole-client/README.md create mode 100644 python/yellowstone-fumarole-client/poetry.lock create mode 100644 
python/yellowstone-fumarole-client/pyproject.toml create mode 100755 python/yellowstone-fumarole-client/scripts/build.sh create mode 100644 python/yellowstone-fumarole-client/tests/__init__.py create mode 100644 python/yellowstone-fumarole-client/tests/test_fumarole_sm.py create mode 100644 python/yellowstone-fumarole-client/yellowstone_api/__init__.py create mode 100644 python/yellowstone-fumarole-client/yellowstone_api/fumarole_v2_pb2.py create mode 100644 python/yellowstone-fumarole-client/yellowstone_api/fumarole_v2_pb2.pyi create mode 100644 python/yellowstone-fumarole-client/yellowstone_api/fumarole_v2_pb2_grpc.py create mode 100644 python/yellowstone-fumarole-client/yellowstone_api/geyser_pb2.py create mode 100644 python/yellowstone-fumarole-client/yellowstone_api/geyser_pb2.pyi create mode 100644 python/yellowstone-fumarole-client/yellowstone_api/geyser_pb2_grpc.py create mode 100644 python/yellowstone-fumarole-client/yellowstone_api/solana_storage_pb2.py create mode 100644 python/yellowstone-fumarole-client/yellowstone_api/solana_storage_pb2.pyi create mode 100644 python/yellowstone-fumarole-client/yellowstone_api/solana_storage_pb2_grpc.py create mode 100644 python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py create mode 100644 python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/__init__.py create mode 100644 python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/state_machine.py diff --git a/crates/yellowstone-fumarole-client/src/runtime/mod.rs b/crates/yellowstone-fumarole-client/src/runtime/mod.rs index 4487980..ac60aab 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/mod.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/mod.rs @@ -168,7 +168,7 @@ pub(crate) struct FumaroleSM { pub max_slot_detected: Slot, /// Unprocessed blockchain events - unprocessed_blockchain_event: VecDeque<(u64, proto::BlockchainEvent)>, + unprocessed_blockchain_event: VecDeque<(FumeSessionSequence, 
proto::BlockchainEvent)>, sequence: u64, diff --git a/python/yellowstone-fumarole-client/README.md b/python/yellowstone-fumarole-client/README.md new file mode 100644 index 0000000..e69de29 diff --git a/python/yellowstone-fumarole-client/poetry.lock b/python/yellowstone-fumarole-client/poetry.lock new file mode 100644 index 0000000..32ed653 --- /dev/null +++ b/python/yellowstone-fumarole-client/poetry.lock @@ -0,0 +1,450 @@ +# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. + +[[package]] +name = "base58" +version = "2.1.1" +description = "Base58 and Base58Check implementation." +optional = false +python-versions = ">=3.5" +files = [ + {file = "base58-2.1.1-py3-none-any.whl", hash = "sha256:11a36f4d3ce51dfc1043f3218591ac4eb1ceb172919cebe05b52a5bcc8d245c2"}, + {file = "base58-2.1.1.tar.gz", hash = "sha256:c5d0cb3f5b6e81e8e35da5754388ddcc6d0d14b6c6a132cb93d69ed580a7278c"}, +] + +[package.extras] +tests = ["PyHamcrest (>=2.0.2)", "mypy", "pytest (>=4.6)", "pytest-benchmark", "pytest-cov", "pytest-flake8"] + +[[package]] +name = "black" +version = "24.10.0" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"}, + {file = "black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"}, + {file = "black-24.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:649fff99a20bd06c6f727d2a27f401331dc0cc861fb69cde910fe95b01b5928f"}, + {file = "black-24.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe4d6476887de70546212c99ac9bd803d90b42fc4767f058a0baa895013fbb3e"}, + {file = "black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad"}, + {file = "black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50"}, + {file = "black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392"}, + {file = "black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175"}, + {file = "black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3"}, + {file = "black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65"}, + {file = "black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f"}, + {file = "black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8"}, + {file = "black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981"}, + {file = "black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b"}, + {file = "black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2"}, + {file = "black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b"}, + {file = "black-24.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:17374989640fbca88b6a448129cd1745c5eb8d9547b464f281b251dd00155ccd"}, + {file = "black-24.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:63f626344343083322233f175aaf372d326de8436f5928c042639a4afbbf1d3f"}, + {file = "black-24.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfa1d0cb6200857f1923b602f978386a3a2758a65b52e0950299ea014be6800"}, + {file = "black-24.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cd9c95431d94adc56600710f8813ee27eea544dd118d45896bb734e9d7a0dc7"}, + {file = "black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d"}, + {file = "black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.10)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "click" +version = "8.2.1" +description = "Composable command line interface toolkit" +optional = false 
+python-versions = ">=3.10" +files = [ + {file = "click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b"}, + {file = "click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, + {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "grpcio" +version = "1.71.0" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.9" +files = [ + {file = "grpcio-1.71.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:c200cb6f2393468142eb50ab19613229dcc7829b5ccee8b658a36005f6669fdd"}, + {file = "grpcio-1.71.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b2266862c5ad664a380fbbcdbdb8289d71464c42a8c29053820ee78ba0119e5d"}, + {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = 
"sha256:0ab8b2864396663a5b0b0d6d79495657ae85fa37dcb6498a2669d067c65c11ea"}, + {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c30f393f9d5ff00a71bb56de4aa75b8fe91b161aeb61d39528db6b768d7eac69"}, + {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f250ff44843d9a0615e350c77f890082102a0318d66a99540f54769c8766ab73"}, + {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e6d8de076528f7c43a2f576bc311799f89d795aa6c9b637377cc2b1616473804"}, + {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9b91879d6da1605811ebc60d21ab6a7e4bae6c35f6b63a061d61eb818c8168f6"}, + {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f71574afdf944e6652203cd1badcda195b2a27d9c83e6d88dc1ce3cfb73b31a5"}, + {file = "grpcio-1.71.0-cp310-cp310-win32.whl", hash = "sha256:8997d6785e93308f277884ee6899ba63baafa0dfb4729748200fcc537858a509"}, + {file = "grpcio-1.71.0-cp310-cp310-win_amd64.whl", hash = "sha256:7d6ac9481d9d0d129224f6d5934d5832c4b1cddb96b59e7eba8416868909786a"}, + {file = "grpcio-1.71.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:d6aa986318c36508dc1d5001a3ff169a15b99b9f96ef5e98e13522c506b37eef"}, + {file = "grpcio-1.71.0-cp311-cp311-macosx_10_14_universal2.whl", hash = "sha256:d2c170247315f2d7e5798a22358e982ad6eeb68fa20cf7a820bb74c11f0736e7"}, + {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:e6f83a583ed0a5b08c5bc7a3fe860bb3c2eac1f03f1f63e0bc2091325605d2b7"}, + {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4be74ddeeb92cc87190e0e376dbc8fc7736dbb6d3d454f2fa1f5be1dee26b9d7"}, + {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dd0dfbe4d5eb1fcfec9490ca13f82b089a309dc3678e2edabc144051270a66e"}, + {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:a2242d6950dc892afdf9e951ed7ff89473aaf744b7d5727ad56bdaace363722b"}, + {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0fa05ee31a20456b13ae49ad2e5d585265f71dd19fbd9ef983c28f926d45d0a7"}, + {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3d081e859fb1ebe176de33fc3adb26c7d46b8812f906042705346b314bde32c3"}, + {file = "grpcio-1.71.0-cp311-cp311-win32.whl", hash = "sha256:d6de81c9c00c8a23047136b11794b3584cdc1460ed7cbc10eada50614baa1444"}, + {file = "grpcio-1.71.0-cp311-cp311-win_amd64.whl", hash = "sha256:24e867651fc67717b6f896d5f0cac0ec863a8b5fb7d6441c2ab428f52c651c6b"}, + {file = "grpcio-1.71.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:0ff35c8d807c1c7531d3002be03221ff9ae15712b53ab46e2a0b4bb271f38537"}, + {file = "grpcio-1.71.0-cp312-cp312-macosx_10_14_universal2.whl", hash = "sha256:b78a99cd1ece4be92ab7c07765a0b038194ded2e0a26fd654591ee136088d8d7"}, + {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:dc1a1231ed23caac1de9f943d031f1bc38d0f69d2a3b243ea0d664fc1fbd7fec"}, + {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6beeea5566092c5e3c4896c6d1d307fb46b1d4bdf3e70c8340b190a69198594"}, + {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5170929109450a2c031cfe87d6716f2fae39695ad5335d9106ae88cc32dc84c"}, + {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5b08d03ace7aca7b2fadd4baf291139b4a5f058805a8327bfe9aece7253b6d67"}, + {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f903017db76bf9cc2b2d8bdd37bf04b505bbccad6be8a81e1542206875d0e9db"}, + {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:469f42a0b410883185eab4689060a20488a1a0a00f8bbb3cbc1061197b4c5a79"}, + {file = "grpcio-1.71.0-cp312-cp312-win32.whl", hash = "sha256:ad9f30838550695b5eb302add33f21f7301b882937460dd24f24b3cc5a95067a"}, + {file = 
"grpcio-1.71.0-cp312-cp312-win_amd64.whl", hash = "sha256:652350609332de6dac4ece254e5d7e1ff834e203d6afb769601f286886f6f3a8"}, + {file = "grpcio-1.71.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:cebc1b34ba40a312ab480ccdb396ff3c529377a2fce72c45a741f7215bfe8379"}, + {file = "grpcio-1.71.0-cp313-cp313-macosx_10_14_universal2.whl", hash = "sha256:85da336e3649a3d2171e82f696b5cad2c6231fdd5bad52616476235681bee5b3"}, + {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f9a412f55bb6e8f3bb000e020dbc1e709627dcb3a56f6431fa7076b4c1aab0db"}, + {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47be9584729534660416f6d2a3108aaeac1122f6b5bdbf9fd823e11fe6fbaa29"}, + {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c9c80ac6091c916db81131d50926a93ab162a7e97e4428ffc186b6e80d6dda4"}, + {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:789d5e2a3a15419374b7b45cd680b1e83bbc1e52b9086e49308e2c0b5bbae6e3"}, + {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:1be857615e26a86d7363e8a163fade914595c81fec962b3d514a4b1e8760467b"}, + {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:a76d39b5fafd79ed604c4be0a869ec3581a172a707e2a8d7a4858cb05a5a7637"}, + {file = "grpcio-1.71.0-cp313-cp313-win32.whl", hash = "sha256:74258dce215cb1995083daa17b379a1a5a87d275387b7ffe137f1d5131e2cfbb"}, + {file = "grpcio-1.71.0-cp313-cp313-win_amd64.whl", hash = "sha256:22c3bc8d488c039a199f7a003a38cb7635db6656fa96437a8accde8322ce2366"}, + {file = "grpcio-1.71.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c6a0a28450c16809f94e0b5bfe52cabff63e7e4b97b44123ebf77f448534d07d"}, + {file = "grpcio-1.71.0-cp39-cp39-macosx_10_14_universal2.whl", hash = "sha256:a371e6b6a5379d3692cc4ea1cb92754d2a47bdddeee755d3203d1f84ae08e03e"}, + {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = 
"sha256:39983a9245d37394fd59de71e88c4b295eb510a3555e0a847d9965088cdbd033"}, + {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9182e0063112e55e74ee7584769ec5a0b4f18252c35787f48738627e23a62b97"}, + {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693bc706c031aeb848849b9d1c6b63ae6bcc64057984bb91a542332b75aa4c3d"}, + {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:20e8f653abd5ec606be69540f57289274c9ca503ed38388481e98fa396ed0b41"}, + {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8700a2a57771cc43ea295296330daaddc0d93c088f0a35cc969292b6db959bf3"}, + {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d35a95f05a8a2cbe8e02be137740138b3b2ea5f80bd004444e4f9a1ffc511e32"}, + {file = "grpcio-1.71.0-cp39-cp39-win32.whl", hash = "sha256:f9c30c464cb2ddfbc2ddf9400287701270fdc0f14be5f08a1e3939f1e749b455"}, + {file = "grpcio-1.71.0-cp39-cp39-win_amd64.whl", hash = "sha256:63e41b91032f298b3e973b3fa4093cbbc620c875e2da7b93e249d4728b54559a"}, + {file = "grpcio-1.71.0.tar.gz", hash = "sha256:2b85f7820475ad3edec209d3d89a7909ada16caab05d3f2e08a7e8ae3200a55c"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.71.0)"] + +[[package]] +name = "grpcio-tools" +version = "1.71.0" +description = "Protobuf code generator for gRPC" +optional = false +python-versions = ">=3.9" +files = [ + {file = "grpcio_tools-1.71.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:f4ad7f0d756546902597053d70b3af2606fbd70d7972876cd75c1e241d22ae00"}, + {file = "grpcio_tools-1.71.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:64bdb291df61cf570b5256777ad5fe2b1db6d67bc46e55dc56a0a862722ae329"}, + {file = "grpcio_tools-1.71.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:8dd9795e982d77a4b496f7278b943c2563d9afde2069cdee78c111a40cc4d675"}, + {file = "grpcio_tools-1.71.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c1b5860c41a36b26fec4f52998f1a451d0525a5c9a4fb06b6ea3e9211abdb925"}, + {file = "grpcio_tools-1.71.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3059c14035e5dc03d462f261e5900b9a077fd1a36976c3865b8507474520bad4"}, + {file = "grpcio_tools-1.71.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f360981b215b1d5aff9235b37e7e1826246e35bbac32a53e41d4e990a37b8f4c"}, + {file = "grpcio_tools-1.71.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bfe3888c3bbe16a5aa39409bc38744a31c0c3d2daa2b0095978c56e106c85b42"}, + {file = "grpcio_tools-1.71.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:145985c0bf12131f0a1503e65763e0f060473f7f3928ed1ff3fb0e8aad5bc8ac"}, + {file = "grpcio_tools-1.71.0-cp310-cp310-win32.whl", hash = "sha256:82c430edd939bb863550ee0fecf067d78feff828908a1b529bbe33cc57f2419c"}, + {file = "grpcio_tools-1.71.0-cp310-cp310-win_amd64.whl", hash = "sha256:83e90724e3f02415c628e4ead1d6ffe063820aaaa078d9a39176793df958cd5a"}, + {file = "grpcio_tools-1.71.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:1f19b16b49afa5d21473f49c0966dd430c88d089cd52ac02404d8cef67134efb"}, + {file = "grpcio_tools-1.71.0-cp311-cp311-macosx_10_14_universal2.whl", hash = "sha256:459c8f5e00e390aecd5b89de67deb3ec7188a274bc6cb50e43cef35ab3a3f45d"}, + {file = "grpcio_tools-1.71.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:edab7e6518de01196be37f96cb1e138c3819986bf5e2a6c9e1519b4d716b2f5a"}, + {file = "grpcio_tools-1.71.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8b93b9f6adc7491d4c10144c0643409db298e5e63c997106a804f6f0248dbaf4"}, + {file = "grpcio_tools-1.71.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ae5f2efa9e644c10bf1021600bfc099dfbd8e02b184d2d25dc31fcd6c2bc59e"}, + {file = "grpcio_tools-1.71.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:65aa082f4435571d65d5ce07fc444f23c3eff4f3e34abef599ef8c9e1f6f360f"}, + {file = 
"grpcio_tools-1.71.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1331e726e08b7bdcbf2075fcf4b47dff07842b04845e6e220a08a4663e232d7f"}, + {file = "grpcio_tools-1.71.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6693a7d3ba138b0e693b3d1f687cdd9db9e68976c3fa2b951c17a072fea8b583"}, + {file = "grpcio_tools-1.71.0-cp311-cp311-win32.whl", hash = "sha256:6d11ed3ff7b6023b5c72a8654975324bb98c1092426ba5b481af406ff559df00"}, + {file = "grpcio_tools-1.71.0-cp311-cp311-win_amd64.whl", hash = "sha256:072b2a5805ac97e4623b3aa8f7818275f3fb087f4aa131b0fce00471065f6eaa"}, + {file = "grpcio_tools-1.71.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:61c0409d5bdac57a7bd0ce0ab01c1c916728fe4c8a03d77a25135ad481eb505c"}, + {file = "grpcio_tools-1.71.0-cp312-cp312-macosx_10_14_universal2.whl", hash = "sha256:28784f39921d061d2164a9dcda5164a69d07bf29f91f0ea50b505958292312c9"}, + {file = "grpcio_tools-1.71.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:192808cf553cedca73f0479cc61d5684ad61f24db7a5f3c4dfe1500342425866"}, + {file = "grpcio_tools-1.71.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:989ee9da61098230d3d4c8f8f8e27c2de796f1ff21b1c90110e636d9acd9432b"}, + {file = "grpcio_tools-1.71.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:541a756276c8a55dec991f6c0106ae20c8c8f5ce8d0bdbfcb01e2338d1a8192b"}, + {file = "grpcio_tools-1.71.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:870c0097700d13c403e5517cb7750ab5b4a791ce3e71791c411a38c5468b64bd"}, + {file = "grpcio_tools-1.71.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:abd57f615e88bf93c3c6fd31f923106e3beb12f8cd2df95b0d256fa07a7a0a57"}, + {file = "grpcio_tools-1.71.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:753270e2d06d37e6d7af8967d1d059ec635ad215882041a36294f4e2fd502b2e"}, + {file = "grpcio_tools-1.71.0-cp312-cp312-win32.whl", hash = "sha256:0e647794bd7138b8c215e86277a9711a95cf6a03ff6f9e555d54fdf7378b9f9d"}, + {file = 
"grpcio_tools-1.71.0-cp312-cp312-win_amd64.whl", hash = "sha256:48debc879570972d28bfe98e4970eff25bb26da3f383e0e49829b2d2cd35ad87"}, + {file = "grpcio_tools-1.71.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:9a78d07d6c301a25ef5ede962920a522556a1dfee1ccc05795994ceb867f766c"}, + {file = "grpcio_tools-1.71.0-cp313-cp313-macosx_10_14_universal2.whl", hash = "sha256:580ac88141c9815557e63c9c04f5b1cdb19b4db8d0cb792b573354bde1ee8b12"}, + {file = "grpcio_tools-1.71.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f7c678e68ece0ae908ecae1c4314a0c2c7f83e26e281738b9609860cc2c82d96"}, + {file = "grpcio_tools-1.71.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56ecd6cc89b5e5eed1de5eb9cafce86c9c9043ee3840888cc464d16200290b53"}, + {file = "grpcio_tools-1.71.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e52a041afc20ab2431d756b6295d727bd7adee813b21b06a3483f4a7a15ea15f"}, + {file = "grpcio_tools-1.71.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:2a1712f12102b60c8d92779b89d0504e0d6f3a59f2b933e5622b8583f5c02992"}, + {file = "grpcio_tools-1.71.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:41878cb7a75477e62fdd45e7e9155b3af1b7a5332844021e2511deaf99ac9e6c"}, + {file = "grpcio_tools-1.71.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:682e958b476049ccc14c71bedf3f979bced01f6e0c04852efc5887841a32ad6b"}, + {file = "grpcio_tools-1.71.0-cp313-cp313-win32.whl", hash = "sha256:0ccfb837152b7b858b9f26bb110b3ae8c46675d56130f6c2f03605c4f129be13"}, + {file = "grpcio_tools-1.71.0-cp313-cp313-win_amd64.whl", hash = "sha256:ffff9bc5eacb34dd26b487194f7d44a3e64e752fc2cf049d798021bf25053b87"}, + {file = "grpcio_tools-1.71.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:834959b6eceb85de5217a411aba1643b5f782798680c122202d6a06177226644"}, + {file = "grpcio_tools-1.71.0-cp39-cp39-macosx_10_14_universal2.whl", hash = "sha256:e3ae9556e2a1cd70e7d7b0e0459c35af71d51a7dae4cf36075068011a69f13ec"}, + {file = 
"grpcio_tools-1.71.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:77fe6db1334e0ce318b2cb4e70afa94e0c173ed1a533d37aea69ad9f61ae8ea9"}, + {file = "grpcio_tools-1.71.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57e3e2544c306b60ef2d76570bac4e977be1ad548641c9eec130c3bc47e80141"}, + {file = "grpcio_tools-1.71.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af39e245fa56f7f5c2fe86b7d6c1b78f395c07e54d5613cbdbb3c24769a92b6e"}, + {file = "grpcio_tools-1.71.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8f987d0053351217954543b174b0bddbf51d45b3cfcf8d6de97b0a43d264d753"}, + {file = "grpcio_tools-1.71.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8e6cdbba4dae7b37b0d25d074614be9936fb720144420f03d9f142a80be69ba2"}, + {file = "grpcio_tools-1.71.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3adc8b229e60c77bab5a5d62b415667133bd5ced7d59b5f71d6317c9143631e"}, + {file = "grpcio_tools-1.71.0-cp39-cp39-win32.whl", hash = "sha256:f68334d28a267fabec6e70cb5986e9999cfbfd14db654094ddf9aedd804a293a"}, + {file = "grpcio_tools-1.71.0-cp39-cp39-win_amd64.whl", hash = "sha256:1291a6136c07a86c3bb09f6c33f5cf227cc14956edd1b85cb572327a36e0aef8"}, + {file = "grpcio_tools-1.71.0.tar.gz", hash = "sha256:38dba8e0d5e0fb23a034e09644fdc6ed862be2371887eee54901999e8f6792a8"}, +] + +[package.dependencies] +grpcio = ">=1.71.0" +protobuf = ">=5.26.1,<6.0dev" +setuptools = "*" + +[[package]] +name = "iniconfig" +version = "2.1.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.8" +files = [ + {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, + {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, +] + +[[package]] +name = "packaging" +version = "25.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "platformdirs" +version = "4.3.8" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4"}, + {file = "platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.14.1)"] + +[[package]] +name = "pluggy" +version = "1.6.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["coverage", "pytest", "pytest-benchmark"] + +[[package]] +name = "protobuf" +version = "5.29.4" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "protobuf-5.29.4-cp310-abi3-win32.whl", hash = "sha256:13eb236f8eb9ec34e63fc8b1d6efd2777d062fa6aaa68268fb67cf77f6839ad7"}, + {file = "protobuf-5.29.4-cp310-abi3-win_amd64.whl", hash = "sha256:bcefcdf3976233f8a502d265eb65ea740c989bacc6c30a58290ed0e519eb4b8d"}, + {file = "protobuf-5.29.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:307ecba1d852ec237e9ba668e087326a67564ef83e45a0189a772ede9e854dd0"}, + {file = "protobuf-5.29.4-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:aec4962f9ea93c431d5714ed1be1c93f13e1a8618e70035ba2b0564d9e633f2e"}, + {file = "protobuf-5.29.4-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:d7d3f7d1d5a66ed4942d4fefb12ac4b14a29028b209d4bfb25c68ae172059922"}, + {file = "protobuf-5.29.4-cp38-cp38-win32.whl", hash = 
"sha256:1832f0515b62d12d8e6ffc078d7e9eb06969aa6dc13c13e1036e39d73bebc2de"}, + {file = "protobuf-5.29.4-cp38-cp38-win_amd64.whl", hash = "sha256:476cb7b14914c780605a8cf62e38c2a85f8caff2e28a6a0bad827ec7d6c85d68"}, + {file = "protobuf-5.29.4-cp39-cp39-win32.whl", hash = "sha256:fd32223020cb25a2cc100366f1dedc904e2d71d9322403224cdde5fdced0dabe"}, + {file = "protobuf-5.29.4-cp39-cp39-win_amd64.whl", hash = "sha256:678974e1e3a9b975b8bc2447fca458db5f93a2fb6b0c8db46b6675b5b5346812"}, + {file = "protobuf-5.29.4-py3-none-any.whl", hash = "sha256:3fde11b505e1597f71b875ef2fc52062b6a9740e5f7c8997ce878b6009145862"}, + {file = "protobuf-5.29.4.tar.gz", hash = "sha256:4f1dfcd7997b31ef8f53ec82781ff434a28bf71d9102ddde14d076adcfc78c99"}, +] + +[[package]] +name = "pytest" +version = "8.3.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, + {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "setuptools" +version = "80.8.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.9" +files = [ + {file = "setuptools-80.8.0-py3-none-any.whl", hash = "sha256:95a60484590d24103af13b686121328cc2736bee85de8936383111e421b9edc0"}, + {file = "setuptools-80.8.0.tar.gz", hash = 
"sha256:49f7af965996f26d43c8ae34539c8d99c5042fbff34302ea151eaa9c207cd257"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"] +core = ["importlib_metadata (>=6)", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"] + +[[package]] +name = "tabulate" +version = "0.9.0" +description = "Pretty-print tabular data" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +] + +[package.extras] +widechars = ["wcwidth"] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = 
"toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "tomli" +version = "2.2.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, +] + +[[package]] +name = "typing-extensions" +version = "4.13.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, + {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.10" +content-hash = "fc1e192401cbe5da4fe12291c42d1ec2de9dba6c6426b3bf6f8350655904dba4" diff --git a/python/yellowstone-fumarole-client/pyproject.toml b/python/yellowstone-fumarole-client/pyproject.toml new file mode 100644 index 0000000..74c2c3d --- /dev/null +++ 
b/python/yellowstone-fumarole-client/pyproject.toml @@ -0,0 +1,33 @@ +[tool.poetry] +name = "yellowstone-fumarole-client" +version = "0.1.0" +homepage = "https://github.com/rpcpool/yellowstone-fumarole" +repository = "https://github.com/rpcpool/yellowstone-fumarole" +description = "Yellowstone Fumarole Python Client" +authors = ["Louis-Vincent ", "Triton One "] +readme = "README.md" + +packages = [ + { include = "yellowstone_api" }, + { include = "yellowstone_fumarole_client" }, +] + +[tool.poetry.dependencies] +python = "^3.10" +click = "^8.1.7" +grpcio = "^1.68.1" +protobuf = "^5.29.1" +toml = "^0.10.2" +base58 = "^2.1.1" +tabulate = "^0.9.0" + +[tool.poetry.group.test.dependencies] +pytest = "^8.3.4" + +[tool.poetry.group.dev.dependencies] +grpcio-tools = "^1.68.1" +black = "^24.10.0" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/python/yellowstone-fumarole-client/scripts/build.sh b/python/yellowstone-fumarole-client/scripts/build.sh new file mode 100755 index 0000000..64b74dd --- /dev/null +++ b/python/yellowstone-fumarole-client/scripts/build.sh @@ -0,0 +1,50 @@ +#!/bin/bash +set -e + +script_dir=$(dirname "$(realpath "$BASH_SOURCE")") + +# Go to parent directory as long as it is the not repo root +repo_dir="$script_dir" + +while [ "$repo_dir" != "/" ] && [ ! -f "$repo_dir/Cargo.toml" ]; do + repo_dir=$(dirname "$repo_dir") +done + +if [ -f "$repo_dir/Cargo.toml" ]; then + cd "$current_dir" +else + echo "Cargo.toml not found in any parent directory." 
+ exit 1 +fi + + +package_dir="$(dirname "$script_dir")" +echo "fume_dir: $package_dir" +echo "repo_dir: $repo_dir" +proto_path="$repo_dir/proto" +proto_path2="$repo_dir/yellowstone-grpc/yellowstone-grpc-proto/proto" +out_dir="$package_dir/yellowstone_api" +module_name="yellowstone_api" +rm -fr $out_dir/* +mkdir -p $out_dir + +/bin/env python -m grpc_tools.protoc \ + -I$proto_path \ + -I$proto_path2 \ + --python_out=$out_dir \ + --pyi_out=$out_dir \ + --grpc_python_out=$out_dir \ + $proto_path/*.proto $proto_path2/*.proto + +pushd $out_dir +for file in *.py*; do + name="${file%.*}" + sed -i "s/^import \(.*\)_pb2 as \(.*\)/import $module_name.\1_pb2 as \2/g" $file + sed -i "s/^import \(.*\)_pb2_grpc as \(.*\)/import $module_name.\1_pb2 as \2/g" $file + sed -i "s/^from \(.*\)_pb2_grpc import \(.*\)/from $module_name.\1_pb2 import \2/g" $file + sed -i "s/^from \(.*\)_pb2 import \(.*\)/from $module_name.\1_pb2 import \2/g" $file +done + +touch '__init__.py' + +popd \ No newline at end of file diff --git a/python/yellowstone-fumarole-client/tests/__init__.py b/python/yellowstone-fumarole-client/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/yellowstone-fumarole-client/tests/test_fumarole_sm.py b/python/yellowstone-fumarole-client/tests/test_fumarole_sm.py new file mode 100644 index 0000000..d4e50d9 --- /dev/null +++ b/python/yellowstone-fumarole-client/tests/test_fumarole_sm.py @@ -0,0 +1,122 @@ +import uuid +from yellowstone_fumarole_client.runtime.state_machine import ( + DEFAULT_SLOT_MEMORY_RETENTION, + FumaroleSM, + FumeOffset, + Slot, + SlotDownloadState, +) + +from yellowstone_api.fumarole_v2_pb2 import BlockchainEvent, CommitmentLevel + + +# Tests +def random_blockchain_event( + offset: FumeOffset, slot: Slot, commitment_level +) -> BlockchainEvent: + blockchain_id = uuid.UUID(int=0).bytes + block_uid = uuid.uuid4().bytes + return BlockchainEvent( + offset=offset, + blockchain_id=blockchain_id, + block_uid=block_uid, + 
num_shards=1, + slot=slot, + parent_slot=None, + commitment_level=commitment_level, + blockchain_shard_id=0, + dead_error=None, + ) + + +def test_fumarole_sm_happy_path(): + sm = FumaroleSM( + last_committed_offset=0, slot_memory_retention=DEFAULT_SLOT_MEMORY_RETENTION + ) + event = random_blockchain_event( + offset=1, slot=1, commitment_level=CommitmentLevel.PROCESSED + ) + sm.queue_blockchain_event([event]) + + download_req = sm.pop_slot_to_download(None) + assert download_req is not None + assert download_req.slot == 1 + + assert sm.pop_slot_to_download(None) is None + assert sm.pop_next_slot_status() is None + + download_state = sm.make_slot_download_progress(slot=1, shard_idx=0) + assert download_state == SlotDownloadState.Done + + status = sm.pop_next_slot_status() + assert status is not None + assert status.slot == 1 + assert status.commitment_level == CommitmentLevel.PROCESSED + sm.mark_event_as_processed(status.session_sequence) + + event2 = random_blockchain_event( + offset=2, slot=1, commitment_level=CommitmentLevel.CONFIRMED + ) + sm.queue_blockchain_event([event2]) + + assert sm.pop_slot_to_download(None) is None + + status = sm.pop_next_slot_status() + assert status is not None + assert status.slot == 1 + assert status.commitment_level == CommitmentLevel.CONFIRMED + sm.mark_event_as_processed(status.session_sequence) + + assert sm.committable_offset == event2.offset + + +def test_it_should_dedup_slot_status(): + sm = FumaroleSM( + last_committed_offset=0, slot_memory_retention=DEFAULT_SLOT_MEMORY_RETENTION + ) + event = random_blockchain_event( + offset=1, slot=1, commitment_level=CommitmentLevel.PROCESSED + ) + sm.queue_blockchain_event([event]) + + assert sm.pop_next_slot_status() is None + + download_req = sm.pop_slot_to_download(None) + assert download_req is not None + assert download_req.slot == 1 + + assert sm.pop_slot_to_download(None) is None + + sm.make_slot_download_progress(slot=1, shard_idx=0) + + status = sm.pop_next_slot_status() + 
assert status is not None + assert status.slot == 1 + assert status.commitment_level == CommitmentLevel.PROCESSED + + sm.queue_blockchain_event([event]) + + assert sm.pop_slot_to_download(None) is None + assert sm.pop_next_slot_status() is None + + +def test_it_should_handle_min_commitment_level(): + sm = FumaroleSM( + last_committed_offset=0, slot_memory_retention=DEFAULT_SLOT_MEMORY_RETENTION + ) + event = random_blockchain_event( + offset=1, slot=1, commitment_level=CommitmentLevel.PROCESSED + ) + sm.queue_blockchain_event([event]) + + assert sm.pop_next_slot_status() is None + + download_req = sm.pop_slot_to_download(CommitmentLevel.FINALIZED) + assert download_req is None + + assert sm.pop_slot_to_download(None) is None + + status = sm.pop_next_slot_status() + assert status is not None + assert status.slot == 1 + assert status.commitment_level == CommitmentLevel.PROCESSED diff --git a/python/yellowstone-fumarole-client/yellowstone_api/__init__.py b/python/yellowstone-fumarole-client/yellowstone_api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/yellowstone-fumarole-client/yellowstone_api/fumarole_v2_pb2.py b/python/yellowstone-fumarole-client/yellowstone_api/fumarole_v2_pb2.py new file mode 100644 index 0000000..0025e27 --- /dev/null +++ b/python/yellowstone-fumarole-client/yellowstone_api/fumarole_v2_pb2.py @@ -0,0 +1,122 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE +# source: fumarole_v2.proto +# Protobuf Python Version: 5.29.0 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 5, + 29, + 0, + '', + 'fumarole_v2.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +import yellowstone_api.geyser_pb2 as geyser__pb2 +try: + solana__storage__pb2 = geyser__pb2.solana__storage__pb2 +except AttributeError: + solana__storage__pb2 = geyser__pb2.solana_storage_pb2 + +from yellowstone_api.geyser_pb2 import * + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x11\x66umarole_v2.proto\x12\x0b\x66umarole_v2\x1a\x0cgeyser.proto\"\x10\n\x0eVersionRequest\"\"\n\x0fVersionResponse\x12\x0f\n\x07version\x18\x01 \x01(\t\":\n\x1bGetConsumerGroupInfoRequest\x12\x1b\n\x13\x63onsumer_group_name\x18\x01 \x01(\t\"9\n\x1a\x44\x65leteConsumerGroupRequest\x12\x1b\n\x13\x63onsumer_group_name\x18\x01 \x01(\t\".\n\x1b\x44\x65leteConsumerGroupResponse\x12\x0f\n\x07success\x18\x01 \x01(\x08\"\x1b\n\x19ListConsumerGroupsRequest\"U\n\x1aListConsumerGroupsResponse\x12\x37\n\x0f\x63onsumer_groups\x18\x01 \x03(\x0b\x32\x1e.fumarole_v2.ConsumerGroupInfo\"N\n\x11\x43onsumerGroupInfo\x12\n\n\x02id\x18\x01 \x01(\t\x12\x1b\n\x13\x63onsumer_group_name\x18\x02 \x01(\t\x12\x10\n\x08is_stale\x18\x03 \x01(\x08\"4\n\x15GetSlotLagInfoRequest\x12\x1b\n\x13\x63onsumer_group_name\x18\x01 \x01(\t\"\xf1\x04\n\x0c\x42lockFilters\x12\x39\n\x08\x61\x63\x63ounts\x18\x01 \x03(\x0b\x32\'.fumarole_v2.BlockFilters.AccountsEntry\x12\x41\n\x0ctransactions\x18\x02 
\x03(\x0b\x32+.fumarole_v2.BlockFilters.TransactionsEntry\x12\x37\n\x07\x65ntries\x18\x03 \x03(\x0b\x32&.fumarole_v2.BlockFilters.EntriesEntry\x12>\n\x0b\x62locks_meta\x18\x04 \x03(\x0b\x32).fumarole_v2.BlockFilters.BlocksMetaEntry\x1aW\n\rAccountsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.geyser.SubscribeRequestFilterAccounts:\x02\x38\x01\x1a_\n\x11TransactionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x39\n\x05value\x18\x02 \x01(\x0b\x32*.geyser.SubscribeRequestFilterTransactions:\x02\x38\x01\x1aS\n\x0c\x45ntriesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x32\n\x05value\x18\x02 \x01(\x0b\x32#.geyser.SubscribeRequestFilterEntry:\x02\x38\x01\x1a[\n\x0f\x42locksMetaEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x37\n\x05value\x18\x02 \x01(\x0b\x32(.geyser.SubscribeRequestFilterBlocksMeta:\x02\x38\x01\"\x98\x01\n\x12\x44ownloadBlockShard\x12\x15\n\rblockchain_id\x18\x01 \x01(\x0c\x12\x11\n\tblock_uid\x18\x02 \x01(\x0c\x12\x11\n\tshard_idx\x18\x03 \x01(\x05\x12\x34\n\x0c\x62lockFilters\x18\x04 \x01(\x0b\x32\x19.fumarole_v2.BlockFiltersH\x00\x88\x01\x01\x42\x0f\n\r_blockFilters\"\x17\n\x04Ping\x12\x0f\n\x07ping_id\x18\x01 \x01(\r\"\x17\n\x04Pong\x12\x0f\n\x07ping_id\x18\x01 \x01(\r\"\x8d\x01\n\x0b\x44\x61taCommand\x12?\n\x14\x64ownload_block_shard\x18\x01 \x01(\x0b\x32\x1f.fumarole_v2.DownloadBlockShardH\x00\x12\x32\n\rfilter_update\x18\x02 \x01(\x0b\x32\x19.fumarole_v2.BlockFiltersH\x00\x42\t\n\x07\x63ommand\"\x1a\n\x18\x42lockShardDownloadFinish\"L\n\rBlockNotFound\x12\x15\n\rblockchain_id\x18\x01 \x01(\x0c\x12\x11\n\tblock_uid\x18\x02 \x01(\x0c\x12\x11\n\tshard_idx\x18\x03 \x01(\x05\"E\n\tDataError\x12/\n\tnot_found\x18\x01 \x01(\x0b\x32\x1a.fumarole_v2.BlockNotFoundH\x00\x42\x07\n\x05\x65rror\"\x93\x01\n\x0c\x44\x61taResponse\x12)\n\x06update\x18\x01 \x01(\x0b\x32\x17.geyser.SubscribeUpdateH\x00\x12L\n\x1b\x62lock_shard_download_finish\x18\x02 
\x01(\x0b\x32%.fumarole_v2.BlockShardDownloadFinishH\x00\x42\n\n\x08response\"0\n\x0c\x43ommitOffset\x12\x0e\n\x06offset\x18\x01 \x01(\x03\x12\x10\n\x08shard_id\x18\x02 \x01(\x05\"c\n\x15PollBlockchainHistory\x12\x10\n\x08shard_id\x18\x01 \x01(\x05\x12\x11\n\x04\x66rom\x18\x02 \x01(\x03H\x00\x88\x01\x01\x12\x12\n\x05limit\x18\x03 \x01(\x03H\x01\x88\x01\x01\x42\x07\n\x05_fromB\x08\n\x06_limit\"\x8f\x02\n\x0f\x42lockchainEvent\x12\x0e\n\x06offset\x18\x01 \x01(\x03\x12\x15\n\rblockchain_id\x18\x02 \x01(\x0c\x12\x11\n\tblock_uid\x18\x03 \x01(\x0c\x12\x12\n\nnum_shards\x18\x04 \x01(\r\x12\x0c\n\x04slot\x18\x05 \x01(\x04\x12\x18\n\x0bparent_slot\x18\x06 \x01(\x04H\x00\x88\x01\x01\x12\x31\n\x10\x63ommitment_level\x18\x07 \x01(\x0e\x32\x17.geyser.CommitmentLevel\x12\x1b\n\x13\x62lockchain_shard_id\x18\x08 \x01(\x05\x12\x17\n\ndead_error\x18\t \x01(\tH\x01\x88\x01\x01\x42\x0e\n\x0c_parent_slotB\r\n\x0b_dead_error\"A\n\x11\x42lockchainHistory\x12,\n\x06\x65vents\x18\x01 \x03(\x0b\x32\x1c.fumarole_v2.BlockchainEvent\"L\n\x10JoinControlPlane\x12 \n\x13\x63onsumer_group_name\x18\x01 \x01(\tH\x00\x88\x01\x01\x42\x16\n\x14_consumer_group_name\"\xe2\x01\n\x0e\x43ontrolCommand\x12\x35\n\x0cinitial_join\x18\x01 \x01(\x0b\x32\x1d.fumarole_v2.JoinControlPlaneH\x00\x12\x32\n\rcommit_offset\x18\x02 \x01(\x0b\x32\x19.fumarole_v2.CommitOffsetH\x00\x12\x37\n\tpoll_hist\x18\x03 \x01(\x0b\x32\".fumarole_v2.PollBlockchainHistoryH\x00\x12!\n\x04ping\x18\x04 \x01(\x0b\x32\x11.fumarole_v2.PingH\x00\x42\t\n\x07\x63ommand\"\xe7\x01\n\x0f\x43ontrolResponse\x12\x36\n\x04init\x18\x01 \x01(\x0b\x32&.fumarole_v2.InitialConsumerGroupStateH\x00\x12\x38\n\rcommit_offset\x18\x02 \x01(\x0b\x32\x1f.fumarole_v2.CommitOffsetResultH\x00\x12\x33\n\tpoll_hist\x18\x03 \x01(\x0b\x32\x1e.fumarole_v2.BlockchainHistoryH\x00\x12!\n\x04pong\x18\x04 \x01(\x0b\x32\x11.fumarole_v2.PongH\x00\x42\n\n\x08response\"6\n\x12\x43ommitOffsetResult\x12\x0e\n\x06offset\x18\x01 \x01(\x03\x12\x10\n\x08shard_id\x18\x02 
\x01(\x05\"\xd1\x01\n\x19InitialConsumerGroupState\x12\x15\n\rblockchain_id\x18\x01 \x01(\x0c\x12`\n\x16last_committed_offsets\x18\x02 \x03(\x0b\x32@.fumarole_v2.InitialConsumerGroupState.LastCommittedOffsetsEntry\x1a;\n\x19LastCommittedOffsetsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\"8\n\x1b\x43reateConsumerGroupResponse\x12\x19\n\x11\x63onsumer_group_id\x18\x01 \x01(\t\"z\n\x1a\x43reateConsumerGroupRequest\x12\x1b\n\x13\x63onsumer_group_name\x18\x01 \x01(\t\x12?\n\x15initial_offset_policy\x18\x02 \x01(\x0e\x32 .fumarole_v2.InitialOffsetPolicy*!\n\x13InitialOffsetPolicy\x12\n\n\x06LATEST\x10\x00\x32\xe2\x05\n\x08\x46umarole\x12\x62\n\x14GetConsumerGroupInfo\x12(.fumarole_v2.GetConsumerGroupInfoRequest\x1a\x1e.fumarole_v2.ConsumerGroupInfo\"\x00\x12g\n\x12ListConsumerGroups\x12&.fumarole_v2.ListConsumerGroupsRequest\x1a\'.fumarole_v2.ListConsumerGroupsResponse\"\x00\x12j\n\x13\x44\x65leteConsumerGroup\x12\'.fumarole_v2.DeleteConsumerGroupRequest\x1a(.fumarole_v2.DeleteConsumerGroupResponse\"\x00\x12j\n\x13\x43reateConsumerGroup\x12\'.fumarole_v2.CreateConsumerGroupRequest\x1a(.fumarole_v2.CreateConsumerGroupResponse\"\x00\x12O\n\rDownloadBlock\x12\x1f.fumarole_v2.DownloadBlockShard\x1a\x19.fumarole_v2.DataResponse\"\x00\x30\x01\x12J\n\rSubscribeData\x12\x18.fumarole_v2.DataCommand\x1a\x19.fumarole_v2.DataResponse\"\x00(\x01\x30\x01\x12L\n\tSubscribe\x12\x1b.fumarole_v2.ControlCommand\x1a\x1c.fumarole_v2.ControlResponse\"\x00(\x01\x30\x01\x12\x46\n\x07Version\x12\x1b.fumarole_v2.VersionRequest\x1a\x1c.fumarole_v2.VersionResponse\"\x00P\x00\x62\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'fumarole_v2_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + DESCRIPTOR._loaded_options = None + _globals['_BLOCKFILTERS_ACCOUNTSENTRY']._loaded_options = None + 
_globals['_BLOCKFILTERS_ACCOUNTSENTRY']._serialized_options = b'8\001' + _globals['_BLOCKFILTERS_TRANSACTIONSENTRY']._loaded_options = None + _globals['_BLOCKFILTERS_TRANSACTIONSENTRY']._serialized_options = b'8\001' + _globals['_BLOCKFILTERS_ENTRIESENTRY']._loaded_options = None + _globals['_BLOCKFILTERS_ENTRIESENTRY']._serialized_options = b'8\001' + _globals['_BLOCKFILTERS_BLOCKSMETAENTRY']._loaded_options = None + _globals['_BLOCKFILTERS_BLOCKSMETAENTRY']._serialized_options = b'8\001' + _globals['_INITIALCONSUMERGROUPSTATE_LASTCOMMITTEDOFFSETSENTRY']._loaded_options = None + _globals['_INITIALCONSUMERGROUPSTATE_LASTCOMMITTEDOFFSETSENTRY']._serialized_options = b'8\001' + _globals['_INITIALOFFSETPOLICY']._serialized_start=3306 + _globals['_INITIALOFFSETPOLICY']._serialized_end=3339 + _globals['_VERSIONREQUEST']._serialized_start=48 + _globals['_VERSIONREQUEST']._serialized_end=64 + _globals['_VERSIONRESPONSE']._serialized_start=66 + _globals['_VERSIONRESPONSE']._serialized_end=100 + _globals['_GETCONSUMERGROUPINFOREQUEST']._serialized_start=102 + _globals['_GETCONSUMERGROUPINFOREQUEST']._serialized_end=160 + _globals['_DELETECONSUMERGROUPREQUEST']._serialized_start=162 + _globals['_DELETECONSUMERGROUPREQUEST']._serialized_end=219 + _globals['_DELETECONSUMERGROUPRESPONSE']._serialized_start=221 + _globals['_DELETECONSUMERGROUPRESPONSE']._serialized_end=267 + _globals['_LISTCONSUMERGROUPSREQUEST']._serialized_start=269 + _globals['_LISTCONSUMERGROUPSREQUEST']._serialized_end=296 + _globals['_LISTCONSUMERGROUPSRESPONSE']._serialized_start=298 + _globals['_LISTCONSUMERGROUPSRESPONSE']._serialized_end=383 + _globals['_CONSUMERGROUPINFO']._serialized_start=385 + _globals['_CONSUMERGROUPINFO']._serialized_end=463 + _globals['_GETSLOTLAGINFOREQUEST']._serialized_start=465 + _globals['_GETSLOTLAGINFOREQUEST']._serialized_end=517 + _globals['_BLOCKFILTERS']._serialized_start=520 + _globals['_BLOCKFILTERS']._serialized_end=1145 + 
_globals['_BLOCKFILTERS_ACCOUNTSENTRY']._serialized_start=783 + _globals['_BLOCKFILTERS_ACCOUNTSENTRY']._serialized_end=870 + _globals['_BLOCKFILTERS_TRANSACTIONSENTRY']._serialized_start=872 + _globals['_BLOCKFILTERS_TRANSACTIONSENTRY']._serialized_end=967 + _globals['_BLOCKFILTERS_ENTRIESENTRY']._serialized_start=969 + _globals['_BLOCKFILTERS_ENTRIESENTRY']._serialized_end=1052 + _globals['_BLOCKFILTERS_BLOCKSMETAENTRY']._serialized_start=1054 + _globals['_BLOCKFILTERS_BLOCKSMETAENTRY']._serialized_end=1145 + _globals['_DOWNLOADBLOCKSHARD']._serialized_start=1148 + _globals['_DOWNLOADBLOCKSHARD']._serialized_end=1300 + _globals['_PING']._serialized_start=1302 + _globals['_PING']._serialized_end=1325 + _globals['_PONG']._serialized_start=1327 + _globals['_PONG']._serialized_end=1350 + _globals['_DATACOMMAND']._serialized_start=1353 + _globals['_DATACOMMAND']._serialized_end=1494 + _globals['_BLOCKSHARDDOWNLOADFINISH']._serialized_start=1496 + _globals['_BLOCKSHARDDOWNLOADFINISH']._serialized_end=1522 + _globals['_BLOCKNOTFOUND']._serialized_start=1524 + _globals['_BLOCKNOTFOUND']._serialized_end=1600 + _globals['_DATAERROR']._serialized_start=1602 + _globals['_DATAERROR']._serialized_end=1671 + _globals['_DATARESPONSE']._serialized_start=1674 + _globals['_DATARESPONSE']._serialized_end=1821 + _globals['_COMMITOFFSET']._serialized_start=1823 + _globals['_COMMITOFFSET']._serialized_end=1871 + _globals['_POLLBLOCKCHAINHISTORY']._serialized_start=1873 + _globals['_POLLBLOCKCHAINHISTORY']._serialized_end=1972 + _globals['_BLOCKCHAINEVENT']._serialized_start=1975 + _globals['_BLOCKCHAINEVENT']._serialized_end=2246 + _globals['_BLOCKCHAINHISTORY']._serialized_start=2248 + _globals['_BLOCKCHAINHISTORY']._serialized_end=2313 + _globals['_JOINCONTROLPLANE']._serialized_start=2315 + _globals['_JOINCONTROLPLANE']._serialized_end=2391 + _globals['_CONTROLCOMMAND']._serialized_start=2394 + _globals['_CONTROLCOMMAND']._serialized_end=2620 + 
_globals['_CONTROLRESPONSE']._serialized_start=2623 + _globals['_CONTROLRESPONSE']._serialized_end=2854 + _globals['_COMMITOFFSETRESULT']._serialized_start=2856 + _globals['_COMMITOFFSETRESULT']._serialized_end=2910 + _globals['_INITIALCONSUMERGROUPSTATE']._serialized_start=2913 + _globals['_INITIALCONSUMERGROUPSTATE']._serialized_end=3122 + _globals['_INITIALCONSUMERGROUPSTATE_LASTCOMMITTEDOFFSETSENTRY']._serialized_start=3063 + _globals['_INITIALCONSUMERGROUPSTATE_LASTCOMMITTEDOFFSETSENTRY']._serialized_end=3122 + _globals['_CREATECONSUMERGROUPRESPONSE']._serialized_start=3124 + _globals['_CREATECONSUMERGROUPRESPONSE']._serialized_end=3180 + _globals['_CREATECONSUMERGROUPREQUEST']._serialized_start=3182 + _globals['_CREATECONSUMERGROUPREQUEST']._serialized_end=3304 + _globals['_FUMAROLE']._serialized_start=3342 + _globals['_FUMAROLE']._serialized_end=4080 +# @@protoc_insertion_point(module_scope) diff --git a/python/yellowstone-fumarole-client/yellowstone_api/fumarole_v2_pb2.pyi b/python/yellowstone-fumarole-client/yellowstone_api/fumarole_v2_pb2.pyi new file mode 100644 index 0000000..ca248d8 --- /dev/null +++ b/python/yellowstone-fumarole-client/yellowstone_api/fumarole_v2_pb2.pyi @@ -0,0 +1,328 @@ +import yellowstone_api.geyser_pb2 as _geyser_pb2 +import yellowstone_api.solana_storage_pb2 as _solana_storage_pb2 +from google.protobuf.internal import containers as _containers +from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union +from yellowstone_api.geyser_pb2 import SubscribeRequest as SubscribeRequest +from yellowstone_api.geyser_pb2 import SubscribeRequestFilterAccounts as SubscribeRequestFilterAccounts +from yellowstone_api.geyser_pb2 import SubscribeRequestFilterAccountsFilter as 
SubscribeRequestFilterAccountsFilter +from yellowstone_api.geyser_pb2 import SubscribeRequestFilterAccountsFilterMemcmp as SubscribeRequestFilterAccountsFilterMemcmp +from yellowstone_api.geyser_pb2 import SubscribeRequestFilterAccountsFilterLamports as SubscribeRequestFilterAccountsFilterLamports +from yellowstone_api.geyser_pb2 import SubscribeRequestFilterSlots as SubscribeRequestFilterSlots +from yellowstone_api.geyser_pb2 import SubscribeRequestFilterTransactions as SubscribeRequestFilterTransactions +from yellowstone_api.geyser_pb2 import SubscribeRequestFilterBlocks as SubscribeRequestFilterBlocks +from yellowstone_api.geyser_pb2 import SubscribeRequestFilterBlocksMeta as SubscribeRequestFilterBlocksMeta +from yellowstone_api.geyser_pb2 import SubscribeRequestFilterEntry as SubscribeRequestFilterEntry +from yellowstone_api.geyser_pb2 import SubscribeRequestAccountsDataSlice as SubscribeRequestAccountsDataSlice +from yellowstone_api.geyser_pb2 import SubscribeRequestPing as SubscribeRequestPing +from yellowstone_api.geyser_pb2 import SubscribeUpdate as SubscribeUpdate +from yellowstone_api.geyser_pb2 import SubscribeUpdateAccount as SubscribeUpdateAccount +from yellowstone_api.geyser_pb2 import SubscribeUpdateAccountInfo as SubscribeUpdateAccountInfo +from yellowstone_api.geyser_pb2 import SubscribeUpdateSlot as SubscribeUpdateSlot +from yellowstone_api.geyser_pb2 import SubscribeUpdateTransaction as SubscribeUpdateTransaction +from yellowstone_api.geyser_pb2 import SubscribeUpdateTransactionInfo as SubscribeUpdateTransactionInfo +from yellowstone_api.geyser_pb2 import SubscribeUpdateTransactionStatus as SubscribeUpdateTransactionStatus +from yellowstone_api.geyser_pb2 import SubscribeUpdateBlock as SubscribeUpdateBlock +from yellowstone_api.geyser_pb2 import SubscribeUpdateBlockMeta as SubscribeUpdateBlockMeta +from yellowstone_api.geyser_pb2 import SubscribeUpdateEntry as SubscribeUpdateEntry +from yellowstone_api.geyser_pb2 import SubscribeUpdatePing as 
SubscribeUpdatePing +from yellowstone_api.geyser_pb2 import SubscribeUpdatePong as SubscribeUpdatePong +from yellowstone_api.geyser_pb2 import PingRequest as PingRequest +from yellowstone_api.geyser_pb2 import PongResponse as PongResponse +from yellowstone_api.geyser_pb2 import GetLatestBlockhashRequest as GetLatestBlockhashRequest +from yellowstone_api.geyser_pb2 import GetLatestBlockhashResponse as GetLatestBlockhashResponse +from yellowstone_api.geyser_pb2 import GetBlockHeightRequest as GetBlockHeightRequest +from yellowstone_api.geyser_pb2 import GetBlockHeightResponse as GetBlockHeightResponse +from yellowstone_api.geyser_pb2 import GetSlotRequest as GetSlotRequest +from yellowstone_api.geyser_pb2 import GetSlotResponse as GetSlotResponse +from yellowstone_api.geyser_pb2 import GetVersionRequest as GetVersionRequest +from yellowstone_api.geyser_pb2 import GetVersionResponse as GetVersionResponse +from yellowstone_api.geyser_pb2 import IsBlockhashValidRequest as IsBlockhashValidRequest +from yellowstone_api.geyser_pb2 import IsBlockhashValidResponse as IsBlockhashValidResponse +from yellowstone_api.geyser_pb2 import CommitmentLevel as CommitmentLevel +from yellowstone_api.geyser_pb2 import SlotStatus as SlotStatus + +DESCRIPTOR: _descriptor.FileDescriptor +PROCESSED: _geyser_pb2.CommitmentLevel +CONFIRMED: _geyser_pb2.CommitmentLevel +FINALIZED: _geyser_pb2.CommitmentLevel +SLOT_PROCESSED: _geyser_pb2.SlotStatus +SLOT_CONFIRMED: _geyser_pb2.SlotStatus +SLOT_FINALIZED: _geyser_pb2.SlotStatus +SLOT_FIRST_SHRED_RECEIVED: _geyser_pb2.SlotStatus +SLOT_COMPLETED: _geyser_pb2.SlotStatus +SLOT_CREATED_BANK: _geyser_pb2.SlotStatus +SLOT_DEAD: _geyser_pb2.SlotStatus + +class InitialOffsetPolicy(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + LATEST: _ClassVar[InitialOffsetPolicy] +LATEST: InitialOffsetPolicy + +class VersionRequest(_message.Message): + __slots__ = () + def __init__(self) -> None: ... 
+ +class VersionResponse(_message.Message): + __slots__ = ("version",) + VERSION_FIELD_NUMBER: _ClassVar[int] + version: str + def __init__(self, version: _Optional[str] = ...) -> None: ... + +class GetConsumerGroupInfoRequest(_message.Message): + __slots__ = ("consumer_group_name",) + CONSUMER_GROUP_NAME_FIELD_NUMBER: _ClassVar[int] + consumer_group_name: str + def __init__(self, consumer_group_name: _Optional[str] = ...) -> None: ... + +class DeleteConsumerGroupRequest(_message.Message): + __slots__ = ("consumer_group_name",) + CONSUMER_GROUP_NAME_FIELD_NUMBER: _ClassVar[int] + consumer_group_name: str + def __init__(self, consumer_group_name: _Optional[str] = ...) -> None: ... + +class DeleteConsumerGroupResponse(_message.Message): + __slots__ = ("success",) + SUCCESS_FIELD_NUMBER: _ClassVar[int] + success: bool + def __init__(self, success: bool = ...) -> None: ... + +class ListConsumerGroupsRequest(_message.Message): + __slots__ = () + def __init__(self) -> None: ... + +class ListConsumerGroupsResponse(_message.Message): + __slots__ = ("consumer_groups",) + CONSUMER_GROUPS_FIELD_NUMBER: _ClassVar[int] + consumer_groups: _containers.RepeatedCompositeFieldContainer[ConsumerGroupInfo] + def __init__(self, consumer_groups: _Optional[_Iterable[_Union[ConsumerGroupInfo, _Mapping]]] = ...) -> None: ... + +class ConsumerGroupInfo(_message.Message): + __slots__ = ("id", "consumer_group_name", "is_stale") + ID_FIELD_NUMBER: _ClassVar[int] + CONSUMER_GROUP_NAME_FIELD_NUMBER: _ClassVar[int] + IS_STALE_FIELD_NUMBER: _ClassVar[int] + id: str + consumer_group_name: str + is_stale: bool + def __init__(self, id: _Optional[str] = ..., consumer_group_name: _Optional[str] = ..., is_stale: bool = ...) -> None: ... + +class GetSlotLagInfoRequest(_message.Message): + __slots__ = ("consumer_group_name",) + CONSUMER_GROUP_NAME_FIELD_NUMBER: _ClassVar[int] + consumer_group_name: str + def __init__(self, consumer_group_name: _Optional[str] = ...) -> None: ... 
+ +class BlockFilters(_message.Message): + __slots__ = ("accounts", "transactions", "entries", "blocks_meta") + class AccountsEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: _geyser_pb2.SubscribeRequestFilterAccounts + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[_geyser_pb2.SubscribeRequestFilterAccounts, _Mapping]] = ...) -> None: ... + class TransactionsEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: _geyser_pb2.SubscribeRequestFilterTransactions + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[_geyser_pb2.SubscribeRequestFilterTransactions, _Mapping]] = ...) -> None: ... + class EntriesEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: _geyser_pb2.SubscribeRequestFilterEntry + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[_geyser_pb2.SubscribeRequestFilterEntry, _Mapping]] = ...) -> None: ... + class BlocksMetaEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: _geyser_pb2.SubscribeRequestFilterBlocksMeta + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[_geyser_pb2.SubscribeRequestFilterBlocksMeta, _Mapping]] = ...) -> None: ... 
+ ACCOUNTS_FIELD_NUMBER: _ClassVar[int] + TRANSACTIONS_FIELD_NUMBER: _ClassVar[int] + ENTRIES_FIELD_NUMBER: _ClassVar[int] + BLOCKS_META_FIELD_NUMBER: _ClassVar[int] + accounts: _containers.MessageMap[str, _geyser_pb2.SubscribeRequestFilterAccounts] + transactions: _containers.MessageMap[str, _geyser_pb2.SubscribeRequestFilterTransactions] + entries: _containers.MessageMap[str, _geyser_pb2.SubscribeRequestFilterEntry] + blocks_meta: _containers.MessageMap[str, _geyser_pb2.SubscribeRequestFilterBlocksMeta] + def __init__(self, accounts: _Optional[_Mapping[str, _geyser_pb2.SubscribeRequestFilterAccounts]] = ..., transactions: _Optional[_Mapping[str, _geyser_pb2.SubscribeRequestFilterTransactions]] = ..., entries: _Optional[_Mapping[str, _geyser_pb2.SubscribeRequestFilterEntry]] = ..., blocks_meta: _Optional[_Mapping[str, _geyser_pb2.SubscribeRequestFilterBlocksMeta]] = ...) -> None: ... + +class DownloadBlockShard(_message.Message): + __slots__ = ("blockchain_id", "block_uid", "shard_idx", "blockFilters") + BLOCKCHAIN_ID_FIELD_NUMBER: _ClassVar[int] + BLOCK_UID_FIELD_NUMBER: _ClassVar[int] + SHARD_IDX_FIELD_NUMBER: _ClassVar[int] + BLOCKFILTERS_FIELD_NUMBER: _ClassVar[int] + blockchain_id: bytes + block_uid: bytes + shard_idx: int + blockFilters: BlockFilters + def __init__(self, blockchain_id: _Optional[bytes] = ..., block_uid: _Optional[bytes] = ..., shard_idx: _Optional[int] = ..., blockFilters: _Optional[_Union[BlockFilters, _Mapping]] = ...) -> None: ... + +class Ping(_message.Message): + __slots__ = ("ping_id",) + PING_ID_FIELD_NUMBER: _ClassVar[int] + ping_id: int + def __init__(self, ping_id: _Optional[int] = ...) -> None: ... + +class Pong(_message.Message): + __slots__ = ("ping_id",) + PING_ID_FIELD_NUMBER: _ClassVar[int] + ping_id: int + def __init__(self, ping_id: _Optional[int] = ...) -> None: ... 
+ +class DataCommand(_message.Message): + __slots__ = ("download_block_shard", "filter_update") + DOWNLOAD_BLOCK_SHARD_FIELD_NUMBER: _ClassVar[int] + FILTER_UPDATE_FIELD_NUMBER: _ClassVar[int] + download_block_shard: DownloadBlockShard + filter_update: BlockFilters + def __init__(self, download_block_shard: _Optional[_Union[DownloadBlockShard, _Mapping]] = ..., filter_update: _Optional[_Union[BlockFilters, _Mapping]] = ...) -> None: ... + +class BlockShardDownloadFinish(_message.Message): + __slots__ = () + def __init__(self) -> None: ... + +class BlockNotFound(_message.Message): + __slots__ = ("blockchain_id", "block_uid", "shard_idx") + BLOCKCHAIN_ID_FIELD_NUMBER: _ClassVar[int] + BLOCK_UID_FIELD_NUMBER: _ClassVar[int] + SHARD_IDX_FIELD_NUMBER: _ClassVar[int] + blockchain_id: bytes + block_uid: bytes + shard_idx: int + def __init__(self, blockchain_id: _Optional[bytes] = ..., block_uid: _Optional[bytes] = ..., shard_idx: _Optional[int] = ...) -> None: ... + +class DataError(_message.Message): + __slots__ = ("not_found",) + NOT_FOUND_FIELD_NUMBER: _ClassVar[int] + not_found: BlockNotFound + def __init__(self, not_found: _Optional[_Union[BlockNotFound, _Mapping]] = ...) -> None: ... + +class DataResponse(_message.Message): + __slots__ = ("update", "block_shard_download_finish") + UPDATE_FIELD_NUMBER: _ClassVar[int] + BLOCK_SHARD_DOWNLOAD_FINISH_FIELD_NUMBER: _ClassVar[int] + update: _geyser_pb2.SubscribeUpdate + block_shard_download_finish: BlockShardDownloadFinish + def __init__(self, update: _Optional[_Union[_geyser_pb2.SubscribeUpdate, _Mapping]] = ..., block_shard_download_finish: _Optional[_Union[BlockShardDownloadFinish, _Mapping]] = ...) -> None: ... + +class CommitOffset(_message.Message): + __slots__ = ("offset", "shard_id") + OFFSET_FIELD_NUMBER: _ClassVar[int] + SHARD_ID_FIELD_NUMBER: _ClassVar[int] + offset: int + shard_id: int + def __init__(self, offset: _Optional[int] = ..., shard_id: _Optional[int] = ...) -> None: ... 
+ +class PollBlockchainHistory(_message.Message): + __slots__ = ("shard_id", "limit") + SHARD_ID_FIELD_NUMBER: _ClassVar[int] + FROM_FIELD_NUMBER: _ClassVar[int] + LIMIT_FIELD_NUMBER: _ClassVar[int] + shard_id: int + limit: int + def __init__(self, shard_id: _Optional[int] = ..., limit: _Optional[int] = ..., **kwargs) -> None: ... + +class BlockchainEvent(_message.Message): + __slots__ = ("offset", "blockchain_id", "block_uid", "num_shards", "slot", "parent_slot", "commitment_level", "blockchain_shard_id", "dead_error") + OFFSET_FIELD_NUMBER: _ClassVar[int] + BLOCKCHAIN_ID_FIELD_NUMBER: _ClassVar[int] + BLOCK_UID_FIELD_NUMBER: _ClassVar[int] + NUM_SHARDS_FIELD_NUMBER: _ClassVar[int] + SLOT_FIELD_NUMBER: _ClassVar[int] + PARENT_SLOT_FIELD_NUMBER: _ClassVar[int] + COMMITMENT_LEVEL_FIELD_NUMBER: _ClassVar[int] + BLOCKCHAIN_SHARD_ID_FIELD_NUMBER: _ClassVar[int] + DEAD_ERROR_FIELD_NUMBER: _ClassVar[int] + offset: int + blockchain_id: bytes + block_uid: bytes + num_shards: int + slot: int + parent_slot: int + commitment_level: _geyser_pb2.CommitmentLevel + blockchain_shard_id: int + dead_error: str + def __init__(self, offset: _Optional[int] = ..., blockchain_id: _Optional[bytes] = ..., block_uid: _Optional[bytes] = ..., num_shards: _Optional[int] = ..., slot: _Optional[int] = ..., parent_slot: _Optional[int] = ..., commitment_level: _Optional[_Union[_geyser_pb2.CommitmentLevel, str]] = ..., blockchain_shard_id: _Optional[int] = ..., dead_error: _Optional[str] = ...) -> None: ... + +class BlockchainHistory(_message.Message): + __slots__ = ("events",) + EVENTS_FIELD_NUMBER: _ClassVar[int] + events: _containers.RepeatedCompositeFieldContainer[BlockchainEvent] + def __init__(self, events: _Optional[_Iterable[_Union[BlockchainEvent, _Mapping]]] = ...) -> None: ... 
+ +class JoinControlPlane(_message.Message): + __slots__ = ("consumer_group_name",) + CONSUMER_GROUP_NAME_FIELD_NUMBER: _ClassVar[int] + consumer_group_name: str + def __init__(self, consumer_group_name: _Optional[str] = ...) -> None: ... + +class ControlCommand(_message.Message): + __slots__ = ("initial_join", "commit_offset", "poll_hist", "ping") + INITIAL_JOIN_FIELD_NUMBER: _ClassVar[int] + COMMIT_OFFSET_FIELD_NUMBER: _ClassVar[int] + POLL_HIST_FIELD_NUMBER: _ClassVar[int] + PING_FIELD_NUMBER: _ClassVar[int] + initial_join: JoinControlPlane + commit_offset: CommitOffset + poll_hist: PollBlockchainHistory + ping: Ping + def __init__(self, initial_join: _Optional[_Union[JoinControlPlane, _Mapping]] = ..., commit_offset: _Optional[_Union[CommitOffset, _Mapping]] = ..., poll_hist: _Optional[_Union[PollBlockchainHistory, _Mapping]] = ..., ping: _Optional[_Union[Ping, _Mapping]] = ...) -> None: ... + +class ControlResponse(_message.Message): + __slots__ = ("init", "commit_offset", "poll_hist", "pong") + INIT_FIELD_NUMBER: _ClassVar[int] + COMMIT_OFFSET_FIELD_NUMBER: _ClassVar[int] + POLL_HIST_FIELD_NUMBER: _ClassVar[int] + PONG_FIELD_NUMBER: _ClassVar[int] + init: InitialConsumerGroupState + commit_offset: CommitOffsetResult + poll_hist: BlockchainHistory + pong: Pong + def __init__(self, init: _Optional[_Union[InitialConsumerGroupState, _Mapping]] = ..., commit_offset: _Optional[_Union[CommitOffsetResult, _Mapping]] = ..., poll_hist: _Optional[_Union[BlockchainHistory, _Mapping]] = ..., pong: _Optional[_Union[Pong, _Mapping]] = ...) -> None: ... + +class CommitOffsetResult(_message.Message): + __slots__ = ("offset", "shard_id") + OFFSET_FIELD_NUMBER: _ClassVar[int] + SHARD_ID_FIELD_NUMBER: _ClassVar[int] + offset: int + shard_id: int + def __init__(self, offset: _Optional[int] = ..., shard_id: _Optional[int] = ...) -> None: ... 
+ +class InitialConsumerGroupState(_message.Message): + __slots__ = ("blockchain_id", "last_committed_offsets") + class LastCommittedOffsetsEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: int + value: int + def __init__(self, key: _Optional[int] = ..., value: _Optional[int] = ...) -> None: ... + BLOCKCHAIN_ID_FIELD_NUMBER: _ClassVar[int] + LAST_COMMITTED_OFFSETS_FIELD_NUMBER: _ClassVar[int] + blockchain_id: bytes + last_committed_offsets: _containers.ScalarMap[int, int] + def __init__(self, blockchain_id: _Optional[bytes] = ..., last_committed_offsets: _Optional[_Mapping[int, int]] = ...) -> None: ... + +class CreateConsumerGroupResponse(_message.Message): + __slots__ = ("consumer_group_id",) + CONSUMER_GROUP_ID_FIELD_NUMBER: _ClassVar[int] + consumer_group_id: str + def __init__(self, consumer_group_id: _Optional[str] = ...) -> None: ... + +class CreateConsumerGroupRequest(_message.Message): + __slots__ = ("consumer_group_name", "initial_offset_policy") + CONSUMER_GROUP_NAME_FIELD_NUMBER: _ClassVar[int] + INITIAL_OFFSET_POLICY_FIELD_NUMBER: _ClassVar[int] + consumer_group_name: str + initial_offset_policy: InitialOffsetPolicy + def __init__(self, consumer_group_name: _Optional[str] = ..., initial_offset_policy: _Optional[_Union[InitialOffsetPolicy, str]] = ...) -> None: ... diff --git a/python/yellowstone-fumarole-client/yellowstone_api/fumarole_v2_pb2_grpc.py b/python/yellowstone-fumarole-client/yellowstone_api/fumarole_v2_pb2_grpc.py new file mode 100644 index 0000000..3197d83 --- /dev/null +++ b/python/yellowstone-fumarole-client/yellowstone_api/fumarole_v2_pb2_grpc.py @@ -0,0 +1,400 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc +import warnings + +import yellowstone_api.fumarole_v2_pb2 as fumarole__v2__pb2 + +GRPC_GENERATED_VERSION = '1.71.0' +GRPC_VERSION = grpc.__version__ +_version_not_supported = False + +try: + from grpc._utilities import first_version_is_lower + _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) +except ImportError: + _version_not_supported = True + +if _version_not_supported: + raise RuntimeError( + f'The grpc package installed is at version {GRPC_VERSION},' + + f' but the generated code in fumarole_v2_pb2_grpc.py depends on' + + f' grpcio>={GRPC_GENERATED_VERSION}.' + + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' + ) + + +class FumaroleStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.GetConsumerGroupInfo = channel.unary_unary( + '/fumarole_v2.Fumarole/GetConsumerGroupInfo', + request_serializer=fumarole__v2__pb2.GetConsumerGroupInfoRequest.SerializeToString, + response_deserializer=fumarole__v2__pb2.ConsumerGroupInfo.FromString, + _registered_method=True) + self.ListConsumerGroups = channel.unary_unary( + '/fumarole_v2.Fumarole/ListConsumerGroups', + request_serializer=fumarole__v2__pb2.ListConsumerGroupsRequest.SerializeToString, + response_deserializer=fumarole__v2__pb2.ListConsumerGroupsResponse.FromString, + _registered_method=True) + self.DeleteConsumerGroup = channel.unary_unary( + '/fumarole_v2.Fumarole/DeleteConsumerGroup', + request_serializer=fumarole__v2__pb2.DeleteConsumerGroupRequest.SerializeToString, + response_deserializer=fumarole__v2__pb2.DeleteConsumerGroupResponse.FromString, + _registered_method=True) + self.CreateConsumerGroup = channel.unary_unary( + '/fumarole_v2.Fumarole/CreateConsumerGroup', + request_serializer=fumarole__v2__pb2.CreateConsumerGroupRequest.SerializeToString, + response_deserializer=fumarole__v2__pb2.CreateConsumerGroupResponse.FromString, + _registered_method=True) + self.DownloadBlock = channel.unary_stream( + '/fumarole_v2.Fumarole/DownloadBlock', + request_serializer=fumarole__v2__pb2.DownloadBlockShard.SerializeToString, + response_deserializer=fumarole__v2__pb2.DataResponse.FromString, + _registered_method=True) + self.SubscribeData = channel.stream_stream( + '/fumarole_v2.Fumarole/SubscribeData', + request_serializer=fumarole__v2__pb2.DataCommand.SerializeToString, + response_deserializer=fumarole__v2__pb2.DataResponse.FromString, + _registered_method=True) + self.Subscribe = channel.stream_stream( + '/fumarole_v2.Fumarole/Subscribe', + request_serializer=fumarole__v2__pb2.ControlCommand.SerializeToString, + response_deserializer=fumarole__v2__pb2.ControlResponse.FromString, + _registered_method=True) + self.Version = channel.unary_unary( + '/fumarole_v2.Fumarole/Version', + 
request_serializer=fumarole__v2__pb2.VersionRequest.SerializeToString, + response_deserializer=fumarole__v2__pb2.VersionResponse.FromString, + _registered_method=True) + + +class FumaroleServicer(object): + """Missing associated documentation comment in .proto file.""" + + def GetConsumerGroupInfo(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListConsumerGroups(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteConsumerGroup(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateConsumerGroup(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DownloadBlock(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def SubscribeData(self, request_iterator, context): + """Represents subscription to the data plane + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Subscribe(self, request_iterator, context): + """Represents subscription to the control plane + """ + 
context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Version(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_FumaroleServicer_to_server(servicer, server): + rpc_method_handlers = { + 'GetConsumerGroupInfo': grpc.unary_unary_rpc_method_handler( + servicer.GetConsumerGroupInfo, + request_deserializer=fumarole__v2__pb2.GetConsumerGroupInfoRequest.FromString, + response_serializer=fumarole__v2__pb2.ConsumerGroupInfo.SerializeToString, + ), + 'ListConsumerGroups': grpc.unary_unary_rpc_method_handler( + servicer.ListConsumerGroups, + request_deserializer=fumarole__v2__pb2.ListConsumerGroupsRequest.FromString, + response_serializer=fumarole__v2__pb2.ListConsumerGroupsResponse.SerializeToString, + ), + 'DeleteConsumerGroup': grpc.unary_unary_rpc_method_handler( + servicer.DeleteConsumerGroup, + request_deserializer=fumarole__v2__pb2.DeleteConsumerGroupRequest.FromString, + response_serializer=fumarole__v2__pb2.DeleteConsumerGroupResponse.SerializeToString, + ), + 'CreateConsumerGroup': grpc.unary_unary_rpc_method_handler( + servicer.CreateConsumerGroup, + request_deserializer=fumarole__v2__pb2.CreateConsumerGroupRequest.FromString, + response_serializer=fumarole__v2__pb2.CreateConsumerGroupResponse.SerializeToString, + ), + 'DownloadBlock': grpc.unary_stream_rpc_method_handler( + servicer.DownloadBlock, + request_deserializer=fumarole__v2__pb2.DownloadBlockShard.FromString, + response_serializer=fumarole__v2__pb2.DataResponse.SerializeToString, + ), + 'SubscribeData': grpc.stream_stream_rpc_method_handler( + servicer.SubscribeData, + request_deserializer=fumarole__v2__pb2.DataCommand.FromString, + 
response_serializer=fumarole__v2__pb2.DataResponse.SerializeToString, + ), + 'Subscribe': grpc.stream_stream_rpc_method_handler( + servicer.Subscribe, + request_deserializer=fumarole__v2__pb2.ControlCommand.FromString, + response_serializer=fumarole__v2__pb2.ControlResponse.SerializeToString, + ), + 'Version': grpc.unary_unary_rpc_method_handler( + servicer.Version, + request_deserializer=fumarole__v2__pb2.VersionRequest.FromString, + response_serializer=fumarole__v2__pb2.VersionResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'fumarole_v2.Fumarole', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('fumarole_v2.Fumarole', rpc_method_handlers) + + + # This class is part of an EXPERIMENTAL API. +class Fumarole(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def GetConsumerGroupInfo(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/fumarole_v2.Fumarole/GetConsumerGroupInfo', + fumarole__v2__pb2.GetConsumerGroupInfoRequest.SerializeToString, + fumarole__v2__pb2.ConsumerGroupInfo.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def ListConsumerGroups(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/fumarole_v2.Fumarole/ListConsumerGroups', + fumarole__v2__pb2.ListConsumerGroupsRequest.SerializeToString, + fumarole__v2__pb2.ListConsumerGroupsResponse.FromString, + options, + channel_credentials, + 
insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def DeleteConsumerGroup(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/fumarole_v2.Fumarole/DeleteConsumerGroup', + fumarole__v2__pb2.DeleteConsumerGroupRequest.SerializeToString, + fumarole__v2__pb2.DeleteConsumerGroupResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def CreateConsumerGroup(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/fumarole_v2.Fumarole/CreateConsumerGroup', + fumarole__v2__pb2.CreateConsumerGroupRequest.SerializeToString, + fumarole__v2__pb2.CreateConsumerGroupResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def DownloadBlock(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_stream( + request, + target, + '/fumarole_v2.Fumarole/DownloadBlock', + fumarole__v2__pb2.DownloadBlockShard.SerializeToString, + fumarole__v2__pb2.DataResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def SubscribeData(request_iterator, + target, + options=(), + 
channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.stream_stream( + request_iterator, + target, + '/fumarole_v2.Fumarole/SubscribeData', + fumarole__v2__pb2.DataCommand.SerializeToString, + fumarole__v2__pb2.DataResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def Subscribe(request_iterator, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.stream_stream( + request_iterator, + target, + '/fumarole_v2.Fumarole/Subscribe', + fumarole__v2__pb2.ControlCommand.SerializeToString, + fumarole__v2__pb2.ControlResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def Version(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/fumarole_v2.Fumarole/Version', + fumarole__v2__pb2.VersionRequest.SerializeToString, + fumarole__v2__pb2.VersionResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) diff --git a/python/yellowstone-fumarole-client/yellowstone_api/geyser_pb2.py b/python/yellowstone-fumarole-client/yellowstone_api/geyser_pb2.py new file mode 100644 index 0000000..f291e70 --- /dev/null +++ b/python/yellowstone-fumarole-client/yellowstone_api/geyser_pb2.py @@ -0,0 +1,144 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol 
buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE +# source: geyser.proto +# Protobuf Python Version: 5.29.0 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 5, + 29, + 0, + '', + 'geyser.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +import yellowstone_api.solana_storage_pb2 as solana__storage__pb2 + +from yellowstone_api.solana_storage_pb2 import * + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cgeyser.proto\x12\x06geyser\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x14solana-storage.proto\"\x9c\n\n\x10SubscribeRequest\x12\x38\n\x08\x61\x63\x63ounts\x18\x01 \x03(\x0b\x32&.geyser.SubscribeRequest.AccountsEntry\x12\x32\n\x05slots\x18\x02 \x03(\x0b\x32#.geyser.SubscribeRequest.SlotsEntry\x12@\n\x0ctransactions\x18\x03 \x03(\x0b\x32*.geyser.SubscribeRequest.TransactionsEntry\x12M\n\x13transactions_status\x18\n \x03(\x0b\x32\x30.geyser.SubscribeRequest.TransactionsStatusEntry\x12\x34\n\x06\x62locks\x18\x04 \x03(\x0b\x32$.geyser.SubscribeRequest.BlocksEntry\x12=\n\x0b\x62locks_meta\x18\x05 \x03(\x0b\x32(.geyser.SubscribeRequest.BlocksMetaEntry\x12\x32\n\x05\x65ntry\x18\x08 \x03(\x0b\x32#.geyser.SubscribeRequest.EntryEntry\x12\x30\n\ncommitment\x18\x06 \x01(\x0e\x32\x17.geyser.CommitmentLevelH\x00\x88\x01\x01\x12\x46\n\x13\x61\x63\x63ounts_data_slice\x18\x07 \x03(\x0b\x32).geyser.SubscribeRequestAccountsDataSlice\x12/\n\x04ping\x18\t \x01(\x0b\x32\x1c.geyser.SubscribeRequestPingH\x01\x88\x01\x01\x12\x16\n\tfrom_slot\x18\x0b 
\x01(\x04H\x02\x88\x01\x01\x1aW\n\rAccountsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.geyser.SubscribeRequestFilterAccounts:\x02\x38\x01\x1aQ\n\nSlotsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x32\n\x05value\x18\x02 \x01(\x0b\x32#.geyser.SubscribeRequestFilterSlots:\x02\x38\x01\x1a_\n\x11TransactionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x39\n\x05value\x18\x02 \x01(\x0b\x32*.geyser.SubscribeRequestFilterTransactions:\x02\x38\x01\x1a\x65\n\x17TransactionsStatusEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x39\n\x05value\x18\x02 \x01(\x0b\x32*.geyser.SubscribeRequestFilterTransactions:\x02\x38\x01\x1aS\n\x0b\x42locksEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x33\n\x05value\x18\x02 \x01(\x0b\x32$.geyser.SubscribeRequestFilterBlocks:\x02\x38\x01\x1a[\n\x0f\x42locksMetaEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x37\n\x05value\x18\x02 \x01(\x0b\x32(.geyser.SubscribeRequestFilterBlocksMeta:\x02\x38\x01\x1aQ\n\nEntryEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x32\n\x05value\x18\x02 \x01(\x0b\x32#.geyser.SubscribeRequestFilterEntry:\x02\x38\x01\x42\r\n\x0b_commitmentB\x07\n\x05_pingB\x0c\n\n_from_slot\"\xbf\x01\n\x1eSubscribeRequestFilterAccounts\x12\x0f\n\x07\x61\x63\x63ount\x18\x02 \x03(\t\x12\r\n\x05owner\x18\x03 \x03(\t\x12=\n\x07\x66ilters\x18\x04 \x03(\x0b\x32,.geyser.SubscribeRequestFilterAccountsFilter\x12#\n\x16nonempty_txn_signature\x18\x05 \x01(\x08H\x00\x88\x01\x01\x42\x19\n\x17_nonempty_txn_signature\"\xf3\x01\n$SubscribeRequestFilterAccountsFilter\x12\x44\n\x06memcmp\x18\x01 \x01(\x0b\x32\x32.geyser.SubscribeRequestFilterAccountsFilterMemcmpH\x00\x12\x12\n\x08\x64\x61tasize\x18\x02 \x01(\x04H\x00\x12\x1d\n\x13token_account_state\x18\x03 \x01(\x08H\x00\x12H\n\x08lamports\x18\x04 \x01(\x0b\x32\x34.geyser.SubscribeRequestFilterAccountsFilterLamportsH\x00\x42\x08\n\x06\x66ilter\"y\n*SubscribeRequestFilterAccountsFilterMemcmp\x12\x0e\n\x06offset\x18\x01 \x01(\x04\x12\x0f\n\x05\x62ytes\x18\x02 
\x01(\x0cH\x00\x12\x10\n\x06\x62\x61se58\x18\x03 \x01(\tH\x00\x12\x10\n\x06\x62\x61se64\x18\x04 \x01(\tH\x00\x42\x06\n\x04\x64\x61ta\"m\n,SubscribeRequestFilterAccountsFilterLamports\x12\x0c\n\x02\x65q\x18\x01 \x01(\x04H\x00\x12\x0c\n\x02ne\x18\x02 \x01(\x04H\x00\x12\x0c\n\x02lt\x18\x03 \x01(\x04H\x00\x12\x0c\n\x02gt\x18\x04 \x01(\x04H\x00\x42\x05\n\x03\x63mp\"\x8f\x01\n\x1bSubscribeRequestFilterSlots\x12!\n\x14\x66ilter_by_commitment\x18\x01 \x01(\x08H\x00\x88\x01\x01\x12\x1e\n\x11interslot_updates\x18\x02 \x01(\x08H\x01\x88\x01\x01\x42\x17\n\x15_filter_by_commitmentB\x14\n\x12_interslot_updates\"\xd2\x01\n\"SubscribeRequestFilterTransactions\x12\x11\n\x04vote\x18\x01 \x01(\x08H\x00\x88\x01\x01\x12\x13\n\x06\x66\x61iled\x18\x02 \x01(\x08H\x01\x88\x01\x01\x12\x16\n\tsignature\x18\x05 \x01(\tH\x02\x88\x01\x01\x12\x17\n\x0f\x61\x63\x63ount_include\x18\x03 \x03(\t\x12\x17\n\x0f\x61\x63\x63ount_exclude\x18\x04 \x03(\t\x12\x18\n\x10\x61\x63\x63ount_required\x18\x06 \x03(\tB\x07\n\x05_voteB\t\n\x07_failedB\x0c\n\n_signature\"\xd9\x01\n\x1cSubscribeRequestFilterBlocks\x12\x17\n\x0f\x61\x63\x63ount_include\x18\x01 \x03(\t\x12!\n\x14include_transactions\x18\x02 \x01(\x08H\x00\x88\x01\x01\x12\x1d\n\x10include_accounts\x18\x03 \x01(\x08H\x01\x88\x01\x01\x12\x1c\n\x0finclude_entries\x18\x04 \x01(\x08H\x02\x88\x01\x01\x42\x17\n\x15_include_transactionsB\x13\n\x11_include_accountsB\x12\n\x10_include_entries\"\"\n SubscribeRequestFilterBlocksMeta\"\x1d\n\x1bSubscribeRequestFilterEntry\"C\n!SubscribeRequestAccountsDataSlice\x12\x0e\n\x06offset\x18\x01 \x01(\x04\x12\x0e\n\x06length\x18\x02 \x01(\x04\"\"\n\x14SubscribeRequestPing\x12\n\n\x02id\x18\x01 \x01(\x05\"\xb5\x04\n\x0fSubscribeUpdate\x12\x0f\n\x07\x66ilters\x18\x01 \x03(\t\x12\x31\n\x07\x61\x63\x63ount\x18\x02 \x01(\x0b\x32\x1e.geyser.SubscribeUpdateAccountH\x00\x12+\n\x04slot\x18\x03 \x01(\x0b\x32\x1b.geyser.SubscribeUpdateSlotH\x00\x12\x39\n\x0btransaction\x18\x04 
\x01(\x0b\x32\".geyser.SubscribeUpdateTransactionH\x00\x12\x46\n\x12transaction_status\x18\n \x01(\x0b\x32(.geyser.SubscribeUpdateTransactionStatusH\x00\x12-\n\x05\x62lock\x18\x05 \x01(\x0b\x32\x1c.geyser.SubscribeUpdateBlockH\x00\x12+\n\x04ping\x18\x06 \x01(\x0b\x32\x1b.geyser.SubscribeUpdatePingH\x00\x12+\n\x04pong\x18\t \x01(\x0b\x32\x1b.geyser.SubscribeUpdatePongH\x00\x12\x36\n\nblock_meta\x18\x07 \x01(\x0b\x32 .geyser.SubscribeUpdateBlockMetaH\x00\x12-\n\x05\x65ntry\x18\x08 \x01(\x0b\x32\x1c.geyser.SubscribeUpdateEntryH\x00\x12.\n\ncreated_at\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x0e\n\x0cupdate_oneof\"o\n\x16SubscribeUpdateAccount\x12\x33\n\x07\x61\x63\x63ount\x18\x01 \x01(\x0b\x32\".geyser.SubscribeUpdateAccountInfo\x12\x0c\n\x04slot\x18\x02 \x01(\x04\x12\x12\n\nis_startup\x18\x03 \x01(\x08\"\xc8\x01\n\x1aSubscribeUpdateAccountInfo\x12\x0e\n\x06pubkey\x18\x01 \x01(\x0c\x12\x10\n\x08lamports\x18\x02 \x01(\x04\x12\r\n\x05owner\x18\x03 \x01(\x0c\x12\x12\n\nexecutable\x18\x04 \x01(\x08\x12\x12\n\nrent_epoch\x18\x05 \x01(\x04\x12\x0c\n\x04\x64\x61ta\x18\x06 \x01(\x0c\x12\x15\n\rwrite_version\x18\x07 \x01(\x04\x12\x1a\n\rtxn_signature\x18\x08 \x01(\x0cH\x00\x88\x01\x01\x42\x10\n\x0e_txn_signature\"\x8f\x01\n\x13SubscribeUpdateSlot\x12\x0c\n\x04slot\x18\x01 \x01(\x04\x12\x13\n\x06parent\x18\x02 \x01(\x04H\x00\x88\x01\x01\x12\"\n\x06status\x18\x03 \x01(\x0e\x32\x12.geyser.SlotStatus\x12\x17\n\ndead_error\x18\x04 \x01(\tH\x01\x88\x01\x01\x42\t\n\x07_parentB\r\n\x0b_dead_error\"g\n\x1aSubscribeUpdateTransaction\x12;\n\x0btransaction\x18\x01 \x01(\x0b\x32&.geyser.SubscribeUpdateTransactionInfo\x12\x0c\n\x04slot\x18\x02 \x01(\x04\"\xd8\x01\n\x1eSubscribeUpdateTransactionInfo\x12\x11\n\tsignature\x18\x01 \x01(\x0c\x12\x0f\n\x07is_vote\x18\x02 \x01(\x08\x12?\n\x0btransaction\x18\x03 \x01(\x0b\x32*.solana.storage.ConfirmedBlock.Transaction\x12\x42\n\x04meta\x18\x04 \x01(\x0b\x32\x34.solana.storage.ConfirmedBlock.TransactionStatusMeta\x12\r\n\x05index\x18\x05 
\x01(\x04\"\xa1\x01\n SubscribeUpdateTransactionStatus\x12\x0c\n\x04slot\x18\x01 \x01(\x04\x12\x11\n\tsignature\x18\x02 \x01(\x0c\x12\x0f\n\x07is_vote\x18\x03 \x01(\x08\x12\r\n\x05index\x18\x04 \x01(\x04\x12<\n\x03\x65rr\x18\x05 \x01(\x0b\x32/.solana.storage.ConfirmedBlock.TransactionError\"\xa0\x04\n\x14SubscribeUpdateBlock\x12\x0c\n\x04slot\x18\x01 \x01(\x04\x12\x11\n\tblockhash\x18\x02 \x01(\t\x12\x37\n\x07rewards\x18\x03 \x01(\x0b\x32&.solana.storage.ConfirmedBlock.Rewards\x12@\n\nblock_time\x18\x04 \x01(\x0b\x32,.solana.storage.ConfirmedBlock.UnixTimestamp\x12@\n\x0c\x62lock_height\x18\x05 \x01(\x0b\x32*.solana.storage.ConfirmedBlock.BlockHeight\x12\x13\n\x0bparent_slot\x18\x07 \x01(\x04\x12\x18\n\x10parent_blockhash\x18\x08 \x01(\t\x12\"\n\x1a\x65xecuted_transaction_count\x18\t \x01(\x04\x12<\n\x0ctransactions\x18\x06 \x03(\x0b\x32&.geyser.SubscribeUpdateTransactionInfo\x12\x1d\n\x15updated_account_count\x18\n \x01(\x04\x12\x34\n\x08\x61\x63\x63ounts\x18\x0b \x03(\x0b\x32\".geyser.SubscribeUpdateAccountInfo\x12\x15\n\rentries_count\x18\x0c \x01(\x04\x12-\n\x07\x65ntries\x18\r \x03(\x0b\x32\x1c.geyser.SubscribeUpdateEntry\"\xe2\x02\n\x18SubscribeUpdateBlockMeta\x12\x0c\n\x04slot\x18\x01 \x01(\x04\x12\x11\n\tblockhash\x18\x02 \x01(\t\x12\x37\n\x07rewards\x18\x03 \x01(\x0b\x32&.solana.storage.ConfirmedBlock.Rewards\x12@\n\nblock_time\x18\x04 \x01(\x0b\x32,.solana.storage.ConfirmedBlock.UnixTimestamp\x12@\n\x0c\x62lock_height\x18\x05 \x01(\x0b\x32*.solana.storage.ConfirmedBlock.BlockHeight\x12\x13\n\x0bparent_slot\x18\x06 \x01(\x04\x12\x18\n\x10parent_blockhash\x18\x07 \x01(\t\x12\"\n\x1a\x65xecuted_transaction_count\x18\x08 \x01(\x04\x12\x15\n\rentries_count\x18\t \x01(\x04\"\x9d\x01\n\x14SubscribeUpdateEntry\x12\x0c\n\x04slot\x18\x01 \x01(\x04\x12\r\n\x05index\x18\x02 \x01(\x04\x12\x12\n\nnum_hashes\x18\x03 \x01(\x04\x12\x0c\n\x04hash\x18\x04 \x01(\x0c\x12\"\n\x1a\x65xecuted_transaction_count\x18\x05 \x01(\x04\x12\"\n\x1astarting_transaction_index\x18\x06 
\x01(\x04\"\x15\n\x13SubscribeUpdatePing\"!\n\x13SubscribeUpdatePong\x12\n\n\x02id\x18\x01 \x01(\x05\"\x1c\n\x0bPingRequest\x12\r\n\x05\x63ount\x18\x01 \x01(\x05\"\x1d\n\x0cPongResponse\x12\r\n\x05\x63ount\x18\x01 \x01(\x05\"\\\n\x19GetLatestBlockhashRequest\x12\x30\n\ncommitment\x18\x01 \x01(\x0e\x32\x17.geyser.CommitmentLevelH\x00\x88\x01\x01\x42\r\n\x0b_commitment\"^\n\x1aGetLatestBlockhashResponse\x12\x0c\n\x04slot\x18\x01 \x01(\x04\x12\x11\n\tblockhash\x18\x02 \x01(\t\x12\x1f\n\x17last_valid_block_height\x18\x03 \x01(\x04\"X\n\x15GetBlockHeightRequest\x12\x30\n\ncommitment\x18\x01 \x01(\x0e\x32\x17.geyser.CommitmentLevelH\x00\x88\x01\x01\x42\r\n\x0b_commitment\".\n\x16GetBlockHeightResponse\x12\x14\n\x0c\x62lock_height\x18\x01 \x01(\x04\"Q\n\x0eGetSlotRequest\x12\x30\n\ncommitment\x18\x01 \x01(\x0e\x32\x17.geyser.CommitmentLevelH\x00\x88\x01\x01\x42\r\n\x0b_commitment\"\x1f\n\x0fGetSlotResponse\x12\x0c\n\x04slot\x18\x01 \x01(\x04\"\x13\n\x11GetVersionRequest\"%\n\x12GetVersionResponse\x12\x0f\n\x07version\x18\x01 \x01(\t\"m\n\x17IsBlockhashValidRequest\x12\x11\n\tblockhash\x18\x01 \x01(\t\x12\x30\n\ncommitment\x18\x02 \x01(\x0e\x32\x17.geyser.CommitmentLevelH\x00\x88\x01\x01\x42\r\n\x0b_commitment\"7\n\x18IsBlockhashValidResponse\x12\x0c\n\x04slot\x18\x01 \x01(\x04\x12\r\n\x05valid\x18\x02 
\x01(\x08*>\n\x0f\x43ommitmentLevel\x12\r\n\tPROCESSED\x10\x00\x12\r\n\tCONFIRMED\x10\x01\x12\r\n\tFINALIZED\x10\x02*\xa1\x01\n\nSlotStatus\x12\x12\n\x0eSLOT_PROCESSED\x10\x00\x12\x12\n\x0eSLOT_CONFIRMED\x10\x01\x12\x12\n\x0eSLOT_FINALIZED\x10\x02\x12\x1d\n\x19SLOT_FIRST_SHRED_RECEIVED\x10\x03\x12\x12\n\x0eSLOT_COMPLETED\x10\x04\x12\x15\n\x11SLOT_CREATED_BANK\x10\x05\x12\r\n\tSLOT_DEAD\x10\x06\x32\x93\x04\n\x06Geyser\x12\x44\n\tSubscribe\x12\x18.geyser.SubscribeRequest\x1a\x17.geyser.SubscribeUpdate\"\x00(\x01\x30\x01\x12\x33\n\x04Ping\x12\x13.geyser.PingRequest\x1a\x14.geyser.PongResponse\"\x00\x12]\n\x12GetLatestBlockhash\x12!.geyser.GetLatestBlockhashRequest\x1a\".geyser.GetLatestBlockhashResponse\"\x00\x12Q\n\x0eGetBlockHeight\x12\x1d.geyser.GetBlockHeightRequest\x1a\x1e.geyser.GetBlockHeightResponse\"\x00\x12<\n\x07GetSlot\x12\x16.geyser.GetSlotRequest\x1a\x17.geyser.GetSlotResponse\"\x00\x12W\n\x10IsBlockhashValid\x12\x1f.geyser.IsBlockhashValidRequest\x1a .geyser.IsBlockhashValidResponse\"\x00\x12\x45\n\nGetVersion\x12\x19.geyser.GetVersionRequest\x1a\x1a.geyser.GetVersionResponse\"\x00\x42;Z9github.com/rpcpool/yellowstone-grpc/examples/golang/protoP\x01\x62\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'geyser_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'Z9github.com/rpcpool/yellowstone-grpc/examples/golang/proto' + _globals['_SUBSCRIBEREQUEST_ACCOUNTSENTRY']._loaded_options = None + _globals['_SUBSCRIBEREQUEST_ACCOUNTSENTRY']._serialized_options = b'8\001' + _globals['_SUBSCRIBEREQUEST_SLOTSENTRY']._loaded_options = None + _globals['_SUBSCRIBEREQUEST_SLOTSENTRY']._serialized_options = b'8\001' + _globals['_SUBSCRIBEREQUEST_TRANSACTIONSENTRY']._loaded_options = None + _globals['_SUBSCRIBEREQUEST_TRANSACTIONSENTRY']._serialized_options = b'8\001' + 
_globals['_SUBSCRIBEREQUEST_TRANSACTIONSSTATUSENTRY']._loaded_options = None + _globals['_SUBSCRIBEREQUEST_TRANSACTIONSSTATUSENTRY']._serialized_options = b'8\001' + _globals['_SUBSCRIBEREQUEST_BLOCKSENTRY']._loaded_options = None + _globals['_SUBSCRIBEREQUEST_BLOCKSENTRY']._serialized_options = b'8\001' + _globals['_SUBSCRIBEREQUEST_BLOCKSMETAENTRY']._loaded_options = None + _globals['_SUBSCRIBEREQUEST_BLOCKSMETAENTRY']._serialized_options = b'8\001' + _globals['_SUBSCRIBEREQUEST_ENTRYENTRY']._loaded_options = None + _globals['_SUBSCRIBEREQUEST_ENTRYENTRY']._serialized_options = b'8\001' + _globals['_COMMITMENTLEVEL']._serialized_start=6188 + _globals['_COMMITMENTLEVEL']._serialized_end=6250 + _globals['_SLOTSTATUS']._serialized_start=6253 + _globals['_SLOTSTATUS']._serialized_end=6414 + _globals['_SUBSCRIBEREQUEST']._serialized_start=80 + _globals['_SUBSCRIBEREQUEST']._serialized_end=1388 + _globals['_SUBSCRIBEREQUEST_ACCOUNTSENTRY']._serialized_start=719 + _globals['_SUBSCRIBEREQUEST_ACCOUNTSENTRY']._serialized_end=806 + _globals['_SUBSCRIBEREQUEST_SLOTSENTRY']._serialized_start=808 + _globals['_SUBSCRIBEREQUEST_SLOTSENTRY']._serialized_end=889 + _globals['_SUBSCRIBEREQUEST_TRANSACTIONSENTRY']._serialized_start=891 + _globals['_SUBSCRIBEREQUEST_TRANSACTIONSENTRY']._serialized_end=986 + _globals['_SUBSCRIBEREQUEST_TRANSACTIONSSTATUSENTRY']._serialized_start=988 + _globals['_SUBSCRIBEREQUEST_TRANSACTIONSSTATUSENTRY']._serialized_end=1089 + _globals['_SUBSCRIBEREQUEST_BLOCKSENTRY']._serialized_start=1091 + _globals['_SUBSCRIBEREQUEST_BLOCKSENTRY']._serialized_end=1174 + _globals['_SUBSCRIBEREQUEST_BLOCKSMETAENTRY']._serialized_start=1176 + _globals['_SUBSCRIBEREQUEST_BLOCKSMETAENTRY']._serialized_end=1267 + _globals['_SUBSCRIBEREQUEST_ENTRYENTRY']._serialized_start=1269 + _globals['_SUBSCRIBEREQUEST_ENTRYENTRY']._serialized_end=1350 + _globals['_SUBSCRIBEREQUESTFILTERACCOUNTS']._serialized_start=1391 + 
_globals['_SUBSCRIBEREQUESTFILTERACCOUNTS']._serialized_end=1582 + _globals['_SUBSCRIBEREQUESTFILTERACCOUNTSFILTER']._serialized_start=1585 + _globals['_SUBSCRIBEREQUESTFILTERACCOUNTSFILTER']._serialized_end=1828 + _globals['_SUBSCRIBEREQUESTFILTERACCOUNTSFILTERMEMCMP']._serialized_start=1830 + _globals['_SUBSCRIBEREQUESTFILTERACCOUNTSFILTERMEMCMP']._serialized_end=1951 + _globals['_SUBSCRIBEREQUESTFILTERACCOUNTSFILTERLAMPORTS']._serialized_start=1953 + _globals['_SUBSCRIBEREQUESTFILTERACCOUNTSFILTERLAMPORTS']._serialized_end=2062 + _globals['_SUBSCRIBEREQUESTFILTERSLOTS']._serialized_start=2065 + _globals['_SUBSCRIBEREQUESTFILTERSLOTS']._serialized_end=2208 + _globals['_SUBSCRIBEREQUESTFILTERTRANSACTIONS']._serialized_start=2211 + _globals['_SUBSCRIBEREQUESTFILTERTRANSACTIONS']._serialized_end=2421 + _globals['_SUBSCRIBEREQUESTFILTERBLOCKS']._serialized_start=2424 + _globals['_SUBSCRIBEREQUESTFILTERBLOCKS']._serialized_end=2641 + _globals['_SUBSCRIBEREQUESTFILTERBLOCKSMETA']._serialized_start=2643 + _globals['_SUBSCRIBEREQUESTFILTERBLOCKSMETA']._serialized_end=2677 + _globals['_SUBSCRIBEREQUESTFILTERENTRY']._serialized_start=2679 + _globals['_SUBSCRIBEREQUESTFILTERENTRY']._serialized_end=2708 + _globals['_SUBSCRIBEREQUESTACCOUNTSDATASLICE']._serialized_start=2710 + _globals['_SUBSCRIBEREQUESTACCOUNTSDATASLICE']._serialized_end=2777 + _globals['_SUBSCRIBEREQUESTPING']._serialized_start=2779 + _globals['_SUBSCRIBEREQUESTPING']._serialized_end=2813 + _globals['_SUBSCRIBEUPDATE']._serialized_start=2816 + _globals['_SUBSCRIBEUPDATE']._serialized_end=3381 + _globals['_SUBSCRIBEUPDATEACCOUNT']._serialized_start=3383 + _globals['_SUBSCRIBEUPDATEACCOUNT']._serialized_end=3494 + _globals['_SUBSCRIBEUPDATEACCOUNTINFO']._serialized_start=3497 + _globals['_SUBSCRIBEUPDATEACCOUNTINFO']._serialized_end=3697 + _globals['_SUBSCRIBEUPDATESLOT']._serialized_start=3700 + _globals['_SUBSCRIBEUPDATESLOT']._serialized_end=3843 + 
_globals['_SUBSCRIBEUPDATETRANSACTION']._serialized_start=3845 + _globals['_SUBSCRIBEUPDATETRANSACTION']._serialized_end=3948 + _globals['_SUBSCRIBEUPDATETRANSACTIONINFO']._serialized_start=3951 + _globals['_SUBSCRIBEUPDATETRANSACTIONINFO']._serialized_end=4167 + _globals['_SUBSCRIBEUPDATETRANSACTIONSTATUS']._serialized_start=4170 + _globals['_SUBSCRIBEUPDATETRANSACTIONSTATUS']._serialized_end=4331 + _globals['_SUBSCRIBEUPDATEBLOCK']._serialized_start=4334 + _globals['_SUBSCRIBEUPDATEBLOCK']._serialized_end=4878 + _globals['_SUBSCRIBEUPDATEBLOCKMETA']._serialized_start=4881 + _globals['_SUBSCRIBEUPDATEBLOCKMETA']._serialized_end=5235 + _globals['_SUBSCRIBEUPDATEENTRY']._serialized_start=5238 + _globals['_SUBSCRIBEUPDATEENTRY']._serialized_end=5395 + _globals['_SUBSCRIBEUPDATEPING']._serialized_start=5397 + _globals['_SUBSCRIBEUPDATEPING']._serialized_end=5418 + _globals['_SUBSCRIBEUPDATEPONG']._serialized_start=5420 + _globals['_SUBSCRIBEUPDATEPONG']._serialized_end=5453 + _globals['_PINGREQUEST']._serialized_start=5455 + _globals['_PINGREQUEST']._serialized_end=5483 + _globals['_PONGRESPONSE']._serialized_start=5485 + _globals['_PONGRESPONSE']._serialized_end=5514 + _globals['_GETLATESTBLOCKHASHREQUEST']._serialized_start=5516 + _globals['_GETLATESTBLOCKHASHREQUEST']._serialized_end=5608 + _globals['_GETLATESTBLOCKHASHRESPONSE']._serialized_start=5610 + _globals['_GETLATESTBLOCKHASHRESPONSE']._serialized_end=5704 + _globals['_GETBLOCKHEIGHTREQUEST']._serialized_start=5706 + _globals['_GETBLOCKHEIGHTREQUEST']._serialized_end=5794 + _globals['_GETBLOCKHEIGHTRESPONSE']._serialized_start=5796 + _globals['_GETBLOCKHEIGHTRESPONSE']._serialized_end=5842 + _globals['_GETSLOTREQUEST']._serialized_start=5844 + _globals['_GETSLOTREQUEST']._serialized_end=5925 + _globals['_GETSLOTRESPONSE']._serialized_start=5927 + _globals['_GETSLOTRESPONSE']._serialized_end=5958 + _globals['_GETVERSIONREQUEST']._serialized_start=5960 + _globals['_GETVERSIONREQUEST']._serialized_end=5979 + 
_globals['_GETVERSIONRESPONSE']._serialized_start=5981 + _globals['_GETVERSIONRESPONSE']._serialized_end=6018 + _globals['_ISBLOCKHASHVALIDREQUEST']._serialized_start=6020 + _globals['_ISBLOCKHASHVALIDREQUEST']._serialized_end=6129 + _globals['_ISBLOCKHASHVALIDRESPONSE']._serialized_start=6131 + _globals['_ISBLOCKHASHVALIDRESPONSE']._serialized_end=6186 + _globals['_GEYSER']._serialized_start=6417 + _globals['_GEYSER']._serialized_end=6948 +# @@protoc_insertion_point(module_scope) diff --git a/python/yellowstone-fumarole-client/yellowstone_api/geyser_pb2.pyi b/python/yellowstone-fumarole-client/yellowstone_api/geyser_pb2.pyi new file mode 100644 index 0000000..09417be --- /dev/null +++ b/python/yellowstone-fumarole-client/yellowstone_api/geyser_pb2.pyi @@ -0,0 +1,501 @@ +from google.protobuf import timestamp_pb2 as _timestamp_pb2 +import yellowstone_api.solana_storage_pb2 as _solana_storage_pb2 +from google.protobuf.internal import containers as _containers +from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union +from yellowstone_api.solana_storage_pb2 import ConfirmedBlock as ConfirmedBlock +from yellowstone_api.solana_storage_pb2 import ConfirmedTransaction as ConfirmedTransaction +from yellowstone_api.solana_storage_pb2 import Transaction as Transaction +from yellowstone_api.solana_storage_pb2 import Message as Message +from yellowstone_api.solana_storage_pb2 import MessageHeader as MessageHeader +from yellowstone_api.solana_storage_pb2 import MessageAddressTableLookup as MessageAddressTableLookup +from yellowstone_api.solana_storage_pb2 import TransactionStatusMeta as TransactionStatusMeta +from yellowstone_api.solana_storage_pb2 import TransactionError as TransactionError +from yellowstone_api.solana_storage_pb2 
import InnerInstructions as InnerInstructions +from yellowstone_api.solana_storage_pb2 import InnerInstruction as InnerInstruction +from yellowstone_api.solana_storage_pb2 import CompiledInstruction as CompiledInstruction +from yellowstone_api.solana_storage_pb2 import TokenBalance as TokenBalance +from yellowstone_api.solana_storage_pb2 import UiTokenAmount as UiTokenAmount +from yellowstone_api.solana_storage_pb2 import ReturnData as ReturnData +from yellowstone_api.solana_storage_pb2 import Reward as Reward +from yellowstone_api.solana_storage_pb2 import Rewards as Rewards +from yellowstone_api.solana_storage_pb2 import UnixTimestamp as UnixTimestamp +from yellowstone_api.solana_storage_pb2 import BlockHeight as BlockHeight +from yellowstone_api.solana_storage_pb2 import NumPartitions as NumPartitions +from yellowstone_api.solana_storage_pb2 import RewardType as RewardType + +DESCRIPTOR: _descriptor.FileDescriptor +Unspecified: _solana_storage_pb2.RewardType +Fee: _solana_storage_pb2.RewardType +Rent: _solana_storage_pb2.RewardType +Staking: _solana_storage_pb2.RewardType +Voting: _solana_storage_pb2.RewardType + +class CommitmentLevel(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + PROCESSED: _ClassVar[CommitmentLevel] + CONFIRMED: _ClassVar[CommitmentLevel] + FINALIZED: _ClassVar[CommitmentLevel] + +class SlotStatus(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + SLOT_PROCESSED: _ClassVar[SlotStatus] + SLOT_CONFIRMED: _ClassVar[SlotStatus] + SLOT_FINALIZED: _ClassVar[SlotStatus] + SLOT_FIRST_SHRED_RECEIVED: _ClassVar[SlotStatus] + SLOT_COMPLETED: _ClassVar[SlotStatus] + SLOT_CREATED_BANK: _ClassVar[SlotStatus] + SLOT_DEAD: _ClassVar[SlotStatus] +PROCESSED: CommitmentLevel +CONFIRMED: CommitmentLevel +FINALIZED: CommitmentLevel +SLOT_PROCESSED: SlotStatus +SLOT_CONFIRMED: SlotStatus +SLOT_FINALIZED: SlotStatus +SLOT_FIRST_SHRED_RECEIVED: SlotStatus +SLOT_COMPLETED: SlotStatus +SLOT_CREATED_BANK: SlotStatus 
+SLOT_DEAD: SlotStatus + +class SubscribeRequest(_message.Message): + __slots__ = ("accounts", "slots", "transactions", "transactions_status", "blocks", "blocks_meta", "entry", "commitment", "accounts_data_slice", "ping", "from_slot") + class AccountsEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: SubscribeRequestFilterAccounts + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[SubscribeRequestFilterAccounts, _Mapping]] = ...) -> None: ... + class SlotsEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: SubscribeRequestFilterSlots + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[SubscribeRequestFilterSlots, _Mapping]] = ...) -> None: ... + class TransactionsEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: SubscribeRequestFilterTransactions + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[SubscribeRequestFilterTransactions, _Mapping]] = ...) -> None: ... + class TransactionsStatusEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: SubscribeRequestFilterTransactions + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[SubscribeRequestFilterTransactions, _Mapping]] = ...) -> None: ... + class BlocksEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: SubscribeRequestFilterBlocks + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[SubscribeRequestFilterBlocks, _Mapping]] = ...) -> None: ... 
+ class BlocksMetaEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: SubscribeRequestFilterBlocksMeta + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[SubscribeRequestFilterBlocksMeta, _Mapping]] = ...) -> None: ... + class EntryEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: SubscribeRequestFilterEntry + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[SubscribeRequestFilterEntry, _Mapping]] = ...) -> None: ... + ACCOUNTS_FIELD_NUMBER: _ClassVar[int] + SLOTS_FIELD_NUMBER: _ClassVar[int] + TRANSACTIONS_FIELD_NUMBER: _ClassVar[int] + TRANSACTIONS_STATUS_FIELD_NUMBER: _ClassVar[int] + BLOCKS_FIELD_NUMBER: _ClassVar[int] + BLOCKS_META_FIELD_NUMBER: _ClassVar[int] + ENTRY_FIELD_NUMBER: _ClassVar[int] + COMMITMENT_FIELD_NUMBER: _ClassVar[int] + ACCOUNTS_DATA_SLICE_FIELD_NUMBER: _ClassVar[int] + PING_FIELD_NUMBER: _ClassVar[int] + FROM_SLOT_FIELD_NUMBER: _ClassVar[int] + accounts: _containers.MessageMap[str, SubscribeRequestFilterAccounts] + slots: _containers.MessageMap[str, SubscribeRequestFilterSlots] + transactions: _containers.MessageMap[str, SubscribeRequestFilterTransactions] + transactions_status: _containers.MessageMap[str, SubscribeRequestFilterTransactions] + blocks: _containers.MessageMap[str, SubscribeRequestFilterBlocks] + blocks_meta: _containers.MessageMap[str, SubscribeRequestFilterBlocksMeta] + entry: _containers.MessageMap[str, SubscribeRequestFilterEntry] + commitment: CommitmentLevel + accounts_data_slice: _containers.RepeatedCompositeFieldContainer[SubscribeRequestAccountsDataSlice] + ping: SubscribeRequestPing + from_slot: int + def __init__(self, accounts: _Optional[_Mapping[str, SubscribeRequestFilterAccounts]] = ..., slots: _Optional[_Mapping[str, SubscribeRequestFilterSlots]] = ..., transactions: 
_Optional[_Mapping[str, SubscribeRequestFilterTransactions]] = ..., transactions_status: _Optional[_Mapping[str, SubscribeRequestFilterTransactions]] = ..., blocks: _Optional[_Mapping[str, SubscribeRequestFilterBlocks]] = ..., blocks_meta: _Optional[_Mapping[str, SubscribeRequestFilterBlocksMeta]] = ..., entry: _Optional[_Mapping[str, SubscribeRequestFilterEntry]] = ..., commitment: _Optional[_Union[CommitmentLevel, str]] = ..., accounts_data_slice: _Optional[_Iterable[_Union[SubscribeRequestAccountsDataSlice, _Mapping]]] = ..., ping: _Optional[_Union[SubscribeRequestPing, _Mapping]] = ..., from_slot: _Optional[int] = ...) -> None: ... + +class SubscribeRequestFilterAccounts(_message.Message): + __slots__ = ("account", "owner", "filters", "nonempty_txn_signature") + ACCOUNT_FIELD_NUMBER: _ClassVar[int] + OWNER_FIELD_NUMBER: _ClassVar[int] + FILTERS_FIELD_NUMBER: _ClassVar[int] + NONEMPTY_TXN_SIGNATURE_FIELD_NUMBER: _ClassVar[int] + account: _containers.RepeatedScalarFieldContainer[str] + owner: _containers.RepeatedScalarFieldContainer[str] + filters: _containers.RepeatedCompositeFieldContainer[SubscribeRequestFilterAccountsFilter] + nonempty_txn_signature: bool + def __init__(self, account: _Optional[_Iterable[str]] = ..., owner: _Optional[_Iterable[str]] = ..., filters: _Optional[_Iterable[_Union[SubscribeRequestFilterAccountsFilter, _Mapping]]] = ..., nonempty_txn_signature: bool = ...) -> None: ... 
+ +class SubscribeRequestFilterAccountsFilter(_message.Message): + __slots__ = ("memcmp", "datasize", "token_account_state", "lamports") + MEMCMP_FIELD_NUMBER: _ClassVar[int] + DATASIZE_FIELD_NUMBER: _ClassVar[int] + TOKEN_ACCOUNT_STATE_FIELD_NUMBER: _ClassVar[int] + LAMPORTS_FIELD_NUMBER: _ClassVar[int] + memcmp: SubscribeRequestFilterAccountsFilterMemcmp + datasize: int + token_account_state: bool + lamports: SubscribeRequestFilterAccountsFilterLamports + def __init__(self, memcmp: _Optional[_Union[SubscribeRequestFilterAccountsFilterMemcmp, _Mapping]] = ..., datasize: _Optional[int] = ..., token_account_state: bool = ..., lamports: _Optional[_Union[SubscribeRequestFilterAccountsFilterLamports, _Mapping]] = ...) -> None: ... + +class SubscribeRequestFilterAccountsFilterMemcmp(_message.Message): + __slots__ = ("offset", "bytes", "base58", "base64") + OFFSET_FIELD_NUMBER: _ClassVar[int] + BYTES_FIELD_NUMBER: _ClassVar[int] + BASE58_FIELD_NUMBER: _ClassVar[int] + BASE64_FIELD_NUMBER: _ClassVar[int] + offset: int + bytes: bytes + base58: str + base64: str + def __init__(self, offset: _Optional[int] = ..., bytes: _Optional[bytes] = ..., base58: _Optional[str] = ..., base64: _Optional[str] = ...) -> None: ... + +class SubscribeRequestFilterAccountsFilterLamports(_message.Message): + __slots__ = ("eq", "ne", "lt", "gt") + EQ_FIELD_NUMBER: _ClassVar[int] + NE_FIELD_NUMBER: _ClassVar[int] + LT_FIELD_NUMBER: _ClassVar[int] + GT_FIELD_NUMBER: _ClassVar[int] + eq: int + ne: int + lt: int + gt: int + def __init__(self, eq: _Optional[int] = ..., ne: _Optional[int] = ..., lt: _Optional[int] = ..., gt: _Optional[int] = ...) -> None: ... 
+ +class SubscribeRequestFilterSlots(_message.Message): + __slots__ = ("filter_by_commitment", "interslot_updates") + FILTER_BY_COMMITMENT_FIELD_NUMBER: _ClassVar[int] + INTERSLOT_UPDATES_FIELD_NUMBER: _ClassVar[int] + filter_by_commitment: bool + interslot_updates: bool + def __init__(self, filter_by_commitment: bool = ..., interslot_updates: bool = ...) -> None: ... + +class SubscribeRequestFilterTransactions(_message.Message): + __slots__ = ("vote", "failed", "signature", "account_include", "account_exclude", "account_required") + VOTE_FIELD_NUMBER: _ClassVar[int] + FAILED_FIELD_NUMBER: _ClassVar[int] + SIGNATURE_FIELD_NUMBER: _ClassVar[int] + ACCOUNT_INCLUDE_FIELD_NUMBER: _ClassVar[int] + ACCOUNT_EXCLUDE_FIELD_NUMBER: _ClassVar[int] + ACCOUNT_REQUIRED_FIELD_NUMBER: _ClassVar[int] + vote: bool + failed: bool + signature: str + account_include: _containers.RepeatedScalarFieldContainer[str] + account_exclude: _containers.RepeatedScalarFieldContainer[str] + account_required: _containers.RepeatedScalarFieldContainer[str] + def __init__(self, vote: bool = ..., failed: bool = ..., signature: _Optional[str] = ..., account_include: _Optional[_Iterable[str]] = ..., account_exclude: _Optional[_Iterable[str]] = ..., account_required: _Optional[_Iterable[str]] = ...) -> None: ... + +class SubscribeRequestFilterBlocks(_message.Message): + __slots__ = ("account_include", "include_transactions", "include_accounts", "include_entries") + ACCOUNT_INCLUDE_FIELD_NUMBER: _ClassVar[int] + INCLUDE_TRANSACTIONS_FIELD_NUMBER: _ClassVar[int] + INCLUDE_ACCOUNTS_FIELD_NUMBER: _ClassVar[int] + INCLUDE_ENTRIES_FIELD_NUMBER: _ClassVar[int] + account_include: _containers.RepeatedScalarFieldContainer[str] + include_transactions: bool + include_accounts: bool + include_entries: bool + def __init__(self, account_include: _Optional[_Iterable[str]] = ..., include_transactions: bool = ..., include_accounts: bool = ..., include_entries: bool = ...) -> None: ... 
+ +class SubscribeRequestFilterBlocksMeta(_message.Message): + __slots__ = () + def __init__(self) -> None: ... + +class SubscribeRequestFilterEntry(_message.Message): + __slots__ = () + def __init__(self) -> None: ... + +class SubscribeRequestAccountsDataSlice(_message.Message): + __slots__ = ("offset", "length") + OFFSET_FIELD_NUMBER: _ClassVar[int] + LENGTH_FIELD_NUMBER: _ClassVar[int] + offset: int + length: int + def __init__(self, offset: _Optional[int] = ..., length: _Optional[int] = ...) -> None: ... + +class SubscribeRequestPing(_message.Message): + __slots__ = ("id",) + ID_FIELD_NUMBER: _ClassVar[int] + id: int + def __init__(self, id: _Optional[int] = ...) -> None: ... + +class SubscribeUpdate(_message.Message): + __slots__ = ("filters", "account", "slot", "transaction", "transaction_status", "block", "ping", "pong", "block_meta", "entry", "created_at") + FILTERS_FIELD_NUMBER: _ClassVar[int] + ACCOUNT_FIELD_NUMBER: _ClassVar[int] + SLOT_FIELD_NUMBER: _ClassVar[int] + TRANSACTION_FIELD_NUMBER: _ClassVar[int] + TRANSACTION_STATUS_FIELD_NUMBER: _ClassVar[int] + BLOCK_FIELD_NUMBER: _ClassVar[int] + PING_FIELD_NUMBER: _ClassVar[int] + PONG_FIELD_NUMBER: _ClassVar[int] + BLOCK_META_FIELD_NUMBER: _ClassVar[int] + ENTRY_FIELD_NUMBER: _ClassVar[int] + CREATED_AT_FIELD_NUMBER: _ClassVar[int] + filters: _containers.RepeatedScalarFieldContainer[str] + account: SubscribeUpdateAccount + slot: SubscribeUpdateSlot + transaction: SubscribeUpdateTransaction + transaction_status: SubscribeUpdateTransactionStatus + block: SubscribeUpdateBlock + ping: SubscribeUpdatePing + pong: SubscribeUpdatePong + block_meta: SubscribeUpdateBlockMeta + entry: SubscribeUpdateEntry + created_at: _timestamp_pb2.Timestamp + def __init__(self, filters: _Optional[_Iterable[str]] = ..., account: _Optional[_Union[SubscribeUpdateAccount, _Mapping]] = ..., slot: _Optional[_Union[SubscribeUpdateSlot, _Mapping]] = ..., transaction: _Optional[_Union[SubscribeUpdateTransaction, _Mapping]] = ..., 
transaction_status: _Optional[_Union[SubscribeUpdateTransactionStatus, _Mapping]] = ..., block: _Optional[_Union[SubscribeUpdateBlock, _Mapping]] = ..., ping: _Optional[_Union[SubscribeUpdatePing, _Mapping]] = ..., pong: _Optional[_Union[SubscribeUpdatePong, _Mapping]] = ..., block_meta: _Optional[_Union[SubscribeUpdateBlockMeta, _Mapping]] = ..., entry: _Optional[_Union[SubscribeUpdateEntry, _Mapping]] = ..., created_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ...) -> None: ... + +class SubscribeUpdateAccount(_message.Message): + __slots__ = ("account", "slot", "is_startup") + ACCOUNT_FIELD_NUMBER: _ClassVar[int] + SLOT_FIELD_NUMBER: _ClassVar[int] + IS_STARTUP_FIELD_NUMBER: _ClassVar[int] + account: SubscribeUpdateAccountInfo + slot: int + is_startup: bool + def __init__(self, account: _Optional[_Union[SubscribeUpdateAccountInfo, _Mapping]] = ..., slot: _Optional[int] = ..., is_startup: bool = ...) -> None: ... + +class SubscribeUpdateAccountInfo(_message.Message): + __slots__ = ("pubkey", "lamports", "owner", "executable", "rent_epoch", "data", "write_version", "txn_signature") + PUBKEY_FIELD_NUMBER: _ClassVar[int] + LAMPORTS_FIELD_NUMBER: _ClassVar[int] + OWNER_FIELD_NUMBER: _ClassVar[int] + EXECUTABLE_FIELD_NUMBER: _ClassVar[int] + RENT_EPOCH_FIELD_NUMBER: _ClassVar[int] + DATA_FIELD_NUMBER: _ClassVar[int] + WRITE_VERSION_FIELD_NUMBER: _ClassVar[int] + TXN_SIGNATURE_FIELD_NUMBER: _ClassVar[int] + pubkey: bytes + lamports: int + owner: bytes + executable: bool + rent_epoch: int + data: bytes + write_version: int + txn_signature: bytes + def __init__(self, pubkey: _Optional[bytes] = ..., lamports: _Optional[int] = ..., owner: _Optional[bytes] = ..., executable: bool = ..., rent_epoch: _Optional[int] = ..., data: _Optional[bytes] = ..., write_version: _Optional[int] = ..., txn_signature: _Optional[bytes] = ...) -> None: ... 
+ +class SubscribeUpdateSlot(_message.Message): + __slots__ = ("slot", "parent", "status", "dead_error") + SLOT_FIELD_NUMBER: _ClassVar[int] + PARENT_FIELD_NUMBER: _ClassVar[int] + STATUS_FIELD_NUMBER: _ClassVar[int] + DEAD_ERROR_FIELD_NUMBER: _ClassVar[int] + slot: int + parent: int + status: SlotStatus + dead_error: str + def __init__(self, slot: _Optional[int] = ..., parent: _Optional[int] = ..., status: _Optional[_Union[SlotStatus, str]] = ..., dead_error: _Optional[str] = ...) -> None: ... + +class SubscribeUpdateTransaction(_message.Message): + __slots__ = ("transaction", "slot") + TRANSACTION_FIELD_NUMBER: _ClassVar[int] + SLOT_FIELD_NUMBER: _ClassVar[int] + transaction: SubscribeUpdateTransactionInfo + slot: int + def __init__(self, transaction: _Optional[_Union[SubscribeUpdateTransactionInfo, _Mapping]] = ..., slot: _Optional[int] = ...) -> None: ... + +class SubscribeUpdateTransactionInfo(_message.Message): + __slots__ = ("signature", "is_vote", "transaction", "meta", "index") + SIGNATURE_FIELD_NUMBER: _ClassVar[int] + IS_VOTE_FIELD_NUMBER: _ClassVar[int] + TRANSACTION_FIELD_NUMBER: _ClassVar[int] + META_FIELD_NUMBER: _ClassVar[int] + INDEX_FIELD_NUMBER: _ClassVar[int] + signature: bytes + is_vote: bool + transaction: _solana_storage_pb2.Transaction + meta: _solana_storage_pb2.TransactionStatusMeta + index: int + def __init__(self, signature: _Optional[bytes] = ..., is_vote: bool = ..., transaction: _Optional[_Union[_solana_storage_pb2.Transaction, _Mapping]] = ..., meta: _Optional[_Union[_solana_storage_pb2.TransactionStatusMeta, _Mapping]] = ..., index: _Optional[int] = ...) -> None: ... 
+ +class SubscribeUpdateTransactionStatus(_message.Message): + __slots__ = ("slot", "signature", "is_vote", "index", "err") + SLOT_FIELD_NUMBER: _ClassVar[int] + SIGNATURE_FIELD_NUMBER: _ClassVar[int] + IS_VOTE_FIELD_NUMBER: _ClassVar[int] + INDEX_FIELD_NUMBER: _ClassVar[int] + ERR_FIELD_NUMBER: _ClassVar[int] + slot: int + signature: bytes + is_vote: bool + index: int + err: _solana_storage_pb2.TransactionError + def __init__(self, slot: _Optional[int] = ..., signature: _Optional[bytes] = ..., is_vote: bool = ..., index: _Optional[int] = ..., err: _Optional[_Union[_solana_storage_pb2.TransactionError, _Mapping]] = ...) -> None: ... + +class SubscribeUpdateBlock(_message.Message): + __slots__ = ("slot", "blockhash", "rewards", "block_time", "block_height", "parent_slot", "parent_blockhash", "executed_transaction_count", "transactions", "updated_account_count", "accounts", "entries_count", "entries") + SLOT_FIELD_NUMBER: _ClassVar[int] + BLOCKHASH_FIELD_NUMBER: _ClassVar[int] + REWARDS_FIELD_NUMBER: _ClassVar[int] + BLOCK_TIME_FIELD_NUMBER: _ClassVar[int] + BLOCK_HEIGHT_FIELD_NUMBER: _ClassVar[int] + PARENT_SLOT_FIELD_NUMBER: _ClassVar[int] + PARENT_BLOCKHASH_FIELD_NUMBER: _ClassVar[int] + EXECUTED_TRANSACTION_COUNT_FIELD_NUMBER: _ClassVar[int] + TRANSACTIONS_FIELD_NUMBER: _ClassVar[int] + UPDATED_ACCOUNT_COUNT_FIELD_NUMBER: _ClassVar[int] + ACCOUNTS_FIELD_NUMBER: _ClassVar[int] + ENTRIES_COUNT_FIELD_NUMBER: _ClassVar[int] + ENTRIES_FIELD_NUMBER: _ClassVar[int] + slot: int + blockhash: str + rewards: _solana_storage_pb2.Rewards + block_time: _solana_storage_pb2.UnixTimestamp + block_height: _solana_storage_pb2.BlockHeight + parent_slot: int + parent_blockhash: str + executed_transaction_count: int + transactions: _containers.RepeatedCompositeFieldContainer[SubscribeUpdateTransactionInfo] + updated_account_count: int + accounts: _containers.RepeatedCompositeFieldContainer[SubscribeUpdateAccountInfo] + entries_count: int + entries: 
_containers.RepeatedCompositeFieldContainer[SubscribeUpdateEntry] + def __init__(self, slot: _Optional[int] = ..., blockhash: _Optional[str] = ..., rewards: _Optional[_Union[_solana_storage_pb2.Rewards, _Mapping]] = ..., block_time: _Optional[_Union[_solana_storage_pb2.UnixTimestamp, _Mapping]] = ..., block_height: _Optional[_Union[_solana_storage_pb2.BlockHeight, _Mapping]] = ..., parent_slot: _Optional[int] = ..., parent_blockhash: _Optional[str] = ..., executed_transaction_count: _Optional[int] = ..., transactions: _Optional[_Iterable[_Union[SubscribeUpdateTransactionInfo, _Mapping]]] = ..., updated_account_count: _Optional[int] = ..., accounts: _Optional[_Iterable[_Union[SubscribeUpdateAccountInfo, _Mapping]]] = ..., entries_count: _Optional[int] = ..., entries: _Optional[_Iterable[_Union[SubscribeUpdateEntry, _Mapping]]] = ...) -> None: ... + +class SubscribeUpdateBlockMeta(_message.Message): + __slots__ = ("slot", "blockhash", "rewards", "block_time", "block_height", "parent_slot", "parent_blockhash", "executed_transaction_count", "entries_count") + SLOT_FIELD_NUMBER: _ClassVar[int] + BLOCKHASH_FIELD_NUMBER: _ClassVar[int] + REWARDS_FIELD_NUMBER: _ClassVar[int] + BLOCK_TIME_FIELD_NUMBER: _ClassVar[int] + BLOCK_HEIGHT_FIELD_NUMBER: _ClassVar[int] + PARENT_SLOT_FIELD_NUMBER: _ClassVar[int] + PARENT_BLOCKHASH_FIELD_NUMBER: _ClassVar[int] + EXECUTED_TRANSACTION_COUNT_FIELD_NUMBER: _ClassVar[int] + ENTRIES_COUNT_FIELD_NUMBER: _ClassVar[int] + slot: int + blockhash: str + rewards: _solana_storage_pb2.Rewards + block_time: _solana_storage_pb2.UnixTimestamp + block_height: _solana_storage_pb2.BlockHeight + parent_slot: int + parent_blockhash: str + executed_transaction_count: int + entries_count: int + def __init__(self, slot: _Optional[int] = ..., blockhash: _Optional[str] = ..., rewards: _Optional[_Union[_solana_storage_pb2.Rewards, _Mapping]] = ..., block_time: _Optional[_Union[_solana_storage_pb2.UnixTimestamp, _Mapping]] = ..., block_height: 
_Optional[_Union[_solana_storage_pb2.BlockHeight, _Mapping]] = ..., parent_slot: _Optional[int] = ..., parent_blockhash: _Optional[str] = ..., executed_transaction_count: _Optional[int] = ..., entries_count: _Optional[int] = ...) -> None: ... + +class SubscribeUpdateEntry(_message.Message): + __slots__ = ("slot", "index", "num_hashes", "hash", "executed_transaction_count", "starting_transaction_index") + SLOT_FIELD_NUMBER: _ClassVar[int] + INDEX_FIELD_NUMBER: _ClassVar[int] + NUM_HASHES_FIELD_NUMBER: _ClassVar[int] + HASH_FIELD_NUMBER: _ClassVar[int] + EXECUTED_TRANSACTION_COUNT_FIELD_NUMBER: _ClassVar[int] + STARTING_TRANSACTION_INDEX_FIELD_NUMBER: _ClassVar[int] + slot: int + index: int + num_hashes: int + hash: bytes + executed_transaction_count: int + starting_transaction_index: int + def __init__(self, slot: _Optional[int] = ..., index: _Optional[int] = ..., num_hashes: _Optional[int] = ..., hash: _Optional[bytes] = ..., executed_transaction_count: _Optional[int] = ..., starting_transaction_index: _Optional[int] = ...) -> None: ... + +class SubscribeUpdatePing(_message.Message): + __slots__ = () + def __init__(self) -> None: ... + +class SubscribeUpdatePong(_message.Message): + __slots__ = ("id",) + ID_FIELD_NUMBER: _ClassVar[int] + id: int + def __init__(self, id: _Optional[int] = ...) -> None: ... + +class PingRequest(_message.Message): + __slots__ = ("count",) + COUNT_FIELD_NUMBER: _ClassVar[int] + count: int + def __init__(self, count: _Optional[int] = ...) -> None: ... + +class PongResponse(_message.Message): + __slots__ = ("count",) + COUNT_FIELD_NUMBER: _ClassVar[int] + count: int + def __init__(self, count: _Optional[int] = ...) -> None: ... + +class GetLatestBlockhashRequest(_message.Message): + __slots__ = ("commitment",) + COMMITMENT_FIELD_NUMBER: _ClassVar[int] + commitment: CommitmentLevel + def __init__(self, commitment: _Optional[_Union[CommitmentLevel, str]] = ...) -> None: ... 
+ +class GetLatestBlockhashResponse(_message.Message): + __slots__ = ("slot", "blockhash", "last_valid_block_height") + SLOT_FIELD_NUMBER: _ClassVar[int] + BLOCKHASH_FIELD_NUMBER: _ClassVar[int] + LAST_VALID_BLOCK_HEIGHT_FIELD_NUMBER: _ClassVar[int] + slot: int + blockhash: str + last_valid_block_height: int + def __init__(self, slot: _Optional[int] = ..., blockhash: _Optional[str] = ..., last_valid_block_height: _Optional[int] = ...) -> None: ... + +class GetBlockHeightRequest(_message.Message): + __slots__ = ("commitment",) + COMMITMENT_FIELD_NUMBER: _ClassVar[int] + commitment: CommitmentLevel + def __init__(self, commitment: _Optional[_Union[CommitmentLevel, str]] = ...) -> None: ... + +class GetBlockHeightResponse(_message.Message): + __slots__ = ("block_height",) + BLOCK_HEIGHT_FIELD_NUMBER: _ClassVar[int] + block_height: int + def __init__(self, block_height: _Optional[int] = ...) -> None: ... + +class GetSlotRequest(_message.Message): + __slots__ = ("commitment",) + COMMITMENT_FIELD_NUMBER: _ClassVar[int] + commitment: CommitmentLevel + def __init__(self, commitment: _Optional[_Union[CommitmentLevel, str]] = ...) -> None: ... + +class GetSlotResponse(_message.Message): + __slots__ = ("slot",) + SLOT_FIELD_NUMBER: _ClassVar[int] + slot: int + def __init__(self, slot: _Optional[int] = ...) -> None: ... + +class GetVersionRequest(_message.Message): + __slots__ = () + def __init__(self) -> None: ... + +class GetVersionResponse(_message.Message): + __slots__ = ("version",) + VERSION_FIELD_NUMBER: _ClassVar[int] + version: str + def __init__(self, version: _Optional[str] = ...) -> None: ... + +class IsBlockhashValidRequest(_message.Message): + __slots__ = ("blockhash", "commitment") + BLOCKHASH_FIELD_NUMBER: _ClassVar[int] + COMMITMENT_FIELD_NUMBER: _ClassVar[int] + blockhash: str + commitment: CommitmentLevel + def __init__(self, blockhash: _Optional[str] = ..., commitment: _Optional[_Union[CommitmentLevel, str]] = ...) -> None: ... 
+ +class IsBlockhashValidResponse(_message.Message): + __slots__ = ("slot", "valid") + SLOT_FIELD_NUMBER: _ClassVar[int] + VALID_FIELD_NUMBER: _ClassVar[int] + slot: int + valid: bool + def __init__(self, slot: _Optional[int] = ..., valid: bool = ...) -> None: ... diff --git a/python/yellowstone-fumarole-client/yellowstone_api/geyser_pb2_grpc.py b/python/yellowstone-fumarole-client/yellowstone_api/geyser_pb2_grpc.py new file mode 100644 index 0000000..eb8f5c7 --- /dev/null +++ b/python/yellowstone-fumarole-client/yellowstone_api/geyser_pb2_grpc.py @@ -0,0 +1,355 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc +import warnings + +import yellowstone_api.geyser_pb2 as geyser__pb2 + +GRPC_GENERATED_VERSION = '1.71.0' +GRPC_VERSION = grpc.__version__ +_version_not_supported = False + +try: + from grpc._utilities import first_version_is_lower + _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) +except ImportError: + _version_not_supported = True + +if _version_not_supported: + raise RuntimeError( + f'The grpc package installed is at version {GRPC_VERSION},' + + f' but the generated code in geyser_pb2_grpc.py depends on' + + f' grpcio>={GRPC_GENERATED_VERSION}.' + + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' + ) + + +class GeyserStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.Subscribe = channel.stream_stream( + '/geyser.Geyser/Subscribe', + request_serializer=geyser__pb2.SubscribeRequest.SerializeToString, + response_deserializer=geyser__pb2.SubscribeUpdate.FromString, + _registered_method=True) + self.Ping = channel.unary_unary( + '/geyser.Geyser/Ping', + request_serializer=geyser__pb2.PingRequest.SerializeToString, + response_deserializer=geyser__pb2.PongResponse.FromString, + _registered_method=True) + self.GetLatestBlockhash = channel.unary_unary( + '/geyser.Geyser/GetLatestBlockhash', + request_serializer=geyser__pb2.GetLatestBlockhashRequest.SerializeToString, + response_deserializer=geyser__pb2.GetLatestBlockhashResponse.FromString, + _registered_method=True) + self.GetBlockHeight = channel.unary_unary( + '/geyser.Geyser/GetBlockHeight', + request_serializer=geyser__pb2.GetBlockHeightRequest.SerializeToString, + response_deserializer=geyser__pb2.GetBlockHeightResponse.FromString, + _registered_method=True) + self.GetSlot = channel.unary_unary( + '/geyser.Geyser/GetSlot', + request_serializer=geyser__pb2.GetSlotRequest.SerializeToString, + response_deserializer=geyser__pb2.GetSlotResponse.FromString, + _registered_method=True) + self.IsBlockhashValid = channel.unary_unary( + '/geyser.Geyser/IsBlockhashValid', + request_serializer=geyser__pb2.IsBlockhashValidRequest.SerializeToString, + response_deserializer=geyser__pb2.IsBlockhashValidResponse.FromString, + _registered_method=True) + self.GetVersion = channel.unary_unary( + '/geyser.Geyser/GetVersion', + request_serializer=geyser__pb2.GetVersionRequest.SerializeToString, + response_deserializer=geyser__pb2.GetVersionResponse.FromString, + _registered_method=True) + + +class GeyserServicer(object): + """Missing associated documentation comment in .proto file.""" + + def Subscribe(self, request_iterator, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not 
implemented!') + raise NotImplementedError('Method not implemented!') + + def Ping(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetLatestBlockhash(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetBlockHeight(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetSlot(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def IsBlockhashValid(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetVersion(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_GeyserServicer_to_server(servicer, server): + rpc_method_handlers = { + 'Subscribe': grpc.stream_stream_rpc_method_handler( + servicer.Subscribe, + request_deserializer=geyser__pb2.SubscribeRequest.FromString, + response_serializer=geyser__pb2.SubscribeUpdate.SerializeToString, + ), + 'Ping': 
grpc.unary_unary_rpc_method_handler( + servicer.Ping, + request_deserializer=geyser__pb2.PingRequest.FromString, + response_serializer=geyser__pb2.PongResponse.SerializeToString, + ), + 'GetLatestBlockhash': grpc.unary_unary_rpc_method_handler( + servicer.GetLatestBlockhash, + request_deserializer=geyser__pb2.GetLatestBlockhashRequest.FromString, + response_serializer=geyser__pb2.GetLatestBlockhashResponse.SerializeToString, + ), + 'GetBlockHeight': grpc.unary_unary_rpc_method_handler( + servicer.GetBlockHeight, + request_deserializer=geyser__pb2.GetBlockHeightRequest.FromString, + response_serializer=geyser__pb2.GetBlockHeightResponse.SerializeToString, + ), + 'GetSlot': grpc.unary_unary_rpc_method_handler( + servicer.GetSlot, + request_deserializer=geyser__pb2.GetSlotRequest.FromString, + response_serializer=geyser__pb2.GetSlotResponse.SerializeToString, + ), + 'IsBlockhashValid': grpc.unary_unary_rpc_method_handler( + servicer.IsBlockhashValid, + request_deserializer=geyser__pb2.IsBlockhashValidRequest.FromString, + response_serializer=geyser__pb2.IsBlockhashValidResponse.SerializeToString, + ), + 'GetVersion': grpc.unary_unary_rpc_method_handler( + servicer.GetVersion, + request_deserializer=geyser__pb2.GetVersionRequest.FromString, + response_serializer=geyser__pb2.GetVersionResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'geyser.Geyser', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('geyser.Geyser', rpc_method_handlers) + + + # This class is part of an EXPERIMENTAL API. 
+class Geyser(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def Subscribe(request_iterator, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.stream_stream( + request_iterator, + target, + '/geyser.Geyser/Subscribe', + geyser__pb2.SubscribeRequest.SerializeToString, + geyser__pb2.SubscribeUpdate.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def Ping(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/geyser.Geyser/Ping', + geyser__pb2.PingRequest.SerializeToString, + geyser__pb2.PongResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetLatestBlockhash(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/geyser.Geyser/GetLatestBlockhash', + geyser__pb2.GetLatestBlockhashRequest.SerializeToString, + geyser__pb2.GetLatestBlockhashResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetBlockHeight(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return 
grpc.experimental.unary_unary( + request, + target, + '/geyser.Geyser/GetBlockHeight', + geyser__pb2.GetBlockHeightRequest.SerializeToString, + geyser__pb2.GetBlockHeightResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetSlot(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/geyser.Geyser/GetSlot', + geyser__pb2.GetSlotRequest.SerializeToString, + geyser__pb2.GetSlotResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def IsBlockhashValid(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/geyser.Geyser/IsBlockhashValid', + geyser__pb2.IsBlockhashValidRequest.SerializeToString, + geyser__pb2.IsBlockhashValidResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetVersion(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/geyser.Geyser/GetVersion', + geyser__pb2.GetVersionRequest.SerializeToString, + geyser__pb2.GetVersionResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + 
_registered_method=True) diff --git a/python/yellowstone-fumarole-client/yellowstone_api/solana_storage_pb2.py b/python/yellowstone-fumarole-client/yellowstone_api/solana_storage_pb2.py new file mode 100644 index 0000000..7458363 --- /dev/null +++ b/python/yellowstone-fumarole-client/yellowstone_api/solana_storage_pb2.py @@ -0,0 +1,75 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE +# source: solana-storage.proto +# Protobuf Python Version: 5.29.0 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 5, + 29, + 0, + '', + 'solana-storage.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x14solana-storage.proto\x12\x1dsolana.storage.ConfirmedBlock\"\xa1\x03\n\x0e\x43onfirmedBlock\x12\x1a\n\x12previous_blockhash\x18\x01 \x01(\t\x12\x11\n\tblockhash\x18\x02 \x01(\t\x12\x13\n\x0bparent_slot\x18\x03 \x01(\x04\x12I\n\x0ctransactions\x18\x04 \x03(\x0b\x32\x33.solana.storage.ConfirmedBlock.ConfirmedTransaction\x12\x36\n\x07rewards\x18\x05 \x03(\x0b\x32%.solana.storage.ConfirmedBlock.Reward\x12@\n\nblock_time\x18\x06 \x01(\x0b\x32,.solana.storage.ConfirmedBlock.UnixTimestamp\x12@\n\x0c\x62lock_height\x18\x07 \x01(\x0b\x32*.solana.storage.ConfirmedBlock.BlockHeight\x12\x44\n\x0enum_partitions\x18\x08 \x01(\x0b\x32,.solana.storage.ConfirmedBlock.NumPartitions\"\x9b\x01\n\x14\x43onfirmedTransaction\x12?\n\x0btransaction\x18\x01 \x01(\x0b\x32*.solana.storage.ConfirmedBlock.Transaction\x12\x42\n\x04meta\x18\x02 
\x01(\x0b\x32\x34.solana.storage.ConfirmedBlock.TransactionStatusMeta\"Z\n\x0bTransaction\x12\x12\n\nsignatures\x18\x01 \x03(\x0c\x12\x37\n\x07message\x18\x02 \x01(\x0b\x32&.solana.storage.ConfirmedBlock.Message\"\xad\x02\n\x07Message\x12<\n\x06header\x18\x01 \x01(\x0b\x32,.solana.storage.ConfirmedBlock.MessageHeader\x12\x14\n\x0c\x61\x63\x63ount_keys\x18\x02 \x03(\x0c\x12\x18\n\x10recent_blockhash\x18\x03 \x01(\x0c\x12H\n\x0cinstructions\x18\x04 \x03(\x0b\x32\x32.solana.storage.ConfirmedBlock.CompiledInstruction\x12\x11\n\tversioned\x18\x05 \x01(\x08\x12W\n\x15\x61\x64\x64ress_table_lookups\x18\x06 \x03(\x0b\x32\x38.solana.storage.ConfirmedBlock.MessageAddressTableLookup\"~\n\rMessageHeader\x12\x1f\n\x17num_required_signatures\x18\x01 \x01(\r\x12$\n\x1cnum_readonly_signed_accounts\x18\x02 \x01(\r\x12&\n\x1enum_readonly_unsigned_accounts\x18\x03 \x01(\r\"d\n\x19MessageAddressTableLookup\x12\x13\n\x0b\x61\x63\x63ount_key\x18\x01 \x01(\x0c\x12\x18\n\x10writable_indexes\x18\x02 \x01(\x0c\x12\x18\n\x10readonly_indexes\x18\x03 \x01(\x0c\"\xda\x05\n\x15TransactionStatusMeta\x12<\n\x03\x65rr\x18\x01 \x01(\x0b\x32/.solana.storage.ConfirmedBlock.TransactionError\x12\x0b\n\x03\x66\x65\x65\x18\x02 \x01(\x04\x12\x14\n\x0cpre_balances\x18\x03 \x03(\x04\x12\x15\n\rpost_balances\x18\x04 \x03(\x04\x12L\n\x12inner_instructions\x18\x05 \x03(\x0b\x32\x30.solana.storage.ConfirmedBlock.InnerInstructions\x12\x1f\n\x17inner_instructions_none\x18\n \x01(\x08\x12\x14\n\x0clog_messages\x18\x06 \x03(\t\x12\x19\n\x11log_messages_none\x18\x0b \x01(\x08\x12G\n\x12pre_token_balances\x18\x07 \x03(\x0b\x32+.solana.storage.ConfirmedBlock.TokenBalance\x12H\n\x13post_token_balances\x18\x08 \x03(\x0b\x32+.solana.storage.ConfirmedBlock.TokenBalance\x12\x36\n\x07rewards\x18\t \x03(\x0b\x32%.solana.storage.ConfirmedBlock.Reward\x12!\n\x19loaded_writable_addresses\x18\x0c \x03(\x0c\x12!\n\x19loaded_readonly_addresses\x18\r \x03(\x0c\x12>\n\x0breturn_data\x18\x0e 
\x01(\x0b\x32).solana.storage.ConfirmedBlock.ReturnData\x12\x18\n\x10return_data_none\x18\x0f \x01(\x08\x12#\n\x16\x63ompute_units_consumed\x18\x10 \x01(\x04H\x00\x88\x01\x01\x42\x19\n\x17_compute_units_consumed\"\x1f\n\x10TransactionError\x12\x0b\n\x03\x65rr\x18\x01 \x01(\x0c\"i\n\x11InnerInstructions\x12\r\n\x05index\x18\x01 \x01(\r\x12\x45\n\x0cinstructions\x18\x02 \x03(\x0b\x32/.solana.storage.ConfirmedBlock.InnerInstruction\"x\n\x10InnerInstruction\x12\x18\n\x10program_id_index\x18\x01 \x01(\r\x12\x10\n\x08\x61\x63\x63ounts\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c\x12\x19\n\x0cstack_height\x18\x04 \x01(\rH\x00\x88\x01\x01\x42\x0f\n\r_stack_height\"O\n\x13\x43ompiledInstruction\x12\x18\n\x10program_id_index\x18\x01 \x01(\r\x12\x10\n\x08\x61\x63\x63ounts\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c\"\x9d\x01\n\x0cTokenBalance\x12\x15\n\raccount_index\x18\x01 \x01(\r\x12\x0c\n\x04mint\x18\x02 \x01(\t\x12\x45\n\x0fui_token_amount\x18\x03 \x01(\x0b\x32,.solana.storage.ConfirmedBlock.UiTokenAmount\x12\r\n\x05owner\x18\x04 \x01(\t\x12\x12\n\nprogram_id\x18\x05 \x01(\t\"^\n\rUiTokenAmount\x12\x11\n\tui_amount\x18\x01 \x01(\x01\x12\x10\n\x08\x64\x65\x63imals\x18\x02 \x01(\r\x12\x0e\n\x06\x61mount\x18\x03 \x01(\t\x12\x18\n\x10ui_amount_string\x18\x04 \x01(\t\".\n\nReturnData\x12\x12\n\nprogram_id\x18\x01 \x01(\x0c\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\"\x94\x01\n\x06Reward\x12\x0e\n\x06pubkey\x18\x01 \x01(\t\x12\x10\n\x08lamports\x18\x02 \x01(\x03\x12\x14\n\x0cpost_balance\x18\x03 \x01(\x04\x12>\n\x0breward_type\x18\x04 \x01(\x0e\x32).solana.storage.ConfirmedBlock.RewardType\x12\x12\n\ncommission\x18\x05 \x01(\t\"\x87\x01\n\x07Rewards\x12\x36\n\x07rewards\x18\x01 \x03(\x0b\x32%.solana.storage.ConfirmedBlock.Reward\x12\x44\n\x0enum_partitions\x18\x02 \x01(\x0b\x32,.solana.storage.ConfirmedBlock.NumPartitions\"\"\n\rUnixTimestamp\x12\x11\n\ttimestamp\x18\x01 \x01(\x03\"#\n\x0b\x42lockHeight\x12\x14\n\x0c\x62lock_height\x18\x01 
\x01(\x04\"\'\n\rNumPartitions\x12\x16\n\x0enum_partitions\x18\x01 \x01(\x04*I\n\nRewardType\x12\x0f\n\x0bUnspecified\x10\x00\x12\x07\n\x03\x46\x65\x65\x10\x01\x12\x08\n\x04Rent\x10\x02\x12\x0b\n\x07Staking\x10\x03\x12\n\n\x06Voting\x10\x04\x42;Z9github.com/rpcpool/yellowstone-grpc/examples/golang/protob\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'solana_storage_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'Z9github.com/rpcpool/yellowstone-grpc/examples/golang/proto' + _globals['_REWARDTYPE']._serialized_start=3042 + _globals['_REWARDTYPE']._serialized_end=3115 + _globals['_CONFIRMEDBLOCK']._serialized_start=56 + _globals['_CONFIRMEDBLOCK']._serialized_end=473 + _globals['_CONFIRMEDTRANSACTION']._serialized_start=476 + _globals['_CONFIRMEDTRANSACTION']._serialized_end=631 + _globals['_TRANSACTION']._serialized_start=633 + _globals['_TRANSACTION']._serialized_end=723 + _globals['_MESSAGE']._serialized_start=726 + _globals['_MESSAGE']._serialized_end=1027 + _globals['_MESSAGEHEADER']._serialized_start=1029 + _globals['_MESSAGEHEADER']._serialized_end=1155 + _globals['_MESSAGEADDRESSTABLELOOKUP']._serialized_start=1157 + _globals['_MESSAGEADDRESSTABLELOOKUP']._serialized_end=1257 + _globals['_TRANSACTIONSTATUSMETA']._serialized_start=1260 + _globals['_TRANSACTIONSTATUSMETA']._serialized_end=1990 + _globals['_TRANSACTIONERROR']._serialized_start=1992 + _globals['_TRANSACTIONERROR']._serialized_end=2023 + _globals['_INNERINSTRUCTIONS']._serialized_start=2025 + _globals['_INNERINSTRUCTIONS']._serialized_end=2130 + _globals['_INNERINSTRUCTION']._serialized_start=2132 + _globals['_INNERINSTRUCTION']._serialized_end=2252 + _globals['_COMPILEDINSTRUCTION']._serialized_start=2254 + _globals['_COMPILEDINSTRUCTION']._serialized_end=2333 + 
_globals['_TOKENBALANCE']._serialized_start=2336 + _globals['_TOKENBALANCE']._serialized_end=2493 + _globals['_UITOKENAMOUNT']._serialized_start=2495 + _globals['_UITOKENAMOUNT']._serialized_end=2589 + _globals['_RETURNDATA']._serialized_start=2591 + _globals['_RETURNDATA']._serialized_end=2637 + _globals['_REWARD']._serialized_start=2640 + _globals['_REWARD']._serialized_end=2788 + _globals['_REWARDS']._serialized_start=2791 + _globals['_REWARDS']._serialized_end=2926 + _globals['_UNIXTIMESTAMP']._serialized_start=2928 + _globals['_UNIXTIMESTAMP']._serialized_end=2962 + _globals['_BLOCKHEIGHT']._serialized_start=2964 + _globals['_BLOCKHEIGHT']._serialized_end=2999 + _globals['_NUMPARTITIONS']._serialized_start=3001 + _globals['_NUMPARTITIONS']._serialized_end=3040 +# @@protoc_insertion_point(module_scope) diff --git a/python/yellowstone-fumarole-client/yellowstone_api/solana_storage_pb2.pyi b/python/yellowstone-fumarole-client/yellowstone_api/solana_storage_pb2.pyi new file mode 100644 index 0000000..10312a6 --- /dev/null +++ b/python/yellowstone-fumarole-client/yellowstone_api/solana_storage_pb2.pyi @@ -0,0 +1,238 @@ +from google.protobuf.internal import containers as _containers +from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class RewardType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + Unspecified: _ClassVar[RewardType] + Fee: _ClassVar[RewardType] + Rent: _ClassVar[RewardType] + Staking: _ClassVar[RewardType] + Voting: _ClassVar[RewardType] +Unspecified: RewardType +Fee: RewardType +Rent: RewardType +Staking: RewardType +Voting: RewardType + +class ConfirmedBlock(_message.Message): + __slots__ = ("previous_blockhash", "blockhash", 
"parent_slot", "transactions", "rewards", "block_time", "block_height", "num_partitions") + PREVIOUS_BLOCKHASH_FIELD_NUMBER: _ClassVar[int] + BLOCKHASH_FIELD_NUMBER: _ClassVar[int] + PARENT_SLOT_FIELD_NUMBER: _ClassVar[int] + TRANSACTIONS_FIELD_NUMBER: _ClassVar[int] + REWARDS_FIELD_NUMBER: _ClassVar[int] + BLOCK_TIME_FIELD_NUMBER: _ClassVar[int] + BLOCK_HEIGHT_FIELD_NUMBER: _ClassVar[int] + NUM_PARTITIONS_FIELD_NUMBER: _ClassVar[int] + previous_blockhash: str + blockhash: str + parent_slot: int + transactions: _containers.RepeatedCompositeFieldContainer[ConfirmedTransaction] + rewards: _containers.RepeatedCompositeFieldContainer[Reward] + block_time: UnixTimestamp + block_height: BlockHeight + num_partitions: NumPartitions + def __init__(self, previous_blockhash: _Optional[str] = ..., blockhash: _Optional[str] = ..., parent_slot: _Optional[int] = ..., transactions: _Optional[_Iterable[_Union[ConfirmedTransaction, _Mapping]]] = ..., rewards: _Optional[_Iterable[_Union[Reward, _Mapping]]] = ..., block_time: _Optional[_Union[UnixTimestamp, _Mapping]] = ..., block_height: _Optional[_Union[BlockHeight, _Mapping]] = ..., num_partitions: _Optional[_Union[NumPartitions, _Mapping]] = ...) -> None: ... + +class ConfirmedTransaction(_message.Message): + __slots__ = ("transaction", "meta") + TRANSACTION_FIELD_NUMBER: _ClassVar[int] + META_FIELD_NUMBER: _ClassVar[int] + transaction: Transaction + meta: TransactionStatusMeta + def __init__(self, transaction: _Optional[_Union[Transaction, _Mapping]] = ..., meta: _Optional[_Union[TransactionStatusMeta, _Mapping]] = ...) -> None: ... + +class Transaction(_message.Message): + __slots__ = ("signatures", "message") + SIGNATURES_FIELD_NUMBER: _ClassVar[int] + MESSAGE_FIELD_NUMBER: _ClassVar[int] + signatures: _containers.RepeatedScalarFieldContainer[bytes] + message: Message + def __init__(self, signatures: _Optional[_Iterable[bytes]] = ..., message: _Optional[_Union[Message, _Mapping]] = ...) -> None: ... 
+ +class Message(_message.Message): + __slots__ = ("header", "account_keys", "recent_blockhash", "instructions", "versioned", "address_table_lookups") + HEADER_FIELD_NUMBER: _ClassVar[int] + ACCOUNT_KEYS_FIELD_NUMBER: _ClassVar[int] + RECENT_BLOCKHASH_FIELD_NUMBER: _ClassVar[int] + INSTRUCTIONS_FIELD_NUMBER: _ClassVar[int] + VERSIONED_FIELD_NUMBER: _ClassVar[int] + ADDRESS_TABLE_LOOKUPS_FIELD_NUMBER: _ClassVar[int] + header: MessageHeader + account_keys: _containers.RepeatedScalarFieldContainer[bytes] + recent_blockhash: bytes + instructions: _containers.RepeatedCompositeFieldContainer[CompiledInstruction] + versioned: bool + address_table_lookups: _containers.RepeatedCompositeFieldContainer[MessageAddressTableLookup] + def __init__(self, header: _Optional[_Union[MessageHeader, _Mapping]] = ..., account_keys: _Optional[_Iterable[bytes]] = ..., recent_blockhash: _Optional[bytes] = ..., instructions: _Optional[_Iterable[_Union[CompiledInstruction, _Mapping]]] = ..., versioned: bool = ..., address_table_lookups: _Optional[_Iterable[_Union[MessageAddressTableLookup, _Mapping]]] = ...) -> None: ... + +class MessageHeader(_message.Message): + __slots__ = ("num_required_signatures", "num_readonly_signed_accounts", "num_readonly_unsigned_accounts") + NUM_REQUIRED_SIGNATURES_FIELD_NUMBER: _ClassVar[int] + NUM_READONLY_SIGNED_ACCOUNTS_FIELD_NUMBER: _ClassVar[int] + NUM_READONLY_UNSIGNED_ACCOUNTS_FIELD_NUMBER: _ClassVar[int] + num_required_signatures: int + num_readonly_signed_accounts: int + num_readonly_unsigned_accounts: int + def __init__(self, num_required_signatures: _Optional[int] = ..., num_readonly_signed_accounts: _Optional[int] = ..., num_readonly_unsigned_accounts: _Optional[int] = ...) -> None: ... 
+ +class MessageAddressTableLookup(_message.Message): + __slots__ = ("account_key", "writable_indexes", "readonly_indexes") + ACCOUNT_KEY_FIELD_NUMBER: _ClassVar[int] + WRITABLE_INDEXES_FIELD_NUMBER: _ClassVar[int] + READONLY_INDEXES_FIELD_NUMBER: _ClassVar[int] + account_key: bytes + writable_indexes: bytes + readonly_indexes: bytes + def __init__(self, account_key: _Optional[bytes] = ..., writable_indexes: _Optional[bytes] = ..., readonly_indexes: _Optional[bytes] = ...) -> None: ... + +class TransactionStatusMeta(_message.Message): + __slots__ = ("err", "fee", "pre_balances", "post_balances", "inner_instructions", "inner_instructions_none", "log_messages", "log_messages_none", "pre_token_balances", "post_token_balances", "rewards", "loaded_writable_addresses", "loaded_readonly_addresses", "return_data", "return_data_none", "compute_units_consumed") + ERR_FIELD_NUMBER: _ClassVar[int] + FEE_FIELD_NUMBER: _ClassVar[int] + PRE_BALANCES_FIELD_NUMBER: _ClassVar[int] + POST_BALANCES_FIELD_NUMBER: _ClassVar[int] + INNER_INSTRUCTIONS_FIELD_NUMBER: _ClassVar[int] + INNER_INSTRUCTIONS_NONE_FIELD_NUMBER: _ClassVar[int] + LOG_MESSAGES_FIELD_NUMBER: _ClassVar[int] + LOG_MESSAGES_NONE_FIELD_NUMBER: _ClassVar[int] + PRE_TOKEN_BALANCES_FIELD_NUMBER: _ClassVar[int] + POST_TOKEN_BALANCES_FIELD_NUMBER: _ClassVar[int] + REWARDS_FIELD_NUMBER: _ClassVar[int] + LOADED_WRITABLE_ADDRESSES_FIELD_NUMBER: _ClassVar[int] + LOADED_READONLY_ADDRESSES_FIELD_NUMBER: _ClassVar[int] + RETURN_DATA_FIELD_NUMBER: _ClassVar[int] + RETURN_DATA_NONE_FIELD_NUMBER: _ClassVar[int] + COMPUTE_UNITS_CONSUMED_FIELD_NUMBER: _ClassVar[int] + err: TransactionError + fee: int + pre_balances: _containers.RepeatedScalarFieldContainer[int] + post_balances: _containers.RepeatedScalarFieldContainer[int] + inner_instructions: _containers.RepeatedCompositeFieldContainer[InnerInstructions] + inner_instructions_none: bool + log_messages: _containers.RepeatedScalarFieldContainer[str] + log_messages_none: bool + 
pre_token_balances: _containers.RepeatedCompositeFieldContainer[TokenBalance] + post_token_balances: _containers.RepeatedCompositeFieldContainer[TokenBalance] + rewards: _containers.RepeatedCompositeFieldContainer[Reward] + loaded_writable_addresses: _containers.RepeatedScalarFieldContainer[bytes] + loaded_readonly_addresses: _containers.RepeatedScalarFieldContainer[bytes] + return_data: ReturnData + return_data_none: bool + compute_units_consumed: int + def __init__(self, err: _Optional[_Union[TransactionError, _Mapping]] = ..., fee: _Optional[int] = ..., pre_balances: _Optional[_Iterable[int]] = ..., post_balances: _Optional[_Iterable[int]] = ..., inner_instructions: _Optional[_Iterable[_Union[InnerInstructions, _Mapping]]] = ..., inner_instructions_none: bool = ..., log_messages: _Optional[_Iterable[str]] = ..., log_messages_none: bool = ..., pre_token_balances: _Optional[_Iterable[_Union[TokenBalance, _Mapping]]] = ..., post_token_balances: _Optional[_Iterable[_Union[TokenBalance, _Mapping]]] = ..., rewards: _Optional[_Iterable[_Union[Reward, _Mapping]]] = ..., loaded_writable_addresses: _Optional[_Iterable[bytes]] = ..., loaded_readonly_addresses: _Optional[_Iterable[bytes]] = ..., return_data: _Optional[_Union[ReturnData, _Mapping]] = ..., return_data_none: bool = ..., compute_units_consumed: _Optional[int] = ...) -> None: ... + +class TransactionError(_message.Message): + __slots__ = ("err",) + ERR_FIELD_NUMBER: _ClassVar[int] + err: bytes + def __init__(self, err: _Optional[bytes] = ...) -> None: ... + +class InnerInstructions(_message.Message): + __slots__ = ("index", "instructions") + INDEX_FIELD_NUMBER: _ClassVar[int] + INSTRUCTIONS_FIELD_NUMBER: _ClassVar[int] + index: int + instructions: _containers.RepeatedCompositeFieldContainer[InnerInstruction] + def __init__(self, index: _Optional[int] = ..., instructions: _Optional[_Iterable[_Union[InnerInstruction, _Mapping]]] = ...) -> None: ... 
+ +class InnerInstruction(_message.Message): + __slots__ = ("program_id_index", "accounts", "data", "stack_height") + PROGRAM_ID_INDEX_FIELD_NUMBER: _ClassVar[int] + ACCOUNTS_FIELD_NUMBER: _ClassVar[int] + DATA_FIELD_NUMBER: _ClassVar[int] + STACK_HEIGHT_FIELD_NUMBER: _ClassVar[int] + program_id_index: int + accounts: bytes + data: bytes + stack_height: int + def __init__(self, program_id_index: _Optional[int] = ..., accounts: _Optional[bytes] = ..., data: _Optional[bytes] = ..., stack_height: _Optional[int] = ...) -> None: ... + +class CompiledInstruction(_message.Message): + __slots__ = ("program_id_index", "accounts", "data") + PROGRAM_ID_INDEX_FIELD_NUMBER: _ClassVar[int] + ACCOUNTS_FIELD_NUMBER: _ClassVar[int] + DATA_FIELD_NUMBER: _ClassVar[int] + program_id_index: int + accounts: bytes + data: bytes + def __init__(self, program_id_index: _Optional[int] = ..., accounts: _Optional[bytes] = ..., data: _Optional[bytes] = ...) -> None: ... + +class TokenBalance(_message.Message): + __slots__ = ("account_index", "mint", "ui_token_amount", "owner", "program_id") + ACCOUNT_INDEX_FIELD_NUMBER: _ClassVar[int] + MINT_FIELD_NUMBER: _ClassVar[int] + UI_TOKEN_AMOUNT_FIELD_NUMBER: _ClassVar[int] + OWNER_FIELD_NUMBER: _ClassVar[int] + PROGRAM_ID_FIELD_NUMBER: _ClassVar[int] + account_index: int + mint: str + ui_token_amount: UiTokenAmount + owner: str + program_id: str + def __init__(self, account_index: _Optional[int] = ..., mint: _Optional[str] = ..., ui_token_amount: _Optional[_Union[UiTokenAmount, _Mapping]] = ..., owner: _Optional[str] = ..., program_id: _Optional[str] = ...) -> None: ... 
+ +class UiTokenAmount(_message.Message): + __slots__ = ("ui_amount", "decimals", "amount", "ui_amount_string") + UI_AMOUNT_FIELD_NUMBER: _ClassVar[int] + DECIMALS_FIELD_NUMBER: _ClassVar[int] + AMOUNT_FIELD_NUMBER: _ClassVar[int] + UI_AMOUNT_STRING_FIELD_NUMBER: _ClassVar[int] + ui_amount: float + decimals: int + amount: str + ui_amount_string: str + def __init__(self, ui_amount: _Optional[float] = ..., decimals: _Optional[int] = ..., amount: _Optional[str] = ..., ui_amount_string: _Optional[str] = ...) -> None: ... + +class ReturnData(_message.Message): + __slots__ = ("program_id", "data") + PROGRAM_ID_FIELD_NUMBER: _ClassVar[int] + DATA_FIELD_NUMBER: _ClassVar[int] + program_id: bytes + data: bytes + def __init__(self, program_id: _Optional[bytes] = ..., data: _Optional[bytes] = ...) -> None: ... + +class Reward(_message.Message): + __slots__ = ("pubkey", "lamports", "post_balance", "reward_type", "commission") + PUBKEY_FIELD_NUMBER: _ClassVar[int] + LAMPORTS_FIELD_NUMBER: _ClassVar[int] + POST_BALANCE_FIELD_NUMBER: _ClassVar[int] + REWARD_TYPE_FIELD_NUMBER: _ClassVar[int] + COMMISSION_FIELD_NUMBER: _ClassVar[int] + pubkey: str + lamports: int + post_balance: int + reward_type: RewardType + commission: str + def __init__(self, pubkey: _Optional[str] = ..., lamports: _Optional[int] = ..., post_balance: _Optional[int] = ..., reward_type: _Optional[_Union[RewardType, str]] = ..., commission: _Optional[str] = ...) -> None: ... + +class Rewards(_message.Message): + __slots__ = ("rewards", "num_partitions") + REWARDS_FIELD_NUMBER: _ClassVar[int] + NUM_PARTITIONS_FIELD_NUMBER: _ClassVar[int] + rewards: _containers.RepeatedCompositeFieldContainer[Reward] + num_partitions: NumPartitions + def __init__(self, rewards: _Optional[_Iterable[_Union[Reward, _Mapping]]] = ..., num_partitions: _Optional[_Union[NumPartitions, _Mapping]] = ...) -> None: ... 
+ +class UnixTimestamp(_message.Message): + __slots__ = ("timestamp",) + TIMESTAMP_FIELD_NUMBER: _ClassVar[int] + timestamp: int + def __init__(self, timestamp: _Optional[int] = ...) -> None: ... + +class BlockHeight(_message.Message): + __slots__ = ("block_height",) + BLOCK_HEIGHT_FIELD_NUMBER: _ClassVar[int] + block_height: int + def __init__(self, block_height: _Optional[int] = ...) -> None: ... + +class NumPartitions(_message.Message): + __slots__ = ("num_partitions",) + NUM_PARTITIONS_FIELD_NUMBER: _ClassVar[int] + num_partitions: int + def __init__(self, num_partitions: _Optional[int] = ...) -> None: ... diff --git a/python/yellowstone-fumarole-client/yellowstone_api/solana_storage_pb2_grpc.py b/python/yellowstone-fumarole-client/yellowstone_api/solana_storage_pb2_grpc.py new file mode 100644 index 0000000..1544a78 --- /dev/null +++ b/python/yellowstone-fumarole-client/yellowstone_api/solana_storage_pb2_grpc.py @@ -0,0 +1,24 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc +import warnings + + +GRPC_GENERATED_VERSION = '1.71.0' +GRPC_VERSION = grpc.__version__ +_version_not_supported = False + +try: + from grpc._utilities import first_version_is_lower + _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) +except ImportError: + _version_not_supported = True + +if _version_not_supported: + raise RuntimeError( + f'The grpc package installed is at version {GRPC_VERSION},' + + f' but the generated code in solana_storage_pb2_grpc.py depends on' + + f' grpcio>={GRPC_GENERATED_VERSION}.' + + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' 
+ ) diff --git a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py new file mode 100644 index 0000000..f8cf2bf --- /dev/null +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py @@ -0,0 +1 @@ +import yellowstone_fumarole_client.runtime \ No newline at end of file diff --git a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/__init__.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/__init__.py new file mode 100644 index 0000000..6cf9bae --- /dev/null +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/__init__.py @@ -0,0 +1,2 @@ + +from yellowstone_fumarole_client.runtime.state_machine import * \ No newline at end of file diff --git a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/state_machine.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/state_machine.py new file mode 100644 index 0000000..4983004 --- /dev/null +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/state_machine.py @@ -0,0 +1,321 @@ +from typing import Optional, List, Dict, Set, Deque, Tuple, Any +from collections import deque, defaultdict +from yellowstone_api.fumarole_v2_pb2 import ( + CommitmentLevel, + BlockchainEvent, +) +import heapq +import uuid +from enum import Enum + +__all__ = [ + "DEFAULT_SLOT_MEMORY_RETENTION", + "FumeBlockchainId", + "FumeBlockUID", + "FumeNumShards", + "FumeShardIdx", + "FumeOffset", + "FumeSessionSequence", + "Slot", + "FumeDownloadRequest", + "FumeSlotStatus", + "SlotCommitmentProgression", + "SlotDownloadProgress", + "SlotDownloadState", + "FumaroleSM", +] + +# Constants +DEFAULT_SLOT_MEMORY_RETENTION = 10000 + +# Type aliases +FumeBlockchainId = bytes # Equivalent to [u8; 16] +FumeBlockUID = bytes # Equivalent to [u8; 16] +FumeNumShards = int # Equivalent to u32 +FumeShardIdx = int # 
Equivalent to u32 +FumeOffset = int # Equivalent to i64 +FumeSessionSequence = int # Equivalent to u64 +Slot = int # From solana_sdk::clock::Slot + + +# Data structures +class FumeDownloadRequest: + def __init__( + self, + slot: Slot, + blockchain_id: FumeBlockchainId, + block_uid: FumeBlockUID, + num_shards: FumeNumShards, + commitment_level: CommitmentLevel, + ): + self.slot = slot + self.blockchain_id = blockchain_id + self.block_uid = block_uid + self.num_shards = num_shards + self.commitment_level = commitment_level + + +class FumeSlotStatus: + def __init__( + self, + session_sequence: FumeSessionSequence, + offset: FumeOffset, + slot: Slot, + parent_slot: Optional[Slot], + commitment_level: CommitmentLevel, + dead_error: Optional[str], + ): + self.session_sequence = session_sequence + self.offset = offset + self.slot = slot + self.parent_slot = parent_slot + self.commitment_level = commitment_level + self.dead_error = dead_error + + +class SlotCommitmentProgression: + def __init__(self): + self.processed_commitment_levels: Set[CommitmentLevel] = set() + + +class SlotDownloadProgress: + def __init__(self, num_shards: FumeNumShards): + self.num_shards = num_shards + self.shard_remaining = [False] * num_shards + + def do_progress(self, shard_idx: FumeShardIdx) -> "SlotDownloadState": + self.shard_remaining[shard_idx % self.num_shards] = True + return ( + SlotDownloadState.Done + if all(self.shard_remaining) + else SlotDownloadState.Downloading + ) + + +class SlotDownloadState(Enum): + Downloading = "Downloading" + Done = "Done" + + +class FumaroleSM: + """ + Sans-IO Fumarole State Machine + + Manages in-flight slot downloads and ensures correct ordering of slot statuses without performing I/O. 
+ """ + + def __init__(self, last_committed_offset: FumeOffset, slot_memory_retention: int): + self.last_committed_offset = last_committed_offset + self.slot_commitment_progression = ( + dict() + ) # Slot -> SlotCommitmentProgression + self.downloaded_slot = set() # Set of downloaded slots + self.inflight_slot_shard_download = {} # Slot -> SlotDownloadProgress + self.blocked_slot_status_update = defaultdict( + deque + ) # Slot -> Deque[FumeSlotStatus] + self.slot_status_update_queue = deque() # Deque[FumeSlotStatus] + self.processed_offset = [] # Min-heap for (sequence, offset) + self.committable_offset = last_committed_offset + self.max_slot_detected = 0 + self.unprocessed_blockchain_event: Deque[(FumeSessionSequence, BlockchainEvent)] = deque() + self.sequence = 1 + self.last_processed_fume_sequence = 0 + self.sequence_to_offset = {} # FumeSessionSequence -> FumeOffset + self.slot_memory_retention = slot_memory_retention + + def update_committed_offset(self, offset: FumeOffset) -> None: + assert ( + offset >= self.last_committed_offset + ), "Offset must be >= last committed offset" + self.last_committed_offset = offset + + def next_sequence(self) -> int: + ret = self.sequence + self.sequence += 1 + return ret + + def gc(self) -> None: + """Garbage collect old slots to respect memory retention limit.""" + while len(self.downloaded_slot) > self.slot_memory_retention: + slot = self.downloaded_slot.pop(0) if self.downloaded_slot else None + if slot is None: + break + self.slot_commitment_progression.pop(slot, None) + self.inflight_slot_shard_download.pop(slot, None) + self.blocked_slot_status_update.pop(slot, None) + + def queue_blockchain_event(self, events: List[BlockchainEvent]) -> None: + """Queue blockchain events for processing.""" + for event in events: + if event.offset < self.last_committed_offset: + continue + + if event.slot > self.max_slot_detected: + self.max_slot_detected = event.slot + + sequence = self.next_sequence() + 
self.sequence_to_offset[sequence] = event.offset + + if event.slot in self.downloaded_slot: + fume_status = FumeSlotStatus( + session_sequence=sequence, + offset=event.offset, + slot=event.slot, + parent_slot=event.parent_slot, + commitment_level=event.commitment_level, + dead_error=event.dead_error, + ) + if event.slot in self.inflight_slot_shard_download: + self.blocked_slot_status_update[event.slot].append(fume_status) + else: + self.slot_status_update_queue.append(fume_status) + else: + self.unprocessed_blockchain_event.append((sequence, event)) + + def make_slot_download_progress( + self, slot: Slot, shard_idx: FumeShardIdx + ) -> SlotDownloadState: + """Update download progress for a given slot.""" + download_progress = self.inflight_slot_shard_download.get(slot) + if not download_progress: + raise ValueError("Slot not in download") + + download_state = download_progress.do_progress(shard_idx) + + if download_state == SlotDownloadState.Done: + self.inflight_slot_shard_download.pop(slot) + self.downloaded_slot.add(slot) + self.slot_commitment_progression.setdefault( + slot, SlotCommitmentProgression() + ) + blocked_statuses = self.blocked_slot_status_update.pop(slot, deque()) + self.slot_status_update_queue.extend(blocked_statuses) + + return download_state + + def pop_next_slot_status(self) -> Optional[FumeSlotStatus]: + """Pop the next slot status to process.""" + while self.slot_status_update_queue: + slot_status = self.slot_status_update_queue.popleft() + commitment_history = self.slot_commitment_progression.get(slot_status.slot) + if ( + commitment_history + and slot_status.commitment_level + not in commitment_history.processed_commitment_levels + ): + commitment_history.processed_commitment_levels.add( + slot_status.commitment_level + ) + return slot_status + elif not commitment_history: + raise RuntimeError("Slot status should not be available here") + return None + + def make_sure_slot_commitment_progression_exists( + self, slot: Slot + ) -> 
SlotCommitmentProgression: + """Ensure a slot has a commitment progression entry.""" + return self.slot_commitment_progression.setdefault( + slot, SlotCommitmentProgression() + ) + + def pop_slot_to_download( + self, commitment = None + ) -> Optional[FumeDownloadRequest]: + """Pop the next slot to download.""" + min_commitment = commitment or CommitmentLevel.PROCESSED + while self.unprocessed_blockchain_event: + session_sequence, blockchain_event = self.unprocessed_blockchain_event.popleft() + event_cl = blockchain_event.commitment_level + + if event_cl < min_commitment: + self.slot_status_update_queue.append( + FumeSlotStatus( + session_sequence=session_sequence, + offset=blockchain_event.offset, + slot=blockchain_event.slot, + parent_slot=blockchain_event.parent_slot, + commitment_level=event_cl, + dead_error=blockchain_event.dead_error, + ) + ) + self.make_sure_slot_commitment_progression_exists(blockchain_event.slot) + continue + + if blockchain_event.slot in self.downloaded_slot: + self.make_sure_slot_commitment_progression_exists(blockchain_event.slot) + progression = self.slot_commitment_progression[blockchain_event.slot] + if event_cl in progression.processed_commitment_levels: + self.mark_event_as_processed(session_sequence) + continue + + self.slot_status_update_queue.append( + FumeSlotStatus( + session_sequence=session_sequence, + offset=blockchain_event.offset, + slot=blockchain_event.slot, + parent_slot=blockchain_event.parent_slot, + commitment_level=event_cl, + dead_error=blockchain_event.dead_error, + ) + ) + else: + blockchain_id = bytes(blockchain_event.blockchain_id) + block_uid = bytes(blockchain_event.block_uid) + if blockchain_event.slot not in self.inflight_slot_shard_download: + download_request = FumeDownloadRequest( + slot=blockchain_event.slot, + blockchain_id=blockchain_id, + block_uid=block_uid, + num_shards=blockchain_event.num_shards, + commitment_level=event_cl, + ) + download_progress = SlotDownloadProgress( + 
num_shards=blockchain_event.num_shards + ) + self.inflight_slot_shard_download[blockchain_event.slot] = ( + download_progress + ) + self.blocked_slot_status_update[blockchain_event.slot].append( + FumeSlotStatus( + session_sequence=session_sequence, + offset=blockchain_event.offset, + slot=blockchain_event.slot, + parent_slot=blockchain_event.parent_slot, + commitment_level=event_cl, + dead_error=blockchain_event.dead_error, + ) + ) + return download_request + return None + + def mark_event_as_processed(self, event_seq_number: FumeSessionSequence) -> None: + """Mark an event as processed and update committable offset.""" + fume_offset = self.sequence_to_offset.pop(event_seq_number, None) + if fume_offset is None: + raise ValueError("Event sequence number not found") + heapq.heappush( + self.processed_offset, (-event_seq_number, -fume_offset) + ) # Negate for min-heap + + while self.processed_offset: + seq, offset = self.processed_offset[0] + seq, offset = -seq, -offset # Convert back to positive + if seq != self.last_processed_fume_sequence + 1: + break + heapq.heappop(self.processed_offset) + self.committable_offset = offset + self.last_processed_fume_sequence = seq + + def slot_status_update_queue_len(self) -> int: + """Return the length of the slot status update queue.""" + return len(self.slot_status_update_queue) + + def processed_offset_queue_len(self) -> int: + """Return the length of the processed offset queue.""" + return len(self.processed_offset) + + def need_new_blockchain_events(self) -> bool: + """Check if new blockchain events are needed.""" + return not self.slot_status_update_queue and not self.blocked_slot_status_update From 1b7018faf95346b69fbcd70ed6e4bc7b67630a6c Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Wed, 21 May 2025 17:25:41 -0400 Subject: [PATCH 29/56] testing --- .../yellowstone-fumarole-client/src/grpc.rs | 32 + crates/yellowstone-fumarole-client/src/lib.rs | 33 +- .../yellowstone-fumarole-client/poetry.lock | 276 ++++---- 
.../pyproject.toml | 9 +- python/yellowstone-fumarole-client/pytest.ini | 6 + .../scripts/build.sh | 4 +- .../tests/test_fumarole_client_intg.py | 118 ++++ .../tests/test_fumarole_sm.py | 2 +- .../yellowstone_fumarole_client/__init__.py | 303 ++++++++- .../yellowstone_fumarole_client/config.py | 26 + .../grpc_connectivity.py | 197 ++++++ .../runtime/__init__.py | 4 +- .../runtime/aio.py | 593 ++++++++++++++++++ .../runtime/state_machine.py | 19 +- .../__init__.py | 0 .../fumarole_v2_pb2.py | 4 +- .../fumarole_v2_pb2.pyi | 80 +-- .../fumarole_v2_pb2_grpc.py | 2 +- .../geyser_pb2.py | 4 +- .../geyser_pb2.pyi | 42 +- .../geyser_pb2_grpc.py | 2 +- .../solana_storage_pb2.py | 0 .../solana_storage_pb2.pyi | 0 .../solana_storage_pb2_grpc.py | 0 24 files changed, 1513 insertions(+), 243 deletions(-) create mode 100644 crates/yellowstone-fumarole-client/src/grpc.rs create mode 100644 python/yellowstone-fumarole-client/pytest.ini create mode 100644 python/yellowstone-fumarole-client/tests/test_fumarole_client_intg.py create mode 100644 python/yellowstone-fumarole-client/yellowstone_fumarole_client/config.py create mode 100644 python/yellowstone-fumarole-client/yellowstone_fumarole_client/grpc_connectivity.py create mode 100644 python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py rename python/yellowstone-fumarole-client/{yellowstone_api => yellowstone_fumarole_proto}/__init__.py (100%) rename python/yellowstone-fumarole-client/{yellowstone_api => yellowstone_fumarole_proto}/fumarole_v2_pb2.py (99%) rename python/yellowstone-fumarole-client/{yellowstone_api => yellowstone_fumarole_proto}/fumarole_v2_pb2.pyi (77%) rename python/yellowstone-fumarole-client/{yellowstone_api => yellowstone_fumarole_proto}/fumarole_v2_pb2_grpc.py (99%) rename python/yellowstone-fumarole-client/{yellowstone_api => yellowstone_fumarole_proto}/geyser_pb2.py (99%) rename python/yellowstone-fumarole-client/{yellowstone_api => yellowstone_fumarole_proto}/geyser_pb2.pyi (92%) 
rename python/yellowstone-fumarole-client/{yellowstone_api => yellowstone_fumarole_proto}/geyser_pb2_grpc.py (99%) rename python/yellowstone-fumarole-client/{yellowstone_api => yellowstone_fumarole_proto}/solana_storage_pb2.py (100%) rename python/yellowstone-fumarole-client/{yellowstone_api => yellowstone_fumarole_proto}/solana_storage_pb2.pyi (100%) rename python/yellowstone-fumarole-client/{yellowstone_api => yellowstone_fumarole_proto}/solana_storage_pb2_grpc.py (100%) diff --git a/crates/yellowstone-fumarole-client/src/grpc.rs b/crates/yellowstone-fumarole-client/src/grpc.rs new file mode 100644 index 0000000..421fcae --- /dev/null +++ b/crates/yellowstone-fumarole-client/src/grpc.rs @@ -0,0 +1,32 @@ +use tonic::{service::interceptor::InterceptedService, transport::{Channel, Endpoint}}; + +use crate::{config::FumaroleConfig, proto::fumarole_client::FumaroleClient, string_pairs_to_metadata_header, FumeInterceptor}; + +#[derive(Clone)] +pub struct FumaroleGrpcConnector { + pub config: FumaroleConfig, + pub endpoint: Endpoint, +} + +impl FumaroleGrpcConnector { + pub async fn connect( + &self, + ) -> Result< + FumaroleClient>, + tonic::transport::Error, + > { + let channel = self.endpoint.connect().await?; + let interceptor = FumeInterceptor { + x_token: self + .config + .x_token + .as_ref() + .map(|token| token.try_into()) + .transpose() + .unwrap(), + metadata: string_pairs_to_metadata_header(self.config.x_metadata.clone()).unwrap(), + }; + Ok(FumaroleClient::with_interceptor(channel, interceptor) + .max_decoding_message_size(self.config.max_decoding_message_size_bytes)) + } +} \ No newline at end of file diff --git a/crates/yellowstone-fumarole-client/src/lib.rs b/crates/yellowstone-fumarole-client/src/lib.rs index 9b29a73..e0b3731 100644 --- a/crates/yellowstone-fumarole-client/src/lib.rs +++ b/crates/yellowstone-fumarole-client/src/lib.rs @@ -229,6 +229,7 @@ pub mod metrics; pub(crate) mod runtime; pub(crate) mod util; +pub(crate) mod grpc; use { 
config::FumaroleConfig, @@ -281,6 +282,7 @@ use { proto::{fumarole_client::FumaroleClient as TonicFumaroleClient, JoinControlPlane}, runtime::tokio::DataPlaneConn, tonic::transport::Endpoint, + crate::grpc::FumaroleGrpcConnector, }; #[derive(Clone)] @@ -728,33 +730,4 @@ impl FumaroleClient { { self.inner.create_consumer_group(request).await } -} - -#[derive(Clone)] -pub(crate) struct FumaroleGrpcConnector { - config: FumaroleConfig, - endpoint: Endpoint, -} - -impl FumaroleGrpcConnector { - async fn connect( - &self, - ) -> Result< - TonicFumaroleClient>, - tonic::transport::Error, - > { - let channel = self.endpoint.connect().await?; - let interceptor = FumeInterceptor { - x_token: self - .config - .x_token - .as_ref() - .map(|token| token.try_into()) - .transpose() - .unwrap(), - metadata: string_pairs_to_metadata_header(self.config.x_metadata.clone()).unwrap(), - }; - Ok(TonicFumaroleClient::with_interceptor(channel, interceptor) - .max_decoding_message_size(self.config.max_decoding_message_size_bytes)) - } -} +} \ No newline at end of file diff --git a/python/yellowstone-fumarole-client/poetry.lock b/python/yellowstone-fumarole-client/poetry.lock index 32ed653..9d9e39d 100644 --- a/python/yellowstone-fumarole-client/poetry.lock +++ b/python/yellowstone-fumarole-client/poetry.lock @@ -1,5 +1,18 @@ # This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. 
+[[package]] +name = "asyncio" +version = "3.4.3" +description = "reference implementation of PEP 3156" +optional = false +python-versions = "*" +files = [ + {file = "asyncio-3.4.3-cp33-none-win32.whl", hash = "sha256:b62c9157d36187eca799c378e572c969f0da87cd5fc42ca372d92cdb06e7e1de"}, + {file = "asyncio-3.4.3-cp33-none-win_amd64.whl", hash = "sha256:c46a87b48213d7464f22d9a497b9eef8c1928b68320a2fa94240f969f6fec08c"}, + {file = "asyncio-3.4.3-py3-none-any.whl", hash = "sha256:c4d18b22701821de07bd6aea8b53d21449ec0ec5680645e5317062ea21817d2d"}, + {file = "asyncio-3.4.3.tar.gz", hash = "sha256:83360ff8bc97980e4ff25c964c7bd3923d333d177aa4f7fb736b019f26c7cb41"}, +] + [[package]] name = "base58" version = "2.1.1" @@ -51,8 +64,6 @@ mypy-extensions = ">=0.4.3" packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] @@ -85,85 +96,68 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -[[package]] -name = "exceptiongroup" -version = "1.3.0" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, - {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} - -[package.extras] -test = ["pytest (>=6)"] - [[package]] name = "grpcio" -version = "1.71.0" +version = "1.72.0rc1" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.9" files = [ - {file = "grpcio-1.71.0-cp310-cp310-linux_armv7l.whl", hash = 
"sha256:c200cb6f2393468142eb50ab19613229dcc7829b5ccee8b658a36005f6669fdd"}, - {file = "grpcio-1.71.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b2266862c5ad664a380fbbcdbdb8289d71464c42a8c29053820ee78ba0119e5d"}, - {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:0ab8b2864396663a5b0b0d6d79495657ae85fa37dcb6498a2669d067c65c11ea"}, - {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c30f393f9d5ff00a71bb56de4aa75b8fe91b161aeb61d39528db6b768d7eac69"}, - {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f250ff44843d9a0615e350c77f890082102a0318d66a99540f54769c8766ab73"}, - {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e6d8de076528f7c43a2f576bc311799f89d795aa6c9b637377cc2b1616473804"}, - {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9b91879d6da1605811ebc60d21ab6a7e4bae6c35f6b63a061d61eb818c8168f6"}, - {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f71574afdf944e6652203cd1badcda195b2a27d9c83e6d88dc1ce3cfb73b31a5"}, - {file = "grpcio-1.71.0-cp310-cp310-win32.whl", hash = "sha256:8997d6785e93308f277884ee6899ba63baafa0dfb4729748200fcc537858a509"}, - {file = "grpcio-1.71.0-cp310-cp310-win_amd64.whl", hash = "sha256:7d6ac9481d9d0d129224f6d5934d5832c4b1cddb96b59e7eba8416868909786a"}, - {file = "grpcio-1.71.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:d6aa986318c36508dc1d5001a3ff169a15b99b9f96ef5e98e13522c506b37eef"}, - {file = "grpcio-1.71.0-cp311-cp311-macosx_10_14_universal2.whl", hash = "sha256:d2c170247315f2d7e5798a22358e982ad6eeb68fa20cf7a820bb74c11f0736e7"}, - {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:e6f83a583ed0a5b08c5bc7a3fe860bb3c2eac1f03f1f63e0bc2091325605d2b7"}, - {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:4be74ddeeb92cc87190e0e376dbc8fc7736dbb6d3d454f2fa1f5be1dee26b9d7"}, - {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dd0dfbe4d5eb1fcfec9490ca13f82b089a309dc3678e2edabc144051270a66e"}, - {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a2242d6950dc892afdf9e951ed7ff89473aaf744b7d5727ad56bdaace363722b"}, - {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0fa05ee31a20456b13ae49ad2e5d585265f71dd19fbd9ef983c28f926d45d0a7"}, - {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3d081e859fb1ebe176de33fc3adb26c7d46b8812f906042705346b314bde32c3"}, - {file = "grpcio-1.71.0-cp311-cp311-win32.whl", hash = "sha256:d6de81c9c00c8a23047136b11794b3584cdc1460ed7cbc10eada50614baa1444"}, - {file = "grpcio-1.71.0-cp311-cp311-win_amd64.whl", hash = "sha256:24e867651fc67717b6f896d5f0cac0ec863a8b5fb7d6441c2ab428f52c651c6b"}, - {file = "grpcio-1.71.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:0ff35c8d807c1c7531d3002be03221ff9ae15712b53ab46e2a0b4bb271f38537"}, - {file = "grpcio-1.71.0-cp312-cp312-macosx_10_14_universal2.whl", hash = "sha256:b78a99cd1ece4be92ab7c07765a0b038194ded2e0a26fd654591ee136088d8d7"}, - {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:dc1a1231ed23caac1de9f943d031f1bc38d0f69d2a3b243ea0d664fc1fbd7fec"}, - {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6beeea5566092c5e3c4896c6d1d307fb46b1d4bdf3e70c8340b190a69198594"}, - {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5170929109450a2c031cfe87d6716f2fae39695ad5335d9106ae88cc32dc84c"}, - {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5b08d03ace7aca7b2fadd4baf291139b4a5f058805a8327bfe9aece7253b6d67"}, - {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_i686.whl", hash = 
"sha256:f903017db76bf9cc2b2d8bdd37bf04b505bbccad6be8a81e1542206875d0e9db"}, - {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:469f42a0b410883185eab4689060a20488a1a0a00f8bbb3cbc1061197b4c5a79"}, - {file = "grpcio-1.71.0-cp312-cp312-win32.whl", hash = "sha256:ad9f30838550695b5eb302add33f21f7301b882937460dd24f24b3cc5a95067a"}, - {file = "grpcio-1.71.0-cp312-cp312-win_amd64.whl", hash = "sha256:652350609332de6dac4ece254e5d7e1ff834e203d6afb769601f286886f6f3a8"}, - {file = "grpcio-1.71.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:cebc1b34ba40a312ab480ccdb396ff3c529377a2fce72c45a741f7215bfe8379"}, - {file = "grpcio-1.71.0-cp313-cp313-macosx_10_14_universal2.whl", hash = "sha256:85da336e3649a3d2171e82f696b5cad2c6231fdd5bad52616476235681bee5b3"}, - {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f9a412f55bb6e8f3bb000e020dbc1e709627dcb3a56f6431fa7076b4c1aab0db"}, - {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47be9584729534660416f6d2a3108aaeac1122f6b5bdbf9fd823e11fe6fbaa29"}, - {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c9c80ac6091c916db81131d50926a93ab162a7e97e4428ffc186b6e80d6dda4"}, - {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:789d5e2a3a15419374b7b45cd680b1e83bbc1e52b9086e49308e2c0b5bbae6e3"}, - {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:1be857615e26a86d7363e8a163fade914595c81fec962b3d514a4b1e8760467b"}, - {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:a76d39b5fafd79ed604c4be0a869ec3581a172a707e2a8d7a4858cb05a5a7637"}, - {file = "grpcio-1.71.0-cp313-cp313-win32.whl", hash = "sha256:74258dce215cb1995083daa17b379a1a5a87d275387b7ffe137f1d5131e2cfbb"}, - {file = "grpcio-1.71.0-cp313-cp313-win_amd64.whl", hash = "sha256:22c3bc8d488c039a199f7a003a38cb7635db6656fa96437a8accde8322ce2366"}, - {file = 
"grpcio-1.71.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c6a0a28450c16809f94e0b5bfe52cabff63e7e4b97b44123ebf77f448534d07d"}, - {file = "grpcio-1.71.0-cp39-cp39-macosx_10_14_universal2.whl", hash = "sha256:a371e6b6a5379d3692cc4ea1cb92754d2a47bdddeee755d3203d1f84ae08e03e"}, - {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:39983a9245d37394fd59de71e88c4b295eb510a3555e0a847d9965088cdbd033"}, - {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9182e0063112e55e74ee7584769ec5a0b4f18252c35787f48738627e23a62b97"}, - {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693bc706c031aeb848849b9d1c6b63ae6bcc64057984bb91a542332b75aa4c3d"}, - {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:20e8f653abd5ec606be69540f57289274c9ca503ed38388481e98fa396ed0b41"}, - {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8700a2a57771cc43ea295296330daaddc0d93c088f0a35cc969292b6db959bf3"}, - {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d35a95f05a8a2cbe8e02be137740138b3b2ea5f80bd004444e4f9a1ffc511e32"}, - {file = "grpcio-1.71.0-cp39-cp39-win32.whl", hash = "sha256:f9c30c464cb2ddfbc2ddf9400287701270fdc0f14be5f08a1e3939f1e749b455"}, - {file = "grpcio-1.71.0-cp39-cp39-win_amd64.whl", hash = "sha256:63e41b91032f298b3e973b3fa4093cbbc620c875e2da7b93e249d4728b54559a"}, - {file = "grpcio-1.71.0.tar.gz", hash = "sha256:2b85f7820475ad3edec209d3d89a7909ada16caab05d3f2e08a7e8ae3200a55c"}, + {file = "grpcio-1.72.0rc1-cp310-cp310-linux_armv7l.whl", hash = "sha256:db7db4b246a7fb21aeb70e7220be480948aa9c535eaa777ea0c840416ed8cac9"}, + {file = "grpcio-1.72.0rc1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:baf028e61662fd320c18fb50070b6e330fa24b2b3a4d113f4d57b41e0f5b5873"}, + {file = "grpcio-1.72.0rc1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = 
"sha256:bf84cf17dfbf49ebe11b081b7a3c83b23625a80c979741e2e98b0ddb41080397"}, + {file = "grpcio-1.72.0rc1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fd6f8700d34754b32d13af234da2e413f408c8b741c8039f11beb06d53c3f6a"}, + {file = "grpcio-1.72.0rc1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f05d243b8d814dd1c6fca19e4e0c5986fc70e2c3aa29e2c7c67e877e4c03ede6"}, + {file = "grpcio-1.72.0rc1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:390a70394e2c315d7c480496db259ec16c00baeebf759c8967247269f0fee981"}, + {file = "grpcio-1.72.0rc1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b08973c62eda11343e7131d78635d50ae0c138a8f39eb817ca83cca842527d04"}, + {file = "grpcio-1.72.0rc1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ce539397a258af1dee26118c40327004d023617bc99493baaf8e7938491f7361"}, + {file = "grpcio-1.72.0rc1-cp310-cp310-win32.whl", hash = "sha256:4f97f628095bbdf6d4c2c15c1bc18f0514f90781528bc6082bb697ccc71d4f42"}, + {file = "grpcio-1.72.0rc1-cp310-cp310-win_amd64.whl", hash = "sha256:dbcdf7a5463b61fca1586b54f7ea3c9dfd159f535224f457ae307f52d8d4a839"}, + {file = "grpcio-1.72.0rc1-cp311-cp311-linux_armv7l.whl", hash = "sha256:23ebb3947783f10fec3e1d0b29b94db8e72f721900d1dd9c1d6db5876da69066"}, + {file = "grpcio-1.72.0rc1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:fd96b20846907ed4cd95bf1d628f16732f450114bde897eedb323fc3bc1eddb3"}, + {file = "grpcio-1.72.0rc1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:6df1ba4a5f5793ae210699e1b1745f77a4ac17f73510fc36ee12c215f02523b4"}, + {file = "grpcio-1.72.0rc1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3398957c611f0af7cee4fdd34268b6664be8689eae0327440efb794e544908b"}, + {file = "grpcio-1.72.0rc1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ef66029da9cbe94ba3047c1b04653e1d5096ca8d036eb6e24092f0e847d2c4f"}, + {file = "grpcio-1.72.0rc1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:6566e3e3458805381f8714492e8f559f082f8955ccd1c98d71f8afc0612dc841"}, + {file = "grpcio-1.72.0rc1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3c799bfa92450e95d3f1f9cc4b7d8cbefc8bd4356d3f6573d2fb5e698353192a"}, + {file = "grpcio-1.72.0rc1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a251992531f3b16be3c013ec45a9caa69ecfe9b45335652d5681659f6d117233"}, + {file = "grpcio-1.72.0rc1-cp311-cp311-win32.whl", hash = "sha256:c9e5f2c628dedf0886b774eee17e003a043941024e68ee2ebe76be6981a7baab"}, + {file = "grpcio-1.72.0rc1-cp311-cp311-win_amd64.whl", hash = "sha256:8b9c0a84ff584da3f5c0cb04ee3d87c0bc70d41ab5a21d3b943963a94c622892"}, + {file = "grpcio-1.72.0rc1-cp312-cp312-linux_armv7l.whl", hash = "sha256:188ac9d8cb05c250e212ba946a65a8541419bdfd803373d6b7fb8b10fe5ff991"}, + {file = "grpcio-1.72.0rc1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:8bd956711dc21235bc78a70bf04a28b3f747c6576b9bb79362803707fec9f705"}, + {file = "grpcio-1.72.0rc1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:b032b9cbb325e28ff847b6aae1df5a090aa49b682dc80c926b24a96de43c01aa"}, + {file = "grpcio-1.72.0rc1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ca12a4388a40eb0411264af291184e2cca38176996b591ac047844abd81d40b"}, + {file = "grpcio-1.72.0rc1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7cefd52f392f4d6747b401f825901c48176737f7b03b17be0a0a638da194749"}, + {file = "grpcio-1.72.0rc1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1a24408fb051b70efa440b95f7e1acbb1c3067609934aa53a953d8d2cfc4d824"}, + {file = "grpcio-1.72.0rc1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:c7b37608d14792d3dacb9aba55b96a17a074e139c4567b0ac5c1926302add910"}, + {file = "grpcio-1.72.0rc1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:81ca42a96299ca617f3bc7b60660f15cabb98de6fce440ecd4d0640a5554345f"}, + {file = "grpcio-1.72.0rc1-cp312-cp312-win32.whl", hash = 
"sha256:9ff2ef2a553d4edc8c620df3735b15a1e7dc05a60262e8c28445f2676fb09189"}, + {file = "grpcio-1.72.0rc1-cp312-cp312-win_amd64.whl", hash = "sha256:3c9a6613662591c198d9e4e499f3336bc5c1c0e3fe3f0922cf48e74b37b3dcd1"}, + {file = "grpcio-1.72.0rc1-cp313-cp313-linux_armv7l.whl", hash = "sha256:995e3e5c43cab6d0f1922b43b3c01a2624a4497ce91c3124e807497654301c59"}, + {file = "grpcio-1.72.0rc1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:8dfb0ff2ddd708dbecdffa37245b79aef707e789ffb0fc6a8be01608d982afcd"}, + {file = "grpcio-1.72.0rc1-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:7e08eb53d6123995da63df90ce50e5b834de0a8ebfb1a3ac0890a2e246d2771c"}, + {file = "grpcio-1.72.0rc1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:71cb52c0956fe7868692b490fda341a52d8187fab94e1136f5bd253c8e3560ac"}, + {file = "grpcio-1.72.0rc1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcf76ce8d4a6829f112ad88c4e6d528dbef922e01834d4a5cc3718bf599f7e84"}, + {file = "grpcio-1.72.0rc1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:8852b6234a52b6b694a5f9a5a687d59127b3e71c8e345eebd6d483abbc412217"}, + {file = "grpcio-1.72.0rc1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:d1a0fee8420d9e453dc8cba1c7c067ca2d3054487cb6616ab8dad41f15e57465"}, + {file = "grpcio-1.72.0rc1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:a13149f4fd3904093fa2dba484744dd7205f536650a533ab24dd95cca393c14c"}, + {file = "grpcio-1.72.0rc1-cp313-cp313-win32.whl", hash = "sha256:cebe148511a1965363fc6aafd60a488fe9dc5d74dd92a59a8ecba66ddd53c573"}, + {file = "grpcio-1.72.0rc1-cp313-cp313-win_amd64.whl", hash = "sha256:843352c352970a1df5bbf7da68d2770781f4bff2c85a4a0d20cc6eaaadf26e59"}, + {file = "grpcio-1.72.0rc1-cp39-cp39-linux_armv7l.whl", hash = "sha256:2083c0cdff47ff7d4b093d05d703baeeef8db3b2c1f43c9f9d4288a99e444cdd"}, + {file = "grpcio-1.72.0rc1-cp39-cp39-macosx_11_0_universal2.whl", hash = 
"sha256:42df7e0f9d66f5c9b246d8e1da74605bce27b10dec20b6fc204edd6e7178da2d"}, + {file = "grpcio-1.72.0rc1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:1190c2e4f221b5bd0e6eba3e44d6758ef48eeb2216dcb9734c158e8a5d8ce6a3"}, + {file = "grpcio-1.72.0rc1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d6c8d2ea63e1cdaaa81271e5c867fcd9732050324df372ff9d3163968be68c8"}, + {file = "grpcio-1.72.0rc1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f6ee161b9d112232e5d6be437bf56383dca2334bd17e8b7a4a3f97f33722bdd"}, + {file = "grpcio-1.72.0rc1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9abbdf945e3b151603d642f2bc7a637b87af2e3480ed047689bad9eb4fa9c712"}, + {file = "grpcio-1.72.0rc1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2edab5d26319a1fed695ec658efe3846b75e0c7f3a6202b042099c9b11dc10fd"}, + {file = "grpcio-1.72.0rc1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:03b46e0041bee18a786ccef978bc29a26e4bd1b73a6ca0b21252387167843ff1"}, + {file = "grpcio-1.72.0rc1-cp39-cp39-win32.whl", hash = "sha256:9b861cbfb63433e02b52f9971644095bec4a5fcd1e4d3f94e18cfad38f649d53"}, + {file = "grpcio-1.72.0rc1-cp39-cp39-win_amd64.whl", hash = "sha256:2416792a567cba9f92bffc1a55ce0f2c8106956a2e32bfe8a22a8094a56b7108"}, + {file = "grpcio-1.72.0rc1.tar.gz", hash = "sha256:221793dccd3332060f426975a041d319d6d57323d857d4afc25257ec4a5a67f3"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.71.0)"] +protobuf = ["grpcio-tools (>=1.72.0rc1)"] [[package]] name = "grpcio-tools" @@ -338,15 +332,93 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" pluggy = ">=1.5,<2" -tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", 
"xmlschema"] +[[package]] +name = "pytest-asyncio" +version = "0.26.0" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pytest_asyncio-0.26.0-py3-none-any.whl", hash = "sha256:7b51ed894f4fbea1340262bdae5135797ebbe21d8638978e35d31c6d19f72fb0"}, + {file = "pytest_asyncio-0.26.0.tar.gz", hash = "sha256:c4df2a697648241ff39e7f0e4a73050b03f123f760673956cf0d72a4990e312f"}, +] + +[package.dependencies] +pytest = ">=8.2,<9" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = 
"PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = 
"PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = 
"PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + [[package]] name = "setuptools" version = "80.8.0" @@ -392,59 +464,7 @@ files = [ {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] -[[package]] -name = "tomli" -version = "2.2.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, - {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, - {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, - {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, - {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = 
"sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, - {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, - {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, - {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, - {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, - {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, -] - -[[package]] -name = 
"typing-extensions" -version = "4.13.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, - {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, -] - [metadata] lock-version = "2.0" -python-versions = "^3.10" -content-hash = "fc1e192401cbe5da4fe12291c42d1ec2de9dba6c6426b3bf6f8350655904dba4" +python-versions = "^3.13" +content-hash = "1f8950e8b5c62addccfab2f840ef0e42e615879717c6058e09603cae71e89cb1" diff --git a/python/yellowstone-fumarole-client/pyproject.toml b/python/yellowstone-fumarole-client/pyproject.toml index 74c2c3d..f0d7a10 100644 --- a/python/yellowstone-fumarole-client/pyproject.toml +++ b/python/yellowstone-fumarole-client/pyproject.toml @@ -8,18 +8,20 @@ authors = ["Louis-Vincent ", "Triton One FumaroleConfig: + + path = environ["TEST_FUMAROLE_CONFIG"] + + with open(path, "r") as f: + return FumaroleConfig.from_yaml(f) + + +@pytest.mark.asyncio +async def test_fumarole_delete_all(fumarole_config): + """ + Test the delete_all_cg function. 
+ """ + logging.debug("test_fumarole_delete_all") + # Create a FumaroleClient instance + + fumarole_config.x_metadata = {"x-subscription-id": str(uuid.uuid4())} + + client: FumaroleClient = await FumaroleClient.connect(fumarole_config) + # Call the delete_all_cg function + await client.delete_all_consumer_groups() + + resp = await client.create_consumer_group( + CreateConsumerGroupRequest( + consumer_group_name="test", + ) + ) + assert resp.consumer_group_id, "Failed to create consumer group" + + resp = await client.create_consumer_group( + CreateConsumerGroupRequest( + consumer_group_name="test2", + ) + ) + + logging.debug("create consumer group response: %s", resp) + + cg_list = await client.list_consumer_groups() + + assert len(cg_list.consumer_groups) == 2 + + await client.delete_all_consumer_groups() + + cg_list = await client.list_consumer_groups() + assert len(cg_list.consumer_groups) == 0 + + cg_info = await client.get_consumer_group_info(consumer_group_name="test") + assert cg_info is None, "Failed to get consumer group info" + + +@pytest.mark.asyncio +async def test_dragonsmouth_adapter(fumarole_config): + """ + Test the delete_all_cg function. 
+ """ + logging.debug("test_fumarole_delete_all") + # Create a FumaroleClient instance + + fumarole_config.x_metadata = {"x-subscription-id": str(uuid.uuid4())} + + client: FumaroleClient = await FumaroleClient.connect(fumarole_config) + await client.delete_all_consumer_groups() + + resp = await client.create_consumer_group( + CreateConsumerGroupRequest( + consumer_group_name="test", + ) + ) + assert resp.consumer_group_id, "Failed to create consumer group" + + session = await client.dragonsmouth_subscribe( + consumer_group_name="test", + request=SubscribeRequest( + accounts={"fumarole": SubscribeRequestFilterAccounts()}, + transactions={"fumarole": SubscribeRequestFilterTransactions()}, + blocks_meta={"fumarole": SubscribeRequestFilterBlocksMeta()}, + entry={"fumarole": SubscribeRequestFilterEntry()}, + slots={"fumarole": SubscribeRequestFilterSlots()}, + ), + ) + + dragonsmouth_source = session.source + fh = session.fumarole_handle + while True: + + tasks = [asyncio.create_task(dragonsmouth_source.get()), fh] + + done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED) + + for t in pending: + t.cancel() + + for task in done: + if task == tasks[0]: + print(f"Consumed: {type(task.result())}") + else: + print(f"session ended with: {type(task.result())}") + return diff --git a/python/yellowstone-fumarole-client/tests/test_fumarole_sm.py b/python/yellowstone-fumarole-client/tests/test_fumarole_sm.py index d4e50d9..2f0243b 100644 --- a/python/yellowstone-fumarole-client/tests/test_fumarole_sm.py +++ b/python/yellowstone-fumarole-client/tests/test_fumarole_sm.py @@ -7,7 +7,7 @@ SlotDownloadState, ) -from yellowstone_api.fumarole_v2_pb2 import BlockchainEvent, CommitmentLevel +from yellowstone_fumarole_proto.fumarole_v2_pb2 import BlockchainEvent, CommitmentLevel # Tests diff --git a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py index 
f8cf2bf..ed0f869 100644 --- a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py @@ -1 +1,302 @@ -import yellowstone_fumarole_client.runtime \ No newline at end of file +import asyncio +import logging +from yellowstone_fumarole_client.grpc_connectivity import ( + FumaroleGrpcConnector, +) +from typing import Dict, Optional +from dataclasses import dataclass +from . import config +from yellowstone_fumarole_client.runtime.aio import ( + AsyncioFumeDragonsmouthRuntime, + GrpcDownloadTaskRunner, + DownloadTaskRunnerChannels, + DataPlaneConn, + FumaroleSM, + DEFAULT_GC_INTERVAL, + DEFAULT_SLOT_MEMORY_RETENTION, + DragonsmouthSubscribeRequestBidi, +) +from yellowstone_fumarole_proto.geyser_pb2 import SubscribeRequest, SubscribeUpdate +from yellowstone_fumarole_proto.fumarole_v2_pb2 import ( + ControlResponse, + VersionRequest, + VersionResponse, + JoinControlPlane, + ControlCommand, + ListConsumerGroupsRequest, + ListConsumerGroupsResponse, + GetConsumerGroupInfoRequest, + ConsumerGroupInfo, + DeleteConsumerGroupRequest, + DeleteConsumerGroupResponse, + CreateConsumerGroupRequest, + CreateConsumerGroupResponse, +) +from yellowstone_fumarole_proto.fumarole_v2_pb2_grpc import FumaroleStub +import grpc + +__all__ = [ + "FumaroleClient", + "FumaroleConfig", + "FumaroleSubscribeConfig", + "DragonsmouthAdapterSession", + "DEFAULT_DRAGONSMOUTH_CAPACITY", + "DEFAULT_COMMIT_INTERVAL", + "DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT", + "DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP", +] + +# Constants +DEFAULT_DRAGONSMOUTH_CAPACITY = 10000 +DEFAULT_COMMIT_INTERVAL = 10.0 # seconds +DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT = 3 +DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP = 10 + +# Error classes + + +# FumaroleSubscribeConfig +@dataclass +class FumaroleSubscribeConfig: + """Configuration for subscribing to a dragonsmouth stream.""" + + # The maximum number of concurrent download tasks per TCP 
connection. + concurrent_download_limit_per_tcp: int = DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP + + # The interval at which to commit the slot memory. + commit_interval: float = DEFAULT_COMMIT_INTERVAL + + # The maximum number of failed slot download attempts before giving up. + max_failed_slot_download_attempt: int = DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT + + # The maximum number of slots to download concurrently. + data_channel_capacity: int = DEFAULT_DRAGONSMOUTH_CAPACITY + + # The interval at which to perform garbage collection on the slot memory. + gc_interval: int = DEFAULT_GC_INTERVAL + + # The retention period for slot memory in seconds. + slot_memory_retention: int = DEFAULT_SLOT_MEMORY_RETENTION + + +# DragonsmouthAdapterSession +@dataclass +class DragonsmouthAdapterSession: + """Session for interacting with the dragonsmouth-like stream.""" + + # The queue for sending SubscribeRequest update to the dragonsmouth stream. + sink: asyncio.Queue + + # The queue for receiving SubscribeUpdate from the dragonsmouth stream. + source: asyncio.Queue + + # The task handle for the fumarole runtime. 
+ fumarole_handle: asyncio.Task + + +# FumaroleClient +class FumaroleClient: + """Fumarole client for interacting with the Fumarole server.""" + + logger = logging.getLogger(__name__) + + def __init__(self, connector: FumaroleGrpcConnector, stub: FumaroleStub): + self.connector = connector + self.stub = stub + + @staticmethod + async def connect(config: config.FumaroleConfig) -> "FumaroleClient": + """Connect to the Fumarole server using the provided configuration.""" + endpoint = config.endpoint + connector = FumaroleGrpcConnector(config=config, endpoint=endpoint) + FumaroleClient.logger.debug(f"Connecting to {endpoint}") + client = await connector.connect() + FumaroleClient.logger.debug(f"Connected to {endpoint}") + return FumaroleClient(connector=connector, stub=client) + + async def version(self) -> VersionResponse: + """Get the version of the Fumarole server.""" + request = VersionRequest() + response = await self.stub.version(request) + return response + + async def dragonsmouth_subscribe( + self, consumer_group_name: str, request: SubscribeRequest + ) -> DragonsmouthAdapterSession: + """Subscribe to a dragonsmouth stream with default configuration.""" + return await self.dragonsmouth_subscribe_with_config( + consumer_group_name, request, FumaroleSubscribeConfig() + ) + + async def dragonsmouth_subscribe_with_config( + self, + consumer_group_name: str, + request: SubscribeRequest, + config: FumaroleSubscribeConfig, + ) -> DragonsmouthAdapterSession: + """Subscribe to a dragonsmouth stream with custom configuration.""" + dragonsmouth_outlet = asyncio.Queue(maxsize=DEFAULT_DRAGONSMOUTH_CAPACITY) + dragonsmouth_inlet = asyncio.Queue(maxsize=DEFAULT_DRAGONSMOUTH_CAPACITY) + fume_control_plane_q = asyncio.Queue(maxsize=100) + + initial_join = JoinControlPlane(consumer_group_name=consumer_group_name) + initial_join_command = ControlCommand(initial_join=initial_join) + await fume_control_plane_q.put(initial_join_command) + + FumaroleClient.logger.debug( + f"Sent 
initial join command: {initial_join_command}" + ) + + async def control_plane_sink(): + try: + while True: + update = await fume_control_plane_q.get() + yield update + finally: + FumaroleClient.logger.debug("Control plane sink closed") + + fume_control_plane_stream_rx: grpc.aio.StreamStreamMultiCallable = ( + self.stub.Subscribe(control_plane_sink()) + ) + + control_response: ControlResponse = await fume_control_plane_stream_rx.read() + init = control_response.init + if init is None: + raise ValueError(f"Unexpected initial response: {control_response}") + + FumaroleClient.logger.debug(f"Control response: {control_response}") + + last_committed_offset = init.last_committed_offsets.get(0) + if last_committed_offset is None: + raise ValueError("No last committed offset") + + sm = FumaroleSM(last_committed_offset, config.slot_memory_retention) + subscribe_request_queue = asyncio.Queue(maxsize=100) + dm_bidi = DragonsmouthSubscribeRequestBidi(rx=subscribe_request_queue) + + data_plane_channel_vec = [] + for _ in range(1): # TODO: support multiple connections + client = await self.connector.connect() + conn = DataPlaneConn( + permits=config.concurrent_download_limit_per_tcp, client=client, rev=0 + ) + data_plane_channel_vec.append(conn) + + download_task_runner_cnc_queue = asyncio.Queue(maxsize=10) + download_task_queue = asyncio.Queue(maxsize=10) + download_result_queue = asyncio.Queue(maxsize=10) + + grpc_download_task_runner = GrpcDownloadTaskRunner( + data_plane_channel_vec=data_plane_channel_vec, + connector=self.connector, + cnc_rx=download_task_runner_cnc_queue, + download_task_queue=download_task_queue, + outlet=download_result_queue, + max_download_attempt_by_slot=config.max_failed_slot_download_attempt, + subscribe_request=request, + ) + + download_task_runner_chans = DownloadTaskRunnerChannels( + download_task_queue_tx=download_task_queue, + cnc_tx=download_task_runner_cnc_queue, + download_result_rx=download_result_queue, + ) + + rt = 
AsyncioFumeDragonsmouthRuntime( + sm=sm, + download_task_runner_chans=download_task_runner_chans, + dragonsmouth_bidi=dm_bidi, + subscribe_request=request, + consumer_group_name=consumer_group_name, + control_plane_q=fume_control_plane_q, + control_plane_stream_reader=fume_control_plane_stream_rx, + dragonsmouth_outlet=dragonsmouth_outlet, + commit_interval=config.commit_interval, + gc_interval=config.gc_interval, + ) + + download_task_runner_task = asyncio.create_task(grpc_download_task_runner.run()) + rt_task = asyncio.create_task(rt.run()) + + async def runtime_fut(): + tasks = [download_task_runner_task, rt_task] + done, pending = await asyncio.wait( + tasks, return_when=asyncio.FIRST_COMPLETED + ) + + for task in pending: + task.cancel() + + for task in done: + if task == tasks[0]: + FumaroleClient.logger.info( + f"Download task runner completed with {task.result()}" + ) + elif task == tasks[1]: + FumaroleClient.logger.info( + f"Runtime task completed with {task.result()}" + ) + + fumarole_handle = asyncio.create_task(runtime_fut()) + FumaroleClient.logger.debug(f"Fumarole handle created: {fumarole_handle}") + return DragonsmouthAdapterSession( + sink=subscribe_request_queue, + source=dragonsmouth_inlet, + fumarole_handle=fumarole_handle, + ) + + async def list_consumer_groups( + self, + ) -> ListConsumerGroupsResponse: + """Lists all consumer groups.""" + return await self.stub.ListConsumerGroups(ListConsumerGroupsRequest()) + + async def get_consumer_group_info( + self, consumer_group_name: str + ) -> Optional[ConsumerGroupInfo]: + """Gets information about a consumer group by name.""" + try: + return await self.stub.GetConsumerGroupInfo( + GetConsumerGroupInfoRequest(consumer_group_name=consumer_group_name) + ) + except grpc.aio.AioRpcError as e: + if e.code() == grpc.StatusCode.NOT_FOUND: + return None + else: + raise + + async def delete_consumer_group( + self, consumer_group_name: str + ) -> DeleteConsumerGroupResponse: + """Delete a consumer group by 
name.""" + return await self.stub.DeleteConsumerGroup( + DeleteConsumerGroupRequest(consumer_group_name=consumer_group_name) + ) + + async def delete_all_consumer_groups( + self, + ) -> DeleteConsumerGroupResponse: + """Deletes all consumer groups.""" + consumer_group_list = await self.list_consumer_groups() + + tasks = [] + + async with asyncio.TaskGroup() as tg: + for group in consumer_group_list.consumer_groups: + cg_name = group.consumer_group_name + task = tg.create_task(self.delete_consumer_group(cg_name)) + tasks.append((cg_name, task)) + + # Raise an error if any task fails + for cg_name, task in tasks: + result = task.result() + if not result.success: + raise RuntimeError( + f"Failed to delete consumer group {cg_name}: {result.error}" + ) + + async def create_consumer_group( + self, request: CreateConsumerGroupRequest + ) -> CreateConsumerGroupResponse: + """Creates a new consumer group.""" + return await self.stub.CreateConsumerGroup(request) diff --git a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/config.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/config.py new file mode 100644 index 0000000..2d80803 --- /dev/null +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/config.py @@ -0,0 +1,26 @@ +from dataclasses import dataclass +from typing import Dict, Optional +import yaml + + +@dataclass +class FumaroleConfig: + endpoint: str + x_token: Optional[str] = None + max_decoding_message_size_bytes: int = 512_000_000 + x_metadata: Dict[str, str] = None + + def __post_init__(self): + self.x_metadata = self.x_metadata or {} + + @classmethod + def from_yaml(cls, fileobj) -> "FumaroleConfig": + data = yaml.safe_load(fileobj) + return cls( + endpoint=data["endpoint"], + x_token=data.get("x-token") or data.get("x_token"), + max_decoding_message_size_bytes=data.get( + "max_decoding_message_size_bytes", cls.max_decoding_message_size_bytes + ), + x_metadata=data.get("x-metadata", {}), + ) diff --git 
a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/grpc_connectivity.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/grpc_connectivity.py new file mode 100644 index 0000000..3cf4c9e --- /dev/null +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/grpc_connectivity.py @@ -0,0 +1,197 @@ +import logging +from typing import Optional +import grpc +from yellowstone_fumarole_client.config import FumaroleConfig +from yellowstone_fumarole_proto.fumarole_v2_pb2_grpc import FumaroleStub + +X_TOKEN_HEADER = "x-token" + + +def _triton_sign_request( + callback: grpc.AuthMetadataPluginCallback, + x_token: Optional[str], + error: Optional[Exception], +): + # WARNING: metadata is a 1d-tuple (,), the last comma is necessary + metadata = ((X_TOKEN_HEADER, x_token),) + return callback(metadata, error) + + +class TritonAuthMetadataPlugin(grpc.AuthMetadataPlugin): + """Metadata wrapper for raw access token credentials.""" + + def __init__(self, x_token: str): + self.x_token = x_token + + def __call__( + self, + context: grpc.AuthMetadataContext, + callback: grpc.AuthMetadataPluginCallback, + ): + return _triton_sign_request(callback, self.x_token, None) + + +def grpc_channel(endpoint: str, x_token=None, compression=None, *grpc_options): + options = [("grpc.max_receive_message_length", 111111110), *grpc_options] + if x_token is not None: + auth = TritonAuthMetadataPlugin(x_token) + # ssl_creds allow you to use our https endpoint + # grpc.ssl_channel_credentials with no arguments will look through your CA trust store. + ssl_creds = grpc.ssl_channel_credentials() + + # call credentials will be sent on each request if setup with composite_channel_credentials. 
+ call_creds: grpc.CallCredentials = grpc.metadata_call_credentials(auth) + + # Combined creds will store the channel creds aswell as the call credentials + combined_creds = grpc.composite_channel_credentials(ssl_creds, call_creds) + + return grpc.secure_channel( + endpoint, + credentials=combined_creds, + compression=compression, + options=options, + ) + else: + return grpc.insecure_channel(endpoint, compression=compression, options=options) + + +# Because of a bug in grpcio library, multiple inheritance of ClientInterceptor subclasses does not work. +# You have to create a new class for each type of interceptor you want to use. + + +class MetadataInterceptor( + grpc.aio.UnaryStreamClientInterceptor, + grpc.aio.StreamUnaryClientInterceptor, + grpc.aio.StreamStreamClientInterceptor, + grpc.aio.UnaryUnaryClientInterceptor, +): + + def __init__(self, metadata): + if isinstance(metadata, dict): + metadata = metadata.items() + self.metadata = list(metadata) + + async def intercept_unary_unary( + self, continuation, client_call_details: grpc.aio.ClientCallDetails, request + ): + logging.debug("intercept_unary_unary") + new_details = client_call_details._replace( + metadata=self._merge_metadata(client_call_details.metadata) + ) + return await continuation(new_details, request) + + async def intercept_unary_stream( + self, continuation, client_call_details: grpc.aio.ClientCallDetails, request + ): + logging.debug("intercept_unary_stream") + new_details = client_call_details._replace( + metadata=self._merge_metadata(client_call_details.metadata) + ) + return await continuation(new_details, request) + + async def intercept_stream_unary( + self, continuation, client_call_details: grpc.aio.ClientCallDetails, request + ): + logging.debug("intercept_stream_unary") + new_details = client_call_details._replace( + metadata=self._merge_metadata(client_call_details.metadata) + ) + return await continuation(new_details, request) + + async def intercept_stream_stream( + self, 
continuation, client_call_details: grpc.aio.ClientCallDetails, request + ): + logging.debug("intercept_stream_stream") + new_details = client_call_details._replace( + metadata=self._merge_metadata(client_call_details.metadata) + ) + return await continuation(new_details, request) + + def unary_stream_interceptor(self) -> grpc.aio.UnaryStreamClientInterceptor: + this = self + + class Interceptor(grpc.aio.UnaryStreamClientInterceptor): + async def intercept_unary_stream(self, *args): + return await this.intercept_unary_stream(*args) + + return Interceptor() + + def stream_unary_interceptor(self) -> grpc.aio.StreamUnaryClientInterceptor: + this = self + + class Interceptor(grpc.aio.StreamUnaryClientInterceptor): + async def intercept_stream_unary(self, *args): + return await this.intercept_stream_unary(*args) + + return Interceptor() + + def stream_stream_interceptor(self) -> grpc.aio.StreamStreamClientInterceptor: + this = self + + class Interceptor(grpc.aio.StreamStreamClientInterceptor): + async def intercept_stream_stream(self, *args): + return await this.intercept_stream_stream(*args) + + return Interceptor() + + def unary_unary_interceptor(self) -> grpc.aio.UnaryUnaryClientInterceptor: + this = self + + class Interceptor(grpc.aio.UnaryUnaryClientInterceptor): + async def intercept_unary_unary(self, *args): + return await this.intercept_unary_unary(*args) + + return Interceptor() + + def interceptors(self) -> list[grpc.aio.ClientInterceptor]: + return [ + self.unary_unary_interceptor(), + self.unary_stream_interceptor(), + self.stream_unary_interceptor(), + self.stream_stream_interceptor(), + ] + + def _merge_metadata(self, existing): + result = list(existing or []) + self.metadata + return result + + +class FumaroleGrpcConnector: + logger = logging.getLogger(__name__) + + def __init__(self, config: FumaroleConfig, endpoint: str): + self.config = config + self.endpoint = endpoint + + async def connect(self, *grpc_options) -> FumaroleStub: + options = 
[("grpc.max_receive_message_length", 111111110), *grpc_options] + interceptors = MetadataInterceptor(self.config.x_metadata).interceptors() + if self.config.x_token is not None: + auth = TritonAuthMetadataPlugin(self.config.x_token) + # ssl_creds allow you to use our https endpoint + # grpc.ssl_channel_credentials with no arguments will look through your CA trust store. + ssl_creds = grpc.ssl_channel_credentials() + + # call credentials will be sent on each request if setup with composite_channel_credentials. + call_creds: grpc.CallCredentials = grpc.metadata_call_credentials(auth) + + # Combined creds will store the channel creds aswell as the call credentials + combined_creds = grpc.composite_channel_credentials(ssl_creds, call_creds) + FumaroleGrpcConnector.logger.debug( + "Using secure channel with x-token authentication" + ) + channel = grpc.aio.secure_channel( + self.endpoint, + credentials=combined_creds, + options=options, + interceptors=interceptors, + ) + else: + FumaroleGrpcConnector.logger.debug( + "Using insecure channel without authentication" + ) + channel = grpc.aio.insecure_channel( + self.endpoint, options=options, interceptors=interceptors + ) + + return FumaroleStub(channel) diff --git a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/__init__.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/__init__.py index 6cf9bae..bf4ed30 100644 --- a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/__init__.py +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/__init__.py @@ -1,2 +1,2 @@ - -from yellowstone_fumarole_client.runtime.state_machine import * \ No newline at end of file +from yellowstone_fumarole_client.runtime.state_machine import * +from yellowstone_fumarole_client.runtime.aio import * diff --git a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py 
b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py new file mode 100644 index 0000000..17976e7 --- /dev/null +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py @@ -0,0 +1,593 @@ +# DataPlaneConn +import asyncio +from typing import Optional, List +from collections import deque +from dataclasses import dataclass +import time +from yellowstone_fumarole_client.runtime.state_machine import ( + FumaroleSM, + FumeDownloadRequest, + FumeOffset, + FumeShardIdx, + CommitmentLevel, +) +from yellowstone_fumarole_proto.geyser_pb2 import ( + SubscribeRequest, + SubscribeUpdate, + SubscribeUpdateSlot, + CommitmentLevel as ProtoCommitmentLevel, +) +from yellowstone_fumarole_proto.fumarole_v2_pb2 import ( + ControlCommand, + PollBlockchainHistory, + CommitOffset, + ControlResponse, + DownloadBlockShard, + BlockFilters, +) +from yellowstone_fumarole_proto.fumarole_v2_pb2_grpc import ( + Fumarole as GrpcFumaroleClient, +) +from yellowstone_fumarole_client.grpc_connectivity import FumaroleGrpcConnector +import logging + + +# Constants +DEFAULT_GC_INTERVAL = 100 + +DEFAULT_SLOT_MEMORY_RETENTION = 10000 + + +@dataclass +class DataPlaneConn: + permits: int + client: GrpcFumaroleClient + rev: int + + def has_permit(self) -> bool: + return self.permits > 0 + + +# DownloadTaskResult +@dataclass +class CompletedDownloadBlockTask: + slot: int + block_uid: bytes + shard_idx: FumeShardIdx + total_event_downloaded: int + + +@dataclass +class DownloadBlockError: + kind: str # 'Disconnected', 'OutletDisconnected', 'BlockShardNotFound', 'FailedDownload', 'Fatal' + message: str + + +@dataclass +class DownloadTaskResult: + kind: str # 'Ok' or 'Err' + completed: Optional[CompletedDownloadBlockTask] = None + slot: Optional[int] = None + err: Optional[DownloadBlockError] = None + + +# DragonsmouthSubscribeRequestBidi +@dataclass +class DragonsmouthSubscribeRequestBidi: + rx: asyncio.Queue + + +LOGGER = logging.getLogger(__name__) + + +# 
TokioFumeDragonsmouthRuntime +class AsyncioFumeDragonsmouthRuntime: + + def __init__( + self, + sm: FumaroleSM, + download_task_runner_chans: "DownloadTaskRunnerChannels", + dragonsmouth_bidi: DragonsmouthSubscribeRequestBidi, + subscribe_request: SubscribeRequest, + consumer_group_name: str, + control_plane_q: asyncio.Queue, + control_plane_stream_reader, + dragonsmouth_outlet: asyncio.Queue, + commit_interval: float, # in seconds + gc_interval: int, + ): + self.sm = sm + self.download_task_runner_chans = download_task_runner_chans + self.dragonsmouth_bidi = dragonsmouth_bidi + self.subscribe_request = subscribe_request + self.consumer_group_name = consumer_group_name + self.control_plane_tx = control_plane_q + self.control_plane_stream_rx = control_plane_stream_reader + self.dragonsmouth_outlet = dragonsmouth_outlet + self.commit_interval = commit_interval + self.last_commit = time.time() + self.gc_interval = gc_interval + + def build_poll_history_cmd( + self, from_offset: Optional[FumeOffset] + ) -> ControlCommand: + return ControlCommand(poll_hist=PollBlockchainHistory(shard_id=0, limit=None)) + + def build_commit_offset_cmd(self, offset: FumeOffset) -> ControlCommand: + return ControlCommand(commit_offset=CommitOffset(offset=offset, shard_id=0)) + + def handle_control_response(self, control_response: ControlResponse): + + response_field = control_response.WhichOneof("response") + assert response_field is not None, "Control response is empty" + + match response_field: + case "poll_hist": + poll_hist = control_response.poll_hist + LOGGER.debug(f"Received poll history {len(poll_hist.events)} events") + self.sm.queue_blockchain_event(poll_hist.events) + case "commit_offset": + commit_offset = control_response.commit_offset + LOGGER.debug(f"Received commit offset: {commit_offset}") + self.sm.update_committed_offset(commit_offset.offset) + case "pong": + LOGGER.debug("Received pong") + case _: + raise ValueError("Unexpected control response") + + async def 
poll_history_if_needed(self): + if self.sm.need_new_blockchain_events(): + cmd = self.build_poll_history_cmd(self.sm.committable_offset) + await self.control_plane_tx.put(cmd) + + def commitment_level(self): + return self.subscribe_request.commitment + + def schedule_download_task_if_any(self): + while True: + if ( + not self.download_task_runner_chans.download_task_queue_tx.qsize() < 100 + ): # Simulate try_reserve + break + download_request = self.sm.pop_slot_to_download(self.commitment_level()) + if not download_request: + break + download_task_args = DownloadTaskArgs( + download_request=download_request, + dragonsmouth_outlet=self.dragonsmouth_outlet, + ) + LOGGER.debug(f"Scheduling download task for slot {download_request.slot}") + asyncio.create_task( + self.download_task_runner_chans.download_task_queue_tx.put( + download_task_args + ) + ) + + def handle_download_result(self, download_result: DownloadTaskResult): + if download_result.kind == "Ok": + completed = download_result.completed + LOGGER.debug( + f"Download completed for slot {completed.slot}, shard {completed.shard_idx}" + ) + self.sm.make_slot_download_progress(completed.slot, completed.shard_idx) + else: + slot = download_result.slot + err = download_result.err + raise RuntimeError(f"Failed to download slot {slot}: {err.message}") + + async def force_commit_offset(self): + LOGGER.debug(f"Force committing offset {self.sm.committable_offset}") + await self.control_plane_tx.put( + self.build_commit_offset_cmd(self.sm.committable_offset) + ) + + async def commit_offset(self): + if self.sm.last_committed_offset < self.sm.committable_offset: + LOGGER.debug(f"Committing offset {self.sm.committable_offset}") + await self.force_commit_offset() + self.last_commit = time.time() + + async def drain_slot_status(self): + commitment = self.subscribe_request.commitment + slot_status_vec = deque() + while slot_status := self.sm.pop_next_slot_status(): + slot_status_vec.append(slot_status) + + if not 
slot_status_vec: + return + + LOGGER.debug(f"Draining {len(slot_status_vec)} slot status") + for slot_status in slot_status_vec: + matched_filters = [] + for filter_name, filter in self.subscribe_request.slots.items(): + if ( + filter.filter_by_commitment + and slot_status.commitment_level.value == commitment + ): + matched_filters.append(filter_name) + elif not filter.filter_by_commitment: + matched_filters.append(filter_name) + + if matched_filters: + update = SubscribeUpdate( + filters=matched_filters, + created_at=None, + update_oneof=SubscribeUpdateSlot( + slot=slot_status.slot, + parent=slot_status.parent_slot, + status=slot_status.commitment_level.value, + dead_error=slot_status.dead_error, + ), + ) + LOGGER.debug(f"Sending dragonsmouth update: {update}") + try: + await self.dragonsmouth_outlet.put(update) + except asyncio.QueueFull: + return + + self.sm.mark_event_as_processed(slot_status.session_sequence) + + async def handle_control_plane_resp( + self, result: ControlResponse | Exception + ) -> bool: + if isinstance(result, Exception): + await self.dragonsmouth_outlet.put(result) + return False + self.handle_control_response(result) + return True + + async def handle_new_subscribe_request(self, subscribe_request: SubscribeRequest): + self.subscribe_request = subscribe_request + await self.download_task_runner_chans.cnc_tx.put( + DownloadTaskRunnerCommand.UpdateSubscribeRequest(subscribe_request) + ) + + async def run(self): + LOGGER.debug(f"Fumarole runtime starting...") + await self.control_plane_tx.put(self.build_poll_history_cmd(None)) + LOGGER.debug("Initial poll history command sent") + await self.force_commit_offset() + LOGGER.debug("Initial commit offset command sent") + ticks = 0 + while True: + ticks += 1 + LOGGER.debug(f"Runtime loop tick") + if ticks % self.gc_interval == 0: + LOGGER.debug("Running garbage collection") + self.sm.gc() + ticks = 0 + + if self.dragonsmouth_outlet.qsize() >= 100: # Simulate is_closed + LOGGER.debug("Detected 
dragonsmouth outlet closed") + break + + commit_deadline = self.last_commit + self.commit_interval + await self.poll_history_if_needed() + self.schedule_download_task_if_any() + + tasks = [ + asyncio.create_task(self.dragonsmouth_bidi.rx.get()), + asyncio.create_task(self.control_plane_stream_rx.read()), + asyncio.create_task( + self.download_task_runner_chans.download_result_rx.get() + ), + asyncio.create_task( + asyncio.sleep(max(0, commit_deadline - time.time())) + ), + ] + + done, pending = await asyncio.wait( + tasks, return_when=asyncio.FIRST_COMPLETED + ) + for task in pending: + task.cancel() + + for task in done: + try: + result = task.result() + if task == tasks[0]: # dragonsmouth_bidi.rx + LOGGER.debug("Dragonsmouth subscribe request received") + await self.handle_new_subscribe_request(result) + elif task == tasks[1]: # control_plane_rx + if not await self.handle_control_plane_resp(result): + LOGGER.debug("Control plane error") + return + elif task == tasks[2]: # download_result_rx + self.handle_download_result(result) + elif task == tasks[3]: # sleep + LOGGER.debug("Commit deadline reached") + await self.commit_offset() + except asyncio.CancelledError: + pass + except Exception as e: + LOGGER.error(f"Error: {e}") + raise e + + await self.drain_slot_status() + + LOGGER.debug("Fumarole runtime exiting") + + +# DownloadTaskRunnerChannels +@dataclass +class DownloadTaskRunnerChannels: + download_task_queue_tx: asyncio.Queue + cnc_tx: asyncio.Queue + download_result_rx: asyncio.Queue + + +# DownloadTaskRunnerCommand +@dataclass +class DownloadTaskRunnerCommand: + kind: str + subscribe_request: Optional[SubscribeRequest] = None + + @classmethod + def UpdateSubscribeRequest(cls, subscribe_request: SubscribeRequest): + return cls(kind="UpdateSubscribeRequest", subscribe_request=subscribe_request) + + +# DownloadTaskArgs +@dataclass +class DownloadTaskArgs: + download_request: FumeDownloadRequest + dragonsmouth_outlet: asyncio.Queue + + +# DataPlaneTaskMeta 
+@dataclass +class DataPlaneTaskMeta: + client_idx: int + request: FumeDownloadRequest + dragonsmouth_outlet: asyncio.Queue + scheduled_at: float + client_rev: int + + +# GrpcDownloadTaskRunner +class GrpcDownloadTaskRunner: + def __init__( + self, + data_plane_channel_vec: List[DataPlaneConn], + connector: FumaroleGrpcConnector, + cnc_rx: asyncio.Queue, + download_task_queue: asyncio.Queue, + outlet: asyncio.Queue, + max_download_attempt_by_slot: int, + subscribe_request: SubscribeRequest, + ): + self.data_plane_channel_vec = data_plane_channel_vec + self.connector = connector + self.tasks = [] + self.task_meta = {} + self.cnc_rx = cnc_rx + self.download_task_queue = download_task_queue + self.download_attempts = {} + self.outlet = outlet + self.max_download_attempt_per_slot = max_download_attempt_by_slot + self.subscribe_request = subscribe_request + self.task_counter = 0 + + def find_least_use_client(self) -> Optional[int]: + max_permits = -1 + best_idx = None + for idx, conn in enumerate(self.data_plane_channel_vec): + if conn.has_permit() and conn.permits > max_permits: + max_permits = conn.permits + best_idx = idx + return best_idx + + async def handle_data_plane_task_result( + self, task_id: int, result: DownloadTaskResult + ): + task_meta = self.task_meta.pop(task_id, None) + if not task_meta: + raise RuntimeError("Missing task meta") + + slot = task_meta.request.slot + conn = self.data_plane_channel_vec[task_meta.client_idx] + conn.permits += 1 + + if result.kind == "Ok": + completed = result.completed + elapsed = time.time() - task_meta.scheduled_at + LOGGER.debug( + f"Downloaded slot {slot} in {elapsed}s, total events: {completed.total_event_downloaded}" + ) + self.download_attempts.pop(slot, None) + await self.outlet.put(result) + else: + err = result.err + download_attempt = self.download_attempts.get(slot, 0) + if err.kind in ("Disconnected", "FailedDownload"): + if download_attempt >= self.max_download_attempt_per_slot: + LOGGER.error( + f"Download 
slot {slot} failed: {err.message}, max attempts reached" + ) + await self.outlet.put( + DownloadTaskResult(kind="Err", slot=slot, err=err) + ) + return + remaining = self.max_download_attempt_per_slot - download_attempt + LOGGER.debug( + f"Download slot {slot} failed: {err.message}, remaining attempts: {remaining}" + ) + if task_meta.client_rev == conn.rev: + conn.client = await self.connector.connect() + conn.rev += 1 + LOGGER.debug(f"Download slot {slot} failed, rescheduling for retry...") + task_spec = DownloadTaskArgs( + download_request=task_meta.request, + dragonsmouth_outlet=task_meta.dragonsmouth_outlet, + ) + self.spawn_grpc_download_task(task_meta.client_idx, task_spec) + elif err.kind == "OutletDisconnected": + LOGGER.debug("Dragonsmouth outlet disconnected") + elif err.kind == "BlockShardNotFound": + LOGGER.error(f"Slot {slot} not found") + await self.outlet.put( + DownloadTaskResult(kind="Err", slot=slot, err=err) + ) + elif err.kind == "Fatal": + raise RuntimeError(f"Fatal error: {err.message}") + + def spawn_grpc_download_task(self, client_idx: int, task_spec: DownloadTaskArgs): + conn = self.data_plane_channel_vec[client_idx] + client = conn.client # Clone not needed in Python + download_request = task_spec.download_request + slot = download_request.slot + task = GrpcDownloadBlockTaskRun( + download_request=download_request, + client=client, + filters=BlockFilters( + accounts=self.subscribe_request.accounts, + transactions=self.subscribe_request.transactions, + entries=self.subscribe_request.entry, + blocks_meta=self.subscribe_request.blocks_meta, + ), + dragonsmouth_oulet=task_spec.dragonsmouth_outlet, + ) + task_id = self.task_counter + self.task_counter += 1 + self.tasks.append(asyncio.create_task(task.run(task_id))) + self.download_attempts[slot] = self.download_attempts.get(slot, 0) + 1 + conn.permits -= 1 + self.task_meta[task_id] = DataPlaneTaskMeta( + client_idx=client_idx, + request=download_request, + 
dragonsmouth_outlet=task_spec.dragonsmouth_outlet, + scheduled_at=time.time(), + client_rev=conn.rev, + ) + + def handle_control_command(self, cmd: DownloadTaskRunnerCommand): + if cmd.kind == "UpdateSubscribeRequest": + self.subscribe_request = cmd.subscribe_request + + async def run(self): + while self.outlet.qsize() < 100: # Simulate is_closed + maybe_available_client_idx = self.find_least_use_client() + tasks = [asyncio.create_task(self.cnc_rx.get())] + if maybe_available_client_idx is not None: + tasks.append(asyncio.create_task(self.download_task_queue.get())) + for task in self.tasks[:]: + if task.done(): + self.tasks.remove(task) + task_id, result = task.result() + await self.handle_data_plane_task_result(task_id, result) + if tasks: + done, _ = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED) + for task in done: + try: + result = task.result() + if task == tasks[0]: # cnc_rx + self.handle_control_command(result) + elif len(tasks) > 1 and task == tasks[1]: # download_task_queue + self.spawn_grpc_download_task( + maybe_available_client_idx, result + ) + except Exception as e: + LOGGER.debug(f"Error: {e}") + return + LOGGER.debug("Closing GrpcDownloadTaskRunner loop") + + +# GrpcDownloadBlockTaskRun +class GrpcDownloadBlockTaskRun: + def __init__( + self, + download_request: FumeDownloadRequest, + client: GrpcFumaroleClient, + filters: Optional[BlockFilters], + dragonsmouth_oulet: asyncio.Queue, + ): + self.download_request = download_request + self.client = client + self.filters = filters + self.dragonsmouth_oulet = dragonsmouth_oulet + + def map_tonic_error_code_to_download_block_error( + self, code: str + ) -> DownloadBlockError: + if code == "NotFound": + return DownloadBlockError( + kind="BlockShardNotFound", message="Block shard not found" + ) + elif code == "Unavailable": + return DownloadBlockError(kind="Disconnected", message="Disconnected") + elif code in ( + "Internal", + "Aborted", + "DataLoss", + "ResourceExhausted", + "Unknown", + 
"Cancelled", + "DeadlineExceeded", + ): + return DownloadBlockError(kind="FailedDownload", message="Failed download") + elif code == "InvalidArgument": + raise ValueError("Invalid argument") + else: + return DownloadBlockError(kind="Fatal", message=f"Unknown error: {code}") + + async def run(self, task_id: int) -> tuple[int, DownloadTaskResult]: + request = DownloadBlockShard( + blockchain_id=self.download_request.blockchain_id, + block_uid=self.download_request.block_uid, + shard_idx=0, + blockFilters=self.filters, + ) + try: + resp = self.client.DownloadBlock(request) + except Exception as e: + LOGGER.error(f"Download block error: {e}") + return task_id, DownloadTaskResult( + kind="Err", + slot=self.download_request.slot, + err=self.map_tonic_error_code_to_download_block_error(str(e)), + ) + + total_event_downloaded = 0 + async for data in resp: + + kind = data.WhichOneof("response") + + match kind: + case "update": + update = data.update + assert update is not None, "Update is None" + total_event_downloaded += 1 + try: + await self.dragonsmouth_oulet.put(update) + except asyncio.QueueFull: + return task_id, DownloadTaskResult( + kind="Err", + slot=self.download_request.slot, + err=DownloadBlockError( + kind="OutletDisconnected", message="Outlet disconnected" + ), + ) + case "block_shard_download_finish": + return task_id, DownloadTaskResult( + kind="Ok", + completed=CompletedDownloadBlockTask( + slot=self.download_request.slot, + block_uid=self.download_request.block_uid, + shard_idx=0, + total_event_downloaded=total_event_downloaded, + ), + ) + case _: + return task_id, DownloadTaskResult( + kind="Err", + slot=self.download_request.slot, + err=self.map_tonic_error_code_to_download_block_error( + "Unknown" + ), + ) + + return task_id, DownloadTaskResult( + kind="Err", + slot=self.download_request.slot, + err=DownloadBlockError(kind="FailedDownload", message="Failed download"), + ) diff --git 
a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/state_machine.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/state_machine.py index 4983004..4ffceea 100644 --- a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/state_machine.py +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/state_machine.py @@ -1,6 +1,6 @@ from typing import Optional, List, Dict, Set, Deque, Tuple, Any from collections import deque, defaultdict -from yellowstone_api.fumarole_v2_pb2 import ( +from yellowstone_fumarole_proto.fumarole_v2_pb2 import ( CommitmentLevel, BlockchainEvent, ) @@ -106,9 +106,7 @@ class FumaroleSM: def __init__(self, last_committed_offset: FumeOffset, slot_memory_retention: int): self.last_committed_offset = last_committed_offset - self.slot_commitment_progression = ( - dict() - ) # Slot -> SlotCommitmentProgression + self.slot_commitment_progression = dict() # Slot -> SlotCommitmentProgression self.downloaded_slot = set() # Set of downloaded slots self.inflight_slot_shard_download = {} # Slot -> SlotDownloadProgress self.blocked_slot_status_update = defaultdict( @@ -118,7 +116,9 @@ def __init__(self, last_committed_offset: FumeOffset, slot_memory_retention: int self.processed_offset = [] # Min-heap for (sequence, offset) self.committable_offset = last_committed_offset self.max_slot_detected = 0 - self.unprocessed_blockchain_event: Deque[(FumeSessionSequence, BlockchainEvent)] = deque() + self.unprocessed_blockchain_event: Deque[ + (FumeSessionSequence, BlockchainEvent) + ] = deque() self.sequence = 1 self.last_processed_fume_sequence = 0 self.sequence_to_offset = {} # FumeSessionSequence -> FumeOffset @@ -137,6 +137,7 @@ def next_sequence(self) -> int: def gc(self) -> None: """Garbage collect old slots to respect memory retention limit.""" + LOGGER.debug("Garbage collecting old slots") while len(self.downloaded_slot) > self.slot_memory_retention: slot = 
self.downloaded_slot.pop(0) if self.downloaded_slot else None if slot is None: @@ -220,13 +221,13 @@ def make_sure_slot_commitment_progression_exists( slot, SlotCommitmentProgression() ) - def pop_slot_to_download( - self, commitment = None - ) -> Optional[FumeDownloadRequest]: + def pop_slot_to_download(self, commitment=None) -> Optional[FumeDownloadRequest]: """Pop the next slot to download.""" min_commitment = commitment or CommitmentLevel.PROCESSED while self.unprocessed_blockchain_event: - session_sequence, blockchain_event = self.unprocessed_blockchain_event.popleft() + session_sequence, blockchain_event = ( + self.unprocessed_blockchain_event.popleft() + ) event_cl = blockchain_event.commitment_level if event_cl < min_commitment: diff --git a/python/yellowstone-fumarole-client/yellowstone_api/__init__.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_proto/__init__.py similarity index 100% rename from python/yellowstone-fumarole-client/yellowstone_api/__init__.py rename to python/yellowstone-fumarole-client/yellowstone_fumarole_proto/__init__.py diff --git a/python/yellowstone-fumarole-client/yellowstone_api/fumarole_v2_pb2.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_proto/fumarole_v2_pb2.py similarity index 99% rename from python/yellowstone-fumarole-client/yellowstone_api/fumarole_v2_pb2.py rename to python/yellowstone-fumarole-client/yellowstone_fumarole_proto/fumarole_v2_pb2.py index 0025e27..e8fff73 100644 --- a/python/yellowstone-fumarole-client/yellowstone_api/fumarole_v2_pb2.py +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_proto/fumarole_v2_pb2.py @@ -22,13 +22,13 @@ _sym_db = _symbol_database.Default() -import yellowstone_api.geyser_pb2 as geyser__pb2 +import yellowstone_fumarole_proto.geyser_pb2 as geyser__pb2 try: solana__storage__pb2 = geyser__pb2.solana__storage__pb2 except AttributeError: solana__storage__pb2 = geyser__pb2.solana_storage_pb2 -from yellowstone_api.geyser_pb2 import * +from 
yellowstone_fumarole_proto.geyser_pb2 import * DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x11\x66umarole_v2.proto\x12\x0b\x66umarole_v2\x1a\x0cgeyser.proto\"\x10\n\x0eVersionRequest\"\"\n\x0fVersionResponse\x12\x0f\n\x07version\x18\x01 \x01(\t\":\n\x1bGetConsumerGroupInfoRequest\x12\x1b\n\x13\x63onsumer_group_name\x18\x01 \x01(\t\"9\n\x1a\x44\x65leteConsumerGroupRequest\x12\x1b\n\x13\x63onsumer_group_name\x18\x01 \x01(\t\".\n\x1b\x44\x65leteConsumerGroupResponse\x12\x0f\n\x07success\x18\x01 \x01(\x08\"\x1b\n\x19ListConsumerGroupsRequest\"U\n\x1aListConsumerGroupsResponse\x12\x37\n\x0f\x63onsumer_groups\x18\x01 \x03(\x0b\x32\x1e.fumarole_v2.ConsumerGroupInfo\"N\n\x11\x43onsumerGroupInfo\x12\n\n\x02id\x18\x01 \x01(\t\x12\x1b\n\x13\x63onsumer_group_name\x18\x02 \x01(\t\x12\x10\n\x08is_stale\x18\x03 \x01(\x08\"4\n\x15GetSlotLagInfoRequest\x12\x1b\n\x13\x63onsumer_group_name\x18\x01 \x01(\t\"\xf1\x04\n\x0c\x42lockFilters\x12\x39\n\x08\x61\x63\x63ounts\x18\x01 \x03(\x0b\x32\'.fumarole_v2.BlockFilters.AccountsEntry\x12\x41\n\x0ctransactions\x18\x02 \x03(\x0b\x32+.fumarole_v2.BlockFilters.TransactionsEntry\x12\x37\n\x07\x65ntries\x18\x03 \x03(\x0b\x32&.fumarole_v2.BlockFilters.EntriesEntry\x12>\n\x0b\x62locks_meta\x18\x04 \x03(\x0b\x32).fumarole_v2.BlockFilters.BlocksMetaEntry\x1aW\n\rAccountsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.geyser.SubscribeRequestFilterAccounts:\x02\x38\x01\x1a_\n\x11TransactionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x39\n\x05value\x18\x02 \x01(\x0b\x32*.geyser.SubscribeRequestFilterTransactions:\x02\x38\x01\x1aS\n\x0c\x45ntriesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x32\n\x05value\x18\x02 \x01(\x0b\x32#.geyser.SubscribeRequestFilterEntry:\x02\x38\x01\x1a[\n\x0f\x42locksMetaEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x37\n\x05value\x18\x02 \x01(\x0b\x32(.geyser.SubscribeRequestFilterBlocksMeta:\x02\x38\x01\"\x98\x01\n\x12\x44ownloadBlockShard\x12\x15\n\rblockchain_id\x18\x01 
\x01(\x0c\x12\x11\n\tblock_uid\x18\x02 \x01(\x0c\x12\x11\n\tshard_idx\x18\x03 \x01(\x05\x12\x34\n\x0c\x62lockFilters\x18\x04 \x01(\x0b\x32\x19.fumarole_v2.BlockFiltersH\x00\x88\x01\x01\x42\x0f\n\r_blockFilters\"\x17\n\x04Ping\x12\x0f\n\x07ping_id\x18\x01 \x01(\r\"\x17\n\x04Pong\x12\x0f\n\x07ping_id\x18\x01 \x01(\r\"\x8d\x01\n\x0b\x44\x61taCommand\x12?\n\x14\x64ownload_block_shard\x18\x01 \x01(\x0b\x32\x1f.fumarole_v2.DownloadBlockShardH\x00\x12\x32\n\rfilter_update\x18\x02 \x01(\x0b\x32\x19.fumarole_v2.BlockFiltersH\x00\x42\t\n\x07\x63ommand\"\x1a\n\x18\x42lockShardDownloadFinish\"L\n\rBlockNotFound\x12\x15\n\rblockchain_id\x18\x01 \x01(\x0c\x12\x11\n\tblock_uid\x18\x02 \x01(\x0c\x12\x11\n\tshard_idx\x18\x03 \x01(\x05\"E\n\tDataError\x12/\n\tnot_found\x18\x01 \x01(\x0b\x32\x1a.fumarole_v2.BlockNotFoundH\x00\x42\x07\n\x05\x65rror\"\x93\x01\n\x0c\x44\x61taResponse\x12)\n\x06update\x18\x01 \x01(\x0b\x32\x17.geyser.SubscribeUpdateH\x00\x12L\n\x1b\x62lock_shard_download_finish\x18\x02 \x01(\x0b\x32%.fumarole_v2.BlockShardDownloadFinishH\x00\x42\n\n\x08response\"0\n\x0c\x43ommitOffset\x12\x0e\n\x06offset\x18\x01 \x01(\x03\x12\x10\n\x08shard_id\x18\x02 \x01(\x05\"c\n\x15PollBlockchainHistory\x12\x10\n\x08shard_id\x18\x01 \x01(\x05\x12\x11\n\x04\x66rom\x18\x02 \x01(\x03H\x00\x88\x01\x01\x12\x12\n\x05limit\x18\x03 \x01(\x03H\x01\x88\x01\x01\x42\x07\n\x05_fromB\x08\n\x06_limit\"\x8f\x02\n\x0f\x42lockchainEvent\x12\x0e\n\x06offset\x18\x01 \x01(\x03\x12\x15\n\rblockchain_id\x18\x02 \x01(\x0c\x12\x11\n\tblock_uid\x18\x03 \x01(\x0c\x12\x12\n\nnum_shards\x18\x04 \x01(\r\x12\x0c\n\x04slot\x18\x05 \x01(\x04\x12\x18\n\x0bparent_slot\x18\x06 \x01(\x04H\x00\x88\x01\x01\x12\x31\n\x10\x63ommitment_level\x18\x07 \x01(\x0e\x32\x17.geyser.CommitmentLevel\x12\x1b\n\x13\x62lockchain_shard_id\x18\x08 \x01(\x05\x12\x17\n\ndead_error\x18\t \x01(\tH\x01\x88\x01\x01\x42\x0e\n\x0c_parent_slotB\r\n\x0b_dead_error\"A\n\x11\x42lockchainHistory\x12,\n\x06\x65vents\x18\x01 
\x03(\x0b\x32\x1c.fumarole_v2.BlockchainEvent\"L\n\x10JoinControlPlane\x12 \n\x13\x63onsumer_group_name\x18\x01 \x01(\tH\x00\x88\x01\x01\x42\x16\n\x14_consumer_group_name\"\xe2\x01\n\x0e\x43ontrolCommand\x12\x35\n\x0cinitial_join\x18\x01 \x01(\x0b\x32\x1d.fumarole_v2.JoinControlPlaneH\x00\x12\x32\n\rcommit_offset\x18\x02 \x01(\x0b\x32\x19.fumarole_v2.CommitOffsetH\x00\x12\x37\n\tpoll_hist\x18\x03 \x01(\x0b\x32\".fumarole_v2.PollBlockchainHistoryH\x00\x12!\n\x04ping\x18\x04 \x01(\x0b\x32\x11.fumarole_v2.PingH\x00\x42\t\n\x07\x63ommand\"\xe7\x01\n\x0f\x43ontrolResponse\x12\x36\n\x04init\x18\x01 \x01(\x0b\x32&.fumarole_v2.InitialConsumerGroupStateH\x00\x12\x38\n\rcommit_offset\x18\x02 \x01(\x0b\x32\x1f.fumarole_v2.CommitOffsetResultH\x00\x12\x33\n\tpoll_hist\x18\x03 \x01(\x0b\x32\x1e.fumarole_v2.BlockchainHistoryH\x00\x12!\n\x04pong\x18\x04 \x01(\x0b\x32\x11.fumarole_v2.PongH\x00\x42\n\n\x08response\"6\n\x12\x43ommitOffsetResult\x12\x0e\n\x06offset\x18\x01 \x01(\x03\x12\x10\n\x08shard_id\x18\x02 \x01(\x05\"\xd1\x01\n\x19InitialConsumerGroupState\x12\x15\n\rblockchain_id\x18\x01 \x01(\x0c\x12`\n\x16last_committed_offsets\x18\x02 \x03(\x0b\x32@.fumarole_v2.InitialConsumerGroupState.LastCommittedOffsetsEntry\x1a;\n\x19LastCommittedOffsetsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\"8\n\x1b\x43reateConsumerGroupResponse\x12\x19\n\x11\x63onsumer_group_id\x18\x01 \x01(\t\"z\n\x1a\x43reateConsumerGroupRequest\x12\x1b\n\x13\x63onsumer_group_name\x18\x01 \x01(\t\x12?\n\x15initial_offset_policy\x18\x02 \x01(\x0e\x32 
.fumarole_v2.InitialOffsetPolicy*!\n\x13InitialOffsetPolicy\x12\n\n\x06LATEST\x10\x00\x32\xe2\x05\n\x08\x46umarole\x12\x62\n\x14GetConsumerGroupInfo\x12(.fumarole_v2.GetConsumerGroupInfoRequest\x1a\x1e.fumarole_v2.ConsumerGroupInfo\"\x00\x12g\n\x12ListConsumerGroups\x12&.fumarole_v2.ListConsumerGroupsRequest\x1a\'.fumarole_v2.ListConsumerGroupsResponse\"\x00\x12j\n\x13\x44\x65leteConsumerGroup\x12\'.fumarole_v2.DeleteConsumerGroupRequest\x1a(.fumarole_v2.DeleteConsumerGroupResponse\"\x00\x12j\n\x13\x43reateConsumerGroup\x12\'.fumarole_v2.CreateConsumerGroupRequest\x1a(.fumarole_v2.CreateConsumerGroupResponse\"\x00\x12O\n\rDownloadBlock\x12\x1f.fumarole_v2.DownloadBlockShard\x1a\x19.fumarole_v2.DataResponse\"\x00\x30\x01\x12J\n\rSubscribeData\x12\x18.fumarole_v2.DataCommand\x1a\x19.fumarole_v2.DataResponse\"\x00(\x01\x30\x01\x12L\n\tSubscribe\x12\x1b.fumarole_v2.ControlCommand\x1a\x1c.fumarole_v2.ControlResponse\"\x00(\x01\x30\x01\x12\x46\n\x07Version\x12\x1b.fumarole_v2.VersionRequest\x1a\x1c.fumarole_v2.VersionResponse\"\x00P\x00\x62\x06proto3') diff --git a/python/yellowstone-fumarole-client/yellowstone_api/fumarole_v2_pb2.pyi b/python/yellowstone-fumarole-client/yellowstone_fumarole_proto/fumarole_v2_pb2.pyi similarity index 77% rename from python/yellowstone-fumarole-client/yellowstone_api/fumarole_v2_pb2.pyi rename to python/yellowstone-fumarole-client/yellowstone_fumarole_proto/fumarole_v2_pb2.pyi index ca248d8..96d8afe 100644 --- a/python/yellowstone-fumarole-client/yellowstone_api/fumarole_v2_pb2.pyi +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_proto/fumarole_v2_pb2.pyi @@ -1,48 +1,48 @@ -import yellowstone_api.geyser_pb2 as _geyser_pb2 -import yellowstone_api.solana_storage_pb2 as _solana_storage_pb2 +import yellowstone_fumarole_proto.geyser_pb2 as _geyser_pb2 +import yellowstone_fumarole_proto.solana_storage_pb2 as _solana_storage_pb2 from google.protobuf.internal import containers as _containers from google.protobuf.internal import 
enum_type_wrapper as _enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union -from yellowstone_api.geyser_pb2 import SubscribeRequest as SubscribeRequest -from yellowstone_api.geyser_pb2 import SubscribeRequestFilterAccounts as SubscribeRequestFilterAccounts -from yellowstone_api.geyser_pb2 import SubscribeRequestFilterAccountsFilter as SubscribeRequestFilterAccountsFilter -from yellowstone_api.geyser_pb2 import SubscribeRequestFilterAccountsFilterMemcmp as SubscribeRequestFilterAccountsFilterMemcmp -from yellowstone_api.geyser_pb2 import SubscribeRequestFilterAccountsFilterLamports as SubscribeRequestFilterAccountsFilterLamports -from yellowstone_api.geyser_pb2 import SubscribeRequestFilterSlots as SubscribeRequestFilterSlots -from yellowstone_api.geyser_pb2 import SubscribeRequestFilterTransactions as SubscribeRequestFilterTransactions -from yellowstone_api.geyser_pb2 import SubscribeRequestFilterBlocks as SubscribeRequestFilterBlocks -from yellowstone_api.geyser_pb2 import SubscribeRequestFilterBlocksMeta as SubscribeRequestFilterBlocksMeta -from yellowstone_api.geyser_pb2 import SubscribeRequestFilterEntry as SubscribeRequestFilterEntry -from yellowstone_api.geyser_pb2 import SubscribeRequestAccountsDataSlice as SubscribeRequestAccountsDataSlice -from yellowstone_api.geyser_pb2 import SubscribeRequestPing as SubscribeRequestPing -from yellowstone_api.geyser_pb2 import SubscribeUpdate as SubscribeUpdate -from yellowstone_api.geyser_pb2 import SubscribeUpdateAccount as SubscribeUpdateAccount -from yellowstone_api.geyser_pb2 import SubscribeUpdateAccountInfo as SubscribeUpdateAccountInfo -from yellowstone_api.geyser_pb2 import SubscribeUpdateSlot as SubscribeUpdateSlot -from yellowstone_api.geyser_pb2 import SubscribeUpdateTransaction as SubscribeUpdateTransaction -from 
yellowstone_api.geyser_pb2 import SubscribeUpdateTransactionInfo as SubscribeUpdateTransactionInfo -from yellowstone_api.geyser_pb2 import SubscribeUpdateTransactionStatus as SubscribeUpdateTransactionStatus -from yellowstone_api.geyser_pb2 import SubscribeUpdateBlock as SubscribeUpdateBlock -from yellowstone_api.geyser_pb2 import SubscribeUpdateBlockMeta as SubscribeUpdateBlockMeta -from yellowstone_api.geyser_pb2 import SubscribeUpdateEntry as SubscribeUpdateEntry -from yellowstone_api.geyser_pb2 import SubscribeUpdatePing as SubscribeUpdatePing -from yellowstone_api.geyser_pb2 import SubscribeUpdatePong as SubscribeUpdatePong -from yellowstone_api.geyser_pb2 import PingRequest as PingRequest -from yellowstone_api.geyser_pb2 import PongResponse as PongResponse -from yellowstone_api.geyser_pb2 import GetLatestBlockhashRequest as GetLatestBlockhashRequest -from yellowstone_api.geyser_pb2 import GetLatestBlockhashResponse as GetLatestBlockhashResponse -from yellowstone_api.geyser_pb2 import GetBlockHeightRequest as GetBlockHeightRequest -from yellowstone_api.geyser_pb2 import GetBlockHeightResponse as GetBlockHeightResponse -from yellowstone_api.geyser_pb2 import GetSlotRequest as GetSlotRequest -from yellowstone_api.geyser_pb2 import GetSlotResponse as GetSlotResponse -from yellowstone_api.geyser_pb2 import GetVersionRequest as GetVersionRequest -from yellowstone_api.geyser_pb2 import GetVersionResponse as GetVersionResponse -from yellowstone_api.geyser_pb2 import IsBlockhashValidRequest as IsBlockhashValidRequest -from yellowstone_api.geyser_pb2 import IsBlockhashValidResponse as IsBlockhashValidResponse -from yellowstone_api.geyser_pb2 import CommitmentLevel as CommitmentLevel -from yellowstone_api.geyser_pb2 import SlotStatus as SlotStatus +from yellowstone_fumarole_proto.geyser_pb2 import SubscribeRequest as SubscribeRequest +from yellowstone_fumarole_proto.geyser_pb2 import SubscribeRequestFilterAccounts as SubscribeRequestFilterAccounts +from 
yellowstone_fumarole_proto.geyser_pb2 import SubscribeRequestFilterAccountsFilter as SubscribeRequestFilterAccountsFilter +from yellowstone_fumarole_proto.geyser_pb2 import SubscribeRequestFilterAccountsFilterMemcmp as SubscribeRequestFilterAccountsFilterMemcmp +from yellowstone_fumarole_proto.geyser_pb2 import SubscribeRequestFilterAccountsFilterLamports as SubscribeRequestFilterAccountsFilterLamports +from yellowstone_fumarole_proto.geyser_pb2 import SubscribeRequestFilterSlots as SubscribeRequestFilterSlots +from yellowstone_fumarole_proto.geyser_pb2 import SubscribeRequestFilterTransactions as SubscribeRequestFilterTransactions +from yellowstone_fumarole_proto.geyser_pb2 import SubscribeRequestFilterBlocks as SubscribeRequestFilterBlocks +from yellowstone_fumarole_proto.geyser_pb2 import SubscribeRequestFilterBlocksMeta as SubscribeRequestFilterBlocksMeta +from yellowstone_fumarole_proto.geyser_pb2 import SubscribeRequestFilterEntry as SubscribeRequestFilterEntry +from yellowstone_fumarole_proto.geyser_pb2 import SubscribeRequestAccountsDataSlice as SubscribeRequestAccountsDataSlice +from yellowstone_fumarole_proto.geyser_pb2 import SubscribeRequestPing as SubscribeRequestPing +from yellowstone_fumarole_proto.geyser_pb2 import SubscribeUpdate as SubscribeUpdate +from yellowstone_fumarole_proto.geyser_pb2 import SubscribeUpdateAccount as SubscribeUpdateAccount +from yellowstone_fumarole_proto.geyser_pb2 import SubscribeUpdateAccountInfo as SubscribeUpdateAccountInfo +from yellowstone_fumarole_proto.geyser_pb2 import SubscribeUpdateSlot as SubscribeUpdateSlot +from yellowstone_fumarole_proto.geyser_pb2 import SubscribeUpdateTransaction as SubscribeUpdateTransaction +from yellowstone_fumarole_proto.geyser_pb2 import SubscribeUpdateTransactionInfo as SubscribeUpdateTransactionInfo +from yellowstone_fumarole_proto.geyser_pb2 import SubscribeUpdateTransactionStatus as SubscribeUpdateTransactionStatus +from yellowstone_fumarole_proto.geyser_pb2 import 
SubscribeUpdateBlock as SubscribeUpdateBlock +from yellowstone_fumarole_proto.geyser_pb2 import SubscribeUpdateBlockMeta as SubscribeUpdateBlockMeta +from yellowstone_fumarole_proto.geyser_pb2 import SubscribeUpdateEntry as SubscribeUpdateEntry +from yellowstone_fumarole_proto.geyser_pb2 import SubscribeUpdatePing as SubscribeUpdatePing +from yellowstone_fumarole_proto.geyser_pb2 import SubscribeUpdatePong as SubscribeUpdatePong +from yellowstone_fumarole_proto.geyser_pb2 import PingRequest as PingRequest +from yellowstone_fumarole_proto.geyser_pb2 import PongResponse as PongResponse +from yellowstone_fumarole_proto.geyser_pb2 import GetLatestBlockhashRequest as GetLatestBlockhashRequest +from yellowstone_fumarole_proto.geyser_pb2 import GetLatestBlockhashResponse as GetLatestBlockhashResponse +from yellowstone_fumarole_proto.geyser_pb2 import GetBlockHeightRequest as GetBlockHeightRequest +from yellowstone_fumarole_proto.geyser_pb2 import GetBlockHeightResponse as GetBlockHeightResponse +from yellowstone_fumarole_proto.geyser_pb2 import GetSlotRequest as GetSlotRequest +from yellowstone_fumarole_proto.geyser_pb2 import GetSlotResponse as GetSlotResponse +from yellowstone_fumarole_proto.geyser_pb2 import GetVersionRequest as GetVersionRequest +from yellowstone_fumarole_proto.geyser_pb2 import GetVersionResponse as GetVersionResponse +from yellowstone_fumarole_proto.geyser_pb2 import IsBlockhashValidRequest as IsBlockhashValidRequest +from yellowstone_fumarole_proto.geyser_pb2 import IsBlockhashValidResponse as IsBlockhashValidResponse +from yellowstone_fumarole_proto.geyser_pb2 import CommitmentLevel as CommitmentLevel +from yellowstone_fumarole_proto.geyser_pb2 import SlotStatus as SlotStatus DESCRIPTOR: _descriptor.FileDescriptor PROCESSED: _geyser_pb2.CommitmentLevel diff --git a/python/yellowstone-fumarole-client/yellowstone_api/fumarole_v2_pb2_grpc.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_proto/fumarole_v2_pb2_grpc.py similarity index 99% 
rename from python/yellowstone-fumarole-client/yellowstone_api/fumarole_v2_pb2_grpc.py rename to python/yellowstone-fumarole-client/yellowstone_fumarole_proto/fumarole_v2_pb2_grpc.py index 3197d83..e7416c5 100644 --- a/python/yellowstone-fumarole-client/yellowstone_api/fumarole_v2_pb2_grpc.py +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_proto/fumarole_v2_pb2_grpc.py @@ -3,7 +3,7 @@ import grpc import warnings -import yellowstone_api.fumarole_v2_pb2 as fumarole__v2__pb2 +import yellowstone_fumarole_proto.fumarole_v2_pb2 as fumarole__v2__pb2 GRPC_GENERATED_VERSION = '1.71.0' GRPC_VERSION = grpc.__version__ diff --git a/python/yellowstone-fumarole-client/yellowstone_api/geyser_pb2.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_proto/geyser_pb2.py similarity index 99% rename from python/yellowstone-fumarole-client/yellowstone_api/geyser_pb2.py rename to python/yellowstone-fumarole-client/yellowstone_fumarole_proto/geyser_pb2.py index f291e70..79a8781 100644 --- a/python/yellowstone-fumarole-client/yellowstone_api/geyser_pb2.py +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_proto/geyser_pb2.py @@ -23,9 +23,9 @@ from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -import yellowstone_api.solana_storage_pb2 as solana__storage__pb2 +import yellowstone_fumarole_proto.solana_storage_pb2 as solana__storage__pb2 -from yellowstone_api.solana_storage_pb2 import * +from yellowstone_fumarole_proto.solana_storage_pb2 import * DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cgeyser.proto\x12\x06geyser\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x14solana-storage.proto\"\x9c\n\n\x10SubscribeRequest\x12\x38\n\x08\x61\x63\x63ounts\x18\x01 \x03(\x0b\x32&.geyser.SubscribeRequest.AccountsEntry\x12\x32\n\x05slots\x18\x02 \x03(\x0b\x32#.geyser.SubscribeRequest.SlotsEntry\x12@\n\x0ctransactions\x18\x03 \x03(\x0b\x32*.geyser.SubscribeRequest.TransactionsEntry\x12M\n\x13transactions_status\x18\n 
\x03(\x0b\x32\x30.geyser.SubscribeRequest.TransactionsStatusEntry\x12\x34\n\x06\x62locks\x18\x04 \x03(\x0b\x32$.geyser.SubscribeRequest.BlocksEntry\x12=\n\x0b\x62locks_meta\x18\x05 \x03(\x0b\x32(.geyser.SubscribeRequest.BlocksMetaEntry\x12\x32\n\x05\x65ntry\x18\x08 \x03(\x0b\x32#.geyser.SubscribeRequest.EntryEntry\x12\x30\n\ncommitment\x18\x06 \x01(\x0e\x32\x17.geyser.CommitmentLevelH\x00\x88\x01\x01\x12\x46\n\x13\x61\x63\x63ounts_data_slice\x18\x07 \x03(\x0b\x32).geyser.SubscribeRequestAccountsDataSlice\x12/\n\x04ping\x18\t \x01(\x0b\x32\x1c.geyser.SubscribeRequestPingH\x01\x88\x01\x01\x12\x16\n\tfrom_slot\x18\x0b \x01(\x04H\x02\x88\x01\x01\x1aW\n\rAccountsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.geyser.SubscribeRequestFilterAccounts:\x02\x38\x01\x1aQ\n\nSlotsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x32\n\x05value\x18\x02 \x01(\x0b\x32#.geyser.SubscribeRequestFilterSlots:\x02\x38\x01\x1a_\n\x11TransactionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x39\n\x05value\x18\x02 \x01(\x0b\x32*.geyser.SubscribeRequestFilterTransactions:\x02\x38\x01\x1a\x65\n\x17TransactionsStatusEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x39\n\x05value\x18\x02 \x01(\x0b\x32*.geyser.SubscribeRequestFilterTransactions:\x02\x38\x01\x1aS\n\x0b\x42locksEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x33\n\x05value\x18\x02 \x01(\x0b\x32$.geyser.SubscribeRequestFilterBlocks:\x02\x38\x01\x1a[\n\x0f\x42locksMetaEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x37\n\x05value\x18\x02 \x01(\x0b\x32(.geyser.SubscribeRequestFilterBlocksMeta:\x02\x38\x01\x1aQ\n\nEntryEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x32\n\x05value\x18\x02 \x01(\x0b\x32#.geyser.SubscribeRequestFilterEntry:\x02\x38\x01\x42\r\n\x0b_commitmentB\x07\n\x05_pingB\x0c\n\n_from_slot\"\xbf\x01\n\x1eSubscribeRequestFilterAccounts\x12\x0f\n\x07\x61\x63\x63ount\x18\x02 \x03(\t\x12\r\n\x05owner\x18\x03 \x03(\t\x12=\n\x07\x66ilters\x18\x04 
\x03(\x0b\x32,.geyser.SubscribeRequestFilterAccountsFilter\x12#\n\x16nonempty_txn_signature\x18\x05 \x01(\x08H\x00\x88\x01\x01\x42\x19\n\x17_nonempty_txn_signature\"\xf3\x01\n$SubscribeRequestFilterAccountsFilter\x12\x44\n\x06memcmp\x18\x01 \x01(\x0b\x32\x32.geyser.SubscribeRequestFilterAccountsFilterMemcmpH\x00\x12\x12\n\x08\x64\x61tasize\x18\x02 \x01(\x04H\x00\x12\x1d\n\x13token_account_state\x18\x03 \x01(\x08H\x00\x12H\n\x08lamports\x18\x04 \x01(\x0b\x32\x34.geyser.SubscribeRequestFilterAccountsFilterLamportsH\x00\x42\x08\n\x06\x66ilter\"y\n*SubscribeRequestFilterAccountsFilterMemcmp\x12\x0e\n\x06offset\x18\x01 \x01(\x04\x12\x0f\n\x05\x62ytes\x18\x02 \x01(\x0cH\x00\x12\x10\n\x06\x62\x61se58\x18\x03 \x01(\tH\x00\x12\x10\n\x06\x62\x61se64\x18\x04 \x01(\tH\x00\x42\x06\n\x04\x64\x61ta\"m\n,SubscribeRequestFilterAccountsFilterLamports\x12\x0c\n\x02\x65q\x18\x01 \x01(\x04H\x00\x12\x0c\n\x02ne\x18\x02 \x01(\x04H\x00\x12\x0c\n\x02lt\x18\x03 \x01(\x04H\x00\x12\x0c\n\x02gt\x18\x04 \x01(\x04H\x00\x42\x05\n\x03\x63mp\"\x8f\x01\n\x1bSubscribeRequestFilterSlots\x12!\n\x14\x66ilter_by_commitment\x18\x01 \x01(\x08H\x00\x88\x01\x01\x12\x1e\n\x11interslot_updates\x18\x02 \x01(\x08H\x01\x88\x01\x01\x42\x17\n\x15_filter_by_commitmentB\x14\n\x12_interslot_updates\"\xd2\x01\n\"SubscribeRequestFilterTransactions\x12\x11\n\x04vote\x18\x01 \x01(\x08H\x00\x88\x01\x01\x12\x13\n\x06\x66\x61iled\x18\x02 \x01(\x08H\x01\x88\x01\x01\x12\x16\n\tsignature\x18\x05 \x01(\tH\x02\x88\x01\x01\x12\x17\n\x0f\x61\x63\x63ount_include\x18\x03 \x03(\t\x12\x17\n\x0f\x61\x63\x63ount_exclude\x18\x04 \x03(\t\x12\x18\n\x10\x61\x63\x63ount_required\x18\x06 \x03(\tB\x07\n\x05_voteB\t\n\x07_failedB\x0c\n\n_signature\"\xd9\x01\n\x1cSubscribeRequestFilterBlocks\x12\x17\n\x0f\x61\x63\x63ount_include\x18\x01 \x03(\t\x12!\n\x14include_transactions\x18\x02 \x01(\x08H\x00\x88\x01\x01\x12\x1d\n\x10include_accounts\x18\x03 \x01(\x08H\x01\x88\x01\x01\x12\x1c\n\x0finclude_entries\x18\x04 
\x01(\x08H\x02\x88\x01\x01\x42\x17\n\x15_include_transactionsB\x13\n\x11_include_accountsB\x12\n\x10_include_entries\"\"\n SubscribeRequestFilterBlocksMeta\"\x1d\n\x1bSubscribeRequestFilterEntry\"C\n!SubscribeRequestAccountsDataSlice\x12\x0e\n\x06offset\x18\x01 \x01(\x04\x12\x0e\n\x06length\x18\x02 \x01(\x04\"\"\n\x14SubscribeRequestPing\x12\n\n\x02id\x18\x01 \x01(\x05\"\xb5\x04\n\x0fSubscribeUpdate\x12\x0f\n\x07\x66ilters\x18\x01 \x03(\t\x12\x31\n\x07\x61\x63\x63ount\x18\x02 \x01(\x0b\x32\x1e.geyser.SubscribeUpdateAccountH\x00\x12+\n\x04slot\x18\x03 \x01(\x0b\x32\x1b.geyser.SubscribeUpdateSlotH\x00\x12\x39\n\x0btransaction\x18\x04 \x01(\x0b\x32\".geyser.SubscribeUpdateTransactionH\x00\x12\x46\n\x12transaction_status\x18\n \x01(\x0b\x32(.geyser.SubscribeUpdateTransactionStatusH\x00\x12-\n\x05\x62lock\x18\x05 \x01(\x0b\x32\x1c.geyser.SubscribeUpdateBlockH\x00\x12+\n\x04ping\x18\x06 \x01(\x0b\x32\x1b.geyser.SubscribeUpdatePingH\x00\x12+\n\x04pong\x18\t \x01(\x0b\x32\x1b.geyser.SubscribeUpdatePongH\x00\x12\x36\n\nblock_meta\x18\x07 \x01(\x0b\x32 .geyser.SubscribeUpdateBlockMetaH\x00\x12-\n\x05\x65ntry\x18\x08 \x01(\x0b\x32\x1c.geyser.SubscribeUpdateEntryH\x00\x12.\n\ncreated_at\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x0e\n\x0cupdate_oneof\"o\n\x16SubscribeUpdateAccount\x12\x33\n\x07\x61\x63\x63ount\x18\x01 \x01(\x0b\x32\".geyser.SubscribeUpdateAccountInfo\x12\x0c\n\x04slot\x18\x02 \x01(\x04\x12\x12\n\nis_startup\x18\x03 \x01(\x08\"\xc8\x01\n\x1aSubscribeUpdateAccountInfo\x12\x0e\n\x06pubkey\x18\x01 \x01(\x0c\x12\x10\n\x08lamports\x18\x02 \x01(\x04\x12\r\n\x05owner\x18\x03 \x01(\x0c\x12\x12\n\nexecutable\x18\x04 \x01(\x08\x12\x12\n\nrent_epoch\x18\x05 \x01(\x04\x12\x0c\n\x04\x64\x61ta\x18\x06 \x01(\x0c\x12\x15\n\rwrite_version\x18\x07 \x01(\x04\x12\x1a\n\rtxn_signature\x18\x08 \x01(\x0cH\x00\x88\x01\x01\x42\x10\n\x0e_txn_signature\"\x8f\x01\n\x13SubscribeUpdateSlot\x12\x0c\n\x04slot\x18\x01 \x01(\x04\x12\x13\n\x06parent\x18\x02 
\x01(\x04H\x00\x88\x01\x01\x12\"\n\x06status\x18\x03 \x01(\x0e\x32\x12.geyser.SlotStatus\x12\x17\n\ndead_error\x18\x04 \x01(\tH\x01\x88\x01\x01\x42\t\n\x07_parentB\r\n\x0b_dead_error\"g\n\x1aSubscribeUpdateTransaction\x12;\n\x0btransaction\x18\x01 \x01(\x0b\x32&.geyser.SubscribeUpdateTransactionInfo\x12\x0c\n\x04slot\x18\x02 \x01(\x04\"\xd8\x01\n\x1eSubscribeUpdateTransactionInfo\x12\x11\n\tsignature\x18\x01 \x01(\x0c\x12\x0f\n\x07is_vote\x18\x02 \x01(\x08\x12?\n\x0btransaction\x18\x03 \x01(\x0b\x32*.solana.storage.ConfirmedBlock.Transaction\x12\x42\n\x04meta\x18\x04 \x01(\x0b\x32\x34.solana.storage.ConfirmedBlock.TransactionStatusMeta\x12\r\n\x05index\x18\x05 \x01(\x04\"\xa1\x01\n SubscribeUpdateTransactionStatus\x12\x0c\n\x04slot\x18\x01 \x01(\x04\x12\x11\n\tsignature\x18\x02 \x01(\x0c\x12\x0f\n\x07is_vote\x18\x03 \x01(\x08\x12\r\n\x05index\x18\x04 \x01(\x04\x12<\n\x03\x65rr\x18\x05 \x01(\x0b\x32/.solana.storage.ConfirmedBlock.TransactionError\"\xa0\x04\n\x14SubscribeUpdateBlock\x12\x0c\n\x04slot\x18\x01 \x01(\x04\x12\x11\n\tblockhash\x18\x02 \x01(\t\x12\x37\n\x07rewards\x18\x03 \x01(\x0b\x32&.solana.storage.ConfirmedBlock.Rewards\x12@\n\nblock_time\x18\x04 \x01(\x0b\x32,.solana.storage.ConfirmedBlock.UnixTimestamp\x12@\n\x0c\x62lock_height\x18\x05 \x01(\x0b\x32*.solana.storage.ConfirmedBlock.BlockHeight\x12\x13\n\x0bparent_slot\x18\x07 \x01(\x04\x12\x18\n\x10parent_blockhash\x18\x08 \x01(\t\x12\"\n\x1a\x65xecuted_transaction_count\x18\t \x01(\x04\x12<\n\x0ctransactions\x18\x06 \x03(\x0b\x32&.geyser.SubscribeUpdateTransactionInfo\x12\x1d\n\x15updated_account_count\x18\n \x01(\x04\x12\x34\n\x08\x61\x63\x63ounts\x18\x0b \x03(\x0b\x32\".geyser.SubscribeUpdateAccountInfo\x12\x15\n\rentries_count\x18\x0c \x01(\x04\x12-\n\x07\x65ntries\x18\r \x03(\x0b\x32\x1c.geyser.SubscribeUpdateEntry\"\xe2\x02\n\x18SubscribeUpdateBlockMeta\x12\x0c\n\x04slot\x18\x01 \x01(\x04\x12\x11\n\tblockhash\x18\x02 \x01(\t\x12\x37\n\x07rewards\x18\x03 
\x01(\x0b\x32&.solana.storage.ConfirmedBlock.Rewards\x12@\n\nblock_time\x18\x04 \x01(\x0b\x32,.solana.storage.ConfirmedBlock.UnixTimestamp\x12@\n\x0c\x62lock_height\x18\x05 \x01(\x0b\x32*.solana.storage.ConfirmedBlock.BlockHeight\x12\x13\n\x0bparent_slot\x18\x06 \x01(\x04\x12\x18\n\x10parent_blockhash\x18\x07 \x01(\t\x12\"\n\x1a\x65xecuted_transaction_count\x18\x08 \x01(\x04\x12\x15\n\rentries_count\x18\t \x01(\x04\"\x9d\x01\n\x14SubscribeUpdateEntry\x12\x0c\n\x04slot\x18\x01 \x01(\x04\x12\r\n\x05index\x18\x02 \x01(\x04\x12\x12\n\nnum_hashes\x18\x03 \x01(\x04\x12\x0c\n\x04hash\x18\x04 \x01(\x0c\x12\"\n\x1a\x65xecuted_transaction_count\x18\x05 \x01(\x04\x12\"\n\x1astarting_transaction_index\x18\x06 \x01(\x04\"\x15\n\x13SubscribeUpdatePing\"!\n\x13SubscribeUpdatePong\x12\n\n\x02id\x18\x01 \x01(\x05\"\x1c\n\x0bPingRequest\x12\r\n\x05\x63ount\x18\x01 \x01(\x05\"\x1d\n\x0cPongResponse\x12\r\n\x05\x63ount\x18\x01 \x01(\x05\"\\\n\x19GetLatestBlockhashRequest\x12\x30\n\ncommitment\x18\x01 \x01(\x0e\x32\x17.geyser.CommitmentLevelH\x00\x88\x01\x01\x42\r\n\x0b_commitment\"^\n\x1aGetLatestBlockhashResponse\x12\x0c\n\x04slot\x18\x01 \x01(\x04\x12\x11\n\tblockhash\x18\x02 \x01(\t\x12\x1f\n\x17last_valid_block_height\x18\x03 \x01(\x04\"X\n\x15GetBlockHeightRequest\x12\x30\n\ncommitment\x18\x01 \x01(\x0e\x32\x17.geyser.CommitmentLevelH\x00\x88\x01\x01\x42\r\n\x0b_commitment\".\n\x16GetBlockHeightResponse\x12\x14\n\x0c\x62lock_height\x18\x01 \x01(\x04\"Q\n\x0eGetSlotRequest\x12\x30\n\ncommitment\x18\x01 \x01(\x0e\x32\x17.geyser.CommitmentLevelH\x00\x88\x01\x01\x42\r\n\x0b_commitment\"\x1f\n\x0fGetSlotResponse\x12\x0c\n\x04slot\x18\x01 \x01(\x04\"\x13\n\x11GetVersionRequest\"%\n\x12GetVersionResponse\x12\x0f\n\x07version\x18\x01 \x01(\t\"m\n\x17IsBlockhashValidRequest\x12\x11\n\tblockhash\x18\x01 \x01(\t\x12\x30\n\ncommitment\x18\x02 \x01(\x0e\x32\x17.geyser.CommitmentLevelH\x00\x88\x01\x01\x42\r\n\x0b_commitment\"7\n\x18IsBlockhashValidResponse\x12\x0c\n\x04slot\x18\x01 
\x01(\x04\x12\r\n\x05valid\x18\x02 \x01(\x08*>\n\x0f\x43ommitmentLevel\x12\r\n\tPROCESSED\x10\x00\x12\r\n\tCONFIRMED\x10\x01\x12\r\n\tFINALIZED\x10\x02*\xa1\x01\n\nSlotStatus\x12\x12\n\x0eSLOT_PROCESSED\x10\x00\x12\x12\n\x0eSLOT_CONFIRMED\x10\x01\x12\x12\n\x0eSLOT_FINALIZED\x10\x02\x12\x1d\n\x19SLOT_FIRST_SHRED_RECEIVED\x10\x03\x12\x12\n\x0eSLOT_COMPLETED\x10\x04\x12\x15\n\x11SLOT_CREATED_BANK\x10\x05\x12\r\n\tSLOT_DEAD\x10\x06\x32\x93\x04\n\x06Geyser\x12\x44\n\tSubscribe\x12\x18.geyser.SubscribeRequest\x1a\x17.geyser.SubscribeUpdate\"\x00(\x01\x30\x01\x12\x33\n\x04Ping\x12\x13.geyser.PingRequest\x1a\x14.geyser.PongResponse\"\x00\x12]\n\x12GetLatestBlockhash\x12!.geyser.GetLatestBlockhashRequest\x1a\".geyser.GetLatestBlockhashResponse\"\x00\x12Q\n\x0eGetBlockHeight\x12\x1d.geyser.GetBlockHeightRequest\x1a\x1e.geyser.GetBlockHeightResponse\"\x00\x12<\n\x07GetSlot\x12\x16.geyser.GetSlotRequest\x1a\x17.geyser.GetSlotResponse\"\x00\x12W\n\x10IsBlockhashValid\x12\x1f.geyser.IsBlockhashValidRequest\x1a .geyser.IsBlockhashValidResponse\"\x00\x12\x45\n\nGetVersion\x12\x19.geyser.GetVersionRequest\x1a\x1a.geyser.GetVersionResponse\"\x00\x42;Z9github.com/rpcpool/yellowstone-grpc/examples/golang/protoP\x01\x62\x06proto3') diff --git a/python/yellowstone-fumarole-client/yellowstone_api/geyser_pb2.pyi b/python/yellowstone-fumarole-client/yellowstone_fumarole_proto/geyser_pb2.pyi similarity index 92% rename from python/yellowstone-fumarole-client/yellowstone_api/geyser_pb2.pyi rename to python/yellowstone-fumarole-client/yellowstone_fumarole_proto/geyser_pb2.pyi index 09417be..3ef9a49 100644 --- a/python/yellowstone-fumarole-client/yellowstone_api/geyser_pb2.pyi +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_proto/geyser_pb2.pyi @@ -1,30 +1,30 @@ from google.protobuf import timestamp_pb2 as _timestamp_pb2 -import yellowstone_api.solana_storage_pb2 as _solana_storage_pb2 +import yellowstone_fumarole_proto.solana_storage_pb2 as _solana_storage_pb2 from 
google.protobuf.internal import containers as _containers from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union -from yellowstone_api.solana_storage_pb2 import ConfirmedBlock as ConfirmedBlock -from yellowstone_api.solana_storage_pb2 import ConfirmedTransaction as ConfirmedTransaction -from yellowstone_api.solana_storage_pb2 import Transaction as Transaction -from yellowstone_api.solana_storage_pb2 import Message as Message -from yellowstone_api.solana_storage_pb2 import MessageHeader as MessageHeader -from yellowstone_api.solana_storage_pb2 import MessageAddressTableLookup as MessageAddressTableLookup -from yellowstone_api.solana_storage_pb2 import TransactionStatusMeta as TransactionStatusMeta -from yellowstone_api.solana_storage_pb2 import TransactionError as TransactionError -from yellowstone_api.solana_storage_pb2 import InnerInstructions as InnerInstructions -from yellowstone_api.solana_storage_pb2 import InnerInstruction as InnerInstruction -from yellowstone_api.solana_storage_pb2 import CompiledInstruction as CompiledInstruction -from yellowstone_api.solana_storage_pb2 import TokenBalance as TokenBalance -from yellowstone_api.solana_storage_pb2 import UiTokenAmount as UiTokenAmount -from yellowstone_api.solana_storage_pb2 import ReturnData as ReturnData -from yellowstone_api.solana_storage_pb2 import Reward as Reward -from yellowstone_api.solana_storage_pb2 import Rewards as Rewards -from yellowstone_api.solana_storage_pb2 import UnixTimestamp as UnixTimestamp -from yellowstone_api.solana_storage_pb2 import BlockHeight as BlockHeight -from yellowstone_api.solana_storage_pb2 import NumPartitions as NumPartitions -from yellowstone_api.solana_storage_pb2 import RewardType as RewardType +from 
yellowstone_fumarole_proto.solana_storage_pb2 import ConfirmedBlock as ConfirmedBlock +from yellowstone_fumarole_proto.solana_storage_pb2 import ConfirmedTransaction as ConfirmedTransaction +from yellowstone_fumarole_proto.solana_storage_pb2 import Transaction as Transaction +from yellowstone_fumarole_proto.solana_storage_pb2 import Message as Message +from yellowstone_fumarole_proto.solana_storage_pb2 import MessageHeader as MessageHeader +from yellowstone_fumarole_proto.solana_storage_pb2 import MessageAddressTableLookup as MessageAddressTableLookup +from yellowstone_fumarole_proto.solana_storage_pb2 import TransactionStatusMeta as TransactionStatusMeta +from yellowstone_fumarole_proto.solana_storage_pb2 import TransactionError as TransactionError +from yellowstone_fumarole_proto.solana_storage_pb2 import InnerInstructions as InnerInstructions +from yellowstone_fumarole_proto.solana_storage_pb2 import InnerInstruction as InnerInstruction +from yellowstone_fumarole_proto.solana_storage_pb2 import CompiledInstruction as CompiledInstruction +from yellowstone_fumarole_proto.solana_storage_pb2 import TokenBalance as TokenBalance +from yellowstone_fumarole_proto.solana_storage_pb2 import UiTokenAmount as UiTokenAmount +from yellowstone_fumarole_proto.solana_storage_pb2 import ReturnData as ReturnData +from yellowstone_fumarole_proto.solana_storage_pb2 import Reward as Reward +from yellowstone_fumarole_proto.solana_storage_pb2 import Rewards as Rewards +from yellowstone_fumarole_proto.solana_storage_pb2 import UnixTimestamp as UnixTimestamp +from yellowstone_fumarole_proto.solana_storage_pb2 import BlockHeight as BlockHeight +from yellowstone_fumarole_proto.solana_storage_pb2 import NumPartitions as NumPartitions +from yellowstone_fumarole_proto.solana_storage_pb2 import RewardType as RewardType DESCRIPTOR: _descriptor.FileDescriptor Unspecified: _solana_storage_pb2.RewardType diff --git a/python/yellowstone-fumarole-client/yellowstone_api/geyser_pb2_grpc.py 
b/python/yellowstone-fumarole-client/yellowstone_fumarole_proto/geyser_pb2_grpc.py similarity index 99% rename from python/yellowstone-fumarole-client/yellowstone_api/geyser_pb2_grpc.py rename to python/yellowstone-fumarole-client/yellowstone_fumarole_proto/geyser_pb2_grpc.py index eb8f5c7..c3d925e 100644 --- a/python/yellowstone-fumarole-client/yellowstone_api/geyser_pb2_grpc.py +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_proto/geyser_pb2_grpc.py @@ -3,7 +3,7 @@ import grpc import warnings -import yellowstone_api.geyser_pb2 as geyser__pb2 +import yellowstone_fumarole_proto.geyser_pb2 as geyser__pb2 GRPC_GENERATED_VERSION = '1.71.0' GRPC_VERSION = grpc.__version__ diff --git a/python/yellowstone-fumarole-client/yellowstone_api/solana_storage_pb2.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_proto/solana_storage_pb2.py similarity index 100% rename from python/yellowstone-fumarole-client/yellowstone_api/solana_storage_pb2.py rename to python/yellowstone-fumarole-client/yellowstone_fumarole_proto/solana_storage_pb2.py diff --git a/python/yellowstone-fumarole-client/yellowstone_api/solana_storage_pb2.pyi b/python/yellowstone-fumarole-client/yellowstone_fumarole_proto/solana_storage_pb2.pyi similarity index 100% rename from python/yellowstone-fumarole-client/yellowstone_api/solana_storage_pb2.pyi rename to python/yellowstone-fumarole-client/yellowstone_fumarole_proto/solana_storage_pb2.pyi diff --git a/python/yellowstone-fumarole-client/yellowstone_api/solana_storage_pb2_grpc.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_proto/solana_storage_pb2_grpc.py similarity index 100% rename from python/yellowstone-fumarole-client/yellowstone_api/solana_storage_pb2_grpc.py rename to python/yellowstone-fumarole-client/yellowstone_fumarole_proto/solana_storage_pb2_grpc.py From 4bb8eb8e3e3f3fc4bcba35399f789be6af243108 Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Thu, 22 May 2025 17:11:15 -0400 Subject: [PATCH 30/56] wip --- 
.../tests/test_aio_utils.py | 111 ++++++++++++ .../tests/test_fumarole_client_intg.py | 26 +-- .../yellowstone_fumarole_client/__init__.py | 34 +++- .../runtime/__init__.py | 2 - .../runtime/aio.py | 169 ++++++++++-------- .../runtime/state_machine.py | 4 +- .../utils/__init__.py | 0 .../yellowstone_fumarole_client/utils/aio.py | 159 ++++++++++++++++ 8 files changed, 406 insertions(+), 99 deletions(-) create mode 100644 python/yellowstone-fumarole-client/tests/test_aio_utils.py create mode 100644 python/yellowstone-fumarole-client/yellowstone_fumarole_client/utils/__init__.py create mode 100644 python/yellowstone-fumarole-client/yellowstone_fumarole_client/utils/aio.py diff --git a/python/yellowstone-fumarole-client/tests/test_aio_utils.py b/python/yellowstone-fumarole-client/tests/test_aio_utils.py new file mode 100644 index 0000000..7d9f362 --- /dev/null +++ b/python/yellowstone-fumarole-client/tests/test_aio_utils.py @@ -0,0 +1,111 @@ +import logging +import pytest +import asyncio +from yellowstone_fumarole_client.utils.aio import JoinSet, never + +# Helper coroutine for testing +@pytest.mark.asyncio +async def test_spawn_and_join_single_task(): + """Test spawning and joining a single task.""" + join_set = JoinSet() + + + async def test(): + await asyncio.sleep(0.1) + return 1 + + + handle = join_set.spawn(test()) + + assert handle + await asyncio.sleep(1) + + assert len(join_set) == 1 + + result = join_set.join_next() + assert result is not None + + result = await result + assert result.result() == 1 + + +# Helper coroutine for testing +@pytest.mark.asyncio +async def test_empty_joinset(): + join_set = JoinSet() + + maybe = join_set.join_next() + assert maybe is None + assert len(join_set) == 0 + + +@pytest.mark.asyncio +async def test_it_should_handle_canceling_spawned_task(): + join_set = JoinSet() + + async def test(): + await asyncio.sleep(100) + return 1 + + handle = join_set.spawn(test()) + # 1st case : Test cancel before join_next + handle.cancel() + + 
assert len(join_set) == 1 + result = await join_set.join_next() + assert result.cancelled() + assert len(join_set) == 0 + + # 2nd case : Test cancel after join_next + + handle = join_set.spawn(test()) + fut = join_set.join_next() + assert fut + assert len(join_set) == 1 + + handle.cancel() + result = await fut + assert result.cancelled() + assert len(join_set) == 0 + + +@pytest.mark.asyncio +async def test_it_should_be_cancel_safe(): + join_set = JoinSet() + + async def test(): + return 1 + + handle = join_set.spawn(test()) + fut: asyncio.Future = join_set.join_next() + assert fut + assert len(join_set) == 1 + logging.debug("Cancelling the future") + fut.cancel() + + assert fut.cancelled() + assert len(join_set) == 1 + + logging.debug("Waiting for the future to finish") + fut = join_set.join_next() + + assert fut + + result = await fut + assert result.result() == 1 + assert len(join_set) == 0 + + + +@pytest.mark.asyncio +async def test_spawn_task_identity(): + join_set = JoinSet() + + async def test(): + return 1 + + handle = join_set.spawn(test()) + fut = join_set.join_next() + result = await fut + assert result.get_name() == handle.id() + assert len(join_set) == 0 \ No newline at end of file diff --git a/python/yellowstone-fumarole-client/tests/test_fumarole_client_intg.py b/python/yellowstone-fumarole-client/tests/test_fumarole_client_intg.py index 5352fc9..e8931ff 100644 --- a/python/yellowstone-fumarole-client/tests/test_fumarole_client_intg.py +++ b/python/yellowstone-fumarole-client/tests/test_fumarole_client_intg.py @@ -76,7 +76,9 @@ async def test_dragonsmouth_adapter(fumarole_config): logging.debug("test_fumarole_delete_all") # Create a FumaroleClient instance - fumarole_config.x_metadata = {"x-subscription-id": str(uuid.uuid4())} + # x_subscription_id = str(uuid.uuid4()) + x_subscription_id = "d2ec45b8-4c2f-4678-a8dd-55cabcc1280a" + fumarole_config.x_metadata = {"x-subscription-id": x_subscription_id} client: FumaroleClient = await 
FumaroleClient.connect(fumarole_config) await client.delete_all_consumer_groups() @@ -91,7 +93,7 @@ async def test_dragonsmouth_adapter(fumarole_config): session = await client.dragonsmouth_subscribe( consumer_group_name="test", request=SubscribeRequest( - accounts={"fumarole": SubscribeRequestFilterAccounts()}, + # accounts={"fumarole": SubscribeRequestFilterAccounts()}, transactions={"fumarole": SubscribeRequestFilterTransactions()}, blocks_meta={"fumarole": SubscribeRequestFilterBlocksMeta()}, entry={"fumarole": SubscribeRequestFilterEntry()}, @@ -100,19 +102,21 @@ async def test_dragonsmouth_adapter(fumarole_config): ) dragonsmouth_source = session.source - fh = session.fumarole_handle + handle = session.fumarole_handle while True: - tasks = [asyncio.create_task(dragonsmouth_source.get()), fh] + tasks = [ + asyncio.create_task(dragonsmouth_source.get()), + handle + ] done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED) - for t in pending: - t.cancel() - - for task in done: - if task == tasks[0]: - print(f"Consumed: {type(task.result())}") + for t in done: + if tasks[0] == t: + result = t.result() + logging.debug(f"Consumed: {type(result)}") else: - print(f"session ended with: {type(task.result())}") + result = t.result() + logging.debug(f"Handle: {type(result)}") return diff --git a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py index ed0f869..a46fdfc 100644 --- a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py @@ -135,8 +135,8 @@ async def dragonsmouth_subscribe_with_config( config: FumaroleSubscribeConfig, ) -> DragonsmouthAdapterSession: """Subscribe to a dragonsmouth stream with custom configuration.""" - dragonsmouth_outlet = asyncio.Queue(maxsize=DEFAULT_DRAGONSMOUTH_CAPACITY) - dragonsmouth_inlet = 
asyncio.Queue(maxsize=DEFAULT_DRAGONSMOUTH_CAPACITY) + dragonsmouth_outlet = asyncio.Queue(maxsize=config.data_channel_capacity) + dragonsmouth_inlet = asyncio.Queue(maxsize=config.data_channel_capacity) fume_control_plane_q = asyncio.Queue(maxsize=100) initial_join = JoinControlPlane(consumer_group_name=consumer_group_name) @@ -148,21 +148,39 @@ async def dragonsmouth_subscribe_with_config( ) async def control_plane_sink(): - try: - while True: + while True: + try: update = await fume_control_plane_q.get() yield update - finally: - FumaroleClient.logger.debug("Control plane sink closed") + except asyncio.QueueShutDown: + break + fume_control_plane_stream_rx: grpc.aio.StreamStreamMultiCallable = ( self.stub.Subscribe(control_plane_sink()) ) + control_response: ControlResponse = await fume_control_plane_stream_rx.read() init = control_response.init if init is None: raise ValueError(f"Unexpected initial response: {control_response}") + + # Once we have the initial response, we can spin a task to read from the stream + # and put the updates into the queue. + # This is a bit of a hack, but we need a Queue not a StreamStreamMultiCallable + # because Queue are cancel-safe, while Stream are not, or at least didn't find any docs about it. 
+ fume_control_plane_rx_q = asyncio.Queue(maxsize=100) + async def control_plane_source(): + while True: + try: + async for update in fume_control_plane_stream_rx: + await fume_control_plane_rx_q.put(update) + except asyncio.QueueShutDown: + break + + _cp_src_task = asyncio.create_task(control_plane_source()) + FumaroleClient.logger.debug(f"Control response: {control_response}") @@ -208,8 +226,8 @@ async def control_plane_sink(): dragonsmouth_bidi=dm_bidi, subscribe_request=request, consumer_group_name=consumer_group_name, - control_plane_q=fume_control_plane_q, - control_plane_stream_reader=fume_control_plane_stream_rx, + control_plane_tx_q=fume_control_plane_q, + control_plane_rx_q=fume_control_plane_rx_q, dragonsmouth_outlet=dragonsmouth_outlet, commit_interval=config.commit_interval, gc_interval=config.gc_interval, diff --git a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/__init__.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/__init__.py index bf4ed30..e69de29 100644 --- a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/__init__.py +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/__init__.py @@ -1,2 +0,0 @@ -from yellowstone_fumarole_client.runtime.state_machine import * -from yellowstone_fumarole_client.runtime.aio import * diff --git a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py index 17976e7..516db42 100644 --- a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py @@ -1,5 +1,6 @@ # DataPlaneConn import asyncio +import grpc from typing import Optional, List from collections import deque from dataclasses import dataclass @@ -28,6 +29,7 @@ from yellowstone_fumarole_proto.fumarole_v2_pb2_grpc import ( Fumarole as GrpcFumaroleClient, ) 
+from yellowstone_fumarole_client.utils.aio import JoinSet, never from yellowstone_fumarole_client.grpc_connectivity import FumaroleGrpcConnector import logging @@ -90,8 +92,8 @@ def __init__( dragonsmouth_bidi: DragonsmouthSubscribeRequestBidi, subscribe_request: SubscribeRequest, consumer_group_name: str, - control_plane_q: asyncio.Queue, - control_plane_stream_reader, + control_plane_tx_q: asyncio.Queue, + control_plane_rx_q: asyncio.Queue, dragonsmouth_outlet: asyncio.Queue, commit_interval: float, # in seconds gc_interval: int, @@ -101,8 +103,8 @@ def __init__( self.dragonsmouth_bidi = dragonsmouth_bidi self.subscribe_request = subscribe_request self.consumer_group_name = consumer_group_name - self.control_plane_tx = control_plane_q - self.control_plane_stream_rx = control_plane_stream_reader + self.control_plane_tx = control_plane_tx_q + self.control_plane_rx = control_plane_rx_q self.dragonsmouth_outlet = dragonsmouth_outlet self.commit_interval = commit_interval self.last_commit = time.time() @@ -145,10 +147,11 @@ def commitment_level(self): def schedule_download_task_if_any(self): while True: - if ( - not self.download_task_runner_chans.download_task_queue_tx.qsize() < 100 - ): # Simulate try_reserve + download_task_queue_tx = self.download_task_runner_chans.download_task_queue_tx + assert download_task_queue_tx.maxsize == 10 + if download_task_queue_tx.full(): break + download_request = self.sm.pop_slot_to_download(self.commitment_level()) if not download_request: break @@ -158,7 +161,7 @@ def schedule_download_task_if_any(self): ) LOGGER.debug(f"Scheduling download task for slot {download_request.slot}") asyncio.create_task( - self.download_task_runner_chans.download_task_queue_tx.put( + download_task_queue_tx.put( download_task_args ) ) @@ -167,7 +170,7 @@ def handle_download_result(self, download_result: DownloadTaskResult): if download_result.kind == "Ok": completed = download_result.completed LOGGER.debug( - f"Download completed for slot 
{completed.slot}, shard {completed.shard_idx}" + f"Download completed for slot {completed.slot}, shard {completed.shard_idx}, {completed.total_event_downloaded} total events" ) self.sm.make_slot_download_progress(completed.slot, completed.shard_idx) else: @@ -202,7 +205,7 @@ async def drain_slot_status(self): for filter_name, filter in self.subscribe_request.slots.items(): if ( filter.filter_by_commitment - and slot_status.commitment_level.value == commitment + and slot_status.commitment_level == commitment ): matched_filters.append(filter_name) elif not filter.filter_by_commitment: @@ -212,10 +215,10 @@ async def drain_slot_status(self): update = SubscribeUpdate( filters=matched_filters, created_at=None, - update_oneof=SubscribeUpdateSlot( + slot=SubscribeUpdateSlot( slot=slot_status.slot, parent=slot_status.parent_slot, - status=slot_status.commitment_level.value, + status=slot_status.commitment_level, dead_error=slot_status.dead_error, ), ) @@ -257,17 +260,14 @@ async def run(self): self.sm.gc() ticks = 0 - if self.dragonsmouth_outlet.qsize() >= 100: # Simulate is_closed - LOGGER.debug("Detected dragonsmouth outlet closed") - break - commit_deadline = self.last_commit + self.commit_interval await self.poll_history_if_needed() self.schedule_download_task_if_any() + # asyncio queues are cancel safe tasks = [ asyncio.create_task(self.dragonsmouth_bidi.rx.get()), - asyncio.create_task(self.control_plane_stream_rx.read()), + asyncio.create_task(self.control_plane_rx.get()), asyncio.create_task( self.download_task_runner_chans.download_result_rx.get() ), @@ -358,7 +358,7 @@ def __init__( ): self.data_plane_channel_vec = data_plane_channel_vec self.connector = connector - self.tasks = [] + self.tasks = JoinSet() self.task_meta = {} self.cnc_rx = cnc_rx self.download_task_queue = download_task_queue @@ -449,7 +449,7 @@ def spawn_grpc_download_task(self, client_idx: int, task_spec: DownloadTaskArgs) ) task_id = self.task_counter self.task_counter += 1 - 
self.tasks.append(asyncio.create_task(task.run(task_id))) + self.tasks.spawn(task.run(task_id)) self.download_attempts[slot] = self.download_attempts.get(slot, 0) + 1 conn.permits -= 1 self.task_meta[task_id] = DataPlaneTaskMeta( @@ -465,30 +465,42 @@ def handle_control_command(self, cmd: DownloadTaskRunnerCommand): self.subscribe_request = cmd.subscribe_request async def run(self): - while self.outlet.qsize() < 100: # Simulate is_closed + while True: maybe_available_client_idx = self.find_least_use_client() tasks = [asyncio.create_task(self.cnc_rx.get())] if maybe_available_client_idx is not None: tasks.append(asyncio.create_task(self.download_task_queue.get())) - for task in self.tasks[:]: - if task.done(): - self.tasks.remove(task) - task_id, result = task.result() - await self.handle_data_plane_task_result(task_id, result) + else: + tasks.append(asyncio.create_task(never())) + + next_download_result_fut = self.tasks.join_next() + if next_download_result_fut: + tasks.append(next_download_result_fut) + else: + tasks.append(asyncio.create_task(never())) + + assert len(tasks) == 3 if tasks: - done, _ = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED) + done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED) + for task in pending: + task.cancel() + for task in done: try: result = task.result() if task == tasks[0]: # cnc_rx self.handle_control_command(result) - elif len(tasks) > 1 and task == tasks[1]: # download_task_queue + elif task == tasks[1]: # download_task_queue + assert maybe_available_client_idx is not None self.spawn_grpc_download_task( maybe_available_client_idx, result ) - except Exception as e: - LOGGER.debug(f"Error: {e}") - return + elif task == tasks[2]: # download_result_rx + download_task = task.result() + task_id, result = download_task.result() + await self.handle_data_plane_task_result(task_id, result) + except asyncio.QueueShutDown as e: + break LOGGER.debug("Closing GrpcDownloadTaskRunner loop") @@ -507,25 
+519,26 @@ def __init__( self.dragonsmouth_oulet = dragonsmouth_oulet def map_tonic_error_code_to_download_block_error( - self, code: str + self, e: grpc.aio.AioRpcError ) -> DownloadBlockError: - if code == "NotFound": + code = e.code() + if code == grpc.StatusCode.NOT_FOUND: return DownloadBlockError( kind="BlockShardNotFound", message="Block shard not found" ) - elif code == "Unavailable": + elif code == grpc.StatusCode.UNAVAILABLE: return DownloadBlockError(kind="Disconnected", message="Disconnected") elif code in ( - "Internal", - "Aborted", - "DataLoss", - "ResourceExhausted", - "Unknown", - "Cancelled", - "DeadlineExceeded", + grpc.StatusCode.INTERNAL, + grpc.StatusCode.ABORTED, + grpc.StatusCode.DATA_LOSS, + grpc.StatusCode.RESOURCE_EXHAUSTED, + grpc.StatusCode.UNKNOWN, + grpc.StatusCode.CANCELLED, + grpc.StatusCode.DEADLINE_EXCEEDED, ): return DownloadBlockError(kind="FailedDownload", message="Failed download") - elif code == "InvalidArgument": + elif code == grpc.StatusCode.INVALID_ARGUMENT: raise ValueError("Invalid argument") else: return DownloadBlockError(kind="Fatal", message=f"Unknown error: {code}") @@ -539,52 +552,54 @@ async def run(self, task_id: int) -> tuple[int, DownloadTaskResult]: ) try: resp = self.client.DownloadBlock(request) - except Exception as e: + except grpc.aio.AioRpcError as e: LOGGER.error(f"Download block error: {e}") return task_id, DownloadTaskResult( kind="Err", slot=self.download_request.slot, - err=self.map_tonic_error_code_to_download_block_error(str(e)), + err=self.map_tonic_error_code_to_download_block_error(e), ) total_event_downloaded = 0 - async for data in resp: - - kind = data.WhichOneof("response") - - match kind: - case "update": - update = data.update - assert update is not None, "Update is None" - total_event_downloaded += 1 - try: - await self.dragonsmouth_oulet.put(update) - except asyncio.QueueFull: + try: + async for data in resp: + kind = data.WhichOneof("response") + match kind: + case "update": + update 
= data.update + assert update is not None, "Update is None" + total_event_downloaded += 1 + try: + await self.dragonsmouth_oulet.put(update) + except asyncio.QueueShutDown: + return task_id, DownloadTaskResult( + kind="Err", + slot=self.download_request.slot, + err=DownloadBlockError( + kind="OutletDisconnected", message="Outlet disconnected" + ), + ) + case "block_shard_download_finish": return task_id, DownloadTaskResult( - kind="Err", - slot=self.download_request.slot, - err=DownloadBlockError( - kind="OutletDisconnected", message="Outlet disconnected" + kind="Ok", + completed=CompletedDownloadBlockTask( + slot=self.download_request.slot, + block_uid=self.download_request.block_uid, + shard_idx=0, + total_event_downloaded=total_event_downloaded, ), ) - case "block_shard_download_finish": - return task_id, DownloadTaskResult( - kind="Ok", - completed=CompletedDownloadBlockTask( - slot=self.download_request.slot, - block_uid=self.download_request.block_uid, - shard_idx=0, - total_event_downloaded=total_event_downloaded, - ), - ) - case _: - return task_id, DownloadTaskResult( - kind="Err", - slot=self.download_request.slot, - err=self.map_tonic_error_code_to_download_block_error( - "Unknown" - ), - ) + case unknown: + raise RuntimeError("Unexpected response kind: {unknown}") + except grpc.aio.AioRpcError as e: + LOGGER.error(f"Download block error: {e}") + return task_id, DownloadTaskResult( + kind="Err", + slot=self.download_request.slot, + err=self.map_tonic_error_code_to_download_block_error( + e + ), + ) return task_id, DownloadTaskResult( kind="Err", diff --git a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/state_machine.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/state_machine.py index 4ffceea..9515176 100644 --- a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/state_machine.py +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/state_machine.py @@ 
-137,7 +137,6 @@ def next_sequence(self) -> int: def gc(self) -> None: """Garbage collect old slots to respect memory retention limit.""" - LOGGER.debug("Garbage collecting old slots") while len(self.downloaded_slot) > self.slot_memory_retention: slot = self.downloaded_slot.pop(0) if self.downloaded_slot else None if slot is None: @@ -149,6 +148,7 @@ def gc(self) -> None: def queue_blockchain_event(self, events: List[BlockchainEvent]) -> None: """Queue blockchain events for processing.""" for event in events: + if event.offset < self.last_committed_offset: continue @@ -167,6 +167,7 @@ def queue_blockchain_event(self, events: List[BlockchainEvent]) -> None: commitment_level=event.commitment_level, dead_error=event.dead_error, ) + if event.slot in self.inflight_slot_shard_download: self.blocked_slot_status_update[event.slot].append(fume_status) else: @@ -228,6 +229,7 @@ def pop_slot_to_download(self, commitment=None) -> Optional[FumeDownloadRequest] session_sequence, blockchain_event = ( self.unprocessed_blockchain_event.popleft() ) + blockchain_event: BlockchainEvent = blockchain_event event_cl = blockchain_event.commitment_level if event_cl < min_commitment: diff --git a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/utils/__init__.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/utils/aio.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/utils/aio.py new file mode 100644 index 0000000..f243f23 --- /dev/null +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/utils/aio.py @@ -0,0 +1,159 @@ + + + + + +import asyncio +from collections import deque +import threading +import logging +import uuid + +LOGGER = logging.getLogger(__name__) + + +async def never(): + """ + Create a forever pending future. This future is not set and will never be set. 
+ This is useful for testing purposes. + """ + loop = asyncio.get_running_loop() + return await loop.create_future() + + +class CancelHandle: + + def __init__(self, task: asyncio.Task): + self._task = task + + def cancel(self) -> bool: + return self._task.cancel() + + def id(self) -> int: + return self._task.get_name() + +class JoinSet: + """ + A set of tasks that can be joined. + """ + + def __init__(self, *, loop: asyncio.AbstractEventLoop | None = None): + try: + self._loop = loop or asyncio.get_running_loop() + except RuntimeError: + # fallback for when no loop is running yet + self._loop = asyncio.get_event_loop() + self.tasks = set() + self.ready = set() + self.waker = set() + self.my_thread = threading.get_ident() + + def spawn(self, fut: asyncio.Future) -> CancelHandle: + """ + Spawn an awaitable (coroutine, task, or future) and add it to the set as a task. + """ + # Convert awaitable to task + task = self._loop.create_task(fut) + task.set_name(uuid.uuid4().int) + + def callback(task: asyncio.Task): + self.tasks.discard(task) + try: + waker = self.waker.pop() + if not waker.cancelled(): + waker.set_result(task) + except KeyError: + # No waker available, add task to the ready queue + pass + self.ready.add(task) + + task.add_done_callback(callback) + + # Add task to the set + self.tasks.add(task) + return CancelHandle(task) + + def __len__(self) -> int: + return len(self.tasks) + len(self.ready) + + def take(self) -> 'JoinSet': + """Takes ownership of the JoinSet and returns a new JoinSet. + """ + self.my_thread = threading.get_ident() + return self + + def join_next(self) -> asyncio.Future | None: + """ + Join the next task in the set if any, otherwise return None + + [Cacncel-Safety] + This method is cancel-safe. The future returned by this method can be cancelled + without affecting the JoinSet. The JoinSet will continue to track the tasks + and will not be affected by the cancellation of the future. 
+ """ + + # Check if the current thread is the same as the thread that created the JoinSet + if self.my_thread != threading.get_ident(): + raise RuntimeError("JoinSet.join_next must be called from the same thread that created the JoinSet") + + if not self.tasks and not self.ready: + return None + + fut = self._loop.create_future() + + # assert not self.waker, "JoinSet.join_next requires exclusive access to join set" + + while True: + try: + task = self.ready.pop() + task: asyncio.Task = task + fut.set_result(task) + return fut + except KeyError: + LOGGER.debug("No tasks ready") + # No tasks are ready + break + + # No tasks are ready + # Add the future to the set + + def deregister_waker(task): + try: + if task.cancelled(): + # If the task is cancelled, remove it from the set + self.waker.remove(fut) + else: + actual_task = task.result() + self.ready.discard(actual_task) + except KeyError: + pass + + fut.add_done_callback(deregister_waker) + + self.waker.add(fut) + + # Check if there are any tasks that are already ready + # in between the time we added the future and now + try: + task = self.ready.pop() + LOGGER.debug("Task ready in between") + except KeyError: + if not self.tasks: + # No tasks are ready, return the future + LOGGER.debug("No tasks ready, returning None") + return None + LOGGER.debug("No tasks ready, but tasks exist, returning future") + return fut + + # If there is a task ready, set the future to the result + try: + waker = self.waker.pop() + waker.set_result(task) + LOGGER.debug("Task ready, setting waker") + return waker + except KeyError: + LOGGER.debug("No waker available, adding task to the ready queue") + # No waker available, add task to the ready queue + fut = self._loop.create_future() + fut.set_result(task) + return fut \ No newline at end of file From 089d74b417d6eb48239cb148c076400c4cc89490 Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Fri, 23 May 2025 11:17:23 -0400 Subject: [PATCH 31/56] wip --- proto/fumarole_v2.proto | 1 + 
.../tests/test_aio_utils.py | 114 +++++++++++++++++- .../runtime/aio.py | 49 ++++---- .../yellowstone_fumarole_client/utils/aio.py | 76 ++---------- 4 files changed, 147 insertions(+), 93 deletions(-) diff --git a/proto/fumarole_v2.proto b/proto/fumarole_v2.proto index e424005..ecec1c7 100644 --- a/proto/fumarole_v2.proto +++ b/proto/fumarole_v2.proto @@ -132,6 +132,7 @@ message BlockchainEvent { optional string dead_error = 9; } + message BlockchainHistory { repeated BlockchainEvent events = 1; } diff --git a/python/yellowstone-fumarole-client/tests/test_aio_utils.py b/python/yellowstone-fumarole-client/tests/test_aio_utils.py index 7d9f362..fc3aac6 100644 --- a/python/yellowstone-fumarole-client/tests/test_aio_utils.py +++ b/python/yellowstone-fumarole-client/tests/test_aio_utils.py @@ -1,4 +1,5 @@ import logging +from typing import Coroutine import pytest import asyncio from yellowstone_fumarole_client.utils.aio import JoinSet, never @@ -73,30 +74,62 @@ async def test(): async def test_it_should_be_cancel_safe(): join_set = JoinSet() + barrier = asyncio.Event() async def test(): + await barrier.wait() return 1 handle = join_set.spawn(test()) - fut: asyncio.Future = join_set.join_next() + fut: Coroutine = join_set.join_next() assert fut assert len(join_set) == 1 logging.debug("Cancelling the future") - fut.cancel() - - assert fut.cancelled() + task = asyncio.create_task(fut) + assert task.cancel() assert len(join_set) == 1 logging.debug("Waiting for the future to finish") - fut = join_set.join_next() + fut: Coroutine = join_set.join_next() + + assert fut + barrier.set() + result = await fut + assert result.result() == 1 + assert len(join_set) == 0 + + +@pytest.mark.asyncio +async def test_it_should_be_cancel_safe_even_with_ready_future(): + join_set = JoinSet() + async def test(): + return 1 + handle = join_set.spawn(test()) + fut: Coroutine = join_set.join_next() assert fut + assert len(join_set) == 1 + logging.debug("Cancelling the future") + task = 
asyncio.create_task(fut) + await asyncio.sleep(1) + # assert not task.cancel() + # try: + # await task + # except asyncio.CancelledError: + # pass + # assert task.cancelled() + assert len(join_set) == 1 + logging.debug("Waiting for the future to finish") + fut: Coroutine = join_set.join_next() + + assert fut result = await fut assert result.result() == 1 assert len(join_set) == 0 + @pytest.mark.asyncio async def test_spawn_task_identity(): join_set = JoinSet() @@ -108,4 +141,73 @@ async def test(): fut = join_set.join_next() result = await fut assert result.get_name() == handle.id() - assert len(join_set) == 0 \ No newline at end of file + assert len(join_set) == 0 + + +@pytest.mark.asyncio +async def test_concurrent_spawn(): + join_set = JoinSet() + + barrier1 = asyncio.Event() + barrier2 = asyncio.Event() + async def test1(): + await barrier1.wait() + return 1 + + async def test2(): + await barrier2.wait() + return 10 + + ch1 = join_set.spawn(test1()) + ch2 = join_set.spawn(test2()) + + + assert len(join_set) == 2 + + join_co = join_set.join_next() + + barrier1.set() + barrier2.set() + + result1 = await join_co + assert result1.result() in [1, 10] + assert len(join_set) == 1 + result2 = await join_set.join_next() + assert result2.result() in [1, 10] + assert len(join_set) == 0 + assert result1.result() != result2.result() + + +@pytest.mark.asyncio +async def test_queue_is_cancel_safe(): + """ + Test that the queue is cancel safe. 
+ """ + queue = asyncio.Queue(maxsize=3) + queue.put_nowait(1) + queue.put_nowait(2) + queue.put_nowait(3) + + assert not queue.empty() + + async def consumer(queue): + print("Consumer waiting for item...") + try: + item = await queue.get() + print(f"Got item: {item}") + except asyncio.CancelledError: + print("Consumer was cancelled!") + + + assert queue.qsize() == 3 + task = asyncio.create_task(consumer(queue)) + + await asyncio.sleep(1) + task.cancel() + try: + await task + except asyncio.CancelledError: + pass + assert queue.qsize() == 3 + result = await queue.get() + assert result == 1 \ No newline at end of file diff --git a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py index 516db42..1558bd9 100644 --- a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py @@ -29,7 +29,7 @@ from yellowstone_fumarole_proto.fumarole_v2_pb2_grpc import ( Fumarole as GrpcFumaroleClient, ) -from yellowstone_fumarole_client.utils.aio import JoinSet, never +from yellowstone_fumarole_client.utils.aio import JoinSet, never, CancelHandle from yellowstone_fumarole_client.grpc_connectivity import FumaroleGrpcConnector import logging @@ -263,7 +263,7 @@ async def run(self): commit_deadline = self.last_commit + self.commit_interval await self.poll_history_if_needed() self.schedule_download_task_if_any() - + # asyncio queues are cancel safe tasks = [ asyncio.create_task(self.dragonsmouth_bidi.rx.get()), @@ -276,6 +276,10 @@ async def run(self): ), ] + select_group = select_group() + + branch_idx = select_group.add_branch(awaitable) + done, pending = await asyncio.wait( tasks, return_when=asyncio.FIRST_COMPLETED ) @@ -366,7 +370,6 @@ def __init__( self.outlet = outlet self.max_download_attempt_per_slot = max_download_attempt_by_slot self.subscribe_request = subscribe_request 
- self.task_counter = 0 def find_least_use_client(self) -> Optional[int]: max_permits = -1 @@ -380,10 +383,12 @@ def find_least_use_client(self) -> Optional[int]: async def handle_data_plane_task_result( self, task_id: int, result: DownloadTaskResult ): - task_meta = self.task_meta.pop(task_id, None) - if not task_meta: - raise RuntimeError("Missing task meta") - + LOGGER.debug(f"Handling data plane task result for task {task_id}") + try: + task_meta = self.task_meta.pop(task_id) + except KeyError as e: + LOGGER.error(f"Task {task_id} not found in task meta") + raise e slot = task_meta.request.slot conn = self.data_plane_channel_vec[task_meta.client_idx] conn.permits += 1 @@ -447,9 +452,9 @@ def spawn_grpc_download_task(self, client_idx: int, task_spec: DownloadTaskArgs) ), dragonsmouth_oulet=task_spec.dragonsmouth_outlet, ) - task_id = self.task_counter - self.task_counter += 1 - self.tasks.spawn(task.run(task_id)) + ch: CancelHandle = self.tasks.spawn(task.run()) + task_id = ch.id() + LOGGER.debug(f"Spawned download task {task_id} for slot {slot}") self.download_attempts[slot] = self.download_attempts.get(slot, 0) + 1 conn.permits -= 1 self.task_meta[task_id] = DataPlaneTaskMeta( @@ -473,9 +478,9 @@ async def run(self): else: tasks.append(asyncio.create_task(never())) - next_download_result_fut = self.tasks.join_next() - if next_download_result_fut: - tasks.append(next_download_result_fut) + next_download_result_co = self.tasks.join_next() + if next_download_result_co: + tasks.append(asyncio.create_task(next_download_result_co)) else: tasks.append(asyncio.create_task(never())) @@ -497,11 +502,11 @@ async def run(self): ) elif task == tasks[2]: # download_result_rx download_task = task.result() - task_id, result = download_task.result() + task_id = download_task.get_name() + result = download_task.result() await self.handle_data_plane_task_result(task_id, result) except asyncio.QueueShutDown as e: - break - LOGGER.debug("Closing GrpcDownloadTaskRunner loop") + 
return # GrpcDownloadBlockTaskRun @@ -543,7 +548,7 @@ def map_tonic_error_code_to_download_block_error( else: return DownloadBlockError(kind="Fatal", message=f"Unknown error: {code}") - async def run(self, task_id: int) -> tuple[int, DownloadTaskResult]: + async def run(self) -> DownloadTaskResult: request = DownloadBlockShard( blockchain_id=self.download_request.blockchain_id, block_uid=self.download_request.block_uid, @@ -554,7 +559,7 @@ async def run(self, task_id: int) -> tuple[int, DownloadTaskResult]: resp = self.client.DownloadBlock(request) except grpc.aio.AioRpcError as e: LOGGER.error(f"Download block error: {e}") - return task_id, DownloadTaskResult( + return DownloadTaskResult( kind="Err", slot=self.download_request.slot, err=self.map_tonic_error_code_to_download_block_error(e), @@ -572,7 +577,7 @@ async def run(self, task_id: int) -> tuple[int, DownloadTaskResult]: try: await self.dragonsmouth_oulet.put(update) except asyncio.QueueShutDown: - return task_id, DownloadTaskResult( + return DownloadTaskResult( kind="Err", slot=self.download_request.slot, err=DownloadBlockError( @@ -580,7 +585,7 @@ async def run(self, task_id: int) -> tuple[int, DownloadTaskResult]: ), ) case "block_shard_download_finish": - return task_id, DownloadTaskResult( + return DownloadTaskResult( kind="Ok", completed=CompletedDownloadBlockTask( slot=self.download_request.slot, @@ -593,7 +598,7 @@ async def run(self, task_id: int) -> tuple[int, DownloadTaskResult]: raise RuntimeError("Unexpected response kind: {unknown}") except grpc.aio.AioRpcError as e: LOGGER.error(f"Download block error: {e}") - return task_id, DownloadTaskResult( + return DownloadTaskResult( kind="Err", slot=self.download_request.slot, err=self.map_tonic_error_code_to_download_block_error( @@ -601,7 +606,7 @@ async def run(self, task_id: int) -> tuple[int, DownloadTaskResult]: ), ) - return task_id, DownloadTaskResult( + return DownloadTaskResult( kind="Err", slot=self.download_request.slot, 
err=DownloadBlockError(kind="FailedDownload", message="Failed download"), diff --git a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/utils/aio.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/utils/aio.py index f243f23..9feff78 100644 --- a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/utils/aio.py +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/utils/aio.py @@ -1,12 +1,9 @@ - - - - import asyncio from collections import deque import threading import logging +from typing import Coroutine import uuid LOGGER = logging.getLogger(__name__) @@ -45,7 +42,7 @@ def __init__(self, *, loop: asyncio.AbstractEventLoop | None = None): self._loop = asyncio.get_event_loop() self.tasks = set() self.ready = set() - self.waker = set() + self.waker = asyncio.Event() self.my_thread = threading.get_ident() def spawn(self, fut: asyncio.Future) -> CancelHandle: @@ -58,14 +55,8 @@ def spawn(self, fut: asyncio.Future) -> CancelHandle: def callback(task: asyncio.Task): self.tasks.discard(task) - try: - waker = self.waker.pop() - if not waker.cancelled(): - waker.set_result(task) - except KeyError: - # No waker available, add task to the ready queue - pass self.ready.add(task) + self.waker.set() task.add_done_callback(callback) @@ -82,7 +73,7 @@ def take(self) -> 'JoinSet': self.my_thread = threading.get_ident() return self - def join_next(self) -> asyncio.Future | None: + def join_next(self) -> Coroutine: """ Join the next task in the set if any, otherwise return None @@ -99,61 +90,16 @@ def join_next(self) -> asyncio.Future | None: if not self.tasks and not self.ready: return None - fut = self._loop.create_future() + self.waker.clear() # assert not self.waker, "JoinSet.join_next requires exclusive access to join set" - while True: - try: - task = self.ready.pop() - task: asyncio.Task = task - fut.set_result(task) - return fut - except KeyError: - LOGGER.debug("No tasks ready") - # No tasks are ready - break - - # No 
tasks are ready - # Add the future to the set - - def deregister_waker(task): - try: - if task.cancelled(): - # If the task is cancelled, remove it from the set - self.waker.remove(fut) - else: - actual_task = task.result() - self.ready.discard(actual_task) - except KeyError: - pass - - fut.add_done_callback(deregister_waker) - - self.waker.add(fut) + async def my_fut(): + await self.waker.wait() + return self.ready.pop() # Check if there are any tasks that are already ready # in between the time we added the future and now - try: - task = self.ready.pop() - LOGGER.debug("Task ready in between") - except KeyError: - if not self.tasks: - # No tasks are ready, return the future - LOGGER.debug("No tasks ready, returning None") - return None - LOGGER.debug("No tasks ready, but tasks exist, returning future") - return fut - - # If there is a task ready, set the future to the result - try: - waker = self.waker.pop() - waker.set_result(task) - LOGGER.debug("Task ready, setting waker") - return waker - except KeyError: - LOGGER.debug("No waker available, adding task to the ready queue") - # No waker available, add task to the ready queue - fut = self._loop.create_future() - fut.set_result(task) - return fut \ No newline at end of file + if self.ready: + self.waker.set() + return my_fut() \ No newline at end of file From e025e8ebdda381459f4b04a9265a1a27c8cdfc2a Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Mon, 26 May 2025 16:49:46 -0400 Subject: [PATCH 32/56] fixed python client --- .../tests/test_aio_utils.py | 213 ----------- .../tests/test_fumarole_client_intg.py | 67 +++- .../yellowstone_fumarole_client/__init__.py | 72 +--- .../grpc_connectivity.py | 8 +- .../runtime/aio.py | 331 ++++++------------ .../yellowstone_fumarole_client/utils/aio.py | 95 +---- 6 files changed, 190 insertions(+), 596 deletions(-) delete mode 100644 python/yellowstone-fumarole-client/tests/test_aio_utils.py diff --git a/python/yellowstone-fumarole-client/tests/test_aio_utils.py 
b/python/yellowstone-fumarole-client/tests/test_aio_utils.py deleted file mode 100644 index fc3aac6..0000000 --- a/python/yellowstone-fumarole-client/tests/test_aio_utils.py +++ /dev/null @@ -1,213 +0,0 @@ -import logging -from typing import Coroutine -import pytest -import asyncio -from yellowstone_fumarole_client.utils.aio import JoinSet, never - -# Helper coroutine for testing -@pytest.mark.asyncio -async def test_spawn_and_join_single_task(): - """Test spawning and joining a single task.""" - join_set = JoinSet() - - - async def test(): - await asyncio.sleep(0.1) - return 1 - - - handle = join_set.spawn(test()) - - assert handle - await asyncio.sleep(1) - - assert len(join_set) == 1 - - result = join_set.join_next() - assert result is not None - - result = await result - assert result.result() == 1 - - -# Helper coroutine for testing -@pytest.mark.asyncio -async def test_empty_joinset(): - join_set = JoinSet() - - maybe = join_set.join_next() - assert maybe is None - assert len(join_set) == 0 - - -@pytest.mark.asyncio -async def test_it_should_handle_canceling_spawned_task(): - join_set = JoinSet() - - async def test(): - await asyncio.sleep(100) - return 1 - - handle = join_set.spawn(test()) - # 1st case : Test cancel before join_next - handle.cancel() - - assert len(join_set) == 1 - result = await join_set.join_next() - assert result.cancelled() - assert len(join_set) == 0 - - # 2nd case : Test cancel after join_next - - handle = join_set.spawn(test()) - fut = join_set.join_next() - assert fut - assert len(join_set) == 1 - - handle.cancel() - result = await fut - assert result.cancelled() - assert len(join_set) == 0 - - -@pytest.mark.asyncio -async def test_it_should_be_cancel_safe(): - join_set = JoinSet() - - barrier = asyncio.Event() - async def test(): - await barrier.wait() - return 1 - - handle = join_set.spawn(test()) - fut: Coroutine = join_set.join_next() - assert fut - assert len(join_set) == 1 - logging.debug("Cancelling the future") - task = 
asyncio.create_task(fut) - assert task.cancel() - assert len(join_set) == 1 - - logging.debug("Waiting for the future to finish") - fut: Coroutine = join_set.join_next() - - assert fut - barrier.set() - result = await fut - assert result.result() == 1 - assert len(join_set) == 0 - - -@pytest.mark.asyncio -async def test_it_should_be_cancel_safe_even_with_ready_future(): - join_set = JoinSet() - async def test(): - return 1 - - handle = join_set.spawn(test()) - fut: Coroutine = join_set.join_next() - assert fut - assert len(join_set) == 1 - logging.debug("Cancelling the future") - task = asyncio.create_task(fut) - await asyncio.sleep(1) - # assert not task.cancel() - # try: - # await task - # except asyncio.CancelledError: - # pass - # assert task.cancelled() - assert len(join_set) == 1 - - logging.debug("Waiting for the future to finish") - fut: Coroutine = join_set.join_next() - - assert fut - result = await fut - assert result.result() == 1 - assert len(join_set) == 0 - - - - -@pytest.mark.asyncio -async def test_spawn_task_identity(): - join_set = JoinSet() - - async def test(): - return 1 - - handle = join_set.spawn(test()) - fut = join_set.join_next() - result = await fut - assert result.get_name() == handle.id() - assert len(join_set) == 0 - - -@pytest.mark.asyncio -async def test_concurrent_spawn(): - join_set = JoinSet() - - barrier1 = asyncio.Event() - barrier2 = asyncio.Event() - async def test1(): - await barrier1.wait() - return 1 - - async def test2(): - await barrier2.wait() - return 10 - - ch1 = join_set.spawn(test1()) - ch2 = join_set.spawn(test2()) - - - assert len(join_set) == 2 - - join_co = join_set.join_next() - - barrier1.set() - barrier2.set() - - result1 = await join_co - assert result1.result() in [1, 10] - assert len(join_set) == 1 - result2 = await join_set.join_next() - assert result2.result() in [1, 10] - assert len(join_set) == 0 - assert result1.result() != result2.result() - - -@pytest.mark.asyncio -async def 
test_queue_is_cancel_safe(): - """ - Test that the queue is cancel safe. - """ - queue = asyncio.Queue(maxsize=3) - queue.put_nowait(1) - queue.put_nowait(2) - queue.put_nowait(3) - - assert not queue.empty() - - async def consumer(queue): - print("Consumer waiting for item...") - try: - item = await queue.get() - print(f"Got item: {item}") - except asyncio.CancelledError: - print("Consumer was cancelled!") - - - assert queue.qsize() == 3 - task = asyncio.create_task(consumer(queue)) - - await asyncio.sleep(1) - task.cancel() - try: - await task - except asyncio.CancelledError: - pass - assert queue.qsize() == 3 - result = await queue.get() - assert result == 1 \ No newline at end of file diff --git a/python/yellowstone-fumarole-client/tests/test_fumarole_client_intg.py b/python/yellowstone-fumarole-client/tests/test_fumarole_client_intg.py index e8931ff..cc8ee5e 100644 --- a/python/yellowstone-fumarole-client/tests/test_fumarole_client_intg.py +++ b/python/yellowstone-fumarole-client/tests/test_fumarole_client_intg.py @@ -1,9 +1,10 @@ +from typing import Optional import uuid import pytest import asyncio import logging from os import environ - +from collections import defaultdict from yellowstone_fumarole_client.config import FumaroleConfig from yellowstone_fumarole_client import FumaroleClient from yellowstone_fumarole_proto.fumarole_v2_pb2 import CreateConsumerGroupRequest @@ -15,6 +16,14 @@ SubscribeRequestFilterEntry, SubscribeRequestFilterSlots, ) +from yellowstone_fumarole_proto.geyser_pb2 import ( + SubscribeUpdate, + SubscribeUpdateTransaction, + SubscribeUpdateBlockMeta, + SubscribeUpdateAccount, + SubscribeUpdateEntry, + SubscribeUpdateSlot, +) @pytest.fixture @@ -100,23 +109,55 @@ async def test_dragonsmouth_adapter(fumarole_config): slots={"fumarole": SubscribeRequestFilterSlots()}, ), ) - + logging.warning("starting session") dragonsmouth_source = session.source handle = session.fumarole_handle - while True: - - tasks = [ - 
asyncio.create_task(dragonsmouth_source.get()), - handle - ] + class BlockConstruction: + def __init__(self): + self.tx_vec: list[SubscribeUpdateTransaction] = [] + self.entry_vec: list[SubscribeUpdateEntry] = [] + self.account_vec: list[SubscribeUpdateAccount] = [] + self.meta: Optional[SubscribeUpdateBlockMeta] = None + + def check_block_integrity(self) -> bool: + assert self.meta is not None, "Block meta is not set" + return ( + len(self.tx_vec) == self.meta.executed_transaction_count + and len(self.entry_vec) == self.meta.entries_count + ) + + block_map = defaultdict(BlockConstruction) + while True: + tasks = [asyncio.create_task(dragonsmouth_source.get()), handle] done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED) - for t in done: if tasks[0] == t: - result = t.result() - logging.debug(f"Consumed: {type(result)}") + result: SubscribeUpdate = t.result() + if result.HasField("block_meta"): + block_meta: SubscribeUpdateBlockMeta = result.block_meta + slot = block_meta.slot + block_map[slot].meta = block_meta + elif result.HasField("transaction"): + tx: SubscribeUpdateTransaction = result.transaction + slot = tx.slot + block = block_map[slot] + block.tx_vec.append(tx) + elif result.HasField("account"): + account: SubscribeUpdateAccount = result.account + slot = account.slot + block = block_map[slot] + block.account_vec.append(account) + elif result.HasField("entry"): + entry: SubscribeUpdateEntry = result.entry + slot = entry.slot + block = block_map[slot] + block.entry_vec.append(entry) + elif result.HasField("slot"): + result: SubscribeUpdateSlot = result.slot + block = block_map[result.slot] + assert block.check_block_integrity() + return else: result = t.result() - logging.debug(f"Handle: {type(result)}") - return + raise RuntimeError("failed to get dragonsmouth source: %s" % result) diff --git a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py 
b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py index a46fdfc..856c517 100644 --- a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py @@ -8,13 +8,11 @@ from . import config from yellowstone_fumarole_client.runtime.aio import ( AsyncioFumeDragonsmouthRuntime, - GrpcDownloadTaskRunner, - DownloadTaskRunnerChannels, - DataPlaneConn, FumaroleSM, DEFAULT_GC_INTERVAL, DEFAULT_SLOT_MEMORY_RETENTION, DragonsmouthSubscribeRequestBidi, + GrpcSlotDownloader, ) from yellowstone_fumarole_proto.geyser_pb2 import SubscribeRequest, SubscribeUpdate from yellowstone_fumarole_proto.fumarole_v2_pb2 import ( @@ -48,7 +46,7 @@ # Constants DEFAULT_DRAGONSMOUTH_CAPACITY = 10000 -DEFAULT_COMMIT_INTERVAL = 10.0 # seconds +DEFAULT_COMMIT_INTERVAL = 5.0 # seconds DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT = 3 DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP = 10 @@ -136,7 +134,6 @@ async def dragonsmouth_subscribe_with_config( ) -> DragonsmouthAdapterSession: """Subscribe to a dragonsmouth stream with custom configuration.""" dragonsmouth_outlet = asyncio.Queue(maxsize=config.data_channel_capacity) - dragonsmouth_inlet = asyncio.Queue(maxsize=config.data_channel_capacity) fume_control_plane_q = asyncio.Queue(maxsize=100) initial_join = JoinControlPlane(consumer_group_name=consumer_group_name) @@ -155,22 +152,21 @@ async def control_plane_sink(): except asyncio.QueueShutDown: break - fume_control_plane_stream_rx: grpc.aio.StreamStreamMultiCallable = ( self.stub.Subscribe(control_plane_sink()) ) - control_response: ControlResponse = await fume_control_plane_stream_rx.read() init = control_response.init if init is None: raise ValueError(f"Unexpected initial response: {control_response}") - + # Once we have the initial response, we can spin a task to read from the stream # and put the updates into the queue. 
# This is a bit of a hack, but we need a Queue not a StreamStreamMultiCallable # because Queue are cancel-safe, while Stream are not, or at least didn't find any docs about it. fume_control_plane_rx_q = asyncio.Queue(maxsize=100) + async def control_plane_source(): while True: try: @@ -178,9 +174,8 @@ async def control_plane_source(): await fume_control_plane_rx_q.put(update) except asyncio.QueueShutDown: break - - _cp_src_task = asyncio.create_task(control_plane_source()) + _cp_src_task = asyncio.create_task(control_plane_source()) FumaroleClient.logger.debug(f"Control response: {control_response}") @@ -192,37 +187,15 @@ async def control_plane_source(): subscribe_request_queue = asyncio.Queue(maxsize=100) dm_bidi = DragonsmouthSubscribeRequestBidi(rx=subscribe_request_queue) - data_plane_channel_vec = [] - for _ in range(1): # TODO: support multiple connections - client = await self.connector.connect() - conn = DataPlaneConn( - permits=config.concurrent_download_limit_per_tcp, client=client, rev=0 - ) - data_plane_channel_vec.append(conn) - - download_task_runner_cnc_queue = asyncio.Queue(maxsize=10) - download_task_queue = asyncio.Queue(maxsize=10) - download_result_queue = asyncio.Queue(maxsize=10) - - grpc_download_task_runner = GrpcDownloadTaskRunner( - data_plane_channel_vec=data_plane_channel_vec, - connector=self.connector, - cnc_rx=download_task_runner_cnc_queue, - download_task_queue=download_task_queue, - outlet=download_result_queue, - max_download_attempt_by_slot=config.max_failed_slot_download_attempt, - subscribe_request=request, - ) + data_plane_client = await self.connector.connect() - download_task_runner_chans = DownloadTaskRunnerChannels( - download_task_queue_tx=download_task_queue, - cnc_tx=download_task_runner_cnc_queue, - download_result_rx=download_result_queue, + grpc_slot_downloader = GrpcSlotDownloader( + client=data_plane_client, ) rt = AsyncioFumeDragonsmouthRuntime( sm=sm, - download_task_runner_chans=download_task_runner_chans, + 
slot_downloader=grpc_slot_downloader, dragonsmouth_bidi=dm_bidi, subscribe_request=request, consumer_group_name=consumer_group_name, @@ -231,35 +204,14 @@ async def control_plane_source(): dragonsmouth_outlet=dragonsmouth_outlet, commit_interval=config.commit_interval, gc_interval=config.gc_interval, + max_concurrent_download=config.concurrent_download_limit_per_tcp, ) - download_task_runner_task = asyncio.create_task(grpc_download_task_runner.run()) - rt_task = asyncio.create_task(rt.run()) - - async def runtime_fut(): - tasks = [download_task_runner_task, rt_task] - done, pending = await asyncio.wait( - tasks, return_when=asyncio.FIRST_COMPLETED - ) - - for task in pending: - task.cancel() - - for task in done: - if task == tasks[0]: - FumaroleClient.logger.info( - f"Download task runner completed with {task.result()}" - ) - elif task == tasks[1]: - FumaroleClient.logger.info( - f"Runtime task completed with {task.result()}" - ) - - fumarole_handle = asyncio.create_task(runtime_fut()) + fumarole_handle = asyncio.create_task(rt.run()) FumaroleClient.logger.debug(f"Fumarole handle created: {fumarole_handle}") return DragonsmouthAdapterSession( sink=subscribe_request_queue, - source=dragonsmouth_inlet, + source=dragonsmouth_outlet, fumarole_handle=fumarole_handle, ) diff --git a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/grpc_connectivity.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/grpc_connectivity.py index 3cf4c9e..70b09db 100644 --- a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/grpc_connectivity.py +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/grpc_connectivity.py @@ -74,7 +74,7 @@ def __init__(self, metadata): async def intercept_unary_unary( self, continuation, client_call_details: grpc.aio.ClientCallDetails, request ): - logging.debug("intercept_unary_unary") + # logging.debug("intercept_unary_unary") new_details = client_call_details._replace( 
metadata=self._merge_metadata(client_call_details.metadata) ) @@ -83,7 +83,7 @@ async def intercept_unary_unary( async def intercept_unary_stream( self, continuation, client_call_details: grpc.aio.ClientCallDetails, request ): - logging.debug("intercept_unary_stream") + # logging.debug("intercept_unary_stream") new_details = client_call_details._replace( metadata=self._merge_metadata(client_call_details.metadata) ) @@ -92,7 +92,7 @@ async def intercept_unary_stream( async def intercept_stream_unary( self, continuation, client_call_details: grpc.aio.ClientCallDetails, request ): - logging.debug("intercept_stream_unary") + # logging.debug("intercept_stream_unary") new_details = client_call_details._replace( metadata=self._merge_metadata(client_call_details.metadata) ) @@ -101,7 +101,7 @@ async def intercept_stream_unary( async def intercept_stream_stream( self, continuation, client_call_details: grpc.aio.ClientCallDetails, request ): - logging.debug("intercept_stream_stream") + # logging.debug("intercept_stream_stream") new_details = client_call_details._replace( metadata=self._merge_metadata(client_call_details.metadata) ) diff --git a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py index 1558bd9..fcbf94c 100644 --- a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py @@ -1,8 +1,10 @@ # DataPlaneConn +from abc import abstractmethod, ABC import asyncio +import uuid import grpc from typing import Optional, List -from collections import deque +from collections import abc, deque from dataclasses import dataclass import time from yellowstone_fumarole_client.runtime.state_machine import ( @@ -10,7 +12,6 @@ FumeDownloadRequest, FumeOffset, FumeShardIdx, - CommitmentLevel, ) from yellowstone_fumarole_proto.geyser_pb2 import ( SubscribeRequest, @@ -29,13 
+30,13 @@ from yellowstone_fumarole_proto.fumarole_v2_pb2_grpc import ( Fumarole as GrpcFumaroleClient, ) -from yellowstone_fumarole_client.utils.aio import JoinSet, never, CancelHandle +from yellowstone_fumarole_client.utils.aio import Interval from yellowstone_fumarole_client.grpc_connectivity import FumaroleGrpcConnector import logging # Constants -DEFAULT_GC_INTERVAL = 100 +DEFAULT_GC_INTERVAL = 5 DEFAULT_SLOT_MEMORY_RETENTION = 10000 @@ -82,13 +83,22 @@ class DragonsmouthSubscribeRequestBidi: LOGGER = logging.getLogger(__name__) +class AsyncSlotDownloader(ABC): + + @abstractmethod + async def run_download( + self, subscribe_request: SubscribeRequest, spec: "DownloadTaskArgs" + ) -> DownloadTaskResult: + pass + + # TokioFumeDragonsmouthRuntime class AsyncioFumeDragonsmouthRuntime: def __init__( self, sm: FumaroleSM, - download_task_runner_chans: "DownloadTaskRunnerChannels", + slot_downloader: AsyncSlotDownloader, dragonsmouth_bidi: DragonsmouthSubscribeRequestBidi, subscribe_request: SubscribeRequest, consumer_group_name: str, @@ -97,9 +107,10 @@ def __init__( dragonsmouth_outlet: asyncio.Queue, commit_interval: float, # in seconds gc_interval: int, + max_concurrent_download: int = 10, ): self.sm = sm - self.download_task_runner_chans = download_task_runner_chans + self.slot_downloader: AsyncSlotDownloader = slot_downloader self.dragonsmouth_bidi = dragonsmouth_bidi self.subscribe_request = subscribe_request self.consumer_group_name = consumer_group_name @@ -107,8 +118,10 @@ def __init__( self.control_plane_rx = control_plane_rx_q self.dragonsmouth_outlet = dragonsmouth_outlet self.commit_interval = commit_interval - self.last_commit = time.time() self.gc_interval = gc_interval + self.max_concurrent_download = max_concurrent_download + self.download_tasks = dict() + self.inner_runtime_channel: asyncio.Queue = asyncio.Queue() def build_poll_history_cmd( self, from_offset: Optional[FumeOffset] @@ -147,24 +160,32 @@ def commitment_level(self): def 
schedule_download_task_if_any(self): while True: - download_task_queue_tx = self.download_task_runner_chans.download_task_queue_tx - assert download_task_queue_tx.maxsize == 10 - if download_task_queue_tx.full(): + LOGGER.debug("Checking for download tasks to schedule") + if len(self.download_tasks) >= self.max_concurrent_download: break + # Pop a slot to download from the state machine + LOGGER.debug("Popping slot to download") download_request = self.sm.pop_slot_to_download(self.commitment_level()) if not download_request: + LOGGER.debug("No download request available") break + + LOGGER.debug(f"Download request for slot {download_request.slot} popped") + assert ( + download_request.blockchain_id + ), "Download request must have a blockchain ID" download_task_args = DownloadTaskArgs( download_request=download_request, dragonsmouth_outlet=self.dragonsmouth_outlet, ) - LOGGER.debug(f"Scheduling download task for slot {download_request.slot}") - asyncio.create_task( - download_task_queue_tx.put( - download_task_args - ) + + coro = self.slot_downloader.run_download( + self.subscribe_request, download_task_args ) + donwload_task = asyncio.create_task(coro) + self.download_tasks[donwload_task] = download_request + LOGGER.debug(f"Scheduling download task for slot {download_request.slot}") def handle_download_result(self, download_result: DownloadTaskResult): if download_result.kind == "Ok": @@ -222,7 +243,6 @@ async def drain_slot_status(self): dead_error=slot_status.dead_error, ), ) - LOGGER.debug(f"Sending dragonsmouth update: {update}") try: await self.dragonsmouth_outlet.put(update) except asyncio.QueueFull: @@ -239,11 +259,8 @@ async def handle_control_plane_resp( self.handle_control_response(result) return True - async def handle_new_subscribe_request(self, subscribe_request: SubscribeRequest): + def handle_new_subscribe_request(self, subscribe_request: SubscribeRequest): self.subscribe_request = subscribe_request - await self.download_task_runner_chans.cnc_tx.put( 
- DownloadTaskRunnerCommand.UpdateSubscribeRequest(subscribe_request) - ) async def run(self): LOGGER.debug(f"Fumarole runtime starting...") @@ -252,60 +269,69 @@ async def run(self): await self.force_commit_offset() LOGGER.debug("Initial commit offset command sent") ticks = 0 - while True: + + task_map = { + asyncio.create_task(self.dragonsmouth_bidi.rx.get()): "dragonsmouth_bidi", + asyncio.create_task(self.control_plane_rx.get()): "control_plane_rx", + asyncio.create_task(Interval(self.commit_interval).tick()): "commit_tick", + } + + pending = set(task_map.keys()) + while pending: ticks += 1 LOGGER.debug(f"Runtime loop tick") if ticks % self.gc_interval == 0: LOGGER.debug("Running garbage collection") self.sm.gc() ticks = 0 - - commit_deadline = self.last_commit + self.commit_interval + LOGGER.debug(f"Polling history if needed") await self.poll_history_if_needed() + LOGGER.debug("Scheduling download tasks if any") self.schedule_download_task_if_any() - - # asyncio queues are cancel safe - tasks = [ - asyncio.create_task(self.dragonsmouth_bidi.rx.get()), - asyncio.create_task(self.control_plane_rx.get()), - asyncio.create_task( - self.download_task_runner_chans.download_result_rx.get() - ), - asyncio.create_task( - asyncio.sleep(max(0, commit_deadline - time.time())) - ), - ] - - select_group = select_group() - - branch_idx = select_group.add_branch(awaitable) + for t in self.download_tasks.keys(): + pending.add(t) + task_map[t] = "download_task" + download_task_inflight = len(self.download_tasks) + LOGGER.debug( + f"Current download tasks in flight: {download_task_inflight} / {self.max_concurrent_download}" + ) done, pending = await asyncio.wait( - tasks, return_when=asyncio.FIRST_COMPLETED + pending, return_when=asyncio.FIRST_COMPLETED ) - for task in pending: - task.cancel() - - for task in done: - try: - result = task.result() - if task == tasks[0]: # dragonsmouth_bidi.rx + for t in done: + result = t.result() + name = task_map.pop(t) + match name: + case 
"dragonsmouth_bidi": LOGGER.debug("Dragonsmouth subscribe request received") - await self.handle_new_subscribe_request(result) - elif task == tasks[1]: # control_plane_rx + self.handle_new_subscribe_request(result.subscribe_request) + new_task = asyncio.create_task(self.dragonsmouth_bidi.rx.get()) + task_map[new_task] = "dragonsmouth_bidi" + pending.add(new_task) + pass + case "control_plane_rx": + LOGGER.debug("Control plane response received") if not await self.handle_control_plane_resp(result): LOGGER.debug("Control plane error") return - elif task == tasks[2]: # download_result_rx + new_task = asyncio.create_task(self.control_plane_rx.get()) + task_map[new_task] = "control_plane_rx" + pending.add(new_task) + case "download_task": + LOGGER.debug("Download task result received") + assert self.download_tasks.pop(t) self.handle_download_result(result) - elif task == tasks[3]: # sleep - LOGGER.debug("Commit deadline reached") + case "commit_tick": + LOGGER.debug("Commit tick reached") await self.commit_offset() - except asyncio.CancelledError: - pass - except Exception as e: - LOGGER.error(f"Error: {e}") - raise e + new_task = asyncio.create_task( + Interval(self.commit_interval).tick() + ) + task_map[new_task] = "commit_tick" + pending.add(new_task) + case unknown: + raise RuntimeError(f"Unexpected task name: {unknown}") await self.drain_slot_status() @@ -338,175 +364,32 @@ class DownloadTaskArgs: dragonsmouth_outlet: asyncio.Queue -# DataPlaneTaskMeta -@dataclass -class DataPlaneTaskMeta: - client_idx: int - request: FumeDownloadRequest - dragonsmouth_outlet: asyncio.Queue - scheduled_at: float - client_rev: int - +class GrpcSlotDownloader(AsyncSlotDownloader): -# GrpcDownloadTaskRunner -class GrpcDownloadTaskRunner: def __init__( self, - data_plane_channel_vec: List[DataPlaneConn], - connector: FumaroleGrpcConnector, - cnc_rx: asyncio.Queue, - download_task_queue: asyncio.Queue, - outlet: asyncio.Queue, - max_download_attempt_by_slot: int, - subscribe_request: 
SubscribeRequest, + client: GrpcFumaroleClient, ): - self.data_plane_channel_vec = data_plane_channel_vec - self.connector = connector - self.tasks = JoinSet() - self.task_meta = {} - self.cnc_rx = cnc_rx - self.download_task_queue = download_task_queue - self.download_attempts = {} - self.outlet = outlet - self.max_download_attempt_per_slot = max_download_attempt_by_slot - self.subscribe_request = subscribe_request + self.client = client - def find_least_use_client(self) -> Optional[int]: - max_permits = -1 - best_idx = None - for idx, conn in enumerate(self.data_plane_channel_vec): - if conn.has_permit() and conn.permits > max_permits: - max_permits = conn.permits - best_idx = idx - return best_idx - - async def handle_data_plane_task_result( - self, task_id: int, result: DownloadTaskResult - ): - LOGGER.debug(f"Handling data plane task result for task {task_id}") - try: - task_meta = self.task_meta.pop(task_id) - except KeyError as e: - LOGGER.error(f"Task {task_id} not found in task meta") - raise e - slot = task_meta.request.slot - conn = self.data_plane_channel_vec[task_meta.client_idx] - conn.permits += 1 - - if result.kind == "Ok": - completed = result.completed - elapsed = time.time() - task_meta.scheduled_at - LOGGER.debug( - f"Downloaded slot {slot} in {elapsed}s, total events: {completed.total_event_downloaded}" - ) - self.download_attempts.pop(slot, None) - await self.outlet.put(result) - else: - err = result.err - download_attempt = self.download_attempts.get(slot, 0) - if err.kind in ("Disconnected", "FailedDownload"): - if download_attempt >= self.max_download_attempt_per_slot: - LOGGER.error( - f"Download slot {slot} failed: {err.message}, max attempts reached" - ) - await self.outlet.put( - DownloadTaskResult(kind="Err", slot=slot, err=err) - ) - return - remaining = self.max_download_attempt_per_slot - download_attempt - LOGGER.debug( - f"Download slot {slot} failed: {err.message}, remaining attempts: {remaining}" - ) - if task_meta.client_rev == 
conn.rev: - conn.client = await self.connector.connect() - conn.rev += 1 - LOGGER.debug(f"Download slot {slot} failed, rescheduling for retry...") - task_spec = DownloadTaskArgs( - download_request=task_meta.request, - dragonsmouth_outlet=task_meta.dragonsmouth_outlet, - ) - self.spawn_grpc_download_task(task_meta.client_idx, task_spec) - elif err.kind == "OutletDisconnected": - LOGGER.debug("Dragonsmouth outlet disconnected") - elif err.kind == "BlockShardNotFound": - LOGGER.error(f"Slot {slot} not found") - await self.outlet.put( - DownloadTaskResult(kind="Err", slot=slot, err=err) - ) - elif err.kind == "Fatal": - raise RuntimeError(f"Fatal error: {err.message}") - - def spawn_grpc_download_task(self, client_idx: int, task_spec: DownloadTaskArgs): - conn = self.data_plane_channel_vec[client_idx] - client = conn.client # Clone not needed in Python - download_request = task_spec.download_request - slot = download_request.slot - task = GrpcDownloadBlockTaskRun( - download_request=download_request, - client=client, + async def run_download( + self, subscribe_request: SubscribeRequest, spec: DownloadTaskArgs + ) -> DownloadTaskResult: + + download_task = GrpcDownloadBlockTaskRun( + download_request=spec.download_request, + client=self.client, filters=BlockFilters( - accounts=self.subscribe_request.accounts, - transactions=self.subscribe_request.transactions, - entries=self.subscribe_request.entry, - blocks_meta=self.subscribe_request.blocks_meta, + accounts=subscribe_request.accounts, + transactions=subscribe_request.transactions, + entries=subscribe_request.entry, + blocks_meta=subscribe_request.blocks_meta, ), - dragonsmouth_oulet=task_spec.dragonsmouth_outlet, - ) - ch: CancelHandle = self.tasks.spawn(task.run()) - task_id = ch.id() - LOGGER.debug(f"Spawned download task {task_id} for slot {slot}") - self.download_attempts[slot] = self.download_attempts.get(slot, 0) + 1 - conn.permits -= 1 - self.task_meta[task_id] = DataPlaneTaskMeta( - client_idx=client_idx, - 
request=download_request, - dragonsmouth_outlet=task_spec.dragonsmouth_outlet, - scheduled_at=time.time(), - client_rev=conn.rev, + dragonsmouth_oulet=spec.dragonsmouth_outlet, ) - def handle_control_command(self, cmd: DownloadTaskRunnerCommand): - if cmd.kind == "UpdateSubscribeRequest": - self.subscribe_request = cmd.subscribe_request - - async def run(self): - while True: - maybe_available_client_idx = self.find_least_use_client() - tasks = [asyncio.create_task(self.cnc_rx.get())] - if maybe_available_client_idx is not None: - tasks.append(asyncio.create_task(self.download_task_queue.get())) - else: - tasks.append(asyncio.create_task(never())) - - next_download_result_co = self.tasks.join_next() - if next_download_result_co: - tasks.append(asyncio.create_task(next_download_result_co)) - else: - tasks.append(asyncio.create_task(never())) - - assert len(tasks) == 3 - if tasks: - done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED) - for task in pending: - task.cancel() - - for task in done: - try: - result = task.result() - if task == tasks[0]: # cnc_rx - self.handle_control_command(result) - elif task == tasks[1]: # download_task_queue - assert maybe_available_client_idx is not None - self.spawn_grpc_download_task( - maybe_available_client_idx, result - ) - elif task == tasks[2]: # download_result_rx - download_task = task.result() - task_id = download_task.get_name() - result = download_task.result() - await self.handle_data_plane_task_result(task_id, result) - except asyncio.QueueShutDown as e: - return + LOGGER.debug(f"Running download task for slot {spec.download_request.slot}") + return await download_task.run() # GrpcDownloadBlockTaskRun @@ -556,6 +439,9 @@ async def run(self) -> DownloadTaskResult: blockFilters=self.filters, ) try: + LOGGER.debug( + f"Requesting download for block {self.download_request.block_uid.hex()} at slot {self.download_request.slot}" + ) resp = self.client.DownloadBlock(request) except grpc.aio.AioRpcError 
as e: LOGGER.error(f"Download block error: {e}") @@ -577,14 +463,19 @@ async def run(self) -> DownloadTaskResult: try: await self.dragonsmouth_oulet.put(update) except asyncio.QueueShutDown: + LOGGER.error("Dragonsmouth outlet is disconnected") return DownloadTaskResult( kind="Err", slot=self.download_request.slot, err=DownloadBlockError( - kind="OutletDisconnected", message="Outlet disconnected" + kind="OutletDisconnected", + message="Outlet disconnected", ), ) case "block_shard_download_finish": + LOGGER.debug( + f"Download finished for block {self.download_request.block_uid.hex()} at slot {self.download_request.slot}" + ) return DownloadTaskResult( kind="Ok", completed=CompletedDownloadBlockTask( @@ -601,9 +492,7 @@ async def run(self) -> DownloadTaskResult: return DownloadTaskResult( kind="Err", slot=self.download_request.slot, - err=self.map_tonic_error_code_to_download_block_error( - e - ), + err=self.map_tonic_error_code_to_download_block_error(e), ) return DownloadTaskResult( diff --git a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/utils/aio.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/utils/aio.py index 9feff78..b332593 100644 --- a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/utils/aio.py +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/utils/aio.py @@ -1,10 +1,6 @@ - import asyncio -from collections import deque -import threading import logging -from typing import Coroutine -import uuid +from typing import Any, Coroutine LOGGER = logging.getLogger(__name__) @@ -18,88 +14,17 @@ async def never(): return await loop.create_future() -class CancelHandle: - - def __init__(self, task: asyncio.Task): - self._task = task - - def cancel(self) -> bool: - return self._task.cancel() - - def id(self) -> int: - return self._task.get_name() - -class JoinSet: - """ - A set of tasks that can be joined. 
- """ - - def __init__(self, *, loop: asyncio.AbstractEventLoop | None = None): - try: - self._loop = loop or asyncio.get_running_loop() - except RuntimeError: - # fallback for when no loop is running yet - self._loop = asyncio.get_event_loop() - self.tasks = set() - self.ready = set() - self.waker = asyncio.Event() - self.my_thread = threading.get_ident() - - def spawn(self, fut: asyncio.Future) -> CancelHandle: - """ - Spawn an awaitable (coroutine, task, or future) and add it to the set as a task. - """ - # Convert awaitable to task - task = self._loop.create_task(fut) - task.set_name(uuid.uuid4().int) - - def callback(task: asyncio.Task): - self.tasks.discard(task) - self.ready.add(task) - self.waker.set() - - task.add_done_callback(callback) - - # Add task to the set - self.tasks.add(task) - return CancelHandle(task) - - def __len__(self) -> int: - return len(self.tasks) + len(self.ready) - - def take(self) -> 'JoinSet': - """Takes ownership of the JoinSet and returns a new JoinSet. - """ - self.my_thread = threading.get_ident() - return self +class Interval: - def join_next(self) -> Coroutine: + def __init__(self, interval: float): """ - Join the next task in the set if any, otherwise return None + Create an interval that will run the given factory every `interval` seconds. - [Cacncel-Safety] - This method is cancel-safe. The future returned by this method can be cancelled - without affecting the JoinSet. The JoinSet will continue to track the tasks - and will not be affected by the cancellation of the future. + Args: + interval: The interval in seconds. + factory: A factory that returns a coroutine to be run at the interval. 
""" + self.interval = interval - # Check if the current thread is the same as the thread that created the JoinSet - if self.my_thread != threading.get_ident(): - raise RuntimeError("JoinSet.join_next must be called from the same thread that created the JoinSet") - - if not self.tasks and not self.ready: - return None - - self.waker.clear() - - # assert not self.waker, "JoinSet.join_next requires exclusive access to join set" - - async def my_fut(): - await self.waker.wait() - return self.ready.pop() - - # Check if there are any tasks that are already ready - # in between the time we added the future and now - if self.ready: - self.waker.set() - return my_fut() \ No newline at end of file + async def tick(self): + await asyncio.sleep(self.interval) From 2cc6d0ebf564e265aafe45c1cf8a6d2c3aefb424 Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Tue, 27 May 2025 13:13:35 -0400 Subject: [PATCH 33/56] added README for python --- LICENSING.md | 2 + python/yellowstone-fumarole-client/README.md | 94 +++++++++++++++ .../pyproject.toml | 2 +- .../yellowstone_fumarole_client/__init__.py | 49 ++++++-- .../runtime/aio.py | 109 +++++++++++------- 5 files changed, 203 insertions(+), 53 deletions(-) diff --git a/LICENSING.md b/LICENSING.md index eb43c5a..2deb34a 100644 --- a/LICENSING.md +++ b/LICENSING.md @@ -11,4 +11,6 @@ The following directories and their subdirectories are licensed under Apache-2.0 ``` examples crates +apps +python ``` diff --git a/python/yellowstone-fumarole-client/README.md b/python/yellowstone-fumarole-client/README.md index e69de29..63f74a4 100644 --- a/python/yellowstone-fumarole-client/README.md +++ b/python/yellowstone-fumarole-client/README.md @@ -0,0 +1,94 @@ +# Fumarole Python SDK + +This module contains Fumarole SDK for `python` programming language. 
+ +## Configuration + +```yaml +endpoint: <"https://fumarole.endpoint.rpcpool.com"> +x-token: +``` + +## Manage consumer group + +Refer to [fume CLI](https://crates.io/crates/yellowstone-fumarole-cli) to manage your consumer groups. + +## Examples + +```python + +from typing import Optional +import uuid +import asyncio +import logging +from os import environ +from collections import defaultdict +from yellowstone_fumarole_client.config import FumaroleConfig +from yellowstone_fumarole_client import FumaroleClient +from yellowstone_fumarole_proto.fumarole_v2_pb2 import CreateConsumerGroupRequest +from yellowstone_fumarole_proto.geyser_pb2 import ( + SubscribeRequest, + SubscribeRequestFilterAccounts, + SubscribeRequestFilterTransactions, + SubscribeRequestFilterBlocksMeta, + SubscribeRequestFilterEntry, + SubscribeRequestFilterSlots, +) +from yellowstone_fumarole_proto.geyser_pb2 import ( + SubscribeUpdate, + SubscribeUpdateTransaction, + SubscribeUpdateBlockMeta, + SubscribeUpdateAccount, + SubscribeUpdateEntry, + SubscribeUpdateSlot, +) + +async def dragonsmouth_like_session(fumarole_config): + with open("~/.fumarole/config.yaml") as f: + fumarole_config = FumaroleConfig.from_yaml(f) + + client: FumaroleClient = await FumaroleClient.connect(fumarole_config) + await client.delete_all_consumer_groups() + + # --- This is optional --- + resp = await client.create_consumer_group( + CreateConsumerGroupRequest( + consumer_group_name="test", + ) + ) + assert resp.consumer_group_id, "Failed to create consumer group" + # --- END OF OPTIONAL BLOCK --- + + session = await client.dragonsmouth_subscribe( + consumer_group_name="test", + request=SubscribeRequest( + # accounts={"fumarole": SubscribeRequestFilterAccounts()}, + transactions={"fumarole": SubscribeRequestFilterTransactions()}, + blocks_meta={"fumarole": SubscribeRequestFilterBlocksMeta()}, + entry={"fumarole": SubscribeRequestFilterEntry()}, + slots={"fumarole": SubscribeRequestFilterSlots()}, + ), + ) + 
dragonsmouth_source = session.source + handle = session.fumarole_handle + block_map = defaultdict(BlockConstruction) + while True: + tasks = [asyncio.create_task(dragonsmouth_source.get()), handle] + done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED) + for t in done: + if tasks[0] == t: + result: SubscribeUpdate = t.result() + if result.HasField("block_meta"): + block_meta: SubscribeUpdateBlockMeta = result.block_meta + elif result.HasField("transaction"): + tx: SubscribeUpdateTransaction = result.transaction + elif result.HasField("account"): + account: SubscribeUpdateAccount = result.account + elif result.HasField("entry"): + entry: SubscribeUpdateEntry = result.entry + elif result.HasField("slot"): + result: SubscribeUpdateSlot = result.slot + else: + result = t.result() + raise RuntimeError("failed to get dragonsmouth source: %s" % result) +``` \ No newline at end of file diff --git a/python/yellowstone-fumarole-client/pyproject.toml b/python/yellowstone-fumarole-client/pyproject.toml index f0d7a10..90c55b7 100644 --- a/python/yellowstone-fumarole-client/pyproject.toml +++ b/python/yellowstone-fumarole-client/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "yellowstone-fumarole-client" -version = "0.1.0" +version = "0.1.0-pre.1" homepage = "https://github.com/rpcpool/yellowstone-fumarole" repository = "https://github.com/rpcpool/yellowstone-fumarole" description = "Yellowstone Fumarole Python Client" diff --git a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py index 856c517..d734e40 100644 --- a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py @@ -11,7 +11,6 @@ FumaroleSM, DEFAULT_GC_INTERVAL, DEFAULT_SLOT_MEMORY_RETENTION, - DragonsmouthSubscribeRequestBidi, GrpcSlotDownloader, ) from yellowstone_fumarole_proto.geyser_pb2 
import SubscribeRequest, SubscribeUpdate @@ -104,7 +103,10 @@ def __init__(self, connector: FumaroleGrpcConnector, stub: FumaroleStub): @staticmethod async def connect(config: config.FumaroleConfig) -> "FumaroleClient": - """Connect to the Fumarole server using the provided configuration.""" + """Connect to the Fumarole server using the provided configuration. + Args: + config (FumaroleConfig): Configuration for the Fumarole client. + """ endpoint = config.endpoint connector = FumaroleGrpcConnector(config=config, endpoint=endpoint) FumaroleClient.logger.debug(f"Connecting to {endpoint}") @@ -113,7 +115,8 @@ async def connect(config: config.FumaroleConfig) -> "FumaroleClient": return FumaroleClient(connector=connector, stub=client) async def version(self) -> VersionResponse: - """Get the version of the Fumarole server.""" + """Get the version of the Fumarole server. + """ request = VersionRequest() response = await self.stub.version(request) return response @@ -121,7 +124,12 @@ async def version(self) -> VersionResponse: async def dragonsmouth_subscribe( self, consumer_group_name: str, request: SubscribeRequest ) -> DragonsmouthAdapterSession: - """Subscribe to a dragonsmouth stream with default configuration.""" + """Subscribe to a dragonsmouth stream with default configuration. + + Args: + consumer_group_name (str): The name of the consumer group. + request (SubscribeRequest): The request to subscribe to the dragonsmouth stream. + """ return await self.dragonsmouth_subscribe_with_config( consumer_group_name, request, FumaroleSubscribeConfig() ) @@ -132,7 +140,13 @@ async def dragonsmouth_subscribe_with_config( request: SubscribeRequest, config: FumaroleSubscribeConfig, ) -> DragonsmouthAdapterSession: - """Subscribe to a dragonsmouth stream with custom configuration.""" + """Subscribe to a dragonsmouth stream with custom configuration. + + Args: + consumer_group_name (str): The name of the consumer group. 
+ request (SubscribeRequest): The request to subscribe to the dragonsmouth stream. + config (FumaroleSubscribeConfig): The configuration for the dragonsmouth subscription. + """ dragonsmouth_outlet = asyncio.Queue(maxsize=config.data_channel_capacity) fume_control_plane_q = asyncio.Queue(maxsize=100) @@ -185,7 +199,6 @@ async def control_plane_source(): sm = FumaroleSM(last_committed_offset, config.slot_memory_retention) subscribe_request_queue = asyncio.Queue(maxsize=100) - dm_bidi = DragonsmouthSubscribeRequestBidi(rx=subscribe_request_queue) data_plane_client = await self.connector.connect() @@ -196,7 +209,7 @@ async def control_plane_source(): rt = AsyncioFumeDragonsmouthRuntime( sm=sm, slot_downloader=grpc_slot_downloader, - dragonsmouth_bidi=dm_bidi, + subscribe_request_update_q=subscribe_request_queue, subscribe_request=request, consumer_group_name=consumer_group_name, control_plane_tx_q=fume_control_plane_q, @@ -224,7 +237,12 @@ async def list_consumer_groups( async def get_consumer_group_info( self, consumer_group_name: str ) -> Optional[ConsumerGroupInfo]: - """Gets information about a consumer group by name.""" + """Gets information about a consumer group by name. + Returns None if the consumer group does not exist. + + Args: + consumer_group_name (str): The name of the consumer group to retrieve information for. + """ try: return await self.stub.GetConsumerGroupInfo( GetConsumerGroupInfoRequest(consumer_group_name=consumer_group_name) @@ -238,7 +256,12 @@ async def get_consumer_group_info( async def delete_consumer_group( self, consumer_group_name: str ) -> DeleteConsumerGroupResponse: - """Delete a consumer group by name.""" + """Delete a consumer group by name. + + NOTE: this operation is idempotent, meaning that if the consumer group does not exist, it will not raise an error. + Args: + consumer_group_name (str): The name of the consumer group to delete. 
+ """ return await self.stub.DeleteConsumerGroup( DeleteConsumerGroupRequest(consumer_group_name=consumer_group_name) ) @@ -246,7 +269,8 @@ async def delete_consumer_group( async def delete_all_consumer_groups( self, ) -> DeleteConsumerGroupResponse: - """Deletes all consumer groups.""" + """Deletes all consumer groups. + """ consumer_group_list = await self.list_consumer_groups() tasks = [] @@ -268,5 +292,8 @@ async def delete_all_consumer_groups( async def create_consumer_group( self, request: CreateConsumerGroupRequest ) -> CreateConsumerGroupResponse: - """Creates a new consumer group.""" + """Creates a new consumer group. + Args: + request (CreateConsumerGroupRequest): The request to create a consumer group. + """ return await self.stub.CreateConsumerGroup(request) diff --git a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py index fcbf94c..21a8343 100644 --- a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py @@ -41,19 +41,12 @@ DEFAULT_SLOT_MEMORY_RETENTION = 10000 -@dataclass -class DataPlaneConn: - permits: int - client: GrpcFumaroleClient - rev: int - - def has_permit(self) -> bool: - return self.permits > 0 - # DownloadTaskResult @dataclass class CompletedDownloadBlockTask: + """Represents a completed download block task. + """ slot: int block_uid: bytes shard_idx: FumeShardIdx @@ -62,44 +55,46 @@ class CompletedDownloadBlockTask: @dataclass class DownloadBlockError: + """Represents an error that occurred during the download of a block. + """ kind: str # 'Disconnected', 'OutletDisconnected', 'BlockShardNotFound', 'FailedDownload', 'Fatal' message: str @dataclass class DownloadTaskResult: + """Represents the result of a download task. 
+ """ kind: str # 'Ok' or 'Err' completed: Optional[CompletedDownloadBlockTask] = None slot: Optional[int] = None err: Optional[DownloadBlockError] = None -# DragonsmouthSubscribeRequestBidi -@dataclass -class DragonsmouthSubscribeRequestBidi: - rx: asyncio.Queue - LOGGER = logging.getLogger(__name__) class AsyncSlotDownloader(ABC): - + """Abstract base class for slot downloaders.""" @abstractmethod async def run_download( self, subscribe_request: SubscribeRequest, spec: "DownloadTaskArgs" ) -> DownloadTaskResult: + """Run the download task for a given slot. + """ pass # TokioFumeDragonsmouthRuntime class AsyncioFumeDragonsmouthRuntime: - + """Asynchronous runtime for Fumarole with Dragonsmouth-like stream support. + """ def __init__( self, sm: FumaroleSM, slot_downloader: AsyncSlotDownloader, - dragonsmouth_bidi: DragonsmouthSubscribeRequestBidi, + subscribe_request_update_q: asyncio.Queue, subscribe_request: SubscribeRequest, consumer_group_name: str, control_plane_tx_q: asyncio.Queue, @@ -109,9 +104,24 @@ def __init__( gc_interval: int, max_concurrent_download: int = 10, ): + """Initialize the runtime with the given parameters. + + Args: + sm (FumaroleSM): The state machine managing the Fumarole state. + slot_downloader (AsyncSlotDownloader): The downloader for slots. + subscribe_request_update_q (asyncio.Queue): The queue for subscribe request updates. + subscribe_request (SubscribeRequest): The initial subscribe request. + consumer_group_name (str): The name of the consumer group. + control_plane_tx_q (asyncio.Queue): The queue for sending control commands. + control_plane_rx_q (asyncio.Queue): The queue for receiving control responses. + dragonsmouth_outlet (asyncio.Queue): The outlet for Dragonsmouth updates. + commit_interval (float): The interval for committing offsets, in seconds. + gc_interval (int): The interval for garbage collection, in seconds. + max_concurrent_download (int): The maximum number of concurrent download tasks. 
+ """ self.sm = sm self.slot_downloader: AsyncSlotDownloader = slot_downloader - self.dragonsmouth_bidi = dragonsmouth_bidi + self.subscribe_request_update_q = subscribe_request_update_q self.subscribe_request = subscribe_request self.consumer_group_name = consumer_group_name self.control_plane_tx = control_plane_tx_q @@ -123,16 +133,19 @@ def __init__( self.download_tasks = dict() self.inner_runtime_channel: asyncio.Queue = asyncio.Queue() - def build_poll_history_cmd( + def _build_poll_history_cmd( self, from_offset: Optional[FumeOffset] ) -> ControlCommand: + """Build a command to poll the blockchain history. + """ return ControlCommand(poll_hist=PollBlockchainHistory(shard_id=0, limit=None)) - def build_commit_offset_cmd(self, offset: FumeOffset) -> ControlCommand: + def _build_commit_offset_cmd(self, offset: FumeOffset) -> ControlCommand: return ControlCommand(commit_offset=CommitOffset(offset=offset, shard_id=0)) - def handle_control_response(self, control_response: ControlResponse): - + def _handle_control_response(self, control_response: ControlResponse): + """Handle the control response received from the control plane. + """ response_field = control_response.WhichOneof("response") assert response_field is not None, "Control response is empty" @@ -151,14 +164,20 @@ def handle_control_response(self, control_response: ControlResponse): raise ValueError("Unexpected control response") async def poll_history_if_needed(self): + """Poll the history if the state machine needs new events. + """ if self.sm.need_new_blockchain_events(): - cmd = self.build_poll_history_cmd(self.sm.committable_offset) + cmd = self._build_poll_history_cmd(self.sm.committable_offset) await self.control_plane_tx.put(cmd) def commitment_level(self): + """Gets the commitment level from the subscribe request. 
+ """ return self.subscribe_request.commitment - def schedule_download_task_if_any(self): + def _schedule_download_task_if_any(self): + """Schedules download tasks if there are any available slots. + """ while True: LOGGER.debug("Checking for download tasks to schedule") if len(self.download_tasks) >= self.max_concurrent_download: @@ -187,7 +206,9 @@ def schedule_download_task_if_any(self): self.download_tasks[donwload_task] = download_request LOGGER.debug(f"Scheduling download task for slot {download_request.slot}") - def handle_download_result(self, download_result: DownloadTaskResult): + def _handle_download_result(self, download_result: DownloadTaskResult): + """Handles the result of a download task. + """ if download_result.kind == "Ok": completed = download_result.completed LOGGER.debug( @@ -199,19 +220,21 @@ def handle_download_result(self, download_result: DownloadTaskResult): err = download_result.err raise RuntimeError(f"Failed to download slot {slot}: {err.message}") - async def force_commit_offset(self): + async def _force_commit_offset(self): LOGGER.debug(f"Force committing offset {self.sm.committable_offset}") await self.control_plane_tx.put( - self.build_commit_offset_cmd(self.sm.committable_offset) + self._build_commit_offset_cmd(self.sm.committable_offset) ) - async def commit_offset(self): + async def _commit_offset(self): if self.sm.last_committed_offset < self.sm.committable_offset: LOGGER.debug(f"Committing offset {self.sm.committable_offset}") - await self.force_commit_offset() + await self._force_commit_offset() self.last_commit = time.time() - async def drain_slot_status(self): + async def _drain_slot_status(self): + """Drains the slot status from the state machine and sends updates to the Dragonsmouth outlet. 
+ """ commitment = self.subscribe_request.commitment slot_status_vec = deque() while slot_status := self.sm.pop_next_slot_status(): @@ -250,28 +273,32 @@ async def drain_slot_status(self): self.sm.mark_event_as_processed(slot_status.session_sequence) - async def handle_control_plane_resp( + async def _handle_control_plane_resp( self, result: ControlResponse | Exception ) -> bool: + """Handles the control plane response. + """ if isinstance(result, Exception): await self.dragonsmouth_outlet.put(result) return False - self.handle_control_response(result) + self._handle_control_response(result) return True def handle_new_subscribe_request(self, subscribe_request: SubscribeRequest): self.subscribe_request = subscribe_request async def run(self): + """Runs the Fumarole asyncio runtime. + """ LOGGER.debug(f"Fumarole runtime starting...") - await self.control_plane_tx.put(self.build_poll_history_cmd(None)) + await self.control_plane_tx.put(self._build_poll_history_cmd(None)) LOGGER.debug("Initial poll history command sent") - await self.force_commit_offset() + await self._force_commit_offset() LOGGER.debug("Initial commit offset command sent") ticks = 0 task_map = { - asyncio.create_task(self.dragonsmouth_bidi.rx.get()): "dragonsmouth_bidi", + asyncio.create_task(self.subscribe_request_update_q.get()): "dragonsmouth_bidi", asyncio.create_task(self.control_plane_rx.get()): "control_plane_rx", asyncio.create_task(Interval(self.commit_interval).tick()): "commit_tick", } @@ -287,7 +314,7 @@ async def run(self): LOGGER.debug(f"Polling history if needed") await self.poll_history_if_needed() LOGGER.debug("Scheduling download tasks if any") - self.schedule_download_task_if_any() + self._schedule_download_task_if_any() for t in self.download_tasks.keys(): pending.add(t) task_map[t] = "download_task" @@ -306,13 +333,13 @@ async def run(self): case "dragonsmouth_bidi": LOGGER.debug("Dragonsmouth subscribe request received") self.handle_new_subscribe_request(result.subscribe_request) 
- new_task = asyncio.create_task(self.dragonsmouth_bidi.rx.get()) + new_task = asyncio.create_task(self.subscribe_request_update_q.get()) task_map[new_task] = "dragonsmouth_bidi" pending.add(new_task) pass case "control_plane_rx": LOGGER.debug("Control plane response received") - if not await self.handle_control_plane_resp(result): + if not await self._handle_control_plane_resp(result): LOGGER.debug("Control plane error") return new_task = asyncio.create_task(self.control_plane_rx.get()) @@ -321,10 +348,10 @@ async def run(self): case "download_task": LOGGER.debug("Download task result received") assert self.download_tasks.pop(t) - self.handle_download_result(result) + self._handle_download_result(result) case "commit_tick": LOGGER.debug("Commit tick reached") - await self.commit_offset() + await self._commit_offset() new_task = asyncio.create_task( Interval(self.commit_interval).tick() ) @@ -333,7 +360,7 @@ async def run(self): case unknown: raise RuntimeError(f"Unexpected task name: {unknown}") - await self.drain_slot_status() + await self._drain_slot_status() LOGGER.debug("Fumarole runtime exiting") From 04913c035bd621c6057af58f9dc67a84a893f300 Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Tue, 27 May 2025 13:15:51 -0400 Subject: [PATCH 34/56] v2,python: reformat src --- .../yellowstone_fumarole_client/__init__.py | 10 ++-- .../runtime/aio.py | 57 ++++++++----------- 2 files changed, 29 insertions(+), 38 deletions(-) diff --git a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py index d734e40..1f16703 100644 --- a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py @@ -115,8 +115,7 @@ async def connect(config: config.FumaroleConfig) -> "FumaroleClient": return FumaroleClient(connector=connector, stub=client) async def version(self) -> VersionResponse: - 
"""Get the version of the Fumarole server. - """ + """Get the version of the Fumarole server.""" request = VersionRequest() response = await self.stub.version(request) return response @@ -125,7 +124,7 @@ async def dragonsmouth_subscribe( self, consumer_group_name: str, request: SubscribeRequest ) -> DragonsmouthAdapterSession: """Subscribe to a dragonsmouth stream with default configuration. - + Args: consumer_group_name (str): The name of the consumer group. request (SubscribeRequest): The request to subscribe to the dragonsmouth stream. @@ -141,7 +140,7 @@ async def dragonsmouth_subscribe_with_config( config: FumaroleSubscribeConfig, ) -> DragonsmouthAdapterSession: """Subscribe to a dragonsmouth stream with custom configuration. - + Args: consumer_group_name (str): The name of the consumer group. request (SubscribeRequest): The request to subscribe to the dragonsmouth stream. @@ -269,8 +268,7 @@ async def delete_consumer_group( async def delete_all_consumer_groups( self, ) -> DeleteConsumerGroupResponse: - """Deletes all consumer groups. - """ + """Deletes all consumer groups.""" consumer_group_list = await self.list_consumer_groups() tasks = [] diff --git a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py index 21a8343..e4c92f0 100644 --- a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py @@ -41,12 +41,11 @@ DEFAULT_SLOT_MEMORY_RETENTION = 10000 - # DownloadTaskResult @dataclass class CompletedDownloadBlockTask: - """Represents a completed download block task. - """ + """Represents a completed download block task.""" + slot: int block_uid: bytes shard_idx: FumeShardIdx @@ -55,41 +54,40 @@ class CompletedDownloadBlockTask: @dataclass class DownloadBlockError: - """Represents an error that occurred during the download of a block. 
- """ + """Represents an error that occurred during the download of a block.""" + kind: str # 'Disconnected', 'OutletDisconnected', 'BlockShardNotFound', 'FailedDownload', 'Fatal' message: str @dataclass class DownloadTaskResult: - """Represents the result of a download task. - """ + """Represents the result of a download task.""" + kind: str # 'Ok' or 'Err' completed: Optional[CompletedDownloadBlockTask] = None slot: Optional[int] = None err: Optional[DownloadBlockError] = None - LOGGER = logging.getLogger(__name__) class AsyncSlotDownloader(ABC): """Abstract base class for slot downloaders.""" + @abstractmethod async def run_download( self, subscribe_request: SubscribeRequest, spec: "DownloadTaskArgs" ) -> DownloadTaskResult: - """Run the download task for a given slot. - """ + """Run the download task for a given slot.""" pass # TokioFumeDragonsmouthRuntime class AsyncioFumeDragonsmouthRuntime: - """Asynchronous runtime for Fumarole with Dragonsmouth-like stream support. - """ + """Asynchronous runtime for Fumarole with Dragonsmouth-like stream support.""" + def __init__( self, sm: FumaroleSM, @@ -136,16 +134,14 @@ def __init__( def _build_poll_history_cmd( self, from_offset: Optional[FumeOffset] ) -> ControlCommand: - """Build a command to poll the blockchain history. - """ + """Build a command to poll the blockchain history.""" return ControlCommand(poll_hist=PollBlockchainHistory(shard_id=0, limit=None)) def _build_commit_offset_cmd(self, offset: FumeOffset) -> ControlCommand: return ControlCommand(commit_offset=CommitOffset(offset=offset, shard_id=0)) def _handle_control_response(self, control_response: ControlResponse): - """Handle the control response received from the control plane. 
- """ + """Handle the control response received from the control plane.""" response_field = control_response.WhichOneof("response") assert response_field is not None, "Control response is empty" @@ -164,20 +160,17 @@ def _handle_control_response(self, control_response: ControlResponse): raise ValueError("Unexpected control response") async def poll_history_if_needed(self): - """Poll the history if the state machine needs new events. - """ + """Poll the history if the state machine needs new events.""" if self.sm.need_new_blockchain_events(): cmd = self._build_poll_history_cmd(self.sm.committable_offset) await self.control_plane_tx.put(cmd) def commitment_level(self): - """Gets the commitment level from the subscribe request. - """ + """Gets the commitment level from the subscribe request.""" return self.subscribe_request.commitment def _schedule_download_task_if_any(self): - """Schedules download tasks if there are any available slots. - """ + """Schedules download tasks if there are any available slots.""" while True: LOGGER.debug("Checking for download tasks to schedule") if len(self.download_tasks) >= self.max_concurrent_download: @@ -207,8 +200,7 @@ def _schedule_download_task_if_any(self): LOGGER.debug(f"Scheduling download task for slot {download_request.slot}") def _handle_download_result(self, download_result: DownloadTaskResult): - """Handles the result of a download task. - """ + """Handles the result of a download task.""" if download_result.kind == "Ok": completed = download_result.completed LOGGER.debug( @@ -233,8 +225,7 @@ async def _commit_offset(self): self.last_commit = time.time() async def _drain_slot_status(self): - """Drains the slot status from the state machine and sends updates to the Dragonsmouth outlet. 
- """ + """Drains the slot status from the state machine and sends updates to the Dragonsmouth outlet.""" commitment = self.subscribe_request.commitment slot_status_vec = deque() while slot_status := self.sm.pop_next_slot_status(): @@ -276,8 +267,7 @@ async def _drain_slot_status(self): async def _handle_control_plane_resp( self, result: ControlResponse | Exception ) -> bool: - """Handles the control plane response. - """ + """Handles the control plane response.""" if isinstance(result, Exception): await self.dragonsmouth_outlet.put(result) return False @@ -288,8 +278,7 @@ def handle_new_subscribe_request(self, subscribe_request: SubscribeRequest): self.subscribe_request = subscribe_request async def run(self): - """Runs the Fumarole asyncio runtime. - """ + """Runs the Fumarole asyncio runtime.""" LOGGER.debug(f"Fumarole runtime starting...") await self.control_plane_tx.put(self._build_poll_history_cmd(None)) LOGGER.debug("Initial poll history command sent") @@ -298,7 +287,9 @@ async def run(self): ticks = 0 task_map = { - asyncio.create_task(self.subscribe_request_update_q.get()): "dragonsmouth_bidi", + asyncio.create_task( + self.subscribe_request_update_q.get() + ): "dragonsmouth_bidi", asyncio.create_task(self.control_plane_rx.get()): "control_plane_rx", asyncio.create_task(Interval(self.commit_interval).tick()): "commit_tick", } @@ -333,7 +324,9 @@ async def run(self): case "dragonsmouth_bidi": LOGGER.debug("Dragonsmouth subscribe request received") self.handle_new_subscribe_request(result.subscribe_request) - new_task = asyncio.create_task(self.subscribe_request_update_q.get()) + new_task = asyncio.create_task( + self.subscribe_request_update_q.get() + ) task_map[new_task] = "dragonsmouth_bidi" pending.add(new_task) pass From 815f48853bbab13289769ebe1d3b08d254d9a332 Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Tue, 27 May 2025 13:55:21 -0400 Subject: [PATCH 35/56] v2,python: fixed bug when updating subscribe request --- 
python/yellowstone-fumarole-client/pytest.ini | 2 +- .../tests/test_fumarole_client_intg.py | 189 +++++++++++++++++- .../yellowstone_fumarole_client/__init__.py | 4 +- .../runtime/aio.py | 5 +- 4 files changed, 194 insertions(+), 6 deletions(-) diff --git a/python/yellowstone-fumarole-client/pytest.ini b/python/yellowstone-fumarole-client/pytest.ini index 0309f87..afe5eec 100644 --- a/python/yellowstone-fumarole-client/pytest.ini +++ b/python/yellowstone-fumarole-client/pytest.ini @@ -1,5 +1,5 @@ [pytest] -log_cli = true +log_cli = false log_cli_level = DEBUG log_cli_format = %(asctime)s [%(pathname)s %(lineno)d] [%(levelname)s] %(message)s asyncio_default_fixture_loop_scope = function diff --git a/python/yellowstone-fumarole-client/tests/test_fumarole_client_intg.py b/python/yellowstone-fumarole-client/tests/test_fumarole_client_intg.py index cc8ee5e..7a21b5a 100644 --- a/python/yellowstone-fumarole-client/tests/test_fumarole_client_intg.py +++ b/python/yellowstone-fumarole-client/tests/test_fumarole_client_intg.py @@ -6,7 +6,7 @@ from os import environ from collections import defaultdict from yellowstone_fumarole_client.config import FumaroleConfig -from yellowstone_fumarole_client import FumaroleClient +from yellowstone_fumarole_client import FumaroleClient, FumaroleSubscribeConfig from yellowstone_fumarole_proto.fumarole_v2_pb2 import CreateConsumerGroupRequest from yellowstone_fumarole_proto.geyser_pb2 import ( SubscribeRequest, @@ -77,6 +77,97 @@ async def test_fumarole_delete_all(fumarole_config): assert cg_info is None, "Failed to get consumer group info" +@pytest.mark.asyncio +async def test_updating_subscribe_request(fumarole_config): + """ + Test the slot update subscription. 
+ """ + logging.debug("test_slot_update_subscribe") + # Create a FumaroleClient instance + + fumarole_config.x_metadata = {"x-subscription-id": str(uuid.uuid4())} + + client: FumaroleClient = await FumaroleClient.connect(fumarole_config) + + resp = await client.create_consumer_group( + CreateConsumerGroupRequest( + consumer_group_name="test", + ) + ) + assert resp.consumer_group_id, "Failed to create consumer group" + + session = await client.dragonsmouth_subscribe( + consumer_group_name="test", + request=SubscribeRequest( + slots={"fumarole": SubscribeRequestFilterSlots()}, + ), + ) + + dragonsmouth_source = session.source + handle = session.fumarole_handle + + slot_status_recv = [] + for _ in range(10): + tasks = [asyncio.create_task(dragonsmouth_source.get()), handle] + done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED) + for t in done: + if tasks[0] == t: + result: SubscribeUpdate = t.result() + assert result.HasField("slot"), "Expected slot update" + slot: SubscribeUpdateSlot = result.slot + slot_status_recv.append(slot) + else: + result = t.result() + raise RuntimeError("failed to get dragonsmouth source: %s" % result) + assert len(slot_status_recv) == 10 + + +@pytest.mark.asyncio +async def test_updating_subscribe_request(fumarole_config): + """ + Test the slot update subscription. 
+ """ + logging.debug("test_slot_update_subscribe") + # Create a FumaroleClient instance + + fumarole_config.x_metadata = {"x-subscription-id": str(uuid.uuid4())} + + client: FumaroleClient = await FumaroleClient.connect(fumarole_config) + + resp = await client.create_consumer_group( + CreateConsumerGroupRequest( + consumer_group_name="test", + ) + ) + assert resp.consumer_group_id, "Failed to create consumer group" + + session = await client.dragonsmouth_subscribe( + consumer_group_name="test", + request=SubscribeRequest( + entry={"fumarole": SubscribeRequestFilterEntry()}, + ), + ) + + dragonsmouth_source = session.source + handle = session.fumarole_handle + + entry_recv = [] + for _ in range(1000): + tasks = [asyncio.create_task(dragonsmouth_source.get()), handle] + done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED) + for t in done: + if tasks[0] == t: + result: SubscribeUpdate = t.result() + assert result.HasField("entry"), "Expected slot update" + entry: SubscribeUpdateEntry = result.entry + assert isinstance(entry, SubscribeUpdateEntry), "Expected entry update" + entry_recv.append(entry) + else: + result = t.result() + raise RuntimeError("failed to get dragonsmouth source: %s" % result) + assert len(entry_recv) == 1000 + + @pytest.mark.asyncio async def test_dragonsmouth_adapter(fumarole_config): """ @@ -102,7 +193,7 @@ async def test_dragonsmouth_adapter(fumarole_config): session = await client.dragonsmouth_subscribe( consumer_group_name="test", request=SubscribeRequest( - # accounts={"fumarole": SubscribeRequestFilterAccounts()}, + accounts={"fumarole": SubscribeRequestFilterAccounts()}, transactions={"fumarole": SubscribeRequestFilterTransactions()}, blocks_meta={"fumarole": SubscribeRequestFilterBlocksMeta()}, entry={"fumarole": SubscribeRequestFilterEntry()}, @@ -121,6 +212,7 @@ def __init__(self): self.meta: Optional[SubscribeUpdateBlockMeta] = None def check_block_integrity(self) -> bool: + assert len(self.account_vec) > 0, 
"Block account vector is empty" assert self.meta is not None, "Block meta is not set" return ( len(self.tx_vec) == self.meta.executed_transaction_count @@ -161,3 +253,96 @@ def check_block_integrity(self) -> bool: else: result = t.result() raise RuntimeError("failed to get dragonsmouth source: %s" % result) + + +@pytest.mark.asyncio +async def test_updating_subscribe_request(fumarole_config): + """ + Test the slot update subscription. + """ + logging.debug("test_slot_update_subscribe") + # Create a FumaroleClient instance + + fumarole_config.x_metadata = {"x-subscription-id": str(uuid.uuid4())} + + client: FumaroleClient = await FumaroleClient.connect(fumarole_config) + + resp = await client.create_consumer_group( + CreateConsumerGroupRequest( + consumer_group_name="test", + ) + ) + assert resp.consumer_group_id, "Failed to create consumer group" + subscribe_config = FumaroleSubscribeConfig( + concurrent_download_limit=1, + ) + session = await client.dragonsmouth_subscribe_with_config( + consumer_group_name="test", + request=SubscribeRequest( + entry={"fumarole": SubscribeRequestFilterEntry()}, + ), + config=subscribe_config, + ) + + request2 = SubscribeRequest( + slots={"fumarole": SubscribeRequestFilterSlots()}, + ) + dragonsmouth_source = session.source + handle = session.fumarole_handle + update_subscribe_request_q = session.sink + data_recv = [] + for _ in range(500): + tasks = [asyncio.create_task(dragonsmouth_source.get()), handle] + done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED) + for t in done: + if tasks[0] == t: + result: SubscribeUpdate = t.result() + assert result.HasField("entry"), "Expected slot update" + entry: SubscribeUpdateEntry = result.entry + assert isinstance(entry, SubscribeUpdateEntry), "Expected entry update" + data_recv.append(entry) + else: + result = t.result() + raise RuntimeError("failed to get dragonsmouth source: %s" % result) + + await update_subscribe_request_q.put(request2) + + 
when_new_filter_in_effect = None + slot_update_detected = 0 + while slot_update_detected < 10: + tasks = [asyncio.create_task(dragonsmouth_source.get()), handle] + done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED) + for t in done: + if tasks[0] == t: + result: SubscribeUpdate = t.result() + assert result.HasField("slot") or result.HasField( + "entry" + ), "Expected slot or entry update" + if result.HasField("entry"): + entry: SubscribeUpdateEntry = result.entry + assert isinstance( + entry, SubscribeUpdateEntry + ), "Expected entry update" + data_recv.append(entry) + if when_new_filter_in_effect is not None: + assert ( + when_new_filter_in_effect > entry.slot + ), "New filter should be in effect after the slot update" + elif result.HasField("slot"): + assert isinstance( + result.slot, SubscribeUpdateSlot + ), "Expected slot update" + if when_new_filter_in_effect is None: + slot = result.slot.slot + when_new_filter_in_effect = slot + data_recv.append(result.slot) + slot_update_detected += 1 + slot: SubscribeUpdateSlot = result.slot + assert isinstance(slot, SubscribeUpdateSlot), "Expected slot update" + else: + result = t.result() + raise RuntimeError("failed to get dragonsmouth source: %s" % result) + + assert ( + when_new_filter_in_effect is not None + ), "New filter should be in effect after the slot update" diff --git a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py index 1f16703..30aa834 100644 --- a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/__init__.py @@ -58,7 +58,7 @@ class FumaroleSubscribeConfig: """Configuration for subscribing to a dragonsmouth stream.""" # The maximum number of concurrent download tasks per TCP connection. 
- concurrent_download_limit_per_tcp: int = DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP + concurrent_download_limit: int = DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP # The interval at which to commit the slot memory. commit_interval: float = DEFAULT_COMMIT_INTERVAL @@ -216,7 +216,7 @@ async def control_plane_source(): dragonsmouth_outlet=dragonsmouth_outlet, commit_interval=config.commit_interval, gc_interval=config.gc_interval, - max_concurrent_download=config.concurrent_download_limit_per_tcp, + max_concurrent_download=config.concurrent_download_limit, ) fumarole_handle = asyncio.create_task(rt.run()) diff --git a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py index e4c92f0..2165dbd 100644 --- a/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py +++ b/python/yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/aio.py @@ -323,7 +323,10 @@ async def run(self): match name: case "dragonsmouth_bidi": LOGGER.debug("Dragonsmouth subscribe request received") - self.handle_new_subscribe_request(result.subscribe_request) + assert isinstance( + result, SubscribeRequest + ), "Expected SubscribeRequest" + self.handle_new_subscribe_request(result) new_task = asyncio.create_task( self.subscribe_request_update_q.get() ) From 9061c07f18624ea3ce64c14e5f7d805966d99539 Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Thu, 29 May 2025 15:38:35 -0400 Subject: [PATCH 36/56] added doc for fumarole api --- docs/fumarole_api.md | 242 +++++++++++++++++++++++++++++++++++++++++ docs/images/image1.png | 0 2 files changed, 242 insertions(+) create mode 100644 docs/fumarole_api.md create mode 100644 docs/images/image1.png diff --git a/docs/fumarole_api.md b/docs/fumarole_api.md new file mode 100644 index 0000000..e85b6e6 --- /dev/null +++ b/docs/fumarole_api.md @@ -0,0 +1,242 @@ +# Fumarole API Guide + +This document is a guide to understand how 
Fumarole API works. + +## Control vs Data Plane + +Fumarole API has two planes the client must manage: + +1. The control plane: + - Commit the consumer group offset + - Fetch blockchain slot history slice +2. The data plane: + - Slot Download stream request + +## Slot history + +The Fumarole backend offers a reliable and persistent slot history with a well-defined ordering. + +Persistent: The slot history is stored in our durable storage system. + +Stable: Each event is assigned a fixed, ever-increasing offset that never changes. + +Every message includes both a slot number and its corresponding slot commitment. +The slot history records when each slot becomes available and how it progresses through different commitment levels. + +Here is an example: + +![](images/blockchain_history.png.crdownload) + +Note: +- The message offsets: 1,2,..,N are ordered. +- The messages are NOT SORTED by slot : you may receive a message about slot 11 before slot 10. + + +I deliberately plotted a sequence where slots are disordered because that will happen, so do not assume slots happen in order because this is not the case. +They will mostly happen in order, but the order you see is the fumarole processed slot ordering.
+ + +## Initiate a connection + +Upon connecting to the `Subscribe` method in the `Fumarole` service, the first message you need to send is a control-plane command called `JoinControlPlane` + +```rust +let initial_join = JoinControlPlane { + consumer_group_name: Some(), +}; +let initial_join_command = ControlCommand { + command: Some(proto::control_command::Command::InitialJoin(initial_join)), +}; +``` + +Once you have sent the command, you should receive back `ControlResponse::InitialConsumerGroupState`: + +```proto +message InitialConsumerGroupState { + bytes blockchain_id = 1; + map last_committed_offsets = 2; +} +``` + +The `last_committed_offsets` field should contain exactly one entry whose key is the int32 `0` => ``: + +```rust +response.last_committed_offsets.get(0).expect("should not be none") +``` + +The `last_committed_offsets` field is a map in case we want to support a sharded control plane in the future. + +## Polling Slot history + +Inside the `Fumarole.Subscribe` stream, you can poll Slot history using the command: + +```proto +message PollBlockchainHistory { + int32 shard_id = 1; // ALWAYS SET IT TO 0, THIS IS TO SUPPORT SHARDED HISTORY + optional int64 from = 2; + optional int64 limit = 3; +} +``` + +Technically speaking, you don't need to provide the `from` or `limit` as the remote server remembers where you left off. +If you want to force a specific offset you can fill the `from` field. + +The return result is a set of historical events: + +```proto +message BlockchainEvent { + int64 offset = 1; // the current offset in the log + bytes blockchain_id = 2; // the blockchain unique id + bytes block_uid = 3; // the block UUID bound to this event. + uint32 num_shards = 4; // ALWAYS 1. + uint64 slot = 5; // The slot number + optional uint64 parent_slot = 6; // maybe the parent slot + geyser.CommitmentLevel commitment_level = 7; // the commitment level this event belongs to.
+ + int32 blockchain_shard_id = 8; // ALWAYS 0 + optional string dead_error = 9; // Slot dead error message. +} +``` + +The `block_uid` is the unique identifier used to download the slot content in the data-plane. + +## Offset commitment + +It's the client's responsibility to "commit" its offset, stating that it fully processed a specific slot event in our history. Next time the client connects, the Fumarole service will remember where the user left off. + +Still inside the control plane `Fumarole.Subscribe` bidi-stream, to commit your offset you need to send: + +```proto +message CommitOffset { + int64 offset = 1; // the offset you processed. + int32 shard_id = 2; // ALWAYS 0 +} +``` + +## Stream Slot data rows + +Using the data-plane unary-stream method `Fumarole.DownloadBlock` you can download a slot using the following request: + +```proto +message DownloadBlockShard { + bytes blockchain_id = 1; // COMES FROM InitialConsumerGroupState + bytes block_uid = 2; // COMES FROM BlockchainEvent + int32 shard_idx = 3; // ALWAYS 0 + optional BlockFilters blockFilters = 4; +} +``` + +And `blockFilters`: + +```proto +message BlockFilters { + map accounts = 1; + map transactions = 2; + map entries = 3; + map blocks_meta = 4; +} +``` + +Notice `BlockFilters` is 100% compatible with `geyser.SubscribeRequest` fields. In fact, it is a subset of `geyser.SubscribeRequest`. + +The return result of `Fumarole.DownloadBlock` is a Stream of `DataResponse`: + +```proto +message DataResponse { + oneof response { + geyser.SubscribeUpdate update = 1; + BlockShardDownloadFinish block_shard_download_finish = 2; + } +} +``` +It is either a `geyser.SubscribeUpdate` or a signal that the slot data has been fully streamed out, so you can stop your streaming process. + +If you haven't received `block_shard_download_finish` yet, then the stream is not done yet. If the stream closes before receiving `block_shard_download_finish`, then something must be wrong and you should throw an Exception or crash the download process.
+ + +## Simple Loop + + +```python + +taskset = {} + +loop: + if not state.has_any_event_to_process?(): + taskset.add( spawn(poll_new_unprocess_event()) ) + while state.has_slot_to_download?(): + slot_to_download = state.slot_to_download.pop() + taskset.add( spawn(download_slot(slot_to_download)) ) + taskset.wait_for_next() +``` + + +This is a really "simplified" process loop: your client has to keep track of which slots to download and download any slot that is ready to be downloaded. + + +## Fumarole State Machine + +The State of Fumarole and its business logic that decides what to download should be handled by a state machine. + +Use the [python SDK implementation](../python//yellowstone-fumarole-client/yellowstone_fumarole_client/runtime/state_machine.py) as a reference. + + +Here's the API spec of the Fumarole state-machine: + +```python +class FumaroleSM: + """ + Sans-IO Fumarole State Machine + + Manages in-flight slot downloads and ensures correct ordering of slot statuses without performing I/O.
+ """ + + def gc(self) -> None: + """Garbage collect old slots to respect memory retention limit.""" + + def queue_blockchain_event(self, events: List[BlockchainEvent]) -> None: + """Queue blockchain events for processing.""" + + def make_slot_download_progress( + self, slot: Slot, shard_idx: FumeShardIdx + ) -> SlotDownloadState: + """Update download progress for a given slot.""" + + def pop_next_slot_status(self) -> Optional[FumeSlotStatus]: + """Pop the next slot status to process.""" + + def pop_slot_to_download(self, commitment=None) -> Optional[FumeDownloadRequest]: + """Pop the next slot to download.""" + + def mark_event_as_processed(self, event_seq_number: FumeSessionSequence) -> None: + """Mark an event as processed and update committable offset.""" + + def slot_status_update_queue_len(self) -> int: + """Return the length of the slot status update queue.""" + + def processed_offset_queue_len(self) -> int: + """Return the length of the processed offset queue.""" + + def need_new_blockchain_events(self) -> bool: + """Check if new blockchain events are needed.""" +``` + +As you poll history event from the fumarole service, you first register them through `queue_blockchain_event`. + +If you have register historical blockchain event to your state machine, new slot to download should become available to you via `pop_slot_to_download`. You driver implementation should do the actual slot download and +track the download progress through `make_slot_download_progress`. + +As you complete slot download, you should be able to pop slot status update through `pop_next_slot_status`. +Essentially, we only send slot commitment update if you have seen the entire slot. +By "seeing" the entire slot I mean downloading the entire slot locally. + +Once you downloaded the slot and sent the slot status to the enduser, you can then mark slot status as "processed" through `mark_event_as_processed`. + +Recap of the loop: + +1. Poll new historical events if no left. 
+ - Register events to `queue_blockchain_event` +2. call `pop_slot_to_download` + - if not Null, do the actual download. +3. On slot download completed: `make_slot_download_progress` +4. Call `pop_next_slot_status` +5. Once you sent the slot status to the consumer, call `mark_event_as_processed` \ No newline at end of file diff --git a/docs/images/image1.png b/docs/images/image1.png new file mode 100644 index 0000000..e69de29 From 08684b4d31616a9c5044ab871394657b78dbbeba Mon Sep 17 00:00:00 2001 From: lvboudre Date: Tue, 8 Jul 2025 16:03:40 -0400 Subject: [PATCH 37/56] rust,client: support GetChainTip * added lag metrics for fumarole client rust sdk --- apps/yellowstone-fumarole-cli/src/bin/fume.rs | 6 +- .../yellowstone-fumarole-client/src/grpc.rs | 21 +++-- crates/yellowstone-fumarole-client/src/lib.rs | 35 ++++++++- .../src/metrics.rs | 67 ++++++++++++++++ .../src/runtime/tokio.rs | 76 ++++++++++++++++++- proto/fumarole_v2.proto | 12 +++ 6 files changed, 203 insertions(+), 14 deletions(-) diff --git a/apps/yellowstone-fumarole-cli/src/bin/fume.rs b/apps/yellowstone-fumarole-cli/src/bin/fume.rs index 67a742c..db2c54b 100644 --- a/apps/yellowstone-fumarole-cli/src/bin/fume.rs +++ b/apps/yellowstone-fumarole-cli/src/bin/fume.rs @@ -268,12 +268,16 @@ fn summarize_account(account: SubscribeUpdateAccount) -> Option { // let pubkey = Pubkey::try_from(account.pubkey).expect("Failed to parse pubkey"); // let owner = Pubkey::try_from(account.owner).expect("Failed to parse owner"); let tx_sig = account.txn_signature; + let account_pubkey = Pubkey::try_from(account.pubkey).expect("Failed to parse pubkey"); + let owner = Pubkey::try_from(account.owner).expect("Failed to parse owner"); let tx_sig = if let Some(tx_sig_bytes) = tx_sig { bs58::encode(tx_sig_bytes).into_string() } else { "None".to_string() }; - Some(format!("account,{slot},{tx_sig}")) + Some(format!( + "account,{slot},pk={account_pubkey},owner={owner},tx={tx_sig}" + )) } fn summarize_tx(tx: 
SubscribeUpdateTransaction) -> Option { diff --git a/crates/yellowstone-fumarole-client/src/grpc.rs b/crates/yellowstone-fumarole-client/src/grpc.rs index 421fcae..8cbe949 100644 --- a/crates/yellowstone-fumarole-client/src/grpc.rs +++ b/crates/yellowstone-fumarole-client/src/grpc.rs @@ -1,6 +1,13 @@ -use tonic::{service::interceptor::InterceptedService, transport::{Channel, Endpoint}}; - -use crate::{config::FumaroleConfig, proto::fumarole_client::FumaroleClient, string_pairs_to_metadata_header, FumeInterceptor}; +use { + crate::{ + config::FumaroleConfig, proto::fumarole_client::FumaroleClient, + string_pairs_to_metadata_header, FumeInterceptor, + }, + tonic::{ + service::interceptor::InterceptedService, + transport::{Channel, Endpoint}, + }, +}; #[derive(Clone)] pub struct FumaroleGrpcConnector { @@ -11,10 +18,8 @@ pub struct FumaroleGrpcConnector { impl FumaroleGrpcConnector { pub async fn connect( &self, - ) -> Result< - FumaroleClient>, - tonic::transport::Error, - > { + ) -> Result>, tonic::transport::Error> + { let channel = self.endpoint.connect().await?; let interceptor = FumeInterceptor { x_token: self @@ -29,4 +34,4 @@ impl FumaroleGrpcConnector { Ok(FumaroleClient::with_interceptor(channel, interceptor) .max_decoding_message_size(self.config.max_decoding_message_size_bytes)) } -} \ No newline at end of file +} diff --git a/crates/yellowstone-fumarole-client/src/lib.rs b/crates/yellowstone-fumarole-client/src/lib.rs index e0b3731..983071b 100644 --- a/crates/yellowstone-fumarole-client/src/lib.rs +++ b/crates/yellowstone-fumarole-client/src/lib.rs @@ -227,9 +227,9 @@ pub mod config; #[cfg(feature = "prometheus")] pub mod metrics; +pub(crate) mod grpc; pub(crate) mod runtime; pub(crate) mod util; -pub(crate) mod grpc; use { config::FumaroleConfig, @@ -279,10 +279,10 @@ pub mod proto { } use { + crate::grpc::FumaroleGrpcConnector, proto::{fumarole_client::FumaroleClient as TonicFumaroleClient, JoinControlPlane}, runtime::tokio::DataPlaneConn, 
tonic::transport::Endpoint, - crate::grpc::FumaroleGrpcConnector, }; #[derive(Clone)] @@ -359,6 +359,12 @@ pub const DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT: usize = 3; /// pub const DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP: usize = 10; +/// +/// Default refresh tip interval for the fumarole client. +/// Only useful if you enable `prometheus` feature flags. +/// +pub const DEFAULT_REFRESH_TIP_INTERVAL: Duration = Duration::from_secs(5); // seconds + pub(crate) type GrpcFumaroleClient = TonicFumaroleClient>; /// @@ -424,6 +430,11 @@ pub struct FumaroleSubscribeConfig { /// How far back in time the fumarole client should retain slot memory. /// This is used to avoid downloading the same slot multiple times. pub slot_memory_retention: usize, + + /// + /// Interval to refresh the tip stats from the fumarole service. + /// + pub refresh_tip_stats_interval: Duration, } impl Default for FumaroleSubscribeConfig { @@ -439,6 +450,7 @@ impl Default for FumaroleSubscribeConfig { data_channel_capacity: NonZeroUsize::new(DEFAULT_DRAGONSMOUTH_CAPACITY).unwrap(), gc_interval: DEFAULT_GC_INTERVAL, slot_memory_retention: DEFAULT_SLOT_MEMORY_RETENTION, + refresh_tip_stats_interval: DEFAULT_REFRESH_TIP_INTERVAL, // Default to 5 seconds } } } @@ -580,6 +592,11 @@ impl FumaroleClient { where S: AsRef, { + assert!( + config.refresh_tip_stats_interval >= Duration::from_secs(5), + "refresh_tip_stats_interval must be greater than or equal to 5 seconds" + ); + use {proto::ControlCommand, runtime::tokio::DragonsmouthSubscribeRequestBidi}; let (dragonsmouth_outlet, dragonsmouth_inlet) = @@ -667,6 +684,8 @@ impl FumaroleClient { let tokio_rt = TokioFumeDragonsmouthRuntime { sm, + fumarole_client: self.clone(), + blockchain_id: initial_state.blockchain_id, dragonsmouth_bidi: dm_bidi, subscribe_request: request, download_task_runner_chans, @@ -676,7 +695,10 @@ impl FumaroleClient { dragonsmouth_outlet, commit_interval: config.commit_interval, last_commit: Instant::now(), + get_tip_interval: 
config.refresh_tip_stats_interval, + last_tip: Instant::now(), gc_interval: config.gc_interval, + non_critical_background_jobs: Default::default(), }; let download_task_runner_jh = handle.spawn(grpc_download_task_runner.run()); let fumarole_rt_jh = handle.spawn(tokio_rt.run()); @@ -730,4 +752,11 @@ impl FumaroleClient { { self.inner.create_consumer_group(request).await } -} \ No newline at end of file + + pub async fn get_chain_tip( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result, tonic::Status> { + self.inner.get_chain_tip(request).await + } +} diff --git a/crates/yellowstone-fumarole-client/src/metrics.rs b/crates/yellowstone-fumarole-client/src/metrics.rs index 39d45fb..8ae1527 100644 --- a/crates/yellowstone-fumarole-client/src/metrics.rs +++ b/crates/yellowstone-fumarole-client/src/metrics.rs @@ -105,6 +105,64 @@ lazy_static! { ), &["runtime"], ).unwrap(); + + pub(crate) static ref MAX_OFFSET_COMMITTED: IntGaugeVec = IntGaugeVec::new( + Opts::new( + "fumarole_max_offset_committed", + "Max offset committed to Fumarole runtime", + ), + &["runtime"], + ).unwrap(); + + pub(crate) static ref FUMAROLE_BLOCKCHAIN_OFFSET_TIP: IntGaugeVec = IntGaugeVec::new( + Opts::new( + "fumarole_blockchain_offset_tip", + "The current offset tip of the Fumarole blockchain", + ), + &["runtime"], + ).unwrap(); + + pub(crate) static ref FUMAROLE_OFFSET_LAG_FROM_TIP: IntGaugeVec = IntGaugeVec::new( + Opts::new( + "fumarole_offset_lag_from_tip", + "The difference between last committed offset and the current tip of the Fumarole blockchain", + ), + &["runtime"], + ).unwrap(); +} + +pub(crate) fn set_fumarole_blockchain_offset_tip(name: impl AsRef, offset: i64) { + FUMAROLE_BLOCKCHAIN_OFFSET_TIP + .with_label_values(&[name.as_ref()]) + .set(offset); + update_fumarole_offset_lag_from_tip(name); +} + +fn update_fumarole_offset_lag_from_tip(name: impl AsRef) { + let tip = FUMAROLE_BLOCKCHAIN_OFFSET_TIP + .get_metric_with_label_values(&[name.as_ref()]) + .map(|m| 
m.get()) + .unwrap_or(0); + + let committed = MAX_OFFSET_COMMITTED + .get_metric_with_label_values(&[name.as_ref()]) + .map(|m| m.get()) + .unwrap_or(0); + + let tip = tip.max(0) as u64; + + let lag = tip.saturating_sub(committed.max(0) as u64); + + FUMAROLE_OFFSET_LAG_FROM_TIP + .with_label_values(&[name.as_ref()]) + .set(lag as i64); +} + +pub(crate) fn set_max_offset_committed(name: impl AsRef, offset: i64) { + MAX_OFFSET_COMMITTED + .with_label_values(&[name.as_ref()]) + .set(offset); + update_fumarole_offset_lag_from_tip(name); } pub(crate) fn inc_total_event_downloaded(name: impl AsRef, amount: usize) { @@ -216,4 +274,13 @@ pub fn register_metrics(registry: &prometheus::Registry) { registry .register(Box::new(SLOT_STATUS_UPDATE_QUEUE_LEN.clone())) .unwrap(); + registry + .register(Box::new(MAX_OFFSET_COMMITTED.clone())) + .unwrap(); + registry + .register(Box::new(FUMAROLE_BLOCKCHAIN_OFFSET_TIP.clone())) + .unwrap(); + registry + .register(Box::new(FUMAROLE_OFFSET_LAG_FROM_TIP.clone())) + .unwrap(); } diff --git a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs index eb53235..4c349ed 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs @@ -11,9 +11,9 @@ use { crate::{ proto::{ self, data_response, BlockFilters, CommitOffset, ControlCommand, DownloadBlockShard, - PollBlockchainHistory, + GetChainTipResponse, PollBlockchainHistory, }, - FumaroleGrpcConnector, GrpcFumaroleClient, + FumaroleClient, FumaroleGrpcConnector, GrpcFumaroleClient, }, futures::StreamExt, solana_sdk::clock::Slot, @@ -61,6 +61,10 @@ pub enum DownloadTaskResult { Err { slot: Slot, err: DownloadBlockError }, } +pub enum BackgroundJobResult { + UpdateTip(GetChainTipResponse), +} + /// /// Fumarole runtime based on Tokio outputting Dragonsmouth only events. 
/// @@ -68,6 +72,8 @@ pub enum DownloadTaskResult { /// pub(crate) struct TokioFumeDragonsmouthRuntime { pub sm: FumaroleSM, + pub blockchain_id: Vec, + pub fumarole_client: FumaroleClient, pub download_task_runner_chans: DownloadTaskRunnerChannels, pub dragonsmouth_bidi: DragonsmouthSubscribeRequestBidi, pub subscribe_request: SubscribeRequest, @@ -77,8 +83,12 @@ pub(crate) struct TokioFumeDragonsmouthRuntime { pub control_plane_rx: mpsc::Receiver>, pub dragonsmouth_outlet: mpsc::Sender>, pub commit_interval: Duration, + pub get_tip_interval: Duration, pub last_commit: Instant, + pub last_tip: Instant, + pub gc_interval: usize, // in ticks + pub non_critical_background_jobs: JoinSet, } const fn build_poll_history_cmd(from: Option) -> ControlCommand { @@ -231,7 +241,10 @@ impl TokioFumeDragonsmouthRuntime { .expect("failed to commit offset"); #[cfg(feature = "prometheus")] { + use crate::metrics::set_max_offset_committed; + inc_offset_commitment_count(Self::RUNTIME_NAME); + set_max_offset_committed(Self::RUNTIME_NAME, self.sm.committable_offset); } } @@ -339,6 +352,51 @@ impl TokioFumeDragonsmouthRuntime { .expect("failed to send subscribe request"); } + async fn update_tip(&mut self) { + #[cfg(feature = "prometheus")] + { + use crate::proto::GetChainTipRequest; + + let mut fumarole_client = self.fumarole_client.clone(); + let blockchain_id = self.blockchain_id.clone(); + let job = async move { + let result = fumarole_client + .get_chain_tip(GetChainTipRequest { blockchain_id }) + .await + .expect("failed to get chain tip") + .into_inner(); + BackgroundJobResult::UpdateTip(result) + }; + + self.non_critical_background_jobs.spawn(job); + } + self.last_tip = Instant::now(); + } + + fn handle_non_critical_job_result(&mut self, result: BackgroundJobResult) { + match result { + BackgroundJobResult::UpdateTip(get_tip_response) => { + tracing::debug!("received get tip response: {get_tip_response:?}"); + let GetChainTipResponse { + shard_to_max_offset_map, + .. 
+ } = get_tip_response; + if shard_to_max_offset_map.is_empty() { + tracing::warn!("get tip response is empty, no shard to max offset map"); + return; + } + if let Some(tip) = shard_to_max_offset_map.values().max() { + tracing::trace!("tip is {tip}"); + #[cfg(feature = "prometheus")] + { + use crate::metrics::set_fumarole_blockchain_offset_tip; + set_fumarole_blockchain_offset_tip(Self::RUNTIME_NAME, *tip); + } + } + } + } + } + pub(crate) async fn run(mut self) -> Result<(), Box> { let inital_load_history_cmd = build_poll_history_cmd(None); @@ -369,6 +427,7 @@ impl TokioFumeDragonsmouthRuntime { set_slot_status_update_queue_len(Self::RUNTIME_NAME, slot_status_update_queue_len); } + let get_tip_deadline = self.last_tip + self.get_tip_interval; let commit_deadline = self.last_commit + self.commit_interval; self.poll_history_if_needed().await; @@ -398,6 +457,16 @@ impl TokioFumeDragonsmouthRuntime { } } } + Some(result) = self.non_critical_background_jobs.join_next() => { + match result { + Ok(result) => { + self.handle_non_critical_job_result(result); + } + Err(e) => { + tracing::warn!("non critical background job error with: {e:?}"); + } + } + } maybe = self.download_task_runner_chans.download_result_rx.recv() => { match maybe { Some(result) => { @@ -413,6 +482,9 @@ impl TokioFumeDragonsmouthRuntime { tracing::trace!("commit deadline reached"); self.commit_offset().await; } + _ = tokio::time::sleep_until(get_tip_deadline.into()) => { + self.update_tip().await; + } } self.drain_slot_status().await; } diff --git a/proto/fumarole_v2.proto b/proto/fumarole_v2.proto index ecec1c7..d9e801e 100644 --- a/proto/fumarole_v2.proto +++ b/proto/fumarole_v2.proto @@ -15,12 +15,23 @@ service Fumarole { // Represents subscription to the data plane rpc SubscribeData(stream DataCommand) returns (stream DataResponse) {} + rpc GetChainTip(GetChainTipRequest) returns (GetChainTipResponse) {} + // Represents subscription to the control plane rpc Subscribe(stream ControlCommand) returns 
(stream ControlResponse) {} rpc Version(VersionRequest) returns (VersionResponse) {} } +message GetChainTipRequest { + bytes blockchain_id = 1; +} + +message GetChainTipResponse { + bytes blockchain_id = 1; + map shard_to_max_offset_map = 2; +} + message VersionRequest {} message VersionResponse { @@ -51,6 +62,7 @@ message ConsumerGroupInfo { string id = 1; string consumer_group_name = 2; bool is_stale = 3; + bytes blockchain_id = 4; } message GetSlotLagInfoRequest { From 3ef7d18c7c49de6e92fc43547c02cbc845a53bcf Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Tue, 8 Jul 2025 16:17:34 -0400 Subject: [PATCH 38/56] Bump cargo version --- Cargo.lock | 4 +-- Cargo.toml | 2 +- apps/yellowstone-fumarole-cli/Cargo.toml | 2 +- apps/yellowstone-fumarole-cli/README.md | 26 ++++++++++++++++++- apps/yellowstone-fumarole-cli/src/lib.rs | 17 ++++++++++++ crates/yellowstone-fumarole-client/Cargo.toml | 2 +- 6 files changed, 47 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a32256e..251a3a0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5282,7 +5282,7 @@ checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" [[package]] name = "yellowstone-fumarole-cli" -version = "0.1.0-pre.2+solana.2.1" +version = "0.1.0-pre.3+solana.2.1" dependencies = [ "clap", "clap-verbosity-flag", @@ -5307,7 +5307,7 @@ dependencies = [ [[package]] name = "yellowstone-fumarole-client" -version = "0.2.0-pre.2+solana.2.1" +version = "0.2.0-pre.3+solana.2.1" dependencies = [ "async-trait", "futures", diff --git a/Cargo.toml b/Cargo.toml index 4c5f068..b9ea7c1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -45,7 +45,7 @@ tower = "~0.5.2" tracing = "~0.1.41" tracing-subscriber = { version = "~0.3.18", features = ["env-filter"] } uuid = { version = "1" } -yellowstone-fumarole-client = { path = "crates/yellowstone-fumarole-client", version = "0.2.0-pre.2+solana.2.1" } +yellowstone-fumarole-client = { path = "crates/yellowstone-fumarole-client", version = 
"0.2.0-pre.3+solana.2.1" } yellowstone-grpc-client = "5" yellowstone-grpc-proto = "5" diff --git a/apps/yellowstone-fumarole-cli/Cargo.toml b/apps/yellowstone-fumarole-cli/Cargo.toml index 68e6f34..8563baf 100644 --- a/apps/yellowstone-fumarole-cli/Cargo.toml +++ b/apps/yellowstone-fumarole-cli/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "yellowstone-fumarole-cli" description = "Yellowstone Fumarole CLI" -version = "0.1.0-pre.2+solana.2.1" +version = "0.1.0-pre.3+solana.2.1" authors.workspace = true edition.workspace = true homepage.workspace = true diff --git a/apps/yellowstone-fumarole-cli/README.md b/apps/yellowstone-fumarole-cli/README.md index 782fec6..819735f 100644 --- a/apps/yellowstone-fumarole-cli/README.md +++ b/apps/yellowstone-fumarole-cli/README.md @@ -56,6 +56,13 @@ By default, if you don't provide `--config`, fumarole CLI will use the value at otherwise fallback to `~/.fumarole/config.yaml`. +```sh +export FUMAROLE_CONFIG=path/to/config.toml +fume test-config +Successfully connected to Fumarole Service +``` + + ## Create a Persistent Subscriber ```sh @@ -129,4 +136,21 @@ fume -- subscribe --name test1 \ The above command stream all data required by [DAS](https://github.com/rpcpool/digital-asset-validator-plugin). -**Note**: This command serves more as a testing tool/playground for you to try it out as it only prints summarized data. \ No newline at end of file +**Note**: This command serves more as a testing tool/playground for you to try it out as it only prints summarized data. + + +### Enabling Prometheus metrics + +When subscribing, you can enable prometheus metrics and bind to a port to view fumarole related metrics into HTML format. + +```sh +fume subscribe --name test1 --prometheus 0 +``` + +Using `--prometheus 0` this will bind to a random port on `127.0.0.1`. 
+ +You can specify the address like this: + +```sh +fume subscribe --name test1 --prometheus 127.0.0.1:9999 +``` \ No newline at end of file diff --git a/apps/yellowstone-fumarole-cli/src/lib.rs b/apps/yellowstone-fumarole-cli/src/lib.rs index 90576bf..1a9413f 100644 --- a/apps/yellowstone-fumarole-cli/src/lib.rs +++ b/apps/yellowstone-fumarole-cli/src/lib.rs @@ -1 +1,18 @@ +//! Yellowstone Fumarole CLI +//! This crate provides a command-line interface for interacting with the Yellowstone Fumarole client. +//! It allows you to interact with Fumarole control-plane and manage your subscription resources. +//! +//! # Configuration +//! +//! ```yaml +//! x_token: +//! endpoint: +//! ``` +//! +//! # Example usage: +//! +//! See the [`README`] for detailed usage instructions. +//! +//! [`README`]: https://github.com/rpcpool/yellowstone-fumarole/blob/main/apps/yellowstone-fumarole-cli/README.md +//! pub mod prom; diff --git a/crates/yellowstone-fumarole-client/Cargo.toml b/crates/yellowstone-fumarole-client/Cargo.toml index 55c5c09..21a028b 100644 --- a/crates/yellowstone-fumarole-client/Cargo.toml +++ b/crates/yellowstone-fumarole-client/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "yellowstone-fumarole-client" description = "Yellowstone Fumarole Client" -version = "0.2.0-pre.2+solana.2.1" +version = "0.2.0-pre.3+solana.2.1" authors = { workspace = true } edition = { workspace = true } homepage = { workspace = true } From dd4f2858cfdc7251997f8fc7f34e6131f4b46547 Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Tue, 8 Jul 2025 17:13:29 -0400 Subject: [PATCH 39/56] removed deprecated fume python cli --- fume/CHANGELOG.md | 31 -- fume/README.md | 213 -------- fume/exemple_config.toml | 4 - fume/fume/__init__.py | 0 fume/fume/cli/__init__.py | 0 fume/fume/cli/app.py | 148 ------ fume/fume/cli/cg_commands.py | 221 -------- fume/fume/cli/stream_commands.py | 212 -------- fume/fume/grpc.py | 339 ------------ fume/poetry.lock | 455 ---------------- fume/pyproject.toml | 38 -- 
fume/scripts/build.sh | 36 -- fume/tests/__init__.py | 0 fume/tests/test_fumarole_client.py | 77 --- fume/yellowstone_api/__init__.py | 0 fume/yellowstone_api/fumarole_pb2.py | 82 --- fume/yellowstone_api/fumarole_pb2.pyi | 198 ------- fume/yellowstone_api/fumarole_pb2_grpc.py | 356 ------------- fume/yellowstone_api/geyser_pb2.py | 144 ----- fume/yellowstone_api/geyser_pb2.pyi | 501 ------------------ fume/yellowstone_api/geyser_pb2_grpc.py | 355 ------------- fume/yellowstone_api/solana_storage_pb2.py | 75 --- fume/yellowstone_api/solana_storage_pb2.pyi | 238 --------- .../solana_storage_pb2_grpc.py | 24 - .../pyproject.toml | 2 +- 25 files changed, 1 insertion(+), 3748 deletions(-) delete mode 100644 fume/CHANGELOG.md delete mode 100644 fume/README.md delete mode 100644 fume/exemple_config.toml delete mode 100644 fume/fume/__init__.py delete mode 100644 fume/fume/cli/__init__.py delete mode 100644 fume/fume/cli/app.py delete mode 100644 fume/fume/cli/cg_commands.py delete mode 100644 fume/fume/cli/stream_commands.py delete mode 100644 fume/fume/grpc.py delete mode 100644 fume/poetry.lock delete mode 100644 fume/pyproject.toml delete mode 100755 fume/scripts/build.sh delete mode 100644 fume/tests/__init__.py delete mode 100644 fume/tests/test_fumarole_client.py delete mode 100644 fume/yellowstone_api/__init__.py delete mode 100644 fume/yellowstone_api/fumarole_pb2.py delete mode 100644 fume/yellowstone_api/fumarole_pb2.pyi delete mode 100644 fume/yellowstone_api/fumarole_pb2_grpc.py delete mode 100644 fume/yellowstone_api/geyser_pb2.py delete mode 100644 fume/yellowstone_api/geyser_pb2.pyi delete mode 100644 fume/yellowstone_api/geyser_pb2_grpc.py delete mode 100644 fume/yellowstone_api/solana_storage_pb2.py delete mode 100644 fume/yellowstone_api/solana_storage_pb2.pyi delete mode 100644 fume/yellowstone_api/solana_storage_pb2_grpc.py diff --git a/fume/CHANGELOG.md b/fume/CHANGELOG.md deleted file mode 100644 index 5c6e961..0000000 --- a/fume/CHANGELOG.md +++ 
/dev/null @@ -1,31 +0,0 @@ -# Changelog - -All notable changes to this project will be documented in this file. - -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), -and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). - -**Note:** Version 0 of Semantic Versioning is handled differently from version 1 and above. -The minor version will be incremented upon a breaking change and the patch version will be incremented for features. - -## [Unreleased] - -### Breaking Changes - -### Features - -### Fixes - -## [0.2.0] - -### Fixes - -- Fixed blocking Ctlr+C where subprocess would block on `next(subscriber_stream)` until it got timeout by gRPC. Now Ctrl+C gets handled via a `sigint_handler` to interrupt gRPC stream. - -### Features - -- Enable gRPC GZIP compression by supports via `toml` configuration `compression` which can either be `gzip` or `none` (default). - -## [0.1.0] - -Initial release \ No newline at end of file diff --git a/fume/README.md b/fume/README.md deleted file mode 100644 index 3511e96..0000000 --- a/fume/README.md +++ /dev/null @@ -1,213 +0,0 @@ - -# Yellowstone-Fume - -Fumarole CLI tool - -## Install - - -```sh -$ pip install triton-fume -``` - -## Usage - -### Configuration file - -Fumarole CLI look for a file in `~/.config/fume/config.toml` by default, you can change the path location by using `fume --config `. 
- -Here's how to configure your config file: - -```toml -[fumarole] -endpoints = ["https://fumarole.endpoint.rpcpool.com"] -x-token = "" -``` - -You can test your configuration file with `test-config` subcommand: - -```sh -$ fume test-config -``` - -or with custom config path: - -```sh -$ fume --config path/to/config.toml test-config -``` - -### Create consumer group - -To create a consumer group that at the end of the log, that stream only "confirmed" commitment level transaction: - -```sh -$ fume create-cg --name helloworld-1 \ ---commitment confirmed \ ---seek latest \ ---include tx -``` - -To do the same but for account update - -```sh -$ fume create-cg --name helloworld-2 \ ---commitment confirmed \ ---seek latest \ ---include account -``` - -More usage can be find using the `--help` options: - -```sh -$ fume create-cg --help -Creates a consumer group - -Options: - --name TEXT Consumer group name to subscribe to, if none - provided a random name will be generated - following the pattern - 'fume-'. - --size INTEGER Size of the consumer group - --commitment [processed|confirmed|finalized] - Commitment level [default: confirmed] - --include [all|account|tx] Include option [default: all] - --seek [earliest|latest|slot] Seek option [default: latest] - --help Show this message and exit. -``` - -### Consumer Group Staleness - -Consumer groups can become stale if you are ingesting too slowly. - -Fumarole is a distributed log of blockchain event where each new blockchain event gets appended to. - -As Solana emits a lot of event in one hour, we cannot keep every blockchain event forever. - -Fumarole evicts fragment of the log as they age old enough. - -Depending of the Fumarole cluster you are connected to this time may vary. Connect Triton-One team to learn more. - -When creating a Consumer Group, you must ingest what you are capable of. Otherwise your consumer group is destined to become stale. 
- -A stale consumer group is a consumer group that haven't yet ingested blockchain event that had already been evicted by Fumarole vacuum process. - - -### Consumer Group Size and performance guideline - -Consumer group size allow you to shard a fumarole stream into multiple consumer group member. -Sharded consumer group follow similar semantics as [Kafka Static Consumer membership](https://cwiki.apache.org/confluence/display/KAFKA/KIP-345%3A+Introduce+static+membership+protocol+to+reduce+consumer+rebalances). - -Here's a quick-recap of static group membership: - -- The Fumarole log is already sharded in multiple partitions. -- When you create a consumer group with `--size N`, it creates `N` member with each `# total fumarole partition / N` partitions. -- Each member of the cnsumer group advance **at its own pace**. -- Your consumer group becomes stale as soon as **one membership is stale**. - - -As of this writing the maximum size of a consumer group is `6`. - -Each member can have their own dedicated TCP connection which offer better performance. - -The processing you do in reception, your internet speed, network bandwidth and location will impact the size of the consumer group. - -Ingesting everything Fumarole can output requires you to be in the same region as your assigned Fumarole cluster and multiple Gbits for internet Bandwidth, otherwise you will fall behind and become stale. - -Limit your subscription feed by using the various filters over the accounts and transactions we offer. - -As for the consumer group size goes, starting with a size of `1` is the simplest approach. - -If you are falling behind because your receiving code adds too much processing overhead, you can try -`2`, `3` and so forth. - -Fumarole is already redundant and load balanced inside our Data centers, increasing `--size` does not inherently add more redundancy. It is a tool for you to scale your read operation in case on instance is not sufficient. 
- -To create a consumer group with `2` members you just have to provided `--size` options: - -```sh -$ fume create-cg --name example --size 2 -``` - -### List all consumer groups - -```sh -$ fume list-cg -``` - -### Delete a consumer groups - -```sh -$ fume delete-cg --name helloworld -``` - -### Delete all consumer groups - -```sh -$ fume delete-all-cg -``` - -### Stream summary on terminal - -To stream out from the CLI, you can use the `stream` command and its various features! - -```sh -$ fume stream --name helloworld -``` - -You can filter the stream content by adding one or multiple occurrence of the following options: - -- `--tx-account ` : filter transaction by account keys. -- `--owner ` : filter account update based on its owner -- `--account ` : filter account update based on accout key. - -Here is an example to get all account updates owned by Token SPL program: - -```sh -$ fume stream --name helloworld \ ---owner TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA -``` - -Here is how to chain multiple filters together: - -```sh -$ fume stream --name helloworld \ ---owner metaqbxxUerdq28cj1RbAWkYQm3ybzjb6a8bt518x1s \ ---owner TokenzQdBNbLqP5VEhdkAS6EPFLC1PHnBqCXEpPxuEb \ ---owner TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA \ ---owner ATokenGPvbdGVxr1b2hvZbsiqW5xWH25efTNsLJA8knL \ ---owner BGUMAp9Gq7iTEuizy4pqaxsTyUCBK68MDfK752saRPUY \ ---owner CoREENxT6tW1HoK8ypY1SxRMZTcVPm7R94rH4PZNhX7d \ ---tx-account BGUMAp9Gq7iTEuizy4pqaxsTyUCBK68MDfK752saRPUY -``` - -The above command stream all data required by [DAS](https://github.com/rpcpool/digital-asset-validator-plugin). - -**Note**: This command serves more as a testing tool/playground for you to try it out as it only prints summarized data. 
- -## Development mode - -First git clone: - -```sh -git clone --recursive https://github.com/rpcpool/triton-fume.git -``` - -Initialize poetry project: - -```sh -$ poetry init -``` - -Install in dev mode: - -```sh -poetry install -``` - -Test in fume CLI works by printing its version: - -```sh -$ poetry run fume version -0.1.0 -``` - diff --git a/fume/exemple_config.toml b/fume/exemple_config.toml deleted file mode 100644 index b1251d9..0000000 --- a/fume/exemple_config.toml +++ /dev/null @@ -1,4 +0,0 @@ -[fumarole] -endpoints = ["localhost:9090"] -# x-token = "00000000-00000000-00000000-00000000" -x-subscription-id = "11111111-11111111-11111111-11111111" diff --git a/fume/fume/__init__.py b/fume/fume/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/fume/fume/cli/__init__.py b/fume/fume/cli/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/fume/fume/cli/app.py b/fume/fume/cli/app.py deleted file mode 100644 index 1cfc481..0000000 --- a/fume/fume/cli/app.py +++ /dev/null @@ -1,148 +0,0 @@ -import click -import importlib.metadata -import sys -import toml -import grpc -import os -from pathlib import Path - -from fume.cli.stream_commands import stream -from fume.cli.cg_commands import create_cg, list_cg, delete_cg, delete_all_cg, get_cg -from fume.grpc import FumaroleClient, grpc_channel - - -def parse_key_value(ctx, param, value): - # Parse the input as key:value pairs into a dictionary - pairs = [] - for item in value: - key, val = item.split(":", 1) # split only on the first colon - pairs.append((key, val)) - return pairs - - -def default_fume_config_path(): - default = Path.home() / ".config" / "fume" / "config.toml" - return os.environ.get("FUME_CONFIG", default) - - -@click.group() -@click.option( - "--config", - help="Path to configuration file", - type=click.Path(exists=False, readable=True), - show_default=True, - default=default_fume_config_path, -) -@click.option( - "--endpoints", help="Comma separated list of 
endpoints to fumarole", type=str -) -@click.option( - "--x-token", help="Access token for authentication", type=str, required=False -) -@click.option( - "-X", - "--x-header", - help="Metadata key value pairs", - type=str, - multiple=True, - required=False, - callback=parse_key_value, -) -@click.pass_context -def cli(ctx, config, endpoints, x_token, x_header): - if ctx.invoked_subcommand == "version": - return - try: - with open(config) as f: - config = toml.load(f) - ctx.obj = {"conn": config["fumarole"]} - metadata = [ - (k, v) - for k, v in config.get("fumarole", {}).items() - if k.startswith("x-") and k != "x-token" - ] - ctx.obj["conn"]["grpc-metadata"] = metadata - except FileNotFoundError: - ctx.obj = {"conn": {"grpc-metadata": []}} - print(f"Warning: Configuration file not found {config}", file=sys.stderr) - - conn = ctx.obj.get("conn", {}) - if endpoints: - conn["endpoints"] = endpoints.split(",") - - if x_token: - conn["x-token"] = x_token - - for x_header_key, x_header_value in x_header: - conn["grpc-metadata"].append((x_header_key, x_header_value)) - - if conn.get("compression") == "gzip": - conn["compression"] = grpc.Compression.Gzip - elif conn.get("compression") == "none": - conn["compression"] = grpc.Compression.NoCompression - elif conn.get("compression") is None: - conn["compression"] = grpc.Compression.NoCompression - else: - click.echo( - 'Error: Invalid compression type, supports "gzip" or "none".', err=True - ) - sys.exit(1) - - if not conn.get("x-token"): - click.echo("Warning: No access token provided", err=True) - - if not conn.get("endpoints"): - click.echo("Error: No endpoints provided", err=True) - sys.exit(1) - - return - - -@cli.command() -def version(): - print(f"{importlib.metadata.version('yellowstone-fume')}") - - -@cli.command(help="Test configuration file") -@click.option( - "--connect/--no-connect", - help="Connect to fumarole endpoints in configuration file", - default=True, -) -@click.pass_context -def test_config(ctx, 
connect): - conn = ctx.obj.get("conn") - endpoints = conn.get("endpoints") - x_token = conn.get("x-token") - metdata = conn.get("grpc-metadata") - - if not connect: - return - - if not endpoints and connect: - click.echo("Error: No endpoints provided -- can't ", err=True) - sys.exit(1) - - for e in endpoints: - with grpc_channel(e, x_token) as c: - fc = FumaroleClient(c, metadata=metdata) - fc.list_available_commitments() - click.echo(f"Sucessfully connected to {e}") - - click.echo("Configuration file is valid") - - -cli.add_command(stream) -cli.add_command(create_cg) -cli.add_command(list_cg) -cli.add_command(delete_cg) -cli.add_command(delete_all_cg) -cli.add_command(get_cg) - - -def main(): - cli() - - -if __name__ == "__main__": - main() diff --git a/fume/fume/cli/cg_commands.py b/fume/fume/cli/cg_commands.py deleted file mode 100644 index 9622554..0000000 --- a/fume/fume/cli/cg_commands.py +++ /dev/null @@ -1,221 +0,0 @@ -import random -import string -import click -from fume.grpc import FumaroleClient, grpc_channel -from tabulate import tabulate -import yellowstone_api.fumarole_pb2 as fumarole_p2b -from yellowstone_api.fumarole_pb2 import ConsumerGroupInfo - - -def generate_random_cg_name(): - """Generate a random consumer group name.""" - random_suffix = "".join(random.choices(string.ascii_lowercase, k=6)) - return f"fume-{random_suffix}" - - -def cg_info_into_text_row(cg: ConsumerGroupInfo) -> list[str]: - if cg.commitment_level == fumarole_p2b.PROCESSED: - cl = "processed" - elif cg.commitment_level == fumarole_p2b.CONFIRMED: - cl = "confirmed" - elif cg.commitment_level == fumarole_p2b.FINALIZED: - cl = "finalized" - else: - cl = "???" - - if cg.event_subscription_policy == fumarole_p2b.ACCOUNT_UPDATE_ONLY: - sub_policy = "account" - elif cg.event_subscription_policy == fumarole_p2b.TRANSACTION_ONLY: - sub_policy = "tx" - elif cg.event_subscription_policy == fumarole_p2b.BOTH: - sub_policy = "account|tx" - else: - sub_policy = "???" 
- - row = [cg.id, cg.consumer_group_label, cg.member_count, cl, sub_policy, cg.is_stale] - return row - - -@click.command() -@click.option( - "--name", - help="""Consumer group name to subscribe to, if none provided a random name will be generated following the pattern 'fume-'.""", - type=str, - default=generate_random_cg_name, -) -@click.option( - "--size", - help="Size of the consumer group", - type=int, - default=1, -) -@click.option( - "--commitment", - help="Commitment level", - type=click.Choice(["processed", "confirmed", "finalized"]), - default="confirmed", - show_default=True, - required=False, -) -@click.option( - "--include", - help="Include option", - type=click.Choice(["all", "account", "tx"]), - default="all", - show_default=True, - required=False, -) -@click.option( - "--seek", - help="Seek option", - type=click.Choice(["earliest", "latest", "slot"]), - default="latest", - show_default=True, - required=False, -) -@click.pass_context -def create_cg( - ctx, - name, - size, - commitment, - include, - seek, -): - """Creates a consumer group""" - conn = ctx.obj["conn"] - endpoints = conn["endpoints"] - x_token = conn.get("x-token") - metadata = conn.get("grpc-metadata") - compression = conn.get("compression") - - with grpc_channel(endpoints[0], x_token, compression=compression) as c: - fc = FumaroleClient(c, metadata=metadata) - - name = fc.create_consumer_group( - name=name, - size=size, - include=include, - initial_seek=seek, - commitment=commitment, - ) - - click.echo(f"Consumer group created: {name}") - - -@click.command() -@click.pass_context -def list_cg( - ctx, -): - """List active consumer groups""" - conn = ctx.obj["conn"] - endpoints = conn["endpoints"] - x_token = conn.get("x-token") - metadata = conn.get("grpc-metadata") - compression = conn.get("compression") - - with grpc_channel(endpoints[0], x_token, compression=compression) as c: - fc = FumaroleClient(c, metadata=metadata) - - cs = fc.list_consumer_groups() - - data = [["Id", "Name", 
"Size", "Commitment", "Subscriptions Policy", "Stale?"]] - - if not cs: - click.echo("You have no consumer groups") - else: - - for cg in cs: - row = cg_info_into_text_row(cg) - data.append(row) - - table = tabulate(data, headers="firstrow", tablefmt="grid") - click.echo(table) - - -@click.command() -@click.option( - "--name", - help="""Consumer group name to subscribe to, if none provided a random name will be generated following the pattern 'fume-'.""", - type=str, - default=generate_random_cg_name, -) -@click.pass_context -def delete_cg(ctx, name): - """Delete a consumer group""" - conn = ctx.obj["conn"] - endpoints = conn["endpoints"] - x_token = conn.get("x-token") - metadata = conn.get("grpc-metadata") - compression = conn.get("compression") - - with grpc_channel(endpoints[0], x_token, compression=compression) as c: - fc = FumaroleClient(c, metadata=metadata) - if click.confirm(f"Are you sure you want to delete consumer group {name}?"): - is_deleted = fc.delete_consumer_group(name=name) - if is_deleted: - click.echo(f"Consumer group {name} deleted!") - else: - click.echo(f"Consumer group {name} not found!") - - click.echo("Done") - - -@click.command() -@click.pass_context -def delete_all_cg(ctx): - """Deletes all consumer groups for current subscription""" - conn = ctx.obj["conn"] - endpoints = conn["endpoints"] - x_token = conn.get("x-token") - metadata = conn.get("grpc-metadata") - compression = conn.get("compression") - - with grpc_channel(endpoints[0], x_token, compression=compression) as c: - fc = FumaroleClient(c, metadata=metadata) - - cs = fc.list_consumer_groups() - - if not cs: - click.echo("You have no consumer groups to delete!") - return - - for cg in cs: - click.echo(f"{cg.consumer_group_label}") - - if click.confirm( - f"This operation will delete {len(cs)} consumer groups. Are you sure you want to proceed?" 
- ): - for cg in cs: - fc.delete_consumer_group(name=cg.consumer_group_label) - click.echo(f"Consumer group {cg.consumer_group_label} deleted!") - - -@click.command() -@click.option( - "--name", help="""Get Consumer group info by name""", type=str, required=True -) -@click.pass_context -def get_cg(ctx, name): - conn = ctx.obj["conn"] - endpoints = conn["endpoints"] - x_token = conn.get("x-token") - metadata = conn.get("grpc-metadata") - compression = conn.get("compression") - - with grpc_channel(endpoints[0], x_token, compression=compression) as c: - fc = FumaroleClient(c, metadata=metadata) - - cg = fc.get_cg_info(name) - if cg: - - data = [ - ["Id", "Name", "Size", "Commitment", "Subscriptions Policy", "Stale?"] - ] - row = cg_info_into_text_row(cg) - data.append(row) - table = tabulate(data, headers="firstrow", tablefmt="grid") - click.echo(table) - else: - click.echo(f"Consumer group {name} not found", err=True) diff --git a/fume/fume/cli/stream_commands.py b/fume/fume/cli/stream_commands.py deleted file mode 100644 index 9320570..0000000 --- a/fume/fume/cli/stream_commands.py +++ /dev/null @@ -1,212 +0,0 @@ -from dataclasses import dataclass -import multiprocessing -import multiprocessing.queues -import queue -import random -import signal -import string -from typing import Optional -import click -import grpc -from fume.grpc import ( - FumaroleClient, - grpc_channel, - SubscribeFilterBuilder, -) - - -def generate_random_cg_name(): - """Generate a random consumer group name.""" - random_suffix = "".join(random.choices(string.ascii_lowercase, k=6)) - return f"fume-{random_suffix}" - - -@dataclass -class StopFumaroleStream: - pass - - -@dataclass -class FumaroleStreamData: - data: any - - -@dataclass -class FumaroleStreamEnd: - pid: int - - -@click.command() -@click.option( - "--cg-name", - help="""Consumer group name to subscribe to, if none provided a random name will be generated following the pattern 'fume-'.""", - type=str, - default=generate_random_cg_name, 
-) -@click.option( - "-p", - "--parallel", - help="Number of parallel consumers, the number cannot be greater than the size of group", - type=int, - default=1, -) -@click.option( - "--tx-account", - help=""" - Filter transaction whose account keys include the provided value in base58 format. - You can provide multiple values by using the option multiple times. - """, - type=str, - multiple=True, - required=False, - show_default=True, -) -@click.option( - "--account", - help=""" - Filter Account update whose account keys include the provided value in base58 format. - """, - type=str, - multiple=True, - required=False, - show_default=True, -) -@click.option( - "--owner", - help=""" - Filter Account update whose account owner match the provided value in base58 format. - You can provide multiple values by using the option multiple times. - """, - type=str, - multiple=True, - required=False, - show_default=True, -) -@click.option( - "-o", - "--output-format", - help="Output format", - type=click.Choice(["json", "summ"]), - default="summ", -) -@click.pass_context -def stream(ctx, cg_name, parallel, tx_account, account, owner, output_format): - """Stream JSON data from Fumarole.""" - conn = ctx.obj["conn"] - endpoints = conn["endpoints"] - x_token = conn.get("x-token") - metadata = conn.get("grpc-metadata") - compression = conn.get("compression") - - subscribe_filter = ( - SubscribeFilterBuilder() - .with_tx_includes(list(tx_account)) - .with_accounts(list(account)) - .with_owners(list(owner)) - .build() - ) - - def fumarole_stream_proc( - cnc_rx: multiprocessing.Queue, # command-and-control queue - data_tx: multiprocessing.Queue, - cg_name: str, - member_idx: int, - endpoint: str, - x_token: Optional[str], - ): - def sigint_handler(signum, frame): - data_tx.put(FumaroleStreamEnd(pid=my_pid)) - exit(0) - - signal.signal(signal.SIGINT, sigint_handler) # Ignore Ctrl+C in subprocess - - with grpc_channel(endpoint, x_token, compression=compression) as channel: - fc = 
FumaroleClient(channel, metadata=metadata) - - my_pid = multiprocessing.current_process().pid - subscribe_iter = fc.subscribe( - cg_name, member_idx, mapper=output_format, **subscribe_filter - ) - for event in subscribe_iter: - data_tx.put(FumaroleStreamData(data=event)) - try: - command = cnc_rx.get_nowait() - match command: - case StopFumaroleStream(): - break - except queue.Empty: - pass - - channel.close() - # Flush any remaining data - for event in subscribe_iter: - data_tx.put(event) - - data_tx.put(FumaroleStreamEnd(pid=my_pid)) - - data_tx = multiprocessing.Queue() - data_rx = data_tx - fumarole_ps: dict[multiprocessing.Process, int] = dict() - fumarole_cnc_tx_vec = [] - fumarole_stream_id_vec = set() - for i in range(parallel): - j = i % len(endpoints) - endpoint = endpoints[j] - print(f"Spawned fumarole connection: {i}...") - - cnc_tx = multiprocessing.Queue() - cnc_rx = cnc_tx - fumarole: multiprocessing.Process = multiprocessing.Process( - target=fumarole_stream_proc, - args=(cnc_rx, data_tx, cg_name, i, endpoint, x_token), - ) - - fumarole.start() - click.echo( - f"Started fumarole connection: {i} with pid={fumarole.pid}!", err=True - ) - fumarole_ps[fumarole.pid] = fumarole - fumarole_cnc_tx_vec.append(cnc_tx) - fumarole_stream_id_vec.add(fumarole.pid) - - while True: - try: - if not all(p.is_alive() for p in fumarole_ps.values()): - break - match data_rx.get(timeout=1): - case FumaroleStreamData(data): - click.echo(data) - case FumaroleStreamEnd(pid): - fumarole_stream_id_vec.remove(pid) - click.echo(f"Connection {pid} ended!", err=True) - break - except KeyboardInterrupt: - break - except EOFError: - break - except queue.Empty: - pass - - for cnc_tx in fumarole_cnc_tx_vec: - cnc_tx.put(StopFumaroleStream()) - - # Drain any leftover data in case some is left in the queue. 
- while True: - try: - match data_rx.get(timeout=1): - case FumaroleStreamData(data): - click.echo(data) - case FumaroleStreamEnd(pid): - fumarole_stream_id_vec.remove(pid) - fumarole_proc = fumarole_ps.pop(pid) - fumarole_proc.terminate() - fumarole_proc.join() - except queue.Empty: - pass - - if not any(p.is_alive() for p in fumarole_ps.values()): - break - - for _, fumarole_proc in fumarole_ps.items(): - fumarole_proc.terminate() - fumarole_proc.join() diff --git a/fume/fume/grpc.py b/fume/fume/grpc.py deleted file mode 100644 index 7a9a56b..0000000 --- a/fume/fume/grpc.py +++ /dev/null @@ -1,339 +0,0 @@ -from click import Tuple -import grpc -import sys -from typing import Callable, Literal, Optional, Union -from collections.abc import Generator -from yellowstone_api.fumarole_pb2_grpc import FumaroleStub -import yellowstone_api.fumarole_pb2 as fumarole_pb2 -import yellowstone_api.geyser_pb2 as geyser_pb2 -import base58 - - -def _triton_sign_request( - callback: grpc.AuthMetadataPluginCallback, - x_token: Optional[str], - error: Optional[Exception], -): - # WARNING: metadata is a 1d-tuple (,), the last comma is necessary - metadata = (("x-token", x_token),) - return callback(metadata, error) - - -class TritonAuthMetadataPlugin(grpc.AuthMetadataPlugin): - """Metadata wrapper for raw access token credentials.""" - - def __init__(self, x_token: str): - self.x_token = x_token - - def __call__( - self, - context: grpc.AuthMetadataContext, - callback: grpc.AuthMetadataPluginCallback, - ): - return _triton_sign_request(callback, self.x_token, None) - - -def grpc_channel(endpoint: str, x_token=None, compression=None, *grpc_options): - options = [("grpc.max_receive_message_length", 111111110), *grpc_options] - if x_token is not None: - auth = TritonAuthMetadataPlugin(x_token) - # ssl_creds allow you to use our https endpoint - # grpc.ssl_channel_credentials with no arguments will look through your CA trust store. 
- ssl_creds = grpc.ssl_channel_credentials() - - # call credentials will be sent on each request if setup with composite_channel_credentials. - call_creds: grpc.CallCredentials = grpc.metadata_call_credentials(auth) - - # Combined creds will store the channel creds aswell as the call credentials - combined_creds = grpc.composite_channel_credentials(ssl_creds, call_creds) - - return grpc.secure_channel( - endpoint, - credentials=combined_creds, - compression=compression, - options=options, - ) - else: - return grpc.insecure_channel(endpoint, compression=compression, options=options) - - -InitialSeek = Union[Literal["earliest"], Literal["latest"], Literal["slot"]] - -IncludeOption = Union[Literal["all"], Literal["account"], Literal["tx"]] - -CommitmentOption = Union[ - Literal["processed"], Literal["confirmed"], Literal["finalized"] -] - - -def account_update_to_dict(account_update: geyser_pb2.SubscribeUpdateAccount) -> dict: - pass - - -def tx_to_dict(tx_update: geyser_pb2.SubscribeUpdateTransaction) -> dict: - pass - - -def subscribe_update_to_dict(obj, acc=None): - if acc is None: - acc = {} - - if hasattr(obj, "DESCRIPTOR"): - for field in obj.DESCRIPTOR.fields: - name = field.name - value = getattr(obj, name) - - if isinstance(value, (int, float, bool, str, bytes, type(None))): - if isinstance(value, bytes): - value = base58.b58encode(value).decode("utf-8") - acc[name] = value - else: - acc[name] = value - elif isinstance(value, list): - acc[name] = [] - ref = acc[name] - - else: - - if hasattr(value, "DESCRIPTOR"): - acc[name] = {} - ref = acc[name] - subscribe_update_to_dict(value, ref) - else: - try: - it = iter(value) - ref = [] - acc[name] = ref - for v in value: - if isinstance( - v, (int, float, bool, str, bytes, type(None)) - ): - if isinstance(v, bytes): - v = base58.b58encode(v).decode("utf-8") - ref.append(v) - else: - ref.append(v) - else: - new_obj = {} - ref.append(new_obj) - subscribe_update_to_dict(v, new_obj) - except TypeError: - pass - return 
acc - - -def summarize_account_update(account_update: geyser_pb2.SubscribeUpdateAccount) -> str: - slot = account_update.slot - pubkey = base58.b58encode(account_update.account.pubkey).decode("utf-8") - owner = base58.b58encode(account_update.account.owner).decode("utf-8") - account_info: geyser_pb2.SubscribeUpdateAccountInfo = account_update.account - if account_info.txn_signature: - txn_signature = base58.b58encode(account_info.txn_signature).decode("utf-8") - else: - txn_signature = None - size = account_info.ByteSize() - return ( - f"account,{slot},owner={owner},pubkey={pubkey},tx={txn_signature},size={size}" - ) - - -def summarize_tx_update(tx: geyser_pb2.SubscribeUpdateTransaction) -> str: - slot = tx.slot - tx_info = tx.transaction - tx_id = base58.b58encode(tx_info.signature).decode("utf-8") - return f"tx,{slot},{tx_id}" - - -def subscribe_update_to_summarize(subscribe_update: geyser_pb2.SubscribeUpdate) -> str: - if subscribe_update.HasField("account"): - account_update = getattr(subscribe_update, "account") - return summarize_account_update(account_update) - elif subscribe_update.HasField("transaction"): - tx_update = getattr(subscribe_update, "transaction") - return summarize_tx_update(tx_update) - else: - return None - - -class SubscribeFilterBuilder: - def __init__(self): - self.accounts = None - self.owners = None - self.tx_includes = None - self.tx_excludes = None - self.tx_requires = None - self.tx_fail = None - self.tx_vote = None - - def __getattr__(self, name): - if not name.startswith("with_"): - raise AttributeError(f"Attribute {name} not found") - - attr = name.split("with_")[1] - - def setter(value): - setattr(self, attr, value) - return self - - return setter - - def include_vote_tx(self): - self.tx_vote = None - return self - - def include_fail_tx(self): - self.tx_fail = None - return self - - def no_fail_tx(self): - self.tx_fail = False - return self - - def no_vote_tx(self, tx): - self.tx_vote = False - return self - - def build(self): - 
return { - "accounts": { - "default": geyser_pb2.SubscribeRequestFilterAccounts( - account=self.accounts, owner=self.owners - ) - }, - "transactions": { - "default": geyser_pb2.SubscribeRequestFilterTransactions( - vote=self.tx_vote, - failed=self.tx_fail, - account_required=self.tx_requires, - account_include=self.tx_includes, - account_exclude=self.tx_excludes, - ) - }, - } - - -class FumaroleClient: - - def __init__(self, channel, metadata: Optional[list[tuple[str, str]]] = None): - self.stub = FumaroleStub(channel) - self.metadata = metadata - - def list_available_commitments(self) -> list[str]: - resp = self.stub.ListAvailableCommitmentLevels( - fumarole_pb2.ListAvailableCommitmentLevelsRequest(), metadata=self.metadata - ) - - def delete_consumer_group(self, name: str) -> bool: - resp = self.stub.DeleteConsumerGroup( - fumarole_pb2.DeleteConsumerGroupRequest(consumer_group_label=name), - metadata=self.metadata, - ) - return resp.success - - def get_cg_info(self, name: str) -> Optional[fumarole_pb2.ConsumerGroupInfo]: - try: - resp = self.stub.GetConsumerGroupInfo( - fumarole_pb2.GetConsumerGroupInfoRequest(consumer_group_label=name), - metadata=self.metadata, - ) - return resp - except grpc.RpcError as e: - if e.code() == grpc.StatusCode.NOT_FOUND: - return None - else: - raise e - - def list_consumer_groups( - self, - ) -> list[fumarole_pb2.ConsumerGroupInfo]: - resp = self.stub.ListConsumerGroups( - fumarole_pb2.ListConsumerGroupsRequest(), metadata=self.metadata - ) - - return [cg for cg in resp.consumer_groups] - - def create_consumer_group( - self, - name: str, - size: int = 1, - include: IncludeOption = "all", - initial_seek: InitialSeek = "latest", - starting_slot: Optional[int] = None, - commitment: CommitmentOption = "confirmed", - ) -> str: - if initial_seek == "earliest": - initial_offset_policy = fumarole_pb2.InitialOffsetPolicy.EARLIEST - starting_slot = None - - if initial_seek == "latest": - initial_offset_policy = 
fumarole_pb2.InitialOffsetPolicy.LATEST - starting_slot = None - - if initial_seek == "slot": - initial_offset_policy = fumarole_pb2.InitialOffsetPolicy.SLOT - - if include == "account": - event_subscription_policy = ( - fumarole_pb2.EventSubscriptionPolicy.ACCOUNT_UPDATE_ONLY - ) - elif include == "tx": - event_subscription_policy = ( - fumarole_pb2.EventSubscriptionPolicy.TRANSACTION_ONLY - ) - else: - event_subscription_policy = fumarole_pb2.EventSubscriptionPolicy.BOTH - - if commitment == "processed": - commitment_level = geyser_pb2.CommitmentLevel.PROCESSED - elif commitment == "confirmed": - commitment_level = geyser_pb2.CommitmentLevel.CONFIRMED - else: - commitment_level = geyser_pb2.CommitmentLevel.FINALIZED - - resp = self.stub.CreateStaticConsumerGroup( - fumarole_pb2.CreateStaticConsumerGroupRequest( - consumer_group_label=name, - member_count=size, - initial_offset_policy=initial_offset_policy, - at_slot=starting_slot, - commitment_level=commitment_level, - event_subscription_policy=event_subscription_policy, - ), - metadata=self.metadata, - ) - - return name - - def subscribe( - self, - consumer_group_label: str, - member_idx: int = 0, - mapper: Union[ - Literal["json"], - Literal["summ"], - Callable[[geyser_pb2.SubscribeUpdate], any], - ] = "summ", - **subscribe_request_kwargs, - ) -> Generator[dict]: - sub_req = fumarole_pb2.SubscribeRequest( - consumer_group_label=consumer_group_label, - consumer_id=member_idx, - **subscribe_request_kwargs, - ) - stream = self.stub.Subscribe(iter([sub_req]), metadata=self.metadata) - if mapper == "json": - map_fn = subscribe_update_to_dict - elif mapper == "summ": - map_fn = subscribe_update_to_summarize - else: - map_fn = mapper - try: - for subscribe_update in stream: - result = map_fn(subscribe_update) - if result is not None: - yield result - except grpc.RpcError as e: - if e.code() == grpc.StatusCode.CANCELLED: - return - else: - raise e diff --git a/fume/poetry.lock b/fume/poetry.lock deleted file mode 
100644 index 3b6a71e..0000000 --- a/fume/poetry.lock +++ /dev/null @@ -1,455 +0,0 @@ -# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. - -[[package]] -name = "base58" -version = "2.1.1" -description = "Base58 and Base58Check implementation." -optional = false -python-versions = ">=3.5" -files = [ - {file = "base58-2.1.1-py3-none-any.whl", hash = "sha256:11a36f4d3ce51dfc1043f3218591ac4eb1ceb172919cebe05b52a5bcc8d245c2"}, - {file = "base58-2.1.1.tar.gz", hash = "sha256:c5d0cb3f5b6e81e8e35da5754388ddcc6d0d14b6c6a132cb93d69ed580a7278c"}, -] - -[package.extras] -tests = ["PyHamcrest (>=2.0.2)", "mypy", "pytest (>=4.6)", "pytest-benchmark", "pytest-cov", "pytest-flake8"] - -[[package]] -name = "black" -version = "24.10.0" -description = "The uncompromising code formatter." -optional = false -python-versions = ">=3.9" -files = [ - {file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"}, - {file = "black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"}, - {file = "black-24.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:649fff99a20bd06c6f727d2a27f401331dc0cc861fb69cde910fe95b01b5928f"}, - {file = "black-24.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe4d6476887de70546212c99ac9bd803d90b42fc4767f058a0baa895013fbb3e"}, - {file = "black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad"}, - {file = "black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50"}, - {file = "black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392"}, - {file = 
"black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175"}, - {file = "black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3"}, - {file = "black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65"}, - {file = "black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f"}, - {file = "black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8"}, - {file = "black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981"}, - {file = "black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b"}, - {file = "black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2"}, - {file = "black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b"}, - {file = "black-24.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:17374989640fbca88b6a448129cd1745c5eb8d9547b464f281b251dd00155ccd"}, - {file = "black-24.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:63f626344343083322233f175aaf372d326de8436f5928c042639a4afbbf1d3f"}, - {file = "black-24.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfa1d0cb6200857f1923b602f978386a3a2758a65b52e0950299ea014be6800"}, - {file = "black-24.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cd9c95431d94adc56600710f8813ee27eea544dd118d45896bb734e9d7a0dc7"}, - {file = 
"black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d"}, - {file = "black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.10)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -name = "click" -version = "8.1.7" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "exceptiongroup" -version = "1.2.2" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, - {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "grpcio" -version = "1.68.1" -description = "HTTP/2-based RPC framework" -optional = false -python-versions = ">=3.8" -files = [ - {file = "grpcio-1.68.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:d35740e3f45f60f3c37b1e6f2f4702c23867b9ce21c6410254c9c682237da68d"}, - {file = "grpcio-1.68.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:d99abcd61760ebb34bdff37e5a3ba333c5cc09feda8c1ad42547bea0416ada78"}, - {file = "grpcio-1.68.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:f8261fa2a5f679abeb2a0a93ad056d765cdca1c47745eda3f2d87f874ff4b8c9"}, - {file = "grpcio-1.68.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0feb02205a27caca128627bd1df4ee7212db051019a9afa76f4bb6a1a80ca95e"}, - {file = "grpcio-1.68.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:919d7f18f63bcad3a0f81146188e90274fde800a94e35d42ffe9eadf6a9a6330"}, - {file = "grpcio-1.68.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:963cc8d7d79b12c56008aabd8b457f400952dbea8997dd185f155e2f228db079"}, - {file = "grpcio-1.68.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:ccf2ebd2de2d6661e2520dae293298a3803a98ebfc099275f113ce1f6c2a80f1"}, - {file = "grpcio-1.68.1-cp310-cp310-win32.whl", hash = "sha256:2cc1fd04af8399971bcd4f43bd98c22d01029ea2e56e69c34daf2bf8470e47f5"}, - {file = "grpcio-1.68.1-cp310-cp310-win_amd64.whl", hash = "sha256:ee2e743e51cb964b4975de572aa8fb95b633f496f9fcb5e257893df3be854746"}, - {file = "grpcio-1.68.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:55857c71641064f01ff0541a1776bfe04a59db5558e82897d35a7793e525774c"}, - {file = "grpcio-1.68.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4b177f5547f1b995826ef529d2eef89cca2f830dd8b2c99ffd5fde4da734ba73"}, - {file = "grpcio-1.68.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:3522c77d7e6606d6665ec8d50e867f13f946a4e00c7df46768f1c85089eae515"}, - {file = "grpcio-1.68.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d1fae6bbf0816415b81db1e82fb3bf56f7857273c84dcbe68cbe046e58e1ccd"}, - {file = "grpcio-1.68.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:298ee7f80e26f9483f0b6f94cc0a046caf54400a11b644713bb5b3d8eb387600"}, - {file = "grpcio-1.68.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cbb5780e2e740b6b4f2d208e90453591036ff80c02cc605fea1af8e6fc6b1bbe"}, - {file = "grpcio-1.68.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ddda1aa22495d8acd9dfbafff2866438d12faec4d024ebc2e656784d96328ad0"}, - {file = "grpcio-1.68.1-cp311-cp311-win32.whl", hash = "sha256:b33bd114fa5a83f03ec6b7b262ef9f5cac549d4126f1dc702078767b10c46ed9"}, - {file = "grpcio-1.68.1-cp311-cp311-win_amd64.whl", hash = "sha256:7f20ebec257af55694d8f993e162ddf0d36bd82d4e57f74b31c67b3c6d63d8b2"}, - {file = "grpcio-1.68.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:8829924fffb25386995a31998ccbbeaa7367223e647e0122043dfc485a87c666"}, - {file = "grpcio-1.68.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3aed6544e4d523cd6b3119b0916cef3d15ef2da51e088211e4d1eb91a6c7f4f1"}, - {file = 
"grpcio-1.68.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:4efac5481c696d5cb124ff1c119a78bddbfdd13fc499e3bc0ca81e95fc573684"}, - {file = "grpcio-1.68.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ab2d912ca39c51f46baf2a0d92aa265aa96b2443266fc50d234fa88bf877d8e"}, - {file = "grpcio-1.68.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95c87ce2a97434dffe7327a4071839ab8e8bffd0054cc74cbe971fba98aedd60"}, - {file = "grpcio-1.68.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e4842e4872ae4ae0f5497bf60a0498fa778c192cc7a9e87877abd2814aca9475"}, - {file = "grpcio-1.68.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:255b1635b0ed81e9f91da4fcc8d43b7ea5520090b9a9ad9340d147066d1d3613"}, - {file = "grpcio-1.68.1-cp312-cp312-win32.whl", hash = "sha256:7dfc914cc31c906297b30463dde0b9be48e36939575eaf2a0a22a8096e69afe5"}, - {file = "grpcio-1.68.1-cp312-cp312-win_amd64.whl", hash = "sha256:a0c8ddabef9c8f41617f213e527254c41e8b96ea9d387c632af878d05db9229c"}, - {file = "grpcio-1.68.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:a47faedc9ea2e7a3b6569795c040aae5895a19dde0c728a48d3c5d7995fda385"}, - {file = "grpcio-1.68.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:390eee4225a661c5cd133c09f5da1ee3c84498dc265fd292a6912b65c421c78c"}, - {file = "grpcio-1.68.1-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:66a24f3d45c33550703f0abb8b656515b0ab777970fa275693a2f6dc8e35f1c1"}, - {file = "grpcio-1.68.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c08079b4934b0bf0a8847f42c197b1d12cba6495a3d43febd7e99ecd1cdc8d54"}, - {file = "grpcio-1.68.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8720c25cd9ac25dd04ee02b69256d0ce35bf8a0f29e20577427355272230965a"}, - {file = "grpcio-1.68.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:04cfd68bf4f38f5bb959ee2361a7546916bd9a50f78617a346b3aeb2b42e2161"}, - {file = 
"grpcio-1.68.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c28848761a6520c5c6071d2904a18d339a796ebe6b800adc8b3f474c5ce3c3ad"}, - {file = "grpcio-1.68.1-cp313-cp313-win32.whl", hash = "sha256:77d65165fc35cff6e954e7fd4229e05ec76102d4406d4576528d3a3635fc6172"}, - {file = "grpcio-1.68.1-cp313-cp313-win_amd64.whl", hash = "sha256:a8040f85dcb9830d8bbb033ae66d272614cec6faceee88d37a88a9bd1a7a704e"}, - {file = "grpcio-1.68.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:eeb38ff04ab6e5756a2aef6ad8d94e89bb4a51ef96e20f45c44ba190fa0bcaad"}, - {file = "grpcio-1.68.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8a3869a6661ec8f81d93f4597da50336718bde9eb13267a699ac7e0a1d6d0bea"}, - {file = "grpcio-1.68.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:2c4cec6177bf325eb6faa6bd834d2ff6aa8bb3b29012cceb4937b86f8b74323c"}, - {file = "grpcio-1.68.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12941d533f3cd45d46f202e3667be8ebf6bcb3573629c7ec12c3e211d99cfccf"}, - {file = "grpcio-1.68.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80af6f1e69c5e68a2be529990684abdd31ed6622e988bf18850075c81bb1ad6e"}, - {file = "grpcio-1.68.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e8dbe3e00771bfe3d04feed8210fc6617006d06d9a2679b74605b9fed3e8362c"}, - {file = "grpcio-1.68.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:83bbf5807dc3ee94ce1de2dfe8a356e1d74101e4b9d7aa8c720cc4818a34aded"}, - {file = "grpcio-1.68.1-cp38-cp38-win32.whl", hash = "sha256:8cb620037a2fd9eeee97b4531880e439ebfcd6d7d78f2e7dcc3726428ab5ef63"}, - {file = "grpcio-1.68.1-cp38-cp38-win_amd64.whl", hash = "sha256:52fbf85aa71263380d330f4fce9f013c0798242e31ede05fcee7fbe40ccfc20d"}, - {file = "grpcio-1.68.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:cb400138e73969eb5e0535d1d06cae6a6f7a15f2cc74add320e2130b8179211a"}, - {file = "grpcio-1.68.1-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:a1b988b40f2fd9de5c820f3a701a43339d8dcf2cb2f1ca137e2c02671cc83ac1"}, - {file = "grpcio-1.68.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:96f473cdacfdd506008a5d7579c9f6a7ff245a9ade92c3c0265eb76cc591914f"}, - {file = "grpcio-1.68.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:37ea3be171f3cf3e7b7e412a98b77685eba9d4fd67421f4a34686a63a65d99f9"}, - {file = "grpcio-1.68.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ceb56c4285754e33bb3c2fa777d055e96e6932351a3082ce3559be47f8024f0"}, - {file = "grpcio-1.68.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:dffd29a2961f3263a16d73945b57cd44a8fd0b235740cb14056f0612329b345e"}, - {file = "grpcio-1.68.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:025f790c056815b3bf53da850dd70ebb849fd755a4b1ac822cb65cd631e37d43"}, - {file = "grpcio-1.68.1-cp39-cp39-win32.whl", hash = "sha256:1098f03dedc3b9810810568060dea4ac0822b4062f537b0f53aa015269be0a76"}, - {file = "grpcio-1.68.1-cp39-cp39-win_amd64.whl", hash = "sha256:334ab917792904245a028f10e803fcd5b6f36a7b2173a820c0b5b076555825e1"}, - {file = "grpcio-1.68.1.tar.gz", hash = "sha256:44a8502dd5de653ae6a73e2de50a401d84184f0331d0ac3daeb044e66d5c5054"}, -] - -[package.extras] -protobuf = ["grpcio-tools (>=1.68.1)"] - -[[package]] -name = "grpcio-tools" -version = "1.68.1" -description = "Protobuf code generator for gRPC" -optional = false -python-versions = ">=3.8" -files = [ - {file = "grpcio_tools-1.68.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:3a93ea324c5cbccdff55110777410d026dc1e69c3d47684ac97f57f7a77b9c70"}, - {file = "grpcio_tools-1.68.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:94cbfb9482cfd7bdb5f081b94fa137a16e4fe031daa57a2cd85d8cb4e18dce25"}, - {file = "grpcio_tools-1.68.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:bbe7e1641859c858d0f4631f7f7c09e7302433f1aa037028d2419c1410945fac"}, - {file = "grpcio_tools-1.68.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:55c0f91c4294c5807796ed26af42509f3d68497942a92d9ee9f43b08768d6c3c"}, - {file = "grpcio_tools-1.68.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85adc798fd3b57ab3e998b5897c5daab6840211ac16cdf3ba99901cb9b90094a"}, - {file = "grpcio_tools-1.68.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f0bdccb00709bf6180a80a353a99fa844cc0bb2d450cdf7fc6ab22c988bb6b4c"}, - {file = "grpcio_tools-1.68.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2465e4d347b35dc0c007e074c79d5ded0a89c3aa26651e690f83593e0cc28af8"}, - {file = "grpcio_tools-1.68.1-cp310-cp310-win32.whl", hash = "sha256:83c124a1776c1027da7d36584c8044cfed7a9f10e90f08dafde8d2a4cb822319"}, - {file = "grpcio_tools-1.68.1-cp310-cp310-win_amd64.whl", hash = "sha256:283fd1359d619d42c3346f1d8f0a70636a036a421178803a1ab8083fa4228a38"}, - {file = "grpcio_tools-1.68.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:02f04de42834129eb54bb12469160ab631a0395d6a2b77975381c02b994086c3"}, - {file = "grpcio_tools-1.68.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:92b6aab37095879ef9ee428dd171740ff794f4c7a66bc1cc7280cd0051f8cd96"}, - {file = "grpcio_tools-1.68.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:1f0ac6ac5e1e33b998511981b3ef36489501833413354f3597b97a3452d7d7ba"}, - {file = "grpcio_tools-1.68.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:28e0bca3a262af86557f30e30ddf2fadc2324ee05cd7352716924cc7f83541f1"}, - {file = "grpcio_tools-1.68.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12239cf5ca6b7b4937103953cf35c49683d935e32e98596fe52dd35168aa86e6"}, - {file = "grpcio_tools-1.68.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8e48d8884fcf6b182c73d0560a183404458e30a0f479918b88ca8fbd48b8b05f"}, - {file = "grpcio_tools-1.68.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e4e8059469847441855322da16fa2c0f9787b996c237a98778210e31188a8652"}, - {file = "grpcio_tools-1.68.1-cp311-cp311-win32.whl", hash 
= "sha256:21815d54a83effbd2600d16382a7897298cfeffe578557fc9a47b642cc8ddafe"}, - {file = "grpcio_tools-1.68.1-cp311-cp311-win_amd64.whl", hash = "sha256:2114528723d9f12d3e24af3d433ec6f140deea1dd64d3bb1b4ebced217f1867c"}, - {file = "grpcio_tools-1.68.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:d67a9d1ad22ff0d22715dba1d5f8f23ebd47cea84ccd20c90bf4690d988adc5b"}, - {file = "grpcio_tools-1.68.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7f1e704ff73eb01afac51b63b74868a35aaa5d6f791fc63bd41af44a51aa232"}, - {file = "grpcio_tools-1.68.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:e9f69988bd77db014795511c498e89a0db24bd47877e65921364114f88de3bee"}, - {file = "grpcio_tools-1.68.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8585ec7d11fcc2bb635b39605a4466ca9fa28dbae0c184fe58f456da72cb9031"}, - {file = "grpcio_tools-1.68.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c81d0be6c46fcbcd2cd126804060a95531cdf6d779436b2fbc68c8b4a7db2dc1"}, - {file = "grpcio_tools-1.68.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6efdb02e75baf289935b5dad665f0e0f7c3311d86aae0cd2c709e2a8a34bb620"}, - {file = "grpcio_tools-1.68.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8ea367639e771e5a05f7320eb4ae2b27e09d2ec3baeae9819d1c590cc7eaaa08"}, - {file = "grpcio_tools-1.68.1-cp312-cp312-win32.whl", hash = "sha256:a5b1021c9942bba7eca1555061e2d308f506198088a3a539fcb3633499c6635f"}, - {file = "grpcio_tools-1.68.1-cp312-cp312-win_amd64.whl", hash = "sha256:315ad9c28940c95e85e57aeca309d298113175c2d5e8221501a05a51072f5477"}, - {file = "grpcio_tools-1.68.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:67e49b5ede0cc8a0f988f41f7b72f6bc03180aecdb5213bd985bc1bbfd9ffdac"}, - {file = "grpcio_tools-1.68.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:b78e38f953062d45ff92ec940da292dc9bfbf26de492c8dc44e12b13493a8e80"}, - {file = "grpcio_tools-1.68.1-cp313-cp313-manylinux_2_17_aarch64.whl", hash = 
"sha256:8ebe9df5bab4121e8f51e013a379be2027179a0c8013e89d686a1e5800e9c205"}, - {file = "grpcio_tools-1.68.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be553e3ea7447ed9e2e2d089f3b0a77000e86d2681b3c77498c98dddffc62d22"}, - {file = "grpcio_tools-1.68.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4877f3eabb6185b5691f5218fedc86a84a833734847a294048862ec910a2854"}, - {file = "grpcio_tools-1.68.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:b98173e536e8f2779eff84a03409cca6497dc1fad3d10a47c8d881b2cb36259b"}, - {file = "grpcio_tools-1.68.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:5b64035dcd0df70acf3af972c3f103b0ce141d29732fd94eaa8b38cf7c8e62fe"}, - {file = "grpcio_tools-1.68.1-cp313-cp313-win32.whl", hash = "sha256:573f3ed3276df20c308797ae834ac6c5595b1dd2953b243eedadbcd986a287d7"}, - {file = "grpcio_tools-1.68.1-cp313-cp313-win_amd64.whl", hash = "sha256:c4539c6231015c40db879fbc0feaaf03adb4275c1bd2b4dd26e2323f2a13655a"}, - {file = "grpcio_tools-1.68.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:3e0fc6dbc64efc7bb0fe23ce46587e0cbeb512142d543834c2bc9100c8f255ff"}, - {file = "grpcio_tools-1.68.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79337ac1b19610b99f93aa52ae05e5fbf96adbe60d54ecf192af44cc69118d19"}, - {file = "grpcio_tools-1.68.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:eb7cae5f0232aba9057f26a45ef6b0a5633d36627fe49442c0985b6f44b67822"}, - {file = "grpcio_tools-1.68.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25fe1bcbb558a477c525bec9d67e1469d47dddc9430e6e5c0d11f67f08cfc810"}, - {file = "grpcio_tools-1.68.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ce901f42037d1ebc7724e721180d03e33163d5acf0a62c52728e6c36117c5e9"}, - {file = "grpcio_tools-1.68.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3c213c2208c42dce2a5fc7cfb2b952a3c22ef019812f9f27bd54c6e00ee0720e"}, - {file = 
"grpcio_tools-1.68.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ff6ae5031a03ab90e9c508d12914438b73efd44b5eed9946bf8974c453d0ed57"}, - {file = "grpcio_tools-1.68.1-cp38-cp38-win32.whl", hash = "sha256:41e631e72b6b94eb6f3d9cd533c682249f82fc58007c7561f6e521b884a6347e"}, - {file = "grpcio_tools-1.68.1-cp38-cp38-win_amd64.whl", hash = "sha256:69fb93761f116a5b063fb4f6150023c4d785304b37adcebf561b95018f9b40ae"}, - {file = "grpcio_tools-1.68.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:31c703dba465956acb83adc105d61297459d0d14b512441d827f6c040cbffe2b"}, - {file = "grpcio_tools-1.68.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1093f441751689d225916e3fe02daf98d2becab688b9e167bd2c38454ec50906"}, - {file = "grpcio_tools-1.68.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:3543b9205e5b88d2280493aa9b55d35ce9cc45b7a0891c9d84c200652802e22a"}, - {file = "grpcio_tools-1.68.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:79d575cc5a522b9920d9a07387976fc02d162bdf97ba51cf91fabdca8dfdb491"}, - {file = "grpcio_tools-1.68.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d546e4a506288d6227acc0eb625039c5e1ad96218c8cfe9ecf661a41e15e442e"}, - {file = "grpcio_tools-1.68.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:aced9c7a4edbf6eff73720bfa6fefd9053ae294535a488dfb92a372913eda10d"}, - {file = "grpcio_tools-1.68.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3c08d1a244b5025ba3f8ef81d0885b431b93cc20bc4560add4cdfcf38c1bfad"}, - {file = "grpcio_tools-1.68.1-cp39-cp39-win32.whl", hash = "sha256:049f05a3f227e9f696059a20b2858e6d7c1cd6037d8471306d7ab7627b1a4ce4"}, - {file = "grpcio_tools-1.68.1-cp39-cp39-win_amd64.whl", hash = "sha256:4c3599c75b1157e6bda24cdbdadb023bf0fe1085aa1e0047a1f35a8778f9b56e"}, - {file = "grpcio_tools-1.68.1.tar.gz", hash = "sha256:2413a17ad16c9c821b36e4a67fc64c37b9e4636ab1c3a07778018801378739ba"}, -] - -[package.dependencies] -grpcio = ">=1.68.1" -protobuf = ">=5.26.1,<6.0dev" -setuptools 
= "*" - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." -optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "packaging" -version = "24.2" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, - {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, -] - -[[package]] -name = "pathspec" -version = "0.12.1" -description = "Utility library for gitignore style pattern matching of file paths." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, - {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, -] - -[[package]] -name = "platformdirs" -version = "4.3.6" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, - {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, -] - -[package.extras] -docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] -type = ["mypy (>=1.11.2)"] - -[[package]] -name = "pluggy" -version = "1.5.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "protobuf" -version = "5.29.1" -description = "" -optional = false -python-versions = ">=3.8" -files = [ - {file = "protobuf-5.29.1-cp310-abi3-win32.whl", hash = "sha256:22c1f539024241ee545cbcb00ee160ad1877975690b16656ff87dde107b5f110"}, - {file = "protobuf-5.29.1-cp310-abi3-win_amd64.whl", hash = "sha256:1fc55267f086dd4050d18ef839d7bd69300d0d08c2a53ca7df3920cc271a3c34"}, - {file = "protobuf-5.29.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:d473655e29c0c4bbf8b69e9a8fb54645bc289dead6d753b952e7aa660254ae18"}, - {file = "protobuf-5.29.1-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:b5ba1d0e4c8a40ae0496d0e2ecfdbb82e1776928a205106d14ad6985a09ec155"}, - {file = "protobuf-5.29.1-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:8ee1461b3af56145aca2800e6a3e2f928108c749ba8feccc6f5dd0062c410c0d"}, - {file = "protobuf-5.29.1-cp38-cp38-win32.whl", hash = 
"sha256:50879eb0eb1246e3a5eabbbe566b44b10348939b7cc1b267567e8c3d07213853"}, - {file = "protobuf-5.29.1-cp38-cp38-win_amd64.whl", hash = "sha256:027fbcc48cea65a6b17028510fdd054147057fa78f4772eb547b9274e5219331"}, - {file = "protobuf-5.29.1-cp39-cp39-win32.whl", hash = "sha256:5a41deccfa5e745cef5c65a560c76ec0ed8e70908a67cc8f4da5fce588b50d57"}, - {file = "protobuf-5.29.1-cp39-cp39-win_amd64.whl", hash = "sha256:012ce28d862ff417fd629285aca5d9772807f15ceb1a0dbd15b88f58c776c98c"}, - {file = "protobuf-5.29.1-py3-none-any.whl", hash = "sha256:32600ddb9c2a53dedc25b8581ea0f1fd8ea04956373c0c07577ce58d312522e0"}, - {file = "protobuf-5.29.1.tar.gz", hash = "sha256:683be02ca21a6ffe80db6dd02c0b5b2892322c59ca57fd6c872d652cb80549cb"}, -] - -[[package]] -name = "pytest" -version = "8.3.4" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, - {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=1.5,<2" -tomli = {version = ">=1", markers = "python_version < \"3.11\""} - -[package.extras] -dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "setuptools" -version = "75.6.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.9" -files = [ - {file = "setuptools-75.6.0-py3-none-any.whl", hash = "sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d"}, - {file = "setuptools-75.6.0.tar.gz", hash = 
"sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6"}, -] - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"] -core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"] - -[[package]] -name = "tabulate" -version = "0.9.0" -description = "Pretty-print tabular data" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, - {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, -] - -[package.extras] -widechars = ["wcwidth"] - -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, 
!=3.2.*" -files = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] - -[[package]] -name = "tomli" -version = "2.2.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, - {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, - {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, - {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, - {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, - {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, - {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, - {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, - {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, - {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, -] - -[[package]] -name = "typing-extensions" -version = "4.12.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, -] - -[metadata] -lock-version = "2.0" -python-versions = "^3.10" -content-hash = "fc1e192401cbe5da4fe12291c42d1ec2de9dba6c6426b3bf6f8350655904dba4" diff --git a/fume/pyproject.toml b/fume/pyproject.toml deleted file mode 100644 index e8955cb..0000000 --- a/fume/pyproject.toml +++ /dev/null @@ -1,38 +0,0 @@ -[tool.poetry] 
-name = "yellowstone-fume" -homepage = "https://github.com/rpcpool/yellowstone-fumarole" -repository = "https://github.com/rpcpool/yellowstone-fumarole" -version = "0.2.0" -description = "Yellowstone Fumarole CLI tool" -authors = ["Louis-Vincent "] -readme = "README.md" - -packages = [ - { include = "yellowstone_api" }, - { include = "fume" }, -] - -[tool.poetry.dependencies] -python = "^3.10" -click = "^8.1.7" -grpcio = "^1.68.1" -protobuf = "^5.29.1" -toml = "^0.10.2" -base58 = "^2.1.1" -tabulate = "^0.9.0" - - -[tool.poetry.scripts] -fume = "fume.cli.app:main" - -[tool.poetry.group.test.dependencies] -pytest = "^8.3.4" - - -[tool.poetry.group.dev.dependencies] -grpcio-tools = "^1.68.1" -black = "^24.10.0" - -[build-system] -requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" diff --git a/fume/scripts/build.sh b/fume/scripts/build.sh deleted file mode 100755 index 33e34ea..0000000 --- a/fume/scripts/build.sh +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/bash -set -e - -#!/bin/bash -script_dir=$(dirname "$(realpath "$BASH_SOURCE")") -fume_dir="$(dirname "$script_dir")" -repo_dir="$(dirname "$fume_dir")" -echo "fume_dir: $fume_dir" -echo "repo_dir: $repo_dir" -proto_path="$repo_dir/proto" -proto_path2="$repo_dir/yellowstone-grpc/yellowstone-grpc-proto/proto" -out_dir="$fume_dir/yellowstone_api" -module_name="yellowstone_api" -rm -fr $out_dir/* -mkdir -p $out_dir - -/bin/env python -m grpc_tools.protoc \ - -I$proto_path \ - -I$proto_path2 \ - --python_out=$out_dir \ - --pyi_out=$out_dir \ - --grpc_python_out=$out_dir \ - $proto_path/*.proto $proto_path2/*.proto - -pushd $out_dir -for file in *.py*; do - name="${file%.*}" - sed -i "s/^import \(.*\)_pb2 as \(.*\)/import $module_name.\1_pb2 as \2/g" $file - sed -i "s/^import \(.*\)_pb2_grpc as \(.*\)/import $module_name.\1_pb2 as \2/g" $file - sed -i "s/^from \(.*\)_pb2_grpc import \(.*\)/from $module_name.\1_pb2 import \2/g" $file - sed -i "s/^from \(.*\)_pb2 import \(.*\)/from $module_name.\1_pb2 import 
\2/g" $file -done - -touch '__init__.py' - -popd \ No newline at end of file diff --git a/fume/tests/__init__.py b/fume/tests/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/fume/tests/test_fumarole_client.py b/fume/tests/test_fumarole_client.py deleted file mode 100644 index c6bf6ca..0000000 --- a/fume/tests/test_fumarole_client.py +++ /dev/null @@ -1,77 +0,0 @@ -import os -import random -import string -import toml -import pytest -from fume.grpc import FumaroleClient, grpc_channel - - -@pytest.fixture -def test_config(): - test_config_path = os.environ.get("FUME_TEST_CONFIG") - if test_config_path: - with open(test_config_path) as f: - config = toml.load(f) - return config["fumarole"] - else: - return { - "endpoints": "localhost:9000", - "x-token": None, - "x-subscription-id": "11111111-11111111-11111111-11111111", - } - - -@pytest.fixture -def fumarole_client(test_config): - - metadata = [ - (k, v) for k, v in test_config.items() if k.startswith("x-") and k != "x-token" - ] - - with grpc_channel( - test_config["endpoints"][0], test_config.get("x-token") - ) as channel: - yield FumaroleClient(channel, metadata=metadata) - - -def random_str(len, prefix=None): - suffix = "".join(random.choices(string.ascii_letters, k=len)) - if prefix: - return f"{prefix}-{suffix}" - return suffix - - -def test_create_consumer_group(fumarole_client: FumaroleClient): - cg_name = random_str(6, prefix="fume-test") - commitment = "confirmed" - - cg = fumarole_client.create_consumer_group( - name=cg_name, - commitment=commitment, - ) - - cg_info = fumarole_client.get_cg_info(cg_name) - cg_list = fumarole_client.list_consumer_groups() - assert cg_info - assert cg_info.consumer_group_label == cg_name - assert cg == cg_name - assert cg_name in [cg.consumer_group_label for cg in cg_list] - - -def test_delete_consumer(fumarole_client: FumaroleClient): - cg_name = random_str(6, prefix="fume-test") - commitment = "confirmed" - - cg = 
fumarole_client.create_consumer_group( - name=cg_name, - commitment=commitment, - ) - - cg_info = fumarole_client.get_cg_info(cg_name) - fumarole_client.delete_consumer_group(cg_name) - cg_info = fumarole_client.get_cg_info(cg_name) - - cg_list = fumarole_client.list_consumer_groups() - - assert not cg_info - assert cg_name not in [cg.consumer_group_label for cg in cg_list] diff --git a/fume/yellowstone_api/__init__.py b/fume/yellowstone_api/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/fume/yellowstone_api/fumarole_pb2.py b/fume/yellowstone_api/fumarole_pb2.py deleted file mode 100644 index b285c05..0000000 --- a/fume/yellowstone_api/fumarole_pb2.py +++ /dev/null @@ -1,82 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# NO CHECKED-IN PROTOBUF GENCODE -# source: fumarole.proto -# Protobuf Python Version: 5.28.1 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 5, - 28, - 1, - '', - 'fumarole.proto' -) -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -import yellowstone_api.geyser_pb2 as geyser__pb2 -try: - solana__storage__pb2 = geyser__pb2.solana__storage__pb2 -except AttributeError: - solana__storage__pb2 = geyser__pb2.solana_storage_pb2 - -from yellowstone_api.geyser_pb2 import * - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0e\x66umarole.proto\x12\x08\x66umarole\x1a\x0cgeyser.proto\"&\n$ListAvailableCommitmentLevelsRequest\"[\n%ListAvailableCommitmentLevelsResponse\x12\x32\n\x11\x63ommitment_levels\x18\x01 
\x03(\x0e\x32\x17.geyser.CommitmentLevel\";\n\x1bGetConsumerGroupInfoRequest\x12\x1c\n\x14\x63onsumer_group_label\x18\x01 \x01(\t\":\n\x1a\x44\x65leteConsumerGroupRequest\x12\x1c\n\x14\x63onsumer_group_label\x18\x01 \x01(\t\".\n\x1b\x44\x65leteConsumerGroupResponse\x12\x0f\n\x07success\x18\x01 \x01(\x08\"\x1b\n\x19ListConsumerGroupsRequest\"R\n\x1aListConsumerGroupsResponse\x12\x34\n\x0f\x63onsumer_groups\x18\x01 \x03(\x0b\x32\x1b.fumarole.ConsumerGroupInfo\"\x98\x02\n\x11\x43onsumerGroupInfo\x12\n\n\x02id\x18\x01 \x01(\t\x12\x1c\n\x14\x63onsumer_group_label\x18\x02 \x01(\t\x12\x38\n\x13\x63onsumer_group_type\x18\x03 \x01(\x0e\x32\x1b.fumarole.ConsumerGroupType\x12\x14\n\x0cmember_count\x18\x04 \x01(\r\x12\x31\n\x10\x63ommitment_level\x18\x05 \x01(\x0e\x32\x17.geyser.CommitmentLevel\x12\x44\n\x19\x65vent_subscription_policy\x18\x06 \x01(\x0e\x32!.fumarole.EventSubscriptionPolicy\x12\x10\n\x08is_stale\x18\x07 \x01(\x08\"5\n\x15GetSlotLagInfoRequest\x12\x1c\n\x14\x63onsumer_group_label\x18\x01 \x01(\t\"H\n\x16GetSlotLagInfoResponse\x12\x15\n\rmax_slot_seen\x18\x01 \x01(\x04\x12\x17\n\x0fglobal_max_slot\x18\x02 \x01(\x04\"\x94\x03\n\x10SubscribeRequest\x12\x1c\n\x14\x63onsumer_group_label\x18\x01 \x01(\t\x12\x18\n\x0b\x63onsumer_id\x18\x02 \x01(\rH\x00\x88\x01\x01\x12:\n\x08\x61\x63\x63ounts\x18\x03 \x03(\x0b\x32(.fumarole.SubscribeRequest.AccountsEntry\x12\x42\n\x0ctransactions\x18\x04 \x03(\x0b\x32,.fumarole.SubscribeRequest.TransactionsEntry\x1aW\n\rAccountsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.geyser.SubscribeRequestFilterAccounts:\x02\x38\x01\x1a_\n\x11TransactionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x39\n\x05value\x18\x02 \x01(\x0b\x32*.geyser.SubscribeRequestFilterTransactions:\x02\x38\x01\x42\x0e\n\x0c_consumer_id\"5\n!CreateStaticConsumerGroupResponse\x12\x10\n\x08group_id\x18\x01 \x01(\t\"\xc5\x02\n CreateStaticConsumerGroupRequest\x12\x1c\n\x14\x63onsumer_group_label\x18\x01 
\x01(\t\x12\x19\n\x0cmember_count\x18\x02 \x01(\rH\x00\x88\x01\x01\x12<\n\x15initial_offset_policy\x18\x03 \x01(\x0e\x32\x1d.fumarole.InitialOffsetPolicy\x12\x31\n\x10\x63ommitment_level\x18\x04 \x01(\x0e\x32\x17.geyser.CommitmentLevel\x12\x44\n\x19\x65vent_subscription_policy\x18\x05 \x01(\x0e\x32!.fumarole.EventSubscriptionPolicy\x12\x14\n\x07\x61t_slot\x18\x06 \x01(\x03H\x01\x88\x01\x01\x42\x0f\n\r_member_countB\n\n\x08_at_slot*\x1f\n\x11\x43onsumerGroupType\x12\n\n\x06STATIC\x10\x00*9\n\x13InitialOffsetPolicy\x12\x0c\n\x08\x45\x41RLIEST\x10\x00\x12\n\n\x06LATEST\x10\x01\x12\x08\n\x04SLOT\x10\x02*R\n\x17\x45ventSubscriptionPolicy\x12\x17\n\x13\x41\x43\x43OUNT_UPDATE_ONLY\x10\x00\x12\x14\n\x10TRANSACTION_ONLY\x10\x01\x12\x08\n\x04\x42OTH\x10\x02\x32\xcd\x05\n\x08\x46umarole\x12\x82\x01\n\x1dListAvailableCommitmentLevels\x12..fumarole.ListAvailableCommitmentLevelsRequest\x1a/.fumarole.ListAvailableCommitmentLevelsResponse\"\x00\x12\\\n\x14GetConsumerGroupInfo\x12%.fumarole.GetConsumerGroupInfoRequest\x1a\x1b.fumarole.ConsumerGroupInfo\"\x00\x12\x61\n\x12ListConsumerGroups\x12#.fumarole.ListConsumerGroupsRequest\x1a$.fumarole.ListConsumerGroupsResponse\"\x00\x12\x64\n\x13\x44\x65leteConsumerGroup\x12$.fumarole.DeleteConsumerGroupRequest\x1a%.fumarole.DeleteConsumerGroupResponse\"\x00\x12v\n\x19\x43reateStaticConsumerGroup\x12*.fumarole.CreateStaticConsumerGroupRequest\x1a+.fumarole.CreateStaticConsumerGroupResponse\"\x00\x12\x46\n\tSubscribe\x12\x1a.fumarole.SubscribeRequest\x1a\x17.geyser.SubscribeUpdate\"\x00(\x01\x30\x01\x12U\n\x0eGetSlotLagInfo\x12\x1f.fumarole.GetSlotLagInfoRequest\x1a .fumarole.GetSlotLagInfoResponse\"\x00P\x00\x62\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'fumarole_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - DESCRIPTOR._loaded_options = None - _globals['_SUBSCRIBEREQUEST_ACCOUNTSENTRY']._loaded_options = None - 
_globals['_SUBSCRIBEREQUEST_ACCOUNTSENTRY']._serialized_options = b'8\001' - _globals['_SUBSCRIBEREQUEST_TRANSACTIONSENTRY']._loaded_options = None - _globals['_SUBSCRIBEREQUEST_TRANSACTIONSENTRY']._serialized_options = b'8\001' - _globals['_CONSUMERGROUPTYPE']._serialized_start=1659 - _globals['_CONSUMERGROUPTYPE']._serialized_end=1690 - _globals['_INITIALOFFSETPOLICY']._serialized_start=1692 - _globals['_INITIALOFFSETPOLICY']._serialized_end=1749 - _globals['_EVENTSUBSCRIPTIONPOLICY']._serialized_start=1751 - _globals['_EVENTSUBSCRIPTIONPOLICY']._serialized_end=1833 - _globals['_LISTAVAILABLECOMMITMENTLEVELSREQUEST']._serialized_start=42 - _globals['_LISTAVAILABLECOMMITMENTLEVELSREQUEST']._serialized_end=80 - _globals['_LISTAVAILABLECOMMITMENTLEVELSRESPONSE']._serialized_start=82 - _globals['_LISTAVAILABLECOMMITMENTLEVELSRESPONSE']._serialized_end=173 - _globals['_GETCONSUMERGROUPINFOREQUEST']._serialized_start=175 - _globals['_GETCONSUMERGROUPINFOREQUEST']._serialized_end=234 - _globals['_DELETECONSUMERGROUPREQUEST']._serialized_start=236 - _globals['_DELETECONSUMERGROUPREQUEST']._serialized_end=294 - _globals['_DELETECONSUMERGROUPRESPONSE']._serialized_start=296 - _globals['_DELETECONSUMERGROUPRESPONSE']._serialized_end=342 - _globals['_LISTCONSUMERGROUPSREQUEST']._serialized_start=344 - _globals['_LISTCONSUMERGROUPSREQUEST']._serialized_end=371 - _globals['_LISTCONSUMERGROUPSRESPONSE']._serialized_start=373 - _globals['_LISTCONSUMERGROUPSRESPONSE']._serialized_end=455 - _globals['_CONSUMERGROUPINFO']._serialized_start=458 - _globals['_CONSUMERGROUPINFO']._serialized_end=738 - _globals['_GETSLOTLAGINFOREQUEST']._serialized_start=740 - _globals['_GETSLOTLAGINFOREQUEST']._serialized_end=793 - _globals['_GETSLOTLAGINFORESPONSE']._serialized_start=795 - _globals['_GETSLOTLAGINFORESPONSE']._serialized_end=867 - _globals['_SUBSCRIBEREQUEST']._serialized_start=870 - _globals['_SUBSCRIBEREQUEST']._serialized_end=1274 - 
_globals['_SUBSCRIBEREQUEST_ACCOUNTSENTRY']._serialized_start=1074 - _globals['_SUBSCRIBEREQUEST_ACCOUNTSENTRY']._serialized_end=1161 - _globals['_SUBSCRIBEREQUEST_TRANSACTIONSENTRY']._serialized_start=1163 - _globals['_SUBSCRIBEREQUEST_TRANSACTIONSENTRY']._serialized_end=1258 - _globals['_CREATESTATICCONSUMERGROUPRESPONSE']._serialized_start=1276 - _globals['_CREATESTATICCONSUMERGROUPRESPONSE']._serialized_end=1329 - _globals['_CREATESTATICCONSUMERGROUPREQUEST']._serialized_start=1332 - _globals['_CREATESTATICCONSUMERGROUPREQUEST']._serialized_end=1657 - _globals['_FUMAROLE']._serialized_start=1836 - _globals['_FUMAROLE']._serialized_end=2553 -# @@protoc_insertion_point(module_scope) diff --git a/fume/yellowstone_api/fumarole_pb2.pyi b/fume/yellowstone_api/fumarole_pb2.pyi deleted file mode 100644 index e832a50..0000000 --- a/fume/yellowstone_api/fumarole_pb2.pyi +++ /dev/null @@ -1,198 +0,0 @@ -import yellowstone_api.geyser_pb2 as _geyser_pb2 -import yellowstone_api.solana_storage_pb2 as _solana_storage_pb2 -from google.protobuf.internal import containers as _containers -from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union -from yellowstone_api.geyser_pb2 import SubscribeRequest as SubscribeRequest -from yellowstone_api.geyser_pb2 import SubscribeRequestFilterAccounts as SubscribeRequestFilterAccounts -from yellowstone_api.geyser_pb2 import SubscribeRequestFilterAccountsFilter as SubscribeRequestFilterAccountsFilter -from yellowstone_api.geyser_pb2 import SubscribeRequestFilterAccountsFilterMemcmp as SubscribeRequestFilterAccountsFilterMemcmp -from yellowstone_api.geyser_pb2 import SubscribeRequestFilterAccountsFilterLamports as SubscribeRequestFilterAccountsFilterLamports -from yellowstone_api.geyser_pb2 import 
SubscribeRequestFilterSlots as SubscribeRequestFilterSlots -from yellowstone_api.geyser_pb2 import SubscribeRequestFilterTransactions as SubscribeRequestFilterTransactions -from yellowstone_api.geyser_pb2 import SubscribeRequestFilterBlocks as SubscribeRequestFilterBlocks -from yellowstone_api.geyser_pb2 import SubscribeRequestFilterBlocksMeta as SubscribeRequestFilterBlocksMeta -from yellowstone_api.geyser_pb2 import SubscribeRequestFilterEntry as SubscribeRequestFilterEntry -from yellowstone_api.geyser_pb2 import SubscribeRequestAccountsDataSlice as SubscribeRequestAccountsDataSlice -from yellowstone_api.geyser_pb2 import SubscribeRequestPing as SubscribeRequestPing -from yellowstone_api.geyser_pb2 import SubscribeUpdate as SubscribeUpdate -from yellowstone_api.geyser_pb2 import SubscribeUpdateAccount as SubscribeUpdateAccount -from yellowstone_api.geyser_pb2 import SubscribeUpdateAccountInfo as SubscribeUpdateAccountInfo -from yellowstone_api.geyser_pb2 import SubscribeUpdateSlot as SubscribeUpdateSlot -from yellowstone_api.geyser_pb2 import SubscribeUpdateTransaction as SubscribeUpdateTransaction -from yellowstone_api.geyser_pb2 import SubscribeUpdateTransactionInfo as SubscribeUpdateTransactionInfo -from yellowstone_api.geyser_pb2 import SubscribeUpdateTransactionStatus as SubscribeUpdateTransactionStatus -from yellowstone_api.geyser_pb2 import SubscribeUpdateBlock as SubscribeUpdateBlock -from yellowstone_api.geyser_pb2 import SubscribeUpdateBlockMeta as SubscribeUpdateBlockMeta -from yellowstone_api.geyser_pb2 import SubscribeUpdateEntry as SubscribeUpdateEntry -from yellowstone_api.geyser_pb2 import SubscribeUpdatePing as SubscribeUpdatePing -from yellowstone_api.geyser_pb2 import SubscribeUpdatePong as SubscribeUpdatePong -from yellowstone_api.geyser_pb2 import PingRequest as PingRequest -from yellowstone_api.geyser_pb2 import PongResponse as PongResponse -from yellowstone_api.geyser_pb2 import GetLatestBlockhashRequest as GetLatestBlockhashRequest -from 
yellowstone_api.geyser_pb2 import GetLatestBlockhashResponse as GetLatestBlockhashResponse -from yellowstone_api.geyser_pb2 import GetBlockHeightRequest as GetBlockHeightRequest -from yellowstone_api.geyser_pb2 import GetBlockHeightResponse as GetBlockHeightResponse -from yellowstone_api.geyser_pb2 import GetSlotRequest as GetSlotRequest -from yellowstone_api.geyser_pb2 import GetSlotResponse as GetSlotResponse -from yellowstone_api.geyser_pb2 import GetVersionRequest as GetVersionRequest -from yellowstone_api.geyser_pb2 import GetVersionResponse as GetVersionResponse -from yellowstone_api.geyser_pb2 import IsBlockhashValidRequest as IsBlockhashValidRequest -from yellowstone_api.geyser_pb2 import IsBlockhashValidResponse as IsBlockhashValidResponse -from yellowstone_api.geyser_pb2 import CommitmentLevel as CommitmentLevel -from yellowstone_api.geyser_pb2 import SlotStatus as SlotStatus - -DESCRIPTOR: _descriptor.FileDescriptor -PROCESSED: _geyser_pb2.CommitmentLevel -CONFIRMED: _geyser_pb2.CommitmentLevel -FINALIZED: _geyser_pb2.CommitmentLevel -SLOT_PROCESSED: _geyser_pb2.SlotStatus -SLOT_CONFIRMED: _geyser_pb2.SlotStatus -SLOT_FINALIZED: _geyser_pb2.SlotStatus -SLOT_FIRST_SHRED_RECEIVED: _geyser_pb2.SlotStatus -SLOT_COMPLETED: _geyser_pb2.SlotStatus -SLOT_CREATED_BANK: _geyser_pb2.SlotStatus -SLOT_DEAD: _geyser_pb2.SlotStatus - -class ConsumerGroupType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): - __slots__ = () - STATIC: _ClassVar[ConsumerGroupType] - -class InitialOffsetPolicy(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): - __slots__ = () - EARLIEST: _ClassVar[InitialOffsetPolicy] - LATEST: _ClassVar[InitialOffsetPolicy] - SLOT: _ClassVar[InitialOffsetPolicy] - -class EventSubscriptionPolicy(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): - __slots__ = () - ACCOUNT_UPDATE_ONLY: _ClassVar[EventSubscriptionPolicy] - TRANSACTION_ONLY: _ClassVar[EventSubscriptionPolicy] - BOTH: _ClassVar[EventSubscriptionPolicy] -STATIC: ConsumerGroupType 
-EARLIEST: InitialOffsetPolicy -LATEST: InitialOffsetPolicy -SLOT: InitialOffsetPolicy -ACCOUNT_UPDATE_ONLY: EventSubscriptionPolicy -TRANSACTION_ONLY: EventSubscriptionPolicy -BOTH: EventSubscriptionPolicy - -class ListAvailableCommitmentLevelsRequest(_message.Message): - __slots__ = () - def __init__(self) -> None: ... - -class ListAvailableCommitmentLevelsResponse(_message.Message): - __slots__ = ("commitment_levels",) - COMMITMENT_LEVELS_FIELD_NUMBER: _ClassVar[int] - commitment_levels: _containers.RepeatedScalarFieldContainer[_geyser_pb2.CommitmentLevel] - def __init__(self, commitment_levels: _Optional[_Iterable[_Union[_geyser_pb2.CommitmentLevel, str]]] = ...) -> None: ... - -class GetConsumerGroupInfoRequest(_message.Message): - __slots__ = ("consumer_group_label",) - CONSUMER_GROUP_LABEL_FIELD_NUMBER: _ClassVar[int] - consumer_group_label: str - def __init__(self, consumer_group_label: _Optional[str] = ...) -> None: ... - -class DeleteConsumerGroupRequest(_message.Message): - __slots__ = ("consumer_group_label",) - CONSUMER_GROUP_LABEL_FIELD_NUMBER: _ClassVar[int] - consumer_group_label: str - def __init__(self, consumer_group_label: _Optional[str] = ...) -> None: ... - -class DeleteConsumerGroupResponse(_message.Message): - __slots__ = ("success",) - SUCCESS_FIELD_NUMBER: _ClassVar[int] - success: bool - def __init__(self, success: bool = ...) -> None: ... - -class ListConsumerGroupsRequest(_message.Message): - __slots__ = () - def __init__(self) -> None: ... - -class ListConsumerGroupsResponse(_message.Message): - __slots__ = ("consumer_groups",) - CONSUMER_GROUPS_FIELD_NUMBER: _ClassVar[int] - consumer_groups: _containers.RepeatedCompositeFieldContainer[ConsumerGroupInfo] - def __init__(self, consumer_groups: _Optional[_Iterable[_Union[ConsumerGroupInfo, _Mapping]]] = ...) -> None: ... 
- -class ConsumerGroupInfo(_message.Message): - __slots__ = ("id", "consumer_group_label", "consumer_group_type", "member_count", "commitment_level", "event_subscription_policy", "is_stale") - ID_FIELD_NUMBER: _ClassVar[int] - CONSUMER_GROUP_LABEL_FIELD_NUMBER: _ClassVar[int] - CONSUMER_GROUP_TYPE_FIELD_NUMBER: _ClassVar[int] - MEMBER_COUNT_FIELD_NUMBER: _ClassVar[int] - COMMITMENT_LEVEL_FIELD_NUMBER: _ClassVar[int] - EVENT_SUBSCRIPTION_POLICY_FIELD_NUMBER: _ClassVar[int] - IS_STALE_FIELD_NUMBER: _ClassVar[int] - id: str - consumer_group_label: str - consumer_group_type: ConsumerGroupType - member_count: int - commitment_level: _geyser_pb2.CommitmentLevel - event_subscription_policy: EventSubscriptionPolicy - is_stale: bool - def __init__(self, id: _Optional[str] = ..., consumer_group_label: _Optional[str] = ..., consumer_group_type: _Optional[_Union[ConsumerGroupType, str]] = ..., member_count: _Optional[int] = ..., commitment_level: _Optional[_Union[_geyser_pb2.CommitmentLevel, str]] = ..., event_subscription_policy: _Optional[_Union[EventSubscriptionPolicy, str]] = ..., is_stale: bool = ...) -> None: ... - -class GetSlotLagInfoRequest(_message.Message): - __slots__ = ("consumer_group_label",) - CONSUMER_GROUP_LABEL_FIELD_NUMBER: _ClassVar[int] - consumer_group_label: str - def __init__(self, consumer_group_label: _Optional[str] = ...) -> None: ... - -class GetSlotLagInfoResponse(_message.Message): - __slots__ = ("max_slot_seen", "global_max_slot") - MAX_SLOT_SEEN_FIELD_NUMBER: _ClassVar[int] - GLOBAL_MAX_SLOT_FIELD_NUMBER: _ClassVar[int] - max_slot_seen: int - global_max_slot: int - def __init__(self, max_slot_seen: _Optional[int] = ..., global_max_slot: _Optional[int] = ...) -> None: ... 
- -class SubscribeRequest(_message.Message): - __slots__ = ("consumer_group_label", "consumer_id", "accounts", "transactions") - class AccountsEntry(_message.Message): - __slots__ = ("key", "value") - KEY_FIELD_NUMBER: _ClassVar[int] - VALUE_FIELD_NUMBER: _ClassVar[int] - key: str - value: _geyser_pb2.SubscribeRequestFilterAccounts - def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[_geyser_pb2.SubscribeRequestFilterAccounts, _Mapping]] = ...) -> None: ... - class TransactionsEntry(_message.Message): - __slots__ = ("key", "value") - KEY_FIELD_NUMBER: _ClassVar[int] - VALUE_FIELD_NUMBER: _ClassVar[int] - key: str - value: _geyser_pb2.SubscribeRequestFilterTransactions - def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[_geyser_pb2.SubscribeRequestFilterTransactions, _Mapping]] = ...) -> None: ... - CONSUMER_GROUP_LABEL_FIELD_NUMBER: _ClassVar[int] - CONSUMER_ID_FIELD_NUMBER: _ClassVar[int] - ACCOUNTS_FIELD_NUMBER: _ClassVar[int] - TRANSACTIONS_FIELD_NUMBER: _ClassVar[int] - consumer_group_label: str - consumer_id: int - accounts: _containers.MessageMap[str, _geyser_pb2.SubscribeRequestFilterAccounts] - transactions: _containers.MessageMap[str, _geyser_pb2.SubscribeRequestFilterTransactions] - def __init__(self, consumer_group_label: _Optional[str] = ..., consumer_id: _Optional[int] = ..., accounts: _Optional[_Mapping[str, _geyser_pb2.SubscribeRequestFilterAccounts]] = ..., transactions: _Optional[_Mapping[str, _geyser_pb2.SubscribeRequestFilterTransactions]] = ...) -> None: ... - -class CreateStaticConsumerGroupResponse(_message.Message): - __slots__ = ("group_id",) - GROUP_ID_FIELD_NUMBER: _ClassVar[int] - group_id: str - def __init__(self, group_id: _Optional[str] = ...) -> None: ... 
- -class CreateStaticConsumerGroupRequest(_message.Message): - __slots__ = ("consumer_group_label", "member_count", "initial_offset_policy", "commitment_level", "event_subscription_policy", "at_slot") - CONSUMER_GROUP_LABEL_FIELD_NUMBER: _ClassVar[int] - MEMBER_COUNT_FIELD_NUMBER: _ClassVar[int] - INITIAL_OFFSET_POLICY_FIELD_NUMBER: _ClassVar[int] - COMMITMENT_LEVEL_FIELD_NUMBER: _ClassVar[int] - EVENT_SUBSCRIPTION_POLICY_FIELD_NUMBER: _ClassVar[int] - AT_SLOT_FIELD_NUMBER: _ClassVar[int] - consumer_group_label: str - member_count: int - initial_offset_policy: InitialOffsetPolicy - commitment_level: _geyser_pb2.CommitmentLevel - event_subscription_policy: EventSubscriptionPolicy - at_slot: int - def __init__(self, consumer_group_label: _Optional[str] = ..., member_count: _Optional[int] = ..., initial_offset_policy: _Optional[_Union[InitialOffsetPolicy, str]] = ..., commitment_level: _Optional[_Union[_geyser_pb2.CommitmentLevel, str]] = ..., event_subscription_policy: _Optional[_Union[EventSubscriptionPolicy, str]] = ..., at_slot: _Optional[int] = ...) -> None: ... diff --git a/fume/yellowstone_api/fumarole_pb2_grpc.py b/fume/yellowstone_api/fumarole_pb2_grpc.py deleted file mode 100644 index 3228654..0000000 --- a/fume/yellowstone_api/fumarole_pb2_grpc.py +++ /dev/null @@ -1,356 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-"""Client and server classes corresponding to protobuf-defined services.""" -import grpc -import warnings - -import yellowstone_api.fumarole_pb2 as fumarole__pb2 -import yellowstone_api.geyser_pb2 as geyser__pb2 - -GRPC_GENERATED_VERSION = '1.68.1' -GRPC_VERSION = grpc.__version__ -_version_not_supported = False - -try: - from grpc._utilities import first_version_is_lower - _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) -except ImportError: - _version_not_supported = True - -if _version_not_supported: - raise RuntimeError( - f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in fumarole_pb2_grpc.py depends on' - + f' grpcio>={GRPC_GENERATED_VERSION}.' - + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' - + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' - ) - - -class FumaroleStub(object): - """Missing associated documentation comment in .proto file.""" - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.ListAvailableCommitmentLevels = channel.unary_unary( - '/fumarole.Fumarole/ListAvailableCommitmentLevels', - request_serializer=fumarole__pb2.ListAvailableCommitmentLevelsRequest.SerializeToString, - response_deserializer=fumarole__pb2.ListAvailableCommitmentLevelsResponse.FromString, - _registered_method=True) - self.GetConsumerGroupInfo = channel.unary_unary( - '/fumarole.Fumarole/GetConsumerGroupInfo', - request_serializer=fumarole__pb2.GetConsumerGroupInfoRequest.SerializeToString, - response_deserializer=fumarole__pb2.ConsumerGroupInfo.FromString, - _registered_method=True) - self.ListConsumerGroups = channel.unary_unary( - '/fumarole.Fumarole/ListConsumerGroups', - request_serializer=fumarole__pb2.ListConsumerGroupsRequest.SerializeToString, - response_deserializer=fumarole__pb2.ListConsumerGroupsResponse.FromString, - _registered_method=True) - self.DeleteConsumerGroup = channel.unary_unary( - '/fumarole.Fumarole/DeleteConsumerGroup', - request_serializer=fumarole__pb2.DeleteConsumerGroupRequest.SerializeToString, - response_deserializer=fumarole__pb2.DeleteConsumerGroupResponse.FromString, - _registered_method=True) - self.CreateStaticConsumerGroup = channel.unary_unary( - '/fumarole.Fumarole/CreateStaticConsumerGroup', - request_serializer=fumarole__pb2.CreateStaticConsumerGroupRequest.SerializeToString, - response_deserializer=fumarole__pb2.CreateStaticConsumerGroupResponse.FromString, - _registered_method=True) - self.Subscribe = channel.stream_stream( - '/fumarole.Fumarole/Subscribe', - request_serializer=fumarole__pb2.SubscribeRequest.SerializeToString, - response_deserializer=geyser__pb2.SubscribeUpdate.FromString, - _registered_method=True) - self.GetSlotLagInfo = channel.unary_unary( - '/fumarole.Fumarole/GetSlotLagInfo', - request_serializer=fumarole__pb2.GetSlotLagInfoRequest.SerializeToString, - response_deserializer=fumarole__pb2.GetSlotLagInfoResponse.FromString, - _registered_method=True) - - -class FumaroleServicer(object): - 
"""Missing associated documentation comment in .proto file.""" - - def ListAvailableCommitmentLevels(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def GetConsumerGroupInfo(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def ListConsumerGroups(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def DeleteConsumerGroup(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def CreateStaticConsumerGroup(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def Subscribe(self, request_iterator, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def GetSlotLagInfo(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - -def 
add_FumaroleServicer_to_server(servicer, server): - rpc_method_handlers = { - 'ListAvailableCommitmentLevels': grpc.unary_unary_rpc_method_handler( - servicer.ListAvailableCommitmentLevels, - request_deserializer=fumarole__pb2.ListAvailableCommitmentLevelsRequest.FromString, - response_serializer=fumarole__pb2.ListAvailableCommitmentLevelsResponse.SerializeToString, - ), - 'GetConsumerGroupInfo': grpc.unary_unary_rpc_method_handler( - servicer.GetConsumerGroupInfo, - request_deserializer=fumarole__pb2.GetConsumerGroupInfoRequest.FromString, - response_serializer=fumarole__pb2.ConsumerGroupInfo.SerializeToString, - ), - 'ListConsumerGroups': grpc.unary_unary_rpc_method_handler( - servicer.ListConsumerGroups, - request_deserializer=fumarole__pb2.ListConsumerGroupsRequest.FromString, - response_serializer=fumarole__pb2.ListConsumerGroupsResponse.SerializeToString, - ), - 'DeleteConsumerGroup': grpc.unary_unary_rpc_method_handler( - servicer.DeleteConsumerGroup, - request_deserializer=fumarole__pb2.DeleteConsumerGroupRequest.FromString, - response_serializer=fumarole__pb2.DeleteConsumerGroupResponse.SerializeToString, - ), - 'CreateStaticConsumerGroup': grpc.unary_unary_rpc_method_handler( - servicer.CreateStaticConsumerGroup, - request_deserializer=fumarole__pb2.CreateStaticConsumerGroupRequest.FromString, - response_serializer=fumarole__pb2.CreateStaticConsumerGroupResponse.SerializeToString, - ), - 'Subscribe': grpc.stream_stream_rpc_method_handler( - servicer.Subscribe, - request_deserializer=fumarole__pb2.SubscribeRequest.FromString, - response_serializer=geyser__pb2.SubscribeUpdate.SerializeToString, - ), - 'GetSlotLagInfo': grpc.unary_unary_rpc_method_handler( - servicer.GetSlotLagInfo, - request_deserializer=fumarole__pb2.GetSlotLagInfoRequest.FromString, - response_serializer=fumarole__pb2.GetSlotLagInfoResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'fumarole.Fumarole', rpc_method_handlers) - 
server.add_generic_rpc_handlers((generic_handler,)) - server.add_registered_method_handlers('fumarole.Fumarole', rpc_method_handlers) - - - # This class is part of an EXPERIMENTAL API. -class Fumarole(object): - """Missing associated documentation comment in .proto file.""" - - @staticmethod - def ListAvailableCommitmentLevels(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/fumarole.Fumarole/ListAvailableCommitmentLevels', - fumarole__pb2.ListAvailableCommitmentLevelsRequest.SerializeToString, - fumarole__pb2.ListAvailableCommitmentLevelsResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def GetConsumerGroupInfo(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/fumarole.Fumarole/GetConsumerGroupInfo', - fumarole__pb2.GetConsumerGroupInfoRequest.SerializeToString, - fumarole__pb2.ConsumerGroupInfo.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def ListConsumerGroups(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/fumarole.Fumarole/ListConsumerGroups', - fumarole__pb2.ListConsumerGroupsRequest.SerializeToString, - fumarole__pb2.ListConsumerGroupsResponse.FromString, - options, - channel_credentials, - insecure, - 
call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def DeleteConsumerGroup(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/fumarole.Fumarole/DeleteConsumerGroup', - fumarole__pb2.DeleteConsumerGroupRequest.SerializeToString, - fumarole__pb2.DeleteConsumerGroupResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def CreateStaticConsumerGroup(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/fumarole.Fumarole/CreateStaticConsumerGroup', - fumarole__pb2.CreateStaticConsumerGroupRequest.SerializeToString, - fumarole__pb2.CreateStaticConsumerGroupResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def Subscribe(request_iterator, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.stream_stream( - request_iterator, - target, - '/fumarole.Fumarole/Subscribe', - fumarole__pb2.SubscribeRequest.SerializeToString, - geyser__pb2.SubscribeUpdate.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def GetSlotLagInfo(request, - target, - options=(), - channel_credentials=None, - 
call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/fumarole.Fumarole/GetSlotLagInfo', - fumarole__pb2.GetSlotLagInfoRequest.SerializeToString, - fumarole__pb2.GetSlotLagInfoResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) diff --git a/fume/yellowstone_api/geyser_pb2.py b/fume/yellowstone_api/geyser_pb2.py deleted file mode 100644 index efff97f..0000000 --- a/fume/yellowstone_api/geyser_pb2.py +++ /dev/null @@ -1,144 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# NO CHECKED-IN PROTOBUF GENCODE -# source: geyser.proto -# Protobuf Python Version: 5.28.1 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 5, - 28, - 1, - '', - 'geyser.proto' -) -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -import yellowstone_api.solana_storage_pb2 as solana__storage__pb2 - -from yellowstone_api.solana_storage_pb2 import * - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cgeyser.proto\x12\x06geyser\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x14solana-storage.proto\"\x9c\n\n\x10SubscribeRequest\x12\x38\n\x08\x61\x63\x63ounts\x18\x01 \x03(\x0b\x32&.geyser.SubscribeRequest.AccountsEntry\x12\x32\n\x05slots\x18\x02 
\x03(\x0b\x32#.geyser.SubscribeRequest.SlotsEntry\x12@\n\x0ctransactions\x18\x03 \x03(\x0b\x32*.geyser.SubscribeRequest.TransactionsEntry\x12M\n\x13transactions_status\x18\n \x03(\x0b\x32\x30.geyser.SubscribeRequest.TransactionsStatusEntry\x12\x34\n\x06\x62locks\x18\x04 \x03(\x0b\x32$.geyser.SubscribeRequest.BlocksEntry\x12=\n\x0b\x62locks_meta\x18\x05 \x03(\x0b\x32(.geyser.SubscribeRequest.BlocksMetaEntry\x12\x32\n\x05\x65ntry\x18\x08 \x03(\x0b\x32#.geyser.SubscribeRequest.EntryEntry\x12\x30\n\ncommitment\x18\x06 \x01(\x0e\x32\x17.geyser.CommitmentLevelH\x00\x88\x01\x01\x12\x46\n\x13\x61\x63\x63ounts_data_slice\x18\x07 \x03(\x0b\x32).geyser.SubscribeRequestAccountsDataSlice\x12/\n\x04ping\x18\t \x01(\x0b\x32\x1c.geyser.SubscribeRequestPingH\x01\x88\x01\x01\x12\x16\n\tfrom_slot\x18\x0b \x01(\x04H\x02\x88\x01\x01\x1aW\n\rAccountsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.geyser.SubscribeRequestFilterAccounts:\x02\x38\x01\x1aQ\n\nSlotsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x32\n\x05value\x18\x02 \x01(\x0b\x32#.geyser.SubscribeRequestFilterSlots:\x02\x38\x01\x1a_\n\x11TransactionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x39\n\x05value\x18\x02 \x01(\x0b\x32*.geyser.SubscribeRequestFilterTransactions:\x02\x38\x01\x1a\x65\n\x17TransactionsStatusEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x39\n\x05value\x18\x02 \x01(\x0b\x32*.geyser.SubscribeRequestFilterTransactions:\x02\x38\x01\x1aS\n\x0b\x42locksEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x33\n\x05value\x18\x02 \x01(\x0b\x32$.geyser.SubscribeRequestFilterBlocks:\x02\x38\x01\x1a[\n\x0f\x42locksMetaEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x37\n\x05value\x18\x02 \x01(\x0b\x32(.geyser.SubscribeRequestFilterBlocksMeta:\x02\x38\x01\x1aQ\n\nEntryEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x32\n\x05value\x18\x02 
\x01(\x0b\x32#.geyser.SubscribeRequestFilterEntry:\x02\x38\x01\x42\r\n\x0b_commitmentB\x07\n\x05_pingB\x0c\n\n_from_slot\"\xbf\x01\n\x1eSubscribeRequestFilterAccounts\x12\x0f\n\x07\x61\x63\x63ount\x18\x02 \x03(\t\x12\r\n\x05owner\x18\x03 \x03(\t\x12=\n\x07\x66ilters\x18\x04 \x03(\x0b\x32,.geyser.SubscribeRequestFilterAccountsFilter\x12#\n\x16nonempty_txn_signature\x18\x05 \x01(\x08H\x00\x88\x01\x01\x42\x19\n\x17_nonempty_txn_signature\"\xf3\x01\n$SubscribeRequestFilterAccountsFilter\x12\x44\n\x06memcmp\x18\x01 \x01(\x0b\x32\x32.geyser.SubscribeRequestFilterAccountsFilterMemcmpH\x00\x12\x12\n\x08\x64\x61tasize\x18\x02 \x01(\x04H\x00\x12\x1d\n\x13token_account_state\x18\x03 \x01(\x08H\x00\x12H\n\x08lamports\x18\x04 \x01(\x0b\x32\x34.geyser.SubscribeRequestFilterAccountsFilterLamportsH\x00\x42\x08\n\x06\x66ilter\"y\n*SubscribeRequestFilterAccountsFilterMemcmp\x12\x0e\n\x06offset\x18\x01 \x01(\x04\x12\x0f\n\x05\x62ytes\x18\x02 \x01(\x0cH\x00\x12\x10\n\x06\x62\x61se58\x18\x03 \x01(\tH\x00\x12\x10\n\x06\x62\x61se64\x18\x04 \x01(\tH\x00\x42\x06\n\x04\x64\x61ta\"m\n,SubscribeRequestFilterAccountsFilterLamports\x12\x0c\n\x02\x65q\x18\x01 \x01(\x04H\x00\x12\x0c\n\x02ne\x18\x02 \x01(\x04H\x00\x12\x0c\n\x02lt\x18\x03 \x01(\x04H\x00\x12\x0c\n\x02gt\x18\x04 \x01(\x04H\x00\x42\x05\n\x03\x63mp\"\x8f\x01\n\x1bSubscribeRequestFilterSlots\x12!\n\x14\x66ilter_by_commitment\x18\x01 \x01(\x08H\x00\x88\x01\x01\x12\x1e\n\x11interslot_updates\x18\x02 \x01(\x08H\x01\x88\x01\x01\x42\x17\n\x15_filter_by_commitmentB\x14\n\x12_interslot_updates\"\xd2\x01\n\"SubscribeRequestFilterTransactions\x12\x11\n\x04vote\x18\x01 \x01(\x08H\x00\x88\x01\x01\x12\x13\n\x06\x66\x61iled\x18\x02 \x01(\x08H\x01\x88\x01\x01\x12\x16\n\tsignature\x18\x05 \x01(\tH\x02\x88\x01\x01\x12\x17\n\x0f\x61\x63\x63ount_include\x18\x03 \x03(\t\x12\x17\n\x0f\x61\x63\x63ount_exclude\x18\x04 \x03(\t\x12\x18\n\x10\x61\x63\x63ount_required\x18\x06 
\x03(\tB\x07\n\x05_voteB\t\n\x07_failedB\x0c\n\n_signature\"\xd9\x01\n\x1cSubscribeRequestFilterBlocks\x12\x17\n\x0f\x61\x63\x63ount_include\x18\x01 \x03(\t\x12!\n\x14include_transactions\x18\x02 \x01(\x08H\x00\x88\x01\x01\x12\x1d\n\x10include_accounts\x18\x03 \x01(\x08H\x01\x88\x01\x01\x12\x1c\n\x0finclude_entries\x18\x04 \x01(\x08H\x02\x88\x01\x01\x42\x17\n\x15_include_transactionsB\x13\n\x11_include_accountsB\x12\n\x10_include_entries\"\"\n SubscribeRequestFilterBlocksMeta\"\x1d\n\x1bSubscribeRequestFilterEntry\"C\n!SubscribeRequestAccountsDataSlice\x12\x0e\n\x06offset\x18\x01 \x01(\x04\x12\x0e\n\x06length\x18\x02 \x01(\x04\"\"\n\x14SubscribeRequestPing\x12\n\n\x02id\x18\x01 \x01(\x05\"\xb5\x04\n\x0fSubscribeUpdate\x12\x0f\n\x07\x66ilters\x18\x01 \x03(\t\x12\x31\n\x07\x61\x63\x63ount\x18\x02 \x01(\x0b\x32\x1e.geyser.SubscribeUpdateAccountH\x00\x12+\n\x04slot\x18\x03 \x01(\x0b\x32\x1b.geyser.SubscribeUpdateSlotH\x00\x12\x39\n\x0btransaction\x18\x04 \x01(\x0b\x32\".geyser.SubscribeUpdateTransactionH\x00\x12\x46\n\x12transaction_status\x18\n \x01(\x0b\x32(.geyser.SubscribeUpdateTransactionStatusH\x00\x12-\n\x05\x62lock\x18\x05 \x01(\x0b\x32\x1c.geyser.SubscribeUpdateBlockH\x00\x12+\n\x04ping\x18\x06 \x01(\x0b\x32\x1b.geyser.SubscribeUpdatePingH\x00\x12+\n\x04pong\x18\t \x01(\x0b\x32\x1b.geyser.SubscribeUpdatePongH\x00\x12\x36\n\nblock_meta\x18\x07 \x01(\x0b\x32 .geyser.SubscribeUpdateBlockMetaH\x00\x12-\n\x05\x65ntry\x18\x08 \x01(\x0b\x32\x1c.geyser.SubscribeUpdateEntryH\x00\x12.\n\ncreated_at\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x0e\n\x0cupdate_oneof\"o\n\x16SubscribeUpdateAccount\x12\x33\n\x07\x61\x63\x63ount\x18\x01 \x01(\x0b\x32\".geyser.SubscribeUpdateAccountInfo\x12\x0c\n\x04slot\x18\x02 \x01(\x04\x12\x12\n\nis_startup\x18\x03 \x01(\x08\"\xc8\x01\n\x1aSubscribeUpdateAccountInfo\x12\x0e\n\x06pubkey\x18\x01 \x01(\x0c\x12\x10\n\x08lamports\x18\x02 \x01(\x04\x12\r\n\x05owner\x18\x03 \x01(\x0c\x12\x12\n\nexecutable\x18\x04 
\x01(\x08\x12\x12\n\nrent_epoch\x18\x05 \x01(\x04\x12\x0c\n\x04\x64\x61ta\x18\x06 \x01(\x0c\x12\x15\n\rwrite_version\x18\x07 \x01(\x04\x12\x1a\n\rtxn_signature\x18\x08 \x01(\x0cH\x00\x88\x01\x01\x42\x10\n\x0e_txn_signature\"\x8f\x01\n\x13SubscribeUpdateSlot\x12\x0c\n\x04slot\x18\x01 \x01(\x04\x12\x13\n\x06parent\x18\x02 \x01(\x04H\x00\x88\x01\x01\x12\"\n\x06status\x18\x03 \x01(\x0e\x32\x12.geyser.SlotStatus\x12\x17\n\ndead_error\x18\x04 \x01(\tH\x01\x88\x01\x01\x42\t\n\x07_parentB\r\n\x0b_dead_error\"g\n\x1aSubscribeUpdateTransaction\x12;\n\x0btransaction\x18\x01 \x01(\x0b\x32&.geyser.SubscribeUpdateTransactionInfo\x12\x0c\n\x04slot\x18\x02 \x01(\x04\"\xd8\x01\n\x1eSubscribeUpdateTransactionInfo\x12\x11\n\tsignature\x18\x01 \x01(\x0c\x12\x0f\n\x07is_vote\x18\x02 \x01(\x08\x12?\n\x0btransaction\x18\x03 \x01(\x0b\x32*.solana.storage.ConfirmedBlock.Transaction\x12\x42\n\x04meta\x18\x04 \x01(\x0b\x32\x34.solana.storage.ConfirmedBlock.TransactionStatusMeta\x12\r\n\x05index\x18\x05 \x01(\x04\"\xa1\x01\n SubscribeUpdateTransactionStatus\x12\x0c\n\x04slot\x18\x01 \x01(\x04\x12\x11\n\tsignature\x18\x02 \x01(\x0c\x12\x0f\n\x07is_vote\x18\x03 \x01(\x08\x12\r\n\x05index\x18\x04 \x01(\x04\x12<\n\x03\x65rr\x18\x05 \x01(\x0b\x32/.solana.storage.ConfirmedBlock.TransactionError\"\xa0\x04\n\x14SubscribeUpdateBlock\x12\x0c\n\x04slot\x18\x01 \x01(\x04\x12\x11\n\tblockhash\x18\x02 \x01(\t\x12\x37\n\x07rewards\x18\x03 \x01(\x0b\x32&.solana.storage.ConfirmedBlock.Rewards\x12@\n\nblock_time\x18\x04 \x01(\x0b\x32,.solana.storage.ConfirmedBlock.UnixTimestamp\x12@\n\x0c\x62lock_height\x18\x05 \x01(\x0b\x32*.solana.storage.ConfirmedBlock.BlockHeight\x12\x13\n\x0bparent_slot\x18\x07 \x01(\x04\x12\x18\n\x10parent_blockhash\x18\x08 \x01(\t\x12\"\n\x1a\x65xecuted_transaction_count\x18\t \x01(\x04\x12<\n\x0ctransactions\x18\x06 \x03(\x0b\x32&.geyser.SubscribeUpdateTransactionInfo\x12\x1d\n\x15updated_account_count\x18\n \x01(\x04\x12\x34\n\x08\x61\x63\x63ounts\x18\x0b 
\x03(\x0b\x32\".geyser.SubscribeUpdateAccountInfo\x12\x15\n\rentries_count\x18\x0c \x01(\x04\x12-\n\x07\x65ntries\x18\r \x03(\x0b\x32\x1c.geyser.SubscribeUpdateEntry\"\xe2\x02\n\x18SubscribeUpdateBlockMeta\x12\x0c\n\x04slot\x18\x01 \x01(\x04\x12\x11\n\tblockhash\x18\x02 \x01(\t\x12\x37\n\x07rewards\x18\x03 \x01(\x0b\x32&.solana.storage.ConfirmedBlock.Rewards\x12@\n\nblock_time\x18\x04 \x01(\x0b\x32,.solana.storage.ConfirmedBlock.UnixTimestamp\x12@\n\x0c\x62lock_height\x18\x05 \x01(\x0b\x32*.solana.storage.ConfirmedBlock.BlockHeight\x12\x13\n\x0bparent_slot\x18\x06 \x01(\x04\x12\x18\n\x10parent_blockhash\x18\x07 \x01(\t\x12\"\n\x1a\x65xecuted_transaction_count\x18\x08 \x01(\x04\x12\x15\n\rentries_count\x18\t \x01(\x04\"\x9d\x01\n\x14SubscribeUpdateEntry\x12\x0c\n\x04slot\x18\x01 \x01(\x04\x12\r\n\x05index\x18\x02 \x01(\x04\x12\x12\n\nnum_hashes\x18\x03 \x01(\x04\x12\x0c\n\x04hash\x18\x04 \x01(\x0c\x12\"\n\x1a\x65xecuted_transaction_count\x18\x05 \x01(\x04\x12\"\n\x1astarting_transaction_index\x18\x06 \x01(\x04\"\x15\n\x13SubscribeUpdatePing\"!\n\x13SubscribeUpdatePong\x12\n\n\x02id\x18\x01 \x01(\x05\"\x1c\n\x0bPingRequest\x12\r\n\x05\x63ount\x18\x01 \x01(\x05\"\x1d\n\x0cPongResponse\x12\r\n\x05\x63ount\x18\x01 \x01(\x05\"\\\n\x19GetLatestBlockhashRequest\x12\x30\n\ncommitment\x18\x01 \x01(\x0e\x32\x17.geyser.CommitmentLevelH\x00\x88\x01\x01\x42\r\n\x0b_commitment\"^\n\x1aGetLatestBlockhashResponse\x12\x0c\n\x04slot\x18\x01 \x01(\x04\x12\x11\n\tblockhash\x18\x02 \x01(\t\x12\x1f\n\x17last_valid_block_height\x18\x03 \x01(\x04\"X\n\x15GetBlockHeightRequest\x12\x30\n\ncommitment\x18\x01 \x01(\x0e\x32\x17.geyser.CommitmentLevelH\x00\x88\x01\x01\x42\r\n\x0b_commitment\".\n\x16GetBlockHeightResponse\x12\x14\n\x0c\x62lock_height\x18\x01 \x01(\x04\"Q\n\x0eGetSlotRequest\x12\x30\n\ncommitment\x18\x01 \x01(\x0e\x32\x17.geyser.CommitmentLevelH\x00\x88\x01\x01\x42\r\n\x0b_commitment\"\x1f\n\x0fGetSlotResponse\x12\x0c\n\x04slot\x18\x01 
\x01(\x04\"\x13\n\x11GetVersionRequest\"%\n\x12GetVersionResponse\x12\x0f\n\x07version\x18\x01 \x01(\t\"m\n\x17IsBlockhashValidRequest\x12\x11\n\tblockhash\x18\x01 \x01(\t\x12\x30\n\ncommitment\x18\x02 \x01(\x0e\x32\x17.geyser.CommitmentLevelH\x00\x88\x01\x01\x42\r\n\x0b_commitment\"7\n\x18IsBlockhashValidResponse\x12\x0c\n\x04slot\x18\x01 \x01(\x04\x12\r\n\x05valid\x18\x02 \x01(\x08*>\n\x0f\x43ommitmentLevel\x12\r\n\tPROCESSED\x10\x00\x12\r\n\tCONFIRMED\x10\x01\x12\r\n\tFINALIZED\x10\x02*\xa1\x01\n\nSlotStatus\x12\x12\n\x0eSLOT_PROCESSED\x10\x00\x12\x12\n\x0eSLOT_CONFIRMED\x10\x01\x12\x12\n\x0eSLOT_FINALIZED\x10\x02\x12\x1d\n\x19SLOT_FIRST_SHRED_RECEIVED\x10\x03\x12\x12\n\x0eSLOT_COMPLETED\x10\x04\x12\x15\n\x11SLOT_CREATED_BANK\x10\x05\x12\r\n\tSLOT_DEAD\x10\x06\x32\x93\x04\n\x06Geyser\x12\x44\n\tSubscribe\x12\x18.geyser.SubscribeRequest\x1a\x17.geyser.SubscribeUpdate\"\x00(\x01\x30\x01\x12\x33\n\x04Ping\x12\x13.geyser.PingRequest\x1a\x14.geyser.PongResponse\"\x00\x12]\n\x12GetLatestBlockhash\x12!.geyser.GetLatestBlockhashRequest\x1a\".geyser.GetLatestBlockhashResponse\"\x00\x12Q\n\x0eGetBlockHeight\x12\x1d.geyser.GetBlockHeightRequest\x1a\x1e.geyser.GetBlockHeightResponse\"\x00\x12<\n\x07GetSlot\x12\x16.geyser.GetSlotRequest\x1a\x17.geyser.GetSlotResponse\"\x00\x12W\n\x10IsBlockhashValid\x12\x1f.geyser.IsBlockhashValidRequest\x1a .geyser.IsBlockhashValidResponse\"\x00\x12\x45\n\nGetVersion\x12\x19.geyser.GetVersionRequest\x1a\x1a.geyser.GetVersionResponse\"\x00\x42;Z9github.com/rpcpool/yellowstone-grpc/examples/golang/protoP\x01\x62\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'geyser_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - _globals['DESCRIPTOR']._loaded_options = None - _globals['DESCRIPTOR']._serialized_options = b'Z9github.com/rpcpool/yellowstone-grpc/examples/golang/proto' - _globals['_SUBSCRIBEREQUEST_ACCOUNTSENTRY']._loaded_options = 
None - _globals['_SUBSCRIBEREQUEST_ACCOUNTSENTRY']._serialized_options = b'8\001' - _globals['_SUBSCRIBEREQUEST_SLOTSENTRY']._loaded_options = None - _globals['_SUBSCRIBEREQUEST_SLOTSENTRY']._serialized_options = b'8\001' - _globals['_SUBSCRIBEREQUEST_TRANSACTIONSENTRY']._loaded_options = None - _globals['_SUBSCRIBEREQUEST_TRANSACTIONSENTRY']._serialized_options = b'8\001' - _globals['_SUBSCRIBEREQUEST_TRANSACTIONSSTATUSENTRY']._loaded_options = None - _globals['_SUBSCRIBEREQUEST_TRANSACTIONSSTATUSENTRY']._serialized_options = b'8\001' - _globals['_SUBSCRIBEREQUEST_BLOCKSENTRY']._loaded_options = None - _globals['_SUBSCRIBEREQUEST_BLOCKSENTRY']._serialized_options = b'8\001' - _globals['_SUBSCRIBEREQUEST_BLOCKSMETAENTRY']._loaded_options = None - _globals['_SUBSCRIBEREQUEST_BLOCKSMETAENTRY']._serialized_options = b'8\001' - _globals['_SUBSCRIBEREQUEST_ENTRYENTRY']._loaded_options = None - _globals['_SUBSCRIBEREQUEST_ENTRYENTRY']._serialized_options = b'8\001' - _globals['_COMMITMENTLEVEL']._serialized_start=6188 - _globals['_COMMITMENTLEVEL']._serialized_end=6250 - _globals['_SLOTSTATUS']._serialized_start=6253 - _globals['_SLOTSTATUS']._serialized_end=6414 - _globals['_SUBSCRIBEREQUEST']._serialized_start=80 - _globals['_SUBSCRIBEREQUEST']._serialized_end=1388 - _globals['_SUBSCRIBEREQUEST_ACCOUNTSENTRY']._serialized_start=719 - _globals['_SUBSCRIBEREQUEST_ACCOUNTSENTRY']._serialized_end=806 - _globals['_SUBSCRIBEREQUEST_SLOTSENTRY']._serialized_start=808 - _globals['_SUBSCRIBEREQUEST_SLOTSENTRY']._serialized_end=889 - _globals['_SUBSCRIBEREQUEST_TRANSACTIONSENTRY']._serialized_start=891 - _globals['_SUBSCRIBEREQUEST_TRANSACTIONSENTRY']._serialized_end=986 - _globals['_SUBSCRIBEREQUEST_TRANSACTIONSSTATUSENTRY']._serialized_start=988 - _globals['_SUBSCRIBEREQUEST_TRANSACTIONSSTATUSENTRY']._serialized_end=1089 - _globals['_SUBSCRIBEREQUEST_BLOCKSENTRY']._serialized_start=1091 - _globals['_SUBSCRIBEREQUEST_BLOCKSENTRY']._serialized_end=1174 - 
_globals['_SUBSCRIBEREQUEST_BLOCKSMETAENTRY']._serialized_start=1176 - _globals['_SUBSCRIBEREQUEST_BLOCKSMETAENTRY']._serialized_end=1267 - _globals['_SUBSCRIBEREQUEST_ENTRYENTRY']._serialized_start=1269 - _globals['_SUBSCRIBEREQUEST_ENTRYENTRY']._serialized_end=1350 - _globals['_SUBSCRIBEREQUESTFILTERACCOUNTS']._serialized_start=1391 - _globals['_SUBSCRIBEREQUESTFILTERACCOUNTS']._serialized_end=1582 - _globals['_SUBSCRIBEREQUESTFILTERACCOUNTSFILTER']._serialized_start=1585 - _globals['_SUBSCRIBEREQUESTFILTERACCOUNTSFILTER']._serialized_end=1828 - _globals['_SUBSCRIBEREQUESTFILTERACCOUNTSFILTERMEMCMP']._serialized_start=1830 - _globals['_SUBSCRIBEREQUESTFILTERACCOUNTSFILTERMEMCMP']._serialized_end=1951 - _globals['_SUBSCRIBEREQUESTFILTERACCOUNTSFILTERLAMPORTS']._serialized_start=1953 - _globals['_SUBSCRIBEREQUESTFILTERACCOUNTSFILTERLAMPORTS']._serialized_end=2062 - _globals['_SUBSCRIBEREQUESTFILTERSLOTS']._serialized_start=2065 - _globals['_SUBSCRIBEREQUESTFILTERSLOTS']._serialized_end=2208 - _globals['_SUBSCRIBEREQUESTFILTERTRANSACTIONS']._serialized_start=2211 - _globals['_SUBSCRIBEREQUESTFILTERTRANSACTIONS']._serialized_end=2421 - _globals['_SUBSCRIBEREQUESTFILTERBLOCKS']._serialized_start=2424 - _globals['_SUBSCRIBEREQUESTFILTERBLOCKS']._serialized_end=2641 - _globals['_SUBSCRIBEREQUESTFILTERBLOCKSMETA']._serialized_start=2643 - _globals['_SUBSCRIBEREQUESTFILTERBLOCKSMETA']._serialized_end=2677 - _globals['_SUBSCRIBEREQUESTFILTERENTRY']._serialized_start=2679 - _globals['_SUBSCRIBEREQUESTFILTERENTRY']._serialized_end=2708 - _globals['_SUBSCRIBEREQUESTACCOUNTSDATASLICE']._serialized_start=2710 - _globals['_SUBSCRIBEREQUESTACCOUNTSDATASLICE']._serialized_end=2777 - _globals['_SUBSCRIBEREQUESTPING']._serialized_start=2779 - _globals['_SUBSCRIBEREQUESTPING']._serialized_end=2813 - _globals['_SUBSCRIBEUPDATE']._serialized_start=2816 - _globals['_SUBSCRIBEUPDATE']._serialized_end=3381 - _globals['_SUBSCRIBEUPDATEACCOUNT']._serialized_start=3383 - 
_globals['_SUBSCRIBEUPDATEACCOUNT']._serialized_end=3494 - _globals['_SUBSCRIBEUPDATEACCOUNTINFO']._serialized_start=3497 - _globals['_SUBSCRIBEUPDATEACCOUNTINFO']._serialized_end=3697 - _globals['_SUBSCRIBEUPDATESLOT']._serialized_start=3700 - _globals['_SUBSCRIBEUPDATESLOT']._serialized_end=3843 - _globals['_SUBSCRIBEUPDATETRANSACTION']._serialized_start=3845 - _globals['_SUBSCRIBEUPDATETRANSACTION']._serialized_end=3948 - _globals['_SUBSCRIBEUPDATETRANSACTIONINFO']._serialized_start=3951 - _globals['_SUBSCRIBEUPDATETRANSACTIONINFO']._serialized_end=4167 - _globals['_SUBSCRIBEUPDATETRANSACTIONSTATUS']._serialized_start=4170 - _globals['_SUBSCRIBEUPDATETRANSACTIONSTATUS']._serialized_end=4331 - _globals['_SUBSCRIBEUPDATEBLOCK']._serialized_start=4334 - _globals['_SUBSCRIBEUPDATEBLOCK']._serialized_end=4878 - _globals['_SUBSCRIBEUPDATEBLOCKMETA']._serialized_start=4881 - _globals['_SUBSCRIBEUPDATEBLOCKMETA']._serialized_end=5235 - _globals['_SUBSCRIBEUPDATEENTRY']._serialized_start=5238 - _globals['_SUBSCRIBEUPDATEENTRY']._serialized_end=5395 - _globals['_SUBSCRIBEUPDATEPING']._serialized_start=5397 - _globals['_SUBSCRIBEUPDATEPING']._serialized_end=5418 - _globals['_SUBSCRIBEUPDATEPONG']._serialized_start=5420 - _globals['_SUBSCRIBEUPDATEPONG']._serialized_end=5453 - _globals['_PINGREQUEST']._serialized_start=5455 - _globals['_PINGREQUEST']._serialized_end=5483 - _globals['_PONGRESPONSE']._serialized_start=5485 - _globals['_PONGRESPONSE']._serialized_end=5514 - _globals['_GETLATESTBLOCKHASHREQUEST']._serialized_start=5516 - _globals['_GETLATESTBLOCKHASHREQUEST']._serialized_end=5608 - _globals['_GETLATESTBLOCKHASHRESPONSE']._serialized_start=5610 - _globals['_GETLATESTBLOCKHASHRESPONSE']._serialized_end=5704 - _globals['_GETBLOCKHEIGHTREQUEST']._serialized_start=5706 - _globals['_GETBLOCKHEIGHTREQUEST']._serialized_end=5794 - _globals['_GETBLOCKHEIGHTRESPONSE']._serialized_start=5796 - _globals['_GETBLOCKHEIGHTRESPONSE']._serialized_end=5842 - 
_globals['_GETSLOTREQUEST']._serialized_start=5844 - _globals['_GETSLOTREQUEST']._serialized_end=5925 - _globals['_GETSLOTRESPONSE']._serialized_start=5927 - _globals['_GETSLOTRESPONSE']._serialized_end=5958 - _globals['_GETVERSIONREQUEST']._serialized_start=5960 - _globals['_GETVERSIONREQUEST']._serialized_end=5979 - _globals['_GETVERSIONRESPONSE']._serialized_start=5981 - _globals['_GETVERSIONRESPONSE']._serialized_end=6018 - _globals['_ISBLOCKHASHVALIDREQUEST']._serialized_start=6020 - _globals['_ISBLOCKHASHVALIDREQUEST']._serialized_end=6129 - _globals['_ISBLOCKHASHVALIDRESPONSE']._serialized_start=6131 - _globals['_ISBLOCKHASHVALIDRESPONSE']._serialized_end=6186 - _globals['_GEYSER']._serialized_start=6417 - _globals['_GEYSER']._serialized_end=6948 -# @@protoc_insertion_point(module_scope) diff --git a/fume/yellowstone_api/geyser_pb2.pyi b/fume/yellowstone_api/geyser_pb2.pyi deleted file mode 100644 index 09417be..0000000 --- a/fume/yellowstone_api/geyser_pb2.pyi +++ /dev/null @@ -1,501 +0,0 @@ -from google.protobuf import timestamp_pb2 as _timestamp_pb2 -import yellowstone_api.solana_storage_pb2 as _solana_storage_pb2 -from google.protobuf.internal import containers as _containers -from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union -from yellowstone_api.solana_storage_pb2 import ConfirmedBlock as ConfirmedBlock -from yellowstone_api.solana_storage_pb2 import ConfirmedTransaction as ConfirmedTransaction -from yellowstone_api.solana_storage_pb2 import Transaction as Transaction -from yellowstone_api.solana_storage_pb2 import Message as Message -from yellowstone_api.solana_storage_pb2 import MessageHeader as MessageHeader -from yellowstone_api.solana_storage_pb2 import MessageAddressTableLookup as 
MessageAddressTableLookup -from yellowstone_api.solana_storage_pb2 import TransactionStatusMeta as TransactionStatusMeta -from yellowstone_api.solana_storage_pb2 import TransactionError as TransactionError -from yellowstone_api.solana_storage_pb2 import InnerInstructions as InnerInstructions -from yellowstone_api.solana_storage_pb2 import InnerInstruction as InnerInstruction -from yellowstone_api.solana_storage_pb2 import CompiledInstruction as CompiledInstruction -from yellowstone_api.solana_storage_pb2 import TokenBalance as TokenBalance -from yellowstone_api.solana_storage_pb2 import UiTokenAmount as UiTokenAmount -from yellowstone_api.solana_storage_pb2 import ReturnData as ReturnData -from yellowstone_api.solana_storage_pb2 import Reward as Reward -from yellowstone_api.solana_storage_pb2 import Rewards as Rewards -from yellowstone_api.solana_storage_pb2 import UnixTimestamp as UnixTimestamp -from yellowstone_api.solana_storage_pb2 import BlockHeight as BlockHeight -from yellowstone_api.solana_storage_pb2 import NumPartitions as NumPartitions -from yellowstone_api.solana_storage_pb2 import RewardType as RewardType - -DESCRIPTOR: _descriptor.FileDescriptor -Unspecified: _solana_storage_pb2.RewardType -Fee: _solana_storage_pb2.RewardType -Rent: _solana_storage_pb2.RewardType -Staking: _solana_storage_pb2.RewardType -Voting: _solana_storage_pb2.RewardType - -class CommitmentLevel(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): - __slots__ = () - PROCESSED: _ClassVar[CommitmentLevel] - CONFIRMED: _ClassVar[CommitmentLevel] - FINALIZED: _ClassVar[CommitmentLevel] - -class SlotStatus(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): - __slots__ = () - SLOT_PROCESSED: _ClassVar[SlotStatus] - SLOT_CONFIRMED: _ClassVar[SlotStatus] - SLOT_FINALIZED: _ClassVar[SlotStatus] - SLOT_FIRST_SHRED_RECEIVED: _ClassVar[SlotStatus] - SLOT_COMPLETED: _ClassVar[SlotStatus] - SLOT_CREATED_BANK: _ClassVar[SlotStatus] - SLOT_DEAD: _ClassVar[SlotStatus] -PROCESSED: CommitmentLevel 
-CONFIRMED: CommitmentLevel -FINALIZED: CommitmentLevel -SLOT_PROCESSED: SlotStatus -SLOT_CONFIRMED: SlotStatus -SLOT_FINALIZED: SlotStatus -SLOT_FIRST_SHRED_RECEIVED: SlotStatus -SLOT_COMPLETED: SlotStatus -SLOT_CREATED_BANK: SlotStatus -SLOT_DEAD: SlotStatus - -class SubscribeRequest(_message.Message): - __slots__ = ("accounts", "slots", "transactions", "transactions_status", "blocks", "blocks_meta", "entry", "commitment", "accounts_data_slice", "ping", "from_slot") - class AccountsEntry(_message.Message): - __slots__ = ("key", "value") - KEY_FIELD_NUMBER: _ClassVar[int] - VALUE_FIELD_NUMBER: _ClassVar[int] - key: str - value: SubscribeRequestFilterAccounts - def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[SubscribeRequestFilterAccounts, _Mapping]] = ...) -> None: ... - class SlotsEntry(_message.Message): - __slots__ = ("key", "value") - KEY_FIELD_NUMBER: _ClassVar[int] - VALUE_FIELD_NUMBER: _ClassVar[int] - key: str - value: SubscribeRequestFilterSlots - def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[SubscribeRequestFilterSlots, _Mapping]] = ...) -> None: ... - class TransactionsEntry(_message.Message): - __slots__ = ("key", "value") - KEY_FIELD_NUMBER: _ClassVar[int] - VALUE_FIELD_NUMBER: _ClassVar[int] - key: str - value: SubscribeRequestFilterTransactions - def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[SubscribeRequestFilterTransactions, _Mapping]] = ...) -> None: ... - class TransactionsStatusEntry(_message.Message): - __slots__ = ("key", "value") - KEY_FIELD_NUMBER: _ClassVar[int] - VALUE_FIELD_NUMBER: _ClassVar[int] - key: str - value: SubscribeRequestFilterTransactions - def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[SubscribeRequestFilterTransactions, _Mapping]] = ...) -> None: ... 
- class BlocksEntry(_message.Message): - __slots__ = ("key", "value") - KEY_FIELD_NUMBER: _ClassVar[int] - VALUE_FIELD_NUMBER: _ClassVar[int] - key: str - value: SubscribeRequestFilterBlocks - def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[SubscribeRequestFilterBlocks, _Mapping]] = ...) -> None: ... - class BlocksMetaEntry(_message.Message): - __slots__ = ("key", "value") - KEY_FIELD_NUMBER: _ClassVar[int] - VALUE_FIELD_NUMBER: _ClassVar[int] - key: str - value: SubscribeRequestFilterBlocksMeta - def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[SubscribeRequestFilterBlocksMeta, _Mapping]] = ...) -> None: ... - class EntryEntry(_message.Message): - __slots__ = ("key", "value") - KEY_FIELD_NUMBER: _ClassVar[int] - VALUE_FIELD_NUMBER: _ClassVar[int] - key: str - value: SubscribeRequestFilterEntry - def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[SubscribeRequestFilterEntry, _Mapping]] = ...) -> None: ... - ACCOUNTS_FIELD_NUMBER: _ClassVar[int] - SLOTS_FIELD_NUMBER: _ClassVar[int] - TRANSACTIONS_FIELD_NUMBER: _ClassVar[int] - TRANSACTIONS_STATUS_FIELD_NUMBER: _ClassVar[int] - BLOCKS_FIELD_NUMBER: _ClassVar[int] - BLOCKS_META_FIELD_NUMBER: _ClassVar[int] - ENTRY_FIELD_NUMBER: _ClassVar[int] - COMMITMENT_FIELD_NUMBER: _ClassVar[int] - ACCOUNTS_DATA_SLICE_FIELD_NUMBER: _ClassVar[int] - PING_FIELD_NUMBER: _ClassVar[int] - FROM_SLOT_FIELD_NUMBER: _ClassVar[int] - accounts: _containers.MessageMap[str, SubscribeRequestFilterAccounts] - slots: _containers.MessageMap[str, SubscribeRequestFilterSlots] - transactions: _containers.MessageMap[str, SubscribeRequestFilterTransactions] - transactions_status: _containers.MessageMap[str, SubscribeRequestFilterTransactions] - blocks: _containers.MessageMap[str, SubscribeRequestFilterBlocks] - blocks_meta: _containers.MessageMap[str, SubscribeRequestFilterBlocksMeta] - entry: _containers.MessageMap[str, SubscribeRequestFilterEntry] - commitment: CommitmentLevel - 
accounts_data_slice: _containers.RepeatedCompositeFieldContainer[SubscribeRequestAccountsDataSlice] - ping: SubscribeRequestPing - from_slot: int - def __init__(self, accounts: _Optional[_Mapping[str, SubscribeRequestFilterAccounts]] = ..., slots: _Optional[_Mapping[str, SubscribeRequestFilterSlots]] = ..., transactions: _Optional[_Mapping[str, SubscribeRequestFilterTransactions]] = ..., transactions_status: _Optional[_Mapping[str, SubscribeRequestFilterTransactions]] = ..., blocks: _Optional[_Mapping[str, SubscribeRequestFilterBlocks]] = ..., blocks_meta: _Optional[_Mapping[str, SubscribeRequestFilterBlocksMeta]] = ..., entry: _Optional[_Mapping[str, SubscribeRequestFilterEntry]] = ..., commitment: _Optional[_Union[CommitmentLevel, str]] = ..., accounts_data_slice: _Optional[_Iterable[_Union[SubscribeRequestAccountsDataSlice, _Mapping]]] = ..., ping: _Optional[_Union[SubscribeRequestPing, _Mapping]] = ..., from_slot: _Optional[int] = ...) -> None: ... - -class SubscribeRequestFilterAccounts(_message.Message): - __slots__ = ("account", "owner", "filters", "nonempty_txn_signature") - ACCOUNT_FIELD_NUMBER: _ClassVar[int] - OWNER_FIELD_NUMBER: _ClassVar[int] - FILTERS_FIELD_NUMBER: _ClassVar[int] - NONEMPTY_TXN_SIGNATURE_FIELD_NUMBER: _ClassVar[int] - account: _containers.RepeatedScalarFieldContainer[str] - owner: _containers.RepeatedScalarFieldContainer[str] - filters: _containers.RepeatedCompositeFieldContainer[SubscribeRequestFilterAccountsFilter] - nonempty_txn_signature: bool - def __init__(self, account: _Optional[_Iterable[str]] = ..., owner: _Optional[_Iterable[str]] = ..., filters: _Optional[_Iterable[_Union[SubscribeRequestFilterAccountsFilter, _Mapping]]] = ..., nonempty_txn_signature: bool = ...) -> None: ... 
- -class SubscribeRequestFilterAccountsFilter(_message.Message): - __slots__ = ("memcmp", "datasize", "token_account_state", "lamports") - MEMCMP_FIELD_NUMBER: _ClassVar[int] - DATASIZE_FIELD_NUMBER: _ClassVar[int] - TOKEN_ACCOUNT_STATE_FIELD_NUMBER: _ClassVar[int] - LAMPORTS_FIELD_NUMBER: _ClassVar[int] - memcmp: SubscribeRequestFilterAccountsFilterMemcmp - datasize: int - token_account_state: bool - lamports: SubscribeRequestFilterAccountsFilterLamports - def __init__(self, memcmp: _Optional[_Union[SubscribeRequestFilterAccountsFilterMemcmp, _Mapping]] = ..., datasize: _Optional[int] = ..., token_account_state: bool = ..., lamports: _Optional[_Union[SubscribeRequestFilterAccountsFilterLamports, _Mapping]] = ...) -> None: ... - -class SubscribeRequestFilterAccountsFilterMemcmp(_message.Message): - __slots__ = ("offset", "bytes", "base58", "base64") - OFFSET_FIELD_NUMBER: _ClassVar[int] - BYTES_FIELD_NUMBER: _ClassVar[int] - BASE58_FIELD_NUMBER: _ClassVar[int] - BASE64_FIELD_NUMBER: _ClassVar[int] - offset: int - bytes: bytes - base58: str - base64: str - def __init__(self, offset: _Optional[int] = ..., bytes: _Optional[bytes] = ..., base58: _Optional[str] = ..., base64: _Optional[str] = ...) -> None: ... - -class SubscribeRequestFilterAccountsFilterLamports(_message.Message): - __slots__ = ("eq", "ne", "lt", "gt") - EQ_FIELD_NUMBER: _ClassVar[int] - NE_FIELD_NUMBER: _ClassVar[int] - LT_FIELD_NUMBER: _ClassVar[int] - GT_FIELD_NUMBER: _ClassVar[int] - eq: int - ne: int - lt: int - gt: int - def __init__(self, eq: _Optional[int] = ..., ne: _Optional[int] = ..., lt: _Optional[int] = ..., gt: _Optional[int] = ...) -> None: ... 
- -class SubscribeRequestFilterSlots(_message.Message): - __slots__ = ("filter_by_commitment", "interslot_updates") - FILTER_BY_COMMITMENT_FIELD_NUMBER: _ClassVar[int] - INTERSLOT_UPDATES_FIELD_NUMBER: _ClassVar[int] - filter_by_commitment: bool - interslot_updates: bool - def __init__(self, filter_by_commitment: bool = ..., interslot_updates: bool = ...) -> None: ... - -class SubscribeRequestFilterTransactions(_message.Message): - __slots__ = ("vote", "failed", "signature", "account_include", "account_exclude", "account_required") - VOTE_FIELD_NUMBER: _ClassVar[int] - FAILED_FIELD_NUMBER: _ClassVar[int] - SIGNATURE_FIELD_NUMBER: _ClassVar[int] - ACCOUNT_INCLUDE_FIELD_NUMBER: _ClassVar[int] - ACCOUNT_EXCLUDE_FIELD_NUMBER: _ClassVar[int] - ACCOUNT_REQUIRED_FIELD_NUMBER: _ClassVar[int] - vote: bool - failed: bool - signature: str - account_include: _containers.RepeatedScalarFieldContainer[str] - account_exclude: _containers.RepeatedScalarFieldContainer[str] - account_required: _containers.RepeatedScalarFieldContainer[str] - def __init__(self, vote: bool = ..., failed: bool = ..., signature: _Optional[str] = ..., account_include: _Optional[_Iterable[str]] = ..., account_exclude: _Optional[_Iterable[str]] = ..., account_required: _Optional[_Iterable[str]] = ...) -> None: ... - -class SubscribeRequestFilterBlocks(_message.Message): - __slots__ = ("account_include", "include_transactions", "include_accounts", "include_entries") - ACCOUNT_INCLUDE_FIELD_NUMBER: _ClassVar[int] - INCLUDE_TRANSACTIONS_FIELD_NUMBER: _ClassVar[int] - INCLUDE_ACCOUNTS_FIELD_NUMBER: _ClassVar[int] - INCLUDE_ENTRIES_FIELD_NUMBER: _ClassVar[int] - account_include: _containers.RepeatedScalarFieldContainer[str] - include_transactions: bool - include_accounts: bool - include_entries: bool - def __init__(self, account_include: _Optional[_Iterable[str]] = ..., include_transactions: bool = ..., include_accounts: bool = ..., include_entries: bool = ...) -> None: ... 
- -class SubscribeRequestFilterBlocksMeta(_message.Message): - __slots__ = () - def __init__(self) -> None: ... - -class SubscribeRequestFilterEntry(_message.Message): - __slots__ = () - def __init__(self) -> None: ... - -class SubscribeRequestAccountsDataSlice(_message.Message): - __slots__ = ("offset", "length") - OFFSET_FIELD_NUMBER: _ClassVar[int] - LENGTH_FIELD_NUMBER: _ClassVar[int] - offset: int - length: int - def __init__(self, offset: _Optional[int] = ..., length: _Optional[int] = ...) -> None: ... - -class SubscribeRequestPing(_message.Message): - __slots__ = ("id",) - ID_FIELD_NUMBER: _ClassVar[int] - id: int - def __init__(self, id: _Optional[int] = ...) -> None: ... - -class SubscribeUpdate(_message.Message): - __slots__ = ("filters", "account", "slot", "transaction", "transaction_status", "block", "ping", "pong", "block_meta", "entry", "created_at") - FILTERS_FIELD_NUMBER: _ClassVar[int] - ACCOUNT_FIELD_NUMBER: _ClassVar[int] - SLOT_FIELD_NUMBER: _ClassVar[int] - TRANSACTION_FIELD_NUMBER: _ClassVar[int] - TRANSACTION_STATUS_FIELD_NUMBER: _ClassVar[int] - BLOCK_FIELD_NUMBER: _ClassVar[int] - PING_FIELD_NUMBER: _ClassVar[int] - PONG_FIELD_NUMBER: _ClassVar[int] - BLOCK_META_FIELD_NUMBER: _ClassVar[int] - ENTRY_FIELD_NUMBER: _ClassVar[int] - CREATED_AT_FIELD_NUMBER: _ClassVar[int] - filters: _containers.RepeatedScalarFieldContainer[str] - account: SubscribeUpdateAccount - slot: SubscribeUpdateSlot - transaction: SubscribeUpdateTransaction - transaction_status: SubscribeUpdateTransactionStatus - block: SubscribeUpdateBlock - ping: SubscribeUpdatePing - pong: SubscribeUpdatePong - block_meta: SubscribeUpdateBlockMeta - entry: SubscribeUpdateEntry - created_at: _timestamp_pb2.Timestamp - def __init__(self, filters: _Optional[_Iterable[str]] = ..., account: _Optional[_Union[SubscribeUpdateAccount, _Mapping]] = ..., slot: _Optional[_Union[SubscribeUpdateSlot, _Mapping]] = ..., transaction: _Optional[_Union[SubscribeUpdateTransaction, _Mapping]] = ..., 
transaction_status: _Optional[_Union[SubscribeUpdateTransactionStatus, _Mapping]] = ..., block: _Optional[_Union[SubscribeUpdateBlock, _Mapping]] = ..., ping: _Optional[_Union[SubscribeUpdatePing, _Mapping]] = ..., pong: _Optional[_Union[SubscribeUpdatePong, _Mapping]] = ..., block_meta: _Optional[_Union[SubscribeUpdateBlockMeta, _Mapping]] = ..., entry: _Optional[_Union[SubscribeUpdateEntry, _Mapping]] = ..., created_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ...) -> None: ... - -class SubscribeUpdateAccount(_message.Message): - __slots__ = ("account", "slot", "is_startup") - ACCOUNT_FIELD_NUMBER: _ClassVar[int] - SLOT_FIELD_NUMBER: _ClassVar[int] - IS_STARTUP_FIELD_NUMBER: _ClassVar[int] - account: SubscribeUpdateAccountInfo - slot: int - is_startup: bool - def __init__(self, account: _Optional[_Union[SubscribeUpdateAccountInfo, _Mapping]] = ..., slot: _Optional[int] = ..., is_startup: bool = ...) -> None: ... - -class SubscribeUpdateAccountInfo(_message.Message): - __slots__ = ("pubkey", "lamports", "owner", "executable", "rent_epoch", "data", "write_version", "txn_signature") - PUBKEY_FIELD_NUMBER: _ClassVar[int] - LAMPORTS_FIELD_NUMBER: _ClassVar[int] - OWNER_FIELD_NUMBER: _ClassVar[int] - EXECUTABLE_FIELD_NUMBER: _ClassVar[int] - RENT_EPOCH_FIELD_NUMBER: _ClassVar[int] - DATA_FIELD_NUMBER: _ClassVar[int] - WRITE_VERSION_FIELD_NUMBER: _ClassVar[int] - TXN_SIGNATURE_FIELD_NUMBER: _ClassVar[int] - pubkey: bytes - lamports: int - owner: bytes - executable: bool - rent_epoch: int - data: bytes - write_version: int - txn_signature: bytes - def __init__(self, pubkey: _Optional[bytes] = ..., lamports: _Optional[int] = ..., owner: _Optional[bytes] = ..., executable: bool = ..., rent_epoch: _Optional[int] = ..., data: _Optional[bytes] = ..., write_version: _Optional[int] = ..., txn_signature: _Optional[bytes] = ...) -> None: ... 
- -class SubscribeUpdateSlot(_message.Message): - __slots__ = ("slot", "parent", "status", "dead_error") - SLOT_FIELD_NUMBER: _ClassVar[int] - PARENT_FIELD_NUMBER: _ClassVar[int] - STATUS_FIELD_NUMBER: _ClassVar[int] - DEAD_ERROR_FIELD_NUMBER: _ClassVar[int] - slot: int - parent: int - status: SlotStatus - dead_error: str - def __init__(self, slot: _Optional[int] = ..., parent: _Optional[int] = ..., status: _Optional[_Union[SlotStatus, str]] = ..., dead_error: _Optional[str] = ...) -> None: ... - -class SubscribeUpdateTransaction(_message.Message): - __slots__ = ("transaction", "slot") - TRANSACTION_FIELD_NUMBER: _ClassVar[int] - SLOT_FIELD_NUMBER: _ClassVar[int] - transaction: SubscribeUpdateTransactionInfo - slot: int - def __init__(self, transaction: _Optional[_Union[SubscribeUpdateTransactionInfo, _Mapping]] = ..., slot: _Optional[int] = ...) -> None: ... - -class SubscribeUpdateTransactionInfo(_message.Message): - __slots__ = ("signature", "is_vote", "transaction", "meta", "index") - SIGNATURE_FIELD_NUMBER: _ClassVar[int] - IS_VOTE_FIELD_NUMBER: _ClassVar[int] - TRANSACTION_FIELD_NUMBER: _ClassVar[int] - META_FIELD_NUMBER: _ClassVar[int] - INDEX_FIELD_NUMBER: _ClassVar[int] - signature: bytes - is_vote: bool - transaction: _solana_storage_pb2.Transaction - meta: _solana_storage_pb2.TransactionStatusMeta - index: int - def __init__(self, signature: _Optional[bytes] = ..., is_vote: bool = ..., transaction: _Optional[_Union[_solana_storage_pb2.Transaction, _Mapping]] = ..., meta: _Optional[_Union[_solana_storage_pb2.TransactionStatusMeta, _Mapping]] = ..., index: _Optional[int] = ...) -> None: ... 
- -class SubscribeUpdateTransactionStatus(_message.Message): - __slots__ = ("slot", "signature", "is_vote", "index", "err") - SLOT_FIELD_NUMBER: _ClassVar[int] - SIGNATURE_FIELD_NUMBER: _ClassVar[int] - IS_VOTE_FIELD_NUMBER: _ClassVar[int] - INDEX_FIELD_NUMBER: _ClassVar[int] - ERR_FIELD_NUMBER: _ClassVar[int] - slot: int - signature: bytes - is_vote: bool - index: int - err: _solana_storage_pb2.TransactionError - def __init__(self, slot: _Optional[int] = ..., signature: _Optional[bytes] = ..., is_vote: bool = ..., index: _Optional[int] = ..., err: _Optional[_Union[_solana_storage_pb2.TransactionError, _Mapping]] = ...) -> None: ... - -class SubscribeUpdateBlock(_message.Message): - __slots__ = ("slot", "blockhash", "rewards", "block_time", "block_height", "parent_slot", "parent_blockhash", "executed_transaction_count", "transactions", "updated_account_count", "accounts", "entries_count", "entries") - SLOT_FIELD_NUMBER: _ClassVar[int] - BLOCKHASH_FIELD_NUMBER: _ClassVar[int] - REWARDS_FIELD_NUMBER: _ClassVar[int] - BLOCK_TIME_FIELD_NUMBER: _ClassVar[int] - BLOCK_HEIGHT_FIELD_NUMBER: _ClassVar[int] - PARENT_SLOT_FIELD_NUMBER: _ClassVar[int] - PARENT_BLOCKHASH_FIELD_NUMBER: _ClassVar[int] - EXECUTED_TRANSACTION_COUNT_FIELD_NUMBER: _ClassVar[int] - TRANSACTIONS_FIELD_NUMBER: _ClassVar[int] - UPDATED_ACCOUNT_COUNT_FIELD_NUMBER: _ClassVar[int] - ACCOUNTS_FIELD_NUMBER: _ClassVar[int] - ENTRIES_COUNT_FIELD_NUMBER: _ClassVar[int] - ENTRIES_FIELD_NUMBER: _ClassVar[int] - slot: int - blockhash: str - rewards: _solana_storage_pb2.Rewards - block_time: _solana_storage_pb2.UnixTimestamp - block_height: _solana_storage_pb2.BlockHeight - parent_slot: int - parent_blockhash: str - executed_transaction_count: int - transactions: _containers.RepeatedCompositeFieldContainer[SubscribeUpdateTransactionInfo] - updated_account_count: int - accounts: _containers.RepeatedCompositeFieldContainer[SubscribeUpdateAccountInfo] - entries_count: int - entries: 
_containers.RepeatedCompositeFieldContainer[SubscribeUpdateEntry] - def __init__(self, slot: _Optional[int] = ..., blockhash: _Optional[str] = ..., rewards: _Optional[_Union[_solana_storage_pb2.Rewards, _Mapping]] = ..., block_time: _Optional[_Union[_solana_storage_pb2.UnixTimestamp, _Mapping]] = ..., block_height: _Optional[_Union[_solana_storage_pb2.BlockHeight, _Mapping]] = ..., parent_slot: _Optional[int] = ..., parent_blockhash: _Optional[str] = ..., executed_transaction_count: _Optional[int] = ..., transactions: _Optional[_Iterable[_Union[SubscribeUpdateTransactionInfo, _Mapping]]] = ..., updated_account_count: _Optional[int] = ..., accounts: _Optional[_Iterable[_Union[SubscribeUpdateAccountInfo, _Mapping]]] = ..., entries_count: _Optional[int] = ..., entries: _Optional[_Iterable[_Union[SubscribeUpdateEntry, _Mapping]]] = ...) -> None: ... - -class SubscribeUpdateBlockMeta(_message.Message): - __slots__ = ("slot", "blockhash", "rewards", "block_time", "block_height", "parent_slot", "parent_blockhash", "executed_transaction_count", "entries_count") - SLOT_FIELD_NUMBER: _ClassVar[int] - BLOCKHASH_FIELD_NUMBER: _ClassVar[int] - REWARDS_FIELD_NUMBER: _ClassVar[int] - BLOCK_TIME_FIELD_NUMBER: _ClassVar[int] - BLOCK_HEIGHT_FIELD_NUMBER: _ClassVar[int] - PARENT_SLOT_FIELD_NUMBER: _ClassVar[int] - PARENT_BLOCKHASH_FIELD_NUMBER: _ClassVar[int] - EXECUTED_TRANSACTION_COUNT_FIELD_NUMBER: _ClassVar[int] - ENTRIES_COUNT_FIELD_NUMBER: _ClassVar[int] - slot: int - blockhash: str - rewards: _solana_storage_pb2.Rewards - block_time: _solana_storage_pb2.UnixTimestamp - block_height: _solana_storage_pb2.BlockHeight - parent_slot: int - parent_blockhash: str - executed_transaction_count: int - entries_count: int - def __init__(self, slot: _Optional[int] = ..., blockhash: _Optional[str] = ..., rewards: _Optional[_Union[_solana_storage_pb2.Rewards, _Mapping]] = ..., block_time: _Optional[_Union[_solana_storage_pb2.UnixTimestamp, _Mapping]] = ..., block_height: 
_Optional[_Union[_solana_storage_pb2.BlockHeight, _Mapping]] = ..., parent_slot: _Optional[int] = ..., parent_blockhash: _Optional[str] = ..., executed_transaction_count: _Optional[int] = ..., entries_count: _Optional[int] = ...) -> None: ... - -class SubscribeUpdateEntry(_message.Message): - __slots__ = ("slot", "index", "num_hashes", "hash", "executed_transaction_count", "starting_transaction_index") - SLOT_FIELD_NUMBER: _ClassVar[int] - INDEX_FIELD_NUMBER: _ClassVar[int] - NUM_HASHES_FIELD_NUMBER: _ClassVar[int] - HASH_FIELD_NUMBER: _ClassVar[int] - EXECUTED_TRANSACTION_COUNT_FIELD_NUMBER: _ClassVar[int] - STARTING_TRANSACTION_INDEX_FIELD_NUMBER: _ClassVar[int] - slot: int - index: int - num_hashes: int - hash: bytes - executed_transaction_count: int - starting_transaction_index: int - def __init__(self, slot: _Optional[int] = ..., index: _Optional[int] = ..., num_hashes: _Optional[int] = ..., hash: _Optional[bytes] = ..., executed_transaction_count: _Optional[int] = ..., starting_transaction_index: _Optional[int] = ...) -> None: ... - -class SubscribeUpdatePing(_message.Message): - __slots__ = () - def __init__(self) -> None: ... - -class SubscribeUpdatePong(_message.Message): - __slots__ = ("id",) - ID_FIELD_NUMBER: _ClassVar[int] - id: int - def __init__(self, id: _Optional[int] = ...) -> None: ... - -class PingRequest(_message.Message): - __slots__ = ("count",) - COUNT_FIELD_NUMBER: _ClassVar[int] - count: int - def __init__(self, count: _Optional[int] = ...) -> None: ... - -class PongResponse(_message.Message): - __slots__ = ("count",) - COUNT_FIELD_NUMBER: _ClassVar[int] - count: int - def __init__(self, count: _Optional[int] = ...) -> None: ... - -class GetLatestBlockhashRequest(_message.Message): - __slots__ = ("commitment",) - COMMITMENT_FIELD_NUMBER: _ClassVar[int] - commitment: CommitmentLevel - def __init__(self, commitment: _Optional[_Union[CommitmentLevel, str]] = ...) -> None: ... 
- -class GetLatestBlockhashResponse(_message.Message): - __slots__ = ("slot", "blockhash", "last_valid_block_height") - SLOT_FIELD_NUMBER: _ClassVar[int] - BLOCKHASH_FIELD_NUMBER: _ClassVar[int] - LAST_VALID_BLOCK_HEIGHT_FIELD_NUMBER: _ClassVar[int] - slot: int - blockhash: str - last_valid_block_height: int - def __init__(self, slot: _Optional[int] = ..., blockhash: _Optional[str] = ..., last_valid_block_height: _Optional[int] = ...) -> None: ... - -class GetBlockHeightRequest(_message.Message): - __slots__ = ("commitment",) - COMMITMENT_FIELD_NUMBER: _ClassVar[int] - commitment: CommitmentLevel - def __init__(self, commitment: _Optional[_Union[CommitmentLevel, str]] = ...) -> None: ... - -class GetBlockHeightResponse(_message.Message): - __slots__ = ("block_height",) - BLOCK_HEIGHT_FIELD_NUMBER: _ClassVar[int] - block_height: int - def __init__(self, block_height: _Optional[int] = ...) -> None: ... - -class GetSlotRequest(_message.Message): - __slots__ = ("commitment",) - COMMITMENT_FIELD_NUMBER: _ClassVar[int] - commitment: CommitmentLevel - def __init__(self, commitment: _Optional[_Union[CommitmentLevel, str]] = ...) -> None: ... - -class GetSlotResponse(_message.Message): - __slots__ = ("slot",) - SLOT_FIELD_NUMBER: _ClassVar[int] - slot: int - def __init__(self, slot: _Optional[int] = ...) -> None: ... - -class GetVersionRequest(_message.Message): - __slots__ = () - def __init__(self) -> None: ... - -class GetVersionResponse(_message.Message): - __slots__ = ("version",) - VERSION_FIELD_NUMBER: _ClassVar[int] - version: str - def __init__(self, version: _Optional[str] = ...) -> None: ... - -class IsBlockhashValidRequest(_message.Message): - __slots__ = ("blockhash", "commitment") - BLOCKHASH_FIELD_NUMBER: _ClassVar[int] - COMMITMENT_FIELD_NUMBER: _ClassVar[int] - blockhash: str - commitment: CommitmentLevel - def __init__(self, blockhash: _Optional[str] = ..., commitment: _Optional[_Union[CommitmentLevel, str]] = ...) -> None: ... 
- -class IsBlockhashValidResponse(_message.Message): - __slots__ = ("slot", "valid") - SLOT_FIELD_NUMBER: _ClassVar[int] - VALID_FIELD_NUMBER: _ClassVar[int] - slot: int - valid: bool - def __init__(self, slot: _Optional[int] = ..., valid: bool = ...) -> None: ... diff --git a/fume/yellowstone_api/geyser_pb2_grpc.py b/fume/yellowstone_api/geyser_pb2_grpc.py deleted file mode 100644 index c07a0f2..0000000 --- a/fume/yellowstone_api/geyser_pb2_grpc.py +++ /dev/null @@ -1,355 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc -import warnings - -import yellowstone_api.geyser_pb2 as geyser__pb2 - -GRPC_GENERATED_VERSION = '1.68.1' -GRPC_VERSION = grpc.__version__ -_version_not_supported = False - -try: - from grpc._utilities import first_version_is_lower - _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) -except ImportError: - _version_not_supported = True - -if _version_not_supported: - raise RuntimeError( - f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in geyser_pb2_grpc.py depends on' - + f' grpcio>={GRPC_GENERATED_VERSION}.' - + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' - + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' - ) - - -class GeyserStub(object): - """Missing associated documentation comment in .proto file.""" - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.Subscribe = channel.stream_stream( - '/geyser.Geyser/Subscribe', - request_serializer=geyser__pb2.SubscribeRequest.SerializeToString, - response_deserializer=geyser__pb2.SubscribeUpdate.FromString, - _registered_method=True) - self.Ping = channel.unary_unary( - '/geyser.Geyser/Ping', - request_serializer=geyser__pb2.PingRequest.SerializeToString, - response_deserializer=geyser__pb2.PongResponse.FromString, - _registered_method=True) - self.GetLatestBlockhash = channel.unary_unary( - '/geyser.Geyser/GetLatestBlockhash', - request_serializer=geyser__pb2.GetLatestBlockhashRequest.SerializeToString, - response_deserializer=geyser__pb2.GetLatestBlockhashResponse.FromString, - _registered_method=True) - self.GetBlockHeight = channel.unary_unary( - '/geyser.Geyser/GetBlockHeight', - request_serializer=geyser__pb2.GetBlockHeightRequest.SerializeToString, - response_deserializer=geyser__pb2.GetBlockHeightResponse.FromString, - _registered_method=True) - self.GetSlot = channel.unary_unary( - '/geyser.Geyser/GetSlot', - request_serializer=geyser__pb2.GetSlotRequest.SerializeToString, - response_deserializer=geyser__pb2.GetSlotResponse.FromString, - _registered_method=True) - self.IsBlockhashValid = channel.unary_unary( - '/geyser.Geyser/IsBlockhashValid', - request_serializer=geyser__pb2.IsBlockhashValidRequest.SerializeToString, - response_deserializer=geyser__pb2.IsBlockhashValidResponse.FromString, - _registered_method=True) - self.GetVersion = channel.unary_unary( - '/geyser.Geyser/GetVersion', - request_serializer=geyser__pb2.GetVersionRequest.SerializeToString, - response_deserializer=geyser__pb2.GetVersionResponse.FromString, - _registered_method=True) - - -class GeyserServicer(object): - """Missing associated documentation comment in .proto file.""" - - def Subscribe(self, request_iterator, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not 
implemented!') - raise NotImplementedError('Method not implemented!') - - def Ping(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def GetLatestBlockhash(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def GetBlockHeight(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def GetSlot(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def IsBlockhashValid(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def GetVersion(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - -def add_GeyserServicer_to_server(servicer, server): - rpc_method_handlers = { - 'Subscribe': grpc.stream_stream_rpc_method_handler( - servicer.Subscribe, - request_deserializer=geyser__pb2.SubscribeRequest.FromString, - response_serializer=geyser__pb2.SubscribeUpdate.SerializeToString, - ), - 'Ping': 
grpc.unary_unary_rpc_method_handler( - servicer.Ping, - request_deserializer=geyser__pb2.PingRequest.FromString, - response_serializer=geyser__pb2.PongResponse.SerializeToString, - ), - 'GetLatestBlockhash': grpc.unary_unary_rpc_method_handler( - servicer.GetLatestBlockhash, - request_deserializer=geyser__pb2.GetLatestBlockhashRequest.FromString, - response_serializer=geyser__pb2.GetLatestBlockhashResponse.SerializeToString, - ), - 'GetBlockHeight': grpc.unary_unary_rpc_method_handler( - servicer.GetBlockHeight, - request_deserializer=geyser__pb2.GetBlockHeightRequest.FromString, - response_serializer=geyser__pb2.GetBlockHeightResponse.SerializeToString, - ), - 'GetSlot': grpc.unary_unary_rpc_method_handler( - servicer.GetSlot, - request_deserializer=geyser__pb2.GetSlotRequest.FromString, - response_serializer=geyser__pb2.GetSlotResponse.SerializeToString, - ), - 'IsBlockhashValid': grpc.unary_unary_rpc_method_handler( - servicer.IsBlockhashValid, - request_deserializer=geyser__pb2.IsBlockhashValidRequest.FromString, - response_serializer=geyser__pb2.IsBlockhashValidResponse.SerializeToString, - ), - 'GetVersion': grpc.unary_unary_rpc_method_handler( - servicer.GetVersion, - request_deserializer=geyser__pb2.GetVersionRequest.FromString, - response_serializer=geyser__pb2.GetVersionResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'geyser.Geyser', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) - server.add_registered_method_handlers('geyser.Geyser', rpc_method_handlers) - - - # This class is part of an EXPERIMENTAL API. 
-class Geyser(object): - """Missing associated documentation comment in .proto file.""" - - @staticmethod - def Subscribe(request_iterator, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.stream_stream( - request_iterator, - target, - '/geyser.Geyser/Subscribe', - geyser__pb2.SubscribeRequest.SerializeToString, - geyser__pb2.SubscribeUpdate.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def Ping(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/geyser.Geyser/Ping', - geyser__pb2.PingRequest.SerializeToString, - geyser__pb2.PongResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def GetLatestBlockhash(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/geyser.Geyser/GetLatestBlockhash', - geyser__pb2.GetLatestBlockhashRequest.SerializeToString, - geyser__pb2.GetLatestBlockhashResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def GetBlockHeight(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return 
grpc.experimental.unary_unary( - request, - target, - '/geyser.Geyser/GetBlockHeight', - geyser__pb2.GetBlockHeightRequest.SerializeToString, - geyser__pb2.GetBlockHeightResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def GetSlot(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/geyser.Geyser/GetSlot', - geyser__pb2.GetSlotRequest.SerializeToString, - geyser__pb2.GetSlotResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def IsBlockhashValid(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/geyser.Geyser/IsBlockhashValid', - geyser__pb2.IsBlockhashValidRequest.SerializeToString, - geyser__pb2.IsBlockhashValidResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def GetVersion(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/geyser.Geyser/GetVersion', - geyser__pb2.GetVersionRequest.SerializeToString, - geyser__pb2.GetVersionResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - 
_registered_method=True) diff --git a/fume/yellowstone_api/solana_storage_pb2.py b/fume/yellowstone_api/solana_storage_pb2.py deleted file mode 100644 index f434986..0000000 --- a/fume/yellowstone_api/solana_storage_pb2.py +++ /dev/null @@ -1,75 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# NO CHECKED-IN PROTOBUF GENCODE -# source: solana-storage.proto -# Protobuf Python Version: 5.28.1 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 5, - 28, - 1, - '', - 'solana-storage.proto' -) -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x14solana-storage.proto\x12\x1dsolana.storage.ConfirmedBlock\"\xa1\x03\n\x0e\x43onfirmedBlock\x12\x1a\n\x12previous_blockhash\x18\x01 \x01(\t\x12\x11\n\tblockhash\x18\x02 \x01(\t\x12\x13\n\x0bparent_slot\x18\x03 \x01(\x04\x12I\n\x0ctransactions\x18\x04 \x03(\x0b\x32\x33.solana.storage.ConfirmedBlock.ConfirmedTransaction\x12\x36\n\x07rewards\x18\x05 \x03(\x0b\x32%.solana.storage.ConfirmedBlock.Reward\x12@\n\nblock_time\x18\x06 \x01(\x0b\x32,.solana.storage.ConfirmedBlock.UnixTimestamp\x12@\n\x0c\x62lock_height\x18\x07 \x01(\x0b\x32*.solana.storage.ConfirmedBlock.BlockHeight\x12\x44\n\x0enum_partitions\x18\x08 \x01(\x0b\x32,.solana.storage.ConfirmedBlock.NumPartitions\"\x9b\x01\n\x14\x43onfirmedTransaction\x12?\n\x0btransaction\x18\x01 \x01(\x0b\x32*.solana.storage.ConfirmedBlock.Transaction\x12\x42\n\x04meta\x18\x02 
\x01(\x0b\x32\x34.solana.storage.ConfirmedBlock.TransactionStatusMeta\"Z\n\x0bTransaction\x12\x12\n\nsignatures\x18\x01 \x03(\x0c\x12\x37\n\x07message\x18\x02 \x01(\x0b\x32&.solana.storage.ConfirmedBlock.Message\"\xad\x02\n\x07Message\x12<\n\x06header\x18\x01 \x01(\x0b\x32,.solana.storage.ConfirmedBlock.MessageHeader\x12\x14\n\x0c\x61\x63\x63ount_keys\x18\x02 \x03(\x0c\x12\x18\n\x10recent_blockhash\x18\x03 \x01(\x0c\x12H\n\x0cinstructions\x18\x04 \x03(\x0b\x32\x32.solana.storage.ConfirmedBlock.CompiledInstruction\x12\x11\n\tversioned\x18\x05 \x01(\x08\x12W\n\x15\x61\x64\x64ress_table_lookups\x18\x06 \x03(\x0b\x32\x38.solana.storage.ConfirmedBlock.MessageAddressTableLookup\"~\n\rMessageHeader\x12\x1f\n\x17num_required_signatures\x18\x01 \x01(\r\x12$\n\x1cnum_readonly_signed_accounts\x18\x02 \x01(\r\x12&\n\x1enum_readonly_unsigned_accounts\x18\x03 \x01(\r\"d\n\x19MessageAddressTableLookup\x12\x13\n\x0b\x61\x63\x63ount_key\x18\x01 \x01(\x0c\x12\x18\n\x10writable_indexes\x18\x02 \x01(\x0c\x12\x18\n\x10readonly_indexes\x18\x03 \x01(\x0c\"\xda\x05\n\x15TransactionStatusMeta\x12<\n\x03\x65rr\x18\x01 \x01(\x0b\x32/.solana.storage.ConfirmedBlock.TransactionError\x12\x0b\n\x03\x66\x65\x65\x18\x02 \x01(\x04\x12\x14\n\x0cpre_balances\x18\x03 \x03(\x04\x12\x15\n\rpost_balances\x18\x04 \x03(\x04\x12L\n\x12inner_instructions\x18\x05 \x03(\x0b\x32\x30.solana.storage.ConfirmedBlock.InnerInstructions\x12\x1f\n\x17inner_instructions_none\x18\n \x01(\x08\x12\x14\n\x0clog_messages\x18\x06 \x03(\t\x12\x19\n\x11log_messages_none\x18\x0b \x01(\x08\x12G\n\x12pre_token_balances\x18\x07 \x03(\x0b\x32+.solana.storage.ConfirmedBlock.TokenBalance\x12H\n\x13post_token_balances\x18\x08 \x03(\x0b\x32+.solana.storage.ConfirmedBlock.TokenBalance\x12\x36\n\x07rewards\x18\t \x03(\x0b\x32%.solana.storage.ConfirmedBlock.Reward\x12!\n\x19loaded_writable_addresses\x18\x0c \x03(\x0c\x12!\n\x19loaded_readonly_addresses\x18\r \x03(\x0c\x12>\n\x0breturn_data\x18\x0e 
\x01(\x0b\x32).solana.storage.ConfirmedBlock.ReturnData\x12\x18\n\x10return_data_none\x18\x0f \x01(\x08\x12#\n\x16\x63ompute_units_consumed\x18\x10 \x01(\x04H\x00\x88\x01\x01\x42\x19\n\x17_compute_units_consumed\"\x1f\n\x10TransactionError\x12\x0b\n\x03\x65rr\x18\x01 \x01(\x0c\"i\n\x11InnerInstructions\x12\r\n\x05index\x18\x01 \x01(\r\x12\x45\n\x0cinstructions\x18\x02 \x03(\x0b\x32/.solana.storage.ConfirmedBlock.InnerInstruction\"x\n\x10InnerInstruction\x12\x18\n\x10program_id_index\x18\x01 \x01(\r\x12\x10\n\x08\x61\x63\x63ounts\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c\x12\x19\n\x0cstack_height\x18\x04 \x01(\rH\x00\x88\x01\x01\x42\x0f\n\r_stack_height\"O\n\x13\x43ompiledInstruction\x12\x18\n\x10program_id_index\x18\x01 \x01(\r\x12\x10\n\x08\x61\x63\x63ounts\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c\"\x9d\x01\n\x0cTokenBalance\x12\x15\n\raccount_index\x18\x01 \x01(\r\x12\x0c\n\x04mint\x18\x02 \x01(\t\x12\x45\n\x0fui_token_amount\x18\x03 \x01(\x0b\x32,.solana.storage.ConfirmedBlock.UiTokenAmount\x12\r\n\x05owner\x18\x04 \x01(\t\x12\x12\n\nprogram_id\x18\x05 \x01(\t\"^\n\rUiTokenAmount\x12\x11\n\tui_amount\x18\x01 \x01(\x01\x12\x10\n\x08\x64\x65\x63imals\x18\x02 \x01(\r\x12\x0e\n\x06\x61mount\x18\x03 \x01(\t\x12\x18\n\x10ui_amount_string\x18\x04 \x01(\t\".\n\nReturnData\x12\x12\n\nprogram_id\x18\x01 \x01(\x0c\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\"\x94\x01\n\x06Reward\x12\x0e\n\x06pubkey\x18\x01 \x01(\t\x12\x10\n\x08lamports\x18\x02 \x01(\x03\x12\x14\n\x0cpost_balance\x18\x03 \x01(\x04\x12>\n\x0breward_type\x18\x04 \x01(\x0e\x32).solana.storage.ConfirmedBlock.RewardType\x12\x12\n\ncommission\x18\x05 \x01(\t\"\x87\x01\n\x07Rewards\x12\x36\n\x07rewards\x18\x01 \x03(\x0b\x32%.solana.storage.ConfirmedBlock.Reward\x12\x44\n\x0enum_partitions\x18\x02 \x01(\x0b\x32,.solana.storage.ConfirmedBlock.NumPartitions\"\"\n\rUnixTimestamp\x12\x11\n\ttimestamp\x18\x01 \x01(\x03\"#\n\x0b\x42lockHeight\x12\x14\n\x0c\x62lock_height\x18\x01 
\x01(\x04\"\'\n\rNumPartitions\x12\x16\n\x0enum_partitions\x18\x01 \x01(\x04*I\n\nRewardType\x12\x0f\n\x0bUnspecified\x10\x00\x12\x07\n\x03\x46\x65\x65\x10\x01\x12\x08\n\x04Rent\x10\x02\x12\x0b\n\x07Staking\x10\x03\x12\n\n\x06Voting\x10\x04\x42;Z9github.com/rpcpool/yellowstone-grpc/examples/golang/protob\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'solana_storage_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - _globals['DESCRIPTOR']._loaded_options = None - _globals['DESCRIPTOR']._serialized_options = b'Z9github.com/rpcpool/yellowstone-grpc/examples/golang/proto' - _globals['_REWARDTYPE']._serialized_start=3042 - _globals['_REWARDTYPE']._serialized_end=3115 - _globals['_CONFIRMEDBLOCK']._serialized_start=56 - _globals['_CONFIRMEDBLOCK']._serialized_end=473 - _globals['_CONFIRMEDTRANSACTION']._serialized_start=476 - _globals['_CONFIRMEDTRANSACTION']._serialized_end=631 - _globals['_TRANSACTION']._serialized_start=633 - _globals['_TRANSACTION']._serialized_end=723 - _globals['_MESSAGE']._serialized_start=726 - _globals['_MESSAGE']._serialized_end=1027 - _globals['_MESSAGEHEADER']._serialized_start=1029 - _globals['_MESSAGEHEADER']._serialized_end=1155 - _globals['_MESSAGEADDRESSTABLELOOKUP']._serialized_start=1157 - _globals['_MESSAGEADDRESSTABLELOOKUP']._serialized_end=1257 - _globals['_TRANSACTIONSTATUSMETA']._serialized_start=1260 - _globals['_TRANSACTIONSTATUSMETA']._serialized_end=1990 - _globals['_TRANSACTIONERROR']._serialized_start=1992 - _globals['_TRANSACTIONERROR']._serialized_end=2023 - _globals['_INNERINSTRUCTIONS']._serialized_start=2025 - _globals['_INNERINSTRUCTIONS']._serialized_end=2130 - _globals['_INNERINSTRUCTION']._serialized_start=2132 - _globals['_INNERINSTRUCTION']._serialized_end=2252 - _globals['_COMPILEDINSTRUCTION']._serialized_start=2254 - _globals['_COMPILEDINSTRUCTION']._serialized_end=2333 - 
_globals['_TOKENBALANCE']._serialized_start=2336 - _globals['_TOKENBALANCE']._serialized_end=2493 - _globals['_UITOKENAMOUNT']._serialized_start=2495 - _globals['_UITOKENAMOUNT']._serialized_end=2589 - _globals['_RETURNDATA']._serialized_start=2591 - _globals['_RETURNDATA']._serialized_end=2637 - _globals['_REWARD']._serialized_start=2640 - _globals['_REWARD']._serialized_end=2788 - _globals['_REWARDS']._serialized_start=2791 - _globals['_REWARDS']._serialized_end=2926 - _globals['_UNIXTIMESTAMP']._serialized_start=2928 - _globals['_UNIXTIMESTAMP']._serialized_end=2962 - _globals['_BLOCKHEIGHT']._serialized_start=2964 - _globals['_BLOCKHEIGHT']._serialized_end=2999 - _globals['_NUMPARTITIONS']._serialized_start=3001 - _globals['_NUMPARTITIONS']._serialized_end=3040 -# @@protoc_insertion_point(module_scope) diff --git a/fume/yellowstone_api/solana_storage_pb2.pyi b/fume/yellowstone_api/solana_storage_pb2.pyi deleted file mode 100644 index 10312a6..0000000 --- a/fume/yellowstone_api/solana_storage_pb2.pyi +++ /dev/null @@ -1,238 +0,0 @@ -from google.protobuf.internal import containers as _containers -from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union - -DESCRIPTOR: _descriptor.FileDescriptor - -class RewardType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): - __slots__ = () - Unspecified: _ClassVar[RewardType] - Fee: _ClassVar[RewardType] - Rent: _ClassVar[RewardType] - Staking: _ClassVar[RewardType] - Voting: _ClassVar[RewardType] -Unspecified: RewardType -Fee: RewardType -Rent: RewardType -Staking: RewardType -Voting: RewardType - -class ConfirmedBlock(_message.Message): - __slots__ = ("previous_blockhash", "blockhash", "parent_slot", "transactions", "rewards", "block_time", "block_height", "num_partitions") 
- PREVIOUS_BLOCKHASH_FIELD_NUMBER: _ClassVar[int] - BLOCKHASH_FIELD_NUMBER: _ClassVar[int] - PARENT_SLOT_FIELD_NUMBER: _ClassVar[int] - TRANSACTIONS_FIELD_NUMBER: _ClassVar[int] - REWARDS_FIELD_NUMBER: _ClassVar[int] - BLOCK_TIME_FIELD_NUMBER: _ClassVar[int] - BLOCK_HEIGHT_FIELD_NUMBER: _ClassVar[int] - NUM_PARTITIONS_FIELD_NUMBER: _ClassVar[int] - previous_blockhash: str - blockhash: str - parent_slot: int - transactions: _containers.RepeatedCompositeFieldContainer[ConfirmedTransaction] - rewards: _containers.RepeatedCompositeFieldContainer[Reward] - block_time: UnixTimestamp - block_height: BlockHeight - num_partitions: NumPartitions - def __init__(self, previous_blockhash: _Optional[str] = ..., blockhash: _Optional[str] = ..., parent_slot: _Optional[int] = ..., transactions: _Optional[_Iterable[_Union[ConfirmedTransaction, _Mapping]]] = ..., rewards: _Optional[_Iterable[_Union[Reward, _Mapping]]] = ..., block_time: _Optional[_Union[UnixTimestamp, _Mapping]] = ..., block_height: _Optional[_Union[BlockHeight, _Mapping]] = ..., num_partitions: _Optional[_Union[NumPartitions, _Mapping]] = ...) -> None: ... - -class ConfirmedTransaction(_message.Message): - __slots__ = ("transaction", "meta") - TRANSACTION_FIELD_NUMBER: _ClassVar[int] - META_FIELD_NUMBER: _ClassVar[int] - transaction: Transaction - meta: TransactionStatusMeta - def __init__(self, transaction: _Optional[_Union[Transaction, _Mapping]] = ..., meta: _Optional[_Union[TransactionStatusMeta, _Mapping]] = ...) -> None: ... - -class Transaction(_message.Message): - __slots__ = ("signatures", "message") - SIGNATURES_FIELD_NUMBER: _ClassVar[int] - MESSAGE_FIELD_NUMBER: _ClassVar[int] - signatures: _containers.RepeatedScalarFieldContainer[bytes] - message: Message - def __init__(self, signatures: _Optional[_Iterable[bytes]] = ..., message: _Optional[_Union[Message, _Mapping]] = ...) -> None: ... 
- -class Message(_message.Message): - __slots__ = ("header", "account_keys", "recent_blockhash", "instructions", "versioned", "address_table_lookups") - HEADER_FIELD_NUMBER: _ClassVar[int] - ACCOUNT_KEYS_FIELD_NUMBER: _ClassVar[int] - RECENT_BLOCKHASH_FIELD_NUMBER: _ClassVar[int] - INSTRUCTIONS_FIELD_NUMBER: _ClassVar[int] - VERSIONED_FIELD_NUMBER: _ClassVar[int] - ADDRESS_TABLE_LOOKUPS_FIELD_NUMBER: _ClassVar[int] - header: MessageHeader - account_keys: _containers.RepeatedScalarFieldContainer[bytes] - recent_blockhash: bytes - instructions: _containers.RepeatedCompositeFieldContainer[CompiledInstruction] - versioned: bool - address_table_lookups: _containers.RepeatedCompositeFieldContainer[MessageAddressTableLookup] - def __init__(self, header: _Optional[_Union[MessageHeader, _Mapping]] = ..., account_keys: _Optional[_Iterable[bytes]] = ..., recent_blockhash: _Optional[bytes] = ..., instructions: _Optional[_Iterable[_Union[CompiledInstruction, _Mapping]]] = ..., versioned: bool = ..., address_table_lookups: _Optional[_Iterable[_Union[MessageAddressTableLookup, _Mapping]]] = ...) -> None: ... - -class MessageHeader(_message.Message): - __slots__ = ("num_required_signatures", "num_readonly_signed_accounts", "num_readonly_unsigned_accounts") - NUM_REQUIRED_SIGNATURES_FIELD_NUMBER: _ClassVar[int] - NUM_READONLY_SIGNED_ACCOUNTS_FIELD_NUMBER: _ClassVar[int] - NUM_READONLY_UNSIGNED_ACCOUNTS_FIELD_NUMBER: _ClassVar[int] - num_required_signatures: int - num_readonly_signed_accounts: int - num_readonly_unsigned_accounts: int - def __init__(self, num_required_signatures: _Optional[int] = ..., num_readonly_signed_accounts: _Optional[int] = ..., num_readonly_unsigned_accounts: _Optional[int] = ...) -> None: ... 
- -class MessageAddressTableLookup(_message.Message): - __slots__ = ("account_key", "writable_indexes", "readonly_indexes") - ACCOUNT_KEY_FIELD_NUMBER: _ClassVar[int] - WRITABLE_INDEXES_FIELD_NUMBER: _ClassVar[int] - READONLY_INDEXES_FIELD_NUMBER: _ClassVar[int] - account_key: bytes - writable_indexes: bytes - readonly_indexes: bytes - def __init__(self, account_key: _Optional[bytes] = ..., writable_indexes: _Optional[bytes] = ..., readonly_indexes: _Optional[bytes] = ...) -> None: ... - -class TransactionStatusMeta(_message.Message): - __slots__ = ("err", "fee", "pre_balances", "post_balances", "inner_instructions", "inner_instructions_none", "log_messages", "log_messages_none", "pre_token_balances", "post_token_balances", "rewards", "loaded_writable_addresses", "loaded_readonly_addresses", "return_data", "return_data_none", "compute_units_consumed") - ERR_FIELD_NUMBER: _ClassVar[int] - FEE_FIELD_NUMBER: _ClassVar[int] - PRE_BALANCES_FIELD_NUMBER: _ClassVar[int] - POST_BALANCES_FIELD_NUMBER: _ClassVar[int] - INNER_INSTRUCTIONS_FIELD_NUMBER: _ClassVar[int] - INNER_INSTRUCTIONS_NONE_FIELD_NUMBER: _ClassVar[int] - LOG_MESSAGES_FIELD_NUMBER: _ClassVar[int] - LOG_MESSAGES_NONE_FIELD_NUMBER: _ClassVar[int] - PRE_TOKEN_BALANCES_FIELD_NUMBER: _ClassVar[int] - POST_TOKEN_BALANCES_FIELD_NUMBER: _ClassVar[int] - REWARDS_FIELD_NUMBER: _ClassVar[int] - LOADED_WRITABLE_ADDRESSES_FIELD_NUMBER: _ClassVar[int] - LOADED_READONLY_ADDRESSES_FIELD_NUMBER: _ClassVar[int] - RETURN_DATA_FIELD_NUMBER: _ClassVar[int] - RETURN_DATA_NONE_FIELD_NUMBER: _ClassVar[int] - COMPUTE_UNITS_CONSUMED_FIELD_NUMBER: _ClassVar[int] - err: TransactionError - fee: int - pre_balances: _containers.RepeatedScalarFieldContainer[int] - post_balances: _containers.RepeatedScalarFieldContainer[int] - inner_instructions: _containers.RepeatedCompositeFieldContainer[InnerInstructions] - inner_instructions_none: bool - log_messages: _containers.RepeatedScalarFieldContainer[str] - log_messages_none: bool - 
pre_token_balances: _containers.RepeatedCompositeFieldContainer[TokenBalance] - post_token_balances: _containers.RepeatedCompositeFieldContainer[TokenBalance] - rewards: _containers.RepeatedCompositeFieldContainer[Reward] - loaded_writable_addresses: _containers.RepeatedScalarFieldContainer[bytes] - loaded_readonly_addresses: _containers.RepeatedScalarFieldContainer[bytes] - return_data: ReturnData - return_data_none: bool - compute_units_consumed: int - def __init__(self, err: _Optional[_Union[TransactionError, _Mapping]] = ..., fee: _Optional[int] = ..., pre_balances: _Optional[_Iterable[int]] = ..., post_balances: _Optional[_Iterable[int]] = ..., inner_instructions: _Optional[_Iterable[_Union[InnerInstructions, _Mapping]]] = ..., inner_instructions_none: bool = ..., log_messages: _Optional[_Iterable[str]] = ..., log_messages_none: bool = ..., pre_token_balances: _Optional[_Iterable[_Union[TokenBalance, _Mapping]]] = ..., post_token_balances: _Optional[_Iterable[_Union[TokenBalance, _Mapping]]] = ..., rewards: _Optional[_Iterable[_Union[Reward, _Mapping]]] = ..., loaded_writable_addresses: _Optional[_Iterable[bytes]] = ..., loaded_readonly_addresses: _Optional[_Iterable[bytes]] = ..., return_data: _Optional[_Union[ReturnData, _Mapping]] = ..., return_data_none: bool = ..., compute_units_consumed: _Optional[int] = ...) -> None: ... - -class TransactionError(_message.Message): - __slots__ = ("err",) - ERR_FIELD_NUMBER: _ClassVar[int] - err: bytes - def __init__(self, err: _Optional[bytes] = ...) -> None: ... - -class InnerInstructions(_message.Message): - __slots__ = ("index", "instructions") - INDEX_FIELD_NUMBER: _ClassVar[int] - INSTRUCTIONS_FIELD_NUMBER: _ClassVar[int] - index: int - instructions: _containers.RepeatedCompositeFieldContainer[InnerInstruction] - def __init__(self, index: _Optional[int] = ..., instructions: _Optional[_Iterable[_Union[InnerInstruction, _Mapping]]] = ...) -> None: ... 
- -class InnerInstruction(_message.Message): - __slots__ = ("program_id_index", "accounts", "data", "stack_height") - PROGRAM_ID_INDEX_FIELD_NUMBER: _ClassVar[int] - ACCOUNTS_FIELD_NUMBER: _ClassVar[int] - DATA_FIELD_NUMBER: _ClassVar[int] - STACK_HEIGHT_FIELD_NUMBER: _ClassVar[int] - program_id_index: int - accounts: bytes - data: bytes - stack_height: int - def __init__(self, program_id_index: _Optional[int] = ..., accounts: _Optional[bytes] = ..., data: _Optional[bytes] = ..., stack_height: _Optional[int] = ...) -> None: ... - -class CompiledInstruction(_message.Message): - __slots__ = ("program_id_index", "accounts", "data") - PROGRAM_ID_INDEX_FIELD_NUMBER: _ClassVar[int] - ACCOUNTS_FIELD_NUMBER: _ClassVar[int] - DATA_FIELD_NUMBER: _ClassVar[int] - program_id_index: int - accounts: bytes - data: bytes - def __init__(self, program_id_index: _Optional[int] = ..., accounts: _Optional[bytes] = ..., data: _Optional[bytes] = ...) -> None: ... - -class TokenBalance(_message.Message): - __slots__ = ("account_index", "mint", "ui_token_amount", "owner", "program_id") - ACCOUNT_INDEX_FIELD_NUMBER: _ClassVar[int] - MINT_FIELD_NUMBER: _ClassVar[int] - UI_TOKEN_AMOUNT_FIELD_NUMBER: _ClassVar[int] - OWNER_FIELD_NUMBER: _ClassVar[int] - PROGRAM_ID_FIELD_NUMBER: _ClassVar[int] - account_index: int - mint: str - ui_token_amount: UiTokenAmount - owner: str - program_id: str - def __init__(self, account_index: _Optional[int] = ..., mint: _Optional[str] = ..., ui_token_amount: _Optional[_Union[UiTokenAmount, _Mapping]] = ..., owner: _Optional[str] = ..., program_id: _Optional[str] = ...) -> None: ... 
- -class UiTokenAmount(_message.Message): - __slots__ = ("ui_amount", "decimals", "amount", "ui_amount_string") - UI_AMOUNT_FIELD_NUMBER: _ClassVar[int] - DECIMALS_FIELD_NUMBER: _ClassVar[int] - AMOUNT_FIELD_NUMBER: _ClassVar[int] - UI_AMOUNT_STRING_FIELD_NUMBER: _ClassVar[int] - ui_amount: float - decimals: int - amount: str - ui_amount_string: str - def __init__(self, ui_amount: _Optional[float] = ..., decimals: _Optional[int] = ..., amount: _Optional[str] = ..., ui_amount_string: _Optional[str] = ...) -> None: ... - -class ReturnData(_message.Message): - __slots__ = ("program_id", "data") - PROGRAM_ID_FIELD_NUMBER: _ClassVar[int] - DATA_FIELD_NUMBER: _ClassVar[int] - program_id: bytes - data: bytes - def __init__(self, program_id: _Optional[bytes] = ..., data: _Optional[bytes] = ...) -> None: ... - -class Reward(_message.Message): - __slots__ = ("pubkey", "lamports", "post_balance", "reward_type", "commission") - PUBKEY_FIELD_NUMBER: _ClassVar[int] - LAMPORTS_FIELD_NUMBER: _ClassVar[int] - POST_BALANCE_FIELD_NUMBER: _ClassVar[int] - REWARD_TYPE_FIELD_NUMBER: _ClassVar[int] - COMMISSION_FIELD_NUMBER: _ClassVar[int] - pubkey: str - lamports: int - post_balance: int - reward_type: RewardType - commission: str - def __init__(self, pubkey: _Optional[str] = ..., lamports: _Optional[int] = ..., post_balance: _Optional[int] = ..., reward_type: _Optional[_Union[RewardType, str]] = ..., commission: _Optional[str] = ...) -> None: ... - -class Rewards(_message.Message): - __slots__ = ("rewards", "num_partitions") - REWARDS_FIELD_NUMBER: _ClassVar[int] - NUM_PARTITIONS_FIELD_NUMBER: _ClassVar[int] - rewards: _containers.RepeatedCompositeFieldContainer[Reward] - num_partitions: NumPartitions - def __init__(self, rewards: _Optional[_Iterable[_Union[Reward, _Mapping]]] = ..., num_partitions: _Optional[_Union[NumPartitions, _Mapping]] = ...) -> None: ... 
- -class UnixTimestamp(_message.Message): - __slots__ = ("timestamp",) - TIMESTAMP_FIELD_NUMBER: _ClassVar[int] - timestamp: int - def __init__(self, timestamp: _Optional[int] = ...) -> None: ... - -class BlockHeight(_message.Message): - __slots__ = ("block_height",) - BLOCK_HEIGHT_FIELD_NUMBER: _ClassVar[int] - block_height: int - def __init__(self, block_height: _Optional[int] = ...) -> None: ... - -class NumPartitions(_message.Message): - __slots__ = ("num_partitions",) - NUM_PARTITIONS_FIELD_NUMBER: _ClassVar[int] - num_partitions: int - def __init__(self, num_partitions: _Optional[int] = ...) -> None: ... diff --git a/fume/yellowstone_api/solana_storage_pb2_grpc.py b/fume/yellowstone_api/solana_storage_pb2_grpc.py deleted file mode 100644 index a777633..0000000 --- a/fume/yellowstone_api/solana_storage_pb2_grpc.py +++ /dev/null @@ -1,24 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc -import warnings - - -GRPC_GENERATED_VERSION = '1.68.1' -GRPC_VERSION = grpc.__version__ -_version_not_supported = False - -try: - from grpc._utilities import first_version_is_lower - _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) -except ImportError: - _version_not_supported = True - -if _version_not_supported: - raise RuntimeError( - f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in solana_storage_pb2_grpc.py depends on' - + f' grpcio>={GRPC_GENERATED_VERSION}.' - + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' - + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' 
- ) diff --git a/python/yellowstone-fumarole-client/pyproject.toml b/python/yellowstone-fumarole-client/pyproject.toml index 90c55b7..79054bf 100644 --- a/python/yellowstone-fumarole-client/pyproject.toml +++ b/python/yellowstone-fumarole-client/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "yellowstone-fumarole-client" -version = "0.1.0-pre.1" +version = "0.1.0rc2" homepage = "https://github.com/rpcpool/yellowstone-fumarole" repository = "https://github.com/rpcpool/yellowstone-fumarole" description = "Yellowstone Fumarole Python Client" From 100e9361c6378b5c1b9ec5b9d533d7864668e4de Mon Sep 17 00:00:00 2001 From: lvboudre Date: Wed, 9 Jul 2025 14:23:39 -0400 Subject: [PATCH 40/56] updated to solana 2.2 new crates structure (#14) * updated to solana 2.2 new crates structure * update rust toolchains to 1.85 * updated to cargo edition to 2024 --- Cargo.lock | 2887 +++++++---------- Cargo.toml | 13 +- apps/yellowstone-fumarole-cli/Cargo.toml | 5 +- apps/yellowstone-fumarole-cli/src/bin/fume.rs | 3 +- apps/yellowstone-fumarole-cli/src/lib.rs | 12 +- crates/yellowstone-fumarole-client/Cargo.toml | 4 +- crates/yellowstone-fumarole-client/build.rs | 3 +- crates/yellowstone-fumarole-client/src/lib.rs | 2 +- .../src/runtime/mod.rs | 11 +- .../src/runtime/tokio.rs | 2 +- examples/rust/Cargo.toml | 4 +- examples/rust/src/bin/dragonsmouth.rs | 4 +- examples/rust/src/bin/example-fumarole.rs | 4 +- rust-toolchain.toml | 2 +- yellowstone-grpc | 2 +- 15 files changed, 1213 insertions(+), 1745 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 251a3a0..faf1ec2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -23,9 +23,9 @@ dependencies = [ [[package]] name = "adler2" -version = "2.0.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" [[package]] name = "aead" @@ -63,6 +63,31 @@ 
dependencies = [ "zeroize", ] +[[package]] +name = "agave-feature-set" +version = "2.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32056afd80249b32652813bc73f09e807276a820eec81babf5be92d11677e0a3" +dependencies = [ + "ahash", + "solana-epoch-schedule", + "solana-hash", + "solana-pubkey", + "solana-sha256-hasher", + "solana-svm-feature-set", +] + +[[package]] +name = "agave-reserved-account-keys" +version = "2.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48cb3593e075f6d4266b7d8865e22bdd3957d2d4221dfd24b362cb1bd7f529dc" +dependencies = [ + "agave-feature-set", + "solana-pubkey", + "solana-sdk-ids", +] + [[package]] name = "ahash" version = "0.8.12" @@ -70,6 +95,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75" dependencies = [ "cfg-if", + "getrandom 0.3.3", "once_cell", "version_check", "zerocopy", @@ -84,21 +110,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "alloc-no-stdlib" -version = "2.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" - -[[package]] -name = "alloc-stdlib" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" -dependencies = [ - "alloc-no-stdlib", -] - [[package]] name = "android-tzdata" version = "0.1.1" @@ -116,9 +127,9 @@ dependencies = [ [[package]] name = "anstream" -version = "0.6.18" +version = "0.6.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" +checksum = "301af1932e46185686725e0fad2f8f2aa7da69dd70bf6ecc44d6b703844a3933" dependencies = [ "anstyle", "anstyle-parse", @@ -131,36 +142,36 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.10" 
+version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" +checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd" [[package]] name = "anstyle-parse" -version = "0.2.6" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" +checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.1.2" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" +checksum = "6c8bdeb6047d8983be085bab0ba1472e6dc604e7041dbf6fcd5e71523014fae9" dependencies = [ "windows-sys 0.59.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.7" +version = "3.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3534e77181a9cc07539ad51f2141fe32f6c3ffd4df76db8ad92346b003ae4e" +checksum = "403f75924867bb1033c59fbf0797484329750cfbe3c4325cd33127941fabc882" dependencies = [ "anstyle", - "once_cell", + "once_cell_polyfill", "windows-sys 0.59.0", ] @@ -170,123 +181,6 @@ version = "1.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" -[[package]] -name = "ark-bn254" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a22f4561524cd949590d78d7d4c5df8f592430d221f7f3c9497bbafd8972120f" -dependencies = [ - "ark-ec", - "ark-ff", - "ark-std", -] - -[[package]] -name = "ark-ec" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "defd9a439d56ac24968cca0571f598a61bc8c55f71d50a89cda591cb750670ba" -dependencies = [ - "ark-ff", - "ark-poly", - "ark-serialize", - 
"ark-std", - "derivative", - "hashbrown 0.13.2", - "itertools 0.10.5", - "num-traits", - "zeroize", -] - -[[package]] -name = "ark-ff" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec847af850f44ad29048935519032c33da8aa03340876d351dfab5660d2966ba" -dependencies = [ - "ark-ff-asm", - "ark-ff-macros", - "ark-serialize", - "ark-std", - "derivative", - "digest 0.10.7", - "itertools 0.10.5", - "num-bigint 0.4.6", - "num-traits", - "paste", - "rustc_version", - "zeroize", -] - -[[package]] -name = "ark-ff-asm" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ed4aa4fe255d0bc6d79373f7e31d2ea147bcf486cba1be5ba7ea85abdb92348" -dependencies = [ - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-ff-macros" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7abe79b0e4288889c4574159ab790824d0033b9fdcb2a112a3182fac2e514565" -dependencies = [ - "num-bigint 0.4.6", - "num-traits", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-poly" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d320bfc44ee185d899ccbadfa8bc31aab923ce1558716e1997a1e74057fe86bf" -dependencies = [ - "ark-ff", - "ark-serialize", - "ark-std", - "derivative", - "hashbrown 0.13.2", -] - -[[package]] -name = "ark-serialize" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adb7b85a02b83d2f22f89bd5cac66c9c89474240cb6207cb1efc16d098e822a5" -dependencies = [ - "ark-serialize-derive", - "ark-std", - "digest 0.10.7", - "num-bigint 0.4.6", -] - -[[package]] -name = "ark-serialize-derive" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae3281bc6d0fd7e549af32b52511e1302185bd688fd3359fa36423346ff682ea" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-std" 
-version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" -dependencies = [ - "num-traits", - "rand 0.8.5", -] - [[package]] name = "arrayref" version = "0.3.9" @@ -299,32 +193,6 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" -[[package]] -name = "ascii" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eab1c04a571841102f5345a8fc0f6bb3d31c315dec879b5c6e42e40ce7ffa34e" - -[[package]] -name = "assert_matches" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b34d609dfbaf33d6889b2b7106d3ca345eacad44200913df5ba02bfd31d2ba9" - -[[package]] -name = "async-compression" -version = "0.4.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b37fc50485c4f3f736a4fb14199f6d5f5ba008d7f28fe710306c92780f004c07" -dependencies = [ - "brotli", - "flate2", - "futures-core", - "memchr", - "pin-project-lite", - "tokio", -] - [[package]] name = "async-stream" version = "0.3.6" @@ -344,7 +212,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.104", ] [[package]] @@ -355,7 +223,7 @@ checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.104", ] [[package]] @@ -366,9 +234,9 @@ checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" [[package]] name = "autocfg" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" [[package]] name = 
"autotools" @@ -389,8 +257,8 @@ dependencies = [ "axum-core", "bytes", "futures-util", - "http 1.3.1", - "http-body 1.0.1", + "http", + "http-body", "http-body-util", "itoa", "matchit", @@ -400,7 +268,7 @@ dependencies = [ "pin-project-lite", "rustversion", "serde", - "sync_wrapper 1.0.2", + "sync_wrapper", "tower 0.5.2", "tower-layer", "tower-service", @@ -415,13 +283,13 @@ dependencies = [ "async-trait", "bytes", "futures-util", - "http 1.3.1", - "http-body 1.0.1", + "http", + "http-body", "http-body-util", "mime", "pin-project-lite", "rustversion", - "sync_wrapper 1.0.2", + "sync_wrapper", "tower-layer", "tower-service", ] @@ -447,12 +315,6 @@ version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff" -[[package]] -name = "base64" -version = "0.21.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" - [[package]] name = "base64" version = "0.22.1" @@ -468,20 +330,11 @@ dependencies = [ "serde", ] -[[package]] -name = "bitflags" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - [[package]] name = "bitflags" version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967" -dependencies = [ - "serde", -] [[package]] name = "blake3" @@ -558,7 +411,7 @@ dependencies = [ "proc-macro-crate 3.3.0", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.104", ] [[package]] @@ -583,27 +436,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "brotli" -version = "8.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9991eea70ea4f293524138648e41ee89b0b2b12ddef3b255effa43c8056e0e0d" -dependencies = [ - "alloc-no-stdlib", - 
"alloc-stdlib", - "brotli-decompressor", -] - -[[package]] -name = "brotli-decompressor" -version = "5.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "874bb8112abecc98cbd6d81ea4fa7e94fb9449648c93cc89aa40c81c24d7de03" -dependencies = [ - "alloc-no-stdlib", - "alloc-stdlib", -] - [[package]] name = "bs58" version = "0.5.1" @@ -615,9 +447,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.17.0" +version = "3.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" +checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" [[package]] name = "bv" @@ -631,28 +463,28 @@ dependencies = [ [[package]] name = "bytecount" -version = "0.6.8" +version = "0.6.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ce89b21cab1437276d2650d57e971f9d548a2d9037cc231abdc0562b97498ce" +checksum = "175812e0be2bccb6abe50bb8d566126198344f707e304f45c648fd8f2cc0365e" [[package]] name = "bytemuck" -version = "1.23.0" +version = "1.23.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9134a6ef01ce4b366b50689c94f82c14bc72bc5d0386829828a2e2752ef7958c" +checksum = "5c76a5792e44e4abe34d3abf15636779261d45a7450612059293d1d2cfc63422" dependencies = [ "bytemuck_derive", ] [[package]] name = "bytemuck_derive" -version = "1.8.1" +version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fa76293b4f7bb636ab88fd78228235b5248b4d05cc589aed610f954af5d7c7a" +checksum = "7ecc273b49b3205b83d648f0690daa588925572cc5063745bfe547fe7ec8e1a1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.104", ] [[package]] @@ -669,9 +501,9 @@ checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" [[package]] name = "cc" -version = "1.2.23" +version = "1.2.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5f4ac86a9e5bc1e2b3449ab9d7d3a6a405e3d1bb28d7b9be8614f55846ae3766" +checksum = "5c1599538de2394445747c8cf7935946e3cc27e9625f889d979bfb2aaf569362" dependencies = [ "jobserver", "libc", @@ -680,9 +512,9 @@ dependencies = [ [[package]] name = "cfg-if" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268" [[package]] name = "cfg_aliases" @@ -690,17 +522,6 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" -[[package]] -name = "cfg_eval" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45565fc9416b9896014f5732ac776f810ee53a66730c17e4020c3ec064a8f88f" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", -] - [[package]] name = "chrono" version = "0.4.41" @@ -709,10 +530,8 @@ checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d" dependencies = [ "android-tzdata", "iana-time-zone", - "js-sys", "num-traits", "serde", - "wasm-bindgen", "windows-link", ] @@ -728,9 +547,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.38" +version = "4.5.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed93b9805f8ba930df42c2590f05453d5ec36cbb85d018868a5b24d31f6ac000" +checksum = "40b6887a1d8685cebccf115538db5c0efe625ccac9696ad45c409d96566e910f" dependencies = [ "clap_builder", "clap_derive", @@ -738,9 +557,9 @@ dependencies = [ [[package]] name = "clap-verbosity-flag" -version = "3.0.2" +version = "3.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2678fade3b77aa3a8ff3aae87e9c008d3fb00473a41c71fbf74e91c8c7b37e84" +checksum = "eeab6a5cdfc795a05538422012f20a5496f050223c91be4e5420bfd13c641fb1" dependencies = [ "clap", 
"log", @@ -749,9 +568,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.38" +version = "4.5.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "379026ff283facf611b0ea629334361c4211d1b12ee01024eec1591133b04120" +checksum = "e0c66c08ce9f0c698cbce5c0279d0bb6ac936d8674174fe48f736533b964f59e" dependencies = [ "anstream", "anstyle", @@ -761,40 +580,27 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.32" +version = "4.5.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09176aae279615badda0765c0c0b3f6ed53f4709118af73cf4655d85d1530cd7" +checksum = "d2c7947ae4cc3d851207c1adb5b5e260ff0cca11446b1d6d1423788e442257ce" dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.104", ] [[package]] name = "clap_lex" -version = "0.7.4" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" +checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675" [[package]] name = "colorchoice" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" - -[[package]] -name = "combine" -version = "3.8.1" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da3da6baa321ec19e1cc41d31bf599f00c783d0517095cdaf0332e3fe8d20680" -dependencies = [ - "ascii", - "byteorder", - "either", - "memchr", - "unreachable", -] +checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" [[package]] name = "console_error_panic_hook" @@ -824,19 +630,9 @@ checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" [[package]] name = "core-foundation" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "core-foundation" -version = "0.10.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b55271e5c8c478ad3f38ad24ef34923091e0548492a266d19b3c0b4d82574c63" +checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" dependencies = [ "core-foundation-sys", "libc", @@ -866,26 +662,11 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "crossbeam-channel" -version = "0.5.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-utils" -version = "0.8.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" - [[package]] name = "crunchy" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43da5946c66ffcc7745f48db692ffbb10a83bfe0afd96235c5c2a4fb23994929" +checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" [[package]] name = "crypto-common" @@ -898,16 +679,6 @@ dependencies = [ "typenum", ] -[[package]] -name = "crypto-mac" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" -dependencies = [ - "generic-array", - "subtle", -] - [[package]] name = "ctr" version = "0.9.2" @@ -956,7 +727,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.104", ] [[package]] @@ -980,7 +751,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.101", + "syn 2.0.104", ] [[package]] @@ -991,7 +762,7 @@ checksum = 
"fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" dependencies = [ "darling_core", "quote", - "syn 2.0.101", + "syn 2.0.104", ] [[package]] @@ -1010,17 +781,6 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6e5c37193a1db1d8ed868c03ec7b152175f26160a5b740e5e484143877e0adf0" -[[package]] -name = "derivative" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "digest" version = "0.9.0" @@ -1042,21 +802,10 @@ dependencies = [ ] [[package]] -name = "displaydoc" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", -] - -[[package]] -name = "eager" -version = "0.1.0" +name = "dyn-clone" +version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abe71d579d1812060163dff96056261deb5bf6729b100fa2e36a68b9649ba3d3" +checksum = "1c7a8fb8a9fbf66c1f703fe16184d10ca0ee9d23be5b4436400408ba54a95005" [[package]] name = "ed25519" @@ -1081,53 +830,12 @@ dependencies = [ "zeroize", ] -[[package]] -name = "ed25519-dalek-bip32" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d2be62a4061b872c8c0873ee4fc6f101ce7b889d039f019c5fa2af471a59908" -dependencies = [ - "derivation-path", - "ed25519-dalek", - "hmac 0.12.1", - "sha2 0.10.9", -] - [[package]] name = "either" version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" -[[package]] -name = "encoding_rs" -version = "0.8.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "enum-iterator" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fd242f399be1da0a5354aa462d57b4ab2b4ee0683cc552f7c007d2d12d36e94" -dependencies = [ - "enum-iterator-derive", -] - -[[package]] -name = "enum-iterator-derive" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1ab991c1362ac86c61ab6f556cff143daa22e5a15e4e189df818b2fd19fe65b" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", -] - [[package]] name = "equivalent" version = "1.0.2" @@ -1136,12 +844,12 @@ checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "errno" -version = "0.3.12" +version = "0.3.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cea14ef9355e3beab063703aa9dab15afd25f0667c341310c1e5274bb1d0da18" +checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad" dependencies = [ "libc", - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] [[package]] @@ -1162,6 +870,15 @@ version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" +[[package]] +name = "five8" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75b8549488b4715defcb0d8a8a1c1c76a80661b5fa106b4ca0e7fce59d7d875" +dependencies = [ + "five8_core", +] + [[package]] name = "five8_const" version = "0.1.4" @@ -1185,9 +902,9 @@ checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99" [[package]] name = "flate2" -version = "1.1.1" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ced92e76e966ca2fd84c8f7aa01a4aea65b0eb6648d72f7c8f3e2764a67fece" +checksum = 
"4a3d7db9596fecd151c5f638c0ee5d5bd487b6e0ea232e5dc96d5250f6f94b1d" dependencies = [ "crc32fast", "miniz_oxide", @@ -1199,30 +916,6 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" -[[package]] -name = "foreign-types" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" -dependencies = [ - "foreign-types-shared", -] - -[[package]] -name = "foreign-types-shared" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" - -[[package]] -name = "form_urlencoded" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" -dependencies = [ - "percent-encoding", -] - [[package]] name = "futures" version = "0.3.31" @@ -1279,7 +972,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.104", ] [[package]] @@ -1327,21 +1020,10 @@ version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ - "serde", "typenum", "version_check", ] -[[package]] -name = "gethostname" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1ebd34e35c46e00bb73e81363248d627782724609fe1b6396f553f68fe3862e" -dependencies = [ - "libc", - "winapi", -] - [[package]] name = "getrandom" version = "0.1.16" @@ -1349,10 +1031,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" dependencies = [ "cfg-if", - "js-sys", "libc", "wasi 
0.9.0+wasi-snapshot-preview1", - "wasm-bindgen", ] [[package]] @@ -1364,7 +1044,7 @@ dependencies = [ "cfg-if", "js-sys", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi 0.11.1+wasi-snapshot-preview1", "wasm-bindgen", ] @@ -1388,51 +1068,23 @@ checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] name = "h2" -version = "0.3.26" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" -dependencies = [ - "bytes", - "fnv", - "futures-core", - "futures-sink", - "futures-util", - "http 0.2.12", - "indexmap 2.9.0", - "slab", - "tokio", - "tokio-util", - "tracing", -] - -[[package]] -name = "h2" -version = "0.4.10" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9421a676d1b147b16b82c9225157dc629087ef8ec4d5e2960f9437a90dac0a5" +checksum = "17da50a276f1e01e0ba6c029e47b7100754904ee8a278f886546e98575380785" dependencies = [ "atomic-waker", "bytes", "fnv", "futures-core", "futures-sink", - "http 1.3.1", - "indexmap 2.9.0", + "http", + "indexmap 2.10.0", "slab", "tokio", "tokio-util", "tracing", ] -[[package]] -name = "hash32" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0c35f58762feb77d74ebe43bdbc3210f09be9fe6742234d573bacc26ed92b67" -dependencies = [ - "byteorder", -] - [[package]] name = "hashbrown" version = "0.12.3" @@ -1450,9 +1102,9 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.15.3" +version = "0.15.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84b26c544d002229e640969970a2e74021aadf6e2f96372b9c58eff97de08eb3" +checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5" [[package]] name = "heck" @@ -1466,16 +1118,6 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" -[[package]] -name = "hmac" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "126888268dcc288495a26bf004b38c5fdbb31682f992c84ceb046a1f0fe38840" -dependencies = [ - "crypto-mac", - "digest 0.9.0", -] - [[package]] name = "hmac" version = "0.12.1" @@ -1485,28 +1127,6 @@ dependencies = [ "digest 0.10.7", ] -[[package]] -name = "hmac-drbg" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17ea0a1394df5b6574da6e0c1ade9e78868c9fb0a4e5ef4428e32da4676b85b1" -dependencies = [ - "digest 0.9.0", - "generic-array", - "hmac 0.8.1", -] - -[[package]] -name = "http" -version = "0.2.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" -dependencies = [ - "bytes", - "fnv", - "itoa", -] - [[package]] name = "http" version = "1.3.1" @@ -1518,17 +1138,6 @@ dependencies = [ "itoa", ] -[[package]] -name = "http-body" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" -dependencies = [ - "bytes", - "http 0.2.12", - "pin-project-lite", -] - [[package]] name = "http-body" version = "1.0.1" @@ -1536,7 +1145,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http 1.3.1", + "http", ] [[package]] @@ -1547,8 +1156,8 @@ checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" dependencies = [ "bytes", "futures-core", - "http 1.3.1", - "http-body 1.0.1", + "http", + "http-body", "pin-project-lite", ] @@ -1564,30 +1173,6 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" 
-[[package]] -name = "hyper" -version = "0.14.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" -dependencies = [ - "bytes", - "futures-channel", - "futures-core", - "futures-util", - "h2 0.3.26", - "http 0.2.12", - "http-body 0.4.6", - "httparse", - "httpdate", - "itoa", - "pin-project-lite", - "socket2", - "tokio", - "tower-service", - "tracing", - "want", -] - [[package]] name = "hyper" version = "1.6.0" @@ -1597,9 +1182,9 @@ dependencies = [ "bytes", "futures-channel", "futures-util", - "h2 0.4.10", - "http 1.3.1", - "http-body 1.0.1", + "h2", + "http", + "http-body", "httparse", "httpdate", "itoa", @@ -1609,27 +1194,13 @@ dependencies = [ "want", ] -[[package]] -name = "hyper-rustls" -version = "0.24.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" -dependencies = [ - "futures-util", - "http 0.2.12", - "hyper 0.14.32", - "rustls 0.21.12", - "tokio", - "tokio-rustls 0.24.1", -] - [[package]] name = "hyper-timeout" version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" dependencies = [ - "hyper 1.6.0", + "hyper", "hyper-util", "pin-project-lite", "tokio", @@ -1638,16 +1209,17 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.11" +version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "497bbc33a26fdd4af9ed9c70d63f61cf56a938375fbb32df34db9b1cd6d643f2" +checksum = "7f66d5bd4c6f02bf0542fad85d626775bab9258cf795a4256dcaf3161114d1df" dependencies = [ "bytes", "futures-channel", + "futures-core", "futures-util", - "http 1.3.1", - "http-body 1.0.1", - "hyper 1.6.0", + "http", + "http-body", + "hyper", "libc", "pin-project-lite", "socket2", @@ -1680,119 +1252,12 @@ dependencies = [ "cc", ] -[[package]] -name = 
"icu_collections" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47" -dependencies = [ - "displaydoc", - "potential_utf", - "yoke", - "zerofrom", - "zerovec", -] - -[[package]] -name = "icu_locale_core" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a" -dependencies = [ - "displaydoc", - "litemap", - "tinystr", - "writeable", - "zerovec", -] - -[[package]] -name = "icu_normalizer" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979" -dependencies = [ - "displaydoc", - "icu_collections", - "icu_normalizer_data", - "icu_properties", - "icu_provider", - "smallvec", - "zerovec", -] - -[[package]] -name = "icu_normalizer_data" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3" - -[[package]] -name = "icu_properties" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2549ca8c7241c82f59c80ba2a6f415d931c5b58d24fb8412caa1a1f02c49139a" -dependencies = [ - "displaydoc", - "icu_collections", - "icu_locale_core", - "icu_properties_data", - "icu_provider", - "potential_utf", - "zerotrie", - "zerovec", -] - -[[package]] -name = "icu_properties_data" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8197e866e47b68f8f7d95249e172903bec06004b18b2937f1095d40a0c57de04" - -[[package]] -name = "icu_provider" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af" -dependencies = [ - "displaydoc", - "icu_locale_core", - "stable_deref_trait", - 
"tinystr", - "writeable", - "yoke", - "zerofrom", - "zerotrie", - "zerovec", -] - [[package]] name = "ident_case" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" -[[package]] -name = "idna" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" -dependencies = [ - "idna_adapter", - "smallvec", - "utf8_iter", -] - -[[package]] -name = "idna_adapter" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" -dependencies = [ - "icu_normalizer", - "icu_properties", -] - [[package]] name = "indexmap" version = "1.9.3" @@ -1806,12 +1271,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.9.0" +version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e" +checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661" dependencies = [ "equivalent", - "hashbrown 0.15.3", + "hashbrown 0.15.4", "serde", ] @@ -1825,10 +1290,15 @@ dependencies = [ ] [[package]] -name = "ipnet" -version = "2.11.0" +name = "io-uring" +version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" +checksum = "b86e202f00093dcba4275d4636b93ef9dd75d025ae560d2521b45ea28ab49013" +dependencies = [ + "bitflags", + "cfg-if", + "libc", +] [[package]] name = "is_terminal_polyfill" @@ -1836,15 +1306,6 @@ version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" -[[package]] -name = "itertools" -version = "0.10.5" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" -dependencies = [ - "either", -] - [[package]] name = "itertools" version = "0.12.1" @@ -1889,6 +1350,16 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "kaigan" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ba15de5aeb137f0f65aa3bf82187647f1285abfe5b20c80c2c37f7007ad519a" +dependencies = [ + "borsh 0.10.4", + "serde", +] + [[package]] name = "keccak" version = "0.1.5" @@ -1906,9 +1377,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" -version = "0.2.172" +version = "0.2.174" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa" +checksum = "1171693293099992e19cddea4e8b849964e9846f4acee11b3948bcc337be8776" [[package]] name = "libsecp256k1" @@ -1919,14 +1390,12 @@ dependencies = [ "arrayref", "base64 0.12.3", "digest 0.9.0", - "hmac-drbg", "libsecp256k1-core", "libsecp256k1-gen-ecmult", "libsecp256k1-gen-genmult", "rand 0.7.3", "serde", "sha2 0.9.9", - "typenum", ] [[package]] @@ -1964,17 +1433,11 @@ version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" -[[package]] -name = "litemap" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" - [[package]] name = "lock_api" -version = "0.4.12" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +checksum = "96936507f153605bddfcda068dd804796c84324ed2510809e5b2a624c81da765" dependencies = [ "autocfg", "scopeguard", @@ -2003,18 +1466,9 @@ checksum = 
"0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" [[package]] name = "memchr" -version = "2.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" - -[[package]] -name = "memmap2" -version = "0.5.10" +version = "2.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83faa42c0a078c393f6b29d5db232d8be22776a891f8f56e5284faee4a20b327" -dependencies = [ - "libc", -] +checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" [[package]] name = "memoffset" @@ -2045,22 +1499,22 @@ checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" [[package]] name = "miniz_oxide" -version = "0.8.8" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3be647b768db090acb35d5ec5db2b0e1f1de11133ca123b9eacf5137868f892a" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" dependencies = [ "adler2", ] [[package]] name = "mio" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" +checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c" dependencies = [ "libc", - "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys 0.52.0", + "wasi 0.11.1+wasi-snapshot-preview1", + "windows-sys 0.59.0", ] [[package]] @@ -2079,31 +1533,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "num" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8536030f9fea7127f841b45bb6243b27255787fb4eb83958aa1ef9d2fdc0c36" -dependencies = [ - "num-bigint 0.2.6", - "num-complex", - "num-integer", - "num-iter", - "num-rational", - "num-traits", -] - -[[package]] -name = "num-bigint" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"090c7f9998ee0ff65aa5b723e4009f7b217707f1fb5ea551329cc4d6231fb304" -dependencies = [ - "autocfg", - "num-integer", - "num-traits", -] - [[package]] name = "num-bigint" version = "0.4.6" @@ -2114,16 +1543,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "num-complex" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6b19411a9719e753aff12e5187b74d60d3dc449ec3f4dc21e3989c3f554bc95" -dependencies = [ - "autocfg", - "num-traits", -] - [[package]] name = "num-conv" version = "0.1.0" @@ -2138,7 +1557,7 @@ checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.104", ] [[package]] @@ -2150,29 +1569,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "num-iter" -version = "0.1.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" -dependencies = [ - "autocfg", - "num-integer", - "num-traits", -] - -[[package]] -name = "num-rational" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c000134b5dbf44adc5cb772486d335293351644b801551abe8f75c84cfa4aef" -dependencies = [ - "autocfg", - "num-bigint 0.2.6", - "num-integer", - "num-traits", -] - [[package]] name = "num-traits" version = "0.2.19" @@ -2184,23 +1580,24 @@ dependencies = [ [[package]] name = "num_enum" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e613fc340b2220f734a8595782c551f1250e969d87d3be1ae0579e8d4065179" +checksum = "a973b4e44ce6cad84ce69d797acf9a044532e4184c4f267913d1b546a0727b7a" dependencies = [ "num_enum_derive", + "rustversion", ] [[package]] name = "num_enum_derive" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af1844ef2428cc3e1cb900be36181049ef3d3193c63e43026cfe202983b27a56" 
+checksum = "77e878c846a8abae00dd069496dbe8751b16ac1c3d6bd2a7283a938e8228f90d" dependencies = [ "proc-macro-crate 3.3.0", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.104", ] [[package]] @@ -2218,6 +1615,12 @@ version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" +[[package]] +name = "once_cell_polyfill" +version = "1.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad" + [[package]] name = "opaque-debug" version = "0.3.1" @@ -2225,49 +1628,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" [[package]] -name = "openssl" -version = "0.10.72" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fedfea7d58a1f73118430a55da6a286e7b044961736ce96a16a17068ea25e5da" -dependencies = [ - "bitflags 2.9.1", - "cfg-if", - "foreign-types", - "libc", - "once_cell", - "openssl-macros", - "openssl-sys", -] - -[[package]] -name = "openssl-macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", -] - -[[package]] -name = "openssl-probe" -version = "0.1.6" +name = "openssl-probe" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" -[[package]] -name = "openssl-sys" -version = "0.9.108" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e145e1651e858e820e4860f7b9c5e169bc1d8ce1c86043be79fa7b7634821847" -dependencies = [ - "cc", - "libc", - "pkg-config", - "vcpkg", -] - [[package]] name = "overload" version = "0.1.1" @@ -2287,9 +1652,9 @@ 
dependencies = [ [[package]] name = "parking_lot" -version = "0.12.3" +version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +checksum = "70d58bf43669b5795d1576d0641cfb6fbb2057bf629506267a92807158584a13" dependencies = [ "lock_api", "parking_lot_core", @@ -2297,9 +1662,9 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.10" +version = "0.9.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" +checksum = "bc838d2a56b5b1a6c25f55575dfc605fabb63bb2365f6c2353ef9159aa69e4a5" dependencies = [ "cfg-if", "libc", @@ -2308,12 +1673,6 @@ dependencies = [ "windows-targets 0.52.6", ] -[[package]] -name = "paste" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" - [[package]] name = "pbkdf2" version = "0.11.0" @@ -2329,15 +1688,6 @@ version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" -[[package]] -name = "percentage" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fd23b938276f14057220b707937bcb42fa76dda7560e57a2da30cb52d557937" -dependencies = [ - "num", -] - [[package]] name = "petgraph" version = "0.7.1" @@ -2345,7 +1695,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3672b37090dbd86368a4145bc067582552b29c27377cad4e0a306c97f9bd7772" dependencies = [ "fixedbitset", - "indexmap 2.9.0", + "indexmap 2.10.0", ] [[package]] @@ -2365,7 +1715,7 @@ checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.104", ] [[package]] @@ -2398,15 +1748,6 @@ dependencies = [ 
"universal-hash", ] -[[package]] -name = "potential_utf" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585" -dependencies = [ - "zerovec", -] - [[package]] name = "powerfmt" version = "0.2.0" @@ -2424,12 +1765,12 @@ dependencies = [ [[package]] name = "prettyplease" -version = "0.2.32" +version = "0.2.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "664ec5419c51e34154eec046ebcba56312d5a2fc3b09a06da188e1ad21afadf6" +checksum = "061c1221631e079b26479d25bbf2275bfe5917ae8419cd7e34f13bfc2aa7539a" dependencies = [ "proc-macro2", - "syn 2.0.101", + "syn 2.0.104", ] [[package]] @@ -2469,7 +1810,7 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.104", ] [[package]] @@ -2522,7 +1863,7 @@ dependencies = [ "prost", "prost-types", "regex", - "syn 2.0.101", + "syn 2.0.104", "tempfile", ] @@ -2536,7 +1877,7 @@ dependencies = [ "itertools 0.14.0", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.104", ] [[package]] @@ -2597,9 +1938,9 @@ dependencies = [ [[package]] name = "r-efi" -version = "5.2.0" +version = "5.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" [[package]] name = "rand" @@ -2674,11 +2015,31 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.12" +version = "0.5.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d04b7d0ee6b4a0207a0a7adb104d23ecb0b47d6beae7152d0fa34b692b29fd6" +dependencies = [ + "bitflags", +] + +[[package]] +name = "ref-cast" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a0ae411dbe946a674d89546582cea4ba2bb8defac896622d6496f14c23ba5cf" +dependencies = [ + "ref-cast-impl", +] 
+ +[[package]] +name = "ref-cast-impl" +version = "1.0.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "928fca9cf2aa042393a8325b9ead81d2f0df4cb12e1e24cef072922ccd99c5af" +checksum = "1165225c21bff1f3bbce98f5a1f889949bc902d3575308cc7b0de30b4f6d27c7" dependencies = [ - "bitflags 2.9.1", + "proc-macro2", + "quote", + "syn 2.0.104", ] [[package]] @@ -2725,49 +2086,6 @@ version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" -[[package]] -name = "reqwest" -version = "0.11.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" -dependencies = [ - "async-compression", - "base64 0.21.7", - "bytes", - "encoding_rs", - "futures-core", - "futures-util", - "h2 0.3.26", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.32", - "hyper-rustls", - "ipnet", - "js-sys", - "log", - "mime", - "once_cell", - "percent-encoding", - "pin-project-lite", - "rustls 0.21.12", - "rustls-pemfile 1.0.4", - "serde", - "serde_json", - "serde_urlencoded", - "sync_wrapper 0.1.2", - "system-configuration", - "tokio", - "tokio-rustls 0.24.1", - "tokio-util", - "tower-service", - "url", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", - "webpki-roots", - "winreg", -] - [[package]] name = "ring" version = "0.17.14" @@ -2784,9 +2102,9 @@ dependencies = [ [[package]] name = "rustc-demangle" -version = "0.1.24" +version = "0.1.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" +checksum = "989e6739f80c4ad5b13e0fd7fe89531180375b18520cc8c82080e4dc4035b84f" [[package]] name = "rustc_version" @@ -2803,7 +2121,7 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c71e83d6afe7ff64890ec6b71d6a69bb8a610ab78ce364b3352876bb4c801266" 
dependencies = [ - "bitflags 2.9.1", + "bitflags", "errno", "libc", "linux-raw-sys", @@ -2812,27 +2130,15 @@ dependencies = [ [[package]] name = "rustls" -version = "0.21.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" -dependencies = [ - "log", - "ring", - "rustls-webpki 0.101.7", - "sct", -] - -[[package]] -name = "rustls" -version = "0.23.27" +version = "0.23.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "730944ca083c1c233a75c09f199e973ca499344a2b7ba9e755c457e86fb4a321" +checksum = "7160e3e10bf4535308537f3c4e1641468cd0e485175d6163087c0393c7d46643" dependencies = [ "log", "once_cell", "ring", "rustls-pki-types", - "rustls-webpki 0.103.3", + "rustls-webpki", "subtle", "zeroize", ] @@ -2849,15 +2155,6 @@ dependencies = [ "security-framework", ] -[[package]] -name = "rustls-pemfile" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" -dependencies = [ - "base64 0.21.7", -] - [[package]] name = "rustls-pemfile" version = "2.2.0" @@ -2876,16 +2173,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "rustls-webpki" -version = "0.101.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" -dependencies = [ - "ring", - "untrusted", -] - [[package]] name = "rustls-webpki" version = "0.103.3" @@ -2899,9 +2186,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.20" +version = "1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eded382c5f5f786b989652c49544c4877d9f015cc22e145a5ea8ea66c2921cd2" +checksum = "8a0d197bd2c9dc6e53b84da9556a69ba4cdfab8619eb41a8bd1cc2027a0f6b1d" [[package]] name = "ryu" @@ -2919,26 +2206,34 @@ dependencies = [ ] [[package]] -name = "scopeguard" -version = 
"1.2.0" +name = "schemars" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" +checksum = "4cd191f9397d57d581cddd31014772520aa448f65ef991055d7f61582c65165f" +dependencies = [ + "dyn-clone", + "ref-cast", + "serde", + "serde_json", +] [[package]] -name = "scroll" -version = "0.11.0" +name = "schemars" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04c565b551bafbef4157586fa379538366e4385d42082f255bfd96e4fe8519da" +checksum = "82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0" +dependencies = [ + "dyn-clone", + "ref-cast", + "serde", + "serde_json", +] [[package]] -name = "sct" -version = "0.7.1" +name = "scopeguard" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" -dependencies = [ - "ring", - "untrusted", -] +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "security-framework" @@ -2946,8 +2241,8 @@ version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "271720403f46ca04f7ba6f55d438f8bd878d6b8ca0a1046e8228c4145bcbb316" dependencies = [ - "bitflags 2.9.1", - "core-foundation 0.10.0", + "bitflags", + "core-foundation", "core-foundation-sys", "libc", "security-framework-sys", @@ -2978,6 +2273,15 @@ dependencies = [ "serde_derive", ] +[[package]] +name = "serde-big-array" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11fc7cc2c76d73e0f27ee52abbd64eec84d46f370c88371120433196934e4b7f" +dependencies = [ + "serde", +] + [[package]] name = "serde_bytes" version = "0.11.17" @@ -2995,7 +2299,7 @@ checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.104", ] 
[[package]] @@ -3010,29 +2314,19 @@ dependencies = [ "serde", ] -[[package]] -name = "serde_urlencoded" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" -dependencies = [ - "form_urlencoded", - "itoa", - "ryu", - "serde", -] - [[package]] name = "serde_with" -version = "3.12.0" +version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6b6f7f2fcb69f747921f79f3926bd1e203fce4fef62c268dd3abfb6d86029aa" +checksum = "f2c45cd61fefa9db6f254525d46e392b852e0e61d9a1fd36e5bd183450a556d5" dependencies = [ "base64 0.22.1", "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.9.0", + "indexmap 2.10.0", + "schemars 0.9.0", + "schemars 1.0.4", "serde", "serde_derive", "serde_json", @@ -3042,14 +2336,14 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "3.12.0" +version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d00caa5193a3c8362ac2b73be6b9e768aa5a4b2f721d8f4b339600c3cb51f8e" +checksum = "de90945e6565ce0d9a25098082ed4ee4002e047cb59892c318d66821e14bb30f" dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.104", ] [[package]] @@ -3058,7 +2352,7 @@ version = "0.9.34+deprecated" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" dependencies = [ - "indexmap 2.9.0", + "indexmap 2.10.0", "itoa", "ryu", "serde", @@ -3129,32 +2423,23 @@ version = "1.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" -[[package]] -name = "siphasher" -version = "0.3.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" - [[package]] name = "slab" -version = "0.4.9" +version = "0.4.10" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" -dependencies = [ - "autocfg", -] +checksum = "04dc19736151f35336d325007ac991178d504a119863a2fcb3758cdb5e52c50d" [[package]] name = "smallvec" -version = "1.15.0" +version = "1.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" [[package]] name = "socket2" -version = "0.5.9" +version = "0.5.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f5fd57c80058a56cf5c777ab8a126398ece8e442983605d280a44ce79d0edef" +checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" dependencies = [ "libc", "windows-sys 0.52.0", @@ -3162,49 +2447,70 @@ dependencies = [ [[package]] name = "solana-account" -version = "2.1.21" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9a495abef137c65f58282720384262503172cddb937c94c1a01f0a6c553a0dc" +checksum = "0f949fe4edaeaea78c844023bfc1c898e0b1f5a100f8a8d2d0f85d0a7b090258" dependencies = [ "bincode", "serde", "serde_bytes", "serde_derive", + "solana-account-info", + "solana-clock", "solana-instruction", - "solana-program", + "solana-pubkey", + "solana-sdk-ids", + "solana-sysvar", ] [[package]] name = "solana-account-decoder" -version = "2.1.21" +version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86e83b9f421857e9aee51df52aab53d03a9f5860a57c0adcda8a480392f2f85a" +checksum = "43de39d8d625db4c6c3175d63afb5a69d6d223508f6b8a62b4dc254554f2c9d2" dependencies = [ "Inflector", "base64 0.22.1", "bincode", "bs58", "bv", - "lazy_static", "serde", "serde_derive", "serde_json", + "solana-account", "solana-account-decoder-client-types", - "solana-config-program", - "solana-sdk", + 
"solana-address-lookup-table-interface", + "solana-clock", + "solana-config-program-client", + "solana-epoch-schedule", + "solana-fee-calculator", + "solana-instruction", + "solana-loader-v3-interface", + "solana-nonce", + "solana-program-option", + "solana-program-pack", + "solana-pubkey", + "solana-rent", + "solana-sdk-ids", + "solana-slot-hashes", + "solana-slot-history", + "solana-stake-interface", + "solana-sysvar", + "solana-vote-interface", + "spl-generic-token", "spl-token", "spl-token-2022", "spl-token-group-interface", "spl-token-metadata-interface", - "thiserror 1.0.69", + "thiserror 2.0.12", "zstd", ] [[package]] name = "solana-account-decoder-client-types" -version = "2.1.21" +version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4af8ffcad184f2486e5e677fe25250f8f0d77b2d5eb24045fb720963525272b7" +checksum = "a960fb78aeeabff9e62e58c35ce2bb474cf437a7dea832ea61483142be97634e" dependencies = [ "base64 0.22.1", "bs58", @@ -3218,9 +2524,9 @@ dependencies = [ [[package]] name = "solana-account-info" -version = "2.1.21" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b43b59c9659eb61504c4cc73a92a5995a117fa4ffc04bb0da002848cfdd7fcd" +checksum = "c8f5152a288ef1912300fc6efa6c2d1f9bb55d9398eb6c72326360b8063987da" dependencies = [ "bincode", "serde", @@ -3229,20 +2535,48 @@ dependencies = [ "solana-pubkey", ] +[[package]] +name = "solana-address-lookup-table-interface" +version = "2.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1673f67efe870b64a65cb39e6194be5b26527691ce5922909939961a6e6b395" +dependencies = [ + "bincode", + "bytemuck", + "serde", + "serde_derive", + "solana-clock", + "solana-instruction", + "solana-pubkey", + "solana-sdk-ids", + "solana-slot-hashes", +] + [[package]] name = "solana-atomic-u64" -version = "2.1.21" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9f9c33447056f11c1c486ffc2d803366847ba712463d9640891e50490faafd56" +checksum = "d52e52720efe60465b052b9e7445a01c17550666beec855cce66f44766697bc2" dependencies = [ "parking_lot", ] +[[package]] +name = "solana-big-mod-exp" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75db7f2bbac3e62cfd139065d15bcda9e2428883ba61fc8d27ccb251081e7567" +dependencies = [ + "num-bigint", + "num-traits", + "solana-define-syscall", +] + [[package]] name = "solana-bincode" -version = "2.1.21" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7c4ce8ddc2f5343e64346f9f8a05e9f27579d848f059485290d5b9c962c352c" +checksum = "19a3787b8cf9c9fe3dd360800e8b70982b9e5a8af9e11c354b6665dd4a003adc" dependencies = [ "bincode", "serde", @@ -3250,25 +2584,22 @@ dependencies = [ ] [[package]] -name = "solana-bn254" -version = "2.1.21" +name = "solana-blake3-hasher" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e485c027635dd7c6e558949d13bcc052340d704eef4219438593afea5632f42" +checksum = "a1a0801e25a1b31a14494fc80882a036be0ffd290efc4c2d640bfcca120a4672" dependencies = [ - "ark-bn254", - "ark-ec", - "ark-ff", - "ark-serialize", - "bytemuck", - "solana-program", - "thiserror 1.0.69", + "blake3", + "solana-define-syscall", + "solana-hash", + "solana-sanitize", ] [[package]] name = "solana-borsh" -version = "2.1.21" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad79f227829e9b3fa1227acf21b02877f1a0d99d1b753a8085254b706fbddfee" +checksum = "718333bcd0a1a7aed6655aa66bef8d7fb047944922b2d3a18f49cbc13e73d004" dependencies = [ "borsh 0.10.4", "borsh 1.5.7", @@ -3276,46 +2607,41 @@ dependencies = [ [[package]] name = "solana-clock" -version = "2.1.21" +version = "2.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dd1a3f42e823861b812f388d4007bfb2d23aa316d999a2f2ca124fa33c72a40" +checksum = 
"1bb482ab70fced82ad3d7d3d87be33d466a3498eb8aa856434ff3c0dfc2e2e31" dependencies = [ "serde", "serde_derive", + "solana-sdk-ids", "solana-sdk-macro", "solana-sysvar-id", ] [[package]] -name = "solana-compute-budget" -version = "2.1.21" +name = "solana-commitment-config" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61ac55f874d43496b1e1d091576dd99e74e5863911ca55ac36912fe8fe3aa155" -dependencies = [ - "solana-sdk", -] +checksum = "ac49c4dde3edfa832de1697e9bcdb7c3b3f7cb7a1981b7c62526c8bb6700fb73" [[package]] -name = "solana-config-program" -version = "2.1.21" +name = "solana-config-program-client" +version = "0.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbb08509a0969a3929fc48de314af650ce77d678ec742e49b6f60535c57eccb2" +checksum = "53aceac36f105fd4922e29b4f0c1f785b69d7b3e7e387e384b8985c8e0c3595e" dependencies = [ "bincode", - "chrono", + "borsh 0.10.4", + "kaigan", "serde", - "serde_derive", - "solana-log-collector", - "solana-program-runtime", - "solana-sdk", - "solana-short-vec", + "solana-program", ] [[package]] name = "solana-cpi" -version = "2.1.21" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f43b1391eecec7c15ae83477d0bfebc9a92ebe69f793683fd497ac02d180fcd5" +checksum = "8dc71126edddc2ba014622fc32d0f5e2e78ec6c5a1e0eb511b85618c09e9ea11" dependencies = [ "solana-account-info", "solana-define-syscall", @@ -3327,74 +2653,116 @@ dependencies = [ [[package]] name = "solana-curve25519" -version = "2.1.21" +version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bdcc0923e1fbfe614d4a5675c3e4398b521c9d128c2114aaf3e404ea81ad08ee" +checksum = "41408a8fbd47775414e29d393319e8d3327cfac690b3f37ca44b938c5f93a77e" dependencies = [ "bytemuck", "bytemuck_derive", "curve25519-dalek 4.1.3", - "solana-program", - "thiserror 1.0.69", + "solana-define-syscall", + "subtle", + "thiserror 2.0.12", ] [[package]] 
name = "solana-decode-error" -version = "2.1.21" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "750a2c40fd97c96f7464ccf11142c598d5d35094d5988944f7ca5714e014737c" +checksum = "8c781686a18db2f942e70913f7ca15dc120ec38dcab42ff7557db2c70c625a35" dependencies = [ "num-traits", ] [[package]] name = "solana-define-syscall" -version = "2.1.21" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "592a9b501608cb642af6ad9d07c8be8827772c20d1afa5173c85c561da7942a3" +checksum = "2ae3e2abcf541c8122eafe9a625d4d194b4023c20adde1e251f94e056bb1aee2" [[package]] name = "solana-derivation-path" -version = "2.1.21" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c59883e489a9f19caef8af9a198f791f2b62348e14e5279f87df950c233c7880" +checksum = "939756d798b25c5ec3cca10e06212bdca3b1443cb9bb740a38124f58b258737b" dependencies = [ "derivation-path", "qstring", "uriparse", ] +[[package]] +name = "solana-epoch-rewards" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b575d3dd323b9ea10bb6fe89bf6bf93e249b215ba8ed7f68f1a3633f384db7" +dependencies = [ + "serde", + "serde_derive", + "solana-hash", + "solana-sdk-ids", + "solana-sdk-macro", + "solana-sysvar-id", +] + [[package]] name = "solana-epoch-schedule" -version = "2.1.21" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80ec39611020935101afd9a57ac57ea6961796b7d8705974408601e97d4dfb30" +checksum = "3fce071fbddecc55d727b1d7ed16a629afe4f6e4c217bc8d00af3b785f6f67ed" dependencies = [ "serde", "serde_derive", + "solana-sdk-ids", "solana-sdk-macro", "solana-sysvar-id", ] [[package]] -name = "solana-feature-set" -version = "2.1.21" +name = "solana-example-mocks" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"286cb8e4d888f36026bcb1603ff8ab52565ff12705ed24ed44dc68047ef2f779" +checksum = "84461d56cbb8bb8d539347151e0525b53910102e4bced875d49d5139708e39d3" dependencies = [ - "lazy_static", + "serde", + "serde_derive", + "solana-address-lookup-table-interface", "solana-clock", - "solana-epoch-schedule", "solana-hash", + "solana-instruction", + "solana-keccak-hasher", + "solana-message", + "solana-nonce", "solana-pubkey", - "solana-sha256-hasher", + "solana-sdk-ids", + "solana-system-interface", + "thiserror 2.0.12", +] + +[[package]] +name = "solana-feature-gate-interface" +version = "2.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43f5c5382b449e8e4e3016fb05e418c53d57782d8b5c30aa372fc265654b956d" +dependencies = [ + "bincode", + "serde", + "serde_derive", + "solana-account", + "solana-account-info", + "solana-instruction", + "solana-program-error", + "solana-pubkey", + "solana-rent", + "solana-sdk-ids", + "solana-system-interface", ] [[package]] name = "solana-fee-calculator" -version = "2.1.21" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb464dcd65c2737f25e85b4f40e11ae90b106fa5ed6127acc7e3f5d70a7128a2" +checksum = "d89bc408da0fb3812bc3008189d148b4d3e08252c79ad810b245482a3f70cd8d" dependencies = [ "log", "serde", @@ -3403,14 +2771,14 @@ dependencies = [ [[package]] name = "solana-hash" -version = "2.1.21" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8591e9192a4575792bb6a175f82b6bcfb301fc1113c4342bce7789f00726e98a" +checksum = "b5b96e9f0300fa287b545613f007dfe20043d7812bee255f418c1eb649c93b63" dependencies = [ "borsh 1.5.7", - "bs58", "bytemuck", "bytemuck_derive", + "five8", "js-sys", "serde", "serde_derive", @@ -3419,21 +2787,11 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "solana-inflation" -version = "2.1.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5c79ffef2dcf3c9361e8333276c08bd84f112f0760e2fe7273fd089d9bfd2c3b" -dependencies = [ - "serde", - "serde_derive", -] - [[package]] name = "solana-instruction" -version = "2.1.21" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6cfdcaf08849c1828899c5f9ac8da6077bd3b339e2b0648b2d99d3fd780c3c6f" +checksum = "47298e2ce82876b64f71e9d13a46bc4b9056194e7f9937ad3084385befa50885" dependencies = [ "bincode", "borsh 1.5.7", @@ -3448,136 +2806,212 @@ dependencies = [ ] [[package]] -name = "solana-last-restart-slot" -version = "2.1.21" +name = "solana-instructions-sysvar" +version = "2.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b026280da05ff5a5fecd37057f85029b135b65df442467eeb15a8824b902294" +checksum = "e0e85a6fad5c2d0c4f5b91d34b8ca47118fc593af706e523cdbedf846a954f57" dependencies = [ - "serde", - "serde_derive", - "solana-sdk-macro", - "solana-sysvar-id", -] - -[[package]] -name = "solana-log-collector" -version = "2.1.21" + "bitflags", + "solana-account-info", + "solana-instruction", + "solana-program-error", + "solana-pubkey", + "solana-sanitize", + "solana-sdk-ids", + "solana-serialize-utils", + "solana-sysvar-id", +] + +[[package]] +name = "solana-keccak-hasher" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ca24a27f90bc5b3e27a97f0c7dd062a7ad068df240fd8df1267e4840f5d1991" +checksum = "c7aeb957fbd42a451b99235df4942d96db7ef678e8d5061ef34c9b34cae12f79" dependencies = [ - "log", + "sha3", + "solana-define-syscall", + "solana-hash", + "solana-sanitize", ] [[package]] -name = "solana-measure" -version = "2.1.21" +name = "solana-keypair" +version = "2.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1520b3d2e1271adc263807fb6bea9d1ded1aaf4b3dffc4e8c1d51c4444417db" +checksum = "bd3f04aa1a05c535e93e121a95f66e7dcccf57e007282e8255535d24bf1e98bb" +dependencies = [ + "ed25519-dalek", + "five8", + "rand 0.7.3", + 
"solana-pubkey", + "solana-seed-phrase", + "solana-signature", + "solana-signer", + "wasm-bindgen", +] [[package]] -name = "solana-metrics" -version = "2.1.21" +name = "solana-last-restart-slot" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e5546d4505ca1d7c07fd4bedf5f20d256929c4bf614e031c0e84c94cc8d1a94" +checksum = "4a6360ac2fdc72e7463565cd256eedcf10d7ef0c28a1249d261ec168c1b55cdd" dependencies = [ - "crossbeam-channel", - "gethostname", - "lazy_static", - "log", - "reqwest", - "solana-sdk", - "thiserror 1.0.69", + "serde", + "serde_derive", + "solana-sdk-ids", + "solana-sdk-macro", + "solana-sysvar-id", ] [[package]] -name = "solana-msg" -version = "2.1.21" +name = "solana-loader-v2-interface" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddbd7c6efaea83a2bd85a14a0a062ccc77039070311d2ca1af4c887be500192f" +checksum = "d8ab08006dad78ae7cd30df8eea0539e207d08d91eaefb3e1d49a446e1c49654" dependencies = [ - "solana-define-syscall", + "serde", + "serde_bytes", + "serde_derive", + "solana-instruction", + "solana-pubkey", + "solana-sdk-ids", ] [[package]] -name = "solana-native-token" -version = "2.1.21" +name = "solana-loader-v3-interface" +version = "5.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59b58d96fb3ff6d29e0afdb933888f65bcdf0775b334eee4f1f6f72b475cb531" +checksum = "6f7162a05b8b0773156b443bccd674ea78bb9aa406325b467ea78c06c99a63a2" +dependencies = [ + "serde", + "serde_bytes", + "serde_derive", + "solana-instruction", + "solana-pubkey", + "solana-sdk-ids", + "solana-system-interface", +] [[package]] -name = "solana-packet" -version = "2.1.21" +name = "solana-loader-v4-interface" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a46ca92cd3303aa3a225b4b3b4d9b2d29e42927545f1c1ff4042ca516b4decbd" +checksum = "706a777242f1f39a83e2a96a2a6cb034cb41169c6ecbee2cf09cb873d9659e7e" 
+dependencies = [ + "serde", + "serde_bytes", + "serde_derive", + "solana-instruction", + "solana-pubkey", + "solana-sdk-ids", + "solana-system-interface", +] + +[[package]] +name = "solana-message" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1796aabce376ff74bf89b78d268fa5e683d7d7a96a0a4e4813ec34de49d5314b" dependencies = [ "bincode", - "bitflags 2.9.1", - "cfg_eval", + "blake3", + "lazy_static", "serde", "serde_derive", - "serde_with", + "solana-bincode", + "solana-hash", + "solana-instruction", + "solana-pubkey", + "solana-sanitize", + "solana-sdk-ids", + "solana-short-vec", + "solana-system-interface", + "solana-transaction-error", + "wasm-bindgen", ] [[package]] -name = "solana-precompile-error" -version = "2.1.21" +name = "solana-msg" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09c58acffc2369dd3965e666a69015a978ef639e7e11cfe950b80d3ccfb44115" +checksum = "f36a1a14399afaabc2781a1db09cb14ee4cc4ee5c7a5a3cfcc601811379a8092" dependencies = [ - "num-traits", - "solana-decode-error", + "solana-define-syscall", +] + +[[package]] +name = "solana-native-token" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61515b880c36974053dd499c0510066783f0cc6ac17def0c7ef2a244874cf4a9" + +[[package]] +name = "solana-nonce" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "703e22eb185537e06204a5bd9d509b948f0066f2d1d814a6f475dafb3ddf1325" +dependencies = [ + "serde", + "serde_derive", + "solana-fee-calculator", + "solana-hash", + "solana-pubkey", + "solana-sha256-hasher", ] [[package]] name = "solana-program" -version = "2.1.21" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c3aa133068171f46e9583dc9c20221b9a67459e7b8aecd3be5b49af60b2887f" +checksum = "98eca145bd3545e2fbb07166e895370576e47a00a7d824e325390d33bf467210" dependencies = [ - 
"base64 0.22.1", "bincode", - "bitflags 2.9.1", "blake3", "borsh 0.10.4", "borsh 1.5.7", "bs58", - "bv", "bytemuck", - "bytemuck_derive", "console_error_panic_hook", "console_log", - "curve25519-dalek 4.1.3", - "five8_const", "getrandom 0.2.16", - "js-sys", "lazy_static", "log", "memoffset", - "num-bigint 0.4.6", + "num-bigint", "num-derive", "num-traits", - "parking_lot", "rand 0.8.5", "serde", "serde_bytes", "serde_derive", - "sha2 0.10.9", - "sha3", "solana-account-info", + "solana-address-lookup-table-interface", "solana-atomic-u64", + "solana-big-mod-exp", "solana-bincode", + "solana-blake3-hasher", "solana-borsh", "solana-clock", "solana-cpi", "solana-decode-error", "solana-define-syscall", + "solana-epoch-rewards", "solana-epoch-schedule", + "solana-example-mocks", + "solana-feature-gate-interface", "solana-fee-calculator", "solana-hash", "solana-instruction", + "solana-instructions-sysvar", + "solana-keccak-hasher", "solana-last-restart-slot", + "solana-loader-v2-interface", + "solana-loader-v3-interface", + "solana-loader-v4-interface", + "solana-message", "solana-msg", "solana-native-token", + "solana-nonce", "solana-program-entrypoint", "solana-program-error", "solana-program-memory", @@ -3586,6 +3020,7 @@ dependencies = [ "solana-pubkey", "solana-rent", "solana-sanitize", + "solana-sdk-ids", "solana-sdk-macro", "solana-secp256k1-recover", "solana-serde-varint", @@ -3595,17 +3030,20 @@ dependencies = [ "solana-slot-hashes", "solana-slot-history", "solana-stable-layout", + "solana-stake-interface", + "solana-system-interface", + "solana-sysvar", "solana-sysvar-id", - "solana-transaction-error", - "thiserror 1.0.69", + "solana-vote-interface", + "thiserror 2.0.12", "wasm-bindgen", ] [[package]] name = "solana-program-entrypoint" -version = "2.1.21" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42577056d9910b5e3badfb4ae8e234a814ff854e645e679b4286e1b4338c0f03" +checksum = 
"32ce041b1a0ed275290a5008ee1a4a6c48f5054c8a3d78d313c08958a06aedbd" dependencies = [ "solana-account-info", "solana-msg", @@ -3615,9 +3053,9 @@ dependencies = [ [[package]] name = "solana-program-error" -version = "2.1.21" +version = "2.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a268d99b9f7a2ebfee0e8aa03be2f926626575d2e3c33ef02245c1e99477202e" +checksum = "9ee2e0217d642e2ea4bee237f37bd61bb02aec60da3647c48ff88f6556ade775" dependencies = [ "borsh 1.5.7", "num-traits", @@ -3631,76 +3069,44 @@ dependencies = [ [[package]] name = "solana-program-memory" -version = "2.1.21" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46d5f1d48635ce777d931ba85a4be23d9da2443cb24bcd9cc380636444a3e234" +checksum = "3a5426090c6f3fd6cfdc10685322fede9ca8e5af43cd6a59e98bfe4e91671712" dependencies = [ - "num-traits", "solana-define-syscall", ] [[package]] name = "solana-program-option" -version = "2.1.21" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21f8d02965a1f8382b55c834fc9388c95112e8b238625f5c1e6289f20573b91a" +checksum = "dc677a2e9bc616eda6dbdab834d463372b92848b2bfe4a1ed4e4b4adba3397d0" [[package]] name = "solana-program-pack" -version = "2.1.21" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "019fc8c5e8698918bb0675afce683a17248e1c38b178c59f23577fbfd2374aa5" +checksum = "319f0ef15e6e12dc37c597faccb7d62525a509fec5f6975ecb9419efddeb277b" dependencies = [ "solana-program-error", ] -[[package]] -name = "solana-program-runtime" -version = "2.1.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7702436e95dadea0553a5b29b42271b81da61c2ae78e4425c74fadd7ce2252b5" -dependencies = [ - "base64 0.22.1", - "bincode", - "enum-iterator", - "itertools 0.12.1", - "libc", - "log", - "num-derive", - "num-traits", - "percentage", - "rand 0.8.5", - "serde", - "solana-compute-budget", - 
"solana-feature-set", - "solana-log-collector", - "solana-measure", - "solana-metrics", - "solana-sdk", - "solana-timings", - "solana-type-overrides", - "solana-vote", - "solana_rbpf", - "thiserror 1.0.69", -] - [[package]] name = "solana-pubkey" -version = "2.1.21" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16bcff57fc2a096f6c57851e22587a7a36b6fdaa567eab75bb0bcd06f449fcbc" +checksum = "9b62adb9c3261a052ca1f999398c388f1daf558a1b492f60a6d9e64857db4ff1" dependencies = [ "borsh 0.10.4", "borsh 1.5.7", - "bs58", "bytemuck", "bytemuck_derive", "curve25519-dalek 4.1.3", + "five8", "five8_const", "getrandom 0.2.16", "js-sys", "num-traits", - "rand 0.8.5", "serde", "serde_derive", "solana-atomic-u64", @@ -3713,144 +3119,105 @@ dependencies = [ [[package]] name = "solana-rent" -version = "2.1.21" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abb867706bd6e7bbbdf303416f44a5560bcef6f9a0fc5610a725c1a92103abc7" +checksum = "d1aea8fdea9de98ca6e8c2da5827707fb3842833521b528a713810ca685d2480" dependencies = [ "serde", "serde_derive", + "solana-sdk-ids", "solana-sdk-macro", "solana-sysvar-id", ] +[[package]] +name = "solana-reward-info" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18205b69139b1ae0ab8f6e11cdcb627328c0814422ad2482000fa2ca54ae4a2f" +dependencies = [ + "serde", + "serde_derive", +] + [[package]] name = "solana-sanitize" -version = "2.1.21" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac599243d068ed88b40e781e02c92c8ed3edd0170337b1b4cee995e4fbe84af0" +checksum = "61f1bc1357b8188d9c4a3af3fc55276e56987265eb7ad073ae6f8180ee54cecf" [[package]] -name = "solana-sdk" -version = "2.1.21" +name = "solana-sdk-ids" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cef4d9a579ff99aa5109921f729ab9cba07b207486b2c1eab8240c97777102ba" 
+checksum = "5c5d8b9cc68d5c88b062a33e23a6466722467dde0035152d8fb1afbcdf350a5f" dependencies = [ - "bincode", - "bitflags 2.9.1", - "borsh 1.5.7", - "bs58", - "bytemuck", - "bytemuck_derive", - "byteorder", - "chrono", - "digest 0.10.7", - "ed25519-dalek", - "ed25519-dalek-bip32", - "getrandom 0.1.16", - "hmac 0.12.1", - "itertools 0.12.1", - "js-sys", - "lazy_static", - "libsecp256k1", - "log", - "memmap2", - "num-derive", - "num-traits", - "num_enum", - "pbkdf2", - "rand 0.7.3", - "rand 0.8.5", - "serde", - "serde_bytes", - "serde_derive", - "serde_json", - "serde_with", - "sha2 0.10.9", - "sha3", - "siphasher", - "solana-account", - "solana-bn254", - "solana-decode-error", - "solana-derivation-path", - "solana-feature-set", - "solana-inflation", - "solana-instruction", - "solana-native-token", - "solana-packet", - "solana-precompile-error", - "solana-program", - "solana-program-memory", "solana-pubkey", - "solana-sanitize", - "solana-sdk-macro", - "solana-secp256k1-recover", - "solana-secp256r1-program", - "solana-serde-varint", - "solana-short-vec", - "solana-signature", - "solana-transaction-error", - "thiserror 1.0.69", - "wasm-bindgen", ] [[package]] name = "solana-sdk-macro" -version = "2.1.21" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2ea6cfa40c712e5de92ffa2d62a2b296379d09411e0f1d7fcd9fecf5fcc5a30" +checksum = "86280da8b99d03560f6ab5aca9de2e38805681df34e0bb8f238e69b29433b9df" dependencies = [ "bs58", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.104", ] [[package]] name = "solana-secp256k1-recover" -version = "2.1.21" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e9d4483cde845bb0f70374d2905014650057eafdcc546a3e50059e7643318e8" +checksum = "baa3120b6cdaa270f39444f5093a90a7b03d296d362878f7a6991d6de3bbe496" dependencies = [ - "borsh 1.5.7", "libsecp256k1", "solana-define-syscall", - "thiserror 1.0.69", + "thiserror 2.0.12", ] [[package]] -name = 
"solana-secp256r1-program" -version = "2.1.21" +name = "solana-security-txt" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6feaf48cad9bf5ca1a04c8cd1fb45d4fa507ff86dae77802b8e8f47b89fb0eed" +checksum = "468aa43b7edb1f9b7b7b686d5c3aeb6630dc1708e86e31343499dd5c4d775183" + +[[package]] +name = "solana-seed-derivable" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3beb82b5adb266c6ea90e5cf3967235644848eac476c5a1f2f9283a143b7c97f" dependencies = [ - "bytemuck", - "openssl", - "solana-feature-set", - "solana-instruction", - "solana-precompile-error", - "solana-pubkey", + "solana-derivation-path", ] [[package]] -name = "solana-security-txt" -version = "1.1.1" +name = "solana-seed-phrase" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "468aa43b7edb1f9b7b7b686d5c3aeb6630dc1708e86e31343499dd5c4d775183" +checksum = "36187af2324f079f65a675ec22b31c24919cb4ac22c79472e85d819db9bbbc15" +dependencies = [ + "hmac", + "pbkdf2", + "sha2 0.10.9", +] [[package]] name = "solana-serde-varint" -version = "2.1.21" +version = "2.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3778f75e718af3c3e4b42ca67ec3a4197658855eaa5372a2f41e2378290b4fc" +checksum = "2a7e155eba458ecfb0107b98236088c3764a09ddf0201ec29e52a0be40857113" dependencies = [ "serde", ] [[package]] name = "solana-serialize-utils" -version = "2.1.21" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ccf458c0aaa5d517fa358c70a6322e2cb7c18c659464eaa7135de5b3c1b2837" +checksum = "817a284b63197d2b27afdba829c5ab34231da4a9b4e763466a003c40ca4f535e" dependencies = [ "solana-instruction", "solana-pubkey", @@ -3859,9 +3226,9 @@ dependencies = [ [[package]] name = "solana-sha256-hasher" -version = "2.1.21" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4028550a372b5ce9514941fb1a04cfac9aa66f76fd9684a7b11ef37ac586e492" +checksum = "0037386961c0d633421f53560ad7c80675c0447cba4d1bb66d60974dd486c7ea" dependencies = [ "sha2 0.10.9", "solana-define-syscall", @@ -3870,87 +3237,207 @@ dependencies = [ [[package]] name = "solana-short-vec" -version = "2.1.21" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be97defacda69848b33aa1fec3571435e5a3bfb55cdd2afd66867604eee3ff84" +checksum = "5c54c66f19b9766a56fa0057d060de8378676cb64987533fa088861858fc5a69" dependencies = [ "serde", ] [[package]] name = "solana-signature" -version = "2.1.21" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97fdb44ae08fa08bcaf5a3c01cf0d1b9c363ab2e3e2602e9b7806f653d08b4d0" +checksum = "64c8ec8e657aecfc187522fc67495142c12f35e55ddeca8698edbb738b8dbd8c" dependencies = [ - "bs58", "ed25519-dalek", - "generic-array", - "rand 0.8.5", + "five8", "serde", + "serde-big-array", "serde_derive", "solana-sanitize", ] +[[package]] +name = "solana-signer" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c41991508a4b02f021c1342ba00bcfa098630b213726ceadc7cb032e051975b" +dependencies = [ + "solana-pubkey", + "solana-signature", + "solana-transaction-error", +] + [[package]] name = "solana-slot-hashes" -version = "2.1.21" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c7bb06f75cb9d4c44f71dbc1cf46b1e67aaa9f737b66389b18471a8db1d9a09" +checksum = "0c8691982114513763e88d04094c9caa0376b867a29577939011331134c301ce" dependencies = [ "serde", "serde_derive", "solana-hash", + "solana-sdk-ids", "solana-sysvar-id", ] [[package]] name = "solana-slot-history" -version = "2.1.21" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff10530199def6788f8750cc274629e49e2c4baf181ddfb7c754813fd0bc252e" +checksum = 
"97ccc1b2067ca22754d5283afb2b0126d61eae734fc616d23871b0943b0d935e" dependencies = [ "bv", "serde", "serde_derive", + "solana-sdk-ids", "solana-sysvar-id", ] [[package]] name = "solana-stable-layout" -version = "2.1.21" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e36af09027fa5a658210d7add0c7661934a879439b68336dbe680263255d3f62" +checksum = "9f14f7d02af8f2bc1b5efeeae71bc1c2b7f0f65cd75bcc7d8180f2c762a57f54" dependencies = [ "solana-instruction", "solana-pubkey", ] +[[package]] +name = "solana-stake-interface" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5269e89fde216b4d7e1d1739cf5303f8398a1ff372a81232abbee80e554a838c" +dependencies = [ + "borsh 0.10.4", + "borsh 1.5.7", + "num-traits", + "serde", + "serde_derive", + "solana-clock", + "solana-cpi", + "solana-decode-error", + "solana-instruction", + "solana-program-error", + "solana-pubkey", + "solana-system-interface", + "solana-sysvar-id", +] + +[[package]] +name = "solana-svm-feature-set" +version = "2.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c57970988bfbc592b3a03df165fe5ef4d8594eec129d4efe3f6d3d6d97c9c787" + +[[package]] +name = "solana-system-interface" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94d7c18cb1a91c6be5f5a8ac9276a1d7c737e39a21beba9ea710ab4b9c63bc90" +dependencies = [ + "js-sys", + "num-traits", + "serde", + "serde_derive", + "solana-decode-error", + "solana-instruction", + "solana-pubkey", + "wasm-bindgen", +] + +[[package]] +name = "solana-sysvar" +version = "2.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d50c92bc019c590f5e42c61939676e18d14809ed00b2a59695dd5c67ae72c097" +dependencies = [ + "base64 0.22.1", + "bincode", + "bytemuck", + "bytemuck_derive", + "lazy_static", + "serde", + "serde_derive", + "solana-account-info", + "solana-clock", + 
"solana-define-syscall", + "solana-epoch-rewards", + "solana-epoch-schedule", + "solana-fee-calculator", + "solana-hash", + "solana-instruction", + "solana-instructions-sysvar", + "solana-last-restart-slot", + "solana-program-entrypoint", + "solana-program-error", + "solana-program-memory", + "solana-pubkey", + "solana-rent", + "solana-sanitize", + "solana-sdk-ids", + "solana-sdk-macro", + "solana-slot-hashes", + "solana-slot-history", + "solana-stake-interface", + "solana-sysvar-id", +] + [[package]] name = "solana-sysvar-id" -version = "2.1.21" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5d72e2ff3e0a3b14d0fd4010d1fe7e4fcc9c4d8e5d43826c75c5629477e8b1a" +checksum = "5762b273d3325b047cfda250787f8d796d781746860d5d0a746ee29f3e8812c1" dependencies = [ "solana-pubkey", + "solana-sdk-ids", +] + +[[package]] +name = "solana-transaction" +version = "2.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80657d6088f721148f5d889c828ca60c7daeedac9a8679f9ec215e0c42bcbf41" +dependencies = [ + "serde", + "serde_derive", + "solana-hash", + "solana-instruction", + "solana-keypair", + "solana-message", + "solana-pubkey", + "solana-sanitize", + "solana-sdk-ids", + "solana-short-vec", + "solana-signature", + "solana-transaction-error", + "wasm-bindgen", ] [[package]] -name = "solana-timings" -version = "2.1.21" +name = "solana-transaction-context" +version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a4cb26e92fe54cff708393ea63cb4b5c983cd1c86c5b5d4962523f57e95239e" +checksum = "f7f01227c847bb9a33a5847f6c8977dc6ab71191a3f662d49c0913286efd22d6" dependencies = [ - "eager", - "enum-iterator", - "solana-sdk", + "bincode", + "serde", + "serde_derive", + "solana-account", + "solana-instruction", + "solana-instructions-sysvar", + "solana-pubkey", + "solana-rent", + "solana-sdk-ids", ] [[package]] name = "solana-transaction-error" -version = "2.1.21" +version = 
"2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccf8715c2acb247f4592eb7dc1d616454836f10c2d0e397a557fae4192337618" +checksum = "222a9dc8fdb61c6088baab34fc3a8b8473a03a7a5fd404ed8dd502fa79b67cb1" dependencies = [ "serde", "serde_derive", @@ -3960,37 +3447,53 @@ dependencies = [ [[package]] name = "solana-transaction-status" -version = "2.1.21" +version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7c62ad1caf3e15f05cf40dc82e765319750b4f6a48e1ad3ae2e8a4f6be6e7de" +checksum = "584e6d1c6e9c6f3adbc1848f9d1e9540a402dda806f78004982ab90752f7b85d" dependencies = [ "Inflector", + "agave-reserved-account-keys", "base64 0.22.1", "bincode", "borsh 1.5.7", "bs58", - "lazy_static", "log", "serde", "serde_derive", "serde_json", "solana-account-decoder", - "solana-sdk", + "solana-address-lookup-table-interface", + "solana-clock", + "solana-hash", + "solana-instruction", + "solana-loader-v2-interface", + "solana-loader-v3-interface", + "solana-message", + "solana-program-option", + "solana-pubkey", + "solana-reward-info", + "solana-sdk-ids", + "solana-signature", + "solana-stake-interface", + "solana-system-interface", + "solana-transaction", + "solana-transaction-error", "solana-transaction-status-client-types", + "solana-vote-interface", "spl-associated-token-account", "spl-memo", "spl-token", "spl-token-2022", "spl-token-group-interface", "spl-token-metadata-interface", - "thiserror 1.0.69", + "thiserror 2.0.12", ] [[package]] name = "solana-transaction-status-client-types" -version = "2.1.21" +version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "836a0e47c3d688676b37c1e3ff6a1fbb6a8b3bfaf050ee6343b6b993e3cc91cd" +checksum = "d493bb232e3e9feafe4cc8f75285d4cadb74af9ee0344f52bdbe93c857c79298" dependencies = [ "base64 0.22.1", "bincode", @@ -3999,50 +3502,54 @@ dependencies = [ "serde_derive", "serde_json", "solana-account-decoder-client-types", - "solana-sdk", + 
"solana-commitment-config", + "solana-message", + "solana-reward-info", "solana-signature", - "thiserror 1.0.69", -] - -[[package]] -name = "solana-type-overrides" -version = "2.1.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecb3bf3b629b5730bbc12ae75ef23b4665f3940fa98ac9bb4e251509952ae863" -dependencies = [ - "lazy_static", - "rand 0.8.5", + "solana-transaction", + "solana-transaction-context", + "solana-transaction-error", + "thiserror 2.0.12", ] [[package]] -name = "solana-vote" -version = "2.1.21" +name = "solana-vote-interface" +version = "2.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76360d4e0e639a8f593ea7a314b2d930fc473419479fc3ce1f3a633bf8ea159f" +checksum = "ef4f08746f154458f28b98330c0d55cb431e2de64ee4b8efc98dcbe292e0672b" dependencies = [ - "itertools 0.12.1", - "log", + "bincode", + "num-derive", + "num-traits", "serde", "serde_derive", - "solana-sdk", - "thiserror 1.0.69", + "solana-clock", + "solana-decode-error", + "solana-hash", + "solana-instruction", + "solana-pubkey", + "solana-rent", + "solana-sdk-ids", + "solana-serde-varint", + "solana-serialize-utils", + "solana-short-vec", + "solana-system-interface", ] [[package]] -name = "solana-zk-token-sdk" -version = "2.1.21" +name = "solana-zk-sdk" +version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c2cc3254bc3d6f09fcf7677a4606d8bb5d7719860065cde3dd8e7e41022740a" +checksum = "69dbc9ddff2355e4b0a90790ac8b7f07cab4574367a15cc7886f743596a6f5f2" dependencies = [ "aes-gcm-siv", "base64 0.22.1", "bincode", "bytemuck", "bytemuck_derive", - "byteorder", "curve25519-dalek 4.1.3", "itertools 0.12.1", - "lazy_static", + "js-sys", "merlin", "num-derive", "num-traits", @@ -4051,57 +3558,55 @@ dependencies = [ "serde_derive", "serde_json", "sha3", - "solana-curve25519", "solana-derivation-path", - "solana-program", - "solana-sdk", + "solana-instruction", + "solana-pubkey", + "solana-sdk-ids", + 
"solana-seed-derivable", + "solana-seed-phrase", + "solana-signature", + "solana-signer", "subtle", - "thiserror 1.0.69", + "thiserror 2.0.12", + "wasm-bindgen", "zeroize", ] -[[package]] -name = "solana_rbpf" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c1941b5ef0c3ce8f2ac5dd984d0fb1a97423c4ff2a02eec81e3913f02e2ac2b" -dependencies = [ - "byteorder", - "combine", - "hash32", - "libc", - "log", - "rand 0.8.5", - "rustc-demangle", - "scroll", - "thiserror 1.0.69", - "winapi", -] - [[package]] name = "spl-associated-token-account" -version = "4.0.0" +version = "7.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68034596cf4804880d265f834af1ff2f821ad5293e41fa0f8f59086c181fc38e" +checksum = "ae179d4a26b3c7a20c839898e6aed84cb4477adf108a366c95532f058aea041b" dependencies = [ - "assert_matches", "borsh 1.5.7", "num-derive", "num-traits", "solana-program", + "spl-associated-token-account-client", "spl-token", "spl-token-2022", - "thiserror 1.0.69", + "thiserror 2.0.12", +] + +[[package]] +name = "spl-associated-token-account-client" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6f8349dbcbe575f354f9a533a21f272f3eb3808a49e2fdc1c34393b88ba76cb" +dependencies = [ + "solana-instruction", + "solana-pubkey", ] [[package]] name = "spl-discriminator" -version = "0.3.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a38ea8b6dedb7065887f12d62ed62c1743aa70749e8558f963609793f6fb12bc" +checksum = "a7398da23554a31660f17718164e31d31900956054f54f52d5ec1be51cb4f4b3" dependencies = [ "bytemuck", - "solana-program", + "solana-program-error", + "solana-sha256-hasher", "spl-discriminator-derive", ] @@ -4113,7 +3618,7 @@ checksum = "d9e8418ea6269dcfb01c712f0444d2c75542c04448b480e87de59d2865edc750" dependencies = [ "quote", "spl-discriminator-syn", - "syn 2.0.101", + "syn 2.0.104", ] [[package]] @@ -4125,173 
+3630,324 @@ dependencies = [ "proc-macro2", "quote", "sha2 0.10.9", - "syn 2.0.101", + "syn 2.0.104", "thiserror 1.0.69", ] +[[package]] +name = "spl-elgamal-registry" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65edfeed09cd4231e595616aa96022214f9c9d2be02dea62c2b30d5695a6833a" +dependencies = [ + "bytemuck", + "solana-account-info", + "solana-cpi", + "solana-instruction", + "solana-msg", + "solana-program-entrypoint", + "solana-program-error", + "solana-pubkey", + "solana-rent", + "solana-sdk-ids", + "solana-system-interface", + "solana-sysvar", + "solana-zk-sdk", + "spl-pod", + "spl-token-confidential-transfer-proof-extraction", +] + +[[package]] +name = "spl-generic-token" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "741a62a566d97c58d33f9ed32337ceedd4e35109a686e31b1866c5dfa56abddc" +dependencies = [ + "bytemuck", + "solana-pubkey", +] + [[package]] name = "spl-memo" -version = "5.0.0" +version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0dba2f2bb6419523405d21c301a32c9f9568354d4742552e7972af801f4bdb3" +checksum = "9f09647c0974e33366efeb83b8e2daebb329f0420149e74d3a4bd2c08cf9f7cb" dependencies = [ - "solana-program", + "solana-account-info", + "solana-instruction", + "solana-msg", + "solana-program-entrypoint", + "solana-program-error", + "solana-pubkey", ] [[package]] name = "spl-pod" -version = "0.3.1" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c704c88fc457fa649ba3aabe195c79d885c3f26709efaddc453c8de352c90b87" +checksum = "d994afaf86b779104b4a95ba9ca75b8ced3fdb17ee934e38cb69e72afbe17799" dependencies = [ "borsh 1.5.7", "bytemuck", "bytemuck_derive", - "solana-program", - "solana-zk-token-sdk", - "spl-program-error", + "num-derive", + "num-traits", + "solana-decode-error", + "solana-msg", + "solana-program-error", + "solana-program-option", + "solana-pubkey", + 
"solana-zk-sdk", + "thiserror 2.0.12", ] [[package]] name = "spl-program-error" -version = "0.5.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7b28bed65356558133751cc32b48a7a5ddfc59ac4e941314630bbed1ac10532" +checksum = "9cdebc8b42553070b75aa5106f071fef2eb798c64a7ec63375da4b1f058688c6" dependencies = [ "num-derive", "num-traits", - "solana-program", + "solana-decode-error", + "solana-msg", + "solana-program-error", "spl-program-error-derive", - "thiserror 1.0.69", + "thiserror 2.0.12", ] [[package]] name = "spl-program-error-derive" -version = "0.4.1" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6d375dd76c517836353e093c2dbb490938ff72821ab568b545fd30ab3256b3e" +checksum = "2a2539e259c66910d78593475540e8072f0b10f0f61d7607bbf7593899ed52d0" dependencies = [ "proc-macro2", "quote", "sha2 0.10.9", - "syn 2.0.101", + "syn 2.0.104", ] [[package]] name = "spl-tlv-account-resolution" -version = "0.7.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37a75a5f0fcc58126693ed78a17042e9dc53f07e357d6be91789f7d62aff61a4" +checksum = "1408e961215688715d5a1063cbdcf982de225c45f99c82b4f7d7e1dd22b998d7" dependencies = [ "bytemuck", - "solana-program", + "num-derive", + "num-traits", + "solana-account-info", + "solana-decode-error", + "solana-instruction", + "solana-msg", + "solana-program-error", + "solana-pubkey", "spl-discriminator", "spl-pod", "spl-program-error", "spl-type-length-value", + "thiserror 2.0.12", ] [[package]] name = "spl-token" -version = "6.0.0" +version = "8.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70a0f06ac7f23dc0984931b1fe309468f14ea58e32660439c1cef19456f5d0e3" +checksum = "053067c6a82c705004f91dae058b11b4780407e9ccd6799dc9e7d0fab5f242da" dependencies = [ "arrayref", "bytemuck", "num-derive", "num-traits", "num_enum", - "solana-program", - "thiserror 1.0.69", + 
"solana-account-info", + "solana-cpi", + "solana-decode-error", + "solana-instruction", + "solana-msg", + "solana-program-entrypoint", + "solana-program-error", + "solana-program-memory", + "solana-program-option", + "solana-program-pack", + "solana-pubkey", + "solana-rent", + "solana-sdk-ids", + "solana-sysvar", + "thiserror 2.0.12", ] [[package]] name = "spl-token-2022" -version = "4.0.0" +version = "8.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9c10f3483e48679619c76598d4e4aebb955bc49b0a5cc63323afbf44135c9bf" +checksum = "31f0dfbb079eebaee55e793e92ca5f433744f4b71ee04880bfd6beefba5973e5" dependencies = [ "arrayref", "bytemuck", "num-derive", "num-traits", "num_enum", - "solana-program", + "solana-account-info", + "solana-clock", + "solana-cpi", + "solana-decode-error", + "solana-instruction", + "solana-msg", + "solana-native-token", + "solana-program-entrypoint", + "solana-program-error", + "solana-program-memory", + "solana-program-option", + "solana-program-pack", + "solana-pubkey", + "solana-rent", + "solana-sdk-ids", "solana-security-txt", - "solana-zk-token-sdk", + "solana-system-interface", + "solana-sysvar", + "solana-zk-sdk", + "spl-elgamal-registry", "spl-memo", "spl-pod", - "spl-token", - "spl-token-group-interface", - "spl-token-metadata-interface", - "spl-transfer-hook-interface", - "spl-type-length-value", - "thiserror 1.0.69", + "spl-token", + "spl-token-confidential-transfer-ciphertext-arithmetic", + "spl-token-confidential-transfer-proof-extraction", + "spl-token-confidential-transfer-proof-generation", + "spl-token-group-interface", + "spl-token-metadata-interface", + "spl-transfer-hook-interface", + "spl-type-length-value", + "thiserror 2.0.12", +] + +[[package]] +name = "spl-token-confidential-transfer-ciphertext-arithmetic" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94ab20faf7b5edaa79acd240e0f21d5a2ef936aa99ed98f698573a2825b299c4" +dependencies = [ + 
"base64 0.22.1", + "bytemuck", + "solana-curve25519", + "solana-zk-sdk", +] + +[[package]] +name = "spl-token-confidential-transfer-proof-extraction" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe2629860ff04c17bafa9ba4bed8850a404ecac81074113e1f840dbd0ebb7bd6" +dependencies = [ + "bytemuck", + "solana-account-info", + "solana-curve25519", + "solana-instruction", + "solana-instructions-sysvar", + "solana-msg", + "solana-program-error", + "solana-pubkey", + "solana-sdk-ids", + "solana-zk-sdk", + "spl-pod", + "thiserror 2.0.12", +] + +[[package]] +name = "spl-token-confidential-transfer-proof-generation" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae5b124840d4aed474cef101d946a798b806b46a509ee4df91021e1ab1cef3ef" +dependencies = [ + "curve25519-dalek 4.1.3", + "solana-zk-sdk", + "thiserror 2.0.12", ] [[package]] name = "spl-token-group-interface" -version = "0.3.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df8752b85a5ecc1d9f3a43bce3dd9a6a053673aacf5deb513d1cbb88d3534ffd" +checksum = "5597b4cd76f85ce7cd206045b7dc22da8c25516573d42d267c8d1fd128db5129" dependencies = [ "bytemuck", - "solana-program", + "num-derive", + "num-traits", + "solana-decode-error", + "solana-instruction", + "solana-msg", + "solana-program-error", + "solana-pubkey", "spl-discriminator", "spl-pod", - "spl-program-error", + "thiserror 2.0.12", ] [[package]] name = "spl-token-metadata-interface" -version = "0.4.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6c2318ddff97e006ed9b1291ebec0750a78547f870f62a69c56fe3b46a5d8fc" +checksum = "304d6e06f0de0c13a621464b1fd5d4b1bebf60d15ca71a44d3839958e0da16ee" dependencies = [ "borsh 1.5.7", - "solana-program", + "num-derive", + "num-traits", + "solana-borsh", + "solana-decode-error", + "solana-instruction", + "solana-msg", + "solana-program-error", + 
"solana-pubkey", "spl-discriminator", "spl-pod", - "spl-program-error", "spl-type-length-value", + "thiserror 2.0.12", ] [[package]] name = "spl-transfer-hook-interface" -version = "0.7.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a110f33d941275d9f868b96daaa993f1e73b6806cc8836e43075b4d3ad8338a7" +checksum = "a7e905b849b6aba63bde8c4badac944ebb6c8e6e14817029cbe1bc16829133bd" dependencies = [ "arrayref", "bytemuck", - "solana-program", + "num-derive", + "num-traits", + "solana-account-info", + "solana-cpi", + "solana-decode-error", + "solana-instruction", + "solana-msg", + "solana-program-error", + "solana-pubkey", "spl-discriminator", "spl-pod", "spl-program-error", "spl-tlv-account-resolution", "spl-type-length-value", + "thiserror 2.0.12", ] [[package]] name = "spl-type-length-value" -version = "0.5.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bdcd73ec187bc409464c60759232e309f83b52a18a9c5610bf281c9c6432918c" +checksum = "d417eb548214fa822d93f84444024b4e57c13ed6719d4dcc68eec24fb481e9f5" dependencies = [ "bytemuck", - "solana-program", + "num-derive", + "num-traits", + "solana-account-info", + "solana-decode-error", + "solana-msg", + "solana-program-error", "spl-discriminator", "spl-pod", - "spl-program-error", + "thiserror 2.0.12", ] -[[package]] -name = "stable_deref_trait" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" - [[package]] name = "strsim" version = "0.11.1" @@ -4317,59 +3973,21 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.101" +version = "2.0.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf" +checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40" dependencies = [ "proc-macro2", "quote", 
"unicode-ident", ] -[[package]] -name = "sync_wrapper" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" - [[package]] name = "sync_wrapper" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" -[[package]] -name = "synstructure" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", -] - -[[package]] -name = "system-configuration" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" -dependencies = [ - "bitflags 1.3.2", - "core-foundation 0.9.4", - "system-configuration-sys", -] - -[[package]] -name = "system-configuration-sys" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" -dependencies = [ - "core-foundation-sys", - "libc", -] - [[package]] name = "tabled" version = "0.18.0" @@ -4390,7 +4008,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.104", ] [[package]] @@ -4432,7 +4050,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.104", ] [[package]] @@ -4443,17 +4061,16 @@ checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.104", ] [[package]] name = "thread_local" -version = "1.1.8" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" dependencies = [ "cfg-if", - "once_cell", ] [[package]] @@ -4487,16 +4104,6 @@ dependencies = [ "time-core", ] -[[package]] -name = "tinystr" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b" -dependencies = [ - "displaydoc", - "zerovec", -] - [[package]] name = "tinyvec" version = "1.9.0" @@ -4514,16 +4121,18 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.45.0" +version = "1.46.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2513ca694ef9ede0fb23fe71a4ee4107cb102b9dc1930f6d0fd77aae068ae165" +checksum = "0cc3a2344dafbe23a245241fe8b09735b521110d30fcefbbd5feb1797ca35d17" dependencies = [ "backtrace", "bytes", + "io-uring", "libc", "mio", "pin-project-lite", "signal-hook-registry", + "slab", "socket2", "tokio-macros", "windows-sys 0.52.0", @@ -4537,17 +4146,7 @@ checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", -] - -[[package]] -name = "tokio-rustls" -version = "0.24.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" -dependencies = [ - "rustls 0.21.12", - "tokio", + "syn 2.0.104", ] [[package]] @@ -4556,7 +4155,7 @@ version = "0.26.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b" dependencies = [ - "rustls 0.23.27", + "rustls", "tokio", ] @@ -4595,17 +4194,17 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.6.9" +version = "0.6.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3da5db5a963e24bc68be8b17b6fa82814bb22ee8660f192bb182771d498f09a3" +checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" [[package]] name = "toml_edit" -version = "0.22.26" +version = "0.22.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "310068873db2c5b3e7659d2cc35d21855dbafa50d1ce336397c666e3cb08137e" +checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" dependencies = [ - "indexmap 2.9.0", + "indexmap 2.10.0", "toml_datetime", "winnow", ] @@ -4622,21 +4221,21 @@ dependencies = [ "base64 0.22.1", "bytes", "flate2", - "h2 0.4.10", - "http 1.3.1", - "http-body 1.0.1", + "h2", + "http", + "http-body", "http-body-util", - "hyper 1.6.0", + "hyper", "hyper-timeout", "hyper-util", "percent-encoding", "pin-project", "prost", "rustls-native-certs", - "rustls-pemfile 2.2.0", + "rustls-pemfile", "socket2", "tokio", - "tokio-rustls 0.26.2", + "tokio-rustls", "tokio-stream", "tower 0.4.13", "tower-layer", @@ -4656,7 +4255,7 @@ dependencies = [ "prost-build", "prost-types", "quote", - "syn 2.0.101", + "syn 2.0.104", ] [[package]] @@ -4701,7 +4300,7 @@ dependencies = [ "futures-core", "futures-util", "pin-project-lite", - "sync_wrapper 1.0.2", + "sync_wrapper", "tower-layer", "tower-service", ] @@ -4731,20 +4330,20 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.28" +version = "0.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" +checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.104", ] [[package]] name = "tracing-core" -version = "0.1.33" +version = "0.1.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" +checksum = 
"b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" dependencies = [ "once_cell", "valuable", @@ -4799,9 +4398,9 @@ checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" [[package]] name = "unicode-width" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" +checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c" [[package]] name = "universal-hash" @@ -4813,15 +4412,6 @@ dependencies = [ "subtle", ] -[[package]] -name = "unreachable" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56" -dependencies = [ - "void", -] - [[package]] name = "unsafe-libyaml" version = "0.2.11" @@ -4844,23 +4434,6 @@ dependencies = [ "lazy_static", ] -[[package]] -name = "url" -version = "2.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" -dependencies = [ - "form_urlencoded", - "idna", - "percent-encoding", -] - -[[package]] -name = "utf8_iter" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" - [[package]] name = "utf8parse" version = "0.2.2" @@ -4869,11 +4442,13 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" -version = "1.16.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "458f7a779bf54acc9f347480ac654f68407d3aab21269a6e3c9f922acd9e2da9" +checksum = "3cf4199d1e5d15ddd86a694e4d0dffa9c323ce759fea589f00fef9d81cc1931d" dependencies = [ "getrandom 0.3.3", + "js-sys", + "wasm-bindgen", ] [[package]] @@ -4882,24 +4457,12 @@ version = "0.1.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" -[[package]] -name = "vcpkg" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" - [[package]] name = "version_check" version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" -[[package]] -name = "void" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" - [[package]] name = "want" version = "0.3.1" @@ -4917,9 +4480,9 @@ checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" [[package]] name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" +version = "0.11.1+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] name = "wasi" @@ -4952,23 +4515,10 @@ dependencies = [ "log", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.104", "wasm-bindgen-shared", ] -[[package]] -name = "wasm-bindgen-futures" -version = "0.4.50" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" -dependencies = [ - "cfg-if", - "js-sys", - "once_cell", - "wasm-bindgen", - "web-sys", -] - [[package]] name = "wasm-bindgen-macro" version = "0.2.100" @@ -4987,7 +4537,7 @@ checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.104", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -5011,12 +4561,6 @@ dependencies = [ "wasm-bindgen", ] 
-[[package]] -name = "webpki-roots" -version = "0.25.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" - [[package]] name = "winapi" version = "0.3.9" @@ -5041,9 +4585,9 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows-core" -version = "0.61.1" +version = "0.61.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46ec44dc15085cea82cf9c78f85a9114c463a369786585ad2882d1ff0b0acf40" +checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" dependencies = [ "windows-implement", "windows-interface", @@ -5060,7 +4604,7 @@ checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.104", ] [[package]] @@ -5071,42 +4615,33 @@ checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.104", ] [[package]] name = "windows-link" -version = "0.1.1" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38" +checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" [[package]] name = "windows-result" -version = "0.3.3" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b895b5356fc36103d0f64dd1e94dfa7ac5633f1c9dd6e80fe9ec4adef69e09d" +checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" dependencies = [ "windows-link", ] [[package]] name = "windows-strings" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a7ab927b2637c19b3dbe0965e75d8f2d30bdd697a1516191cad2ec4df8fb28a" +checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" 
dependencies = [ "windows-link", ] -[[package]] -name = "windows-sys" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" -dependencies = [ - "windows-targets 0.48.5", -] - [[package]] name = "windows-sys" version = "0.52.0" @@ -5126,18 +4661,12 @@ dependencies = [ ] [[package]] -name = "windows-targets" -version = "0.48.5" +name = "windows-sys" +version = "0.60.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" dependencies = [ - "windows_aarch64_gnullvm 0.48.5", - "windows_aarch64_msvc 0.48.5", - "windows_i686_gnu 0.48.5", - "windows_i686_msvc 0.48.5", - "windows_x86_64_gnu 0.48.5", - "windows_x86_64_gnullvm 0.48.5", - "windows_x86_64_msvc 0.48.5", + "windows-targets 0.53.2", ] [[package]] @@ -5149,7 +4678,7 @@ dependencies = [ "windows_aarch64_gnullvm 0.52.6", "windows_aarch64_msvc 0.52.6", "windows_i686_gnu 0.52.6", - "windows_i686_gnullvm", + "windows_i686_gnullvm 0.52.6", "windows_i686_msvc 0.52.6", "windows_x86_64_gnu 0.52.6", "windows_x86_64_gnullvm 0.52.6", @@ -5157,10 +4686,20 @@ dependencies = [ ] [[package]] -name = "windows_aarch64_gnullvm" -version = "0.48.5" +name = "windows-targets" +version = "0.53.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" +checksum = "c66f69fcc9ce11da9966ddb31a40968cad001c5bedeb5c2b82ede4253ab48aef" +dependencies = [ + "windows_aarch64_gnullvm 0.53.0", + "windows_aarch64_msvc 0.53.0", + "windows_i686_gnu 0.53.0", + "windows_i686_gnullvm 0.53.0", + "windows_i686_msvc 0.53.0", + "windows_x86_64_gnu 0.53.0", + "windows_x86_64_gnullvm 0.53.0", + "windows_x86_64_msvc 0.53.0", +] [[package]] name = "windows_aarch64_gnullvm" @@ -5169,10 +4708,10 @@ 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] -name = "windows_aarch64_msvc" -version = "0.48.5" +name = "windows_aarch64_gnullvm" +version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" +checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" [[package]] name = "windows_aarch64_msvc" @@ -5181,10 +4720,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] -name = "windows_i686_gnu" -version = "0.48.5" +name = "windows_aarch64_msvc" +version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" +checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" [[package]] name = "windows_i686_gnu" @@ -5192,6 +4731,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" +[[package]] +name = "windows_i686_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" + [[package]] name = "windows_i686_gnullvm" version = "0.52.6" @@ -5199,10 +4744,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] -name = "windows_i686_msvc" -version = "0.48.5" +name = "windows_i686_gnullvm" +version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" +checksum = 
"9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" [[package]] name = "windows_i686_msvc" @@ -5211,10 +4756,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] -name = "windows_x86_64_gnu" -version = "0.48.5" +name = "windows_i686_msvc" +version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" +checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" [[package]] name = "windows_x86_64_gnu" @@ -5223,10 +4768,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] -name = "windows_x86_64_gnullvm" -version = "0.48.5" +name = "windows_x86_64_gnu" +version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" +checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" [[package]] name = "windows_x86_64_gnullvm" @@ -5235,10 +4780,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] -name = "windows_x86_64_msvc" -version = "0.48.5" +name = "windows_x86_64_gnullvm" +version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" +checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" [[package]] name = "windows_x86_64_msvc" @@ -5247,22 +4792,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] -name = "winnow" -version = "0.7.10" +name = "windows_x86_64_msvc" +version = 
"0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06928c8748d81b05c9be96aad92e1b6ff01833332f281e8cfca3be4b35fc9ec" -dependencies = [ - "memchr", -] +checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" [[package]] -name = "winreg" -version = "0.50.0" +name = "winnow" +version = "0.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +checksum = "74c7b26e3480b707944fc872477815d29a8e429d2f93a1ce000f5fa84a15cbcd" dependencies = [ - "cfg-if", - "windows-sys 0.48.0", + "memchr", ] [[package]] @@ -5271,28 +4812,23 @@ version = "0.39.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" dependencies = [ - "bitflags 2.9.1", + "bitflags", ] -[[package]] -name = "writeable" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" - [[package]] name = "yellowstone-fumarole-cli" -version = "0.1.0-pre.3+solana.2.1" +version = "0.1.0-rc1+solana.2" dependencies = [ + "bs58", "clap", "clap-verbosity-flag", "futures", "http-body-util", - "hyper 1.6.0", + "hyper", "hyper-util", "prometheus", "serde_yaml", - "solana-sdk", + "solana-pubkey", "tabled", "thiserror 1.0.69", "tokio", @@ -5307,13 +4843,13 @@ dependencies = [ [[package]] name = "yellowstone-fumarole-client" -version = "0.2.0-pre.3+solana.2.1" +version = "0.2.0-rc1+solana.2" dependencies = [ "async-trait", "futures", "fxhash", - "http 1.3.1", - "hyper 1.6.0", + "http", + "hyper", "lazy_static", "prometheus", "prost", @@ -5321,7 +4857,7 @@ dependencies = [ "serde", "serde_with", "serde_yaml", - "solana-sdk", + "solana-clock", "thiserror 1.0.69", "tokio", "tokio-stream", @@ -5335,11 +4871,12 @@ dependencies = [ [[package]] name = "yellowstone-fumarole-client-simple" -version 
= "0.1.1+solana.2.1.11" +version = "0.0.0" dependencies = [ + "bs58", "clap", "serde_yaml", - "solana-sdk", + "solana-pubkey", "tokio", "tokio-stream", "tonic", @@ -5350,9 +4887,9 @@ dependencies = [ [[package]] name = "yellowstone-grpc-client" -version = "5.1.0" +version = "8.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "036a48ab3619ae9353e7b1797fbffc100c9b075196b32308ac7d3364d6a62460" +checksum = "16a47271b3fc935ef50b8af1344852b14139d1ad99a6745f76c5aec4fee1a5d4" dependencies = [ "bytes", "futures", @@ -5364,85 +4901,48 @@ dependencies = [ [[package]] name = "yellowstone-grpc-proto" -version = "5.1.0" +version = "8.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa8694ec221758f065002e93c2aa93ccad56989e884185b3e3e483d031fb3d61" +checksum = "6367570bc15796024aaac70a5fcb26f7aedb83341e9281f2795040b281348c8f" dependencies = [ "anyhow", "bincode", "prost", "prost-types", "protobuf-src", + "solana-account", "solana-account-decoder", - "solana-sdk", + "solana-clock", + "solana-hash", + "solana-message", + "solana-pubkey", + "solana-signature", + "solana-transaction", + "solana-transaction-context", + "solana-transaction-error", "solana-transaction-status", "tonic", "tonic-build", ] -[[package]] -name = "yoke" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc" -dependencies = [ - "serde", - "stable_deref_trait", - "yoke-derive", - "zerofrom", -] - -[[package]] -name = "yoke-derive" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", - "synstructure", -] - [[package]] name = "zerocopy" -version = "0.8.25" +version = "0.8.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a1702d9583232ddb9174e01bb7c15a2ab8fb1bc6f227aa1233858c351a3ba0cb" +checksum = "1039dd0d3c310cf05de012d8a39ff557cb0d23087fd44cad61df08fc31907a2f" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.8.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28a6e20d751156648aa063f3800b706ee209a32c0b4d9f24be3d980b01be55ef" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", -] - -[[package]] -name = "zerofrom" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" -dependencies = [ - "zerofrom-derive", -] - -[[package]] -name = "zerofrom-derive" -version = "0.1.6" +version = "0.8.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", - "synstructure", + "syn 2.0.104", ] [[package]] @@ -5462,40 +4962,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", -] - -[[package]] -name = "zerotrie" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595" -dependencies = [ - "displaydoc", - "yoke", - "zerofrom", -] - -[[package]] -name = "zerovec" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a05eb080e015ba39cc9e23bbe5e7fb04d5fb040350f99f34e338d5fdd294428" -dependencies = [ - "yoke", - "zerofrom", - "zerovec-derive", -] - -[[package]] -name = "zerovec-derive" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" 
-dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", + "syn 2.0.104", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index b9ea7c1..5391120 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -8,8 +8,7 @@ resolver = "2" [workspace.package] authors = ["Triton One"] -version = "0.1.1+solana.2.1.11" -edition = "2021" +edition = "2024" homepage = "https://triton.one" repository = "https://github.com/rpcpool/yellowstone-fumarole" license = "AGPL-3.0" @@ -19,6 +18,7 @@ publish = false [workspace.dependencies] async-trait = "~0.1.88" +bs58 = "~0.5.1" clap = "^4.5.7" clap-verbosity-flag = "^3.0.2" futures = "~0.3.31" @@ -34,7 +34,8 @@ protobuf-src = "~1.1.0" serde = "~1.0.203" serde_with = "^3.8.1" serde_yaml = "~0.9.34" -solana-sdk = "~2.1.21" +solana-clock = "~2.2.1" +solana-pubkey = "~2.4.0" tabled = "~0.18.0" thiserror = "^1.0.61" tokio = "1" @@ -45,9 +46,9 @@ tower = "~0.5.2" tracing = "~0.1.41" tracing-subscriber = { version = "~0.3.18", features = ["env-filter"] } uuid = { version = "1" } -yellowstone-fumarole-client = { path = "crates/yellowstone-fumarole-client", version = "0.2.0-pre.3+solana.2.1" } -yellowstone-grpc-client = "5" -yellowstone-grpc-proto = "5" +yellowstone-fumarole-client = { path = "crates/yellowstone-fumarole-client", version = "0.2.0-rc1+solana.2" } +yellowstone-grpc-client = "8" +yellowstone-grpc-proto = "8" [workspace.lints.clippy] clone_on_ref_ptr = "deny" diff --git a/apps/yellowstone-fumarole-cli/Cargo.toml b/apps/yellowstone-fumarole-cli/Cargo.toml index 8563baf..db0a0d7 100644 --- a/apps/yellowstone-fumarole-cli/Cargo.toml +++ b/apps/yellowstone-fumarole-cli/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "yellowstone-fumarole-cli" description = "Yellowstone Fumarole CLI" -version = "0.1.0-pre.3+solana.2.1" +version = "0.1.0-rc1+solana.2" authors.workspace = true edition.workspace = true homepage.workspace = true @@ -14,6 +14,7 @@ publish = true name = "fume" [dependencies] +bs58 = { workspace = true } clap = { workspace = true, 
features = ["derive"] } clap-verbosity-flag = { workspace = true, features = ["tracing"] } futures = { workspace = true } @@ -21,8 +22,8 @@ http-body-util = { workspace = true } hyper = { workspace = true, features = ["http1"] } hyper-util = { workspace = true, features = ["http1"] } prometheus = { workspace = true } -solana-sdk = { workspace = true } serde_yaml = { workspace = true } +solana-pubkey = { workspace = true } tabled = { workspace = true } thiserror = { workspace = true } tokio = { workspace = true, features = ["rt-multi-thread", "signal"] } diff --git a/apps/yellowstone-fumarole-cli/src/bin/fume.rs b/apps/yellowstone-fumarole-cli/src/bin/fume.rs index db2c54b..574fb68 100644 --- a/apps/yellowstone-fumarole-cli/src/bin/fume.rs +++ b/apps/yellowstone-fumarole-cli/src/bin/fume.rs @@ -1,7 +1,7 @@ use { clap::Parser, futures::{future::BoxFuture, FutureExt}, - solana_sdk::{bs58, pubkey::Pubkey}, + solana_pubkey::Pubkey, std::{ collections::{HashMap, HashSet}, env, @@ -605,7 +605,6 @@ async fn subscribe(mut client: FumaroleClient, args: SubscribeArgs) { } } } - println!("Exiting subscribe loop"); } async fn test_config(mut fumarole_client: FumaroleClient) { diff --git a/apps/yellowstone-fumarole-cli/src/lib.rs b/apps/yellowstone-fumarole-cli/src/lib.rs index 1a9413f..9a54f14 100644 --- a/apps/yellowstone-fumarole-cli/src/lib.rs +++ b/apps/yellowstone-fumarole-cli/src/lib.rs @@ -1,18 +1,18 @@ //! Yellowstone Fumarole CLI //! This crate provides a command-line interface for interacting with the Yellowstone Fumarole client. //! It allows you to interact with Fumarole control-plane and manage your subscription resources. -//! +//! //! # Configuration -//! +//! //! ```yaml //! x_token: //! endpoint: //! ``` -//! +//! //! # Example usage: -//! +//! //! See the [`README`] for detailed usage instructions. -//! +//! //! [`README`]: https://github.com/rpcpool/yellowstone-fumarole/blob/main/apps/yellowstone-fumarole-cli/README.md -//! +//! 
pub mod prom; diff --git a/crates/yellowstone-fumarole-client/Cargo.toml b/crates/yellowstone-fumarole-client/Cargo.toml index 21a028b..d1ae8f2 100644 --- a/crates/yellowstone-fumarole-client/Cargo.toml +++ b/crates/yellowstone-fumarole-client/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "yellowstone-fumarole-client" description = "Yellowstone Fumarole Client" -version = "0.2.0-pre.3+solana.2.1" +version = "0.2.0-rc1+solana.2" authors = { workspace = true } edition = { workspace = true } homepage = { workspace = true } @@ -26,13 +26,13 @@ futures = { workspace = true } fxhash = { workspace = true } http = { workspace = true } hyper = { workspace = true } +solana-clock = { workspace = true } lazy_static = { workspace = true } prometheus = { workspace = true, optional = true } prost = { workspace = true } serde = { workspace = true, features = ["derive"] } serde_with = { workspace = true } serde_yaml = { workspace = true } -solana-sdk = { workspace = true } thiserror = { workspace = true } tokio = { workspace = true, features = ["time"] } tokio-stream = { workspace = true } diff --git a/crates/yellowstone-fumarole-client/build.rs b/crates/yellowstone-fumarole-client/build.rs index 02c5df0..58e0d58 100644 --- a/crates/yellowstone-fumarole-client/build.rs +++ b/crates/yellowstone-fumarole-client/build.rs @@ -6,7 +6,8 @@ fn main() { let yellowstone_grpc_proto_dir = path.join("yellowstone-grpc-proto"); let proto_dir = path.join("proto"); - env::set_var("PROTOC", protobuf_src::protoc()); + // TODO: Audit that the environment access only happens in single-threaded code. + unsafe { env::set_var("PROTOC", protobuf_src::protoc()) }; tonic_build::configure() .build_server(false) diff --git a/crates/yellowstone-fumarole-client/src/lib.rs b/crates/yellowstone-fumarole-client/src/lib.rs index 983071b..b161634 100644 --- a/crates/yellowstone-fumarole-client/src/lib.rs +++ b/crates/yellowstone-fumarole-client/src/lib.rs @@ -71,7 +71,7 @@ //! }; //! //! 
#[derive(Debug, Clone, Parser)] -//! #[clap(author, version, about = "Yellowstone gRPC ScyllaDB Tool")] +//! #[clap(author, version, about = "Yellowstone Fumarole Example")] //! struct Args { //! /// Path to static config file //! #[clap(long)] diff --git a/crates/yellowstone-fumarole-client/src/runtime/mod.rs b/crates/yellowstone-fumarole-client/src/runtime/mod.rs index ac60aab..5b7d6bf 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/mod.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/mod.rs @@ -6,7 +6,7 @@ pub(crate) mod tokio; use { crate::proto::{self, BlockchainEvent}, fxhash::FxHashMap, - solana_sdk::clock::Slot, + solana_clock::Slot, std::{ cmp::Reverse, collections::{hash_map, BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, VecDeque}, @@ -303,9 +303,8 @@ impl FumaroleSM { pub fn pop_next_slot_status(&mut self) -> Option { loop { let slot_status = self.slot_status_update_queue.pop_front()?; - if let Some(commitment_history) = - self.slot_commitment_progression.get_mut(&slot_status.slot) - { + match self.slot_commitment_progression.get_mut(&slot_status.slot) + { Some(commitment_history) => { if commitment_history .processed_commitment_levels .insert(slot_status.commitment_level) @@ -315,10 +314,10 @@ impl FumaroleSM { // We already processed this commitment level continue; } - } else { + } _ => { // This slot has not been downloaded yet, but still has a status to process unreachable!("slot status should not be available here"); - } + }} } } diff --git a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs index 4c349ed..0f31aae 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs @@ -16,7 +16,7 @@ use { FumaroleClient, FumaroleGrpcConnector, GrpcFumaroleClient, }, futures::StreamExt, - solana_sdk::clock::Slot, + solana_clock::Slot, std::{ collections::{HashMap, VecDeque}, time::{Duration, Instant}, 
diff --git a/examples/rust/Cargo.toml b/examples/rust/Cargo.toml index 33f1983..e43388e 100644 --- a/examples/rust/Cargo.toml +++ b/examples/rust/Cargo.toml @@ -1,7 +1,6 @@ [package] name = "yellowstone-fumarole-client-simple" description = "Simple Yellowstone Fumarole Client examples" -version = { workspace = true } authors = { workspace = true } edition = { workspace = true } homepage = { workspace = true } @@ -17,9 +16,10 @@ name = "example-fumarole" name = "dragonsmouth" [dependencies] -solana-sdk = { workspace = true } +bs58 = { workspace = true } clap = { workspace = true, features = ["derive"] } serde_yaml = { workspace = true } +solana-pubkey = { workspace = true } tokio = { workspace = true, features = ["rt-multi-thread"] } tokio-stream = { workspace = true } tonic = { workspace = true } diff --git a/examples/rust/src/bin/dragonsmouth.rs b/examples/rust/src/bin/dragonsmouth.rs index f8cfe86..5932222 100644 --- a/examples/rust/src/bin/dragonsmouth.rs +++ b/examples/rust/src/bin/dragonsmouth.rs @@ -1,6 +1,6 @@ use { clap::Parser, - solana_sdk::{bs58, pubkey::Pubkey}, + solana_pubkey::Pubkey, std::{collections::HashMap, path::PathBuf}, tokio_stream::StreamExt, yellowstone_fumarole_client::config::FumaroleConfig, @@ -12,7 +12,7 @@ use { }; #[derive(Debug, Clone, Parser)] -#[clap(author, version, about = "Yellowstone gRPC ScyllaDB Tool")] +#[clap(author, version, about = "Yellowstone Dragonsmouth Example")] struct Args { /// Path to static config file #[clap(long)] diff --git a/examples/rust/src/bin/example-fumarole.rs b/examples/rust/src/bin/example-fumarole.rs index 6fe6c5c..0f6c742 100644 --- a/examples/rust/src/bin/example-fumarole.rs +++ b/examples/rust/src/bin/example-fumarole.rs @@ -1,6 +1,6 @@ use { clap::Parser, - solana_sdk::{bs58, pubkey::Pubkey}, + solana_pubkey::Pubkey, std::{collections::HashMap, path::PathBuf}, yellowstone_fumarole_client::{ config::FumaroleConfig, DragonsmouthAdapterSession, FumaroleClient, @@ -12,7 +12,7 @@ use { }; 
#[derive(Debug, Clone, Parser)] -#[clap(author, version, about = "Yellowstone gRPC ScyllaDB Tool")] +#[clap(author, version, about = "Yellowstone Fumarole Example")] struct Args { /// Path to static config file #[clap(long)] diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 86cda94..176ac0c 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,5 +1,5 @@ [toolchain] -channel = "1.82.0" +channel = "1.85.0" components = ["clippy", "rustfmt"] targets = [] profile = "minimal" diff --git a/yellowstone-grpc b/yellowstone-grpc index b9f96ae..ea2a0d2 160000 --- a/yellowstone-grpc +++ b/yellowstone-grpc @@ -1 +1 @@ -Subproject commit b9f96ae944bb803b7e5c5a5acbdafe525b255566 +Subproject commit ea2a0d2ad00f4ea3ecca2b9b375175d18ae87279 From 89497dc089c388bcbfbeebd218c08d7b385d44eb Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Thu, 10 Jul 2025 16:00:10 -0400 Subject: [PATCH 41/56] proto: renamed fumarole_v2 package to fumarole --- Cargo.lock | 6 +- apps/yellowstone-fumarole-cli/Cargo.toml | 2 +- crates/yellowstone-fumarole-client/Cargo.toml | 2 +- crates/yellowstone-fumarole-client/build.rs | 1 - crates/yellowstone-fumarole-client/src/lib.rs | 6 +- .../src/runtime/tokio.rs | 1 + proto/fumarole.proto | 192 ++++++++++++----- proto/fumarole_v2.proto | 199 ------------------ 8 files changed, 152 insertions(+), 257 deletions(-) delete mode 100644 proto/fumarole_v2.proto diff --git a/Cargo.lock b/Cargo.lock index faf1ec2..3b5f1c5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. 
-version = 3 +version = 4 [[package]] name = "Inflector" @@ -4817,7 +4817,7 @@ dependencies = [ [[package]] name = "yellowstone-fumarole-cli" -version = "0.1.0-rc1+solana.2" +version = "0.1.0-rc2+solana.2" dependencies = [ "bs58", "clap", @@ -4843,7 +4843,7 @@ dependencies = [ [[package]] name = "yellowstone-fumarole-client" -version = "0.2.0-rc1+solana.2" +version = "0.2.0-rc2+solana.2" dependencies = [ "async-trait", "futures", diff --git a/apps/yellowstone-fumarole-cli/Cargo.toml b/apps/yellowstone-fumarole-cli/Cargo.toml index db0a0d7..7077216 100644 --- a/apps/yellowstone-fumarole-cli/Cargo.toml +++ b/apps/yellowstone-fumarole-cli/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "yellowstone-fumarole-cli" description = "Yellowstone Fumarole CLI" -version = "0.1.0-rc1+solana.2" +version = "0.1.0-rc2+solana.2" authors.workspace = true edition.workspace = true homepage.workspace = true diff --git a/crates/yellowstone-fumarole-client/Cargo.toml b/crates/yellowstone-fumarole-client/Cargo.toml index d1ae8f2..2a5d96f 100644 --- a/crates/yellowstone-fumarole-client/Cargo.toml +++ b/crates/yellowstone-fumarole-client/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "yellowstone-fumarole-client" description = "Yellowstone Fumarole Client" -version = "0.2.0-rc1+solana.2" +version = "0.2.0-rc2+solana.2" authors = { workspace = true } edition = { workspace = true } homepage = { workspace = true } diff --git a/crates/yellowstone-fumarole-client/build.rs b/crates/yellowstone-fumarole-client/build.rs index 58e0d58..1b472b8 100644 --- a/crates/yellowstone-fumarole-client/build.rs +++ b/crates/yellowstone-fumarole-client/build.rs @@ -14,7 +14,6 @@ fn main() { .compile_protos( &[ proto_dir.join("fumarole.proto"), - proto_dir.join("fumarole_v2.proto"), ], &[proto_dir, yellowstone_grpc_proto_dir], ) diff --git a/crates/yellowstone-fumarole-client/src/lib.rs b/crates/yellowstone-fumarole-client/src/lib.rs index b161634..b4e70d8 100644 --- a/crates/yellowstone-fumarole-client/src/lib.rs +++ 
b/crates/yellowstone-fumarole-client/src/lib.rs @@ -275,7 +275,7 @@ mod geyser { #[allow(clippy::missing_const_for_fn)] #[allow(clippy::all)] pub mod proto { - include!(concat!(env!("OUT_DIR"), "/fumarole_v2.rs")); + include!(concat!(env!("OUT_DIR"), "/fumarole.rs")); } use { @@ -353,7 +353,7 @@ pub const DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT: usize = 3; /// Default number of parallel data streams (TCP connections) to open to fumarole. /// // const _DEFAULT_PARA_DATA_STREAMS: u8 = 3; /**TODO: enable this after beta*/ - +/// /// /// Default maximum number of concurrent download requests to the fumarole service inside a single data plane TCP connection. /// @@ -400,7 +400,7 @@ pub struct FumaroleSubscribeConfig { /// Number of parallel data streams (TCP connections) to open to fumarole /// // pub num_data_plane_tcp_connections: NonZeroU8, /*TODO: enable this after beta */ - + /// /// /// Maximum number of concurrent download requests to the fumarole service inside a single data plane TCP connection. 
/// diff --git a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs index 0f31aae..55465e4 100644 --- a/crates/yellowstone-fumarole-client/src/runtime/tokio.rs +++ b/crates/yellowstone-fumarole-client/src/runtime/tokio.rs @@ -72,6 +72,7 @@ pub enum BackgroundJobResult { /// pub(crate) struct TokioFumeDragonsmouthRuntime { pub sm: FumaroleSM, + #[allow(dead_code)] pub blockchain_id: Vec, pub fumarole_client: FumaroleClient, pub download_task_runner_chans: DownloadTaskRunnerChannels, diff --git a/proto/fumarole.proto b/proto/fumarole.proto index 1613132..9707340 100644 --- a/proto/fumarole.proto +++ b/proto/fumarole.proto @@ -5,29 +5,46 @@ import public "geyser.proto"; package fumarole; service Fumarole { - rpc ListAvailableCommitmentLevels(ListAvailableCommitmentLevelsRequest) returns (ListAvailableCommitmentLevelsResponse) {} rpc GetConsumerGroupInfo(GetConsumerGroupInfoRequest) returns (ConsumerGroupInfo) {} rpc ListConsumerGroups(ListConsumerGroupsRequest) returns (ListConsumerGroupsResponse) {} rpc DeleteConsumerGroup(DeleteConsumerGroupRequest) returns (DeleteConsumerGroupResponse) {} - rpc CreateStaticConsumerGroup(CreateStaticConsumerGroupRequest) returns (CreateStaticConsumerGroupResponse) {} - rpc Subscribe(stream SubscribeRequest) returns (stream geyser.SubscribeUpdate) {} - rpc GetSlotLagInfo(GetSlotLagInfoRequest) returns (GetSlotLagInfoResponse) {} - rpc GetOldestSlot(GetOldestSlotRequest) returns (GetOldestSlotResponse) {} + rpc CreateConsumerGroup(CreateConsumerGroupRequest) returns (CreateConsumerGroupResponse) {} + + rpc DownloadBlock(DownloadBlockShard) returns (stream DataResponse) {} + + // Represents subscription to the data plane + rpc SubscribeData(stream DataCommand) returns (stream DataResponse) {} + + rpc GetChainTip(GetChainTipRequest) returns (GetChainTipResponse) {} + + // Represents subscription to the control plane + rpc Subscribe(stream ControlCommand) returns (stream 
ControlResponse) {} + + rpc Version(VersionRequest) returns (VersionResponse) {} } -message ListAvailableCommitmentLevelsRequest {} +message GetChainTipRequest { + bytes blockchain_id = 1; +} -message ListAvailableCommitmentLevelsResponse { - repeated geyser.CommitmentLevel commitment_levels = 1; +message GetChainTipResponse { + bytes blockchain_id = 1; + map shard_to_max_offset_map = 2; } -message GetConsumerGroupInfoRequest { - string consumer_group_label = 1; +message VersionRequest {} + +message VersionResponse { + string version = 1; } +message GetConsumerGroupInfoRequest { + string consumer_group_name = 1; +} + message DeleteConsumerGroupRequest { - string consumer_group_label = 1; + string consumer_group_name = 1; } message DeleteConsumerGroupResponse { @@ -43,63 +60,140 @@ message ListConsumerGroupsResponse { message ConsumerGroupInfo { string id = 1; - string consumer_group_label = 2; - ConsumerGroupType consumer_group_type = 3; - uint32 member_count = 4; - geyser.CommitmentLevel commitment_level = 5; - EventSubscriptionPolicy event_subscription_policy = 6; - bool is_stale = 7; + string consumer_group_name = 2; + bool is_stale = 3; + bytes blockchain_id = 4; } message GetSlotLagInfoRequest { - string consumer_group_label = 1; + string consumer_group_name = 1; } -message GetSlotLagInfoResponse { - uint64 max_slot_seen = 1; - uint64 global_max_slot = 2; +message BlockFilters { + map accounts = 1; + map transactions = 2; + map entries = 3; + map blocks_meta = 4; } -message SubscribeRequest { - string consumer_group_label = 1; - optional uint32 consumer_id = 2; - map accounts = 3; - map transactions = 4; +message DownloadBlockShard { + bytes blockchain_id = 1; + bytes block_uid = 2; + int32 shard_idx = 3; + optional BlockFilters blockFilters = 4; } -message CreateStaticConsumerGroupResponse { - string group_id = 1; + +message Ping { + uint32 ping_id = 1; } -enum ConsumerGroupType { - STATIC = 0; +message Pong { + uint32 ping_id = 1; } -enum InitialOffsetPolicy 
{ - EARLIEST = 0; - LATEST = 1; - SLOT = 2; +message DataCommand { + oneof command { + DownloadBlockShard download_block_shard = 1; + BlockFilters filter_update = 2; + } +} + +message BlockShardDownloadFinish {} + +message BlockNotFound { + bytes blockchain_id = 1; + bytes block_uid = 2; + int32 shard_idx = 3; } -enum EventSubscriptionPolicy { - ACCOUNT_UPDATE_ONLY = 0; - TRANSACTION_ONLY = 1; - BOTH = 2; +message DataError { + oneof error { + BlockNotFound not_found = 1; + } } -message CreateStaticConsumerGroupRequest { - string consumer_group_label = 1; - optional uint32 member_count = 2; - InitialOffsetPolicy initial_offset_policy = 3; - geyser.CommitmentLevel commitment_level = 4; - EventSubscriptionPolicy event_subscription_policy = 5; - optional int64 at_slot = 6; +message DataResponse { + oneof response { + geyser.SubscribeUpdate update = 1; + BlockShardDownloadFinish block_shard_download_finish = 2; + } +} + +message CommitOffset { + int64 offset = 1; + int32 shard_id = 2; +} + + +message PollBlockchainHistory { + int32 shard_id = 1; + optional int64 from = 2; + optional int64 limit = 3; } -message GetOldestSlotRequest { - geyser.CommitmentLevel commitment_level = 1; +message BlockchainEvent { + int64 offset = 1; + bytes blockchain_id = 2; + bytes block_uid = 3; + uint32 num_shards = 4; + uint64 slot = 5; + optional uint64 parent_slot = 6; + geyser.CommitmentLevel commitment_level = 7; + int32 blockchain_shard_id = 8; + optional string dead_error = 9; +} + + +message BlockchainHistory { + repeated BlockchainEvent events = 1; +} + +message JoinControlPlane { + optional string consumer_group_name = 1; +} + +message ControlCommand { + oneof command { + JoinControlPlane initial_join = 1; + CommitOffset commit_offset = 2; + PollBlockchainHistory poll_hist = 3; + Ping ping = 4; + } +} + +message ControlResponse { + oneof response { + InitialConsumerGroupState init = 1; + CommitOffsetResult commit_offset = 2; + BlockchainHistory poll_hist = 3; + Pong pong = 4; + } 
+} + + +message CommitOffsetResult { + int64 offset = 1; + int32 shard_id = 2; +} + +message InitialConsumerGroupState { + bytes blockchain_id = 1; + map last_committed_offsets = 2; +} + + +message CreateConsumerGroupResponse { + string consumer_group_id = 1; +} + +enum InitialOffsetPolicy { + LATEST = 0; + //FROM_SLOT = 1; } -message GetOldestSlotResponse { - optional uint64 slot = 1; +message CreateConsumerGroupRequest { + string consumer_group_name = 1; + InitialOffsetPolicy initial_offset_policy = 2; + //optional uint64 from_slot = 3; } \ No newline at end of file diff --git a/proto/fumarole_v2.proto b/proto/fumarole_v2.proto deleted file mode 100644 index d9e801e..0000000 --- a/proto/fumarole_v2.proto +++ /dev/null @@ -1,199 +0,0 @@ -syntax = "proto3"; - -import public "geyser.proto"; - -package fumarole_v2; - -service Fumarole { - rpc GetConsumerGroupInfo(GetConsumerGroupInfoRequest) returns (ConsumerGroupInfo) {} - rpc ListConsumerGroups(ListConsumerGroupsRequest) returns (ListConsumerGroupsResponse) {} - rpc DeleteConsumerGroup(DeleteConsumerGroupRequest) returns (DeleteConsumerGroupResponse) {} - rpc CreateConsumerGroup(CreateConsumerGroupRequest) returns (CreateConsumerGroupResponse) {} - - rpc DownloadBlock(DownloadBlockShard) returns (stream DataResponse) {} - - // Represents subscription to the data plane - rpc SubscribeData(stream DataCommand) returns (stream DataResponse) {} - - rpc GetChainTip(GetChainTipRequest) returns (GetChainTipResponse) {} - - // Represents subscription to the control plane - rpc Subscribe(stream ControlCommand) returns (stream ControlResponse) {} - - rpc Version(VersionRequest) returns (VersionResponse) {} -} - -message GetChainTipRequest { - bytes blockchain_id = 1; -} - -message GetChainTipResponse { - bytes blockchain_id = 1; - map shard_to_max_offset_map = 2; -} - -message VersionRequest {} - -message VersionResponse { - string version = 1; -} - - -message GetConsumerGroupInfoRequest { - string consumer_group_name = 1; -} 
- -message DeleteConsumerGroupRequest { - string consumer_group_name = 1; -} - -message DeleteConsumerGroupResponse { - bool success = 1; -} - -message ListConsumerGroupsRequest {} - -message ListConsumerGroupsResponse { - repeated ConsumerGroupInfo consumer_groups = 1; -} - - -message ConsumerGroupInfo { - string id = 1; - string consumer_group_name = 2; - bool is_stale = 3; - bytes blockchain_id = 4; -} - -message GetSlotLagInfoRequest { - string consumer_group_name = 1; -} - -message BlockFilters { - map accounts = 1; - map transactions = 2; - map entries = 3; - map blocks_meta = 4; -} - -message DownloadBlockShard { - bytes blockchain_id = 1; - bytes block_uid = 2; - int32 shard_idx = 3; - optional BlockFilters blockFilters = 4; -} - - -message Ping { - uint32 ping_id = 1; -} - -message Pong { - uint32 ping_id = 1; -} - -message DataCommand { - oneof command { - DownloadBlockShard download_block_shard = 1; - BlockFilters filter_update = 2; - } -} - -message BlockShardDownloadFinish {} - -message BlockNotFound { - bytes blockchain_id = 1; - bytes block_uid = 2; - int32 shard_idx = 3; -} - -message DataError { - oneof error { - BlockNotFound not_found = 1; - } -} - -message DataResponse { - oneof response { - geyser.SubscribeUpdate update = 1; - BlockShardDownloadFinish block_shard_download_finish = 2; - } -} - -message CommitOffset { - int64 offset = 1; - int32 shard_id = 2; -} - - -message PollBlockchainHistory { - int32 shard_id = 1; - optional int64 from = 2; - optional int64 limit = 3; -} - -message BlockchainEvent { - int64 offset = 1; - bytes blockchain_id = 2; - bytes block_uid = 3; - uint32 num_shards = 4; - uint64 slot = 5; - optional uint64 parent_slot = 6; - geyser.CommitmentLevel commitment_level = 7; - int32 blockchain_shard_id = 8; - optional string dead_error = 9; -} - - -message BlockchainHistory { - repeated BlockchainEvent events = 1; -} - -message JoinControlPlane { - optional string consumer_group_name = 1; -} - -message ControlCommand { - 
oneof command { - JoinControlPlane initial_join = 1; - CommitOffset commit_offset = 2; - PollBlockchainHistory poll_hist = 3; - Ping ping = 4; - } -} - -message ControlResponse { - oneof response { - InitialConsumerGroupState init = 1; - CommitOffsetResult commit_offset = 2; - BlockchainHistory poll_hist = 3; - Pong pong = 4; - } -} - - -message CommitOffsetResult { - int64 offset = 1; - int32 shard_id = 2; -} - -message InitialConsumerGroupState { - bytes blockchain_id = 1; - map last_committed_offsets = 2; -} - - -message CreateConsumerGroupResponse { - string consumer_group_id = 1; -} - -enum InitialOffsetPolicy { - LATEST = 0; - //FROM_SLOT = 1; -} - -message CreateConsumerGroupRequest { - string consumer_group_name = 1; - InitialOffsetPolicy initial_offset_policy = 2; - //optional uint64 from_slot = 3; -} \ No newline at end of file From 550c0702a70265b7e1d6d8a87d58c2009ab2d728 Mon Sep 17 00:00:00 2001 From: Louis-Vincent Date: Thu, 10 Jul 2025 16:01:41 -0400 Subject: [PATCH 42/56] workspace: fix fumarole-client version --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 5391120..e32d2f4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -46,7 +46,7 @@ tower = "~0.5.2" tracing = "~0.1.41" tracing-subscriber = { version = "~0.3.18", features = ["env-filter"] } uuid = { version = "1" } -yellowstone-fumarole-client = { path = "crates/yellowstone-fumarole-client", version = "0.2.0-rc1+solana.2" } +yellowstone-fumarole-client = { path = "crates/yellowstone-fumarole-client", version = "0.2.0-rc2+solana.2" } yellowstone-grpc-client = "8" yellowstone-grpc-proto = "8" From 1b7045ebebe7c308dddce683ad994239123dab8a Mon Sep 17 00:00:00 2001 From: Wilfred Almeida <60785452+WilfredAlmeida@users.noreply.github.com> Date: Mon, 11 Aug 2025 09:47:45 +0000 Subject: [PATCH 43/56] feat: initial code translation Signed-off-by: GitHub --- typescript-sdk/.prettierrc.json | 1 - typescript-sdk/Changelog.md | 26 - 
typescript-sdk/README.md | 4 - typescript-sdk/add-js-extensions.mjs | 107 - typescript-sdk/package-lock.json | 942 ----- typescript-sdk/package.json | 47 +- typescript-sdk/pnpm-lock.yaml | 335 ++ typescript-sdk/src/config/config.ts | 39 + typescript-sdk/src/connectivity.ts | 90 + typescript-sdk/src/grpc/connectivity.ts | 0 typescript-sdk/src/grpc/fumarole.ts | 3291 +++++++++++++---- typescript-sdk/src/grpc/geyser.ts | 1322 ++++--- .../src/grpc/google/protobuf/timestamp.ts | 57 +- typescript-sdk/src/grpc/solana-storage.ts | 559 +-- typescript-sdk/src/index.ts | 494 ++- typescript-sdk/src/runtime/aio.ts | 587 +++ typescript-sdk/src/runtime/queue.ts | 70 + typescript-sdk/src/runtime/state-machine.ts | 64 + typescript-sdk/src/types.ts | 99 + typescript-sdk/src/utils/aio.ts | 53 + typescript-sdk/tsconfig.cjs.json | 16 - typescript-sdk/tsconfig.esm.json | 14 - typescript-sdk/tsconfig.json | 8 - 23 files changed, 5243 insertions(+), 2982 deletions(-) delete mode 100644 typescript-sdk/.prettierrc.json delete mode 100644 typescript-sdk/Changelog.md delete mode 100644 typescript-sdk/README.md delete mode 100644 typescript-sdk/add-js-extensions.mjs delete mode 100644 typescript-sdk/package-lock.json create mode 100644 typescript-sdk/pnpm-lock.yaml create mode 100644 typescript-sdk/src/config/config.ts create mode 100644 typescript-sdk/src/connectivity.ts create mode 100644 typescript-sdk/src/grpc/connectivity.ts create mode 100644 typescript-sdk/src/runtime/aio.ts create mode 100644 typescript-sdk/src/runtime/queue.ts create mode 100644 typescript-sdk/src/runtime/state-machine.ts create mode 100644 typescript-sdk/src/types.ts create mode 100644 typescript-sdk/src/utils/aio.ts delete mode 100644 typescript-sdk/tsconfig.cjs.json delete mode 100644 typescript-sdk/tsconfig.esm.json delete mode 100644 typescript-sdk/tsconfig.json diff --git a/typescript-sdk/.prettierrc.json b/typescript-sdk/.prettierrc.json deleted file mode 100644 index 9e26dfe..0000000 --- 
a/typescript-sdk/.prettierrc.json +++ /dev/null @@ -1 +0,0 @@ -{} \ No newline at end of file diff --git a/typescript-sdk/Changelog.md b/typescript-sdk/Changelog.md deleted file mode 100644 index d235734..0000000 --- a/typescript-sdk/Changelog.md +++ /dev/null @@ -1,26 +0,0 @@ -# Changelog - -All notable changes to this project will be documented in this file. - -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), -and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). - -## [Unreleased] - -### Breaking Changes - -### Features - -### Fixes - -## [0.1.1] - -### Fixes - -### Features - -- Enable gRPC GZIP compression support which can either be `gzip` or `none` (default). - -## [0.1.0] - -Initial release \ No newline at end of file diff --git a/typescript-sdk/README.md b/typescript-sdk/README.md deleted file mode 100644 index fead172..0000000 --- a/typescript-sdk/README.md +++ /dev/null @@ -1,4 +0,0 @@ -# Yellowstone Fumarole SDK -SDK client for [Yellowstone Fumarole](https://github.com/rpcpool/yellowstone-fumarole) - -Checkout the [example](https://github.com/rpcpool/yellowstone-fumarole/tree/main/examples/typescript) to learn how to use it \ No newline at end of file diff --git a/typescript-sdk/add-js-extensions.mjs b/typescript-sdk/add-js-extensions.mjs deleted file mode 100644 index 176147d..0000000 --- a/typescript-sdk/add-js-extensions.mjs +++ /dev/null @@ -1,107 +0,0 @@ -import { fileURLToPath } from 'url'; -import { dirname } from 'path'; -import fs from 'fs'; -import path from 'path'; -import recast from 'recast'; -import babelParser from '@babel/parser'; - -const __filename = fileURLToPath(import.meta.url); -const __dirname = dirname(__filename); - -// List of external packages that require '.js' extensions -const packagesRequiringJsExtension = [ - 'protobufjs/minimal', - // Add other package paths as needed -]; - -function shouldAppendJsExtension(source) { - // Check if the path has an extension 
already - if (path.extname(source)) { - return false; - } - - // Check if the path is relative - if (source.startsWith('./') || source.startsWith('../')) { - return true; - } - - // Check if the path is in the whitelist of external packages - return packagesRequiringJsExtension.some(pkg => source === pkg || source.startsWith(`${pkg}/`)); -} - -function processFile(filePath) { - const code = fs.readFileSync(filePath, 'utf8'); - const ast = recast.parse(code, { - parser: { - parse: (source) => babelParser.parse(source, { - sourceType: 'module', - plugins: ['typescript'] - }) - } - }); - - let modified = false; - - recast.types.visit(ast, { - visitImportDeclaration(pathNode) { - const source = pathNode.node.source.value; - if (shouldAppendJsExtension(source)) { - pathNode.node.source.value = `${source}.js`; - modified = true; - } - return false; - }, - visitExportNamedDeclaration(pathNode) { - if (pathNode.node.source?.value) { - const source = pathNode.node.source.value; - if (shouldAppendJsExtension(source)) { - pathNode.node.source.value = `${source}.js`; - modified = true; - } - } - return false; - }, - visitExportAllDeclaration(pathNode) { - if (pathNode.node.source?.value) { - const source = pathNode.node.source.value; - if (shouldAppendJsExtension(source)) { - pathNode.node.source.value = `${source}.js`; - modified = true; - } - } - return false; - } - }); - - if (modified) { - const output = recast.print(ast).code; - fs.writeFileSync(filePath, output, 'utf8'); - console.log(`Updated import/export paths in: ${filePath}`); - } -} - -function traverseDir(dir) { - fs.readdirSync(dir).forEach((file) => { - const fullPath = path.join(dir, file); - const stat = fs.statSync(fullPath); - - if (stat.isDirectory()) { - traverseDir(fullPath); - } else if (stat.isFile() && path.extname(fullPath) === '.js') { - processFile(fullPath); - } - }); -} - -function main() { - const esmDir = path.resolve(__dirname, 'dist/esm'); - - if (!fs.existsSync(esmDir)) { - 
console.error(`Directory not found: ${esmDir}`); - process.exit(1); - } - - traverseDir(esmDir); -} - -main(); \ No newline at end of file diff --git a/typescript-sdk/package-lock.json b/typescript-sdk/package-lock.json deleted file mode 100644 index 61d0c83..0000000 --- a/typescript-sdk/package-lock.json +++ /dev/null @@ -1,942 +0,0 @@ -{ - "name": "@triton-one/yellowstone-fumarole", - "version": "3.0.0", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "@triton-one/yellowstone-fumarole", - "version": "3.0.0", - "license": "Apache-2.0", - "dependencies": { - "@grpc/grpc-js": "^1.8.0" - }, - "devDependencies": { - "@babel/parser": "^7.26.3", - "@solana/rpc-api": "=2.0.0", - "prettier": "^2.8.3", - "recast": "^0.23.9", - "ts-proto": "^1.139.0", - "typescript": "=5.2.2" - }, - "engines": { - "node": ">=20.18.0" - } - }, - "node_modules/@babel/helper-string-parser": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", - "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-validator-identifier": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", - "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/parser": { - "version": "7.26.9", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.26.9.tgz", - "integrity": "sha512-81NWa1njQblgZbQHxWHpxxCzNsa3ZwvFqpUg7P+NNUU6f3UU2jBEg4OlF/J6rl8+PQGh1q6/zWScd001YwcA5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.26.9" - }, - "bin": { - "parser": 
"bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/types": { - "version": "7.26.9", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.9.tgz", - "integrity": "sha512-Y3IR1cRnOxOCDvMmNiym7XpXQ93iGDDPHx+Zj+NM+rg0fBaShfQLkg+hKPaZCEvg5N/LeCo4+Rj/i3FuJsIQaw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-string-parser": "^7.25.9", - "@babel/helper-validator-identifier": "^7.25.9" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@grpc/grpc-js": { - "version": "1.12.6", - "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.12.6.tgz", - "integrity": "sha512-JXUj6PI0oqqzTGvKtzOkxtpsyPRNsrmhh41TtIz/zEB6J+AUiZZ0dxWzcMwO9Ns5rmSPuMdghlTbUuqIM48d3Q==", - "license": "Apache-2.0", - "dependencies": { - "@grpc/proto-loader": "^0.7.13", - "@js-sdsl/ordered-map": "^4.4.2" - }, - "engines": { - "node": ">=12.10.0" - } - }, - "node_modules/@grpc/proto-loader": { - "version": "0.7.13", - "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.7.13.tgz", - "integrity": "sha512-AiXO/bfe9bmxBjxxtYxFAXGZvMaN5s8kO+jBHAJCON8rJoB5YS/D6X7ZNc6XQkuHNmyl4CYaMI1fJ/Gn27RGGw==", - "license": "Apache-2.0", - "dependencies": { - "lodash.camelcase": "^4.3.0", - "long": "^5.0.0", - "protobufjs": "^7.2.5", - "yargs": "^17.7.2" - }, - "bin": { - "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/@js-sdsl/ordered-map": { - "version": "4.4.2", - "resolved": "https://registry.npmjs.org/@js-sdsl/ordered-map/-/ordered-map-4.4.2.tgz", - "integrity": "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==", - "license": "MIT", - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/js-sdsl" - } - }, - "node_modules/@protobufjs/aspromise": { - "version": "1.1.2", - "resolved": 
"https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", - "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==", - "license": "BSD-3-Clause" - }, - "node_modules/@protobufjs/base64": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", - "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==", - "license": "BSD-3-Clause" - }, - "node_modules/@protobufjs/codegen": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", - "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==", - "license": "BSD-3-Clause" - }, - "node_modules/@protobufjs/eventemitter": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", - "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==", - "license": "BSD-3-Clause" - }, - "node_modules/@protobufjs/fetch": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", - "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==", - "license": "BSD-3-Clause", - "dependencies": { - "@protobufjs/aspromise": "^1.1.1", - "@protobufjs/inquire": "^1.1.0" - } - }, - "node_modules/@protobufjs/float": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", - "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==", - "license": "BSD-3-Clause" - }, - "node_modules/@protobufjs/inquire": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", - "integrity": 
"sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==", - "license": "BSD-3-Clause" - }, - "node_modules/@protobufjs/path": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", - "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==", - "license": "BSD-3-Clause" - }, - "node_modules/@protobufjs/pool": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", - "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==", - "license": "BSD-3-Clause" - }, - "node_modules/@protobufjs/utf8": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", - "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==", - "license": "BSD-3-Clause" - }, - "node_modules/@solana/addresses": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@solana/addresses/-/addresses-2.0.0.tgz", - "integrity": "sha512-8n3c/mUlH1/z+pM8e7OJ6uDSXw26Be0dgYiokiqblO66DGQ0d+7pqFUFZ5pEGjJ9PU2lDTSfY8rHf4cemOqwzQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@solana/assertions": "2.0.0", - "@solana/codecs-core": "2.0.0", - "@solana/codecs-strings": "2.0.0", - "@solana/errors": "2.0.0" - }, - "engines": { - "node": ">=20.18.0" - }, - "peerDependencies": { - "typescript": ">=5" - } - }, - "node_modules/@solana/assertions": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@solana/assertions/-/assertions-2.0.0.tgz", - "integrity": "sha512-NyPPqZRNGXs/GAjfgsw7YS6vCTXWt4ibXveS+ciy5sdmp/0v3pA6DlzYjleF9Sljrew0IiON15rjaXamhDxYfQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@solana/errors": "2.0.0" - }, - "engines": { - "node": ">=20.18.0" - }, - "peerDependencies": { - "typescript": ">=5" - } - }, - 
"node_modules/@solana/codecs-core": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@solana/codecs-core/-/codecs-core-2.0.0.tgz", - "integrity": "sha512-qCG+3hDU5Pm8V6joJjR4j4Zv9md1z0RaecniNDIkEglnxmOUODnmPLWbtOjnDylfItyuZeDihK8hkewdj8cUtw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@solana/errors": "2.0.0" - }, - "engines": { - "node": ">=20.18.0" - }, - "peerDependencies": { - "typescript": ">=5" - } - }, - "node_modules/@solana/codecs-data-structures": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@solana/codecs-data-structures/-/codecs-data-structures-2.0.0.tgz", - "integrity": "sha512-N98Y4jsrC/XeOgqrfsGqcOFIaOoMsKdAxOmy5oqVaEN67YoGSLNC9ROnqamOAOrsZdicTWx9/YLKFmQi9DPh1A==", - "dev": true, - "license": "MIT", - "dependencies": { - "@solana/codecs-core": "2.0.0", - "@solana/codecs-numbers": "2.0.0", - "@solana/errors": "2.0.0" - }, - "engines": { - "node": ">=20.18.0" - }, - "peerDependencies": { - "typescript": ">=5" - } - }, - "node_modules/@solana/codecs-numbers": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@solana/codecs-numbers/-/codecs-numbers-2.0.0.tgz", - "integrity": "sha512-r66i7VzJO1MZkQWZIAI6jjJOFVpnq0+FIabo2Z2ZDtrArFus/SbSEv543yCLeD2tdR/G/p+1+P5On10qF50Y1Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "@solana/codecs-core": "2.0.0", - "@solana/errors": "2.0.0" - }, - "engines": { - "node": ">=20.18.0" - }, - "peerDependencies": { - "typescript": ">=5" - } - }, - "node_modules/@solana/codecs-strings": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@solana/codecs-strings/-/codecs-strings-2.0.0.tgz", - "integrity": "sha512-dNqeCypsvaHcjW86H0gYgAZGGkKVBeKVeh7WXlOZ9kno7PeQ2wNkpccyzDfuzaIsKv+HZUD3v/eo86GCvnKazQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@solana/codecs-core": "2.0.0", - "@solana/codecs-numbers": "2.0.0", - "@solana/errors": "2.0.0" - }, - "engines": { - "node": ">=20.18.0" - }, - "peerDependencies": { - 
"fastestsmallesttextencoderdecoder": "^1.0.22", - "typescript": ">=5" - } - }, - "node_modules/@solana/errors": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@solana/errors/-/errors-2.0.0.tgz", - "integrity": "sha512-IHlaPFSy4lvYco1oHJ3X8DbchWwAwJaL/4wZKnF1ugwZ0g0re8wbABrqNOe/jyZ84VU9Z14PYM8W9oDAebdJbw==", - "dev": true, - "license": "MIT", - "dependencies": { - "chalk": "^5.3.0", - "commander": "^12.1.0" - }, - "bin": { - "errors": "bin/cli.mjs" - }, - "engines": { - "node": ">=20.18.0" - }, - "peerDependencies": { - "typescript": ">=5" - } - }, - "node_modules/@solana/functional": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@solana/functional/-/functional-2.0.0.tgz", - "integrity": "sha512-Sj+sLiUTimnMEyGnSLGt0lbih2xPDUhxhonnrIkPwA+hjQ3ULGHAxeevHU06nqiVEgENQYUJ5rCtHs4xhUFAkQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=20.18.0" - }, - "peerDependencies": { - "typescript": ">=5" - } - }, - "node_modules/@solana/instructions": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@solana/instructions/-/instructions-2.0.0.tgz", - "integrity": "sha512-MiTEiNF7Pzp+Y+x4yadl2VUcNHboaW5WP52psBuhHns3GpbbruRv5efMpM9OEQNe1OsN+Eg39vjEidX55+P+DQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@solana/errors": "2.0.0" - }, - "engines": { - "node": ">=20.18.0" - }, - "peerDependencies": { - "typescript": ">=5" - } - }, - "node_modules/@solana/keys": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@solana/keys/-/keys-2.0.0.tgz", - "integrity": "sha512-SSLSX8BXRvfLKBqsmBghmlhMKpwHeWd5CHi5zXgTS1BRrtiU6lcrTVC9ie6B+WaNNq7oe3e6K5bdbhu3fFZ+0g==", - "dev": true, - "license": "MIT", - "dependencies": { - "@solana/assertions": "2.0.0", - "@solana/codecs-core": "2.0.0", - "@solana/codecs-strings": "2.0.0", - "@solana/errors": "2.0.0" - }, - "engines": { - "node": ">=20.18.0" - }, - "peerDependencies": { - "typescript": ">=5" - } - }, - "node_modules/@solana/rpc-api": { - "version": 
"2.0.0", - "resolved": "https://registry.npmjs.org/@solana/rpc-api/-/rpc-api-2.0.0.tgz", - "integrity": "sha512-1FwitYxwADMF/6zKP2kNXg8ESxB6GhNBNW1c4f5dEmuXuBbeD/enLV3WMrpg8zJkIaaYarEFNbt7R7HyFzmURQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@solana/addresses": "2.0.0", - "@solana/codecs-core": "2.0.0", - "@solana/codecs-strings": "2.0.0", - "@solana/errors": "2.0.0", - "@solana/keys": "2.0.0", - "@solana/rpc-parsed-types": "2.0.0", - "@solana/rpc-spec": "2.0.0", - "@solana/rpc-transformers": "2.0.0", - "@solana/rpc-types": "2.0.0", - "@solana/transaction-messages": "2.0.0", - "@solana/transactions": "2.0.0" - }, - "engines": { - "node": ">=20.18.0" - }, - "peerDependencies": { - "typescript": ">=5" - } - }, - "node_modules/@solana/rpc-parsed-types": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@solana/rpc-parsed-types/-/rpc-parsed-types-2.0.0.tgz", - "integrity": "sha512-VCeY/oKVEtBnp8EDOc5LSSiOeIOLFIgLndcxqU0ij/cZaQ01DOoHbhluvhZtU80Z3dUeicec8TiMgkFzed+WhQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=20.18.0" - }, - "peerDependencies": { - "typescript": ">=5" - } - }, - "node_modules/@solana/rpc-spec": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@solana/rpc-spec/-/rpc-spec-2.0.0.tgz", - "integrity": "sha512-1uIDzj7vocCUqfOifjv1zAuxQ53ugiup/42edVFoQLOnJresoEZLL6WjnsJq4oCTccEAvGhUBI1WWKeZTGNxFQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@solana/errors": "2.0.0", - "@solana/rpc-spec-types": "2.0.0" - }, - "engines": { - "node": ">=20.18.0" - }, - "peerDependencies": { - "typescript": ">=5" - } - }, - "node_modules/@solana/rpc-spec-types": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@solana/rpc-spec-types/-/rpc-spec-types-2.0.0.tgz", - "integrity": "sha512-G2lmhFhgtxMQd/D6B04BHGE7bm5dMZdIPQNOqVGhzNAVjrmyapD3JN2hKAbmaYPe97wLfZERw0Ux1u4Y6q7TqA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=20.18.0" - }, - "peerDependencies": { 
- "typescript": ">=5" - } - }, - "node_modules/@solana/rpc-transformers": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@solana/rpc-transformers/-/rpc-transformers-2.0.0.tgz", - "integrity": "sha512-H6tN0qcqzUangowsLLQtYXKJsf1Roe3/qJ1Cy0gv9ojY9uEvNbJqpeEj+7blv0MUZfEe+rECAwBhxxRKPMhYGw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@solana/errors": "2.0.0", - "@solana/functional": "2.0.0", - "@solana/rpc-spec-types": "2.0.0", - "@solana/rpc-types": "2.0.0" - }, - "engines": { - "node": ">=20.18.0" - }, - "peerDependencies": { - "typescript": ">=5" - } - }, - "node_modules/@solana/rpc-types": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@solana/rpc-types/-/rpc-types-2.0.0.tgz", - "integrity": "sha512-o1ApB9PYR0A3XjVSOh//SOVWgjDcqMlR3UNmtqciuREIBmWqnvPirdOa5EJxD3iPhfA4gnNnhGzT+tMDeDW/Kw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@solana/addresses": "2.0.0", - "@solana/codecs-core": "2.0.0", - "@solana/codecs-numbers": "2.0.0", - "@solana/codecs-strings": "2.0.0", - "@solana/errors": "2.0.0" - }, - "engines": { - "node": ">=20.18.0" - }, - "peerDependencies": { - "typescript": ">=5" - } - }, - "node_modules/@solana/transaction-messages": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@solana/transaction-messages/-/transaction-messages-2.0.0.tgz", - "integrity": "sha512-Uc6Fw1EJLBrmgS1lH2ZfLAAKFvprWPQQzOVwZS78Pv8Whsk7tweYTK6S0Upv0nHr50rGpnORJfmdBrXE6OfNGg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@solana/addresses": "2.0.0", - "@solana/codecs-core": "2.0.0", - "@solana/codecs-data-structures": "2.0.0", - "@solana/codecs-numbers": "2.0.0", - "@solana/errors": "2.0.0", - "@solana/functional": "2.0.0", - "@solana/instructions": "2.0.0", - "@solana/rpc-types": "2.0.0" - }, - "engines": { - "node": ">=20.18.0" - }, - "peerDependencies": { - "typescript": ">=5" - } - }, - "node_modules/@solana/transactions": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/@solana/transactions/-/transactions-2.0.0.tgz", - "integrity": "sha512-VfdTE+59WKvuBG//6iE9RPjAB+ZT2kLgY2CDHabaz6RkH6OjOkMez9fWPVa3Xtcus+YQWN1SnQoryjF/xSx04w==", - "dev": true, - "license": "MIT", - "dependencies": { - "@solana/addresses": "2.0.0", - "@solana/codecs-core": "2.0.0", - "@solana/codecs-data-structures": "2.0.0", - "@solana/codecs-numbers": "2.0.0", - "@solana/codecs-strings": "2.0.0", - "@solana/errors": "2.0.0", - "@solana/functional": "2.0.0", - "@solana/instructions": "2.0.0", - "@solana/keys": "2.0.0", - "@solana/rpc-types": "2.0.0", - "@solana/transaction-messages": "2.0.0" - }, - "engines": { - "node": ">=20.18.0" - }, - "peerDependencies": { - "typescript": ">=5" - } - }, - "node_modules/@types/node": { - "version": "22.13.5", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.13.5.tgz", - "integrity": "sha512-+lTU0PxZXn0Dr1NBtC7Y8cR21AJr87dLLU953CWA6pMxxv/UDc7jYAY90upcrie1nRcD6XNG5HOYEDtgW5TxAg==", - "license": "MIT", - "dependencies": { - "undici-types": "~6.20.0" - } - }, - "node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "license": "MIT", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/ast-types": { - "version": "0.16.1", - "resolved": "https://registry.npmjs.org/ast-types/-/ast-types-0.16.1.tgz", - "integrity": 
"sha512-6t10qk83GOG8p0vKmaCr8eiilZwO171AvbROMtvvNiwrTly62t+7XkA8RdIIVbpMhCASAsxgAzdRSwh6nw/5Dg==", - "dev": true, - "license": "MIT", - "dependencies": { - "tslib": "^2.0.1" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/case-anything": { - "version": "2.1.13", - "resolved": "https://registry.npmjs.org/case-anything/-/case-anything-2.1.13.tgz", - "integrity": "sha512-zlOQ80VrQ2Ue+ymH5OuM/DlDq64mEm+B9UTdHULv5osUMD6HalNTblf2b1u/m6QecjsnOkBpqVZ+XPwIVsy7Ng==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12.13" - }, - "funding": { - "url": "https://github.com/sponsors/mesqueeb" - } - }, - "node_modules/chalk": { - "version": "5.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.4.1.tgz", - "integrity": "sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^12.17.0 || ^14.13 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/cliui": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", - "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", - "license": "ISC", - "dependencies": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.1", - "wrap-ansi": "^7.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "license": "MIT", - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": 
"sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "license": "MIT" - }, - "node_modules/commander": { - "version": "12.1.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-12.1.0.tgz", - "integrity": "sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - } - }, - "node_modules/detect-libc": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz", - "integrity": "sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==", - "dev": true, - "license": "Apache-2.0", - "bin": { - "detect-libc": "bin/detect-libc.js" - }, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/dprint-node": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/dprint-node/-/dprint-node-1.0.8.tgz", - "integrity": "sha512-iVKnUtYfGrYcW1ZAlfR/F59cUVL8QIhWoBJoSjkkdua/dkWIgjZfiLMeTjiB06X0ZLkQ0M2C1VbUj/CxkIf1zg==", - "dev": true, - "license": "MIT", - "dependencies": { - "detect-libc": "^1.0.3" - } - }, - "node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "license": "MIT" - }, - "node_modules/escalade": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", - "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/esprima": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", - "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", - "dev": true, - 
"license": "BSD-2-Clause", - "bin": { - "esparse": "bin/esparse.js", - "esvalidate": "bin/esvalidate.js" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/fastestsmallesttextencoderdecoder": { - "version": "1.0.22", - "resolved": "https://registry.npmjs.org/fastestsmallesttextencoderdecoder/-/fastestsmallesttextencoderdecoder-1.0.22.tgz", - "integrity": "sha512-Pb8d48e+oIuY4MaM64Cd7OW1gt4nxCHs7/ddPPZ/Ic3sg8yVGM7O9wDvZ7us6ScaUupzM+pfBolwtYhN1IxBIw==", - "dev": true, - "license": "CC0-1.0", - "peer": true - }, - "node_modules/get-caller-file": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", - "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", - "license": "ISC", - "engines": { - "node": "6.* || 8.* || >= 10.*" - } - }, - "node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/lodash.camelcase": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", - "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==", - "license": "MIT" - }, - "node_modules/long": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/long/-/long-5.3.1.tgz", - "integrity": "sha512-ka87Jz3gcx/I7Hal94xaN2tZEOPoUOEVftkQqZx2EeQRN7LGdfLlI3FvZ+7WDplm+vK2Urx9ULrvSowtdCieng==", - "license": "Apache-2.0" - }, - "node_modules/prettier": { - "version": "2.8.8", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.8.tgz", - "integrity": "sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==", - "dev": 
true, - "license": "MIT", - "bin": { - "prettier": "bin-prettier.js" - }, - "engines": { - "node": ">=10.13.0" - }, - "funding": { - "url": "https://github.com/prettier/prettier?sponsor=1" - } - }, - "node_modules/protobufjs": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz", - "integrity": "sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==", - "hasInstallScript": true, - "license": "BSD-3-Clause", - "dependencies": { - "@protobufjs/aspromise": "^1.1.2", - "@protobufjs/base64": "^1.1.2", - "@protobufjs/codegen": "^2.0.4", - "@protobufjs/eventemitter": "^1.1.0", - "@protobufjs/fetch": "^1.1.0", - "@protobufjs/float": "^1.0.2", - "@protobufjs/inquire": "^1.1.0", - "@protobufjs/path": "^1.1.2", - "@protobufjs/pool": "^1.1.0", - "@protobufjs/utf8": "^1.1.0", - "@types/node": ">=13.7.0", - "long": "^5.0.0" - }, - "engines": { - "node": ">=12.0.0" - } - }, - "node_modules/recast": { - "version": "0.23.9", - "resolved": "https://registry.npmjs.org/recast/-/recast-0.23.9.tgz", - "integrity": "sha512-Hx/BGIbwj+Des3+xy5uAtAbdCyqK9y9wbBcDFDYanLS9JnMqf7OeF87HQwUimE87OEc72mr6tkKUKMBBL+hF9Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "ast-types": "^0.16.1", - "esprima": "~4.0.0", - "source-map": "~0.6.1", - "tiny-invariant": "^1.3.3", - "tslib": "^2.0.1" - }, - "engines": { - "node": ">= 4" - } - }, - "node_modules/require-directory": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", - "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - 
"dev": true, - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/tiny-invariant": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.3.tgz", - "integrity": "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==", - "dev": true, - "license": "MIT" - }, - "node_modules/ts-poet": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/ts-poet/-/ts-poet-6.11.0.tgz", - "integrity": "sha512-r5AGF8vvb+GjBsnqiTqbLhN1/U2FJt6BI+k0dfCrkKzWvUhNlwMmq9nDHuucHs45LomgHjZPvYj96dD3JawjJA==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "dprint-node": "^1.0.8" - } - }, - "node_modules/ts-proto": { - "version": "1.181.2", - "resolved": "https://registry.npmjs.org/ts-proto/-/ts-proto-1.181.2.tgz", - "integrity": "sha512-knJ8dtjn2Pd0c5ZGZG8z9DMiD4PUY8iGI9T9tb8DvGdWRMkLpf0WcPO7G+7cmbZyxvNTAG6ci3fybEaFgMZIvg==", - "dev": true, - "license": "ISC", - "dependencies": { - "case-anything": "^2.1.13", - "protobufjs": "^7.2.4", - "ts-poet": "^6.7.0", - "ts-proto-descriptors": "1.16.0" - }, - "bin": { - "protoc-gen-ts_proto": "protoc-gen-ts_proto" - } - }, - 
"node_modules/ts-proto-descriptors": { - "version": "1.16.0", - "resolved": "https://registry.npmjs.org/ts-proto-descriptors/-/ts-proto-descriptors-1.16.0.tgz", - "integrity": "sha512-3yKuzMLpltdpcyQji1PJZRfoo4OJjNieKTYkQY8pF7xGKsYz/RHe3aEe4KiRxcinoBmnEhmuI+yJTxLb922ULA==", - "dev": true, - "license": "ISC", - "dependencies": { - "long": "^5.2.3", - "protobufjs": "^7.2.4" - } - }, - "node_modules/tslib": { - "version": "2.8.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", - "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", - "dev": true, - "license": "0BSD" - }, - "node_modules/typescript": { - "version": "5.2.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.2.2.tgz", - "integrity": "sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==", - "dev": true, - "license": "Apache-2.0", - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=14.17" - } - }, - "node_modules/undici-types": { - "version": "6.20.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz", - "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==", - "license": "MIT" - }, - "node_modules/wrap-ansi": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/y18n": { - "version": "5.0.8", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", - "integrity": 
"sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", - "license": "ISC", - "engines": { - "node": ">=10" - } - }, - "node_modules/yargs": { - "version": "17.7.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", - "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", - "license": "MIT", - "dependencies": { - "cliui": "^8.0.1", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.3", - "y18n": "^5.0.5", - "yargs-parser": "^21.1.1" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/yargs-parser": { - "version": "21.1.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", - "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", - "license": "ISC", - "engines": { - "node": ">=12" - } - } - } -} diff --git a/typescript-sdk/package.json b/typescript-sdk/package.json index 95864f6..854cd37 100644 --- a/typescript-sdk/package.json +++ b/typescript-sdk/package.json @@ -10,7 +10,6 @@ "types": "./dist/types/index.d.ts", "scripts": { "build": "npm run grpc-generate && tsc --project tsconfig.esm.json && tsc --project tsconfig.cjs.json && node add-js-extensions.mjs", - "fmt": "prettier -w .", "grpc-generate": "mkdir -p src/grpc && protoc -I../yellowstone-grpc/yellowstone-grpc-proto/proto -I../proto --plugin=node_modules/.bin/protoc-gen-ts_proto --ts_proto_opt=forceLong=string --ts_proto_opt=outputServices=grpc-js --experimental_allow_proto3_optional --ts_proto_out=src/grpc fumarole.proto --ts_proto_opt=esModuleInterop=true" }, "repository": { @@ -30,48 +29,12 @@ "fumarole" ], "homepage": "https://triton.one", - "dependencies": { - "@grpc/grpc-js": "^1.8.0" - }, "devDependencies": { - "@babel/parser": "^7.26.3", - "@solana/rpc-api": "=2.0.0", - "prettier": "^2.8.3", - "recast": "^0.23.9", - "ts-proto": 
"^1.139.0", - "typescript": "=5.2.2" - }, - "engines": { - "node": ">=20.18.0" + "ts-proto": "^2.7.7" }, - "files": [ - "dist" - ], - "exports": { - ".": { - "types": "./dist/types/index.d.ts", - "import": "./dist/esm/index.js", - "require": "./dist/cjs/index.js" - }, - "./grpc/fumarole": { - "types": "./dist/types/grpc/fumarole.d.ts", - "import": "./dist/esm/grpc/fumarole.js", - "require": "./dist/cjs/grpc/fumarole.js" - }, - "./grpc/geyser": { - "types": "./dist/types/grpc/geyser.d.ts", - "import": "./dist/esm/grpc/geyser.js", - "require": "./dist/cjs/grpc/geyser.js" - }, - "./dist/types/grpc/fumarole": { - "types": "./dist/types/grpc/fumarole.d.ts", - "import": "./dist/esm/grpc/fumarole.js", - "require": "./dist/cjs/grpc/fumarole.js" - }, - "./dist/types/grpc/geyser": { - "types": "./dist/types/grpc/geyser.d.ts", - "import": "./dist/esm/grpc/geyser.js", - "require": "./dist/cjs/grpc/geyser.js" - } + "dependencies": { + "@grpc/grpc-js": "^1.13.4", + "@types/js-yaml": "^4.0.9", + "js-yaml": "^4.1.0" } } \ No newline at end of file diff --git a/typescript-sdk/pnpm-lock.yaml b/typescript-sdk/pnpm-lock.yaml new file mode 100644 index 0000000..2ea94b9 --- /dev/null +++ b/typescript-sdk/pnpm-lock.yaml @@ -0,0 +1,335 @@ +lockfileVersion: '9.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +importers: + + .: + dependencies: + '@grpc/grpc-js': + specifier: ^1.13.4 + version: 1.13.4 + '@types/js-yaml': + specifier: ^4.0.9 + version: 4.0.9 + js-yaml: + specifier: ^4.1.0 + version: 4.1.0 + devDependencies: + ts-proto: + specifier: ^2.7.7 + version: 2.7.7 + +packages: + + '@bufbuild/protobuf@2.6.3': + resolution: {integrity: sha512-w/gJKME9mYN7ZoUAmSMAWXk4hkVpxRKvEJCb3dV5g9wwWdxTJJ0ayOJAVcNxtdqaxDyFuC0uz4RSGVacJ030PQ==} + + '@grpc/grpc-js@1.13.4': + resolution: {integrity: sha512-GsFaMXCkMqkKIvwCQjCrwH+GHbPKBjhwo/8ZuUkWHqbI73Kky9I+pQltrlT0+MWpedCoosda53lgjYfyEPgxBg==} + engines: {node: '>=12.10.0'} + + '@grpc/proto-loader@0.7.15': + resolution: 
{integrity: sha512-tMXdRCfYVixjuFK+Hk0Q1s38gV9zDiDJfWL3h1rv4Qc39oILCu1TRTDt7+fGUI8K4G1Fj125Hx/ru3azECWTyQ==} + engines: {node: '>=6'} + hasBin: true + + '@js-sdsl/ordered-map@4.4.2': + resolution: {integrity: sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==} + + '@protobufjs/aspromise@1.1.2': + resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==} + + '@protobufjs/base64@1.1.2': + resolution: {integrity: sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==} + + '@protobufjs/codegen@2.0.4': + resolution: {integrity: sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==} + + '@protobufjs/eventemitter@1.1.0': + resolution: {integrity: sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==} + + '@protobufjs/fetch@1.1.0': + resolution: {integrity: sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==} + + '@protobufjs/float@1.0.2': + resolution: {integrity: sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==} + + '@protobufjs/inquire@1.1.0': + resolution: {integrity: sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==} + + '@protobufjs/path@1.1.2': + resolution: {integrity: sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==} + + '@protobufjs/pool@1.1.0': + resolution: {integrity: sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==} + + '@protobufjs/utf8@1.1.0': + resolution: {integrity: sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==} + + '@types/js-yaml@4.0.9': + resolution: {integrity: 
sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg==} + + '@types/node@24.2.1': + resolution: {integrity: sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ==} + + ansi-regex@5.0.1: + resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} + engines: {node: '>=8'} + + ansi-styles@4.3.0: + resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} + + argparse@2.0.1: + resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} + + case-anything@2.1.13: + resolution: {integrity: sha512-zlOQ80VrQ2Ue+ymH5OuM/DlDq64mEm+B9UTdHULv5osUMD6HalNTblf2b1u/m6QecjsnOkBpqVZ+XPwIVsy7Ng==} + engines: {node: '>=12.13'} + + cliui@8.0.1: + resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} + engines: {node: '>=12'} + + color-convert@2.0.1: + resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} + + color-name@1.1.4: + resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + + detect-libc@1.0.3: + resolution: {integrity: sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==} + engines: {node: '>=0.10'} + hasBin: true + + dprint-node@1.0.8: + resolution: {integrity: sha512-iVKnUtYfGrYcW1ZAlfR/F59cUVL8QIhWoBJoSjkkdua/dkWIgjZfiLMeTjiB06X0ZLkQ0M2C1VbUj/CxkIf1zg==} + + emoji-regex@8.0.0: + resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + + escalade@3.2.0: + resolution: {integrity: 
sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==} + engines: {node: '>=6'} + + get-caller-file@2.0.5: + resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} + engines: {node: 6.* || 8.* || >= 10.*} + + is-fullwidth-code-point@3.0.0: + resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} + engines: {node: '>=8'} + + js-yaml@4.1.0: + resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==} + hasBin: true + + lodash.camelcase@4.3.0: + resolution: {integrity: sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==} + + long@5.3.2: + resolution: {integrity: sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==} + + protobufjs@7.5.3: + resolution: {integrity: sha512-sildjKwVqOI2kmFDiXQ6aEB0fjYTafpEvIBs8tOR8qI4spuL9OPROLVu2qZqi/xgCfsHIwVqlaF8JBjWFHnKbw==} + engines: {node: '>=12.0.0'} + + require-directory@2.1.1: + resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} + engines: {node: '>=0.10.0'} + + string-width@4.2.3: + resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} + engines: {node: '>=8'} + + strip-ansi@6.0.1: + resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} + engines: {node: '>=8'} + + ts-poet@6.12.0: + resolution: {integrity: sha512-xo+iRNMWqyvXpFTaOAvLPA5QAWO6TZrSUs5s4Odaya3epqofBu/fMLHEWl8jPmjhA0s9sgj9sNvF1BmaQlmQkA==} + + ts-proto-descriptors@2.0.0: + resolution: {integrity: sha512-wHcTH3xIv11jxgkX5OyCSFfw27agpInAd6yh89hKG6zqIXnjW9SYqSER2CVQxdPj4czeOhGagNvZBEbJPy7qkw==} + + ts-proto@2.7.7: + resolution: {integrity: 
sha512-/OfN9/Yriji2bbpOysZ/Jzc96isOKz+eBTJEcKaIZ0PR6x1TNgVm4Lz0zfbo+J0jwFO7fJjJyssefBPQ0o1V9A==} + hasBin: true + + undici-types@7.10.0: + resolution: {integrity: sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag==} + + wrap-ansi@7.0.0: + resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} + engines: {node: '>=10'} + + y18n@5.0.8: + resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} + engines: {node: '>=10'} + + yargs-parser@21.1.1: + resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} + engines: {node: '>=12'} + + yargs@17.7.2: + resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==} + engines: {node: '>=12'} + +snapshots: + + '@bufbuild/protobuf@2.6.3': {} + + '@grpc/grpc-js@1.13.4': + dependencies: + '@grpc/proto-loader': 0.7.15 + '@js-sdsl/ordered-map': 4.4.2 + + '@grpc/proto-loader@0.7.15': + dependencies: + lodash.camelcase: 4.3.0 + long: 5.3.2 + protobufjs: 7.5.3 + yargs: 17.7.2 + + '@js-sdsl/ordered-map@4.4.2': {} + + '@protobufjs/aspromise@1.1.2': {} + + '@protobufjs/base64@1.1.2': {} + + '@protobufjs/codegen@2.0.4': {} + + '@protobufjs/eventemitter@1.1.0': {} + + '@protobufjs/fetch@1.1.0': + dependencies: + '@protobufjs/aspromise': 1.1.2 + '@protobufjs/inquire': 1.1.0 + + '@protobufjs/float@1.0.2': {} + + '@protobufjs/inquire@1.1.0': {} + + '@protobufjs/path@1.1.2': {} + + '@protobufjs/pool@1.1.0': {} + + '@protobufjs/utf8@1.1.0': {} + + '@types/js-yaml@4.0.9': {} + + '@types/node@24.2.1': + dependencies: + undici-types: 7.10.0 + + ansi-regex@5.0.1: {} + + ansi-styles@4.3.0: + dependencies: + color-convert: 2.0.1 + + argparse@2.0.1: {} + + case-anything@2.1.13: {} + + cliui@8.0.1: + dependencies: + string-width: 4.2.3 + strip-ansi: 6.0.1 + 
wrap-ansi: 7.0.0 + + color-convert@2.0.1: + dependencies: + color-name: 1.1.4 + + color-name@1.1.4: {} + + detect-libc@1.0.3: {} + + dprint-node@1.0.8: + dependencies: + detect-libc: 1.0.3 + + emoji-regex@8.0.0: {} + + escalade@3.2.0: {} + + get-caller-file@2.0.5: {} + + is-fullwidth-code-point@3.0.0: {} + + js-yaml@4.1.0: + dependencies: + argparse: 2.0.1 + + lodash.camelcase@4.3.0: {} + + long@5.3.2: {} + + protobufjs@7.5.3: + dependencies: + '@protobufjs/aspromise': 1.1.2 + '@protobufjs/base64': 1.1.2 + '@protobufjs/codegen': 2.0.4 + '@protobufjs/eventemitter': 1.1.0 + '@protobufjs/fetch': 1.1.0 + '@protobufjs/float': 1.0.2 + '@protobufjs/inquire': 1.1.0 + '@protobufjs/path': 1.1.2 + '@protobufjs/pool': 1.1.0 + '@protobufjs/utf8': 1.1.0 + '@types/node': 24.2.1 + long: 5.3.2 + + require-directory@2.1.1: {} + + string-width@4.2.3: + dependencies: + emoji-regex: 8.0.0 + is-fullwidth-code-point: 3.0.0 + strip-ansi: 6.0.1 + + strip-ansi@6.0.1: + dependencies: + ansi-regex: 5.0.1 + + ts-poet@6.12.0: + dependencies: + dprint-node: 1.0.8 + + ts-proto-descriptors@2.0.0: + dependencies: + '@bufbuild/protobuf': 2.6.3 + + ts-proto@2.7.7: + dependencies: + '@bufbuild/protobuf': 2.6.3 + case-anything: 2.1.13 + ts-poet: 6.12.0 + ts-proto-descriptors: 2.0.0 + + undici-types@7.10.0: {} + + wrap-ansi@7.0.0: + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + + y18n@5.0.8: {} + + yargs-parser@21.1.1: {} + + yargs@17.7.2: + dependencies: + cliui: 8.0.1 + escalade: 3.2.0 + get-caller-file: 2.0.5 + require-directory: 2.1.1 + string-width: 4.2.3 + y18n: 5.0.8 + yargs-parser: 21.1.1 diff --git a/typescript-sdk/src/config/config.ts b/typescript-sdk/src/config/config.ts new file mode 100644 index 0000000..6effb81 --- /dev/null +++ b/typescript-sdk/src/config/config.ts @@ -0,0 +1,39 @@ +import * as yaml from "js-yaml"; + +export interface FumaroleConfigOptions { + endpoint: string; + xToken?: string; + maxDecodingMessageSizeBytes?: number; + xMetadata?: 
Record; +} + +export class FumaroleConfig { + readonly endpoint: string; + readonly xToken?: string; + readonly maxDecodingMessageSizeBytes: number; + readonly xMetadata: Record; + + static readonly DEFAULT_MAX_DECODING_MESSAGE_SIZE = 512_000_000; + + constructor(options: FumaroleConfigOptions) { + this.endpoint = options.endpoint; + this.xToken = options.xToken; + this.maxDecodingMessageSizeBytes = + options.maxDecodingMessageSizeBytes ?? + FumaroleConfig.DEFAULT_MAX_DECODING_MESSAGE_SIZE; + this.xMetadata = options.xMetadata ?? {}; + } + + static fromYaml(yamlContent: string): FumaroleConfig { + const data = yaml.load(yamlContent) as Record; + + return new FumaroleConfig({ + endpoint: data.endpoint, + xToken: data["x-token"] || data.x_token, + maxDecodingMessageSizeBytes: + data.max_decoding_message_size_bytes ?? + FumaroleConfig.DEFAULT_MAX_DECODING_MESSAGE_SIZE, + xMetadata: data["x-metadata"] ?? {}, + }); + } +} diff --git a/typescript-sdk/src/connectivity.ts b/typescript-sdk/src/connectivity.ts new file mode 100644 index 0000000..b6222a4 --- /dev/null +++ b/typescript-sdk/src/connectivity.ts @@ -0,0 +1,90 @@ +import { ChannelCredentials, Metadata, credentials } from "@grpc/grpc-js"; +import { FumaroleClient } from "./grpc/fumarole"; +import { FumaroleConfig } from "./config/config"; + +const X_TOKEN_HEADER = "x-token"; + +class TritonAuthMetadataGenerator { + constructor(private readonly xToken: string) {} + + generateMetadata(): Promise { + const metadata = new Metadata(); + metadata.set(X_TOKEN_HEADER, this.xToken); + return Promise.resolve(metadata); + } +} + +interface CallMetadataOptions { + metadata?: Metadata; +} + +class MetadataProvider { + private metadata: Metadata; + + constructor(metadata: Record) { + this.metadata = new Metadata(); + Object.entries(metadata).forEach(([key, value]) => { + this.metadata.set(key, value); + }); + } + + getMetadata(): Promise { + return Promise.resolve(this.metadata); + } +} + +export class FumaroleGrpcConnector { + 
private static readonly logger = console; + + constructor( + private readonly config: FumaroleConfig, + private readonly endpoint: string + ) {} + + async connect( + grpcOptions: Record = {} + ): Promise { + const options = { + "grpc.max_receive_message_length": 111111110, + ...grpcOptions, + }; + + let channelCredentials: ChannelCredentials; + const metadataProvider = new MetadataProvider(this.config.xMetadata); + const callCredentials = credentials.createFromMetadataGenerator( + metadataProvider.getMetadata.bind(metadataProvider) + ); + + if (this.config.xToken) { + // SSL credentials for HTTPS endpoint + const sslCreds = credentials.createSsl(); + + // Create call credentials with token + const authGenerator = new TritonAuthMetadataGenerator(this.config.xToken); + const callCreds = credentials.createFromMetadataGenerator( + authGenerator.generateMetadata.bind(authGenerator) + ); + + // Combine credentials + channelCredentials = credentials.combineChannelCredentials( + sslCreds, + callCreds + ); + FumaroleGrpcConnector.logger.debug( + "Using secure channel with x-token authentication" + ); + } else { + channelCredentials = credentials.createInsecure(); + FumaroleGrpcConnector.logger.debug( + "Using insecure channel without authentication" + ); + } + + // Create the client with credentials and options + const client = new FumaroleClient(this.endpoint, channelCredentials, { + ...options, + }); + + return client; + } +} diff --git a/typescript-sdk/src/grpc/connectivity.ts b/typescript-sdk/src/grpc/connectivity.ts new file mode 100644 index 0000000..e69de29 diff --git a/typescript-sdk/src/grpc/fumarole.ts b/typescript-sdk/src/grpc/fumarole.ts index e5f9c33..da5e989 100644 --- a/typescript-sdk/src/grpc/fumarole.ts +++ b/typescript-sdk/src/grpc/fumarole.ts @@ -1,82 +1,51 @@ // Code generated by protoc-gen-ts_proto. DO NOT EDIT. 
// versions: -// protoc-gen-ts_proto v1.181.2 -// protoc v6.30.0 +// protoc-gen-ts_proto v2.7.7 +// protoc v3.12.4 // source: fumarole.proto /* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; import { type CallOptions, - ChannelCredentials, + type ChannelCredentials, Client, - ClientDuplexStream, + type ClientDuplexStream, type ClientOptions, + type ClientReadableStream, type ClientUnaryCall, - handleBidiStreamingCall, + type handleBidiStreamingCall, + type handleServerStreamingCall, type handleUnaryCall, makeGenericClientConstructor, - Metadata, + type Metadata, type ServiceError, type UntypedServiceImplementation, } from "@grpc/grpc-js"; -import Long from "long"; -import _m0 from "protobufjs/minimal"; import { CommitmentLevel, commitmentLevelFromJSON, commitmentLevelToJSON, SubscribeRequestFilterAccounts, + SubscribeRequestFilterBlocksMeta, + SubscribeRequestFilterEntry, SubscribeRequestFilterTransactions, SubscribeUpdate, } from "./geyser"; export const protobufPackage = "fumarole"; -export enum ConsumerGroupType { - STATIC = 0, - UNRECOGNIZED = -1, -} - -export function consumerGroupTypeFromJSON(object: any): ConsumerGroupType { - switch (object) { - case 0: - case "STATIC": - return ConsumerGroupType.STATIC; - case -1: - case "UNRECOGNIZED": - default: - return ConsumerGroupType.UNRECOGNIZED; - } -} - -export function consumerGroupTypeToJSON(object: ConsumerGroupType): string { - switch (object) { - case ConsumerGroupType.STATIC: - return "STATIC"; - case ConsumerGroupType.UNRECOGNIZED: - default: - return "UNRECOGNIZED"; - } -} - export enum InitialOffsetPolicy { - EARLIEST = 0, - LATEST = 1, - SLOT = 2, + /** LATEST - FROM_SLOT = 1; */ + LATEST = 0, UNRECOGNIZED = -1, } export function initialOffsetPolicyFromJSON(object: any): InitialOffsetPolicy { switch (object) { case 0: - case "EARLIEST": - return InitialOffsetPolicy.EARLIEST; - case 1: case "LATEST": return InitialOffsetPolicy.LATEST; - case 2: - case "SLOT": - 
return InitialOffsetPolicy.SLOT; case -1: case "UNRECOGNIZED": default: @@ -86,70 +55,41 @@ export function initialOffsetPolicyFromJSON(object: any): InitialOffsetPolicy { export function initialOffsetPolicyToJSON(object: InitialOffsetPolicy): string { switch (object) { - case InitialOffsetPolicy.EARLIEST: - return "EARLIEST"; case InitialOffsetPolicy.LATEST: return "LATEST"; - case InitialOffsetPolicy.SLOT: - return "SLOT"; case InitialOffsetPolicy.UNRECOGNIZED: default: return "UNRECOGNIZED"; } } -export enum EventSubscriptionPolicy { - ACCOUNT_UPDATE_ONLY = 0, - TRANSACTION_ONLY = 1, - BOTH = 2, - UNRECOGNIZED = -1, +export interface GetChainTipRequest { + blockchainId: Uint8Array; } -export function eventSubscriptionPolicyFromJSON(object: any): EventSubscriptionPolicy { - switch (object) { - case 0: - case "ACCOUNT_UPDATE_ONLY": - return EventSubscriptionPolicy.ACCOUNT_UPDATE_ONLY; - case 1: - case "TRANSACTION_ONLY": - return EventSubscriptionPolicy.TRANSACTION_ONLY; - case 2: - case "BOTH": - return EventSubscriptionPolicy.BOTH; - case -1: - case "UNRECOGNIZED": - default: - return EventSubscriptionPolicy.UNRECOGNIZED; - } +export interface GetChainTipResponse { + blockchainId: Uint8Array; + shardToMaxOffsetMap: { [key: number]: string }; } -export function eventSubscriptionPolicyToJSON(object: EventSubscriptionPolicy): string { - switch (object) { - case EventSubscriptionPolicy.ACCOUNT_UPDATE_ONLY: - return "ACCOUNT_UPDATE_ONLY"; - case EventSubscriptionPolicy.TRANSACTION_ONLY: - return "TRANSACTION_ONLY"; - case EventSubscriptionPolicy.BOTH: - return "BOTH"; - case EventSubscriptionPolicy.UNRECOGNIZED: - default: - return "UNRECOGNIZED"; - } +export interface GetChainTipResponse_ShardToMaxOffsetMapEntry { + key: number; + value: string; } -export interface ListAvailableCommitmentLevelsRequest { +export interface VersionRequest { } -export interface ListAvailableCommitmentLevelsResponse { - commitmentLevels: CommitmentLevel[]; +export interface 
VersionResponse { + version: string; } export interface GetConsumerGroupInfoRequest { - consumerGroupLabel: string; + consumerGroupName: string; } export interface DeleteConsumerGroupRequest { - consumerGroupLabel: string; + consumerGroupName: string; } export interface DeleteConsumerGroupResponse { @@ -165,228 +105,534 @@ export interface ListConsumerGroupsResponse { export interface ConsumerGroupInfo { id: string; - consumerGroupLabel: string; - consumerGroupType: ConsumerGroupType; - memberCount: number; - commitmentLevel: CommitmentLevel; - eventSubscriptionPolicy: EventSubscriptionPolicy; + consumerGroupName: string; isStale: boolean; + blockchainId: Uint8Array; } export interface GetSlotLagInfoRequest { - consumerGroupLabel: string; -} - -export interface GetSlotLagInfoResponse { - maxSlotSeen: string; - globalMaxSlot: string; + consumerGroupName: string; } -export interface SubscribeRequest { - consumerGroupLabel: string; - consumerId?: number | undefined; +export interface BlockFilters { accounts: { [key: string]: SubscribeRequestFilterAccounts }; transactions: { [key: string]: SubscribeRequestFilterTransactions }; + entries: { [key: string]: SubscribeRequestFilterEntry }; + blocksMeta: { [key: string]: SubscribeRequestFilterBlocksMeta }; } -export interface SubscribeRequest_AccountsEntry { +export interface BlockFilters_AccountsEntry { key: string; value: SubscribeRequestFilterAccounts | undefined; } -export interface SubscribeRequest_TransactionsEntry { +export interface BlockFilters_TransactionsEntry { key: string; value: SubscribeRequestFilterTransactions | undefined; } -export interface CreateStaticConsumerGroupResponse { - groupId: string; +export interface BlockFilters_EntriesEntry { + key: string; + value: SubscribeRequestFilterEntry | undefined; +} + +export interface BlockFilters_BlocksMetaEntry { + key: string; + value: SubscribeRequestFilterBlocksMeta | undefined; } -export interface CreateStaticConsumerGroupRequest { - consumerGroupLabel: 
string; - memberCount?: number | undefined; - initialOffsetPolicy: InitialOffsetPolicy; - commitmentLevel: CommitmentLevel; - eventSubscriptionPolicy: EventSubscriptionPolicy; - atSlot?: string | undefined; +export interface DownloadBlockShard { + blockchainId: Uint8Array; + blockUid: Uint8Array; + shardIdx: number; + blockFilters?: BlockFilters | undefined; +} + +export interface Ping { + pingId: number; +} + +export interface Pong { + pingId: number; +} + +export interface DataCommand { + downloadBlockShard?: DownloadBlockShard | undefined; + filterUpdate?: BlockFilters | undefined; +} + +export interface BlockShardDownloadFinish { +} + +export interface BlockNotFound { + blockchainId: Uint8Array; + blockUid: Uint8Array; + shardIdx: number; +} + +export interface DataError { + notFound?: BlockNotFound | undefined; +} + +export interface DataResponse { + update?: SubscribeUpdate | undefined; + blockShardDownloadFinish?: BlockShardDownloadFinish | undefined; +} + +export interface CommitOffset { + offset: string; + shardId: number; +} + +export interface PollBlockchainHistory { + shardId: number; + from?: string | undefined; + limit?: string | undefined; } -export interface GetOldestSlotRequest { +export interface BlockchainEvent { + offset: string; + blockchainId: Uint8Array; + blockUid: Uint8Array; + numShards: number; + slot: string; + parentSlot?: string | undefined; commitmentLevel: CommitmentLevel; + blockchainShardId: number; + deadError?: string | undefined; } -export interface GetOldestSlotResponse { - slot?: string | undefined; +export interface BlockchainHistory { + events: BlockchainEvent[]; } -function createBaseListAvailableCommitmentLevelsRequest(): ListAvailableCommitmentLevelsRequest { - return {}; +export interface JoinControlPlane { + consumerGroupName?: string | undefined; +} + +export interface ControlCommand { + initialJoin?: JoinControlPlane | undefined; + commitOffset?: CommitOffset | undefined; + pollHist?: PollBlockchainHistory | 
undefined; + ping?: Ping | undefined; +} + +export interface ControlResponse { + init?: InitialConsumerGroupState | undefined; + commitOffset?: CommitOffsetResult | undefined; + pollHist?: BlockchainHistory | undefined; + pong?: Pong | undefined; +} + +export interface CommitOffsetResult { + offset: string; + shardId: number; +} + +export interface InitialConsumerGroupState { + blockchainId: Uint8Array; + lastCommittedOffsets: { [key: number]: string }; } -export const ListAvailableCommitmentLevelsRequest = { - encode(_: ListAvailableCommitmentLevelsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export interface InitialConsumerGroupState_LastCommittedOffsetsEntry { + key: number; + value: string; +} + +export interface CreateConsumerGroupResponse { + consumerGroupId: string; +} + +export interface CreateConsumerGroupRequest { + consumerGroupName: string; + /** optional uint64 from_slot = 3; */ + initialOffsetPolicy: InitialOffsetPolicy; +} + +function createBaseGetChainTipRequest(): GetChainTipRequest { + return { blockchainId: new Uint8Array(0) }; +} + +export const GetChainTipRequest: MessageFns = { + encode(message: GetChainTipRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.blockchainId.length !== 0) { + writer.uint32(10).bytes(message.blockchainId); + } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): ListAvailableCommitmentLevelsRequest { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseListAvailableCommitmentLevelsRequest(); + decode(input: BinaryReader | Uint8Array, length?: number): GetChainTipRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGetChainTipRequest(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.blockchainId = reader.bytes(); + continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, - fromJSON(_: any): ListAvailableCommitmentLevelsRequest { - return {}; + fromJSON(object: any): GetChainTipRequest { + return { blockchainId: isSet(object.blockchainId) ? bytesFromBase64(object.blockchainId) : new Uint8Array(0) }; }, - toJSON(_: ListAvailableCommitmentLevelsRequest): unknown { + toJSON(message: GetChainTipRequest): unknown { const obj: any = {}; + if (message.blockchainId.length !== 0) { + obj.blockchainId = base64FromBytes(message.blockchainId); + } return obj; }, - create, I>>( - base?: I, - ): ListAvailableCommitmentLevelsRequest { - return ListAvailableCommitmentLevelsRequest.fromPartial(base ?? ({} as any)); + create, I>>(base?: I): GetChainTipRequest { + return GetChainTipRequest.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>( - _: I, - ): ListAvailableCommitmentLevelsRequest { - const message = createBaseListAvailableCommitmentLevelsRequest(); + fromPartial, I>>(object: I): GetChainTipRequest { + const message = createBaseGetChainTipRequest(); + message.blockchainId = object.blockchainId ?? 
new Uint8Array(0); return message; }, }; -function createBaseListAvailableCommitmentLevelsResponse(): ListAvailableCommitmentLevelsResponse { - return { commitmentLevels: [] }; +function createBaseGetChainTipResponse(): GetChainTipResponse { + return { blockchainId: new Uint8Array(0), shardToMaxOffsetMap: {} }; } -export const ListAvailableCommitmentLevelsResponse = { - encode(message: ListAvailableCommitmentLevelsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - writer.uint32(10).fork(); - for (const v of message.commitmentLevels) { - writer.int32(v); +export const GetChainTipResponse: MessageFns = { + encode(message: GetChainTipResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.blockchainId.length !== 0) { + writer.uint32(10).bytes(message.blockchainId); } - writer.ldelim(); + Object.entries(message.shardToMaxOffsetMap).forEach(([key, value]) => { + GetChainTipResponse_ShardToMaxOffsetMapEntry.encode({ key: key as any, value }, writer.uint32(18).fork()).join(); + }); return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): ListAvailableCommitmentLevelsResponse { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseListAvailableCommitmentLevelsResponse(); + decode(input: BinaryReader | Uint8Array, length?: number): GetChainTipResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGetChainTipResponse(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: - if (tag === 8) { - message.commitmentLevels.push(reader.int32() as any); + case 1: { + if (tag !== 10) { + break; + } + + message.blockchainId = reader.bytes(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } - continue; + const entry2 = GetChainTipResponse_ShardToMaxOffsetMapEntry.decode(reader, reader.uint32()); + if (entry2.value !== undefined) { + message.shardToMaxOffsetMap[entry2.key] = entry2.value; } + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): GetChainTipResponse { + return { + blockchainId: isSet(object.blockchainId) ? bytesFromBase64(object.blockchainId) : new Uint8Array(0), + shardToMaxOffsetMap: isObject(object.shardToMaxOffsetMap) + ? Object.entries(object.shardToMaxOffsetMap).reduce<{ [key: number]: string }>((acc, [key, value]) => { + acc[globalThis.Number(key)] = String(value); + return acc; + }, {}) + : {}, + }; + }, + + toJSON(message: GetChainTipResponse): unknown { + const obj: any = {}; + if (message.blockchainId.length !== 0) { + obj.blockchainId = base64FromBytes(message.blockchainId); + } + if (message.shardToMaxOffsetMap) { + const entries = Object.entries(message.shardToMaxOffsetMap); + if (entries.length > 0) { + obj.shardToMaxOffsetMap = {}; + entries.forEach(([k, v]) => { + obj.shardToMaxOffsetMap[k] = v; + }); + } + } + return obj; + }, + + create, I>>(base?: I): GetChainTipResponse { + return GetChainTipResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): GetChainTipResponse { + const message = createBaseGetChainTipResponse(); + message.blockchainId = object.blockchainId ?? new Uint8Array(0); + message.shardToMaxOffsetMap = Object.entries(object.shardToMaxOffsetMap ?? 
{}).reduce<{ [key: number]: string }>( + (acc, [key, value]) => { + if (value !== undefined) { + acc[globalThis.Number(key)] = globalThis.String(value); + } + return acc; + }, + {}, + ); + return message; + }, +}; + +function createBaseGetChainTipResponse_ShardToMaxOffsetMapEntry(): GetChainTipResponse_ShardToMaxOffsetMapEntry { + return { key: 0, value: "0" }; +} + +export const GetChainTipResponse_ShardToMaxOffsetMapEntry: MessageFns = { + encode( + message: GetChainTipResponse_ShardToMaxOffsetMapEntry, + writer: BinaryWriter = new BinaryWriter(), + ): BinaryWriter { + if (message.key !== 0) { + writer.uint32(8).int32(message.key); + } + if (message.value !== "0") { + writer.uint32(16).int64(message.value); + } + return writer; + }, - if (tag === 10) { - const end2 = reader.uint32() + reader.pos; - while (reader.pos < end2) { - message.commitmentLevels.push(reader.int32() as any); - } + decode(input: BinaryReader | Uint8Array, length?: number): GetChainTipResponse_ShardToMaxOffsetMapEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetChainTipResponse_ShardToMaxOffsetMapEntry(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } - continue; + message.key = reader.int32(); + continue; + } + case 2: { + if (tag !== 16) { + break; } - break; + message.value = reader.int64().toString(); + continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, - fromJSON(object: any): ListAvailableCommitmentLevelsResponse { + fromJSON(object: any): GetChainTipResponse_ShardToMaxOffsetMapEntry { return { - commitmentLevels: globalThis.Array.isArray(object?.commitmentLevels) - ? object.commitmentLevels.map((e: any) => commitmentLevelFromJSON(e)) - : [], + key: isSet(object.key) ? 
globalThis.Number(object.key) : 0, + value: isSet(object.value) ? globalThis.String(object.value) : "0", }; }, - toJSON(message: ListAvailableCommitmentLevelsResponse): unknown { + toJSON(message: GetChainTipResponse_ShardToMaxOffsetMapEntry): unknown { const obj: any = {}; - if (message.commitmentLevels?.length) { - obj.commitmentLevels = message.commitmentLevels.map((e) => commitmentLevelToJSON(e)); + if (message.key !== 0) { + obj.key = Math.round(message.key); + } + if (message.value !== "0") { + obj.value = message.value; } return obj; }, - create, I>>( + create, I>>( base?: I, - ): ListAvailableCommitmentLevelsResponse { - return ListAvailableCommitmentLevelsResponse.fromPartial(base ?? ({} as any)); + ): GetChainTipResponse_ShardToMaxOffsetMapEntry { + return GetChainTipResponse_ShardToMaxOffsetMapEntry.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>( + fromPartial, I>>( object: I, - ): ListAvailableCommitmentLevelsResponse { - const message = createBaseListAvailableCommitmentLevelsResponse(); - message.commitmentLevels = object.commitmentLevels?.map((e) => e) || []; + ): GetChainTipResponse_ShardToMaxOffsetMapEntry { + const message = createBaseGetChainTipResponse_ShardToMaxOffsetMapEntry(); + message.key = object.key ?? 0; + message.value = object.value ?? "0"; + return message; + }, +}; + +function createBaseVersionRequest(): VersionRequest { + return {}; +} + +export const VersionRequest: MessageFns = { + encode(_: VersionRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): VersionRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseVersionRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(_: any): VersionRequest { + return {}; + }, + + toJSON(_: VersionRequest): unknown { + const obj: any = {}; + return obj; + }, + + create, I>>(base?: I): VersionRequest { + return VersionRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(_: I): VersionRequest { + const message = createBaseVersionRequest(); + return message; + }, +}; + +function createBaseVersionResponse(): VersionResponse { + return { version: "" }; +} + +export const VersionResponse: MessageFns = { + encode(message: VersionResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.version !== "") { + writer.uint32(10).string(message.version); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): VersionResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseVersionResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.version = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): VersionResponse { + return { version: isSet(object.version) ? globalThis.String(object.version) : "" }; + }, + + toJSON(message: VersionResponse): unknown { + const obj: any = {}; + if (message.version !== "") { + obj.version = message.version; + } + return obj; + }, + + create, I>>(base?: I): VersionResponse { + return VersionResponse.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): VersionResponse { + const message = createBaseVersionResponse(); + message.version = object.version ?? ""; return message; }, }; function createBaseGetConsumerGroupInfoRequest(): GetConsumerGroupInfoRequest { - return { consumerGroupLabel: "" }; + return { consumerGroupName: "" }; } -export const GetConsumerGroupInfoRequest = { - encode(message: GetConsumerGroupInfoRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.consumerGroupLabel !== "") { - writer.uint32(10).string(message.consumerGroupLabel); +export const GetConsumerGroupInfoRequest: MessageFns = { + encode(message: GetConsumerGroupInfoRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.consumerGroupName !== "") { + writer.uint32(10).string(message.consumerGroupName); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): GetConsumerGroupInfoRequest { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): GetConsumerGroupInfoRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseGetConsumerGroupInfoRequest(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } - message.consumerGroupLabel = reader.string(); + message.consumerGroupName = reader.string(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, fromJSON(object: any): GetConsumerGroupInfoRequest { - return { consumerGroupLabel: isSet(object.consumerGroupLabel) ? globalThis.String(object.consumerGroupLabel) : "" }; + return { consumerGroupName: isSet(object.consumerGroupName) ? 
globalThis.String(object.consumerGroupName) : "" }; }, toJSON(message: GetConsumerGroupInfoRequest): unknown { const obj: any = {}; - if (message.consumerGroupLabel !== "") { - obj.consumerGroupLabel = message.consumerGroupLabel; + if (message.consumerGroupName !== "") { + obj.consumerGroupName = message.consumerGroupName; } return obj; }, @@ -396,54 +642,55 @@ export const GetConsumerGroupInfoRequest = { }, fromPartial, I>>(object: I): GetConsumerGroupInfoRequest { const message = createBaseGetConsumerGroupInfoRequest(); - message.consumerGroupLabel = object.consumerGroupLabel ?? ""; + message.consumerGroupName = object.consumerGroupName ?? ""; return message; }, }; function createBaseDeleteConsumerGroupRequest(): DeleteConsumerGroupRequest { - return { consumerGroupLabel: "" }; + return { consumerGroupName: "" }; } -export const DeleteConsumerGroupRequest = { - encode(message: DeleteConsumerGroupRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.consumerGroupLabel !== "") { - writer.uint32(10).string(message.consumerGroupLabel); +export const DeleteConsumerGroupRequest: MessageFns = { + encode(message: DeleteConsumerGroupRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.consumerGroupName !== "") { + writer.uint32(10).string(message.consumerGroupName); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): DeleteConsumerGroupRequest { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): DeleteConsumerGroupRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseDeleteConsumerGroupRequest(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } - message.consumerGroupLabel = reader.string(); + message.consumerGroupName = reader.string(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, fromJSON(object: any): DeleteConsumerGroupRequest { - return { consumerGroupLabel: isSet(object.consumerGroupLabel) ? globalThis.String(object.consumerGroupLabel) : "" }; + return { consumerGroupName: isSet(object.consumerGroupName) ? globalThis.String(object.consumerGroupName) : "" }; }, toJSON(message: DeleteConsumerGroupRequest): unknown { const obj: any = {}; - if (message.consumerGroupLabel !== "") { - obj.consumerGroupLabel = message.consumerGroupLabel; + if (message.consumerGroupName !== "") { + obj.consumerGroupName = message.consumerGroupName; } return obj; }, @@ -453,7 +700,7 @@ export const DeleteConsumerGroupRequest = { }, fromPartial, I>>(object: I): DeleteConsumerGroupRequest { const message = createBaseDeleteConsumerGroupRequest(); - message.consumerGroupLabel = object.consumerGroupLabel ?? ""; + message.consumerGroupName = object.consumerGroupName ?? 
""; return message; }, }; @@ -462,33 +709,34 @@ function createBaseDeleteConsumerGroupResponse(): DeleteConsumerGroupResponse { return { success: false }; } -export const DeleteConsumerGroupResponse = { - encode(message: DeleteConsumerGroupResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const DeleteConsumerGroupResponse: MessageFns = { + encode(message: DeleteConsumerGroupResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.success !== false) { writer.uint32(8).bool(message.success); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): DeleteConsumerGroupResponse { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): DeleteConsumerGroupResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseDeleteConsumerGroupResponse(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } message.success = reader.bool(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -519,14 +767,14 @@ function createBaseListConsumerGroupsRequest(): ListConsumerGroupsRequest { return {}; } -export const ListConsumerGroupsRequest = { - encode(_: ListConsumerGroupsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const ListConsumerGroupsRequest: MessageFns = { + encode(_: ListConsumerGroupsRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): ListConsumerGroupsRequest { - const reader = input instanceof _m0.Reader ? 
input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): ListConsumerGroupsRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseListConsumerGroupsRequest(); while (reader.pos < end) { const tag = reader.uint32(); @@ -535,7 +783,7 @@ export const ListConsumerGroupsRequest = { if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -562,33 +810,34 @@ function createBaseListConsumerGroupsResponse(): ListConsumerGroupsResponse { return { consumerGroups: [] }; } -export const ListConsumerGroupsResponse = { - encode(message: ListConsumerGroupsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const ListConsumerGroupsResponse: MessageFns = { + encode(message: ListConsumerGroupsResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { for (const v of message.consumerGroups) { - ConsumerGroupInfo.encode(v!, writer.uint32(10).fork()).ldelim(); + ConsumerGroupInfo.encode(v!, writer.uint32(10).fork()).join(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): ListConsumerGroupsResponse { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): ListConsumerGroupsResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseListConsumerGroupsResponse(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.consumerGroups.push(ConsumerGroupInfo.decode(reader, reader.uint32())); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -620,104 +869,70 @@ export const ListConsumerGroupsResponse = { }; function createBaseConsumerGroupInfo(): ConsumerGroupInfo { - return { - id: "", - consumerGroupLabel: "", - consumerGroupType: 0, - memberCount: 0, - commitmentLevel: 0, - eventSubscriptionPolicy: 0, - isStale: false, - }; + return { id: "", consumerGroupName: "", isStale: false, blockchainId: new Uint8Array(0) }; } -export const ConsumerGroupInfo = { - encode(message: ConsumerGroupInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const ConsumerGroupInfo: MessageFns = { + encode(message: ConsumerGroupInfo, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.id !== "") { writer.uint32(10).string(message.id); } - if (message.consumerGroupLabel !== "") { - writer.uint32(18).string(message.consumerGroupLabel); - } - if (message.consumerGroupType !== 0) { - writer.uint32(24).int32(message.consumerGroupType); - } - if (message.memberCount !== 0) { - writer.uint32(32).uint32(message.memberCount); - } - if (message.commitmentLevel !== 0) { - writer.uint32(40).int32(message.commitmentLevel); - } - if (message.eventSubscriptionPolicy !== 0) { - writer.uint32(48).int32(message.eventSubscriptionPolicy); + if (message.consumerGroupName !== "") { + writer.uint32(18).string(message.consumerGroupName); } if (message.isStale !== false) { - writer.uint32(56).bool(message.isStale); + writer.uint32(24).bool(message.isStale); + } + if (message.blockchainId.length !== 0) { + writer.uint32(34).bytes(message.blockchainId); } return writer; }, - decode(input: 
_m0.Reader | Uint8Array, length?: number): ConsumerGroupInfo { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): ConsumerGroupInfo { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseConsumerGroupInfo(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.id = reader.string(); continue; - case 2: + } + case 2: { if (tag !== 18) { break; } - message.consumerGroupLabel = reader.string(); + message.consumerGroupName = reader.string(); continue; - case 3: + } + case 3: { if (tag !== 24) { break; } - message.consumerGroupType = reader.int32() as any; - continue; - case 4: - if (tag !== 32) { - break; - } - - message.memberCount = reader.uint32(); - continue; - case 5: - if (tag !== 40) { - break; - } - - message.commitmentLevel = reader.int32() as any; - continue; - case 6: - if (tag !== 48) { - break; - } - - message.eventSubscriptionPolicy = reader.int32() as any; + message.isStale = reader.bool(); continue; - case 7: - if (tag !== 56) { + } + case 4: { + if (tag !== 34) { break; } - message.isStale = reader.bool(); + message.blockchainId = reader.bytes(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -725,14 +940,9 @@ export const ConsumerGroupInfo = { fromJSON(object: any): ConsumerGroupInfo { return { id: isSet(object.id) ? globalThis.String(object.id) : "", - consumerGroupLabel: isSet(object.consumerGroupLabel) ? globalThis.String(object.consumerGroupLabel) : "", - consumerGroupType: isSet(object.consumerGroupType) ? 
consumerGroupTypeFromJSON(object.consumerGroupType) : 0, - memberCount: isSet(object.memberCount) ? globalThis.Number(object.memberCount) : 0, - commitmentLevel: isSet(object.commitmentLevel) ? commitmentLevelFromJSON(object.commitmentLevel) : 0, - eventSubscriptionPolicy: isSet(object.eventSubscriptionPolicy) - ? eventSubscriptionPolicyFromJSON(object.eventSubscriptionPolicy) - : 0, + consumerGroupName: isSet(object.consumerGroupName) ? globalThis.String(object.consumerGroupName) : "", isStale: isSet(object.isStale) ? globalThis.Boolean(object.isStale) : false, + blockchainId: isSet(object.blockchainId) ? bytesFromBase64(object.blockchainId) : new Uint8Array(0), }; }, @@ -741,23 +951,14 @@ export const ConsumerGroupInfo = { if (message.id !== "") { obj.id = message.id; } - if (message.consumerGroupLabel !== "") { - obj.consumerGroupLabel = message.consumerGroupLabel; + if (message.consumerGroupName !== "") { + obj.consumerGroupName = message.consumerGroupName; } - if (message.consumerGroupType !== 0) { - obj.consumerGroupType = consumerGroupTypeToJSON(message.consumerGroupType); + if (message.isStale !== false) { + obj.isStale = message.isStale; } - if (message.memberCount !== 0) { - obj.memberCount = Math.round(message.memberCount); - } - if (message.commitmentLevel !== 0) { - obj.commitmentLevel = commitmentLevelToJSON(message.commitmentLevel); - } - if (message.eventSubscriptionPolicy !== 0) { - obj.eventSubscriptionPolicy = eventSubscriptionPolicyToJSON(message.eventSubscriptionPolicy); - } - if (message.isStale !== false) { - obj.isStale = message.isStale; + if (message.blockchainId.length !== 0) { + obj.blockchainId = base64FromBytes(message.blockchainId); } return obj; }, @@ -768,59 +969,57 @@ export const ConsumerGroupInfo = { fromPartial, I>>(object: I): ConsumerGroupInfo { const message = createBaseConsumerGroupInfo(); message.id = object.id ?? ""; - message.consumerGroupLabel = object.consumerGroupLabel ?? 
""; - message.consumerGroupType = object.consumerGroupType ?? 0; - message.memberCount = object.memberCount ?? 0; - message.commitmentLevel = object.commitmentLevel ?? 0; - message.eventSubscriptionPolicy = object.eventSubscriptionPolicy ?? 0; + message.consumerGroupName = object.consumerGroupName ?? ""; message.isStale = object.isStale ?? false; + message.blockchainId = object.blockchainId ?? new Uint8Array(0); return message; }, }; function createBaseGetSlotLagInfoRequest(): GetSlotLagInfoRequest { - return { consumerGroupLabel: "" }; + return { consumerGroupName: "" }; } -export const GetSlotLagInfoRequest = { - encode(message: GetSlotLagInfoRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.consumerGroupLabel !== "") { - writer.uint32(10).string(message.consumerGroupLabel); +export const GetSlotLagInfoRequest: MessageFns = { + encode(message: GetSlotLagInfoRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.consumerGroupName !== "") { + writer.uint32(10).string(message.consumerGroupName); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): GetSlotLagInfoRequest { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): GetSlotLagInfoRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseGetSlotLagInfoRequest(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } - message.consumerGroupLabel = reader.string(); + message.consumerGroupName = reader.string(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, fromJSON(object: any): GetSlotLagInfoRequest { - return { consumerGroupLabel: isSet(object.consumerGroupLabel) ? globalThis.String(object.consumerGroupLabel) : "" }; + return { consumerGroupName: isSet(object.consumerGroupName) ? globalThis.String(object.consumerGroupName) : "" }; }, toJSON(message: GetSlotLagInfoRequest): unknown { const obj: any = {}; - if (message.consumerGroupLabel !== "") { - obj.consumerGroupLabel = message.consumerGroupLabel; + if (message.consumerGroupName !== "") { + obj.consumerGroupName = message.consumerGroupName; } return obj; }, @@ -830,160 +1029,94 @@ export const GetSlotLagInfoRequest = { }, fromPartial, I>>(object: I): GetSlotLagInfoRequest { const message = createBaseGetSlotLagInfoRequest(); - message.consumerGroupLabel = object.consumerGroupLabel ?? ""; - return message; - }, -}; - -function createBaseGetSlotLagInfoResponse(): GetSlotLagInfoResponse { - return { maxSlotSeen: "0", globalMaxSlot: "0" }; -} - -export const GetSlotLagInfoResponse = { - encode(message: GetSlotLagInfoResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.maxSlotSeen !== "0") { - writer.uint32(8).uint64(message.maxSlotSeen); - } - if (message.globalMaxSlot !== "0") { - writer.uint32(16).uint64(message.globalMaxSlot); - } - return writer; - }, - - decode(input: _m0.Reader | Uint8Array, length?: number): GetSlotLagInfoResponse { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseGetSlotLagInfoResponse(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - if (tag !== 8) { - break; - } - - message.maxSlotSeen = longToString(reader.uint64() as Long); - continue; - case 2: - if (tag !== 16) { - break; - } - - message.globalMaxSlot = longToString(reader.uint64() as Long); - continue; - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skipType(tag & 7); - } - return message; - }, - - fromJSON(object: any): GetSlotLagInfoResponse { - return { - maxSlotSeen: isSet(object.maxSlotSeen) ? globalThis.String(object.maxSlotSeen) : "0", - globalMaxSlot: isSet(object.globalMaxSlot) ? globalThis.String(object.globalMaxSlot) : "0", - }; - }, - - toJSON(message: GetSlotLagInfoResponse): unknown { - const obj: any = {}; - if (message.maxSlotSeen !== "0") { - obj.maxSlotSeen = message.maxSlotSeen; - } - if (message.globalMaxSlot !== "0") { - obj.globalMaxSlot = message.globalMaxSlot; - } - return obj; - }, - - create, I>>(base?: I): GetSlotLagInfoResponse { - return GetSlotLagInfoResponse.fromPartial(base ?? ({} as any)); - }, - fromPartial, I>>(object: I): GetSlotLagInfoResponse { - const message = createBaseGetSlotLagInfoResponse(); - message.maxSlotSeen = object.maxSlotSeen ?? "0"; - message.globalMaxSlot = object.globalMaxSlot ?? "0"; + message.consumerGroupName = object.consumerGroupName ?? 
""; return message; }, }; -function createBaseSubscribeRequest(): SubscribeRequest { - return { consumerGroupLabel: "", consumerId: undefined, accounts: {}, transactions: {} }; +function createBaseBlockFilters(): BlockFilters { + return { accounts: {}, transactions: {}, entries: {}, blocksMeta: {} }; } -export const SubscribeRequest = { - encode(message: SubscribeRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.consumerGroupLabel !== "") { - writer.uint32(10).string(message.consumerGroupLabel); - } - if (message.consumerId !== undefined) { - writer.uint32(16).uint32(message.consumerId); - } +export const BlockFilters: MessageFns = { + encode(message: BlockFilters, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { Object.entries(message.accounts).forEach(([key, value]) => { - SubscribeRequest_AccountsEntry.encode({ key: key as any, value }, writer.uint32(26).fork()).ldelim(); + BlockFilters_AccountsEntry.encode({ key: key as any, value }, writer.uint32(10).fork()).join(); }); Object.entries(message.transactions).forEach(([key, value]) => { - SubscribeRequest_TransactionsEntry.encode({ key: key as any, value }, writer.uint32(34).fork()).ldelim(); + BlockFilters_TransactionsEntry.encode({ key: key as any, value }, writer.uint32(18).fork()).join(); + }); + Object.entries(message.entries).forEach(([key, value]) => { + BlockFilters_EntriesEntry.encode({ key: key as any, value }, writer.uint32(26).fork()).join(); + }); + Object.entries(message.blocksMeta).forEach(([key, value]) => { + BlockFilters_BlocksMetaEntry.encode({ key: key as any, value }, writer.uint32(34).fork()).join(); }); return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeRequest { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseSubscribeRequest(); + decode(input: BinaryReader | Uint8Array, length?: number): BlockFilters { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBlockFilters(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } - message.consumerGroupLabel = reader.string(); + const entry1 = BlockFilters_AccountsEntry.decode(reader, reader.uint32()); + if (entry1.value !== undefined) { + message.accounts[entry1.key] = entry1.value; + } continue; - case 2: - if (tag !== 16) { + } + case 2: { + if (tag !== 18) { break; } - message.consumerId = reader.uint32(); + const entry2 = BlockFilters_TransactionsEntry.decode(reader, reader.uint32()); + if (entry2.value !== undefined) { + message.transactions[entry2.key] = entry2.value; + } continue; - case 3: + } + case 3: { if (tag !== 26) { break; } - const entry3 = SubscribeRequest_AccountsEntry.decode(reader, reader.uint32()); + const entry3 = BlockFilters_EntriesEntry.decode(reader, reader.uint32()); if (entry3.value !== undefined) { - message.accounts[entry3.key] = entry3.value; + message.entries[entry3.key] = entry3.value; } continue; - case 4: + } + case 4: { if (tag !== 34) { break; } - const entry4 = SubscribeRequest_TransactionsEntry.decode(reader, reader.uint32()); + const entry4 = BlockFilters_BlocksMetaEntry.decode(reader, reader.uint32()); if (entry4.value !== undefined) { - message.transactions[entry4.key] = entry4.value; + message.blocksMeta[entry4.key] = entry4.value; } continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, - fromJSON(object: any): SubscribeRequest { + fromJSON(object: any): BlockFilters { return { - consumerGroupLabel: isSet(object.consumerGroupLabel) ? 
globalThis.String(object.consumerGroupLabel) : "", - consumerId: isSet(object.consumerId) ? globalThis.Number(object.consumerId) : undefined, accounts: isObject(object.accounts) ? Object.entries(object.accounts).reduce<{ [key: string]: SubscribeRequestFilterAccounts }>( (acc, [key, value]) => { @@ -1002,17 +1135,26 @@ export const SubscribeRequest = { {}, ) : {}, + entries: isObject(object.entries) + ? Object.entries(object.entries).reduce<{ [key: string]: SubscribeRequestFilterEntry }>((acc, [key, value]) => { + acc[key] = SubscribeRequestFilterEntry.fromJSON(value); + return acc; + }, {}) + : {}, + blocksMeta: isObject(object.blocksMeta) + ? Object.entries(object.blocksMeta).reduce<{ [key: string]: SubscribeRequestFilterBlocksMeta }>( + (acc, [key, value]) => { + acc[key] = SubscribeRequestFilterBlocksMeta.fromJSON(value); + return acc; + }, + {}, + ) + : {}, }; }, - toJSON(message: SubscribeRequest): unknown { + toJSON(message: BlockFilters): unknown { const obj: any = {}; - if (message.consumerGroupLabel !== "") { - obj.consumerGroupLabel = message.consumerGroupLabel; - } - if (message.consumerId !== undefined) { - obj.consumerId = Math.round(message.consumerId); - } if (message.accounts) { const entries = Object.entries(message.accounts); if (entries.length > 0) { @@ -1031,16 +1173,32 @@ export const SubscribeRequest = { }); } } + if (message.entries) { + const entries = Object.entries(message.entries); + if (entries.length > 0) { + obj.entries = {}; + entries.forEach(([k, v]) => { + obj.entries[k] = SubscribeRequestFilterEntry.toJSON(v); + }); + } + } + if (message.blocksMeta) { + const entries = Object.entries(message.blocksMeta); + if (entries.length > 0) { + obj.blocksMeta = {}; + entries.forEach(([k, v]) => { + obj.blocksMeta[k] = SubscribeRequestFilterBlocksMeta.toJSON(v); + }); + } + } return obj; }, - create, I>>(base?: I): SubscribeRequest { - return SubscribeRequest.fromPartial(base ?? 
({} as any)); + create, I>>(base?: I): BlockFilters { + return BlockFilters.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): SubscribeRequest { - const message = createBaseSubscribeRequest(); - message.consumerGroupLabel = object.consumerGroupLabel ?? ""; - message.consumerId = object.consumerId ?? undefined; + fromPartial, I>>(object: I): BlockFilters { + const message = createBaseBlockFilters(); message.accounts = Object.entries(object.accounts ?? {}).reduce<{ [key: string]: SubscribeRequestFilterAccounts }>( (acc, [key, value]) => { if (value !== undefined) { @@ -1058,63 +1216,82 @@ export const SubscribeRequest = { } return acc; }, {}); + message.entries = Object.entries(object.entries ?? {}).reduce<{ [key: string]: SubscribeRequestFilterEntry }>( + (acc, [key, value]) => { + if (value !== undefined) { + acc[key] = SubscribeRequestFilterEntry.fromPartial(value); + } + return acc; + }, + {}, + ); + message.blocksMeta = Object.entries(object.blocksMeta ?? {}).reduce< + { [key: string]: SubscribeRequestFilterBlocksMeta } + >((acc, [key, value]) => { + if (value !== undefined) { + acc[key] = SubscribeRequestFilterBlocksMeta.fromPartial(value); + } + return acc; + }, {}); return message; }, }; -function createBaseSubscribeRequest_AccountsEntry(): SubscribeRequest_AccountsEntry { +function createBaseBlockFilters_AccountsEntry(): BlockFilters_AccountsEntry { return { key: "", value: undefined }; } -export const SubscribeRequest_AccountsEntry = { - encode(message: SubscribeRequest_AccountsEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const BlockFilters_AccountsEntry: MessageFns = { + encode(message: BlockFilters_AccountsEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.key !== "") { writer.uint32(10).string(message.key); } if (message.value !== undefined) { - SubscribeRequestFilterAccounts.encode(message.value, writer.uint32(18).fork()).ldelim(); + 
SubscribeRequestFilterAccounts.encode(message.value, writer.uint32(18).fork()).join(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeRequest_AccountsEntry { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseSubscribeRequest_AccountsEntry(); + decode(input: BinaryReader | Uint8Array, length?: number): BlockFilters_AccountsEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBlockFilters_AccountsEntry(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.key = reader.string(); continue; - case 2: + } + case 2: { if (tag !== 18) { break; } message.value = SubscribeRequestFilterAccounts.decode(reader, reader.uint32()); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, - fromJSON(object: any): SubscribeRequest_AccountsEntry { + fromJSON(object: any): BlockFilters_AccountsEntry { return { key: isSet(object.key) ? globalThis.String(object.key) : "", value: isSet(object.value) ? SubscribeRequestFilterAccounts.fromJSON(object.value) : undefined, }; }, - toJSON(message: SubscribeRequest_AccountsEntry): unknown { + toJSON(message: BlockFilters_AccountsEntry): unknown { const obj: any = {}; if (message.key !== "") { obj.key = message.key; @@ -1125,13 +1302,11 @@ export const SubscribeRequest_AccountsEntry = { return obj; }, - create, I>>(base?: I): SubscribeRequest_AccountsEntry { - return SubscribeRequest_AccountsEntry.fromPartial(base ?? ({} as any)); + create, I>>(base?: I): BlockFilters_AccountsEntry { + return BlockFilters_AccountsEntry.fromPartial(base ?? 
({} as any)); }, - fromPartial, I>>( - object: I, - ): SubscribeRequest_AccountsEntry { - const message = createBaseSubscribeRequest_AccountsEntry(); + fromPartial, I>>(object: I): BlockFilters_AccountsEntry { + const message = createBaseBlockFilters_AccountsEntry(); message.key = object.key ?? ""; message.value = (object.value !== undefined && object.value !== null) ? SubscribeRequestFilterAccounts.fromPartial(object.value) @@ -1140,59 +1315,61 @@ export const SubscribeRequest_AccountsEntry = { }, }; -function createBaseSubscribeRequest_TransactionsEntry(): SubscribeRequest_TransactionsEntry { +function createBaseBlockFilters_TransactionsEntry(): BlockFilters_TransactionsEntry { return { key: "", value: undefined }; } -export const SubscribeRequest_TransactionsEntry = { - encode(message: SubscribeRequest_TransactionsEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const BlockFilters_TransactionsEntry: MessageFns = { + encode(message: BlockFilters_TransactionsEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.key !== "") { writer.uint32(10).string(message.key); } if (message.value !== undefined) { - SubscribeRequestFilterTransactions.encode(message.value, writer.uint32(18).fork()).ldelim(); + SubscribeRequestFilterTransactions.encode(message.value, writer.uint32(18).fork()).join(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeRequest_TransactionsEntry { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseSubscribeRequest_TransactionsEntry(); + decode(input: BinaryReader | Uint8Array, length?: number): BlockFilters_TransactionsEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseBlockFilters_TransactionsEntry(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.key = reader.string(); continue; - case 2: + } + case 2: { if (tag !== 18) { break; } message.value = SubscribeRequestFilterTransactions.decode(reader, reader.uint32()); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, - fromJSON(object: any): SubscribeRequest_TransactionsEntry { + fromJSON(object: any): BlockFilters_TransactionsEntry { return { key: isSet(object.key) ? globalThis.String(object.key) : "", value: isSet(object.value) ? SubscribeRequestFilterTransactions.fromJSON(object.value) : undefined, }; }, - toJSON(message: SubscribeRequest_TransactionsEntry): unknown { + toJSON(message: BlockFilters_TransactionsEntry): unknown { const obj: any = {}; if (message.key !== "") { obj.key = message.key; @@ -1203,15 +1380,13 @@ export const SubscribeRequest_TransactionsEntry = { return obj; }, - create, I>>( - base?: I, - ): SubscribeRequest_TransactionsEntry { - return SubscribeRequest_TransactionsEntry.fromPartial(base ?? ({} as any)); + create, I>>(base?: I): BlockFilters_TransactionsEntry { + return BlockFilters_TransactionsEntry.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>( + fromPartial, I>>( object: I, - ): SubscribeRequest_TransactionsEntry { - const message = createBaseSubscribeRequest_TransactionsEntry(); + ): BlockFilters_TransactionsEntry { + const message = createBaseBlockFilters_TransactionsEntry(); message.key = object.key ?? ""; message.value = (object.value !== undefined && object.value !== null) ? 
SubscribeRequestFilterTransactions.fromPartial(object.value) @@ -1220,445 +1395,1969 @@ export const SubscribeRequest_TransactionsEntry = { }, }; -function createBaseCreateStaticConsumerGroupResponse(): CreateStaticConsumerGroupResponse { - return { groupId: "" }; +function createBaseBlockFilters_EntriesEntry(): BlockFilters_EntriesEntry { + return { key: "", value: undefined }; } -export const CreateStaticConsumerGroupResponse = { - encode(message: CreateStaticConsumerGroupResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.groupId !== "") { - writer.uint32(10).string(message.groupId); +export const BlockFilters_EntriesEntry: MessageFns = { + encode(message: BlockFilters_EntriesEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== undefined) { + SubscribeRequestFilterEntry.encode(message.value, writer.uint32(18).fork()).join(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): CreateStaticConsumerGroupResponse { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseCreateStaticConsumerGroupResponse(); + decode(input: BinaryReader | Uint8Array, length?: number): BlockFilters_EntriesEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseBlockFilters_EntriesEntry(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } - message.groupId = reader.string(); + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.value = SubscribeRequestFilterEntry.decode(reader, reader.uint32()); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, - fromJSON(object: any): CreateStaticConsumerGroupResponse { - return { groupId: isSet(object.groupId) ? globalThis.String(object.groupId) : "" }; + fromJSON(object: any): BlockFilters_EntriesEntry { + return { + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object.value) ? SubscribeRequestFilterEntry.fromJSON(object.value) : undefined, + }; }, - toJSON(message: CreateStaticConsumerGroupResponse): unknown { + toJSON(message: BlockFilters_EntriesEntry): unknown { const obj: any = {}; - if (message.groupId !== "") { - obj.groupId = message.groupId; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== undefined) { + obj.value = SubscribeRequestFilterEntry.toJSON(message.value); } return obj; }, - create, I>>( - base?: I, - ): CreateStaticConsumerGroupResponse { - return CreateStaticConsumerGroupResponse.fromPartial(base ?? ({} as any)); + create, I>>(base?: I): BlockFilters_EntriesEntry { + return BlockFilters_EntriesEntry.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>( - object: I, - ): CreateStaticConsumerGroupResponse { - const message = createBaseCreateStaticConsumerGroupResponse(); - message.groupId = object.groupId ?? ""; + fromPartial, I>>(object: I): BlockFilters_EntriesEntry { + const message = createBaseBlockFilters_EntriesEntry(); + message.key = object.key ?? ""; + message.value = (object.value !== undefined && object.value !== null) + ? 
SubscribeRequestFilterEntry.fromPartial(object.value) + : undefined; return message; }, }; -function createBaseCreateStaticConsumerGroupRequest(): CreateStaticConsumerGroupRequest { - return { - consumerGroupLabel: "", - memberCount: undefined, - initialOffsetPolicy: 0, - commitmentLevel: 0, - eventSubscriptionPolicy: 0, - atSlot: undefined, - }; +function createBaseBlockFilters_BlocksMetaEntry(): BlockFilters_BlocksMetaEntry { + return { key: "", value: undefined }; } -export const CreateStaticConsumerGroupRequest = { - encode(message: CreateStaticConsumerGroupRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.consumerGroupLabel !== "") { - writer.uint32(10).string(message.consumerGroupLabel); - } - if (message.memberCount !== undefined) { - writer.uint32(16).uint32(message.memberCount); - } - if (message.initialOffsetPolicy !== 0) { - writer.uint32(24).int32(message.initialOffsetPolicy); - } - if (message.commitmentLevel !== 0) { - writer.uint32(32).int32(message.commitmentLevel); - } - if (message.eventSubscriptionPolicy !== 0) { - writer.uint32(40).int32(message.eventSubscriptionPolicy); +export const BlockFilters_BlocksMetaEntry: MessageFns = { + encode(message: BlockFilters_BlocksMetaEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.key !== "") { + writer.uint32(10).string(message.key); } - if (message.atSlot !== undefined) { - writer.uint32(48).int64(message.atSlot); + if (message.value !== undefined) { + SubscribeRequestFilterBlocksMeta.encode(message.value, writer.uint32(18).fork()).join(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): CreateStaticConsumerGroupRequest { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseCreateStaticConsumerGroupRequest(); + decode(input: BinaryReader | Uint8Array, length?: number): BlockFilters_BlocksMetaEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBlockFilters_BlocksMetaEntry(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } - message.consumerGroupLabel = reader.string(); + message.key = reader.string(); continue; - case 2: - if (tag !== 16) { + } + case 2: { + if (tag !== 18) { break; } - message.memberCount = reader.uint32(); + message.value = SubscribeRequestFilterBlocksMeta.decode(reader, reader.uint32()); continue; - case 3: - if (tag !== 24) { + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): BlockFilters_BlocksMetaEntry { + return { + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object.value) ? SubscribeRequestFilterBlocksMeta.fromJSON(object.value) : undefined, + }; + }, + + toJSON(message: BlockFilters_BlocksMetaEntry): unknown { + const obj: any = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== undefined) { + obj.value = SubscribeRequestFilterBlocksMeta.toJSON(message.value); + } + return obj; + }, + + create, I>>(base?: I): BlockFilters_BlocksMetaEntry { + return BlockFilters_BlocksMetaEntry.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): BlockFilters_BlocksMetaEntry { + const message = createBaseBlockFilters_BlocksMetaEntry(); + message.key = object.key ?? ""; + message.value = (object.value !== undefined && object.value !== null) + ? 
SubscribeRequestFilterBlocksMeta.fromPartial(object.value) + : undefined; + return message; + }, +}; + +function createBaseDownloadBlockShard(): DownloadBlockShard { + return { blockchainId: new Uint8Array(0), blockUid: new Uint8Array(0), shardIdx: 0, blockFilters: undefined }; +} + +export const DownloadBlockShard: MessageFns = { + encode(message: DownloadBlockShard, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.blockchainId.length !== 0) { + writer.uint32(10).bytes(message.blockchainId); + } + if (message.blockUid.length !== 0) { + writer.uint32(18).bytes(message.blockUid); + } + if (message.shardIdx !== 0) { + writer.uint32(24).int32(message.shardIdx); + } + if (message.blockFilters !== undefined) { + BlockFilters.encode(message.blockFilters, writer.uint32(34).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): DownloadBlockShard { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDownloadBlockShard(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { break; } - message.initialOffsetPolicy = reader.int32() as any; + message.blockchainId = reader.bytes(); continue; - case 4: - if (tag !== 32) { + } + case 2: { + if (tag !== 18) { break; } - message.commitmentLevel = reader.int32() as any; + message.blockUid = reader.bytes(); continue; - case 5: - if (tag !== 40) { + } + case 3: { + if (tag !== 24) { break; } - message.eventSubscriptionPolicy = reader.int32() as any; + message.shardIdx = reader.int32(); continue; - case 6: - if (tag !== 48) { + } + case 4: { + if (tag !== 34) { break; } - message.atSlot = longToString(reader.int64() as Long); + message.blockFilters = BlockFilters.decode(reader, reader.uint32()); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, - fromJSON(object: any): CreateStaticConsumerGroupRequest { + fromJSON(object: any): DownloadBlockShard { return { - consumerGroupLabel: isSet(object.consumerGroupLabel) ? globalThis.String(object.consumerGroupLabel) : "", - memberCount: isSet(object.memberCount) ? globalThis.Number(object.memberCount) : undefined, - initialOffsetPolicy: isSet(object.initialOffsetPolicy) - ? initialOffsetPolicyFromJSON(object.initialOffsetPolicy) - : 0, - commitmentLevel: isSet(object.commitmentLevel) ? commitmentLevelFromJSON(object.commitmentLevel) : 0, - eventSubscriptionPolicy: isSet(object.eventSubscriptionPolicy) - ? eventSubscriptionPolicyFromJSON(object.eventSubscriptionPolicy) - : 0, - atSlot: isSet(object.atSlot) ? globalThis.String(object.atSlot) : undefined, + blockchainId: isSet(object.blockchainId) ? bytesFromBase64(object.blockchainId) : new Uint8Array(0), + blockUid: isSet(object.blockUid) ? bytesFromBase64(object.blockUid) : new Uint8Array(0), + shardIdx: isSet(object.shardIdx) ? 
globalThis.Number(object.shardIdx) : 0, + blockFilters: isSet(object.blockFilters) ? BlockFilters.fromJSON(object.blockFilters) : undefined, }; }, - toJSON(message: CreateStaticConsumerGroupRequest): unknown { + toJSON(message: DownloadBlockShard): unknown { const obj: any = {}; - if (message.consumerGroupLabel !== "") { - obj.consumerGroupLabel = message.consumerGroupLabel; - } - if (message.memberCount !== undefined) { - obj.memberCount = Math.round(message.memberCount); - } - if (message.initialOffsetPolicy !== 0) { - obj.initialOffsetPolicy = initialOffsetPolicyToJSON(message.initialOffsetPolicy); + if (message.blockchainId.length !== 0) { + obj.blockchainId = base64FromBytes(message.blockchainId); } - if (message.commitmentLevel !== 0) { - obj.commitmentLevel = commitmentLevelToJSON(message.commitmentLevel); + if (message.blockUid.length !== 0) { + obj.blockUid = base64FromBytes(message.blockUid); } - if (message.eventSubscriptionPolicy !== 0) { - obj.eventSubscriptionPolicy = eventSubscriptionPolicyToJSON(message.eventSubscriptionPolicy); + if (message.shardIdx !== 0) { + obj.shardIdx = Math.round(message.shardIdx); } - if (message.atSlot !== undefined) { - obj.atSlot = message.atSlot; + if (message.blockFilters !== undefined) { + obj.blockFilters = BlockFilters.toJSON(message.blockFilters); } return obj; }, - create, I>>( - base?: I, - ): CreateStaticConsumerGroupRequest { - return CreateStaticConsumerGroupRequest.fromPartial(base ?? ({} as any)); + create, I>>(base?: I): DownloadBlockShard { + return DownloadBlockShard.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>( - object: I, - ): CreateStaticConsumerGroupRequest { - const message = createBaseCreateStaticConsumerGroupRequest(); - message.consumerGroupLabel = object.consumerGroupLabel ?? ""; - message.memberCount = object.memberCount ?? undefined; - message.initialOffsetPolicy = object.initialOffsetPolicy ?? 0; - message.commitmentLevel = object.commitmentLevel ?? 
0; - message.eventSubscriptionPolicy = object.eventSubscriptionPolicy ?? 0; - message.atSlot = object.atSlot ?? undefined; + fromPartial, I>>(object: I): DownloadBlockShard { + const message = createBaseDownloadBlockShard(); + message.blockchainId = object.blockchainId ?? new Uint8Array(0); + message.blockUid = object.blockUid ?? new Uint8Array(0); + message.shardIdx = object.shardIdx ?? 0; + message.blockFilters = (object.blockFilters !== undefined && object.blockFilters !== null) + ? BlockFilters.fromPartial(object.blockFilters) + : undefined; return message; }, }; -function createBaseGetOldestSlotRequest(): GetOldestSlotRequest { - return { commitmentLevel: 0 }; +function createBasePing(): Ping { + return { pingId: 0 }; } -export const GetOldestSlotRequest = { - encode(message: GetOldestSlotRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.commitmentLevel !== 0) { - writer.uint32(8).int32(message.commitmentLevel); +export const Ping: MessageFns = { + encode(message: Ping, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.pingId !== 0) { + writer.uint32(8).uint32(message.pingId); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): GetOldestSlotRequest { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseGetOldestSlotRequest(); + decode(input: BinaryReader | Uint8Array, length?: number): Ping { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBasePing(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } - message.commitmentLevel = reader.int32() as any; + message.pingId = reader.uint32(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, - fromJSON(object: any): GetOldestSlotRequest { - return { commitmentLevel: isSet(object.commitmentLevel) ? commitmentLevelFromJSON(object.commitmentLevel) : 0 }; + fromJSON(object: any): Ping { + return { pingId: isSet(object.pingId) ? globalThis.Number(object.pingId) : 0 }; }, - toJSON(message: GetOldestSlotRequest): unknown { + toJSON(message: Ping): unknown { const obj: any = {}; - if (message.commitmentLevel !== 0) { - obj.commitmentLevel = commitmentLevelToJSON(message.commitmentLevel); + if (message.pingId !== 0) { + obj.pingId = Math.round(message.pingId); } return obj; }, - create, I>>(base?: I): GetOldestSlotRequest { - return GetOldestSlotRequest.fromPartial(base ?? ({} as any)); + create, I>>(base?: I): Ping { + return Ping.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): GetOldestSlotRequest { - const message = createBaseGetOldestSlotRequest(); - message.commitmentLevel = object.commitmentLevel ?? 0; + fromPartial, I>>(object: I): Ping { + const message = createBasePing(); + message.pingId = object.pingId ?? 
0; return message; }, }; -function createBaseGetOldestSlotResponse(): GetOldestSlotResponse { - return { slot: undefined }; +function createBasePong(): Pong { + return { pingId: 0 }; } -export const GetOldestSlotResponse = { - encode(message: GetOldestSlotResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.slot !== undefined) { - writer.uint32(8).uint64(message.slot); +export const Pong: MessageFns = { + encode(message: Pong, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.pingId !== 0) { + writer.uint32(8).uint32(message.pingId); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): GetOldestSlotResponse { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseGetOldestSlotResponse(); + decode(input: BinaryReader | Uint8Array, length?: number): Pong { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePong(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } - message.slot = longToString(reader.uint64() as Long); + message.pingId = reader.uint32(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, - fromJSON(object: any): GetOldestSlotResponse { - return { slot: isSet(object.slot) ? globalThis.String(object.slot) : undefined }; + fromJSON(object: any): Pong { + return { pingId: isSet(object.pingId) ? 
globalThis.Number(object.pingId) : 0 }; }, - toJSON(message: GetOldestSlotResponse): unknown { + toJSON(message: Pong): unknown { const obj: any = {}; - if (message.slot !== undefined) { - obj.slot = message.slot; + if (message.pingId !== 0) { + obj.pingId = Math.round(message.pingId); } return obj; }, - create, I>>(base?: I): GetOldestSlotResponse { - return GetOldestSlotResponse.fromPartial(base ?? ({} as any)); + create, I>>(base?: I): Pong { + return Pong.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): GetOldestSlotResponse { - const message = createBaseGetOldestSlotResponse(); - message.slot = object.slot ?? undefined; + fromPartial, I>>(object: I): Pong { + const message = createBasePong(); + message.pingId = object.pingId ?? 0; return message; }, }; -export type FumaroleService = typeof FumaroleService; -export const FumaroleService = { - listAvailableCommitmentLevels: { - path: "/fumarole.Fumarole/ListAvailableCommitmentLevels", - requestStream: false, - responseStream: false, - requestSerialize: (value: ListAvailableCommitmentLevelsRequest) => - Buffer.from(ListAvailableCommitmentLevelsRequest.encode(value).finish()), - requestDeserialize: (value: Buffer) => ListAvailableCommitmentLevelsRequest.decode(value), - responseSerialize: (value: ListAvailableCommitmentLevelsResponse) => - Buffer.from(ListAvailableCommitmentLevelsResponse.encode(value).finish()), - responseDeserialize: (value: Buffer) => ListAvailableCommitmentLevelsResponse.decode(value), - }, - getConsumerGroupInfo: { - path: "/fumarole.Fumarole/GetConsumerGroupInfo", - requestStream: false, - responseStream: false, - requestSerialize: (value: GetConsumerGroupInfoRequest) => - Buffer.from(GetConsumerGroupInfoRequest.encode(value).finish()), - requestDeserialize: (value: Buffer) => GetConsumerGroupInfoRequest.decode(value), - responseSerialize: (value: ConsumerGroupInfo) => Buffer.from(ConsumerGroupInfo.encode(value).finish()), - responseDeserialize: (value: Buffer) => 
ConsumerGroupInfo.decode(value), - }, - listConsumerGroups: { - path: "/fumarole.Fumarole/ListConsumerGroups", - requestStream: false, - responseStream: false, - requestSerialize: (value: ListConsumerGroupsRequest) => - Buffer.from(ListConsumerGroupsRequest.encode(value).finish()), - requestDeserialize: (value: Buffer) => ListConsumerGroupsRequest.decode(value), - responseSerialize: (value: ListConsumerGroupsResponse) => - Buffer.from(ListConsumerGroupsResponse.encode(value).finish()), - responseDeserialize: (value: Buffer) => ListConsumerGroupsResponse.decode(value), - }, - deleteConsumerGroup: { - path: "/fumarole.Fumarole/DeleteConsumerGroup", - requestStream: false, - responseStream: false, - requestSerialize: (value: DeleteConsumerGroupRequest) => - Buffer.from(DeleteConsumerGroupRequest.encode(value).finish()), - requestDeserialize: (value: Buffer) => DeleteConsumerGroupRequest.decode(value), - responseSerialize: (value: DeleteConsumerGroupResponse) => - Buffer.from(DeleteConsumerGroupResponse.encode(value).finish()), - responseDeserialize: (value: Buffer) => DeleteConsumerGroupResponse.decode(value), - }, - createStaticConsumerGroup: { - path: "/fumarole.Fumarole/CreateStaticConsumerGroup", - requestStream: false, - responseStream: false, - requestSerialize: (value: CreateStaticConsumerGroupRequest) => - Buffer.from(CreateStaticConsumerGroupRequest.encode(value).finish()), - requestDeserialize: (value: Buffer) => CreateStaticConsumerGroupRequest.decode(value), - responseSerialize: (value: CreateStaticConsumerGroupResponse) => - Buffer.from(CreateStaticConsumerGroupResponse.encode(value).finish()), - responseDeserialize: (value: Buffer) => CreateStaticConsumerGroupResponse.decode(value), +function createBaseDataCommand(): DataCommand { + return { downloadBlockShard: undefined, filterUpdate: undefined }; +} + +export const DataCommand: MessageFns = { + encode(message: DataCommand, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if 
(message.downloadBlockShard !== undefined) { + DownloadBlockShard.encode(message.downloadBlockShard, writer.uint32(10).fork()).join(); + } + if (message.filterUpdate !== undefined) { + BlockFilters.encode(message.filterUpdate, writer.uint32(18).fork()).join(); + } + return writer; }, - subscribe: { - path: "/fumarole.Fumarole/Subscribe", + + decode(input: BinaryReader | Uint8Array, length?: number): DataCommand { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDataCommand(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.downloadBlockShard = DownloadBlockShard.decode(reader, reader.uint32()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.filterUpdate = BlockFilters.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): DataCommand { + return { + downloadBlockShard: isSet(object.downloadBlockShard) + ? DownloadBlockShard.fromJSON(object.downloadBlockShard) + : undefined, + filterUpdate: isSet(object.filterUpdate) ? BlockFilters.fromJSON(object.filterUpdate) : undefined, + }; + }, + + toJSON(message: DataCommand): unknown { + const obj: any = {}; + if (message.downloadBlockShard !== undefined) { + obj.downloadBlockShard = DownloadBlockShard.toJSON(message.downloadBlockShard); + } + if (message.filterUpdate !== undefined) { + obj.filterUpdate = BlockFilters.toJSON(message.filterUpdate); + } + return obj; + }, + + create, I>>(base?: I): DataCommand { + return DataCommand.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): DataCommand { + const message = createBaseDataCommand(); + message.downloadBlockShard = (object.downloadBlockShard !== undefined && object.downloadBlockShard !== null) + ? DownloadBlockShard.fromPartial(object.downloadBlockShard) + : undefined; + message.filterUpdate = (object.filterUpdate !== undefined && object.filterUpdate !== null) + ? BlockFilters.fromPartial(object.filterUpdate) + : undefined; + return message; + }, +}; + +function createBaseBlockShardDownloadFinish(): BlockShardDownloadFinish { + return {}; +} + +export const BlockShardDownloadFinish: MessageFns = { + encode(_: BlockShardDownloadFinish, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): BlockShardDownloadFinish { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBlockShardDownloadFinish(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(_: any): BlockShardDownloadFinish { + return {}; + }, + + toJSON(_: BlockShardDownloadFinish): unknown { + const obj: any = {}; + return obj; + }, + + create, I>>(base?: I): BlockShardDownloadFinish { + return BlockShardDownloadFinish.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(_: I): BlockShardDownloadFinish { + const message = createBaseBlockShardDownloadFinish(); + return message; + }, +}; + +function createBaseBlockNotFound(): BlockNotFound { + return { blockchainId: new Uint8Array(0), blockUid: new Uint8Array(0), shardIdx: 0 }; +} + +export const BlockNotFound: MessageFns = { + encode(message: BlockNotFound, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.blockchainId.length !== 0) { + writer.uint32(10).bytes(message.blockchainId); + } + if (message.blockUid.length !== 0) { + writer.uint32(18).bytes(message.blockUid); + } + if (message.shardIdx !== 0) { + writer.uint32(24).int32(message.shardIdx); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): BlockNotFound { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBlockNotFound(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.blockchainId = reader.bytes(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.blockUid = reader.bytes(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.shardIdx = reader.int32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): BlockNotFound { + return { + blockchainId: isSet(object.blockchainId) ? bytesFromBase64(object.blockchainId) : new Uint8Array(0), + blockUid: isSet(object.blockUid) ? bytesFromBase64(object.blockUid) : new Uint8Array(0), + shardIdx: isSet(object.shardIdx) ? 
globalThis.Number(object.shardIdx) : 0, + }; + }, + + toJSON(message: BlockNotFound): unknown { + const obj: any = {}; + if (message.blockchainId.length !== 0) { + obj.blockchainId = base64FromBytes(message.blockchainId); + } + if (message.blockUid.length !== 0) { + obj.blockUid = base64FromBytes(message.blockUid); + } + if (message.shardIdx !== 0) { + obj.shardIdx = Math.round(message.shardIdx); + } + return obj; + }, + + create, I>>(base?: I): BlockNotFound { + return BlockNotFound.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): BlockNotFound { + const message = createBaseBlockNotFound(); + message.blockchainId = object.blockchainId ?? new Uint8Array(0); + message.blockUid = object.blockUid ?? new Uint8Array(0); + message.shardIdx = object.shardIdx ?? 0; + return message; + }, +}; + +function createBaseDataError(): DataError { + return { notFound: undefined }; +} + +export const DataError: MessageFns = { + encode(message: DataError, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.notFound !== undefined) { + BlockNotFound.encode(message.notFound, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): DataError { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDataError(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.notFound = BlockNotFound.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): DataError { + return { notFound: isSet(object.notFound) ? 
BlockNotFound.fromJSON(object.notFound) : undefined }; + }, + + toJSON(message: DataError): unknown { + const obj: any = {}; + if (message.notFound !== undefined) { + obj.notFound = BlockNotFound.toJSON(message.notFound); + } + return obj; + }, + + create, I>>(base?: I): DataError { + return DataError.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): DataError { + const message = createBaseDataError(); + message.notFound = (object.notFound !== undefined && object.notFound !== null) + ? BlockNotFound.fromPartial(object.notFound) + : undefined; + return message; + }, +}; + +function createBaseDataResponse(): DataResponse { + return { update: undefined, blockShardDownloadFinish: undefined }; +} + +export const DataResponse: MessageFns = { + encode(message: DataResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.update !== undefined) { + SubscribeUpdate.encode(message.update, writer.uint32(10).fork()).join(); + } + if (message.blockShardDownloadFinish !== undefined) { + BlockShardDownloadFinish.encode(message.blockShardDownloadFinish, writer.uint32(18).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): DataResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDataResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.update = SubscribeUpdate.decode(reader, reader.uint32()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.blockShardDownloadFinish = BlockShardDownloadFinish.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): DataResponse { + return { + update: isSet(object.update) ? 
SubscribeUpdate.fromJSON(object.update) : undefined, + blockShardDownloadFinish: isSet(object.blockShardDownloadFinish) + ? BlockShardDownloadFinish.fromJSON(object.blockShardDownloadFinish) + : undefined, + }; + }, + + toJSON(message: DataResponse): unknown { + const obj: any = {}; + if (message.update !== undefined) { + obj.update = SubscribeUpdate.toJSON(message.update); + } + if (message.blockShardDownloadFinish !== undefined) { + obj.blockShardDownloadFinish = BlockShardDownloadFinish.toJSON(message.blockShardDownloadFinish); + } + return obj; + }, + + create, I>>(base?: I): DataResponse { + return DataResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): DataResponse { + const message = createBaseDataResponse(); + message.update = (object.update !== undefined && object.update !== null) + ? SubscribeUpdate.fromPartial(object.update) + : undefined; + message.blockShardDownloadFinish = + (object.blockShardDownloadFinish !== undefined && object.blockShardDownloadFinish !== null) + ? BlockShardDownloadFinish.fromPartial(object.blockShardDownloadFinish) + : undefined; + return message; + }, +}; + +function createBaseCommitOffset(): CommitOffset { + return { offset: "0", shardId: 0 }; +} + +export const CommitOffset: MessageFns = { + encode(message: CommitOffset, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.offset !== "0") { + writer.uint32(8).int64(message.offset); + } + if (message.shardId !== 0) { + writer.uint32(16).int32(message.shardId); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): CommitOffset { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseCommitOffset(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.offset = reader.int64().toString(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.shardId = reader.int32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): CommitOffset { + return { + offset: isSet(object.offset) ? globalThis.String(object.offset) : "0", + shardId: isSet(object.shardId) ? globalThis.Number(object.shardId) : 0, + }; + }, + + toJSON(message: CommitOffset): unknown { + const obj: any = {}; + if (message.offset !== "0") { + obj.offset = message.offset; + } + if (message.shardId !== 0) { + obj.shardId = Math.round(message.shardId); + } + return obj; + }, + + create, I>>(base?: I): CommitOffset { + return CommitOffset.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): CommitOffset { + const message = createBaseCommitOffset(); + message.offset = object.offset ?? "0"; + message.shardId = object.shardId ?? 0; + return message; + }, +}; + +function createBasePollBlockchainHistory(): PollBlockchainHistory { + return { shardId: 0, from: undefined, limit: undefined }; +} + +export const PollBlockchainHistory: MessageFns = { + encode(message: PollBlockchainHistory, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.shardId !== 0) { + writer.uint32(8).int32(message.shardId); + } + if (message.from !== undefined) { + writer.uint32(16).int64(message.from); + } + if (message.limit !== undefined) { + writer.uint32(24).int64(message.limit); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): PollBlockchainHistory { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBasePollBlockchainHistory(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.shardId = reader.int32(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.from = reader.int64().toString(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.limit = reader.int64().toString(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): PollBlockchainHistory { + return { + shardId: isSet(object.shardId) ? globalThis.Number(object.shardId) : 0, + from: isSet(object.from) ? globalThis.String(object.from) : undefined, + limit: isSet(object.limit) ? globalThis.String(object.limit) : undefined, + }; + }, + + toJSON(message: PollBlockchainHistory): unknown { + const obj: any = {}; + if (message.shardId !== 0) { + obj.shardId = Math.round(message.shardId); + } + if (message.from !== undefined) { + obj.from = message.from; + } + if (message.limit !== undefined) { + obj.limit = message.limit; + } + return obj; + }, + + create, I>>(base?: I): PollBlockchainHistory { + return PollBlockchainHistory.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): PollBlockchainHistory { + const message = createBasePollBlockchainHistory(); + message.shardId = object.shardId ?? 0; + message.from = object.from ?? undefined; + message.limit = object.limit ?? 
undefined; + return message; + }, +}; + +function createBaseBlockchainEvent(): BlockchainEvent { + return { + offset: "0", + blockchainId: new Uint8Array(0), + blockUid: new Uint8Array(0), + numShards: 0, + slot: "0", + parentSlot: undefined, + commitmentLevel: 0, + blockchainShardId: 0, + deadError: undefined, + }; +} + +export const BlockchainEvent: MessageFns = { + encode(message: BlockchainEvent, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.offset !== "0") { + writer.uint32(8).int64(message.offset); + } + if (message.blockchainId.length !== 0) { + writer.uint32(18).bytes(message.blockchainId); + } + if (message.blockUid.length !== 0) { + writer.uint32(26).bytes(message.blockUid); + } + if (message.numShards !== 0) { + writer.uint32(32).uint32(message.numShards); + } + if (message.slot !== "0") { + writer.uint32(40).uint64(message.slot); + } + if (message.parentSlot !== undefined) { + writer.uint32(48).uint64(message.parentSlot); + } + if (message.commitmentLevel !== 0) { + writer.uint32(56).int32(message.commitmentLevel); + } + if (message.blockchainShardId !== 0) { + writer.uint32(64).int32(message.blockchainShardId); + } + if (message.deadError !== undefined) { + writer.uint32(74).string(message.deadError); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): BlockchainEvent { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseBlockchainEvent(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.offset = reader.int64().toString(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.blockchainId = reader.bytes(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.blockUid = reader.bytes(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.numShards = reader.uint32(); + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.slot = reader.uint64().toString(); + continue; + } + case 6: { + if (tag !== 48) { + break; + } + + message.parentSlot = reader.uint64().toString(); + continue; + } + case 7: { + if (tag !== 56) { + break; + } + + message.commitmentLevel = reader.int32() as any; + continue; + } + case 8: { + if (tag !== 64) { + break; + } + + message.blockchainShardId = reader.int32(); + continue; + } + case 9: { + if (tag !== 74) { + break; + } + + message.deadError = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): BlockchainEvent { + return { + offset: isSet(object.offset) ? globalThis.String(object.offset) : "0", + blockchainId: isSet(object.blockchainId) ? bytesFromBase64(object.blockchainId) : new Uint8Array(0), + blockUid: isSet(object.blockUid) ? bytesFromBase64(object.blockUid) : new Uint8Array(0), + numShards: isSet(object.numShards) ? globalThis.Number(object.numShards) : 0, + slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", + parentSlot: isSet(object.parentSlot) ? globalThis.String(object.parentSlot) : undefined, + commitmentLevel: isSet(object.commitmentLevel) ? commitmentLevelFromJSON(object.commitmentLevel) : 0, + blockchainShardId: isSet(object.blockchainShardId) ? 
globalThis.Number(object.blockchainShardId) : 0, + deadError: isSet(object.deadError) ? globalThis.String(object.deadError) : undefined, + }; + }, + + toJSON(message: BlockchainEvent): unknown { + const obj: any = {}; + if (message.offset !== "0") { + obj.offset = message.offset; + } + if (message.blockchainId.length !== 0) { + obj.blockchainId = base64FromBytes(message.blockchainId); + } + if (message.blockUid.length !== 0) { + obj.blockUid = base64FromBytes(message.blockUid); + } + if (message.numShards !== 0) { + obj.numShards = Math.round(message.numShards); + } + if (message.slot !== "0") { + obj.slot = message.slot; + } + if (message.parentSlot !== undefined) { + obj.parentSlot = message.parentSlot; + } + if (message.commitmentLevel !== 0) { + obj.commitmentLevel = commitmentLevelToJSON(message.commitmentLevel); + } + if (message.blockchainShardId !== 0) { + obj.blockchainShardId = Math.round(message.blockchainShardId); + } + if (message.deadError !== undefined) { + obj.deadError = message.deadError; + } + return obj; + }, + + create, I>>(base?: I): BlockchainEvent { + return BlockchainEvent.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): BlockchainEvent { + const message = createBaseBlockchainEvent(); + message.offset = object.offset ?? "0"; + message.blockchainId = object.blockchainId ?? new Uint8Array(0); + message.blockUid = object.blockUid ?? new Uint8Array(0); + message.numShards = object.numShards ?? 0; + message.slot = object.slot ?? "0"; + message.parentSlot = object.parentSlot ?? undefined; + message.commitmentLevel = object.commitmentLevel ?? 0; + message.blockchainShardId = object.blockchainShardId ?? 0; + message.deadError = object.deadError ?? 
undefined; + return message; + }, +}; + +function createBaseBlockchainHistory(): BlockchainHistory { + return { events: [] }; +} + +export const BlockchainHistory: MessageFns = { + encode(message: BlockchainHistory, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.events) { + BlockchainEvent.encode(v!, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): BlockchainHistory { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBlockchainHistory(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.events.push(BlockchainEvent.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): BlockchainHistory { + return { + events: globalThis.Array.isArray(object?.events) + ? object.events.map((e: any) => BlockchainEvent.fromJSON(e)) + : [], + }; + }, + + toJSON(message: BlockchainHistory): unknown { + const obj: any = {}; + if (message.events?.length) { + obj.events = message.events.map((e) => BlockchainEvent.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): BlockchainHistory { + return BlockchainHistory.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): BlockchainHistory { + const message = createBaseBlockchainHistory(); + message.events = object.events?.map((e) => BlockchainEvent.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseJoinControlPlane(): JoinControlPlane { + return { consumerGroupName: undefined }; +} + +export const JoinControlPlane: MessageFns = { + encode(message: JoinControlPlane, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.consumerGroupName !== undefined) { + writer.uint32(10).string(message.consumerGroupName); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): JoinControlPlane { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseJoinControlPlane(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.consumerGroupName = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): JoinControlPlane { + return { + consumerGroupName: isSet(object.consumerGroupName) ? globalThis.String(object.consumerGroupName) : undefined, + }; + }, + + toJSON(message: JoinControlPlane): unknown { + const obj: any = {}; + if (message.consumerGroupName !== undefined) { + obj.consumerGroupName = message.consumerGroupName; + } + return obj; + }, + + create, I>>(base?: I): JoinControlPlane { + return JoinControlPlane.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): JoinControlPlane { + const message = createBaseJoinControlPlane(); + message.consumerGroupName = object.consumerGroupName ?? 
undefined; + return message; + }, +}; + +function createBaseControlCommand(): ControlCommand { + return { initialJoin: undefined, commitOffset: undefined, pollHist: undefined, ping: undefined }; +} + +export const ControlCommand: MessageFns = { + encode(message: ControlCommand, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.initialJoin !== undefined) { + JoinControlPlane.encode(message.initialJoin, writer.uint32(10).fork()).join(); + } + if (message.commitOffset !== undefined) { + CommitOffset.encode(message.commitOffset, writer.uint32(18).fork()).join(); + } + if (message.pollHist !== undefined) { + PollBlockchainHistory.encode(message.pollHist, writer.uint32(26).fork()).join(); + } + if (message.ping !== undefined) { + Ping.encode(message.ping, writer.uint32(34).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): ControlCommand { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseControlCommand(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.initialJoin = JoinControlPlane.decode(reader, reader.uint32()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.commitOffset = CommitOffset.decode(reader, reader.uint32()); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.pollHist = PollBlockchainHistory.decode(reader, reader.uint32()); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.ping = Ping.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): ControlCommand { + return { + initialJoin: isSet(object.initialJoin) ? 
JoinControlPlane.fromJSON(object.initialJoin) : undefined, + commitOffset: isSet(object.commitOffset) ? CommitOffset.fromJSON(object.commitOffset) : undefined, + pollHist: isSet(object.pollHist) ? PollBlockchainHistory.fromJSON(object.pollHist) : undefined, + ping: isSet(object.ping) ? Ping.fromJSON(object.ping) : undefined, + }; + }, + + toJSON(message: ControlCommand): unknown { + const obj: any = {}; + if (message.initialJoin !== undefined) { + obj.initialJoin = JoinControlPlane.toJSON(message.initialJoin); + } + if (message.commitOffset !== undefined) { + obj.commitOffset = CommitOffset.toJSON(message.commitOffset); + } + if (message.pollHist !== undefined) { + obj.pollHist = PollBlockchainHistory.toJSON(message.pollHist); + } + if (message.ping !== undefined) { + obj.ping = Ping.toJSON(message.ping); + } + return obj; + }, + + create, I>>(base?: I): ControlCommand { + return ControlCommand.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): ControlCommand { + const message = createBaseControlCommand(); + message.initialJoin = (object.initialJoin !== undefined && object.initialJoin !== null) + ? JoinControlPlane.fromPartial(object.initialJoin) + : undefined; + message.commitOffset = (object.commitOffset !== undefined && object.commitOffset !== null) + ? CommitOffset.fromPartial(object.commitOffset) + : undefined; + message.pollHist = (object.pollHist !== undefined && object.pollHist !== null) + ? PollBlockchainHistory.fromPartial(object.pollHist) + : undefined; + message.ping = (object.ping !== undefined && object.ping !== null) ? 
Ping.fromPartial(object.ping) : undefined; + return message; + }, +}; + +function createBaseControlResponse(): ControlResponse { + return { init: undefined, commitOffset: undefined, pollHist: undefined, pong: undefined }; +} + +export const ControlResponse: MessageFns = { + encode(message: ControlResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.init !== undefined) { + InitialConsumerGroupState.encode(message.init, writer.uint32(10).fork()).join(); + } + if (message.commitOffset !== undefined) { + CommitOffsetResult.encode(message.commitOffset, writer.uint32(18).fork()).join(); + } + if (message.pollHist !== undefined) { + BlockchainHistory.encode(message.pollHist, writer.uint32(26).fork()).join(); + } + if (message.pong !== undefined) { + Pong.encode(message.pong, writer.uint32(34).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): ControlResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseControlResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.init = InitialConsumerGroupState.decode(reader, reader.uint32()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.commitOffset = CommitOffsetResult.decode(reader, reader.uint32()); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.pollHist = BlockchainHistory.decode(reader, reader.uint32()); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.pong = Pong.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): ControlResponse { + return { + init: isSet(object.init) ? 
InitialConsumerGroupState.fromJSON(object.init) : undefined, + commitOffset: isSet(object.commitOffset) ? CommitOffsetResult.fromJSON(object.commitOffset) : undefined, + pollHist: isSet(object.pollHist) ? BlockchainHistory.fromJSON(object.pollHist) : undefined, + pong: isSet(object.pong) ? Pong.fromJSON(object.pong) : undefined, + }; + }, + + toJSON(message: ControlResponse): unknown { + const obj: any = {}; + if (message.init !== undefined) { + obj.init = InitialConsumerGroupState.toJSON(message.init); + } + if (message.commitOffset !== undefined) { + obj.commitOffset = CommitOffsetResult.toJSON(message.commitOffset); + } + if (message.pollHist !== undefined) { + obj.pollHist = BlockchainHistory.toJSON(message.pollHist); + } + if (message.pong !== undefined) { + obj.pong = Pong.toJSON(message.pong); + } + return obj; + }, + + create, I>>(base?: I): ControlResponse { + return ControlResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): ControlResponse { + const message = createBaseControlResponse(); + message.init = (object.init !== undefined && object.init !== null) + ? InitialConsumerGroupState.fromPartial(object.init) + : undefined; + message.commitOffset = (object.commitOffset !== undefined && object.commitOffset !== null) + ? CommitOffsetResult.fromPartial(object.commitOffset) + : undefined; + message.pollHist = (object.pollHist !== undefined && object.pollHist !== null) + ? BlockchainHistory.fromPartial(object.pollHist) + : undefined; + message.pong = (object.pong !== undefined && object.pong !== null) ? 
Pong.fromPartial(object.pong) : undefined; + return message; + }, +}; + +function createBaseCommitOffsetResult(): CommitOffsetResult { + return { offset: "0", shardId: 0 }; +} + +export const CommitOffsetResult: MessageFns = { + encode(message: CommitOffsetResult, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.offset !== "0") { + writer.uint32(8).int64(message.offset); + } + if (message.shardId !== 0) { + writer.uint32(16).int32(message.shardId); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): CommitOffsetResult { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCommitOffsetResult(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.offset = reader.int64().toString(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.shardId = reader.int32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): CommitOffsetResult { + return { + offset: isSet(object.offset) ? globalThis.String(object.offset) : "0", + shardId: isSet(object.shardId) ? globalThis.Number(object.shardId) : 0, + }; + }, + + toJSON(message: CommitOffsetResult): unknown { + const obj: any = {}; + if (message.offset !== "0") { + obj.offset = message.offset; + } + if (message.shardId !== 0) { + obj.shardId = Math.round(message.shardId); + } + return obj; + }, + + create, I>>(base?: I): CommitOffsetResult { + return CommitOffsetResult.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): CommitOffsetResult { + const message = createBaseCommitOffsetResult(); + message.offset = object.offset ?? "0"; + message.shardId = object.shardId ?? 
0; + return message; + }, +}; + +function createBaseInitialConsumerGroupState(): InitialConsumerGroupState { + return { blockchainId: new Uint8Array(0), lastCommittedOffsets: {} }; +} + +export const InitialConsumerGroupState: MessageFns = { + encode(message: InitialConsumerGroupState, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.blockchainId.length !== 0) { + writer.uint32(10).bytes(message.blockchainId); + } + Object.entries(message.lastCommittedOffsets).forEach(([key, value]) => { + InitialConsumerGroupState_LastCommittedOffsetsEntry.encode({ key: key as any, value }, writer.uint32(18).fork()) + .join(); + }); + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): InitialConsumerGroupState { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseInitialConsumerGroupState(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.blockchainId = reader.bytes(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + const entry2 = InitialConsumerGroupState_LastCommittedOffsetsEntry.decode(reader, reader.uint32()); + if (entry2.value !== undefined) { + message.lastCommittedOffsets[entry2.key] = entry2.value; + } + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): InitialConsumerGroupState { + return { + blockchainId: isSet(object.blockchainId) ? bytesFromBase64(object.blockchainId) : new Uint8Array(0), + lastCommittedOffsets: isObject(object.lastCommittedOffsets) + ? 
Object.entries(object.lastCommittedOffsets).reduce<{ [key: number]: string }>((acc, [key, value]) => { + acc[globalThis.Number(key)] = String(value); + return acc; + }, {}) + : {}, + }; + }, + + toJSON(message: InitialConsumerGroupState): unknown { + const obj: any = {}; + if (message.blockchainId.length !== 0) { + obj.blockchainId = base64FromBytes(message.blockchainId); + } + if (message.lastCommittedOffsets) { + const entries = Object.entries(message.lastCommittedOffsets); + if (entries.length > 0) { + obj.lastCommittedOffsets = {}; + entries.forEach(([k, v]) => { + obj.lastCommittedOffsets[k] = v; + }); + } + } + return obj; + }, + + create, I>>(base?: I): InitialConsumerGroupState { + return InitialConsumerGroupState.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): InitialConsumerGroupState { + const message = createBaseInitialConsumerGroupState(); + message.blockchainId = object.blockchainId ?? new Uint8Array(0); + message.lastCommittedOffsets = Object.entries(object.lastCommittedOffsets ?? 
{}).reduce<{ [key: number]: string }>( + (acc, [key, value]) => { + if (value !== undefined) { + acc[globalThis.Number(key)] = globalThis.String(value); + } + return acc; + }, + {}, + ); + return message; + }, +}; + +function createBaseInitialConsumerGroupState_LastCommittedOffsetsEntry(): InitialConsumerGroupState_LastCommittedOffsetsEntry { + return { key: 0, value: "0" }; +} + +export const InitialConsumerGroupState_LastCommittedOffsetsEntry: MessageFns< + InitialConsumerGroupState_LastCommittedOffsetsEntry +> = { + encode( + message: InitialConsumerGroupState_LastCommittedOffsetsEntry, + writer: BinaryWriter = new BinaryWriter(), + ): BinaryWriter { + if (message.key !== 0) { + writer.uint32(8).int32(message.key); + } + if (message.value !== "0") { + writer.uint32(16).int64(message.value); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): InitialConsumerGroupState_LastCommittedOffsetsEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseInitialConsumerGroupState_LastCommittedOffsetsEntry(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.key = reader.int32(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.value = reader.int64().toString(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): InitialConsumerGroupState_LastCommittedOffsetsEntry { + return { + key: isSet(object.key) ? globalThis.Number(object.key) : 0, + value: isSet(object.value) ? 
globalThis.String(object.value) : "0", + }; + }, + + toJSON(message: InitialConsumerGroupState_LastCommittedOffsetsEntry): unknown { + const obj: any = {}; + if (message.key !== 0) { + obj.key = Math.round(message.key); + } + if (message.value !== "0") { + obj.value = message.value; + } + return obj; + }, + + create, I>>( + base?: I, + ): InitialConsumerGroupState_LastCommittedOffsetsEntry { + return InitialConsumerGroupState_LastCommittedOffsetsEntry.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>( + object: I, + ): InitialConsumerGroupState_LastCommittedOffsetsEntry { + const message = createBaseInitialConsumerGroupState_LastCommittedOffsetsEntry(); + message.key = object.key ?? 0; + message.value = object.value ?? "0"; + return message; + }, +}; + +function createBaseCreateConsumerGroupResponse(): CreateConsumerGroupResponse { + return { consumerGroupId: "" }; +} + +export const CreateConsumerGroupResponse: MessageFns = { + encode(message: CreateConsumerGroupResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.consumerGroupId !== "") { + writer.uint32(10).string(message.consumerGroupId); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): CreateConsumerGroupResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCreateConsumerGroupResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.consumerGroupId = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): CreateConsumerGroupResponse { + return { consumerGroupId: isSet(object.consumerGroupId) ? 
globalThis.String(object.consumerGroupId) : "" }; + }, + + toJSON(message: CreateConsumerGroupResponse): unknown { + const obj: any = {}; + if (message.consumerGroupId !== "") { + obj.consumerGroupId = message.consumerGroupId; + } + return obj; + }, + + create, I>>(base?: I): CreateConsumerGroupResponse { + return CreateConsumerGroupResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): CreateConsumerGroupResponse { + const message = createBaseCreateConsumerGroupResponse(); + message.consumerGroupId = object.consumerGroupId ?? ""; + return message; + }, +}; + +function createBaseCreateConsumerGroupRequest(): CreateConsumerGroupRequest { + return { consumerGroupName: "", initialOffsetPolicy: 0 }; +} + +export const CreateConsumerGroupRequest: MessageFns = { + encode(message: CreateConsumerGroupRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.consumerGroupName !== "") { + writer.uint32(10).string(message.consumerGroupName); + } + if (message.initialOffsetPolicy !== 0) { + writer.uint32(16).int32(message.initialOffsetPolicy); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): CreateConsumerGroupRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCreateConsumerGroupRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.consumerGroupName = reader.string(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.initialOffsetPolicy = reader.int32() as any; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): CreateConsumerGroupRequest { + return { + consumerGroupName: isSet(object.consumerGroupName) ? 
globalThis.String(object.consumerGroupName) : "", + initialOffsetPolicy: isSet(object.initialOffsetPolicy) + ? initialOffsetPolicyFromJSON(object.initialOffsetPolicy) + : 0, + }; + }, + + toJSON(message: CreateConsumerGroupRequest): unknown { + const obj: any = {}; + if (message.consumerGroupName !== "") { + obj.consumerGroupName = message.consumerGroupName; + } + if (message.initialOffsetPolicy !== 0) { + obj.initialOffsetPolicy = initialOffsetPolicyToJSON(message.initialOffsetPolicy); + } + return obj; + }, + + create, I>>(base?: I): CreateConsumerGroupRequest { + return CreateConsumerGroupRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): CreateConsumerGroupRequest { + const message = createBaseCreateConsumerGroupRequest(); + message.consumerGroupName = object.consumerGroupName ?? ""; + message.initialOffsetPolicy = object.initialOffsetPolicy ?? 0; + return message; + }, +}; + +export type FumaroleService = typeof FumaroleService; +export const FumaroleService = { + getConsumerGroupInfo: { + path: "/fumarole.Fumarole/GetConsumerGroupInfo", + requestStream: false, + responseStream: false, + requestSerialize: (value: GetConsumerGroupInfoRequest): Buffer => + Buffer.from(GetConsumerGroupInfoRequest.encode(value).finish()), + requestDeserialize: (value: Buffer): GetConsumerGroupInfoRequest => GetConsumerGroupInfoRequest.decode(value), + responseSerialize: (value: ConsumerGroupInfo): Buffer => Buffer.from(ConsumerGroupInfo.encode(value).finish()), + responseDeserialize: (value: Buffer): ConsumerGroupInfo => ConsumerGroupInfo.decode(value), + }, + listConsumerGroups: { + path: "/fumarole.Fumarole/ListConsumerGroups", + requestStream: false, + responseStream: false, + requestSerialize: (value: ListConsumerGroupsRequest): Buffer => + Buffer.from(ListConsumerGroupsRequest.encode(value).finish()), + requestDeserialize: (value: Buffer): ListConsumerGroupsRequest => ListConsumerGroupsRequest.decode(value), + responseSerialize: (value: 
ListConsumerGroupsResponse): Buffer => + Buffer.from(ListConsumerGroupsResponse.encode(value).finish()), + responseDeserialize: (value: Buffer): ListConsumerGroupsResponse => ListConsumerGroupsResponse.decode(value), + }, + deleteConsumerGroup: { + path: "/fumarole.Fumarole/DeleteConsumerGroup", + requestStream: false, + responseStream: false, + requestSerialize: (value: DeleteConsumerGroupRequest): Buffer => + Buffer.from(DeleteConsumerGroupRequest.encode(value).finish()), + requestDeserialize: (value: Buffer): DeleteConsumerGroupRequest => DeleteConsumerGroupRequest.decode(value), + responseSerialize: (value: DeleteConsumerGroupResponse): Buffer => + Buffer.from(DeleteConsumerGroupResponse.encode(value).finish()), + responseDeserialize: (value: Buffer): DeleteConsumerGroupResponse => DeleteConsumerGroupResponse.decode(value), + }, + createConsumerGroup: { + path: "/fumarole.Fumarole/CreateConsumerGroup", + requestStream: false, + responseStream: false, + requestSerialize: (value: CreateConsumerGroupRequest): Buffer => + Buffer.from(CreateConsumerGroupRequest.encode(value).finish()), + requestDeserialize: (value: Buffer): CreateConsumerGroupRequest => CreateConsumerGroupRequest.decode(value), + responseSerialize: (value: CreateConsumerGroupResponse): Buffer => + Buffer.from(CreateConsumerGroupResponse.encode(value).finish()), + responseDeserialize: (value: Buffer): CreateConsumerGroupResponse => CreateConsumerGroupResponse.decode(value), + }, + downloadBlock: { + path: "/fumarole.Fumarole/DownloadBlock", + requestStream: false, + responseStream: true, + requestSerialize: (value: DownloadBlockShard): Buffer => Buffer.from(DownloadBlockShard.encode(value).finish()), + requestDeserialize: (value: Buffer): DownloadBlockShard => DownloadBlockShard.decode(value), + responseSerialize: (value: DataResponse): Buffer => Buffer.from(DataResponse.encode(value).finish()), + responseDeserialize: (value: Buffer): DataResponse => DataResponse.decode(value), + }, + /** Represents 
subscription to the data plane */ + subscribeData: { + path: "/fumarole.Fumarole/SubscribeData", requestStream: true, responseStream: true, - requestSerialize: (value: SubscribeRequest) => Buffer.from(SubscribeRequest.encode(value).finish()), - requestDeserialize: (value: Buffer) => SubscribeRequest.decode(value), - responseSerialize: (value: SubscribeUpdate) => Buffer.from(SubscribeUpdate.encode(value).finish()), - responseDeserialize: (value: Buffer) => SubscribeUpdate.decode(value), + requestSerialize: (value: DataCommand): Buffer => Buffer.from(DataCommand.encode(value).finish()), + requestDeserialize: (value: Buffer): DataCommand => DataCommand.decode(value), + responseSerialize: (value: DataResponse): Buffer => Buffer.from(DataResponse.encode(value).finish()), + responseDeserialize: (value: Buffer): DataResponse => DataResponse.decode(value), }, - getSlotLagInfo: { - path: "/fumarole.Fumarole/GetSlotLagInfo", + getChainTip: { + path: "/fumarole.Fumarole/GetChainTip", requestStream: false, responseStream: false, - requestSerialize: (value: GetSlotLagInfoRequest) => Buffer.from(GetSlotLagInfoRequest.encode(value).finish()), - requestDeserialize: (value: Buffer) => GetSlotLagInfoRequest.decode(value), - responseSerialize: (value: GetSlotLagInfoResponse) => Buffer.from(GetSlotLagInfoResponse.encode(value).finish()), - responseDeserialize: (value: Buffer) => GetSlotLagInfoResponse.decode(value), + requestSerialize: (value: GetChainTipRequest): Buffer => Buffer.from(GetChainTipRequest.encode(value).finish()), + requestDeserialize: (value: Buffer): GetChainTipRequest => GetChainTipRequest.decode(value), + responseSerialize: (value: GetChainTipResponse): Buffer => Buffer.from(GetChainTipResponse.encode(value).finish()), + responseDeserialize: (value: Buffer): GetChainTipResponse => GetChainTipResponse.decode(value), + }, + /** Represents subscription to the control plane */ + subscribe: { + path: "/fumarole.Fumarole/Subscribe", + requestStream: true, + 
responseStream: true, + requestSerialize: (value: ControlCommand): Buffer => Buffer.from(ControlCommand.encode(value).finish()), + requestDeserialize: (value: Buffer): ControlCommand => ControlCommand.decode(value), + responseSerialize: (value: ControlResponse): Buffer => Buffer.from(ControlResponse.encode(value).finish()), + responseDeserialize: (value: Buffer): ControlResponse => ControlResponse.decode(value), }, - getOldestSlot: { - path: "/fumarole.Fumarole/GetOldestSlot", + version: { + path: "/fumarole.Fumarole/Version", requestStream: false, responseStream: false, - requestSerialize: (value: GetOldestSlotRequest) => Buffer.from(GetOldestSlotRequest.encode(value).finish()), - requestDeserialize: (value: Buffer) => GetOldestSlotRequest.decode(value), - responseSerialize: (value: GetOldestSlotResponse) => Buffer.from(GetOldestSlotResponse.encode(value).finish()), - responseDeserialize: (value: Buffer) => GetOldestSlotResponse.decode(value), + requestSerialize: (value: VersionRequest): Buffer => Buffer.from(VersionRequest.encode(value).finish()), + requestDeserialize: (value: Buffer): VersionRequest => VersionRequest.decode(value), + responseSerialize: (value: VersionResponse): Buffer => Buffer.from(VersionResponse.encode(value).finish()), + responseDeserialize: (value: Buffer): VersionResponse => VersionResponse.decode(value), }, } as const; export interface FumaroleServer extends UntypedServiceImplementation { - listAvailableCommitmentLevels: handleUnaryCall< - ListAvailableCommitmentLevelsRequest, - ListAvailableCommitmentLevelsResponse - >; getConsumerGroupInfo: handleUnaryCall; listConsumerGroups: handleUnaryCall; deleteConsumerGroup: handleUnaryCall; - createStaticConsumerGroup: handleUnaryCall; - subscribe: handleBidiStreamingCall; - getSlotLagInfo: handleUnaryCall; - getOldestSlot: handleUnaryCall; + createConsumerGroup: handleUnaryCall; + downloadBlock: handleServerStreamingCall; + /** Represents subscription to the data plane */ + subscribeData: 
handleBidiStreamingCall; + getChainTip: handleUnaryCall; + /** Represents subscription to the control plane */ + subscribe: handleBidiStreamingCall; + version: handleUnaryCall; } export interface FumaroleClient extends Client { - listAvailableCommitmentLevels( - request: ListAvailableCommitmentLevelsRequest, - callback: (error: ServiceError | null, response: ListAvailableCommitmentLevelsResponse) => void, - ): ClientUnaryCall; - listAvailableCommitmentLevels( - request: ListAvailableCommitmentLevelsRequest, - metadata: Metadata, - callback: (error: ServiceError | null, response: ListAvailableCommitmentLevelsResponse) => void, - ): ClientUnaryCall; - listAvailableCommitmentLevels( - request: ListAvailableCommitmentLevelsRequest, - metadata: Metadata, - options: Partial, - callback: (error: ServiceError | null, response: ListAvailableCommitmentLevelsResponse) => void, - ): ClientUnaryCall; getConsumerGroupInfo( request: GetConsumerGroupInfoRequest, callback: (error: ServiceError | null, response: ConsumerGroupInfo) => void, @@ -1704,53 +3403,64 @@ export interface FumaroleClient extends Client { options: Partial, callback: (error: ServiceError | null, response: DeleteConsumerGroupResponse) => void, ): ClientUnaryCall; - createStaticConsumerGroup( - request: CreateStaticConsumerGroupRequest, - callback: (error: ServiceError | null, response: CreateStaticConsumerGroupResponse) => void, + createConsumerGroup( + request: CreateConsumerGroupRequest, + callback: (error: ServiceError | null, response: CreateConsumerGroupResponse) => void, ): ClientUnaryCall; - createStaticConsumerGroup( - request: CreateStaticConsumerGroupRequest, + createConsumerGroup( + request: CreateConsumerGroupRequest, metadata: Metadata, - callback: (error: ServiceError | null, response: CreateStaticConsumerGroupResponse) => void, + callback: (error: ServiceError | null, response: CreateConsumerGroupResponse) => void, ): ClientUnaryCall; - createStaticConsumerGroup( - request: 
CreateStaticConsumerGroupRequest, + createConsumerGroup( + request: CreateConsumerGroupRequest, metadata: Metadata, options: Partial, - callback: (error: ServiceError | null, response: CreateStaticConsumerGroupResponse) => void, + callback: (error: ServiceError | null, response: CreateConsumerGroupResponse) => void, ): ClientUnaryCall; - subscribe(): ClientDuplexStream; - subscribe(options: Partial): ClientDuplexStream; - subscribe(metadata: Metadata, options?: Partial): ClientDuplexStream; - getSlotLagInfo( - request: GetSlotLagInfoRequest, - callback: (error: ServiceError | null, response: GetSlotLagInfoResponse) => void, + downloadBlock(request: DownloadBlockShard, options?: Partial): ClientReadableStream; + downloadBlock( + request: DownloadBlockShard, + metadata?: Metadata, + options?: Partial, + ): ClientReadableStream; + /** Represents subscription to the data plane */ + subscribeData(): ClientDuplexStream; + subscribeData(options: Partial): ClientDuplexStream; + subscribeData(metadata: Metadata, options?: Partial): ClientDuplexStream; + getChainTip( + request: GetChainTipRequest, + callback: (error: ServiceError | null, response: GetChainTipResponse) => void, ): ClientUnaryCall; - getSlotLagInfo( - request: GetSlotLagInfoRequest, + getChainTip( + request: GetChainTipRequest, metadata: Metadata, - callback: (error: ServiceError | null, response: GetSlotLagInfoResponse) => void, + callback: (error: ServiceError | null, response: GetChainTipResponse) => void, ): ClientUnaryCall; - getSlotLagInfo( - request: GetSlotLagInfoRequest, + getChainTip( + request: GetChainTipRequest, metadata: Metadata, options: Partial, - callback: (error: ServiceError | null, response: GetSlotLagInfoResponse) => void, + callback: (error: ServiceError | null, response: GetChainTipResponse) => void, ): ClientUnaryCall; - getOldestSlot( - request: GetOldestSlotRequest, - callback: (error: ServiceError | null, response: GetOldestSlotResponse) => void, + /** Represents subscription to the 
control plane */ + subscribe(): ClientDuplexStream; + subscribe(options: Partial): ClientDuplexStream; + subscribe(metadata: Metadata, options?: Partial): ClientDuplexStream; + version( + request: VersionRequest, + callback: (error: ServiceError | null, response: VersionResponse) => void, ): ClientUnaryCall; - getOldestSlot( - request: GetOldestSlotRequest, + version( + request: VersionRequest, metadata: Metadata, - callback: (error: ServiceError | null, response: GetOldestSlotResponse) => void, + callback: (error: ServiceError | null, response: VersionResponse) => void, ): ClientUnaryCall; - getOldestSlot( - request: GetOldestSlotRequest, + version( + request: VersionRequest, metadata: Metadata, options: Partial, - callback: (error: ServiceError | null, response: GetOldestSlotResponse) => void, + callback: (error: ServiceError | null, response: VersionResponse) => void, ): ClientUnaryCall; } @@ -1760,6 +3470,31 @@ export const FumaroleClient = makeGenericClientConstructor(FumaroleService, "fum serviceName: string; }; +function bytesFromBase64(b64: string): Uint8Array { + if ((globalThis as any).Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if ((globalThis as any).Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(globalThis.String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; export type DeepPartial = T extends Builtin ? T @@ -1772,15 +3507,6 @@ type KeysOfUnion = T extends T ? keyof T : never; export type Exact = P extends Builtin ? 
P : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; -function longToString(long: Long) { - return long.toString(); -} - -if (_m0.util.Long !== Long) { - _m0.util.Long = Long as any; - _m0.configure(); -} - function isObject(value: any): boolean { return typeof value === "object" && value !== null; } @@ -1788,3 +3514,12 @@ function isObject(value: any): boolean { function isSet(value: any): boolean { return value !== null && value !== undefined; } + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create, I>>(base?: I): T; + fromPartial, I>>(object: I): T; +} diff --git a/typescript-sdk/src/grpc/geyser.ts b/typescript-sdk/src/grpc/geyser.ts index bdb1e14..6bd7f3f 100644 --- a/typescript-sdk/src/grpc/geyser.ts +++ b/typescript-sdk/src/grpc/geyser.ts @@ -1,26 +1,25 @@ // Code generated by protoc-gen-ts_proto. DO NOT EDIT. 
// versions: -// protoc-gen-ts_proto v1.181.2 -// protoc v6.30.0 +// protoc-gen-ts_proto v2.7.7 +// protoc v3.12.4 // source: geyser.proto /* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; import { type CallOptions, - ChannelCredentials, + type ChannelCredentials, Client, - ClientDuplexStream, + type ClientDuplexStream, type ClientOptions, type ClientUnaryCall, - handleBidiStreamingCall, + type handleBidiStreamingCall, type handleUnaryCall, makeGenericClientConstructor, - Metadata, + type Metadata, type ServiceError, type UntypedServiceImplementation, } from "@grpc/grpc-js"; -import Long from "long"; -import _m0 from "protobufjs/minimal"; import { Timestamp } from "./google/protobuf/timestamp"; import { BlockHeight, @@ -352,6 +351,13 @@ export interface SubscribeUpdatePong { id: number; } +export interface SubscribeReplayInfoRequest { +} + +export interface SubscribeReplayInfoResponse { + firstAvailable?: string | undefined; +} + export interface PingRequest { count: number; } @@ -419,37 +425,37 @@ function createBaseSubscribeRequest(): SubscribeRequest { }; } -export const SubscribeRequest = { - encode(message: SubscribeRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeRequest: MessageFns = { + encode(message: SubscribeRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { Object.entries(message.accounts).forEach(([key, value]) => { - SubscribeRequest_AccountsEntry.encode({ key: key as any, value }, writer.uint32(10).fork()).ldelim(); + SubscribeRequest_AccountsEntry.encode({ key: key as any, value }, writer.uint32(10).fork()).join(); }); Object.entries(message.slots).forEach(([key, value]) => { - SubscribeRequest_SlotsEntry.encode({ key: key as any, value }, writer.uint32(18).fork()).ldelim(); + SubscribeRequest_SlotsEntry.encode({ key: key as any, value }, writer.uint32(18).fork()).join(); }); Object.entries(message.transactions).forEach(([key, value]) => { - 
SubscribeRequest_TransactionsEntry.encode({ key: key as any, value }, writer.uint32(26).fork()).ldelim(); + SubscribeRequest_TransactionsEntry.encode({ key: key as any, value }, writer.uint32(26).fork()).join(); }); Object.entries(message.transactionsStatus).forEach(([key, value]) => { - SubscribeRequest_TransactionsStatusEntry.encode({ key: key as any, value }, writer.uint32(82).fork()).ldelim(); + SubscribeRequest_TransactionsStatusEntry.encode({ key: key as any, value }, writer.uint32(82).fork()).join(); }); Object.entries(message.blocks).forEach(([key, value]) => { - SubscribeRequest_BlocksEntry.encode({ key: key as any, value }, writer.uint32(34).fork()).ldelim(); + SubscribeRequest_BlocksEntry.encode({ key: key as any, value }, writer.uint32(34).fork()).join(); }); Object.entries(message.blocksMeta).forEach(([key, value]) => { - SubscribeRequest_BlocksMetaEntry.encode({ key: key as any, value }, writer.uint32(42).fork()).ldelim(); + SubscribeRequest_BlocksMetaEntry.encode({ key: key as any, value }, writer.uint32(42).fork()).join(); }); Object.entries(message.entry).forEach(([key, value]) => { - SubscribeRequest_EntryEntry.encode({ key: key as any, value }, writer.uint32(66).fork()).ldelim(); + SubscribeRequest_EntryEntry.encode({ key: key as any, value }, writer.uint32(66).fork()).join(); }); if (message.commitment !== undefined) { writer.uint32(48).int32(message.commitment); } for (const v of message.accountsDataSlice) { - SubscribeRequestAccountsDataSlice.encode(v!, writer.uint32(58).fork()).ldelim(); + SubscribeRequestAccountsDataSlice.encode(v!, writer.uint32(58).fork()).join(); } if (message.ping !== undefined) { - SubscribeRequestPing.encode(message.ping, writer.uint32(74).fork()).ldelim(); + SubscribeRequestPing.encode(message.ping, writer.uint32(74).fork()).join(); } if (message.fromSlot !== undefined) { writer.uint32(88).uint64(message.fromSlot); @@ -457,14 +463,14 @@ export const SubscribeRequest = { return writer; }, - decode(input: _m0.Reader | 
Uint8Array, length?: number): SubscribeRequest { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseSubscribeRequest(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } @@ -474,7 +480,8 @@ export const SubscribeRequest = { message.accounts[entry1.key] = entry1.value; } continue; - case 2: + } + case 2: { if (tag !== 18) { break; } @@ -484,7 +491,8 @@ export const SubscribeRequest = { message.slots[entry2.key] = entry2.value; } continue; - case 3: + } + case 3: { if (tag !== 26) { break; } @@ -494,7 +502,8 @@ export const SubscribeRequest = { message.transactions[entry3.key] = entry3.value; } continue; - case 10: + } + case 10: { if (tag !== 82) { break; } @@ -504,7 +513,8 @@ export const SubscribeRequest = { message.transactionsStatus[entry10.key] = entry10.value; } continue; - case 4: + } + case 4: { if (tag !== 34) { break; } @@ -514,7 +524,8 @@ export const SubscribeRequest = { message.blocks[entry4.key] = entry4.value; } continue; - case 5: + } + case 5: { if (tag !== 42) { break; } @@ -524,7 +535,8 @@ export const SubscribeRequest = { message.blocksMeta[entry5.key] = entry5.value; } continue; - case 8: + } + case 8: { if (tag !== 66) { break; } @@ -534,39 +546,44 @@ export const SubscribeRequest = { message.entry[entry8.key] = entry8.value; } continue; - case 6: + } + case 6: { if (tag !== 48) { break; } message.commitment = reader.int32() as any; continue; - case 7: + } + case 7: { if (tag !== 58) { break; } message.accountsDataSlice.push(SubscribeRequestAccountsDataSlice.decode(reader, reader.uint32())); continue; - case 9: + } + 
case 9: { if (tag !== 74) { break; } message.ping = SubscribeRequestPing.decode(reader, reader.uint32()); continue; - case 11: + } + case 11: { if (tag !== 88) { break; } - message.fromSlot = longToString(reader.uint64() as Long); + message.fromSlot = reader.uint64().toString(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -796,43 +813,45 @@ function createBaseSubscribeRequest_AccountsEntry(): SubscribeRequest_AccountsEn return { key: "", value: undefined }; } -export const SubscribeRequest_AccountsEntry = { - encode(message: SubscribeRequest_AccountsEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeRequest_AccountsEntry: MessageFns = { + encode(message: SubscribeRequest_AccountsEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.key !== "") { writer.uint32(10).string(message.key); } if (message.value !== undefined) { - SubscribeRequestFilterAccounts.encode(message.value, writer.uint32(18).fork()).ldelim(); + SubscribeRequestFilterAccounts.encode(message.value, writer.uint32(18).fork()).join(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeRequest_AccountsEntry { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeRequest_AccountsEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseSubscribeRequest_AccountsEntry(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.key = reader.string(); continue; - case 2: + } + case 2: { if (tag !== 18) { break; } message.value = SubscribeRequestFilterAccounts.decode(reader, reader.uint32()); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -874,43 +893,45 @@ function createBaseSubscribeRequest_SlotsEntry(): SubscribeRequest_SlotsEntry { return { key: "", value: undefined }; } -export const SubscribeRequest_SlotsEntry = { - encode(message: SubscribeRequest_SlotsEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeRequest_SlotsEntry: MessageFns = { + encode(message: SubscribeRequest_SlotsEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.key !== "") { writer.uint32(10).string(message.key); } if (message.value !== undefined) { - SubscribeRequestFilterSlots.encode(message.value, writer.uint32(18).fork()).ldelim(); + SubscribeRequestFilterSlots.encode(message.value, writer.uint32(18).fork()).join(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeRequest_SlotsEntry { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeRequest_SlotsEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseSubscribeRequest_SlotsEntry(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.key = reader.string(); continue; - case 2: + } + case 2: { if (tag !== 18) { break; } message.value = SubscribeRequestFilterSlots.decode(reader, reader.uint32()); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -950,43 +971,45 @@ function createBaseSubscribeRequest_TransactionsEntry(): SubscribeRequest_Transa return { key: "", value: undefined }; } -export const SubscribeRequest_TransactionsEntry = { - encode(message: SubscribeRequest_TransactionsEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeRequest_TransactionsEntry: MessageFns = { + encode(message: SubscribeRequest_TransactionsEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.key !== "") { writer.uint32(10).string(message.key); } if (message.value !== undefined) { - SubscribeRequestFilterTransactions.encode(message.value, writer.uint32(18).fork()).ldelim(); + SubscribeRequestFilterTransactions.encode(message.value, writer.uint32(18).fork()).join(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeRequest_TransactionsEntry { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeRequest_TransactionsEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseSubscribeRequest_TransactionsEntry(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.key = reader.string(); continue; - case 2: + } + case 2: { if (tag !== 18) { break; } message.value = SubscribeRequestFilterTransactions.decode(reader, reader.uint32()); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -1030,43 +1053,45 @@ function createBaseSubscribeRequest_TransactionsStatusEntry(): SubscribeRequest_ return { key: "", value: undefined }; } -export const SubscribeRequest_TransactionsStatusEntry = { - encode(message: SubscribeRequest_TransactionsStatusEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeRequest_TransactionsStatusEntry: MessageFns = { + encode(message: SubscribeRequest_TransactionsStatusEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.key !== "") { writer.uint32(10).string(message.key); } if (message.value !== undefined) { - SubscribeRequestFilterTransactions.encode(message.value, writer.uint32(18).fork()).ldelim(); + SubscribeRequestFilterTransactions.encode(message.value, writer.uint32(18).fork()).join(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeRequest_TransactionsStatusEntry { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeRequest_TransactionsStatusEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseSubscribeRequest_TransactionsStatusEntry(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.key = reader.string(); continue; - case 2: + } + case 2: { if (tag !== 18) { break; } message.value = SubscribeRequestFilterTransactions.decode(reader, reader.uint32()); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -1110,43 +1135,45 @@ function createBaseSubscribeRequest_BlocksEntry(): SubscribeRequest_BlocksEntry return { key: "", value: undefined }; } -export const SubscribeRequest_BlocksEntry = { - encode(message: SubscribeRequest_BlocksEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeRequest_BlocksEntry: MessageFns = { + encode(message: SubscribeRequest_BlocksEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.key !== "") { writer.uint32(10).string(message.key); } if (message.value !== undefined) { - SubscribeRequestFilterBlocks.encode(message.value, writer.uint32(18).fork()).ldelim(); + SubscribeRequestFilterBlocks.encode(message.value, writer.uint32(18).fork()).join(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeRequest_BlocksEntry { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeRequest_BlocksEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseSubscribeRequest_BlocksEntry(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.key = reader.string(); continue; - case 2: + } + case 2: { if (tag !== 18) { break; } message.value = SubscribeRequestFilterBlocks.decode(reader, reader.uint32()); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -1186,43 +1213,45 @@ function createBaseSubscribeRequest_BlocksMetaEntry(): SubscribeRequest_BlocksMe return { key: "", value: undefined }; } -export const SubscribeRequest_BlocksMetaEntry = { - encode(message: SubscribeRequest_BlocksMetaEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeRequest_BlocksMetaEntry: MessageFns = { + encode(message: SubscribeRequest_BlocksMetaEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.key !== "") { writer.uint32(10).string(message.key); } if (message.value !== undefined) { - SubscribeRequestFilterBlocksMeta.encode(message.value, writer.uint32(18).fork()).ldelim(); + SubscribeRequestFilterBlocksMeta.encode(message.value, writer.uint32(18).fork()).join(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeRequest_BlocksMetaEntry { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeRequest_BlocksMetaEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseSubscribeRequest_BlocksMetaEntry(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.key = reader.string(); continue; - case 2: + } + case 2: { if (tag !== 18) { break; } message.value = SubscribeRequestFilterBlocksMeta.decode(reader, reader.uint32()); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -1266,43 +1295,45 @@ function createBaseSubscribeRequest_EntryEntry(): SubscribeRequest_EntryEntry { return { key: "", value: undefined }; } -export const SubscribeRequest_EntryEntry = { - encode(message: SubscribeRequest_EntryEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeRequest_EntryEntry: MessageFns = { + encode(message: SubscribeRequest_EntryEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.key !== "") { writer.uint32(10).string(message.key); } if (message.value !== undefined) { - SubscribeRequestFilterEntry.encode(message.value, writer.uint32(18).fork()).ldelim(); + SubscribeRequestFilterEntry.encode(message.value, writer.uint32(18).fork()).join(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeRequest_EntryEntry { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeRequest_EntryEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseSubscribeRequest_EntryEntry(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.key = reader.string(); continue; - case 2: + } + case 2: { if (tag !== 18) { break; } message.value = SubscribeRequestFilterEntry.decode(reader, reader.uint32()); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -1342,8 +1373,8 @@ function createBaseSubscribeRequestFilterAccounts(): SubscribeRequestFilterAccou return { account: [], owner: [], filters: [], nonemptyTxnSignature: undefined }; } -export const SubscribeRequestFilterAccounts = { - encode(message: SubscribeRequestFilterAccounts, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeRequestFilterAccounts: MessageFns = { + encode(message: SubscribeRequestFilterAccounts, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { for (const v of message.account) { writer.uint32(18).string(v!); } @@ -1351,7 +1382,7 @@ export const SubscribeRequestFilterAccounts = { writer.uint32(26).string(v!); } for (const v of message.filters) { - SubscribeRequestFilterAccountsFilter.encode(v!, writer.uint32(34).fork()).ldelim(); + SubscribeRequestFilterAccountsFilter.encode(v!, writer.uint32(34).fork()).join(); } if (message.nonemptyTxnSignature !== undefined) { writer.uint32(40).bool(message.nonemptyTxnSignature); @@ -1359,46 +1390,50 @@ export const SubscribeRequestFilterAccounts = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeRequestFilterAccounts { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeRequestFilterAccounts { + const reader = input instanceof BinaryReader ? 
input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseSubscribeRequestFilterAccounts(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 2: + case 2: { if (tag !== 18) { break; } message.account.push(reader.string()); continue; - case 3: + } + case 3: { if (tag !== 26) { break; } message.owner.push(reader.string()); continue; - case 4: + } + case 4: { if (tag !== 34) { break; } message.filters.push(SubscribeRequestFilterAccountsFilter.decode(reader, reader.uint32())); continue; - case 5: + } + case 5: { if (tag !== 40) { break; } message.nonemptyTxnSignature = reader.bool(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -1452,10 +1487,10 @@ function createBaseSubscribeRequestFilterAccountsFilter(): SubscribeRequestFilte return { memcmp: undefined, datasize: undefined, tokenAccountState: undefined, lamports: undefined }; } -export const SubscribeRequestFilterAccountsFilter = { - encode(message: SubscribeRequestFilterAccountsFilter, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeRequestFilterAccountsFilter: MessageFns = { + encode(message: SubscribeRequestFilterAccountsFilter, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.memcmp !== undefined) { - SubscribeRequestFilterAccountsFilterMemcmp.encode(message.memcmp, writer.uint32(10).fork()).ldelim(); + SubscribeRequestFilterAccountsFilterMemcmp.encode(message.memcmp, writer.uint32(10).fork()).join(); } if (message.datasize !== undefined) { writer.uint32(16).uint64(message.datasize); @@ -1464,51 +1499,55 @@ export const SubscribeRequestFilterAccountsFilter = { writer.uint32(24).bool(message.tokenAccountState); } if (message.lamports !== undefined) { - SubscribeRequestFilterAccountsFilterLamports.encode(message.lamports, writer.uint32(34).fork()).ldelim(); + 
SubscribeRequestFilterAccountsFilterLamports.encode(message.lamports, writer.uint32(34).fork()).join(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeRequestFilterAccountsFilter { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeRequestFilterAccountsFilter { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseSubscribeRequestFilterAccountsFilter(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.memcmp = SubscribeRequestFilterAccountsFilterMemcmp.decode(reader, reader.uint32()); continue; - case 2: + } + case 2: { if (tag !== 16) { break; } - message.datasize = longToString(reader.uint64() as Long); + message.datasize = reader.uint64().toString(); continue; - case 3: + } + case 3: { if (tag !== 24) { break; } message.tokenAccountState = reader.bool(); continue; - case 4: + } + case 4: { if (tag !== 34) { break; } message.lamports = SubscribeRequestFilterAccountsFilterLamports.decode(reader, reader.uint32()); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -1566,8 +1605,8 @@ function createBaseSubscribeRequestFilterAccountsFilterMemcmp(): SubscribeReques return { offset: "0", bytes: undefined, base58: undefined, base64: undefined }; } -export const SubscribeRequestFilterAccountsFilterMemcmp = { - encode(message: SubscribeRequestFilterAccountsFilterMemcmp, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeRequestFilterAccountsFilterMemcmp: MessageFns = { + encode(message: SubscribeRequestFilterAccountsFilterMemcmp, writer: 
BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.offset !== "0") { writer.uint32(8).uint64(message.offset); } @@ -1583,46 +1622,50 @@ export const SubscribeRequestFilterAccountsFilterMemcmp = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeRequestFilterAccountsFilterMemcmp { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeRequestFilterAccountsFilterMemcmp { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseSubscribeRequestFilterAccountsFilterMemcmp(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } - message.offset = longToString(reader.uint64() as Long); + message.offset = reader.uint64().toString(); continue; - case 2: + } + case 2: { if (tag !== 18) { break; } message.bytes = reader.bytes(); continue; - case 3: + } + case 3: { if (tag !== 26) { break; } message.base58 = reader.string(); continue; - case 4: + } + case 4: { if (tag !== 34) { break; } message.base64 = reader.string(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -1674,8 +1717,11 @@ function createBaseSubscribeRequestFilterAccountsFilterLamports(): SubscribeRequ return { eq: undefined, ne: undefined, lt: undefined, gt: undefined }; } -export const SubscribeRequestFilterAccountsFilterLamports = { - encode(message: SubscribeRequestFilterAccountsFilterLamports, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeRequestFilterAccountsFilterLamports: MessageFns = { + encode( + message: SubscribeRequestFilterAccountsFilterLamports, + writer: BinaryWriter = new 
BinaryWriter(), + ): BinaryWriter { if (message.eq !== undefined) { writer.uint32(8).uint64(message.eq); } @@ -1691,46 +1737,50 @@ export const SubscribeRequestFilterAccountsFilterLamports = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeRequestFilterAccountsFilterLamports { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeRequestFilterAccountsFilterLamports { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseSubscribeRequestFilterAccountsFilterLamports(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } - message.eq = longToString(reader.uint64() as Long); + message.eq = reader.uint64().toString(); continue; - case 2: + } + case 2: { if (tag !== 16) { break; } - message.ne = longToString(reader.uint64() as Long); + message.ne = reader.uint64().toString(); continue; - case 3: + } + case 3: { if (tag !== 24) { break; } - message.lt = longToString(reader.uint64() as Long); + message.lt = reader.uint64().toString(); continue; - case 4: + } + case 4: { if (tag !== 32) { break; } - message.gt = longToString(reader.uint64() as Long); + message.gt = reader.uint64().toString(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -1782,8 +1832,8 @@ function createBaseSubscribeRequestFilterSlots(): SubscribeRequestFilterSlots { return { filterByCommitment: undefined, interslotUpdates: undefined }; } -export const SubscribeRequestFilterSlots = { - encode(message: SubscribeRequestFilterSlots, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const 
SubscribeRequestFilterSlots: MessageFns = { + encode(message: SubscribeRequestFilterSlots, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.filterByCommitment !== undefined) { writer.uint32(8).bool(message.filterByCommitment); } @@ -1793,32 +1843,34 @@ export const SubscribeRequestFilterSlots = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeRequestFilterSlots { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeRequestFilterSlots { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseSubscribeRequestFilterSlots(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } message.filterByCommitment = reader.bool(); continue; - case 2: + } + case 2: { if (tag !== 16) { break; } message.interslotUpdates = reader.bool(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -1863,8 +1915,8 @@ function createBaseSubscribeRequestFilterTransactions(): SubscribeRequestFilterT }; } -export const SubscribeRequestFilterTransactions = { - encode(message: SubscribeRequestFilterTransactions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeRequestFilterTransactions: MessageFns = { + encode(message: SubscribeRequestFilterTransactions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.vote !== undefined) { writer.uint32(8).bool(message.vote); } @@ -1886,60 +1938,66 @@ export const SubscribeRequestFilterTransactions = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeRequestFilterTransactions { - 
const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeRequestFilterTransactions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseSubscribeRequestFilterTransactions(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } message.vote = reader.bool(); continue; - case 2: + } + case 2: { if (tag !== 16) { break; } message.failed = reader.bool(); continue; - case 5: + } + case 5: { if (tag !== 42) { break; } message.signature = reader.string(); continue; - case 3: + } + case 3: { if (tag !== 26) { break; } message.accountInclude.push(reader.string()); continue; - case 4: + } + case 4: { if (tag !== 34) { break; } message.accountExclude.push(reader.string()); continue; - case 6: + } + case 6: { if (tag !== 50) { break; } message.accountRequired.push(reader.string()); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -2007,8 +2065,8 @@ function createBaseSubscribeRequestFilterBlocks(): SubscribeRequestFilterBlocks return { accountInclude: [], includeTransactions: undefined, includeAccounts: undefined, includeEntries: undefined }; } -export const SubscribeRequestFilterBlocks = { - encode(message: SubscribeRequestFilterBlocks, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeRequestFilterBlocks: MessageFns = { + encode(message: SubscribeRequestFilterBlocks, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { for (const v of message.accountInclude) { writer.uint32(10).string(v!); } @@ -2024,46 +2082,50 @@ export const SubscribeRequestFilterBlocks = { return writer; }, - decode(input: 
_m0.Reader | Uint8Array, length?: number): SubscribeRequestFilterBlocks { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeRequestFilterBlocks { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseSubscribeRequestFilterBlocks(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.accountInclude.push(reader.string()); continue; - case 2: + } + case 2: { if (tag !== 16) { break; } message.includeTransactions = reader.bool(); continue; - case 3: + } + case 3: { if (tag !== 24) { break; } message.includeAccounts = reader.bool(); continue; - case 4: + } + case 4: { if (tag !== 32) { break; } message.includeEntries = reader.bool(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -2115,14 +2177,14 @@ function createBaseSubscribeRequestFilterBlocksMeta(): SubscribeRequestFilterBlo return {}; } -export const SubscribeRequestFilterBlocksMeta = { - encode(_: SubscribeRequestFilterBlocksMeta, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeRequestFilterBlocksMeta: MessageFns = { + encode(_: SubscribeRequestFilterBlocksMeta, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeRequestFilterBlocksMeta { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeRequestFilterBlocksMeta { + const reader = input instanceof BinaryReader ? 
input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseSubscribeRequestFilterBlocksMeta(); while (reader.pos < end) { const tag = reader.uint32(); @@ -2131,7 +2193,7 @@ export const SubscribeRequestFilterBlocksMeta = { if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -2162,14 +2224,14 @@ function createBaseSubscribeRequestFilterEntry(): SubscribeRequestFilterEntry { return {}; } -export const SubscribeRequestFilterEntry = { - encode(_: SubscribeRequestFilterEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeRequestFilterEntry: MessageFns = { + encode(_: SubscribeRequestFilterEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeRequestFilterEntry { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeRequestFilterEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseSubscribeRequestFilterEntry(); while (reader.pos < end) { const tag = reader.uint32(); @@ -2178,7 +2240,7 @@ export const SubscribeRequestFilterEntry = { if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -2205,8 +2267,8 @@ function createBaseSubscribeRequestAccountsDataSlice(): SubscribeRequestAccounts return { offset: "0", length: "0" }; } -export const SubscribeRequestAccountsDataSlice = { - encode(message: SubscribeRequestAccountsDataSlice, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeRequestAccountsDataSlice: MessageFns = { + encode(message: SubscribeRequestAccountsDataSlice, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.offset !== "0") { writer.uint32(8).uint64(message.offset); } @@ -2216,32 +2278,34 @@ export const SubscribeRequestAccountsDataSlice = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeRequestAccountsDataSlice { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeRequestAccountsDataSlice { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseSubscribeRequestAccountsDataSlice(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } - message.offset = longToString(reader.uint64() as Long); + message.offset = reader.uint64().toString(); continue; - case 2: + } + case 2: { if (tag !== 16) { break; } - message.length = longToString(reader.uint64() as Long); + message.length = reader.uint64().toString(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -2283,33 +2347,34 @@ function createBaseSubscribeRequestPing(): SubscribeRequestPing { return { id: 0 }; } -export const SubscribeRequestPing = { - encode(message: SubscribeRequestPing, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeRequestPing: MessageFns = { + encode(message: SubscribeRequestPing, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.id !== 0) { writer.uint32(8).int32(message.id); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeRequestPing { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeRequestPing { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseSubscribeRequestPing(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } message.id = reader.int32(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -2352,133 +2417,144 @@ function createBaseSubscribeUpdate(): SubscribeUpdate { }; } -export const SubscribeUpdate = { - encode(message: SubscribeUpdate, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeUpdate: MessageFns = { + encode(message: SubscribeUpdate, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { for (const v of message.filters) { writer.uint32(10).string(v!); } if (message.account !== undefined) { - SubscribeUpdateAccount.encode(message.account, writer.uint32(18).fork()).ldelim(); + SubscribeUpdateAccount.encode(message.account, writer.uint32(18).fork()).join(); } if (message.slot !== undefined) { - SubscribeUpdateSlot.encode(message.slot, writer.uint32(26).fork()).ldelim(); + SubscribeUpdateSlot.encode(message.slot, writer.uint32(26).fork()).join(); } if (message.transaction !== undefined) { - SubscribeUpdateTransaction.encode(message.transaction, writer.uint32(34).fork()).ldelim(); + SubscribeUpdateTransaction.encode(message.transaction, writer.uint32(34).fork()).join(); } if (message.transactionStatus !== undefined) { - SubscribeUpdateTransactionStatus.encode(message.transactionStatus, writer.uint32(82).fork()).ldelim(); + SubscribeUpdateTransactionStatus.encode(message.transactionStatus, writer.uint32(82).fork()).join(); } if (message.block !== undefined) { - SubscribeUpdateBlock.encode(message.block, writer.uint32(42).fork()).ldelim(); + SubscribeUpdateBlock.encode(message.block, writer.uint32(42).fork()).join(); } if (message.ping !== undefined) { - SubscribeUpdatePing.encode(message.ping, writer.uint32(50).fork()).ldelim(); + 
SubscribeUpdatePing.encode(message.ping, writer.uint32(50).fork()).join(); } if (message.pong !== undefined) { - SubscribeUpdatePong.encode(message.pong, writer.uint32(74).fork()).ldelim(); + SubscribeUpdatePong.encode(message.pong, writer.uint32(74).fork()).join(); } if (message.blockMeta !== undefined) { - SubscribeUpdateBlockMeta.encode(message.blockMeta, writer.uint32(58).fork()).ldelim(); + SubscribeUpdateBlockMeta.encode(message.blockMeta, writer.uint32(58).fork()).join(); } if (message.entry !== undefined) { - SubscribeUpdateEntry.encode(message.entry, writer.uint32(66).fork()).ldelim(); + SubscribeUpdateEntry.encode(message.entry, writer.uint32(66).fork()).join(); } if (message.createdAt !== undefined) { - Timestamp.encode(toTimestamp(message.createdAt), writer.uint32(90).fork()).ldelim(); + Timestamp.encode(toTimestamp(message.createdAt), writer.uint32(90).fork()).join(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeUpdate { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeUpdate { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseSubscribeUpdate(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.filters.push(reader.string()); continue; - case 2: + } + case 2: { if (tag !== 18) { break; } message.account = SubscribeUpdateAccount.decode(reader, reader.uint32()); continue; - case 3: + } + case 3: { if (tag !== 26) { break; } message.slot = SubscribeUpdateSlot.decode(reader, reader.uint32()); continue; - case 4: + } + case 4: { if (tag !== 34) { break; } message.transaction = SubscribeUpdateTransaction.decode(reader, reader.uint32()); continue; - case 10: + } + case 10: { if (tag !== 82) { break; } message.transactionStatus = SubscribeUpdateTransactionStatus.decode(reader, reader.uint32()); continue; - case 5: + } + case 5: { if (tag !== 42) { break; } message.block = SubscribeUpdateBlock.decode(reader, reader.uint32()); continue; - case 6: + } + case 6: { if (tag !== 50) { break; } message.ping = SubscribeUpdatePing.decode(reader, reader.uint32()); continue; - case 9: + } + case 9: { if (tag !== 74) { break; } message.pong = SubscribeUpdatePong.decode(reader, reader.uint32()); continue; - case 7: + } + case 7: { if (tag !== 58) { break; } message.blockMeta = SubscribeUpdateBlockMeta.decode(reader, reader.uint32()); continue; - case 8: + } + case 8: { if (tag !== 66) { break; } message.entry = SubscribeUpdateEntry.decode(reader, reader.uint32()); continue; - case 11: + } + case 11: { if (tag !== 90) { break; } message.createdAt = fromTimestamp(Timestamp.decode(reader, reader.uint32())); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -2581,10 +2657,10 @@ function createBaseSubscribeUpdateAccount(): SubscribeUpdateAccount { return { account: undefined, slot: "0", isStartup: false }; } -export const SubscribeUpdateAccount = { - encode(message: 
SubscribeUpdateAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeUpdateAccount: MessageFns = { + encode(message: SubscribeUpdateAccount, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.account !== undefined) { - SubscribeUpdateAccountInfo.encode(message.account, writer.uint32(10).fork()).ldelim(); + SubscribeUpdateAccountInfo.encode(message.account, writer.uint32(10).fork()).join(); } if (message.slot !== "0") { writer.uint32(16).uint64(message.slot); @@ -2595,39 +2671,42 @@ export const SubscribeUpdateAccount = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeUpdateAccount { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeUpdateAccount { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseSubscribeUpdateAccount(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.account = SubscribeUpdateAccountInfo.decode(reader, reader.uint32()); continue; - case 2: + } + case 2: { if (tag !== 16) { break; } - message.slot = longToString(reader.uint64() as Long); + message.slot = reader.uint64().toString(); continue; - case 3: + } + case 3: { if (tag !== 24) { break; } message.isStartup = reader.bool(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -2681,8 +2760,8 @@ function createBaseSubscribeUpdateAccountInfo(): SubscribeUpdateAccountInfo { }; } -export const SubscribeUpdateAccountInfo = { - encode(message: SubscribeUpdateAccountInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeUpdateAccountInfo: MessageFns = { + encode(message: SubscribeUpdateAccountInfo, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.pubkey.length !== 0) { writer.uint32(10).bytes(message.pubkey); } @@ -2710,74 +2789,82 @@ export const SubscribeUpdateAccountInfo = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeUpdateAccountInfo { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeUpdateAccountInfo { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseSubscribeUpdateAccountInfo(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.pubkey = reader.bytes(); continue; - case 2: + } + case 2: { if (tag !== 16) { break; } - message.lamports = longToString(reader.uint64() as Long); + message.lamports = reader.uint64().toString(); continue; - case 3: + } + case 3: { if (tag !== 26) { break; } message.owner = reader.bytes(); continue; - case 4: + } + case 4: { if (tag !== 32) { break; } message.executable = reader.bool(); continue; - case 5: + } + case 5: { if (tag !== 40) { break; } - message.rentEpoch = longToString(reader.uint64() as Long); + message.rentEpoch = reader.uint64().toString(); continue; - case 6: + } + case 6: { if (tag !== 50) { break; } message.data = reader.bytes(); continue; - case 7: + } + case 7: { if (tag !== 56) { break; } - message.writeVersion = longToString(reader.uint64() as Long); + message.writeVersion = reader.uint64().toString(); continue; - case 8: + } + case 8: { if (tag !== 66) { break; } message.txnSignature = reader.bytes(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -2845,8 +2932,8 @@ function createBaseSubscribeUpdateSlot(): SubscribeUpdateSlot { return { slot: "0", parent: undefined, status: 0, deadError: undefined }; } -export const SubscribeUpdateSlot = { - encode(message: SubscribeUpdateSlot, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeUpdateSlot: MessageFns = { + encode(message: SubscribeUpdateSlot, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.slot !== "0") { writer.uint32(8).uint64(message.slot); } @@ -2862,46 +2949,50 @@ export const SubscribeUpdateSlot = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeUpdateSlot { - const reader = input 
instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeUpdateSlot { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseSubscribeUpdateSlot(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } - message.slot = longToString(reader.uint64() as Long); + message.slot = reader.uint64().toString(); continue; - case 2: + } + case 2: { if (tag !== 16) { break; } - message.parent = longToString(reader.uint64() as Long); + message.parent = reader.uint64().toString(); continue; - case 3: + } + case 3: { if (tag !== 24) { break; } message.status = reader.int32() as any; continue; - case 4: + } + case 4: { if (tag !== 34) { break; } message.deadError = reader.string(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -2949,10 +3040,10 @@ function createBaseSubscribeUpdateTransaction(): SubscribeUpdateTransaction { return { transaction: undefined, slot: "0" }; } -export const SubscribeUpdateTransaction = { - encode(message: SubscribeUpdateTransaction, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeUpdateTransaction: MessageFns = { + encode(message: SubscribeUpdateTransaction, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.transaction !== undefined) { - SubscribeUpdateTransactionInfo.encode(message.transaction, writer.uint32(10).fork()).ldelim(); + SubscribeUpdateTransactionInfo.encode(message.transaction, writer.uint32(10).fork()).join(); } if (message.slot !== "0") { writer.uint32(16).uint64(message.slot); @@ -2960,32 +3051,34 @@ export const SubscribeUpdateTransaction = { return writer; }, - 
decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeUpdateTransaction { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeUpdateTransaction { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseSubscribeUpdateTransaction(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.transaction = SubscribeUpdateTransactionInfo.decode(reader, reader.uint32()); continue; - case 2: + } + case 2: { if (tag !== 16) { break; } - message.slot = longToString(reader.uint64() as Long); + message.slot = reader.uint64().toString(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -3025,8 +3118,8 @@ function createBaseSubscribeUpdateTransactionInfo(): SubscribeUpdateTransactionI return { signature: new Uint8Array(0), isVote: false, transaction: undefined, meta: undefined, index: "0" }; } -export const SubscribeUpdateTransactionInfo = { - encode(message: SubscribeUpdateTransactionInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeUpdateTransactionInfo: MessageFns = { + encode(message: SubscribeUpdateTransactionInfo, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.signature.length !== 0) { writer.uint32(10).bytes(message.signature); } @@ -3034,10 +3127,10 @@ export const SubscribeUpdateTransactionInfo = { writer.uint32(16).bool(message.isVote); } if (message.transaction !== undefined) { - Transaction.encode(message.transaction, writer.uint32(26).fork()).ldelim(); + Transaction.encode(message.transaction, writer.uint32(26).fork()).join(); } if (message.meta !== 
undefined) { - TransactionStatusMeta.encode(message.meta, writer.uint32(34).fork()).ldelim(); + TransactionStatusMeta.encode(message.meta, writer.uint32(34).fork()).join(); } if (message.index !== "0") { writer.uint32(40).uint64(message.index); @@ -3045,53 +3138,58 @@ export const SubscribeUpdateTransactionInfo = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeUpdateTransactionInfo { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeUpdateTransactionInfo { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseSubscribeUpdateTransactionInfo(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.signature = reader.bytes(); continue; - case 2: + } + case 2: { if (tag !== 16) { break; } message.isVote = reader.bool(); continue; - case 3: + } + case 3: { if (tag !== 26) { break; } message.transaction = Transaction.decode(reader, reader.uint32()); continue; - case 4: + } + case 4: { if (tag !== 34) { break; } message.meta = TransactionStatusMeta.decode(reader, reader.uint32()); continue; - case 5: + } + case 5: { if (tag !== 40) { break; } - message.index = longToString(reader.uint64() as Long); + message.index = reader.uint64().toString(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -3150,8 +3248,8 @@ function createBaseSubscribeUpdateTransactionStatus(): SubscribeUpdateTransactio return { slot: "0", signature: new Uint8Array(0), isVote: false, index: "0", err: undefined }; } -export const SubscribeUpdateTransactionStatus = { - encode(message: 
SubscribeUpdateTransactionStatus, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeUpdateTransactionStatus: MessageFns = { + encode(message: SubscribeUpdateTransactionStatus, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.slot !== "0") { writer.uint32(8).uint64(message.slot); } @@ -3165,58 +3263,63 @@ export const SubscribeUpdateTransactionStatus = { writer.uint32(32).uint64(message.index); } if (message.err !== undefined) { - TransactionError.encode(message.err, writer.uint32(42).fork()).ldelim(); + TransactionError.encode(message.err, writer.uint32(42).fork()).join(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeUpdateTransactionStatus { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeUpdateTransactionStatus { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseSubscribeUpdateTransactionStatus(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } - message.slot = longToString(reader.uint64() as Long); + message.slot = reader.uint64().toString(); continue; - case 2: + } + case 2: { if (tag !== 18) { break; } message.signature = reader.bytes(); continue; - case 3: + } + case 3: { if (tag !== 24) { break; } message.isVote = reader.bool(); continue; - case 4: + } + case 4: { if (tag !== 32) { break; } - message.index = longToString(reader.uint64() as Long); + message.index = reader.uint64().toString(); continue; - case 5: + } + case 5: { if (tag !== 42) { break; } message.err = TransactionError.decode(reader, reader.uint32()); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -3289,8 +3392,8 @@ function createBaseSubscribeUpdateBlock(): SubscribeUpdateBlock { }; } -export const SubscribeUpdateBlock = { - encode(message: SubscribeUpdateBlock, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeUpdateBlock: MessageFns = { + encode(message: SubscribeUpdateBlock, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.slot !== "0") { writer.uint32(8).uint64(message.slot); } @@ -3298,13 +3401,13 @@ export const SubscribeUpdateBlock = { writer.uint32(18).string(message.blockhash); } if (message.rewards !== undefined) { - Rewards.encode(message.rewards, writer.uint32(26).fork()).ldelim(); + Rewards.encode(message.rewards, writer.uint32(26).fork()).join(); } if (message.blockTime !== undefined) { - UnixTimestamp.encode(message.blockTime, writer.uint32(34).fork()).ldelim(); + UnixTimestamp.encode(message.blockTime, writer.uint32(34).fork()).join(); } if (message.blockHeight !== undefined) { - BlockHeight.encode(message.blockHeight, writer.uint32(42).fork()).ldelim(); + 
BlockHeight.encode(message.blockHeight, writer.uint32(42).fork()).join(); } if (message.parentSlot !== "0") { writer.uint32(56).uint64(message.parentSlot); @@ -3316,126 +3419,139 @@ export const SubscribeUpdateBlock = { writer.uint32(72).uint64(message.executedTransactionCount); } for (const v of message.transactions) { - SubscribeUpdateTransactionInfo.encode(v!, writer.uint32(50).fork()).ldelim(); + SubscribeUpdateTransactionInfo.encode(v!, writer.uint32(50).fork()).join(); } if (message.updatedAccountCount !== "0") { writer.uint32(80).uint64(message.updatedAccountCount); } for (const v of message.accounts) { - SubscribeUpdateAccountInfo.encode(v!, writer.uint32(90).fork()).ldelim(); + SubscribeUpdateAccountInfo.encode(v!, writer.uint32(90).fork()).join(); } if (message.entriesCount !== "0") { writer.uint32(96).uint64(message.entriesCount); } for (const v of message.entries) { - SubscribeUpdateEntry.encode(v!, writer.uint32(106).fork()).ldelim(); + SubscribeUpdateEntry.encode(v!, writer.uint32(106).fork()).join(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeUpdateBlock { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeUpdateBlock { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseSubscribeUpdateBlock(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } - message.slot = longToString(reader.uint64() as Long); + message.slot = reader.uint64().toString(); continue; - case 2: + } + case 2: { if (tag !== 18) { break; } message.blockhash = reader.string(); continue; - case 3: + } + case 3: { if (tag !== 26) { break; } message.rewards = Rewards.decode(reader, reader.uint32()); continue; - case 4: + } + case 4: { if (tag !== 34) { break; } message.blockTime = UnixTimestamp.decode(reader, reader.uint32()); continue; - case 5: + } + case 5: { if (tag !== 42) { break; } message.blockHeight = BlockHeight.decode(reader, reader.uint32()); continue; - case 7: + } + case 7: { if (tag !== 56) { break; } - message.parentSlot = longToString(reader.uint64() as Long); + message.parentSlot = reader.uint64().toString(); continue; - case 8: + } + case 8: { if (tag !== 66) { break; } message.parentBlockhash = reader.string(); continue; - case 9: + } + case 9: { if (tag !== 72) { break; } - message.executedTransactionCount = longToString(reader.uint64() as Long); + message.executedTransactionCount = reader.uint64().toString(); continue; - case 6: + } + case 6: { if (tag !== 50) { break; } message.transactions.push(SubscribeUpdateTransactionInfo.decode(reader, reader.uint32())); continue; - case 10: + } + case 10: { if (tag !== 80) { break; } - message.updatedAccountCount = longToString(reader.uint64() as Long); + message.updatedAccountCount = reader.uint64().toString(); continue; - case 11: + } + case 11: { if (tag !== 90) { break; } message.accounts.push(SubscribeUpdateAccountInfo.decode(reader, reader.uint32())); continue; - case 12: + } + case 12: { if (tag !== 96) { break; } - message.entriesCount = longToString(reader.uint64() as Long); + message.entriesCount = reader.uint64().toString(); continue; - case 13: + } + case 13: { if 
(tag !== 106) { break; } message.entries.push(SubscribeUpdateEntry.decode(reader, reader.uint32())); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -3552,8 +3668,8 @@ function createBaseSubscribeUpdateBlockMeta(): SubscribeUpdateBlockMeta { }; } -export const SubscribeUpdateBlockMeta = { - encode(message: SubscribeUpdateBlockMeta, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeUpdateBlockMeta: MessageFns = { + encode(message: SubscribeUpdateBlockMeta, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.slot !== "0") { writer.uint32(8).uint64(message.slot); } @@ -3561,13 +3677,13 @@ export const SubscribeUpdateBlockMeta = { writer.uint32(18).string(message.blockhash); } if (message.rewards !== undefined) { - Rewards.encode(message.rewards, writer.uint32(26).fork()).ldelim(); + Rewards.encode(message.rewards, writer.uint32(26).fork()).join(); } if (message.blockTime !== undefined) { - UnixTimestamp.encode(message.blockTime, writer.uint32(34).fork()).ldelim(); + UnixTimestamp.encode(message.blockTime, writer.uint32(34).fork()).join(); } if (message.blockHeight !== undefined) { - BlockHeight.encode(message.blockHeight, writer.uint32(42).fork()).ldelim(); + BlockHeight.encode(message.blockHeight, writer.uint32(42).fork()).join(); } if (message.parentSlot !== "0") { writer.uint32(48).uint64(message.parentSlot); @@ -3584,81 +3700,90 @@ export const SubscribeUpdateBlockMeta = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeUpdateBlockMeta { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeUpdateBlockMeta { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseSubscribeUpdateBlockMeta(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } - message.slot = longToString(reader.uint64() as Long); + message.slot = reader.uint64().toString(); continue; - case 2: + } + case 2: { if (tag !== 18) { break; } message.blockhash = reader.string(); continue; - case 3: + } + case 3: { if (tag !== 26) { break; } message.rewards = Rewards.decode(reader, reader.uint32()); continue; - case 4: + } + case 4: { if (tag !== 34) { break; } message.blockTime = UnixTimestamp.decode(reader, reader.uint32()); continue; - case 5: + } + case 5: { if (tag !== 42) { break; } message.blockHeight = BlockHeight.decode(reader, reader.uint32()); continue; - case 6: + } + case 6: { if (tag !== 48) { break; } - message.parentSlot = longToString(reader.uint64() as Long); + message.parentSlot = reader.uint64().toString(); continue; - case 7: + } + case 7: { if (tag !== 58) { break; } message.parentBlockhash = reader.string(); continue; - case 8: + } + case 8: { if (tag !== 64) { break; } - message.executedTransactionCount = longToString(reader.uint64() as Long); + message.executedTransactionCount = reader.uint64().toString(); continue; - case 9: + } + case 9: { if (tag !== 72) { break; } - message.entriesCount = longToString(reader.uint64() as Long); + message.entriesCount = reader.uint64().toString(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -3746,8 +3871,8 @@ function createBaseSubscribeUpdateEntry(): SubscribeUpdateEntry { }; } -export const SubscribeUpdateEntry = { - encode(message: SubscribeUpdateEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeUpdateEntry: MessageFns = { + encode(message: SubscribeUpdateEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.slot !== "0") 
{ writer.uint32(8).uint64(message.slot); } @@ -3769,60 +3894,66 @@ export const SubscribeUpdateEntry = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeUpdateEntry { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeUpdateEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseSubscribeUpdateEntry(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } - message.slot = longToString(reader.uint64() as Long); + message.slot = reader.uint64().toString(); continue; - case 2: + } + case 2: { if (tag !== 16) { break; } - message.index = longToString(reader.uint64() as Long); + message.index = reader.uint64().toString(); continue; - case 3: + } + case 3: { if (tag !== 24) { break; } - message.numHashes = longToString(reader.uint64() as Long); + message.numHashes = reader.uint64().toString(); continue; - case 4: + } + case 4: { if (tag !== 34) { break; } message.hash = reader.bytes(); continue; - case 5: + } + case 5: { if (tag !== 40) { break; } - message.executedTransactionCount = longToString(reader.uint64() as Long); + message.executedTransactionCount = reader.uint64().toString(); continue; - case 6: + } + case 6: { if (tag !== 48) { break; } - message.startingTransactionIndex = longToString(reader.uint64() as Long); + message.startingTransactionIndex = reader.uint64().toString(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -3884,14 +4015,14 @@ function createBaseSubscribeUpdatePing(): SubscribeUpdatePing { return {}; } -export const SubscribeUpdatePing = { - encode(_: 
SubscribeUpdatePing, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeUpdatePing: MessageFns = { + encode(_: SubscribeUpdatePing, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeUpdatePing { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeUpdatePing { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseSubscribeUpdatePing(); while (reader.pos < end) { const tag = reader.uint32(); @@ -3900,7 +4031,7 @@ export const SubscribeUpdatePing = { if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -3927,33 +4058,34 @@ function createBaseSubscribeUpdatePong(): SubscribeUpdatePong { return { id: 0 }; } -export const SubscribeUpdatePong = { - encode(message: SubscribeUpdatePong, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const SubscribeUpdatePong: MessageFns = { + encode(message: SubscribeUpdatePong, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.id !== 0) { writer.uint32(8).int32(message.id); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SubscribeUpdatePong { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeUpdatePong { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseSubscribeUpdatePong(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } message.id = reader.int32(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -3980,37 +4112,139 @@ export const SubscribeUpdatePong = { }, }; +function createBaseSubscribeReplayInfoRequest(): SubscribeReplayInfoRequest { + return {}; +} + +export const SubscribeReplayInfoRequest: MessageFns = { + encode(_: SubscribeReplayInfoRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeReplayInfoRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSubscribeReplayInfoRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(_: any): SubscribeReplayInfoRequest { + return {}; + }, + + toJSON(_: SubscribeReplayInfoRequest): unknown { + const obj: any = {}; + return obj; + }, + + create, I>>(base?: I): SubscribeReplayInfoRequest { + return SubscribeReplayInfoRequest.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(_: I): SubscribeReplayInfoRequest { + const message = createBaseSubscribeReplayInfoRequest(); + return message; + }, +}; + +function createBaseSubscribeReplayInfoResponse(): SubscribeReplayInfoResponse { + return { firstAvailable: undefined }; +} + +export const SubscribeReplayInfoResponse: MessageFns = { + encode(message: SubscribeReplayInfoResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.firstAvailable !== undefined) { + writer.uint32(8).uint64(message.firstAvailable); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): SubscribeReplayInfoResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSubscribeReplayInfoResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.firstAvailable = reader.uint64().toString(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): SubscribeReplayInfoResponse { + return { firstAvailable: isSet(object.firstAvailable) ? globalThis.String(object.firstAvailable) : undefined }; + }, + + toJSON(message: SubscribeReplayInfoResponse): unknown { + const obj: any = {}; + if (message.firstAvailable !== undefined) { + obj.firstAvailable = message.firstAvailable; + } + return obj; + }, + + create, I>>(base?: I): SubscribeReplayInfoResponse { + return SubscribeReplayInfoResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): SubscribeReplayInfoResponse { + const message = createBaseSubscribeReplayInfoResponse(); + message.firstAvailable = object.firstAvailable ?? 
undefined; + return message; + }, +}; + function createBasePingRequest(): PingRequest { return { count: 0 }; } -export const PingRequest = { - encode(message: PingRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const PingRequest: MessageFns = { + encode(message: PingRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.count !== 0) { writer.uint32(8).int32(message.count); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): PingRequest { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): PingRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBasePingRequest(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } message.count = reader.int32(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -4041,33 +4275,34 @@ function createBasePongResponse(): PongResponse { return { count: 0 }; } -export const PongResponse = { - encode(message: PongResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const PongResponse: MessageFns = { + encode(message: PongResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.count !== 0) { writer.uint32(8).int32(message.count); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): PongResponse { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? 
reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): PongResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBasePongResponse(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } message.count = reader.int32(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -4098,33 +4333,34 @@ function createBaseGetLatestBlockhashRequest(): GetLatestBlockhashRequest { return { commitment: undefined }; } -export const GetLatestBlockhashRequest = { - encode(message: GetLatestBlockhashRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const GetLatestBlockhashRequest: MessageFns = { + encode(message: GetLatestBlockhashRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.commitment !== undefined) { writer.uint32(8).int32(message.commitment); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): GetLatestBlockhashRequest { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): GetLatestBlockhashRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseGetLatestBlockhashRequest(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } message.commitment = reader.int32() as any; continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -4155,8 +4391,8 @@ function createBaseGetLatestBlockhashResponse(): GetLatestBlockhashResponse { return { slot: "0", blockhash: "", lastValidBlockHeight: "0" }; } -export const GetLatestBlockhashResponse = { - encode(message: GetLatestBlockhashResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const GetLatestBlockhashResponse: MessageFns = { + encode(message: GetLatestBlockhashResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.slot !== "0") { writer.uint32(8).uint64(message.slot); } @@ -4169,39 +4405,42 @@ export const GetLatestBlockhashResponse = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): GetLatestBlockhashResponse { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): GetLatestBlockhashResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseGetLatestBlockhashResponse(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } - message.slot = longToString(reader.uint64() as Long); + message.slot = reader.uint64().toString(); continue; - case 2: + } + case 2: { if (tag !== 18) { break; } message.blockhash = reader.string(); continue; - case 3: + } + case 3: { if (tag !== 24) { break; } - message.lastValidBlockHeight = longToString(reader.uint64() as Long); + message.lastValidBlockHeight = reader.uint64().toString(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -4244,33 +4483,34 @@ function createBaseGetBlockHeightRequest(): GetBlockHeightRequest { return { commitment: undefined }; } -export const GetBlockHeightRequest = { - encode(message: GetBlockHeightRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const GetBlockHeightRequest: MessageFns = { + encode(message: GetBlockHeightRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.commitment !== undefined) { writer.uint32(8).int32(message.commitment); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): GetBlockHeightRequest { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): GetBlockHeightRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseGetBlockHeightRequest(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } message.commitment = reader.int32() as any; continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -4301,33 +4541,34 @@ function createBaseGetBlockHeightResponse(): GetBlockHeightResponse { return { blockHeight: "0" }; } -export const GetBlockHeightResponse = { - encode(message: GetBlockHeightResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const GetBlockHeightResponse: MessageFns = { + encode(message: GetBlockHeightResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.blockHeight !== "0") { writer.uint32(8).uint64(message.blockHeight); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): GetBlockHeightResponse { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): GetBlockHeightResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseGetBlockHeightResponse(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } - message.blockHeight = longToString(reader.uint64() as Long); + message.blockHeight = reader.uint64().toString(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -4358,33 +4599,34 @@ function createBaseGetSlotRequest(): GetSlotRequest { return { commitment: undefined }; } -export const GetSlotRequest = { - encode(message: GetSlotRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const GetSlotRequest: MessageFns = { + encode(message: GetSlotRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.commitment !== undefined) { writer.uint32(8).int32(message.commitment); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): GetSlotRequest { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): GetSlotRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseGetSlotRequest(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } message.commitment = reader.int32() as any; continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -4415,33 +4657,34 @@ function createBaseGetSlotResponse(): GetSlotResponse { return { slot: "0" }; } -export const GetSlotResponse = { - encode(message: GetSlotResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const GetSlotResponse: MessageFns = { + encode(message: GetSlotResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.slot !== "0") { writer.uint32(8).uint64(message.slot); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): GetSlotResponse { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): GetSlotResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseGetSlotResponse(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } - message.slot = longToString(reader.uint64() as Long); + message.slot = reader.uint64().toString(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -4472,14 +4715,14 @@ function createBaseGetVersionRequest(): GetVersionRequest { return {}; } -export const GetVersionRequest = { - encode(_: GetVersionRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const GetVersionRequest: MessageFns = { + encode(_: GetVersionRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): GetVersionRequest { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): GetVersionRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseGetVersionRequest(); while (reader.pos < end) { const tag = reader.uint32(); @@ -4488,7 +4731,7 @@ export const GetVersionRequest = { if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -4515,33 +4758,34 @@ function createBaseGetVersionResponse(): GetVersionResponse { return { version: "" }; } -export const GetVersionResponse = { - encode(message: GetVersionResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const GetVersionResponse: MessageFns = { + encode(message: GetVersionResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.version !== "") { writer.uint32(10).string(message.version); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): GetVersionResponse { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): GetVersionResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseGetVersionResponse(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.version = reader.string(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -4572,8 +4816,8 @@ function createBaseIsBlockhashValidRequest(): IsBlockhashValidRequest { return { blockhash: "", commitment: undefined }; } -export const IsBlockhashValidRequest = { - encode(message: IsBlockhashValidRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const IsBlockhashValidRequest: MessageFns = { + encode(message: IsBlockhashValidRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.blockhash !== "") { writer.uint32(10).string(message.blockhash); } @@ -4583,32 +4827,34 @@ export const IsBlockhashValidRequest = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): IsBlockhashValidRequest { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): IsBlockhashValidRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseIsBlockhashValidRequest(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.blockhash = reader.string(); continue; - case 2: + } + case 2: { if (tag !== 16) { break; } message.commitment = reader.int32() as any; continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -4646,8 +4892,8 @@ function createBaseIsBlockhashValidResponse(): IsBlockhashValidResponse { return { slot: "0", valid: false }; } -export const IsBlockhashValidResponse = { - encode(message: IsBlockhashValidResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const IsBlockhashValidResponse: MessageFns = { + encode(message: IsBlockhashValidResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.slot !== "0") { writer.uint32(8).uint64(message.slot); } @@ -4657,32 +4903,34 @@ export const IsBlockhashValidResponse = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): IsBlockhashValidResponse { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): IsBlockhashValidResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseIsBlockhashValidResponse(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } - message.slot = longToString(reader.uint64() as Long); + message.slot = reader.uint64().toString(); continue; - case 2: + } + case 2: { if (tag !== 16) { break; } message.valid = reader.bool(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -4722,72 +4970,87 @@ export const GeyserService = { path: "/geyser.Geyser/Subscribe", requestStream: true, responseStream: true, - requestSerialize: (value: SubscribeRequest) => Buffer.from(SubscribeRequest.encode(value).finish()), - requestDeserialize: (value: Buffer) => SubscribeRequest.decode(value), - responseSerialize: (value: SubscribeUpdate) => Buffer.from(SubscribeUpdate.encode(value).finish()), - responseDeserialize: (value: Buffer) => SubscribeUpdate.decode(value), + requestSerialize: (value: SubscribeRequest): Buffer => Buffer.from(SubscribeRequest.encode(value).finish()), + requestDeserialize: (value: Buffer): SubscribeRequest => SubscribeRequest.decode(value), + responseSerialize: (value: SubscribeUpdate): Buffer => Buffer.from(SubscribeUpdate.encode(value).finish()), + responseDeserialize: (value: Buffer): SubscribeUpdate => SubscribeUpdate.decode(value), + }, + subscribeReplayInfo: { + path: "/geyser.Geyser/SubscribeReplayInfo", + requestStream: false, + responseStream: false, + requestSerialize: (value: SubscribeReplayInfoRequest): Buffer => + Buffer.from(SubscribeReplayInfoRequest.encode(value).finish()), + requestDeserialize: (value: Buffer): SubscribeReplayInfoRequest => SubscribeReplayInfoRequest.decode(value), + responseSerialize: (value: SubscribeReplayInfoResponse): Buffer => + Buffer.from(SubscribeReplayInfoResponse.encode(value).finish()), + responseDeserialize: (value: Buffer): SubscribeReplayInfoResponse 
=> SubscribeReplayInfoResponse.decode(value), }, ping: { path: "/geyser.Geyser/Ping", requestStream: false, responseStream: false, - requestSerialize: (value: PingRequest) => Buffer.from(PingRequest.encode(value).finish()), - requestDeserialize: (value: Buffer) => PingRequest.decode(value), - responseSerialize: (value: PongResponse) => Buffer.from(PongResponse.encode(value).finish()), - responseDeserialize: (value: Buffer) => PongResponse.decode(value), + requestSerialize: (value: PingRequest): Buffer => Buffer.from(PingRequest.encode(value).finish()), + requestDeserialize: (value: Buffer): PingRequest => PingRequest.decode(value), + responseSerialize: (value: PongResponse): Buffer => Buffer.from(PongResponse.encode(value).finish()), + responseDeserialize: (value: Buffer): PongResponse => PongResponse.decode(value), }, getLatestBlockhash: { path: "/geyser.Geyser/GetLatestBlockhash", requestStream: false, responseStream: false, - requestSerialize: (value: GetLatestBlockhashRequest) => + requestSerialize: (value: GetLatestBlockhashRequest): Buffer => Buffer.from(GetLatestBlockhashRequest.encode(value).finish()), - requestDeserialize: (value: Buffer) => GetLatestBlockhashRequest.decode(value), - responseSerialize: (value: GetLatestBlockhashResponse) => + requestDeserialize: (value: Buffer): GetLatestBlockhashRequest => GetLatestBlockhashRequest.decode(value), + responseSerialize: (value: GetLatestBlockhashResponse): Buffer => Buffer.from(GetLatestBlockhashResponse.encode(value).finish()), - responseDeserialize: (value: Buffer) => GetLatestBlockhashResponse.decode(value), + responseDeserialize: (value: Buffer): GetLatestBlockhashResponse => GetLatestBlockhashResponse.decode(value), }, getBlockHeight: { path: "/geyser.Geyser/GetBlockHeight", requestStream: false, responseStream: false, - requestSerialize: (value: GetBlockHeightRequest) => Buffer.from(GetBlockHeightRequest.encode(value).finish()), - requestDeserialize: (value: Buffer) => 
GetBlockHeightRequest.decode(value), - responseSerialize: (value: GetBlockHeightResponse) => Buffer.from(GetBlockHeightResponse.encode(value).finish()), - responseDeserialize: (value: Buffer) => GetBlockHeightResponse.decode(value), + requestSerialize: (value: GetBlockHeightRequest): Buffer => + Buffer.from(GetBlockHeightRequest.encode(value).finish()), + requestDeserialize: (value: Buffer): GetBlockHeightRequest => GetBlockHeightRequest.decode(value), + responseSerialize: (value: GetBlockHeightResponse): Buffer => + Buffer.from(GetBlockHeightResponse.encode(value).finish()), + responseDeserialize: (value: Buffer): GetBlockHeightResponse => GetBlockHeightResponse.decode(value), }, getSlot: { path: "/geyser.Geyser/GetSlot", requestStream: false, responseStream: false, - requestSerialize: (value: GetSlotRequest) => Buffer.from(GetSlotRequest.encode(value).finish()), - requestDeserialize: (value: Buffer) => GetSlotRequest.decode(value), - responseSerialize: (value: GetSlotResponse) => Buffer.from(GetSlotResponse.encode(value).finish()), - responseDeserialize: (value: Buffer) => GetSlotResponse.decode(value), + requestSerialize: (value: GetSlotRequest): Buffer => Buffer.from(GetSlotRequest.encode(value).finish()), + requestDeserialize: (value: Buffer): GetSlotRequest => GetSlotRequest.decode(value), + responseSerialize: (value: GetSlotResponse): Buffer => Buffer.from(GetSlotResponse.encode(value).finish()), + responseDeserialize: (value: Buffer): GetSlotResponse => GetSlotResponse.decode(value), }, isBlockhashValid: { path: "/geyser.Geyser/IsBlockhashValid", requestStream: false, responseStream: false, - requestSerialize: (value: IsBlockhashValidRequest) => Buffer.from(IsBlockhashValidRequest.encode(value).finish()), - requestDeserialize: (value: Buffer) => IsBlockhashValidRequest.decode(value), - responseSerialize: (value: IsBlockhashValidResponse) => + requestSerialize: (value: IsBlockhashValidRequest): Buffer => + 
Buffer.from(IsBlockhashValidRequest.encode(value).finish()), + requestDeserialize: (value: Buffer): IsBlockhashValidRequest => IsBlockhashValidRequest.decode(value), + responseSerialize: (value: IsBlockhashValidResponse): Buffer => Buffer.from(IsBlockhashValidResponse.encode(value).finish()), - responseDeserialize: (value: Buffer) => IsBlockhashValidResponse.decode(value), + responseDeserialize: (value: Buffer): IsBlockhashValidResponse => IsBlockhashValidResponse.decode(value), }, getVersion: { path: "/geyser.Geyser/GetVersion", requestStream: false, responseStream: false, - requestSerialize: (value: GetVersionRequest) => Buffer.from(GetVersionRequest.encode(value).finish()), - requestDeserialize: (value: Buffer) => GetVersionRequest.decode(value), - responseSerialize: (value: GetVersionResponse) => Buffer.from(GetVersionResponse.encode(value).finish()), - responseDeserialize: (value: Buffer) => GetVersionResponse.decode(value), + requestSerialize: (value: GetVersionRequest): Buffer => Buffer.from(GetVersionRequest.encode(value).finish()), + requestDeserialize: (value: Buffer): GetVersionRequest => GetVersionRequest.decode(value), + responseSerialize: (value: GetVersionResponse): Buffer => Buffer.from(GetVersionResponse.encode(value).finish()), + responseDeserialize: (value: Buffer): GetVersionResponse => GetVersionResponse.decode(value), }, } as const; export interface GeyserServer extends UntypedServiceImplementation { subscribe: handleBidiStreamingCall; + subscribeReplayInfo: handleUnaryCall; ping: handleUnaryCall; getLatestBlockhash: handleUnaryCall; getBlockHeight: handleUnaryCall; @@ -4800,6 +5063,21 @@ export interface GeyserClient extends Client { subscribe(): ClientDuplexStream; subscribe(options: Partial): ClientDuplexStream; subscribe(metadata: Metadata, options?: Partial): ClientDuplexStream; + subscribeReplayInfo( + request: SubscribeReplayInfoRequest, + callback: (error: ServiceError | null, response: SubscribeReplayInfoResponse) => void, + ): 
ClientUnaryCall; + subscribeReplayInfo( + request: SubscribeReplayInfoRequest, + metadata: Metadata, + callback: (error: ServiceError | null, response: SubscribeReplayInfoResponse) => void, + ): ClientUnaryCall; + subscribeReplayInfo( + request: SubscribeReplayInfoRequest, + metadata: Metadata, + options: Partial, + callback: (error: ServiceError | null, response: SubscribeReplayInfoResponse) => void, + ): ClientUnaryCall; ping(request: PingRequest, callback: (error: ServiceError | null, response: PongResponse) => void): ClientUnaryCall; ping( request: PingRequest, @@ -4954,15 +5232,6 @@ function fromJsonTimestamp(o: any): Date { } } -function longToString(long: Long) { - return long.toString(); -} - -if (_m0.util.Long !== Long) { - _m0.util.Long = Long as any; - _m0.configure(); -} - function isObject(value: any): boolean { return typeof value === "object" && value !== null; } @@ -4970,3 +5239,12 @@ function isObject(value: any): boolean { function isSet(value: any): boolean { return value !== null && value !== undefined; } + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create, I>>(base?: I): T; + fromPartial, I>>(object: I): T; +} diff --git a/typescript-sdk/src/grpc/google/protobuf/timestamp.ts b/typescript-sdk/src/grpc/google/protobuf/timestamp.ts index b40f088..e116ef7 100644 --- a/typescript-sdk/src/grpc/google/protobuf/timestamp.ts +++ b/typescript-sdk/src/grpc/google/protobuf/timestamp.ts @@ -1,12 +1,11 @@ // Code generated by protoc-gen-ts_proto. DO NOT EDIT. 
// versions: -// protoc-gen-ts_proto v1.181.2 -// protoc v6.30.0 +// protoc-gen-ts_proto v2.7.7 +// protoc v3.12.4 // source: google/protobuf/timestamp.proto /* eslint-disable */ -import Long from "long"; -import _m0 from "protobufjs/minimal"; +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; export const protobufPackage = "google.protobuf"; @@ -61,15 +60,7 @@ export const protobufPackage = "google.protobuf"; * Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) * .setNanos((int) ((millis % 1000) * 1000000)).build(); * - * Example 5: Compute Timestamp from Java `Instant.now()`. - * - * Instant now = Instant.now(); - * - * Timestamp timestamp = - * Timestamp.newBuilder().setSeconds(now.getEpochSecond()) - * .setNanos(now.getNano()).build(); - * - * Example 6: Compute Timestamp from current time in Python. + * Example 5: Compute Timestamp from current time in Python. * * timestamp = Timestamp() * timestamp.GetCurrentTime() @@ -98,7 +89,7 @@ export const protobufPackage = "google.protobuf"; * [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with * the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use * the Joda Time's [`ISODateTimeFormat.dateTime()`]( - * http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime() + * http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D * ) to obtain a formatter capable of generating timestamps in this format. 
*/ export interface Timestamp { @@ -121,8 +112,8 @@ function createBaseTimestamp(): Timestamp { return { seconds: "0", nanos: 0 }; } -export const Timestamp = { - encode(message: Timestamp, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const Timestamp: MessageFns = { + encode(message: Timestamp, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.seconds !== "0") { writer.uint32(8).int64(message.seconds); } @@ -132,32 +123,34 @@ export const Timestamp = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): Timestamp { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): Timestamp { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseTimestamp(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } - message.seconds = longToString(reader.int64() as Long); + message.seconds = reader.int64().toString(); continue; - case 2: + } + case 2: { if (tag !== 16) { break; } message.nanos = reader.int32(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -203,15 +196,15 @@ type KeysOfUnion = T extends T ? keyof T : never; export type Exact = P extends Builtin ? 
P : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; -function longToString(long: Long) { - return long.toString(); -} - -if (_m0.util.Long !== Long) { - _m0.util.Long = Long as any; - _m0.configure(); -} - function isSet(value: any): boolean { return value !== null && value !== undefined; } + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create, I>>(base?: I): T; + fromPartial, I>>(object: I): T; +} diff --git a/typescript-sdk/src/grpc/solana-storage.ts b/typescript-sdk/src/grpc/solana-storage.ts index b8ea6de..276b24f 100644 --- a/typescript-sdk/src/grpc/solana-storage.ts +++ b/typescript-sdk/src/grpc/solana-storage.ts @@ -1,12 +1,11 @@ // Code generated by protoc-gen-ts_proto. DO NOT EDIT. // versions: -// protoc-gen-ts_proto v1.181.2 -// protoc v6.30.0 +// protoc-gen-ts_proto v2.7.7 +// protoc v3.12.4 // source: solana-storage.proto /* eslint-disable */ -import Long from "long"; -import _m0 from "protobufjs/minimal"; +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; export const protobufPackage = "solana.storage.ConfirmedBlock"; @@ -124,7 +123,11 @@ export interface TransactionStatusMeta { * Available since Solana v1.10.35 / v1.11.6. * Set to `None` for txs executed on earlier versions. 
*/ - computeUnitsConsumed?: string | undefined; + computeUnitsConsumed?: + | string + | undefined; + /** Total transaction cost */ + costUnits?: string | undefined; } export interface TransactionError { @@ -212,8 +215,8 @@ function createBaseConfirmedBlock(): ConfirmedBlock { }; } -export const ConfirmedBlock = { - encode(message: ConfirmedBlock, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const ConfirmedBlock: MessageFns = { + encode(message: ConfirmedBlock, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.previousBlockhash !== "") { writer.uint32(10).string(message.previousBlockhash); } @@ -224,91 +227,99 @@ export const ConfirmedBlock = { writer.uint32(24).uint64(message.parentSlot); } for (const v of message.transactions) { - ConfirmedTransaction.encode(v!, writer.uint32(34).fork()).ldelim(); + ConfirmedTransaction.encode(v!, writer.uint32(34).fork()).join(); } for (const v of message.rewards) { - Reward.encode(v!, writer.uint32(42).fork()).ldelim(); + Reward.encode(v!, writer.uint32(42).fork()).join(); } if (message.blockTime !== undefined) { - UnixTimestamp.encode(message.blockTime, writer.uint32(50).fork()).ldelim(); + UnixTimestamp.encode(message.blockTime, writer.uint32(50).fork()).join(); } if (message.blockHeight !== undefined) { - BlockHeight.encode(message.blockHeight, writer.uint32(58).fork()).ldelim(); + BlockHeight.encode(message.blockHeight, writer.uint32(58).fork()).join(); } if (message.numPartitions !== undefined) { - NumPartitions.encode(message.numPartitions, writer.uint32(66).fork()).ldelim(); + NumPartitions.encode(message.numPartitions, writer.uint32(66).fork()).join(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): ConfirmedBlock { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? 
reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): ConfirmedBlock { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseConfirmedBlock(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.previousBlockhash = reader.string(); continue; - case 2: + } + case 2: { if (tag !== 18) { break; } message.blockhash = reader.string(); continue; - case 3: + } + case 3: { if (tag !== 24) { break; } - message.parentSlot = longToString(reader.uint64() as Long); + message.parentSlot = reader.uint64().toString(); continue; - case 4: + } + case 4: { if (tag !== 34) { break; } message.transactions.push(ConfirmedTransaction.decode(reader, reader.uint32())); continue; - case 5: + } + case 5: { if (tag !== 42) { break; } message.rewards.push(Reward.decode(reader, reader.uint32())); continue; - case 6: + } + case 6: { if (tag !== 50) { break; } message.blockTime = UnixTimestamp.decode(reader, reader.uint32()); continue; - case 7: + } + case 7: { if (tag !== 58) { break; } message.blockHeight = BlockHeight.decode(reader, reader.uint32()); continue; - case 8: + } + case 8: { if (tag !== 66) { break; } message.numPartitions = NumPartitions.decode(reader, reader.uint32()); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -384,43 +395,45 @@ function createBaseConfirmedTransaction(): ConfirmedTransaction { return { transaction: undefined, meta: undefined }; } -export const ConfirmedTransaction = { - encode(message: ConfirmedTransaction, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const ConfirmedTransaction: MessageFns = { + encode(message: ConfirmedTransaction, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if 
(message.transaction !== undefined) { - Transaction.encode(message.transaction, writer.uint32(10).fork()).ldelim(); + Transaction.encode(message.transaction, writer.uint32(10).fork()).join(); } if (message.meta !== undefined) { - TransactionStatusMeta.encode(message.meta, writer.uint32(18).fork()).ldelim(); + TransactionStatusMeta.encode(message.meta, writer.uint32(18).fork()).join(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): ConfirmedTransaction { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): ConfirmedTransaction { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseConfirmedTransaction(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.transaction = Transaction.decode(reader, reader.uint32()); continue; - case 2: + } + case 2: { if (tag !== 18) { break; } message.meta = TransactionStatusMeta.decode(reader, reader.uint32()); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -462,43 +475,45 @@ function createBaseTransaction(): Transaction { return { signatures: [], message: undefined }; } -export const Transaction = { - encode(message: Transaction, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const Transaction: MessageFns = { + encode(message: Transaction, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { for (const v of message.signatures) { writer.uint32(10).bytes(v!); } if (message.message !== undefined) { - Message.encode(message.message, writer.uint32(18).fork()).ldelim(); + Message.encode(message.message, writer.uint32(18).fork()).join(); 
} return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): Transaction { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): Transaction { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseTransaction(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.signatures.push(reader.bytes()); continue; - case 2: + } + case 2: { if (tag !== 18) { break; } message.message = Message.decode(reader, reader.uint32()); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -547,10 +562,10 @@ function createBaseMessage(): Message { }; } -export const Message = { - encode(message: Message, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const Message: MessageFns = { + encode(message: Message, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.header !== undefined) { - MessageHeader.encode(message.header, writer.uint32(10).fork()).ldelim(); + MessageHeader.encode(message.header, writer.uint32(10).fork()).join(); } for (const v of message.accountKeys) { writer.uint32(18).bytes(v!); @@ -559,71 +574,77 @@ export const Message = { writer.uint32(26).bytes(message.recentBlockhash); } for (const v of message.instructions) { - CompiledInstruction.encode(v!, writer.uint32(34).fork()).ldelim(); + CompiledInstruction.encode(v!, writer.uint32(34).fork()).join(); } if (message.versioned !== false) { writer.uint32(40).bool(message.versioned); } for (const v of message.addressTableLookups) { - MessageAddressTableLookup.encode(v!, writer.uint32(50).fork()).ldelim(); + 
MessageAddressTableLookup.encode(v!, writer.uint32(50).fork()).join(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): Message { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): Message { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseMessage(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.header = MessageHeader.decode(reader, reader.uint32()); continue; - case 2: + } + case 2: { if (tag !== 18) { break; } message.accountKeys.push(reader.bytes()); continue; - case 3: + } + case 3: { if (tag !== 26) { break; } message.recentBlockhash = reader.bytes(); continue; - case 4: + } + case 4: { if (tag !== 34) { break; } message.instructions.push(CompiledInstruction.decode(reader, reader.uint32())); continue; - case 5: + } + case 5: { if (tag !== 40) { break; } message.versioned = reader.bool(); continue; - case 6: + } + case 6: { if (tag !== 50) { break; } message.addressTableLookups.push(MessageAddressTableLookup.decode(reader, reader.uint32())); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -690,8 +711,8 @@ function createBaseMessageHeader(): MessageHeader { return { numRequiredSignatures: 0, numReadonlySignedAccounts: 0, numReadonlyUnsignedAccounts: 0 }; } -export const MessageHeader = { - encode(message: MessageHeader, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const MessageHeader: MessageFns = { + encode(message: MessageHeader, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.numRequiredSignatures !== 0) { 
writer.uint32(8).uint32(message.numRequiredSignatures); } @@ -704,39 +725,42 @@ export const MessageHeader = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): MessageHeader { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): MessageHeader { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseMessageHeader(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } message.numRequiredSignatures = reader.uint32(); continue; - case 2: + } + case 2: { if (tag !== 16) { break; } message.numReadonlySignedAccounts = reader.uint32(); continue; - case 3: + } + case 3: { if (tag !== 24) { break; } message.numReadonlyUnsignedAccounts = reader.uint32(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -783,8 +807,8 @@ function createBaseMessageAddressTableLookup(): MessageAddressTableLookup { return { accountKey: new Uint8Array(0), writableIndexes: new Uint8Array(0), readonlyIndexes: new Uint8Array(0) }; } -export const MessageAddressTableLookup = { - encode(message: MessageAddressTableLookup, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const MessageAddressTableLookup: MessageFns = { + encode(message: MessageAddressTableLookup, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.accountKey.length !== 0) { writer.uint32(10).bytes(message.accountKey); } @@ -797,39 +821,42 @@ export const MessageAddressTableLookup = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): MessageAddressTableLookup { - const reader = input instanceof _m0.Reader ? 
input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): MessageAddressTableLookup { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseMessageAddressTableLookup(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.accountKey = reader.bytes(); continue; - case 2: + } + case 2: { if (tag !== 18) { break; } message.writableIndexes = reader.bytes(); continue; - case 3: + } + case 3: { if (tag !== 26) { break; } message.readonlyIndexes = reader.bytes(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -886,13 +913,14 @@ function createBaseTransactionStatusMeta(): TransactionStatusMeta { returnData: undefined, returnDataNone: false, computeUnitsConsumed: undefined, + costUnits: undefined, }; } -export const TransactionStatusMeta = { - encode(message: TransactionStatusMeta, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const TransactionStatusMeta: MessageFns = { + encode(message: TransactionStatusMeta, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.err !== undefined) { - TransactionError.encode(message.err, writer.uint32(10).fork()).ldelim(); + TransactionError.encode(message.err, writer.uint32(10).fork()).join(); } if (message.fee !== "0") { writer.uint32(16).uint64(message.fee); @@ -901,14 +929,14 @@ export const TransactionStatusMeta = { for (const v of message.preBalances) { writer.uint64(v); } - writer.ldelim(); + writer.join(); writer.uint32(34).fork(); for (const v of message.postBalances) { writer.uint64(v); } - writer.ldelim(); + writer.join(); for (const v of message.innerInstructions) { - InnerInstructions.encode(v!, 
writer.uint32(42).fork()).ldelim(); + InnerInstructions.encode(v!, writer.uint32(42).fork()).join(); } if (message.innerInstructionsNone !== false) { writer.uint32(80).bool(message.innerInstructionsNone); @@ -920,13 +948,13 @@ export const TransactionStatusMeta = { writer.uint32(88).bool(message.logMessagesNone); } for (const v of message.preTokenBalances) { - TokenBalance.encode(v!, writer.uint32(58).fork()).ldelim(); + TokenBalance.encode(v!, writer.uint32(58).fork()).join(); } for (const v of message.postTokenBalances) { - TokenBalance.encode(v!, writer.uint32(66).fork()).ldelim(); + TokenBalance.encode(v!, writer.uint32(66).fork()).join(); } for (const v of message.rewards) { - Reward.encode(v!, writer.uint32(74).fork()).ldelim(); + Reward.encode(v!, writer.uint32(74).fork()).join(); } for (const v of message.loadedWritableAddresses) { writer.uint32(98).bytes(v!); @@ -935,7 +963,7 @@ export const TransactionStatusMeta = { writer.uint32(106).bytes(v!); } if (message.returnData !== undefined) { - ReturnData.encode(message.returnData, writer.uint32(114).fork()).ldelim(); + ReturnData.encode(message.returnData, writer.uint32(114).fork()).join(); } if (message.returnDataNone !== false) { writer.uint32(120).bool(message.returnDataNone); @@ -943,33 +971,38 @@ export const TransactionStatusMeta = { if (message.computeUnitsConsumed !== undefined) { writer.uint32(128).uint64(message.computeUnitsConsumed); } + if (message.costUnits !== undefined) { + writer.uint32(136).uint64(message.costUnits); + } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): TransactionStatusMeta { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): TransactionStatusMeta { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseTransactionStatusMeta(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.err = TransactionError.decode(reader, reader.uint32()); continue; - case 2: + } + case 2: { if (tag !== 16) { break; } - message.fee = longToString(reader.uint64() as Long); + message.fee = reader.uint64().toString(); continue; - case 3: + } + case 3: { if (tag === 24) { - message.preBalances.push(longToString(reader.uint64() as Long)); + message.preBalances.push(reader.uint64().toString()); continue; } @@ -977,16 +1010,17 @@ export const TransactionStatusMeta = { if (tag === 26) { const end2 = reader.uint32() + reader.pos; while (reader.pos < end2) { - message.preBalances.push(longToString(reader.uint64() as Long)); + message.preBalances.push(reader.uint64().toString()); } continue; } break; - case 4: + } + case 4: { if (tag === 32) { - message.postBalances.push(longToString(reader.uint64() as Long)); + message.postBalances.push(reader.uint64().toString()); continue; } @@ -994,102 +1028,123 @@ export const TransactionStatusMeta = { if (tag === 34) { const end2 = reader.uint32() + reader.pos; while (reader.pos < end2) { - message.postBalances.push(longToString(reader.uint64() as Long)); + message.postBalances.push(reader.uint64().toString()); } continue; } break; - case 5: + } + case 5: { if (tag !== 42) { break; } message.innerInstructions.push(InnerInstructions.decode(reader, reader.uint32())); continue; - case 10: + } + case 10: { if (tag !== 80) { break; } message.innerInstructionsNone = reader.bool(); continue; - case 6: + } + case 6: { if (tag !== 50) { break; } message.logMessages.push(reader.string()); continue; - case 11: + } + case 11: { if (tag !== 88) { break; } message.logMessagesNone = reader.bool(); continue; - case 7: + } + case 7: { if (tag !== 58) { break; } message.preTokenBalances.push(TokenBalance.decode(reader, 
reader.uint32())); continue; - case 8: + } + case 8: { if (tag !== 66) { break; } message.postTokenBalances.push(TokenBalance.decode(reader, reader.uint32())); continue; - case 9: + } + case 9: { if (tag !== 74) { break; } message.rewards.push(Reward.decode(reader, reader.uint32())); continue; - case 12: + } + case 12: { if (tag !== 98) { break; } message.loadedWritableAddresses.push(reader.bytes()); continue; - case 13: + } + case 13: { if (tag !== 106) { break; } message.loadedReadonlyAddresses.push(reader.bytes()); continue; - case 14: + } + case 14: { if (tag !== 114) { break; } message.returnData = ReturnData.decode(reader, reader.uint32()); continue; - case 15: + } + case 15: { if (tag !== 120) { break; } message.returnDataNone = reader.bool(); continue; - case 16: + } + case 16: { if (tag !== 128) { break; } - message.computeUnitsConsumed = longToString(reader.uint64() as Long); + message.computeUnitsConsumed = reader.uint64().toString(); + continue; + } + case 17: { + if (tag !== 136) { + break; + } + + message.costUnits = reader.uint64().toString(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -1132,6 +1187,7 @@ export const TransactionStatusMeta = { computeUnitsConsumed: isSet(object.computeUnitsConsumed) ? globalThis.String(object.computeUnitsConsumed) : undefined, + costUnits: isSet(object.costUnits) ? globalThis.String(object.costUnits) : undefined, }; }, @@ -1185,6 +1241,9 @@ export const TransactionStatusMeta = { if (message.computeUnitsConsumed !== undefined) { obj.computeUnitsConsumed = message.computeUnitsConsumed; } + if (message.costUnits !== undefined) { + obj.costUnits = message.costUnits; + } return obj; }, @@ -1213,6 +1272,7 @@ export const TransactionStatusMeta = { : undefined; message.returnDataNone = object.returnDataNone ?? false; message.computeUnitsConsumed = object.computeUnitsConsumed ?? undefined; + message.costUnits = object.costUnits ?? 
undefined; return message; }, }; @@ -1221,33 +1281,34 @@ function createBaseTransactionError(): TransactionError { return { err: new Uint8Array(0) }; } -export const TransactionError = { - encode(message: TransactionError, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const TransactionError: MessageFns = { + encode(message: TransactionError, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.err.length !== 0) { writer.uint32(10).bytes(message.err); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): TransactionError { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): TransactionError { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseTransactionError(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.err = reader.bytes(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -1278,43 +1339,45 @@ function createBaseInnerInstructions(): InnerInstructions { return { index: 0, instructions: [] }; } -export const InnerInstructions = { - encode(message: InnerInstructions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const InnerInstructions: MessageFns = { + encode(message: InnerInstructions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.index !== 0) { writer.uint32(8).uint32(message.index); } for (const v of message.instructions) { - InnerInstruction.encode(v!, writer.uint32(18).fork()).ldelim(); + InnerInstruction.encode(v!, writer.uint32(18).fork()).join(); } return writer; }, - decode(input: _m0.Reader | 
Uint8Array, length?: number): InnerInstructions { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): InnerInstructions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseInnerInstructions(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } message.index = reader.uint32(); continue; - case 2: + } + case 2: { if (tag !== 18) { break; } message.instructions.push(InnerInstruction.decode(reader, reader.uint32())); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -1354,8 +1417,8 @@ function createBaseInnerInstruction(): InnerInstruction { return { programIdIndex: 0, accounts: new Uint8Array(0), data: new Uint8Array(0), stackHeight: undefined }; } -export const InnerInstruction = { - encode(message: InnerInstruction, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const InnerInstruction: MessageFns = { + encode(message: InnerInstruction, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.programIdIndex !== 0) { writer.uint32(8).uint32(message.programIdIndex); } @@ -1371,46 +1434,50 @@ export const InnerInstruction = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): InnerInstruction { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): InnerInstruction { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseInnerInstruction(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } message.programIdIndex = reader.uint32(); continue; - case 2: + } + case 2: { if (tag !== 18) { break; } message.accounts = reader.bytes(); continue; - case 3: + } + case 3: { if (tag !== 26) { break; } message.data = reader.bytes(); continue; - case 4: + } + case 4: { if (tag !== 32) { break; } message.stackHeight = reader.uint32(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -1458,8 +1525,8 @@ function createBaseCompiledInstruction(): CompiledInstruction { return { programIdIndex: 0, accounts: new Uint8Array(0), data: new Uint8Array(0) }; } -export const CompiledInstruction = { - encode(message: CompiledInstruction, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const CompiledInstruction: MessageFns = { + encode(message: CompiledInstruction, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.programIdIndex !== 0) { writer.uint32(8).uint32(message.programIdIndex); } @@ -1472,39 +1539,42 @@ export const CompiledInstruction = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): CompiledInstruction { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): CompiledInstruction { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseCompiledInstruction(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } message.programIdIndex = reader.uint32(); continue; - case 2: + } + case 2: { if (tag !== 18) { break; } message.accounts = reader.bytes(); continue; - case 3: + } + case 3: { if (tag !== 26) { break; } message.data = reader.bytes(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -1547,8 +1617,8 @@ function createBaseTokenBalance(): TokenBalance { return { accountIndex: 0, mint: "", uiTokenAmount: undefined, owner: "", programId: "" }; } -export const TokenBalance = { - encode(message: TokenBalance, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const TokenBalance: MessageFns = { + encode(message: TokenBalance, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.accountIndex !== 0) { writer.uint32(8).uint32(message.accountIndex); } @@ -1556,7 +1626,7 @@ export const TokenBalance = { writer.uint32(18).string(message.mint); } if (message.uiTokenAmount !== undefined) { - UiTokenAmount.encode(message.uiTokenAmount, writer.uint32(26).fork()).ldelim(); + UiTokenAmount.encode(message.uiTokenAmount, writer.uint32(26).fork()).join(); } if (message.owner !== "") { writer.uint32(34).string(message.owner); @@ -1567,53 +1637,58 @@ export const TokenBalance = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): TokenBalance { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): TokenBalance { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseTokenBalance(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } message.accountIndex = reader.uint32(); continue; - case 2: + } + case 2: { if (tag !== 18) { break; } message.mint = reader.string(); continue; - case 3: + } + case 3: { if (tag !== 26) { break; } message.uiTokenAmount = UiTokenAmount.decode(reader, reader.uint32()); continue; - case 4: + } + case 4: { if (tag !== 34) { break; } message.owner = reader.string(); continue; - case 5: + } + case 5: { if (tag !== 42) { break; } message.programId = reader.string(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -1668,8 +1743,8 @@ function createBaseUiTokenAmount(): UiTokenAmount { return { uiAmount: 0, decimals: 0, amount: "", uiAmountString: "" }; } -export const UiTokenAmount = { - encode(message: UiTokenAmount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const UiTokenAmount: MessageFns = { + encode(message: UiTokenAmount, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.uiAmount !== 0) { writer.uint32(9).double(message.uiAmount); } @@ -1685,46 +1760,50 @@ export const UiTokenAmount = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): UiTokenAmount { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): UiTokenAmount { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseUiTokenAmount(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 9) { break; } message.uiAmount = reader.double(); continue; - case 2: + } + case 2: { if (tag !== 16) { break; } message.decimals = reader.uint32(); continue; - case 3: + } + case 3: { if (tag !== 26) { break; } message.amount = reader.string(); continue; - case 4: + } + case 4: { if (tag !== 34) { break; } message.uiAmountString = reader.string(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -1772,8 +1851,8 @@ function createBaseReturnData(): ReturnData { return { programId: new Uint8Array(0), data: new Uint8Array(0) }; } -export const ReturnData = { - encode(message: ReturnData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const ReturnData: MessageFns = { + encode(message: ReturnData, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.programId.length !== 0) { writer.uint32(10).bytes(message.programId); } @@ -1783,32 +1862,34 @@ export const ReturnData = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): ReturnData { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): ReturnData { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseReturnData(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.programId = reader.bytes(); continue; - case 2: + } + case 2: { if (tag !== 18) { break; } message.data = reader.bytes(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -1846,8 +1927,8 @@ function createBaseReward(): Reward { return { pubkey: "", lamports: "0", postBalance: "0", rewardType: 0, commission: "" }; } -export const Reward = { - encode(message: Reward, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const Reward: MessageFns = { + encode(message: Reward, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.pubkey !== "") { writer.uint32(10).string(message.pubkey); } @@ -1866,53 +1947,58 @@ export const Reward = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): Reward { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): Reward { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseReward(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.pubkey = reader.string(); continue; - case 2: + } + case 2: { if (tag !== 16) { break; } - message.lamports = longToString(reader.int64() as Long); + message.lamports = reader.int64().toString(); continue; - case 3: + } + case 3: { if (tag !== 24) { break; } - message.postBalance = longToString(reader.uint64() as Long); + message.postBalance = reader.uint64().toString(); continue; - case 4: + } + case 4: { if (tag !== 32) { break; } message.rewardType = reader.int32() as any; continue; - case 5: + } + case 5: { if (tag !== 42) { break; } message.commission = reader.string(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -1965,43 +2051,45 @@ function createBaseRewards(): Rewards { return { rewards: [], numPartitions: undefined }; } -export const Rewards = { - encode(message: Rewards, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const Rewards: MessageFns = { + encode(message: Rewards, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { for (const v of message.rewards) { - Reward.encode(v!, writer.uint32(10).fork()).ldelim(); + Reward.encode(v!, writer.uint32(10).fork()).join(); } if (message.numPartitions !== undefined) { - NumPartitions.encode(message.numPartitions, writer.uint32(18).fork()).ldelim(); + NumPartitions.encode(message.numPartitions, writer.uint32(18).fork()).join(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): Rewards { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): Rewards { + const reader = input instanceof BinaryReader ? 
input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseRewards(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 10) { break; } message.rewards.push(Reward.decode(reader, reader.uint32())); continue; - case 2: + } + case 2: { if (tag !== 18) { break; } message.numPartitions = NumPartitions.decode(reader, reader.uint32()); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -2041,33 +2129,34 @@ function createBaseUnixTimestamp(): UnixTimestamp { return { timestamp: "0" }; } -export const UnixTimestamp = { - encode(message: UnixTimestamp, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const UnixTimestamp: MessageFns = { + encode(message: UnixTimestamp, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.timestamp !== "0") { writer.uint32(8).int64(message.timestamp); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): UnixTimestamp { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): UnixTimestamp { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseUnixTimestamp(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } - message.timestamp = longToString(reader.int64() as Long); + message.timestamp = reader.int64().toString(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -2098,33 +2187,34 @@ function createBaseBlockHeight(): BlockHeight { return { blockHeight: "0" }; } -export const BlockHeight = { - encode(message: BlockHeight, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const BlockHeight: MessageFns = { + encode(message: BlockHeight, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.blockHeight !== "0") { writer.uint32(8).uint64(message.blockHeight); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): BlockHeight { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): BlockHeight { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseBlockHeight(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } - message.blockHeight = longToString(reader.uint64() as Long); + message.blockHeight = reader.uint64().toString(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -2155,33 +2245,34 @@ function createBaseNumPartitions(): NumPartitions { return { numPartitions: "0" }; } -export const NumPartitions = { - encode(message: NumPartitions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { +export const NumPartitions: MessageFns = { + encode(message: NumPartitions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.numPartitions !== "0") { writer.uint32(8).uint64(message.numPartitions); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): NumPartitions { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; + decode(input: BinaryReader | Uint8Array, length?: number): NumPartitions { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; const message = createBaseNumPartitions(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: + case 1: { if (tag !== 8) { break; } - message.numPartitions = longToString(reader.uint64() as Long); + message.numPartitions = reader.uint64().toString(); continue; + } } if ((tag & 7) === 4 || tag === 0) { break; } - reader.skipType(tag & 7); + reader.skip(tag & 7); } return message; }, @@ -2245,15 +2336,15 @@ type KeysOfUnion = T extends T ? keyof T : never; export type Exact = P extends Builtin ? 
P : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; -function longToString(long: Long) { - return long.toString(); -} - -if (_m0.util.Long !== Long) { - _m0.util.Long = Long as any; - _m0.configure(); -} - function isSet(value: any): boolean { return value !== null && value !== undefined; } + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create, I>>(base?: I): T; + fromPartial, I>>(object: I): T; +} diff --git a/typescript-sdk/src/index.ts b/typescript-sdk/src/index.ts index 74fc0b3..1511974 100644 --- a/typescript-sdk/src/index.ts +++ b/typescript-sdk/src/index.ts @@ -1,300 +1,282 @@ -/** - * @fileoverview Fumarole TypeScript SDK for streaming Solana account and transaction data - * - * Fumarole provides: - * - High availability through multi-node data collection - * - Persistent storage of historical state - * - Horizontal scalability via consumer groups - * - * @see https://github.com/rpcpool/yellowstone-fumarole - */ -import { - ChannelCredentials, - credentials, - ChannelOptions, - Metadata, -} from "@grpc/grpc-js"; +import { ServiceError } from "@grpc/grpc-js"; +import { FumaroleConfig } from "./config/config"; +import { FumaroleClient as GrpcClient } from "./grpc/fumarole"; +import { FumaroleGrpcConnector } from "./connectivity"; import { + VersionRequest, + VersionResponse, + ControlResponse, + JoinControlPlane, + ControlCommand, + ListConsumerGroupsRequest, + ListConsumerGroupsResponse, + GetConsumerGroupInfoRequest, ConsumerGroupInfo, - CreateStaticConsumerGroupRequest, - CreateStaticConsumerGroupResponse, DeleteConsumerGroupRequest, DeleteConsumerGroupResponse, - FumaroleClient, - GetConsumerGroupInfoRequest, - GetOldestSlotRequest, - GetOldestSlotResponse, - GetSlotLagInfoRequest, - GetSlotLagInfoResponse, - ListAvailableCommitmentLevelsRequest, - 
ListAvailableCommitmentLevelsResponse, - ListConsumerGroupsResponse, - SubscribeRequest, + CreateConsumerGroupRequest, + CreateConsumerGroupResponse, } from "./grpc/fumarole"; +import { SubscribeRequest, SubscribeUpdate } from "./grpc/geyser"; +import { + AsyncQueue, + DragonsmouthAdapterSession, + FumaroleSubscribeConfig, + DEFAULT_DRAGONSMOUTH_CAPACITY, + DEFAULT_COMMIT_INTERVAL, + DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT, + DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP, + DEFAULT_GC_INTERVAL, + DEFAULT_SLOT_MEMORY_RETENTION, +} from "./types"; -export type FumaroleSubscribeRequest = SubscribeRequest; +export class FumaroleClient { + private static readonly logger = console; + private readonly connector: FumaroleGrpcConnector; + private readonly stub: GrpcClient; -/** - * Configuration options for Fumarole subscription - * @example - * ```typescript - * const stream = await client.subscribe({ compression: "gzip" }); - * ``` - */ -export type SubscribeConfig = { - /** Enable gzip compression for reduced bandwidth usage */ - compression?: "gzip"; -}; + constructor(connector: FumaroleGrpcConnector, stub: GrpcClient) { + this.connector = connector; + this.stub = stub; + } + + static async connect(config: FumaroleConfig): Promise { + const endpoint = config.endpoint; + const connector = new FumaroleGrpcConnector(config, endpoint); + FumaroleClient.logger.debug(`Connecting to ${endpoint}`); + const client = await connector.connect(); + FumaroleClient.logger.debug(`Connected to ${endpoint}`); + return new FumaroleClient(connector, client); + } + + async version(): Promise { + const request = {} as VersionRequest; + return new Promise((resolve, reject) => { + this.stub.version( + request, + (error: ServiceError | null, response: VersionResponse) => { + if (error) { + reject(error); + } else { + resolve(response); + } + } + ); + }); + } + + async dragonsmouthSubscribe( + consumerGroupName: string, + request: SubscribeRequest + ): Promise { + return 
this.dragonsmouthSubscribeWithConfig(consumerGroupName, request, {}); + } + + async dragonsmouthSubscribeWithConfig( + consumerGroupName: string, + request: SubscribeRequest, + config: FumaroleSubscribeConfig + ): Promise { + const finalConfig = { + concurrentDownloadLimit: DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP, + commitInterval: DEFAULT_COMMIT_INTERVAL, + maxFailedSlotDownloadAttempt: DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT, + dataChannelCapacity: DEFAULT_DRAGONSMOUTH_CAPACITY, + gcInterval: DEFAULT_GC_INTERVAL, + slotMemoryRetention: DEFAULT_SLOT_MEMORY_RETENTION, + ...config, + }; -/** - * Main client for interacting with the Fumarole service - */ -export default class Client { - _client: FumaroleClient; - _insecureXToken: string | undefined; + const dragonsmouthOutlet = new AsyncQueue( + finalConfig.dataChannelCapacity + ); + const fumeControlPlaneQ = new AsyncQueue(100); + + const initialJoin = { consumerGroupName } as JoinControlPlane; + const initialJoinCommand = { initialJoin } as ControlCommand; + await fumeControlPlaneQ.put(initialJoinCommand); + + FumaroleClient.logger.debug( + `Sent initial join command: ${JSON.stringify(initialJoinCommand)}` + ); - /** - * Creates a new Fumarole client instance - * - * @param endpoint - The Fumarole service endpoint URL - * @param xToken - Authentication token provided by Triton - * @param channelOptions - Additional gRPC channel options - */ - constructor( - endpoint: string, - xToken: string | undefined, - channelOptions: ChannelOptions | undefined - ) { - let creds: ChannelCredentials; + const controlPlaneStream = this.stub.subscribe(); + const subscribeRequestQueue = new AsyncQueue(100); + const fumeControlPlaneRxQ = new AsyncQueue(100); - const endpointURL = new URL(endpoint); - let port = endpointURL.port; - if (!port) { - switch (endpointURL.protocol) { - case "https:": - port = "443"; - break; - case "http:": - port = "80"; - break; + // Start the control plane source task + const controlPlaneSourceTask = (async 
() => { + try { + for await (const update of controlPlaneStream) { + await fumeControlPlaneRxQ.put(update); + } + } catch (error) { + if (error.code !== "CANCELLED") { + throw error; + } } - } + })(); - // Check if we need to use TLS. - if (endpointURL.protocol.startsWith("https:")) { - creds = credentials.combineChannelCredentials( - credentials.createSsl(), - credentials.createFromMetadataGenerator((_params, callback) => { - const metadata = new Metadata(); - if (xToken !== undefined) { - metadata.add("x-token", xToken); - } - return callback(null, metadata); - }) + // Read the initial response + const controlResponse = + (await fumeControlPlaneRxQ.get()) as ControlResponse; + const init = controlResponse.init; + if (!init) { + throw new Error( + `Unexpected initial response: ${JSON.stringify(controlResponse)}` ); - } else { - creds = ChannelCredentials.createInsecure(); - if (xToken !== undefined) { - this._insecureXToken = xToken; - } } - this._client = new FumaroleClient( - `${endpointURL.hostname}:${port}`, - creds, - channelOptions + FumaroleClient.logger.debug( + `Control response: ${JSON.stringify(controlResponse)}` ); - } - private _getInsecureMetadata(): Metadata { - const metadata = new Metadata(); - if (this._insecureXToken) { - metadata.add("x-token", this._insecureXToken); + const lastCommittedOffsetStr = init.lastCommittedOffsets?.[0]; + if (!lastCommittedOffsetStr) { + throw new Error("No last committed offset"); } - return metadata; + const lastCommittedOffset = BigInt(lastCommittedOffsetStr); + + // Create the runtime + const dataPlaneClient = await this.connector.connect(); + + // Start the runtime task + const runtimeTask = this.startRuntime( + subscribeRequestQueue, + fumeControlPlaneQ, + fumeControlPlaneRxQ, + dragonsmouthOutlet, + request, + consumerGroupName, + lastCommittedOffset, + finalConfig, + dataPlaneClient + ); + + FumaroleClient.logger.debug(`Fumarole handle created: ${runtimeTask}`); + + return { + sink: subscribeRequestQueue, + 
source: dragonsmouthOutlet, + fumaroleHandle: runtimeTask, + }; } - /** - * Creates a new static consumer group for horizontal scaling - * - * @example - * ```typescript - * const group = await client.createStaticConsumerGroup({ - * commitmentLevel: CommitmentLevel.CONFIRMED, - * consumerGroupLabel: "my-group", - * eventSubscriptionPolicy: EventSubscriptionPolicy.BOTH, - * initialOffsetPolicy: InitialOffsetPolicy.LATEST, - * }); - * ``` - */ - async createStaticConsumerGroup( - request: CreateStaticConsumerGroupRequest - ): Promise { - return await new Promise((resolve, reject) => { - this._client.createStaticConsumerGroup(request, (error, response) => { - if (error === null || error === undefined) { - resolve(response); - } else { - reject(error); - } - }); - }); + private async startRuntime( + subscribeRequestQueue: AsyncQueue, + controlPlaneTxQ: AsyncQueue, + controlPlaneRxQ: AsyncQueue, + dragonsmouthOutlet: AsyncQueue, + request: SubscribeRequest, + consumerGroupName: string, + lastCommittedOffset: bigint, + config: Required, + dataPlaneClient: GrpcClient + ): Promise { + // Implementation of runtime task here + // This would be equivalent to AsyncioFumeDragonsmouthRuntime in Python + // For brevity, this is a placeholder implementation + return Promise.resolve(); } - /** - * Lists all available consumer groups - * - * @param request - List request parameters - * @returns Promise resolving to list of consumer groups - */ - async listConsumerGroups( - request: ListAvailableCommitmentLevelsRequest - ): Promise { - return await new Promise((resolve, reject) => { - this._client.listConsumerGroups(request, (error, response) => { - if (error === null || error === undefined) { - resolve(response); - } else { - reject(error); + async listConsumerGroups(): Promise { + const request = {} as ListConsumerGroupsRequest; + return new Promise((resolve, reject) => { + this.stub.listConsumerGroups( + request, + (error: ServiceError | null, response: ListConsumerGroupsResponse) 
=> { + if (error) { + reject(error); + } else { + resolve(response); + } } - }); + ); }); } - /** - * Gets detailed information about a specific consumer group - * - * @param request - Consumer group info request - * @returns Promise resolving to consumer group details - */ async getConsumerGroupInfo( - request: GetConsumerGroupInfoRequest - ): Promise { - return await new Promise((resolve, reject) => { - this._client.getConsumerGroupInfo(request, (error, response) => { - if (error === null || error === undefined) { - resolve(response); - } else { - reject(error); + consumerGroupName: string + ): Promise { + const request = { consumerGroupName } as GetConsumerGroupInfoRequest; + return new Promise((resolve, reject) => { + this.stub.getConsumerGroupInfo( + request, + (error: ServiceError | null, response: ConsumerGroupInfo) => { + if (error) { + if (error.code === 14) { + // grpc.status.NOT_FOUND + resolve(null); + } else { + reject(error); + } + } else { + resolve(response); + } } - }); + ); }); } - /** - * Deletes an existing consumer group - * - * @param request - Delete request parameters - * @returns Promise resolving when deletion is complete - */ async deleteConsumerGroup( - request: DeleteConsumerGroupRequest + consumerGroupName: string ): Promise { - return await new Promise((resolve, reject) => { - this._client.deleteConsumerGroup(request, (error, response) => { - if (error === null || error === undefined) { - resolve(response); - } else { - reject(error); + const request = { consumerGroupName } as DeleteConsumerGroupRequest; + return new Promise((resolve, reject) => { + this.stub.deleteConsumerGroup( + request, + (error: ServiceError | null, response: DeleteConsumerGroupResponse) => { + if (error) { + reject(error); + } else { + resolve(response); + } } - }); + ); }); } - /** - * Gets information about slot lag for a subscription - * - * @param request - Slot lag info request - * @returns Promise resolving to slot lag details - */ - async getSlotLagInfo( 
- request: GetSlotLagInfoRequest - ): Promise { - return await new Promise((resolve, reject) => { - this._client.getSlotLagInfo(request, (error, response) => { - if (error === null || error === undefined) { - resolve(response); - } else { - reject(error); - } - }); - }); - } + async deleteAllConsumerGroups(): Promise { + const response = await this.listConsumerGroups(); + const deletePromises = response.consumerGroups.map((group) => + this.deleteConsumerGroup(group.consumerGroupName) + ); - /** - * Gets the oldest available slot in the persistence store - * - * @param request - Oldest slot request parameters - * @returns Promise resolving to oldest slot information - */ - async getOldestSlot( - request: GetOldestSlotRequest - ): Promise { - return await new Promise((resolve, reject) => { - this._client.getOldestSlot(request, (error, response) => { - if (error === null || error === undefined) { - resolve(response); - } else { - reject(error); - } - }); - }); - } + const results = await Promise.all(deletePromises); - /** - * Lists available commitment levels for subscriptions - * - * @param request - List commitment levels request - * @returns Promise resolving to available commitment levels - */ - async listAvailableCommitmentLevels( - request: ListAvailableCommitmentLevelsRequest - ): Promise { - return await new Promise((resolve, reject) => { - this._client.listAvailableCommitmentLevels(request, (error, response) => { - if (error === null || error === undefined) { - resolve(response); - } else { - reject(error); - } - }); - }); + // Check for any failures + const failures = results.filter((result) => !result.success); + if (failures.length > 0) { + throw new Error( + `Failed to delete some consumer groups: ${JSON.stringify(failures)}` + ); + } } - /** - * Subscribes to account and transaction updates - * - * @example - * ```typescript - * const stream = await client.subscribe({ compression: "gzip" }); - * - * stream.on('data', (data) => console.log(data)); - * 
stream.write({ - * accounts: { - * tokenKeg: { - * account: ["TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA"], - * filters: [], - * owner: ["TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA"], - * } - * }, - * consumerGroupLabel: "my-group" - * }); - * ``` - */ - async subscribe(config?: SubscribeConfig) { - const options: any = {}; - if (config) { - if (config.compression) { - switch (config.compression) { - case "gzip": - options["grpc.default_compression_algorithm"] = 2; // set compression to: gzip - break; - default: - options["grpc.default_compression_algorithm"] = 0; // set compression to: none - break; + async createConsumerGroup( + request: CreateConsumerGroupRequest + ): Promise { + return new Promise((resolve, reject) => { + this.stub.createConsumerGroup( + request, + (error: ServiceError | null, response: CreateConsumerGroupResponse) => { + if (error) { + reject(error); + } else { + resolve(response); + } } - } - } - return await this._client.subscribe(this._getInsecureMetadata(), options); + ); + }); } } + +export { + FumaroleConfig, + FumaroleSubscribeConfig, + DragonsmouthAdapterSession, + DEFAULT_DRAGONSMOUTH_CAPACITY, + DEFAULT_COMMIT_INTERVAL, + DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT, + DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP, +}; diff --git a/typescript-sdk/src/runtime/aio.ts b/typescript-sdk/src/runtime/aio.ts new file mode 100644 index 0000000..1b28c6c --- /dev/null +++ b/typescript-sdk/src/runtime/aio.ts @@ -0,0 +1,587 @@ +import { ChannelCredentials, ServiceError, status } from "@grpc/grpc-js"; +import { Queue as AsyncQueue } from "./queue"; +import { Interval } from "../utils/aio"; +import { + FumaroleSM, + FumeDownloadRequest, + FumeOffset, + FumeShardIdx, +} from "./state-machine"; +import { + SubscribeRequest, + SubscribeUpdate, + SubscribeUpdateSlot, + CommitmentLevel as ProtoCommitmentLevel, +} from "../grpc/geyser"; +import { + ControlCommand, + PollBlockchainHistory, + CommitOffset, + ControlResponse, + DownloadBlockShard, + BlockFilters, + 
FumaroleClient, +} from "../grpc/fumarole"; + +// Constants +export const DEFAULT_GC_INTERVAL = 5; +export const DEFAULT_SLOT_MEMORY_RETENTION = 10000; + +// Types and Interfaces +export interface CompletedDownloadBlockTask { + slot: number; + blockUid: Uint8Array; + shardIdx: FumeShardIdx; + totalEventDownloaded: number; +} + +export interface DownloadBlockError { + kind: + | "Disconnected" + | "OutletDisconnected" + | "BlockShardNotFound" + | "FailedDownload" + | "Fatal"; + message: string; +} + +export interface DownloadTaskResult { + kind: "Ok" | "Err"; + completed?: CompletedDownloadBlockTask; + slot?: number; + err?: DownloadBlockError; +} + +export interface AsyncSlotDownloader { + runDownload( + subscribeRequest: SubscribeRequest, + spec: DownloadTaskArgs + ): Promise; +} + +const LOGGER = console; + +export class AsyncioFumeDragonsmouthRuntime { + private readonly sm: FumaroleSM; + private readonly slotDownloader: AsyncSlotDownloader; + private subscribeRequestUpdateQ: AsyncQueue; + private subscribeRequest: SubscribeRequest; + private readonly consumerGroupName: string; + private readonly controlPlaneTx: AsyncQueue; + private readonly controlPlaneRx: AsyncQueue; + private readonly dragonsmouthOutlet: AsyncQueue; + private readonly commitInterval: number; + private readonly gcInterval: number; + private readonly maxConcurrentDownload: number; + private readonly downloadTasks: Map< + Promise, + FumeDownloadRequest + >; + private lastCommit: number; + + constructor( + sm: FumaroleSM, + slotDownloader: AsyncSlotDownloader, + subscribeRequestUpdateQ: AsyncQueue, + subscribeRequest: SubscribeRequest, + consumerGroupName: string, + controlPlaneTxQ: AsyncQueue, + controlPlaneRxQ: AsyncQueue, + dragonsmouthOutlet: AsyncQueue, + commitInterval: number, + gcInterval: number, + maxConcurrentDownload: number = 10 + ) { + this.sm = sm; + this.slotDownloader = slotDownloader; + this.subscribeRequestUpdateQ = subscribeRequestUpdateQ; + this.subscribeRequest = 
subscribeRequest; + this.consumerGroupName = consumerGroupName; + this.controlPlaneTx = controlPlaneTxQ; + this.controlPlaneRx = controlPlaneRxQ; + this.dragonsmouthOutlet = dragonsmouthOutlet; + this.commitInterval = commitInterval; + this.gcInterval = gcInterval; + this.maxConcurrentDownload = maxConcurrentDownload; + this.downloadTasks = new Map(); + this.lastCommit = Date.now(); + } + + private buildPollHistoryCmd(fromOffset?: FumeOffset): ControlCommand { + return { pollHist: { shardId: 0 } } as ControlCommand; + } + + private buildCommitOffsetCmd(offset: FumeOffset): ControlCommand { + return { commitOffset: { offset, shardId: 0 } } as ControlCommand; + } + + private handleControlResponse(controlResponse: ControlResponse): void { + // Get first defined property from controlResponse + const responseField = Object.keys(controlResponse).find( + (key) => controlResponse[key] !== undefined && key !== "response" + ); + + if (!responseField) { + throw new Error("Control response is empty"); + } + + switch (responseField) { + case "pollHist": { + const pollHist = controlResponse.pollHist!; + LOGGER.debug(`Received poll history ${pollHist.events?.length} events`); + this.sm.queueBlockchainEvent(pollHist.events); + break; + } + case "commitOffset": { + const commitOffset = controlResponse.commitOffset!; + LOGGER.debug(`Received commit offset: ${commitOffset}`); + this.sm.updateCommittedOffset(commitOffset.offset); + break; + } + case "pong": + LOGGER.debug("Received pong"); + break; + default: + throw new Error("Unexpected control response"); + } + } + + private async pollHistoryIfNeeded(): Promise { + if (this.sm.needNewBlockchainEvents()) { + const cmd = this.buildPollHistoryCmd(this.sm.committableOffset); + await this.controlPlaneTx.put(cmd); + } + } + + private commitmentLevel(): number { + return this.subscribeRequest.commitment || 0; + } + + private scheduleDownloadTaskIfAny(): void { + while (true) { + LOGGER.debug("Checking for download tasks to schedule"); + 
if (this.downloadTasks.size >= this.maxConcurrentDownload) { + break; + } + + LOGGER.debug("Popping slot to download"); + const downloadRequest = this.sm.popSlotToDownload(this.commitmentLevel()); + if (!downloadRequest) { + LOGGER.debug("No download request available"); + break; + } + + LOGGER.debug(`Download request for slot ${downloadRequest.slot} popped`); + if (!downloadRequest.blockchainId) { + throw new Error("Download request must have a blockchain ID"); + } + + const downloadTaskArgs: DownloadTaskArgs = { + downloadRequest, + dragonsmouthOutlet: this.dragonsmouthOutlet, + }; + + const downloadPromise = this.slotDownloader.runDownload( + this.subscribeRequest, + downloadTaskArgs + ); + this.downloadTasks.set(downloadPromise, downloadRequest); + LOGGER.debug(`Scheduling download task for slot ${downloadRequest.slot}`); + } + } + + private handleDownloadResult(downloadResult: DownloadTaskResult): void { + if (downloadResult.kind === "Ok") { + const completed = downloadResult.completed!; + LOGGER.debug( + `Download completed for slot ${completed.slot}, shard ${completed.shardIdx}, ${completed.totalEventDownloaded} total events` + ); + this.sm.makeSlotDownloadProgress(completed.slot, completed.shardIdx); + } else { + const slot = downloadResult.slot!; + const err = downloadResult.err!; + throw new Error(`Failed to download slot ${slot}: ${err.message}`); + } + } + + private async forceCommitOffset(): Promise { + LOGGER.debug(`Force committing offset ${this.sm.committableOffset}`); + await this.controlPlaneTx.put( + this.buildCommitOffsetCmd(this.sm.committableOffset) + ); + } + + private async commitOffset(): Promise { + if (this.sm.lastCommittedOffset < this.sm.committableOffset) { + LOGGER.debug(`Committing offset ${this.sm.committableOffset}`); + await this.forceCommitOffset(); + } + this.lastCommit = Date.now(); + } + + private async drainSlotStatus(): Promise { + const commitment = this.subscribeRequest.commitment || 0; + const slotStatusVec: any[] = []; + 
+ while (true) { + const slotStatus = this.sm.popNextSlotStatus(); + if (!slotStatus) break; + slotStatusVec.push(slotStatus); + } + + if (!slotStatusVec.length) return; + + LOGGER.debug(`Draining ${slotStatusVec.length} slot status`); + + for (const slotStatus of slotStatusVec) { + const matchedFilters: string[] = []; + for (const [filterName, filter] of Object.entries( + this.subscribeRequest.slots || {} + )) { + if ( + filter.filterByCommitment && + slotStatus.commitmentLevel === commitment + ) { + matchedFilters.push(filterName); + } else if (!filter.filterByCommitment) { + matchedFilters.push(filterName); + } + } + + if (matchedFilters.length) { + const update: SubscribeUpdate = { + filters: matchedFilters, + createdAt: undefined, + slot: { + slot: slotStatus.slot, + parent: slotStatus.parentSlot, + status: slotStatus.commitmentLevel, + deadError: slotStatus.deadError, + } as SubscribeUpdateSlot, + }; + + try { + await this.dragonsmouthOutlet.put(update); + } catch (error) { + if (error.message === "Queue full") return; + throw error; + } + } + + this.sm.markEventAsProcessed(slotStatus.sessionSequence); + } + } + + private async handleControlPlaneResp( + result: ControlResponse | Error + ): Promise { + if (result instanceof Error) { + // Create a slot update with the error information + const errorUpdate: SubscribeUpdate = { + filters: [], + createdAt: undefined, + slot: { + slot: "0", + parent: "0", + status: 0, // Using 0 as default status for error case + deadError: result.message, + }, + }; + await this.dragonsmouthOutlet.put(errorUpdate); + LOGGER.error(`Control plane error: ${result.message}`); + return false; + } + this.handleControlResponse(result); + return true; + } + + public handleNewSubscribeRequest(subscribeRequest: SubscribeRequest): void { + this.subscribeRequest = subscribeRequest; + } + + public async run(): Promise { + LOGGER.debug("Fumarole runtime starting..."); + await this.controlPlaneTx.put(this.buildPollHistoryCmd()); + 
LOGGER.debug("Initial poll history command sent"); + await this.forceCommitOffset(); + LOGGER.debug("Initial commit offset command sent"); + let ticks = 0; + + const taskMap = new Map, string>(); + + // Initial tasks + taskMap.set(this.subscribeRequestUpdateQ.get(), "dragonsmouth_bidi"); + taskMap.set(this.controlPlaneRx.get(), "control_plane_rx"); + taskMap.set(new Interval(this.commitInterval).tick(), "commit_tick"); + + while (taskMap.size > 0) { + ticks++; + LOGGER.debug("Runtime loop tick"); + + if (ticks % this.gcInterval === 0) { + LOGGER.debug("Running garbage collection"); + this.sm.gc(); + ticks = 0; + } + + LOGGER.debug("Polling history if needed"); + await this.pollHistoryIfNeeded(); + + LOGGER.debug("Scheduling download tasks if any"); + this.scheduleDownloadTaskIfAny(); + + for (const [task] of this.downloadTasks) { + taskMap.set(task, "download_task"); + } + + const downloadTaskInFlight = this.downloadTasks.size; + LOGGER.debug( + `Current download tasks in flight: ${downloadTaskInFlight} / ${this.maxConcurrentDownload}` + ); + + const promises = Array.from(taskMap.keys()); + const done = await Promise.race( + promises.map((p) => p.then((result) => ({ promise: p, result }))) + ); + + const taskName = taskMap.get(done.promise); + taskMap.delete(done.promise); + + switch (taskName) { + case "dragonsmouth_bidi": { + LOGGER.debug("Dragonsmouth subscribe request received"); + const result = done.result as SubscribeRequest; + this.handleNewSubscribeRequest(result); + const newTask = this.subscribeRequestUpdateQ.get(); + taskMap.set(newTask, "dragonsmouth_bidi"); + break; + } + case "control_plane_rx": { + LOGGER.debug("Control plane response received"); + if (!(await this.handleControlPlaneResp(done.result))) { + LOGGER.debug("Control plane error"); + return; + } + const newTask = this.controlPlaneRx.get(); + taskMap.set(newTask, "control_plane_rx"); + break; + } + case "download_task": { + LOGGER.debug("Download task result received"); + 
this.downloadTasks.delete(done.promise); + this.handleDownloadResult(done.result); + break; + } + case "commit_tick": { + LOGGER.debug("Commit tick reached"); + await this.commitOffset(); + const newTask = new Interval(this.commitInterval).tick(); + taskMap.set(newTask, "commit_tick"); + break; + } + default: + throw new Error(`Unexpected task name: ${taskName}`); + } + + await this.drainSlotStatus(); + } + + LOGGER.debug("Fumarole runtime exiting"); + } +} + +export interface DownloadTaskRunnerChannels { + downloadTaskQueueTx: AsyncQueue; + cncTx: AsyncQueue; + downloadResultRx: AsyncQueue; +} + +export interface DownloadTaskRunnerCommand { + kind: string; + subscribeRequest?: SubscribeRequest; +} + +export interface DownloadTaskArgs { + downloadRequest: FumeDownloadRequest; + dragonsmouthOutlet: AsyncQueue; +} + +export class GrpcSlotDownloader implements AsyncSlotDownloader { + private client: FumaroleClient; + + constructor(client: FumaroleClient) { + this.client = client; + } + + public async runDownload( + subscribeRequest: SubscribeRequest, + spec: DownloadTaskArgs + ): Promise { + const downloadTask = new GrpcDownloadBlockTaskRun( + spec.downloadRequest, + this.client, + { + accounts: subscribeRequest.accounts, + transactions: subscribeRequest.transactions, + entries: subscribeRequest.entry, + blocksMeta: subscribeRequest.blocksMeta, + } as BlockFilters, + spec.dragonsmouthOutlet + ); + + LOGGER.debug(`Running download task for slot ${spec.downloadRequest.slot}`); + return await downloadTask.run(); + } +} + +export class GrpcDownloadBlockTaskRun { + private downloadRequest: FumeDownloadRequest; + private client: FumaroleClient; + private filters: BlockFilters; + private dragonsmouthOutlet: AsyncQueue; + + constructor( + downloadRequest: FumeDownloadRequest, + client: FumaroleClient, + filters: BlockFilters, + dragonsmouthOutlet: AsyncQueue + ) { + this.downloadRequest = downloadRequest; + this.client = client; + this.filters = filters; + 
this.dragonsmouthOutlet = dragonsmouthOutlet; + } + + private mapTonicErrorCodeToDownloadBlockError( + error: ServiceError + ): DownloadBlockError { + switch (error.code) { + case status.NOT_FOUND: + return { + kind: "BlockShardNotFound", + message: "Block shard not found", + }; + case status.UNAVAILABLE: + return { + kind: "Disconnected", + message: "Disconnected", + }; + case status.INTERNAL: + case status.ABORTED: + case status.DATA_LOSS: + case status.RESOURCE_EXHAUSTED: + case status.UNKNOWN: + case status.CANCELLED: + case status.DEADLINE_EXCEEDED: + return { + kind: "FailedDownload", + message: "Failed download", + }; + case status.INVALID_ARGUMENT: + throw new Error("Invalid argument"); + default: + return { + kind: "Fatal", + message: `Unknown error: ${error.code}`, + }; + } + } + + public async run(): Promise { + const request = { + blockchainId: this.downloadRequest.blockchainId, + blockUid: this.downloadRequest.blockUid, + shardIdx: 0, + blockFilters: this.filters, + } as DownloadBlockShard; + + try { + LOGGER.debug( + `Requesting download for block ${Buffer.from( + this.downloadRequest.blockUid + ).toString("hex")} at slot ${this.downloadRequest.slot}` + ); + + let totalEventDownloaded = 0; + const stream = this.client.downloadBlock(request); + + return new Promise((resolve, reject) => { + stream.on("data", async (data: any) => { + const kind = Object.keys(data).find( + (k) => data[k] !== undefined && k !== "response" + ); + if (!kind) return; + + switch (kind) { + case "update": { + const update = data.update; + if (!update) throw new Error("Update is null"); + totalEventDownloaded++; + try { + await this.dragonsmouthOutlet.put(update); + } catch (error) { + if (error.message === "Queue shutdown") { + LOGGER.error("Dragonsmouth outlet is disconnected"); + resolve({ + kind: "Err", + slot: this.downloadRequest.slot, + err: { + kind: "OutletDisconnected", + message: "Outlet disconnected", + }, + }); + } + } + break; + } + case "blockShardDownloadFinish": 
+ LOGGER.debug( + `Download finished for block ${Buffer.from( + this.downloadRequest.blockUid + ).toString("hex")} at slot ${this.downloadRequest.slot}` + ); + resolve({ + kind: "Ok", + completed: { + slot: this.downloadRequest.slot, + blockUid: this.downloadRequest.blockUid, + shardIdx: 0, + totalEventDownloaded, + }, + }); + break; + default: + reject(new Error(`Unexpected response kind: ${kind}`)); + } + }); + + stream.on("error", (error: ServiceError) => { + LOGGER.error(`Download block error: ${error}`); + resolve({ + kind: "Err", + slot: this.downloadRequest.slot, + err: this.mapTonicErrorCodeToDownloadBlockError(error), + }); + }); + + stream.on("end", () => { + resolve({ + kind: "Err", + slot: this.downloadRequest.slot, + err: { + kind: "FailedDownload", + message: "Failed download", + }, + }); + }); + }); + } catch (error) { + LOGGER.error(`Download block error: ${error}`); + return { + kind: "Err", + slot: this.downloadRequest.slot, + err: this.mapTonicErrorCodeToDownloadBlockError(error as ServiceError), + }; + } + } +} diff --git a/typescript-sdk/src/runtime/queue.ts b/typescript-sdk/src/runtime/queue.ts new file mode 100644 index 0000000..48fc2f9 --- /dev/null +++ b/typescript-sdk/src/runtime/queue.ts @@ -0,0 +1,70 @@ +export class Queue { + private items: T[] = []; + private maxSize: number; + private closed: boolean = false; + + constructor(maxSize: number = Infinity) { + this.maxSize = maxSize; + } + + async put(item: T): Promise { + if (this.closed) { + throw new Error("Queue shutdown"); + } + + if (this.items.length >= this.maxSize) { + throw new Error("Queue full"); + } + + this.items.push(item); + } + + async get(): Promise { + if (this.closed && this.items.length === 0) { + throw new Error("Queue shutdown"); + } + + // Wait for an item to be available + while (this.items.length === 0) { + await new Promise((resolve) => setTimeout(resolve, 10)); + } + + return this.items.shift()!; + } + + isEmpty(): boolean { + return this.items.length === 0; + 
} + + isFull(): boolean { + return this.items.length >= this.maxSize; + } + + size(): number { + return this.items.length; + } + + close(): void { + this.closed = true; + } + + [Symbol.asyncIterator](): AsyncIterator { + return { + next: async (): Promise> => { + if (this.closed && this.isEmpty()) { + return { done: true, value: undefined }; + } + + try { + const value = await this.get(); + return { done: false, value }; + } catch (error) { + if (error.message === "Queue shutdown") { + return { done: true, value: undefined }; + } + throw error; + } + }, + }; + } +} diff --git a/typescript-sdk/src/runtime/state-machine.ts b/typescript-sdk/src/runtime/state-machine.ts new file mode 100644 index 0000000..807017c --- /dev/null +++ b/typescript-sdk/src/runtime/state-machine.ts @@ -0,0 +1,64 @@ +export type FumeShardIdx = number; +export type FumeOffset = string; + +export interface FumeDownloadRequest { + slot: number; + blockchainId: Uint8Array; + blockUid: Uint8Array; +} + +export class FumaroleSM { + private _lastCommittedOffset: FumeOffset; + private _committableOffset: FumeOffset; + private _slotStatusQueue: any[]; + private _needNewEvents: boolean; + + constructor() { + this._lastCommittedOffset = "0"; + this._committableOffset = "0"; + this._slotStatusQueue = []; + this._needNewEvents = true; + } + + get lastCommittedOffset(): FumeOffset { + return this._lastCommittedOffset; + } + + get committableOffset(): FumeOffset { + return this._committableOffset; + } + + needNewBlockchainEvents(): boolean { + return this._needNewEvents; + } + + updateCommittedOffset(offset: FumeOffset): void { + this._lastCommittedOffset = offset; + } + + queueBlockchainEvent(events: any[]): void { + // Implementation would go here + this._needNewEvents = false; + } + + gc(): void { + // Implementation of garbage collection + } + + popSlotToDownload(commitment: number): FumeDownloadRequest | null { + // Implementation would go here + return null; + } + + makeSlotDownloadProgress(slot: 
number, shardIdx: FumeShardIdx): void { + // Implementation would go here + } + + popNextSlotStatus(): any | null { + return this._slotStatusQueue.shift() || null; + } + + markEventAsProcessed(sessionSequence: number): void { + // Implementation would go here + } +} diff --git a/typescript-sdk/src/types.ts b/typescript-sdk/src/types.ts new file mode 100644 index 0000000..f46d09a --- /dev/null +++ b/typescript-sdk/src/types.ts @@ -0,0 +1,99 @@ +import { SubscribeRequest, SubscribeUpdate } from "./grpc/geyser"; + +// Constants +export const DEFAULT_DRAGONSMOUTH_CAPACITY = 10000; +export const DEFAULT_COMMIT_INTERVAL = 5.0; // seconds +export const DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT = 3; +export const DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP = 10; +export const DEFAULT_GC_INTERVAL = 60; // seconds +export const DEFAULT_SLOT_MEMORY_RETENTION = 300; // seconds + +export interface FumaroleSubscribeConfig { + // The maximum number of concurrent download tasks per TCP connection. + concurrentDownloadLimit?: number; + + // The interval at which to commit the slot memory. + commitInterval?: number; + + // The maximum number of failed slot download attempts before giving up. + maxFailedSlotDownloadAttempt?: number; + + // The maximum number of slots to download concurrently. + dataChannelCapacity?: number; + + // The interval at which to perform garbage collection on the slot memory. + gcInterval?: number; + + // The retention period for slot memory in seconds. 
+ slotMemoryRetention?: number; +} + +export interface DragonsmouthAdapterSession { + // The queue for sending SubscribeRequest update to the dragonsmouth stream + sink: AsyncQueue; + + // The queue for receiving SubscribeUpdate from the dragonsmouth stream + source: AsyncQueue; + + // The handle for the fumarole runtime + fumaroleHandle: Promise; +} + +// Generic async queue interface to mimic Python's asyncio.Queue +export class AsyncQueue { + private queue: T[] = []; + private maxSize: number; + private resolvers: ((value: T) => void)[] = []; + private full_resolvers: (() => void)[] = []; + private closed = false; + + constructor(maxSize = 0) { + this.maxSize = maxSize; + } + + async put(item: T): Promise { + if (this.closed) { + throw new Error("Queue is closed"); + } + + if (this.maxSize > 0 && this.queue.length >= this.maxSize) { + return new Promise((resolve) => { + this.full_resolvers.push(resolve); + }); + } + + this.queue.push(item); + const resolver = this.resolvers.shift(); + if (resolver) { + resolver(this.queue.shift()!); + } + } + + async get(): Promise { + if (this.closed && this.queue.length === 0) { + throw new Error("Queue is closed"); + } + + if (this.queue.length === 0) { + return new Promise((resolve) => { + this.resolvers.push(resolve); + }); + } + + const item = this.queue.shift()!; + const full_resolver = this.full_resolvers.shift(); + if (full_resolver) { + full_resolver(); + } + return item; + } + + close(): void { + this.closed = true; + // Resolve all pending gets with an error + this.resolvers.forEach((resolve) => { + resolve(undefined as any); + }); + this.resolvers = []; + } +} diff --git a/typescript-sdk/src/utils/aio.ts b/typescript-sdk/src/utils/aio.ts new file mode 100644 index 0000000..1bd9f5b --- /dev/null +++ b/typescript-sdk/src/utils/aio.ts @@ -0,0 +1,53 @@ +/** + * Asynchronous utilities for TypeScript + */ + +/** + * Create a forever pending promise. This promise is not resolved and will never be resolved. 
+ * This is useful for testing purposes. + * @returns A promise that never resolves + */ +export async function never(): Promise { + return new Promise(() => { + // This promise intentionally never resolves + }); +} + +/** + * A class that represents an interval that can be used to run async operations periodically + */ +export class Interval { + private readonly interval: number; + + /** + * Create an interval that will run every `interval` seconds. + * @param interval The interval in seconds + */ + constructor(interval: number) { + this.interval = interval; + } + + /** + * Wait for the interval duration + * @returns A promise that resolves after the interval duration + */ + async tick(): Promise { + // Convert seconds to milliseconds for setTimeout + return new Promise((resolve) => setTimeout(resolve, this.interval * 1000)); + } +} + +/** + * Type for any function that returns a Promise + */ +export type AsyncFunction = () => Promise; + +/** + * Helper functions and utilities for logging + */ +export const logger = { + debug: (...args: any[]) => console.debug("[DEBUG]", ...args), + info: (...args: any[]) => console.info("[INFO]", ...args), + warn: (...args: any[]) => console.warn("[WARN]", ...args), + error: (...args: any[]) => console.error("[ERROR]", ...args), +}; diff --git a/typescript-sdk/tsconfig.cjs.json b/typescript-sdk/tsconfig.cjs.json deleted file mode 100644 index 9227db7..0000000 --- a/typescript-sdk/tsconfig.cjs.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "compilerOptions": { - "module": "CommonJS", - "esModuleInterop": true, - "noImplicitAny": true, - "removeComments": true, - "preserveConstEnums": true, - "sourceMap": true, - "allowJs": true, - "outDir": "dist/cjs", - "moduleResolution": "Node" - }, - "files": ["src/index.ts"], - "include": ["src/**/*"], - "exclude": ["node_modules", "**/*.test.js"] -} \ No newline at end of file diff --git a/typescript-sdk/tsconfig.esm.json b/typescript-sdk/tsconfig.esm.json deleted file mode 100644 index 
fe32f25..0000000 --- a/typescript-sdk/tsconfig.esm.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "compilerOptions": { - "module": "ESNext", - "target": "ES2022", - "moduleResolution": "Node", - "esModuleInterop": true, - "outDir": "dist/esm", - "declaration": true, - "declarationDir": "dist/types" - }, - "files": ["src/index.ts"], - "include": ["src/**/*"], - "exclude": ["node_modules", "**/*.test.js"] -} diff --git a/typescript-sdk/tsconfig.json b/typescript-sdk/tsconfig.json deleted file mode 100644 index 26ba6c4..0000000 --- a/typescript-sdk/tsconfig.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "compilerOptions": { - "esModuleInterop": true, - "outDir": "dist", - "rootDir": "src", - "declaration": true - } -} From 63e6aba16026ac5fb3803f442b7c7b5239abe342 Mon Sep 17 00:00:00 2001 From: Wilfred Almeida <60785452+WilfredAlmeida@users.noreply.github.com> Date: Mon, 11 Aug 2025 11:39:17 +0000 Subject: [PATCH 44/56] refactor: list, get consumer groups works Signed-off-by: GitHub --- typescript-sdk/add-js-extensions.mjs | 0 typescript-sdk/examples/README.md | 0 .../examples/list-consumer-groups.js | 138 + .../examples/list-consumer-groups.ts | 90 + typescript-sdk/package.json | 5 +- typescript-sdk/pnpm-lock.yaml | 32 +- typescript-sdk/src/config/config.js | 27 + typescript-sdk/src/connectivity.js | 170 + typescript-sdk/src/connectivity.ts | 102 +- typescript-sdk/src/grpc/connectivity.js | 0 typescript-sdk/src/grpc/fumarole.js | 2927 +++++++++++ typescript-sdk/src/grpc/geyser.js | 4457 +++++++++++++++++ .../src/grpc/google/protobuf/timestamp.js | 84 + typescript-sdk/src/grpc/solana-storage.js | 2055 ++++++++ typescript-sdk/src/index.js | 448 ++ typescript-sdk/src/index.ts | 199 +- typescript-sdk/src/runtime/aio.js | 593 +++ typescript-sdk/src/runtime/aio.ts | 26 +- typescript-sdk/src/runtime/queue.js | 125 + typescript-sdk/src/runtime/state-machine.js | 369 ++ typescript-sdk/src/runtime/state-machine.ts | 382 +- typescript-sdk/src/types.js | 111 + typescript-sdk/src/utils/aio.js 
| 126 + typescript-sdk/tsconfig.cjs.json | 0 typescript-sdk/tsconfig.esm.json | 0 typescript-sdk/tsconfig.json | 0 26 files changed, 12376 insertions(+), 90 deletions(-) create mode 100644 typescript-sdk/add-js-extensions.mjs create mode 100644 typescript-sdk/examples/README.md create mode 100644 typescript-sdk/examples/list-consumer-groups.js create mode 100644 typescript-sdk/examples/list-consumer-groups.ts create mode 100644 typescript-sdk/src/config/config.js create mode 100644 typescript-sdk/src/connectivity.js create mode 100644 typescript-sdk/src/grpc/connectivity.js create mode 100644 typescript-sdk/src/grpc/fumarole.js create mode 100644 typescript-sdk/src/grpc/geyser.js create mode 100644 typescript-sdk/src/grpc/google/protobuf/timestamp.js create mode 100644 typescript-sdk/src/grpc/solana-storage.js create mode 100644 typescript-sdk/src/index.js create mode 100644 typescript-sdk/src/runtime/aio.js create mode 100644 typescript-sdk/src/runtime/queue.js create mode 100644 typescript-sdk/src/runtime/state-machine.js create mode 100644 typescript-sdk/src/types.js create mode 100644 typescript-sdk/src/utils/aio.js create mode 100644 typescript-sdk/tsconfig.cjs.json create mode 100644 typescript-sdk/tsconfig.esm.json create mode 100644 typescript-sdk/tsconfig.json diff --git a/typescript-sdk/add-js-extensions.mjs b/typescript-sdk/add-js-extensions.mjs new file mode 100644 index 0000000..e69de29 diff --git a/typescript-sdk/examples/README.md b/typescript-sdk/examples/README.md new file mode 100644 index 0000000..e69de29 diff --git a/typescript-sdk/examples/list-consumer-groups.js b/typescript-sdk/examples/list-consumer-groups.js new file mode 100644 index 0000000..ce736af --- /dev/null +++ b/typescript-sdk/examples/list-consumer-groups.js @@ -0,0 +1,138 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __generator = (this && this.__generator) || function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype); + return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +var src_1 = require("../src"); +function main() { + return __awaiter(this, void 0, void 0, function () { + var config, client, response, _i, _a, group, info, err_1, error_1, error_2; + return __generator(this, function (_b) { + switch (_b.label) { + case 0: + _b.trys.push([0, 13, , 14]); + config = { + endpoint: "https://fra141.nodes.rpcpool.com", // Replace with your Fumarole endpoint + xToken: "7b042cd6-ea1e-46af-b46b-653bdce119f6", + maxDecodingMessageSizeBytes: 100 * 1024 * 1024, // 100MB max message size + xMetadata: {}, // Additional metadata if needed + }; + // Connect to the Fumarole server + console.log("Connecting to Fumarole server..."); + return [4 /*yield*/, src_1.FumaroleClient.connect(config)]; + case 1: + client = _b.sent(); + console.log("Connected successfully"); + // List all consumer groups + console.log("\nFetching consumer groups..."); + _b.label = 2; + case 2: + _b.trys.push([2, 11, , 12]); + console.log("Sending listConsumerGroups request to 
server..."); + process.on("unhandledRejection", function (reason, promise) { + console.error("Unhandled Rejection at:", promise, "reason:", reason); + }); + return [4 /*yield*/, client.listConsumerGroups().catch(function (error) { + console.error("Caught error during listConsumerGroups:", error); + if (error.code) + console.error("Error code:", error.code); + if (error.details) + console.error("Error details:", error.details); + if (error.metadata) + console.error("Error metadata:", error.metadata); + if (error.stack) + console.error("Error stack:", error.stack); + throw error; + })]; + case 3: + response = _b.sent(); + console.log("\n=== ListConsumerGroups Response ==="); + console.log(JSON.stringify(response, null, 2)); + console.log("=====================================\n"); + if (!(!response.consumerGroups || response.consumerGroups.length === 0)) return [3 /*break*/, 4]; + console.log("No consumer groups found on server"); + return [3 /*break*/, 10]; + case 4: + console.log("Found ".concat(response.consumerGroups.length, " consumer groups. 
Fetching details...\n")); + _i = 0, _a = response.consumerGroups; + _b.label = 5; + case 5: + if (!(_i < _a.length)) return [3 /*break*/, 10]; + group = _a[_i]; + console.log("=== Consumer Group: ".concat(group.consumerGroupName, " ===")); + console.log("Basic info:", JSON.stringify(group, null, 2)); + _b.label = 6; + case 6: + _b.trys.push([6, 8, , 9]); + console.log("\nFetching detailed info for group: ".concat(group.consumerGroupName)); + return [4 /*yield*/, client.getConsumerGroupInfo(group.consumerGroupName)]; + case 7: + info = _b.sent(); + if (info) { + console.log("\nDetailed Group Info:"); + console.log("Status: Active"); + console.log("Server Response:", JSON.stringify(info, null, 2)); + } + else { + console.log("\nGroup Status: Not found or inactive"); + } + console.log("===============================\n"); + return [3 /*break*/, 9]; + case 8: + err_1 = _b.sent(); + console.error("\nError fetching group info from server: ".concat(err_1 instanceof Error ? err_1.message : String(err_1))); + return [3 /*break*/, 9]; + case 9: + _i++; + return [3 /*break*/, 5]; + case 10: return [3 /*break*/, 12]; + case 11: + error_1 = _b.sent(); + console.error("Error:", error_1 instanceof Error ? error_1.message : String(error_1)); + process.exit(1); + return [3 /*break*/, 12]; + case 12: return [3 /*break*/, 14]; + case 13: + error_2 = _b.sent(); + console.error("Error:", error_2 instanceof Error ? 
error_2.message : String(error_2)); + process.exit(1); + return [3 /*break*/, 14]; + case 14: return [2 /*return*/]; + } + }); + }); +} +main().catch(console.error); diff --git a/typescript-sdk/examples/list-consumer-groups.ts b/typescript-sdk/examples/list-consumer-groups.ts new file mode 100644 index 0000000..93cd678 --- /dev/null +++ b/typescript-sdk/examples/list-consumer-groups.ts @@ -0,0 +1,90 @@ +import { FumaroleClient, FumaroleConfig } from "../src"; + +async function main() { + try { + // Configure the client + const config: FumaroleConfig = { + endpoint: "https://fra141.nodes.rpcpool.com", // Replace with your Fumarole endpoint + xToken: "7b042cd6-ea1e-46af-b46b-653bdce119f6", + maxDecodingMessageSizeBytes: 100 * 1024 * 1024, // 100MB max message size + xMetadata: {}, // Additional metadata if needed + }; + + // Connect to the Fumarole server + console.log("Connecting to Fumarole server..."); + const client = await FumaroleClient.connect(config); + console.log("Connected successfully"); + + // List all consumer groups + console.log("\nFetching consumer groups..."); + try { + console.log("Sending listConsumerGroups request to server..."); + process.on("unhandledRejection", (reason, promise) => { + console.error("Unhandled Rejection at:", promise, "reason:", reason); + }); + + const response = await client.listConsumerGroups().catch((error) => { + console.error("Caught error during listConsumerGroups:", error); + if (error.code) console.error("Error code:", error.code); + if (error.details) console.error("Error details:", error.details); + if (error.metadata) console.error("Error metadata:", error.metadata); + if (error.stack) console.error("Error stack:", error.stack); + throw error; + }); + + console.log("\n=== ListConsumerGroups Response ==="); + console.log(JSON.stringify(response, null, 2)); + console.log("=====================================\n"); + + if (!response.consumerGroups || response.consumerGroups.length === 0) { + console.log("No consumer 
groups found on server"); + } else { + console.log( + `Found ${response.consumerGroups.length} consumer groups. Fetching details...\n` + ); + for (const group of response.consumerGroups) { + console.log(`=== Consumer Group: ${group.consumerGroupName} ===`); + console.log("Basic info:", JSON.stringify(group, null, 2)); + + // Get detailed info for the group + try { + console.log( + `\nFetching detailed info for group: ${group.consumerGroupName}` + ); + const info = await client.getConsumerGroupInfo( + group.consumerGroupName + ); + if (info) { + console.log("\nDetailed Group Info:"); + console.log("Status: Active"); + console.log("Server Response:", JSON.stringify(info, null, 2)); + } else { + console.log("\nGroup Status: Not found or inactive"); + } + console.log("===============================\n"); + } catch (err) { + console.error( + `\nError fetching group info from server: ${ + err instanceof Error ? err.message : String(err) + }` + ); + } + } + } + } catch (error) { + console.error( + "Error:", + error instanceof Error ? error.message : String(error) + ); + process.exit(1); + } + } catch (error) { + console.error( + "Error:", + error instanceof Error ? 
error.message : String(error) + ); + process.exit(1); + } +} + +main().catch(console.error); diff --git a/typescript-sdk/package.json b/typescript-sdk/package.json index 854cd37..c52b5cb 100644 --- a/typescript-sdk/package.json +++ b/typescript-sdk/package.json @@ -30,9 +30,12 @@ ], "homepage": "https://triton.one", "devDependencies": { - "ts-proto": "^2.7.7" + "ts-proto": "^2.7.7", + "typescript": "^5.2.2", + "@types/node": "^22.17.1" }, "dependencies": { + "@bufbuild/protobuf": "^2.6.3", "@grpc/grpc-js": "^1.13.4", "@types/js-yaml": "^4.0.9", "js-yaml": "^4.1.0" diff --git a/typescript-sdk/pnpm-lock.yaml b/typescript-sdk/pnpm-lock.yaml index 2ea94b9..4cd325f 100644 --- a/typescript-sdk/pnpm-lock.yaml +++ b/typescript-sdk/pnpm-lock.yaml @@ -8,6 +8,9 @@ importers: .: dependencies: + '@bufbuild/protobuf': + specifier: ^2.6.3 + version: 2.6.3 '@grpc/grpc-js': specifier: ^1.13.4 version: 1.13.4 @@ -18,9 +21,15 @@ importers: specifier: ^4.1.0 version: 4.1.0 devDependencies: + '@types/node': + specifier: ^22.17.1 + version: 22.17.1 ts-proto: specifier: ^2.7.7 version: 2.7.7 + typescript: + specifier: ^5.2.2 + version: 5.9.2 packages: @@ -72,8 +81,8 @@ packages: '@types/js-yaml@4.0.9': resolution: {integrity: sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg==} - '@types/node@24.2.1': - resolution: {integrity: sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ==} + '@types/node@22.17.1': + resolution: {integrity: sha512-y3tBaz+rjspDTylNjAX37jEC3TETEFGNJL6uQDxwF9/8GLLIjW1rvVHlynyuUKMnMr1Roq8jOv3vkopBjC4/VA==} ansi-regex@5.0.1: resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} @@ -160,8 +169,13 @@ packages: resolution: {integrity: sha512-/OfN9/Yriji2bbpOysZ/Jzc96isOKz+eBTJEcKaIZ0PR6x1TNgVm4Lz0zfbo+J0jwFO7fJjJyssefBPQ0o1V9A==} hasBin: true - undici-types@7.10.0: - resolution: {integrity: 
sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag==} + typescript@5.9.2: + resolution: {integrity: sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A==} + engines: {node: '>=14.17'} + hasBin: true + + undici-types@6.21.0: + resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==} wrap-ansi@7.0.0: resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} @@ -222,9 +236,9 @@ snapshots: '@types/js-yaml@4.0.9': {} - '@types/node@24.2.1': + '@types/node@22.17.1': dependencies: - undici-types: 7.10.0 + undici-types: 6.21.0 ansi-regex@5.0.1: {} @@ -282,7 +296,7 @@ snapshots: '@protobufjs/path': 1.1.2 '@protobufjs/pool': 1.1.0 '@protobufjs/utf8': 1.1.0 - '@types/node': 24.2.1 + '@types/node': 22.17.1 long: 5.3.2 require-directory@2.1.1: {} @@ -312,7 +326,9 @@ snapshots: ts-poet: 6.12.0 ts-proto-descriptors: 2.0.0 - undici-types@7.10.0: {} + typescript@5.9.2: {} + + undici-types@6.21.0: {} wrap-ansi@7.0.0: dependencies: diff --git a/typescript-sdk/src/config/config.js b/typescript-sdk/src/config/config.js new file mode 100644 index 0000000..3679d89 --- /dev/null +++ b/typescript-sdk/src/config/config.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.FumaroleConfig = void 0; +var yaml = require("js-yaml"); +var FumaroleConfig = /** @class */ (function () { + function FumaroleConfig(options) { + var _a, _b; + this.endpoint = options.endpoint; + this.xToken = options.xToken; + this.maxDecodingMessageSizeBytes = + (_a = options.maxDecodingMessageSizeBytes) !== null && _a !== void 0 ? _a : FumaroleConfig.DEFAULT_MAX_DECODING_MESSAGE_SIZE; + this.xMetadata = (_b = options.xMetadata) !== null && _b !== void 0 ? 
_b : {}; + } + FumaroleConfig.fromYaml = function (yamlContent) { + var _a, _b; + var data = yaml.load(yamlContent); + return new FumaroleConfig({ + endpoint: data.endpoint, + xToken: data["x-token"] || data.x_token, + maxDecodingMessageSizeBytes: (_a = data.max_decoding_message_size_bytes) !== null && _a !== void 0 ? _a : FumaroleConfig.DEFAULT_MAX_DECODING_MESSAGE_SIZE, + xMetadata: (_b = data["x-metadata"]) !== null && _b !== void 0 ? _b : {}, + }); + }; + FumaroleConfig.DEFAULT_MAX_DECODING_MESSAGE_SIZE = 512000000; + return FumaroleConfig; +}()); +exports.FumaroleConfig = FumaroleConfig; diff --git a/typescript-sdk/src/connectivity.js b/typescript-sdk/src/connectivity.js new file mode 100644 index 0000000..ad5f135 --- /dev/null +++ b/typescript-sdk/src/connectivity.js @@ -0,0 +1,170 @@ +"use strict"; +var __assign = (this && this.__assign) || function () { + __assign = Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; + }; + return __assign.apply(this, arguments); +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __generator = (this && this.__generator) || function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype); + return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? 
op[1] : void 0, done: true }; + } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.FumaroleGrpcConnector = void 0; +var grpc_js_1 = require("@grpc/grpc-js"); +var fumarole_1 = require("./grpc/fumarole"); +var X_TOKEN_HEADER = "x-token"; +var TritonAuthMetadataGenerator = /** @class */ (function () { + function TritonAuthMetadataGenerator(xToken) { + this.xToken = xToken; + } + TritonAuthMetadataGenerator.prototype.generateMetadata = function () { + var metadata = new grpc_js_1.Metadata(); + metadata.set(X_TOKEN_HEADER, this.xToken); + return Promise.resolve(metadata); + }; + return TritonAuthMetadataGenerator; +}()); +var MetadataProvider = /** @class */ (function () { + function MetadataProvider(metadata) { + var _this = this; + this.metadata = new grpc_js_1.Metadata(); + Object.entries(metadata).forEach(function (_a) { + var key = _a[0], value = _a[1]; + _this.metadata.set(key, value); + }); + } + MetadataProvider.prototype.getMetadata = function () { + return Promise.resolve(this.metadata); + }; + return MetadataProvider; +}()); +var FumaroleGrpcConnector = /** @class */ (function () { + function FumaroleGrpcConnector(config, endpoint) { + this.config = config; + this.endpoint = endpoint; + } + FumaroleGrpcConnector.prototype.connect = function () { + return __awaiter(this, arguments, void 0, function (grpcOptions) { + var options, channelCredentials, insecureXToken, endpointURL, port, address, clientOptions, client, error_1; + var _this = this; + if (grpcOptions === void 0) { grpcOptions = {}; } + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + options = __assign({ "grpc.max_receive_message_length": 111111110 }, grpcOptions); + endpointURL = new URL(this.endpoint); + port = endpointURL.port; + if (port === "") { + port = endpointURL.protocol === "https:" ? 
"443" : "80"; + } + address = "".concat(endpointURL.hostname, ":").concat(port); + // Handle credentials based on protocol + if (endpointURL.protocol === "https:") { + channelCredentials = grpc_js_1.credentials.combineChannelCredentials(grpc_js_1.credentials.createSsl(), grpc_js_1.credentials.createFromMetadataGenerator(function (_params, callback) { + var metadata = new grpc_js_1.Metadata(); + if (_this.config.xToken) { + metadata.add("x-token", _this.config.xToken); + } + if (_this.config.xMetadata) { + Object.entries(_this.config.xMetadata).forEach(function (_a) { + var key = _a[0], value = _a[1]; + metadata.add(key, value); + }); + } + callback(null, metadata); + })); + } + else { + channelCredentials = grpc_js_1.credentials.createInsecure(); + if (this.config.xToken) { + insecureXToken = this.config.xToken; + } + } + clientOptions = __assign(__assign({}, options), { "grpc.enable_http_proxy": 0, + // Basic keepalive settings + "grpc.keepalive_time_ms": 20000, "grpc.keepalive_timeout_ms": 10000, "grpc.http2.min_time_between_pings_ms": 10000, + // Connection settings + "grpc.initial_reconnect_backoff_ms": 100, "grpc.max_reconnect_backoff_ms": 3000, "grpc.min_reconnect_backoff_ms": 100, + // Enable retries + "grpc.enable_retries": 1, "grpc.service_config": JSON.stringify({ + methodConfig: [ + { + name: [{}], // Apply to all methods + retryPolicy: { + maxAttempts: 5, + initialBackoff: "0.1s", + maxBackoff: "3s", + backoffMultiplier: 2, + retryableStatusCodes: ["UNAVAILABLE", "DEADLINE_EXCEEDED"], + }, + }, + ], + }) }); + client = new fumarole_1.FumaroleClient(address, channelCredentials, clientOptions); + _a.label = 1; + case 1: + _a.trys.push([1, 3, , 4]); + return [4 /*yield*/, new Promise(function (resolve, reject) { + var deadline = Date.now() + 5000; // 5 second timeout + client.waitForReady(deadline, function (err) { + if (err) { + reject(err); + } + else { + resolve(); + } + }); + })]; + case 2: + _a.sent(); + return [3 /*break*/, 4]; + case 3: + error_1 = 
_a.sent(); + throw error_1; + case 4: return [2 /*return*/, client]; + } + }); + }); + }; + FumaroleGrpcConnector.logger = console; + return FumaroleGrpcConnector; +}()); +exports.FumaroleGrpcConnector = FumaroleGrpcConnector; diff --git a/typescript-sdk/src/connectivity.ts b/typescript-sdk/src/connectivity.ts index b6222a4..3c101de 100644 --- a/typescript-sdk/src/connectivity.ts +++ b/typescript-sdk/src/connectivity.ts @@ -50,40 +50,92 @@ export class FumaroleGrpcConnector { }; let channelCredentials: ChannelCredentials; - const metadataProvider = new MetadataProvider(this.config.xMetadata); - const callCredentials = credentials.createFromMetadataGenerator( - metadataProvider.getMetadata.bind(metadataProvider) - ); - - if (this.config.xToken) { - // SSL credentials for HTTPS endpoint - const sslCreds = credentials.createSsl(); + let insecureXToken: string | undefined; - // Create call credentials with token - const authGenerator = new TritonAuthMetadataGenerator(this.config.xToken); - const callCreds = credentials.createFromMetadataGenerator( - authGenerator.generateMetadata.bind(authGenerator) - ); + // Parse endpoint properly + const endpointURL = new URL(this.endpoint); + let port = endpointURL.port; + if (port === "") { + port = endpointURL.protocol === "https:" ? 
"443" : "80"; + } + const address = `${endpointURL.hostname}:${port}`; - // Combine credentials + // Handle credentials based on protocol + if (endpointURL.protocol === "https:") { channelCredentials = credentials.combineChannelCredentials( - sslCreds, - callCreds - ); - FumaroleGrpcConnector.logger.debug( - "Using secure channel with x-token authentication" + credentials.createSsl(), + credentials.createFromMetadataGenerator((_params, callback) => { + const metadata = new Metadata(); + if (this.config.xToken) { + metadata.add("x-token", this.config.xToken); + } + if (this.config.xMetadata) { + Object.entries(this.config.xMetadata).forEach(([key, value]) => { + metadata.add(key, value); + }); + } + callback(null, metadata); + }) ); } else { channelCredentials = credentials.createInsecure(); - FumaroleGrpcConnector.logger.debug( - "Using insecure channel without authentication" - ); + if (this.config.xToken) { + insecureXToken = this.config.xToken; + } } - // Create the client with credentials and options - const client = new FumaroleClient(this.endpoint, channelCredentials, { + // Create the client options with simpler settings + const clientOptions = { ...options, - }); + "grpc.enable_http_proxy": 0, + // Basic keepalive settings + "grpc.keepalive_time_ms": 20000, + "grpc.keepalive_timeout_ms": 10000, + "grpc.http2.min_time_between_pings_ms": 10000, + // Connection settings + "grpc.initial_reconnect_backoff_ms": 100, + "grpc.max_reconnect_backoff_ms": 3000, + "grpc.min_reconnect_backoff_ms": 100, + // Enable retries + "grpc.enable_retries": 1, + "grpc.service_config": JSON.stringify({ + methodConfig: [ + { + name: [{}], // Apply to all methods + retryPolicy: { + maxAttempts: 5, + initialBackoff: "0.1s", + maxBackoff: "3s", + backoffMultiplier: 2, + retryableStatusCodes: ["UNAVAILABLE", "DEADLINE_EXCEEDED"], + }, + }, + ], + }), + }; + + // Create the client with credentials and options + const client = new FumaroleClient( + address, + channelCredentials, + 
clientOptions + ); + + // Do a simple connection check + try { + await new Promise((resolve, reject) => { + const deadline = Date.now() + 5000; // 5 second timeout + client.waitForReady(deadline, (err) => { + if (err) { + reject(err); + } else { + resolve(); + } + }); + }); + } catch (error) { + throw error; + } return client; } diff --git a/typescript-sdk/src/grpc/connectivity.js b/typescript-sdk/src/grpc/connectivity.js new file mode 100644 index 0000000..e69de29 diff --git a/typescript-sdk/src/grpc/fumarole.js b/typescript-sdk/src/grpc/fumarole.js new file mode 100644 index 0000000..ffcea80 --- /dev/null +++ b/typescript-sdk/src/grpc/fumarole.js @@ -0,0 +1,2927 @@ +"use strict"; +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.7.7 +// protoc v3.12.4 +// source: fumarole.proto +Object.defineProperty(exports, "__esModule", { value: true }); +exports.FumaroleClient = exports.FumaroleService = exports.CreateConsumerGroupRequest = exports.CreateConsumerGroupResponse = exports.InitialConsumerGroupState_LastCommittedOffsetsEntry = exports.InitialConsumerGroupState = exports.CommitOffsetResult = exports.ControlResponse = exports.ControlCommand = exports.JoinControlPlane = exports.BlockchainHistory = exports.BlockchainEvent = exports.PollBlockchainHistory = exports.CommitOffset = exports.DataResponse = exports.DataError = exports.BlockNotFound = exports.BlockShardDownloadFinish = exports.DataCommand = exports.Pong = exports.Ping = exports.DownloadBlockShard = exports.BlockFilters_BlocksMetaEntry = exports.BlockFilters_EntriesEntry = exports.BlockFilters_TransactionsEntry = exports.BlockFilters_AccountsEntry = exports.BlockFilters = exports.GetSlotLagInfoRequest = exports.ConsumerGroupInfo = exports.ListConsumerGroupsResponse = exports.ListConsumerGroupsRequest = exports.DeleteConsumerGroupResponse = exports.DeleteConsumerGroupRequest = exports.GetConsumerGroupInfoRequest = exports.VersionResponse = exports.VersionRequest = 
exports.GetChainTipResponse_ShardToMaxOffsetMapEntry = exports.GetChainTipResponse = exports.GetChainTipRequest = exports.InitialOffsetPolicy = exports.protobufPackage = void 0; +exports.initialOffsetPolicyFromJSON = initialOffsetPolicyFromJSON; +exports.initialOffsetPolicyToJSON = initialOffsetPolicyToJSON; +/* eslint-disable */ +var wire_1 = require("@bufbuild/protobuf/wire"); +var grpc_js_1 = require("@grpc/grpc-js"); +var geyser_1 = require("./geyser"); +exports.protobufPackage = "fumarole"; +var InitialOffsetPolicy; +(function (InitialOffsetPolicy) { + /** LATEST - FROM_SLOT = 1; */ + InitialOffsetPolicy[InitialOffsetPolicy["LATEST"] = 0] = "LATEST"; + InitialOffsetPolicy[InitialOffsetPolicy["UNRECOGNIZED"] = -1] = "UNRECOGNIZED"; +})(InitialOffsetPolicy || (exports.InitialOffsetPolicy = InitialOffsetPolicy = {})); +function initialOffsetPolicyFromJSON(object) { + switch (object) { + case 0: + case "LATEST": + return InitialOffsetPolicy.LATEST; + case -1: + case "UNRECOGNIZED": + default: + return InitialOffsetPolicy.UNRECOGNIZED; + } +} +function initialOffsetPolicyToJSON(object) { + switch (object) { + case InitialOffsetPolicy.LATEST: + return "LATEST"; + case InitialOffsetPolicy.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +function createBaseGetChainTipRequest() { + return { blockchainId: new Uint8Array(0) }; +} +exports.GetChainTipRequest = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.blockchainId.length !== 0) { + writer.uint32(10).bytes(message.blockchainId); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseGetChainTipRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.blockchainId = reader.bytes(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { blockchainId: isSet(object.blockchainId) ? bytesFromBase64(object.blockchainId) : new Uint8Array(0) }; + }, + toJSON: function (message) { + var obj = {}; + if (message.blockchainId.length !== 0) { + obj.blockchainId = base64FromBytes(message.blockchainId); + } + return obj; + }, + create: function (base) { + return exports.GetChainTipRequest.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseGetChainTipRequest(); + message.blockchainId = (_a = object.blockchainId) !== null && _a !== void 0 ? _a : new Uint8Array(0); + return message; + }, +}; +function createBaseGetChainTipResponse() { + return { blockchainId: new Uint8Array(0), shardToMaxOffsetMap: {} }; +} +exports.GetChainTipResponse = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.blockchainId.length !== 0) { + writer.uint32(10).bytes(message.blockchainId); + } + Object.entries(message.shardToMaxOffsetMap).forEach(function (_a) { + var key = _a[0], value = _a[1]; + exports.GetChainTipResponse_ShardToMaxOffsetMapEntry.encode({ key: key, value: value }, writer.uint32(18).fork()).join(); + }); + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseGetChainTipResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.blockchainId = reader.bytes(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + var entry2 = exports.GetChainTipResponse_ShardToMaxOffsetMapEntry.decode(reader, reader.uint32()); + if (entry2.value !== undefined) { + message.shardToMaxOffsetMap[entry2.key] = entry2.value; + } + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + blockchainId: isSet(object.blockchainId) ? bytesFromBase64(object.blockchainId) : new Uint8Array(0), + shardToMaxOffsetMap: isObject(object.shardToMaxOffsetMap) + ? Object.entries(object.shardToMaxOffsetMap).reduce(function (acc, _a) { + var key = _a[0], value = _a[1]; + acc[globalThis.Number(key)] = String(value); + return acc; + }, {}) + : {}, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.blockchainId.length !== 0) { + obj.blockchainId = base64FromBytes(message.blockchainId); + } + if (message.shardToMaxOffsetMap) { + var entries = Object.entries(message.shardToMaxOffsetMap); + if (entries.length > 0) { + obj.shardToMaxOffsetMap = {}; + entries.forEach(function (_a) { + var k = _a[0], v = _a[1]; + obj.shardToMaxOffsetMap[k] = v; + }); + } + } + return obj; + }, + create: function (base) { + return exports.GetChainTipResponse.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b; + var message = createBaseGetChainTipResponse(); + message.blockchainId = (_a = object.blockchainId) !== null && _a !== void 0 ? _a : new Uint8Array(0); + message.shardToMaxOffsetMap = Object.entries((_b = object.shardToMaxOffsetMap) !== null && _b !== void 0 ? 
_b : {}).reduce(function (acc, _a) { + var key = _a[0], value = _a[1]; + if (value !== undefined) { + acc[globalThis.Number(key)] = globalThis.String(value); + } + return acc; + }, {}); + return message; + }, +}; +function createBaseGetChainTipResponse_ShardToMaxOffsetMapEntry() { + return { key: 0, value: "0" }; +} +exports.GetChainTipResponse_ShardToMaxOffsetMapEntry = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.key !== 0) { + writer.uint32(8).int32(message.key); + } + if (message.value !== "0") { + writer.uint32(16).int64(message.value); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseGetChainTipResponse_ShardToMaxOffsetMapEntry(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.key = reader.int32(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + message.value = reader.int64().toString(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + key: isSet(object.key) ? globalThis.Number(object.key) : 0, + value: isSet(object.value) ? globalThis.String(object.value) : "0", + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.key !== 0) { + obj.key = Math.round(message.key); + } + if (message.value !== "0") { + obj.value = message.value; + } + return obj; + }, + create: function (base) { + return exports.GetChainTipResponse_ShardToMaxOffsetMapEntry.fromPartial(base !== null && base !== void 0 ? 
base : {}); + }, + fromPartial: function (object) { + var _a, _b; + var message = createBaseGetChainTipResponse_ShardToMaxOffsetMapEntry(); + message.key = (_a = object.key) !== null && _a !== void 0 ? _a : 0; + message.value = (_b = object.value) !== null && _b !== void 0 ? _b : "0"; + return message; + }, +}; +function createBaseVersionRequest() { + return {}; +} +exports.VersionRequest = { + encode: function (_, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseVersionRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (_) { + return {}; + }, + toJSON: function (_) { + var obj = {}; + return obj; + }, + create: function (base) { + return exports.VersionRequest.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (_) { + var message = createBaseVersionRequest(); + return message; + }, +}; +function createBaseVersionResponse() { + return { version: "" }; +} +exports.VersionResponse = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.version !== "") { + writer.uint32(10).string(message.version); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseVersionResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.version = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { version: isSet(object.version) ? globalThis.String(object.version) : "" }; + }, + toJSON: function (message) { + var obj = {}; + if (message.version !== "") { + obj.version = message.version; + } + return obj; + }, + create: function (base) { + return exports.VersionResponse.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseVersionResponse(); + message.version = (_a = object.version) !== null && _a !== void 0 ? _a : ""; + return message; + }, +}; +function createBaseGetConsumerGroupInfoRequest() { + return { consumerGroupName: "" }; +} +exports.GetConsumerGroupInfoRequest = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.consumerGroupName !== "") { + writer.uint32(10).string(message.consumerGroupName); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseGetConsumerGroupInfoRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.consumerGroupName = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { consumerGroupName: isSet(object.consumerGroupName) ? 
globalThis.String(object.consumerGroupName) : "" }; + }, + toJSON: function (message) { + var obj = {}; + if (message.consumerGroupName !== "") { + obj.consumerGroupName = message.consumerGroupName; + } + return obj; + }, + create: function (base) { + return exports.GetConsumerGroupInfoRequest.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseGetConsumerGroupInfoRequest(); + message.consumerGroupName = (_a = object.consumerGroupName) !== null && _a !== void 0 ? _a : ""; + return message; + }, +}; +function createBaseDeleteConsumerGroupRequest() { + return { consumerGroupName: "" }; +} +exports.DeleteConsumerGroupRequest = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.consumerGroupName !== "") { + writer.uint32(10).string(message.consumerGroupName); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseDeleteConsumerGroupRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.consumerGroupName = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { consumerGroupName: isSet(object.consumerGroupName) ? globalThis.String(object.consumerGroupName) : "" }; + }, + toJSON: function (message) { + var obj = {}; + if (message.consumerGroupName !== "") { + obj.consumerGroupName = message.consumerGroupName; + } + return obj; + }, + create: function (base) { + return exports.DeleteConsumerGroupRequest.fromPartial(base !== null && base !== void 0 ? 
base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseDeleteConsumerGroupRequest(); + message.consumerGroupName = (_a = object.consumerGroupName) !== null && _a !== void 0 ? _a : ""; + return message; + }, +}; +function createBaseDeleteConsumerGroupResponse() { + return { success: false }; +} +exports.DeleteConsumerGroupResponse = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.success !== false) { + writer.uint32(8).bool(message.success); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseDeleteConsumerGroupResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.success = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { success: isSet(object.success) ? globalThis.Boolean(object.success) : false }; + }, + toJSON: function (message) { + var obj = {}; + if (message.success !== false) { + obj.success = message.success; + } + return obj; + }, + create: function (base) { + return exports.DeleteConsumerGroupResponse.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseDeleteConsumerGroupResponse(); + message.success = (_a = object.success) !== null && _a !== void 0 ? 
_a : false; + return message; + }, +}; +function createBaseListConsumerGroupsRequest() { + return {}; +} +exports.ListConsumerGroupsRequest = { + encode: function (_, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseListConsumerGroupsRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (_) { + return {}; + }, + toJSON: function (_) { + var obj = {}; + return obj; + }, + create: function (base) { + return exports.ListConsumerGroupsRequest.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (_) { + var message = createBaseListConsumerGroupsRequest(); + return message; + }, +}; +function createBaseListConsumerGroupsResponse() { + return { consumerGroups: [] }; +} +exports.ListConsumerGroupsResponse = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + for (var _i = 0, _a = message.consumerGroups; _i < _a.length; _i++) { + var v = _a[_i]; + exports.ConsumerGroupInfo.encode(v, writer.uint32(10).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseListConsumerGroupsResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.consumerGroups.push(exports.ConsumerGroupInfo.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + consumerGroups: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.consumerGroups) + ? object.consumerGroups.map(function (e) { return exports.ConsumerGroupInfo.fromJSON(e); }) + : [], + }; + }, + toJSON: function (message) { + var _a; + var obj = {}; + if ((_a = message.consumerGroups) === null || _a === void 0 ? void 0 : _a.length) { + obj.consumerGroups = message.consumerGroups.map(function (e) { return exports.ConsumerGroupInfo.toJSON(e); }); + } + return obj; + }, + create: function (base) { + return exports.ListConsumerGroupsResponse.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseListConsumerGroupsResponse(); + message.consumerGroups = ((_a = object.consumerGroups) === null || _a === void 0 ? 
void 0 : _a.map(function (e) { return exports.ConsumerGroupInfo.fromPartial(e); })) || []; + return message; + }, +}; +function createBaseConsumerGroupInfo() { + return { id: "", consumerGroupName: "", isStale: false, blockchainId: new Uint8Array(0) }; +} +exports.ConsumerGroupInfo = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.id !== "") { + writer.uint32(10).string(message.id); + } + if (message.consumerGroupName !== "") { + writer.uint32(18).string(message.consumerGroupName); + } + if (message.isStale !== false) { + writer.uint32(24).bool(message.isStale); + } + if (message.blockchainId.length !== 0) { + writer.uint32(34).bytes(message.blockchainId); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseConsumerGroupInfo(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.id = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.consumerGroupName = reader.string(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + message.isStale = reader.bool(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + message.blockchainId = reader.bytes(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + id: isSet(object.id) ? globalThis.String(object.id) : "", + consumerGroupName: isSet(object.consumerGroupName) ? globalThis.String(object.consumerGroupName) : "", + isStale: isSet(object.isStale) ? globalThis.Boolean(object.isStale) : false, + blockchainId: isSet(object.blockchainId) ? 
bytesFromBase64(object.blockchainId) : new Uint8Array(0), + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.id !== "") { + obj.id = message.id; + } + if (message.consumerGroupName !== "") { + obj.consumerGroupName = message.consumerGroupName; + } + if (message.isStale !== false) { + obj.isStale = message.isStale; + } + if (message.blockchainId.length !== 0) { + obj.blockchainId = base64FromBytes(message.blockchainId); + } + return obj; + }, + create: function (base) { + return exports.ConsumerGroupInfo.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c, _d; + var message = createBaseConsumerGroupInfo(); + message.id = (_a = object.id) !== null && _a !== void 0 ? _a : ""; + message.consumerGroupName = (_b = object.consumerGroupName) !== null && _b !== void 0 ? _b : ""; + message.isStale = (_c = object.isStale) !== null && _c !== void 0 ? _c : false; + message.blockchainId = (_d = object.blockchainId) !== null && _d !== void 0 ? _d : new Uint8Array(0); + return message; + }, +}; +function createBaseGetSlotLagInfoRequest() { + return { consumerGroupName: "" }; +} +exports.GetSlotLagInfoRequest = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.consumerGroupName !== "") { + writer.uint32(10).string(message.consumerGroupName); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseGetSlotLagInfoRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.consumerGroupName = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { consumerGroupName: isSet(object.consumerGroupName) ? globalThis.String(object.consumerGroupName) : "" }; + }, + toJSON: function (message) { + var obj = {}; + if (message.consumerGroupName !== "") { + obj.consumerGroupName = message.consumerGroupName; + } + return obj; + }, + create: function (base) { + return exports.GetSlotLagInfoRequest.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseGetSlotLagInfoRequest(); + message.consumerGroupName = (_a = object.consumerGroupName) !== null && _a !== void 0 ? 
_a : ""; + return message; + }, +}; +function createBaseBlockFilters() { + return { accounts: {}, transactions: {}, entries: {}, blocksMeta: {} }; +} +exports.BlockFilters = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + Object.entries(message.accounts).forEach(function (_a) { + var key = _a[0], value = _a[1]; + exports.BlockFilters_AccountsEntry.encode({ key: key, value: value }, writer.uint32(10).fork()).join(); + }); + Object.entries(message.transactions).forEach(function (_a) { + var key = _a[0], value = _a[1]; + exports.BlockFilters_TransactionsEntry.encode({ key: key, value: value }, writer.uint32(18).fork()).join(); + }); + Object.entries(message.entries).forEach(function (_a) { + var key = _a[0], value = _a[1]; + exports.BlockFilters_EntriesEntry.encode({ key: key, value: value }, writer.uint32(26).fork()).join(); + }); + Object.entries(message.blocksMeta).forEach(function (_a) { + var key = _a[0], value = _a[1]; + exports.BlockFilters_BlocksMetaEntry.encode({ key: key, value: value }, writer.uint32(34).fork()).join(); + }); + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseBlockFilters(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + var entry1 = exports.BlockFilters_AccountsEntry.decode(reader, reader.uint32()); + if (entry1.value !== undefined) { + message.accounts[entry1.key] = entry1.value; + } + continue; + } + case 2: { + if (tag !== 18) { + break; + } + var entry2 = exports.BlockFilters_TransactionsEntry.decode(reader, reader.uint32()); + if (entry2.value !== undefined) { + message.transactions[entry2.key] = entry2.value; + } + continue; + } + case 3: { + if (tag !== 26) { + break; + } + var entry3 = exports.BlockFilters_EntriesEntry.decode(reader, reader.uint32()); + if (entry3.value !== undefined) { + message.entries[entry3.key] = entry3.value; + } + continue; + } + case 4: { + if (tag !== 34) { + break; + } + var entry4 = exports.BlockFilters_BlocksMetaEntry.decode(reader, reader.uint32()); + if (entry4.value !== undefined) { + message.blocksMeta[entry4.key] = entry4.value; + } + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + accounts: isObject(object.accounts) + ? Object.entries(object.accounts).reduce(function (acc, _a) { + var key = _a[0], value = _a[1]; + acc[key] = geyser_1.SubscribeRequestFilterAccounts.fromJSON(value); + return acc; + }, {}) + : {}, + transactions: isObject(object.transactions) + ? Object.entries(object.transactions).reduce(function (acc, _a) { + var key = _a[0], value = _a[1]; + acc[key] = geyser_1.SubscribeRequestFilterTransactions.fromJSON(value); + return acc; + }, {}) + : {}, + entries: isObject(object.entries) + ? 
Object.entries(object.entries).reduce(function (acc, _a) { + var key = _a[0], value = _a[1]; + acc[key] = geyser_1.SubscribeRequestFilterEntry.fromJSON(value); + return acc; + }, {}) + : {}, + blocksMeta: isObject(object.blocksMeta) + ? Object.entries(object.blocksMeta).reduce(function (acc, _a) { + var key = _a[0], value = _a[1]; + acc[key] = geyser_1.SubscribeRequestFilterBlocksMeta.fromJSON(value); + return acc; + }, {}) + : {}, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.accounts) { + var entries = Object.entries(message.accounts); + if (entries.length > 0) { + obj.accounts = {}; + entries.forEach(function (_a) { + var k = _a[0], v = _a[1]; + obj.accounts[k] = geyser_1.SubscribeRequestFilterAccounts.toJSON(v); + }); + } + } + if (message.transactions) { + var entries = Object.entries(message.transactions); + if (entries.length > 0) { + obj.transactions = {}; + entries.forEach(function (_a) { + var k = _a[0], v = _a[1]; + obj.transactions[k] = geyser_1.SubscribeRequestFilterTransactions.toJSON(v); + }); + } + } + if (message.entries) { + var entries = Object.entries(message.entries); + if (entries.length > 0) { + obj.entries = {}; + entries.forEach(function (_a) { + var k = _a[0], v = _a[1]; + obj.entries[k] = geyser_1.SubscribeRequestFilterEntry.toJSON(v); + }); + } + } + if (message.blocksMeta) { + var entries = Object.entries(message.blocksMeta); + if (entries.length > 0) { + obj.blocksMeta = {}; + entries.forEach(function (_a) { + var k = _a[0], v = _a[1]; + obj.blocksMeta[k] = geyser_1.SubscribeRequestFilterBlocksMeta.toJSON(v); + }); + } + } + return obj; + }, + create: function (base) { + return exports.BlockFilters.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c, _d; + var message = createBaseBlockFilters(); + message.accounts = Object.entries((_a = object.accounts) !== null && _a !== void 0 ? 
_a : {}).reduce(function (acc, _a) { + var key = _a[0], value = _a[1]; + if (value !== undefined) { + acc[key] = geyser_1.SubscribeRequestFilterAccounts.fromPartial(value); + } + return acc; + }, {}); + message.transactions = Object.entries((_b = object.transactions) !== null && _b !== void 0 ? _b : {}).reduce(function (acc, _a) { + var key = _a[0], value = _a[1]; + if (value !== undefined) { + acc[key] = geyser_1.SubscribeRequestFilterTransactions.fromPartial(value); + } + return acc; + }, {}); + message.entries = Object.entries((_c = object.entries) !== null && _c !== void 0 ? _c : {}).reduce(function (acc, _a) { + var key = _a[0], value = _a[1]; + if (value !== undefined) { + acc[key] = geyser_1.SubscribeRequestFilterEntry.fromPartial(value); + } + return acc; + }, {}); + message.blocksMeta = Object.entries((_d = object.blocksMeta) !== null && _d !== void 0 ? _d : {}).reduce(function (acc, _a) { + var key = _a[0], value = _a[1]; + if (value !== undefined) { + acc[key] = geyser_1.SubscribeRequestFilterBlocksMeta.fromPartial(value); + } + return acc; + }, {}); + return message; + }, +}; +function createBaseBlockFilters_AccountsEntry() { + return { key: "", value: undefined }; +} +exports.BlockFilters_AccountsEntry = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== undefined) { + geyser_1.SubscribeRequestFilterAccounts.encode(message.value, writer.uint32(18).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseBlockFilters_AccountsEntry(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.value = geyser_1.SubscribeRequestFilterAccounts.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object.value) ? geyser_1.SubscribeRequestFilterAccounts.fromJSON(object.value) : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== undefined) { + obj.value = geyser_1.SubscribeRequestFilterAccounts.toJSON(message.value); + } + return obj; + }, + create: function (base) { + return exports.BlockFilters_AccountsEntry.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseBlockFilters_AccountsEntry(); + message.key = (_a = object.key) !== null && _a !== void 0 ? _a : ""; + message.value = (object.value !== undefined && object.value !== null) + ? 
geyser_1.SubscribeRequestFilterAccounts.fromPartial(object.value) + : undefined; + return message; + }, +}; +function createBaseBlockFilters_TransactionsEntry() { + return { key: "", value: undefined }; +} +exports.BlockFilters_TransactionsEntry = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== undefined) { + geyser_1.SubscribeRequestFilterTransactions.encode(message.value, writer.uint32(18).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseBlockFilters_TransactionsEntry(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.value = geyser_1.SubscribeRequestFilterTransactions.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object.value) ? geyser_1.SubscribeRequestFilterTransactions.fromJSON(object.value) : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== undefined) { + obj.value = geyser_1.SubscribeRequestFilterTransactions.toJSON(message.value); + } + return obj; + }, + create: function (base) { + return exports.BlockFilters_TransactionsEntry.fromPartial(base !== null && base !== void 0 ? 
base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseBlockFilters_TransactionsEntry(); + message.key = (_a = object.key) !== null && _a !== void 0 ? _a : ""; + message.value = (object.value !== undefined && object.value !== null) + ? geyser_1.SubscribeRequestFilterTransactions.fromPartial(object.value) + : undefined; + return message; + }, +}; +function createBaseBlockFilters_EntriesEntry() { + return { key: "", value: undefined }; +} +exports.BlockFilters_EntriesEntry = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== undefined) { + geyser_1.SubscribeRequestFilterEntry.encode(message.value, writer.uint32(18).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseBlockFilters_EntriesEntry(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.value = geyser_1.SubscribeRequestFilterEntry.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object.value) ? 
geyser_1.SubscribeRequestFilterEntry.fromJSON(object.value) : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== undefined) { + obj.value = geyser_1.SubscribeRequestFilterEntry.toJSON(message.value); + } + return obj; + }, + create: function (base) { + return exports.BlockFilters_EntriesEntry.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseBlockFilters_EntriesEntry(); + message.key = (_a = object.key) !== null && _a !== void 0 ? _a : ""; + message.value = (object.value !== undefined && object.value !== null) + ? geyser_1.SubscribeRequestFilterEntry.fromPartial(object.value) + : undefined; + return message; + }, +}; +function createBaseBlockFilters_BlocksMetaEntry() { + return { key: "", value: undefined }; +} +exports.BlockFilters_BlocksMetaEntry = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== undefined) { + geyser_1.SubscribeRequestFilterBlocksMeta.encode(message.value, writer.uint32(18).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseBlockFilters_BlocksMetaEntry(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.value = geyser_1.SubscribeRequestFilterBlocksMeta.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object.value) ? geyser_1.SubscribeRequestFilterBlocksMeta.fromJSON(object.value) : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== undefined) { + obj.value = geyser_1.SubscribeRequestFilterBlocksMeta.toJSON(message.value); + } + return obj; + }, + create: function (base) { + return exports.BlockFilters_BlocksMetaEntry.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseBlockFilters_BlocksMetaEntry(); + message.key = (_a = object.key) !== null && _a !== void 0 ? _a : ""; + message.value = (object.value !== undefined && object.value !== null) + ? 
geyser_1.SubscribeRequestFilterBlocksMeta.fromPartial(object.value) + : undefined; + return message; + }, +}; +function createBaseDownloadBlockShard() { + return { blockchainId: new Uint8Array(0), blockUid: new Uint8Array(0), shardIdx: 0, blockFilters: undefined }; +} +exports.DownloadBlockShard = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.blockchainId.length !== 0) { + writer.uint32(10).bytes(message.blockchainId); + } + if (message.blockUid.length !== 0) { + writer.uint32(18).bytes(message.blockUid); + } + if (message.shardIdx !== 0) { + writer.uint32(24).int32(message.shardIdx); + } + if (message.blockFilters !== undefined) { + exports.BlockFilters.encode(message.blockFilters, writer.uint32(34).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseDownloadBlockShard(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.blockchainId = reader.bytes(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.blockUid = reader.bytes(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + message.shardIdx = reader.int32(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + message.blockFilters = exports.BlockFilters.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + blockchainId: isSet(object.blockchainId) ? bytesFromBase64(object.blockchainId) : new Uint8Array(0), + blockUid: isSet(object.blockUid) ? bytesFromBase64(object.blockUid) : new Uint8Array(0), + shardIdx: isSet(object.shardIdx) ? 
globalThis.Number(object.shardIdx) : 0, + blockFilters: isSet(object.blockFilters) ? exports.BlockFilters.fromJSON(object.blockFilters) : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.blockchainId.length !== 0) { + obj.blockchainId = base64FromBytes(message.blockchainId); + } + if (message.blockUid.length !== 0) { + obj.blockUid = base64FromBytes(message.blockUid); + } + if (message.shardIdx !== 0) { + obj.shardIdx = Math.round(message.shardIdx); + } + if (message.blockFilters !== undefined) { + obj.blockFilters = exports.BlockFilters.toJSON(message.blockFilters); + } + return obj; + }, + create: function (base) { + return exports.DownloadBlockShard.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c; + var message = createBaseDownloadBlockShard(); + message.blockchainId = (_a = object.blockchainId) !== null && _a !== void 0 ? _a : new Uint8Array(0); + message.blockUid = (_b = object.blockUid) !== null && _b !== void 0 ? _b : new Uint8Array(0); + message.shardIdx = (_c = object.shardIdx) !== null && _c !== void 0 ? _c : 0; + message.blockFilters = (object.blockFilters !== undefined && object.blockFilters !== null) + ? exports.BlockFilters.fromPartial(object.blockFilters) + : undefined; + return message; + }, +}; +function createBasePing() { + return { pingId: 0 }; +} +exports.Ping = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.pingId !== 0) { + writer.uint32(8).uint32(message.pingId); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBasePing(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.pingId = reader.uint32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { pingId: isSet(object.pingId) ? globalThis.Number(object.pingId) : 0 }; + }, + toJSON: function (message) { + var obj = {}; + if (message.pingId !== 0) { + obj.pingId = Math.round(message.pingId); + } + return obj; + }, + create: function (base) { + return exports.Ping.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBasePing(); + message.pingId = (_a = object.pingId) !== null && _a !== void 0 ? _a : 0; + return message; + }, +}; +function createBasePong() { + return { pingId: 0 }; +} +exports.Pong = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.pingId !== 0) { + writer.uint32(8).uint32(message.pingId); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBasePong(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.pingId = reader.uint32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { pingId: isSet(object.pingId) ? 
globalThis.Number(object.pingId) : 0 }; + }, + toJSON: function (message) { + var obj = {}; + if (message.pingId !== 0) { + obj.pingId = Math.round(message.pingId); + } + return obj; + }, + create: function (base) { + return exports.Pong.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBasePong(); + message.pingId = (_a = object.pingId) !== null && _a !== void 0 ? _a : 0; + return message; + }, +}; +function createBaseDataCommand() { + return { downloadBlockShard: undefined, filterUpdate: undefined }; +} +exports.DataCommand = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.downloadBlockShard !== undefined) { + exports.DownloadBlockShard.encode(message.downloadBlockShard, writer.uint32(10).fork()).join(); + } + if (message.filterUpdate !== undefined) { + exports.BlockFilters.encode(message.filterUpdate, writer.uint32(18).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseDataCommand(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.downloadBlockShard = exports.DownloadBlockShard.decode(reader, reader.uint32()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.filterUpdate = exports.BlockFilters.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + downloadBlockShard: isSet(object.downloadBlockShard) + ? exports.DownloadBlockShard.fromJSON(object.downloadBlockShard) + : undefined, + filterUpdate: isSet(object.filterUpdate) ? 
exports.BlockFilters.fromJSON(object.filterUpdate) : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.downloadBlockShard !== undefined) { + obj.downloadBlockShard = exports.DownloadBlockShard.toJSON(message.downloadBlockShard); + } + if (message.filterUpdate !== undefined) { + obj.filterUpdate = exports.BlockFilters.toJSON(message.filterUpdate); + } + return obj; + }, + create: function (base) { + return exports.DataCommand.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var message = createBaseDataCommand(); + message.downloadBlockShard = (object.downloadBlockShard !== undefined && object.downloadBlockShard !== null) + ? exports.DownloadBlockShard.fromPartial(object.downloadBlockShard) + : undefined; + message.filterUpdate = (object.filterUpdate !== undefined && object.filterUpdate !== null) + ? exports.BlockFilters.fromPartial(object.filterUpdate) + : undefined; + return message; + }, +}; +function createBaseBlockShardDownloadFinish() { + return {}; +} +exports.BlockShardDownloadFinish = { + encode: function (_, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseBlockShardDownloadFinish(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (_) { + return {}; + }, + toJSON: function (_) { + var obj = {}; + return obj; + }, + create: function (base) { + return exports.BlockShardDownloadFinish.fromPartial(base !== null && base !== void 0 ? 
base : {}); + }, + fromPartial: function (_) { + var message = createBaseBlockShardDownloadFinish(); + return message; + }, +}; +function createBaseBlockNotFound() { + return { blockchainId: new Uint8Array(0), blockUid: new Uint8Array(0), shardIdx: 0 }; +} +exports.BlockNotFound = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.blockchainId.length !== 0) { + writer.uint32(10).bytes(message.blockchainId); + } + if (message.blockUid.length !== 0) { + writer.uint32(18).bytes(message.blockUid); + } + if (message.shardIdx !== 0) { + writer.uint32(24).int32(message.shardIdx); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseBlockNotFound(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.blockchainId = reader.bytes(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.blockUid = reader.bytes(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + message.shardIdx = reader.int32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + blockchainId: isSet(object.blockchainId) ? bytesFromBase64(object.blockchainId) : new Uint8Array(0), + blockUid: isSet(object.blockUid) ? bytesFromBase64(object.blockUid) : new Uint8Array(0), + shardIdx: isSet(object.shardIdx) ? 
globalThis.Number(object.shardIdx) : 0, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.blockchainId.length !== 0) { + obj.blockchainId = base64FromBytes(message.blockchainId); + } + if (message.blockUid.length !== 0) { + obj.blockUid = base64FromBytes(message.blockUid); + } + if (message.shardIdx !== 0) { + obj.shardIdx = Math.round(message.shardIdx); + } + return obj; + }, + create: function (base) { + return exports.BlockNotFound.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c; + var message = createBaseBlockNotFound(); + message.blockchainId = (_a = object.blockchainId) !== null && _a !== void 0 ? _a : new Uint8Array(0); + message.blockUid = (_b = object.blockUid) !== null && _b !== void 0 ? _b : new Uint8Array(0); + message.shardIdx = (_c = object.shardIdx) !== null && _c !== void 0 ? _c : 0; + return message; + }, +}; +function createBaseDataError() { + return { notFound: undefined }; +} +exports.DataError = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.notFound !== undefined) { + exports.BlockNotFound.encode(message.notFound, writer.uint32(10).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseDataError(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.notFound = exports.BlockNotFound.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { notFound: isSet(object.notFound) ? 
exports.BlockNotFound.fromJSON(object.notFound) : undefined }; + }, + toJSON: function (message) { + var obj = {}; + if (message.notFound !== undefined) { + obj.notFound = exports.BlockNotFound.toJSON(message.notFound); + } + return obj; + }, + create: function (base) { + return exports.DataError.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var message = createBaseDataError(); + message.notFound = (object.notFound !== undefined && object.notFound !== null) + ? exports.BlockNotFound.fromPartial(object.notFound) + : undefined; + return message; + }, +}; +function createBaseDataResponse() { + return { update: undefined, blockShardDownloadFinish: undefined }; +} +exports.DataResponse = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.update !== undefined) { + geyser_1.SubscribeUpdate.encode(message.update, writer.uint32(10).fork()).join(); + } + if (message.blockShardDownloadFinish !== undefined) { + exports.BlockShardDownloadFinish.encode(message.blockShardDownloadFinish, writer.uint32(18).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseDataResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.update = geyser_1.SubscribeUpdate.decode(reader, reader.uint32()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.blockShardDownloadFinish = exports.BlockShardDownloadFinish.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + update: isSet(object.update) ? 
geyser_1.SubscribeUpdate.fromJSON(object.update) : undefined, + blockShardDownloadFinish: isSet(object.blockShardDownloadFinish) + ? exports.BlockShardDownloadFinish.fromJSON(object.blockShardDownloadFinish) + : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.update !== undefined) { + obj.update = geyser_1.SubscribeUpdate.toJSON(message.update); + } + if (message.blockShardDownloadFinish !== undefined) { + obj.blockShardDownloadFinish = exports.BlockShardDownloadFinish.toJSON(message.blockShardDownloadFinish); + } + return obj; + }, + create: function (base) { + return exports.DataResponse.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var message = createBaseDataResponse(); + message.update = (object.update !== undefined && object.update !== null) + ? geyser_1.SubscribeUpdate.fromPartial(object.update) + : undefined; + message.blockShardDownloadFinish = + (object.blockShardDownloadFinish !== undefined && object.blockShardDownloadFinish !== null) + ? exports.BlockShardDownloadFinish.fromPartial(object.blockShardDownloadFinish) + : undefined; + return message; + }, +}; +function createBaseCommitOffset() { + return { offset: "0", shardId: 0 }; +} +exports.CommitOffset = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.offset !== "0") { + writer.uint32(8).int64(message.offset); + } + if (message.shardId !== 0) { + writer.uint32(16).int32(message.shardId); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseCommitOffset(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.offset = reader.int64().toString(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + message.shardId = reader.int32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + offset: isSet(object.offset) ? globalThis.String(object.offset) : "0", + shardId: isSet(object.shardId) ? globalThis.Number(object.shardId) : 0, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.offset !== "0") { + obj.offset = message.offset; + } + if (message.shardId !== 0) { + obj.shardId = Math.round(message.shardId); + } + return obj; + }, + create: function (base) { + return exports.CommitOffset.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b; + var message = createBaseCommitOffset(); + message.offset = (_a = object.offset) !== null && _a !== void 0 ? _a : "0"; + message.shardId = (_b = object.shardId) !== null && _b !== void 0 ? _b : 0; + return message; + }, +}; +function createBasePollBlockchainHistory() { + return { shardId: 0, from: undefined, limit: undefined }; +} +exports.PollBlockchainHistory = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.shardId !== 0) { + writer.uint32(8).int32(message.shardId); + } + if (message.from !== undefined) { + writer.uint32(16).int64(message.from); + } + if (message.limit !== undefined) { + writer.uint32(24).int64(message.limit); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBasePollBlockchainHistory(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.shardId = reader.int32(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + message.from = reader.int64().toString(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + message.limit = reader.int64().toString(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + shardId: isSet(object.shardId) ? globalThis.Number(object.shardId) : 0, + from: isSet(object.from) ? globalThis.String(object.from) : undefined, + limit: isSet(object.limit) ? globalThis.String(object.limit) : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.shardId !== 0) { + obj.shardId = Math.round(message.shardId); + } + if (message.from !== undefined) { + obj.from = message.from; + } + if (message.limit !== undefined) { + obj.limit = message.limit; + } + return obj; + }, + create: function (base) { + return exports.PollBlockchainHistory.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c; + var message = createBasePollBlockchainHistory(); + message.shardId = (_a = object.shardId) !== null && _a !== void 0 ? _a : 0; + message.from = (_b = object.from) !== null && _b !== void 0 ? _b : undefined; + message.limit = (_c = object.limit) !== null && _c !== void 0 ? 
_c : undefined; + return message; + }, +}; +function createBaseBlockchainEvent() { + return { + offset: "0", + blockchainId: new Uint8Array(0), + blockUid: new Uint8Array(0), + numShards: 0, + slot: "0", + parentSlot: undefined, + commitmentLevel: 0, + blockchainShardId: 0, + deadError: undefined, + }; +} +exports.BlockchainEvent = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.offset !== "0") { + writer.uint32(8).int64(message.offset); + } + if (message.blockchainId.length !== 0) { + writer.uint32(18).bytes(message.blockchainId); + } + if (message.blockUid.length !== 0) { + writer.uint32(26).bytes(message.blockUid); + } + if (message.numShards !== 0) { + writer.uint32(32).uint32(message.numShards); + } + if (message.slot !== "0") { + writer.uint32(40).uint64(message.slot); + } + if (message.parentSlot !== undefined) { + writer.uint32(48).uint64(message.parentSlot); + } + if (message.commitmentLevel !== 0) { + writer.uint32(56).int32(message.commitmentLevel); + } + if (message.blockchainShardId !== 0) { + writer.uint32(64).int32(message.blockchainShardId); + } + if (message.deadError !== undefined) { + writer.uint32(74).string(message.deadError); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseBlockchainEvent(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.offset = reader.int64().toString(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.blockchainId = reader.bytes(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + message.blockUid = reader.bytes(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + message.numShards = reader.uint32(); + continue; + } + case 5: { + if (tag !== 40) { + break; + } + message.slot = reader.uint64().toString(); + continue; + } + case 6: { + if (tag !== 48) { + break; + } + message.parentSlot = reader.uint64().toString(); + continue; + } + case 7: { + if (tag !== 56) { + break; + } + message.commitmentLevel = reader.int32(); + continue; + } + case 8: { + if (tag !== 64) { + break; + } + message.blockchainShardId = reader.int32(); + continue; + } + case 9: { + if (tag !== 74) { + break; + } + message.deadError = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + offset: isSet(object.offset) ? globalThis.String(object.offset) : "0", + blockchainId: isSet(object.blockchainId) ? bytesFromBase64(object.blockchainId) : new Uint8Array(0), + blockUid: isSet(object.blockUid) ? bytesFromBase64(object.blockUid) : new Uint8Array(0), + numShards: isSet(object.numShards) ? globalThis.Number(object.numShards) : 0, + slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", + parentSlot: isSet(object.parentSlot) ? globalThis.String(object.parentSlot) : undefined, + commitmentLevel: isSet(object.commitmentLevel) ? (0, geyser_1.commitmentLevelFromJSON)(object.commitmentLevel) : 0, + blockchainShardId: isSet(object.blockchainShardId) ? 
globalThis.Number(object.blockchainShardId) : 0, + deadError: isSet(object.deadError) ? globalThis.String(object.deadError) : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.offset !== "0") { + obj.offset = message.offset; + } + if (message.blockchainId.length !== 0) { + obj.blockchainId = base64FromBytes(message.blockchainId); + } + if (message.blockUid.length !== 0) { + obj.blockUid = base64FromBytes(message.blockUid); + } + if (message.numShards !== 0) { + obj.numShards = Math.round(message.numShards); + } + if (message.slot !== "0") { + obj.slot = message.slot; + } + if (message.parentSlot !== undefined) { + obj.parentSlot = message.parentSlot; + } + if (message.commitmentLevel !== 0) { + obj.commitmentLevel = (0, geyser_1.commitmentLevelToJSON)(message.commitmentLevel); + } + if (message.blockchainShardId !== 0) { + obj.blockchainShardId = Math.round(message.blockchainShardId); + } + if (message.deadError !== undefined) { + obj.deadError = message.deadError; + } + return obj; + }, + create: function (base) { + return exports.BlockchainEvent.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c, _d, _e, _f, _g, _h, _j; + var message = createBaseBlockchainEvent(); + message.offset = (_a = object.offset) !== null && _a !== void 0 ? _a : "0"; + message.blockchainId = (_b = object.blockchainId) !== null && _b !== void 0 ? _b : new Uint8Array(0); + message.blockUid = (_c = object.blockUid) !== null && _c !== void 0 ? _c : new Uint8Array(0); + message.numShards = (_d = object.numShards) !== null && _d !== void 0 ? _d : 0; + message.slot = (_e = object.slot) !== null && _e !== void 0 ? _e : "0"; + message.parentSlot = (_f = object.parentSlot) !== null && _f !== void 0 ? _f : undefined; + message.commitmentLevel = (_g = object.commitmentLevel) !== null && _g !== void 0 ? _g : 0; + message.blockchainShardId = (_h = object.blockchainShardId) !== null && _h !== void 0 ? 
_h : 0; + message.deadError = (_j = object.deadError) !== null && _j !== void 0 ? _j : undefined; + return message; + }, +}; +function createBaseBlockchainHistory() { + return { events: [] }; +} +exports.BlockchainHistory = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + for (var _i = 0, _a = message.events; _i < _a.length; _i++) { + var v = _a[_i]; + exports.BlockchainEvent.encode(v, writer.uint32(10).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseBlockchainHistory(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.events.push(exports.BlockchainEvent.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + events: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.events) + ? object.events.map(function (e) { return exports.BlockchainEvent.fromJSON(e); }) + : [], + }; + }, + toJSON: function (message) { + var _a; + var obj = {}; + if ((_a = message.events) === null || _a === void 0 ? void 0 : _a.length) { + obj.events = message.events.map(function (e) { return exports.BlockchainEvent.toJSON(e); }); + } + return obj; + }, + create: function (base) { + return exports.BlockchainHistory.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseBlockchainHistory(); + message.events = ((_a = object.events) === null || _a === void 0 ? 
void 0 : _a.map(function (e) { return exports.BlockchainEvent.fromPartial(e); })) || []; + return message; + }, +}; +function createBaseJoinControlPlane() { + return { consumerGroupName: undefined }; +} +exports.JoinControlPlane = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.consumerGroupName !== undefined) { + writer.uint32(10).string(message.consumerGroupName); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseJoinControlPlane(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.consumerGroupName = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + consumerGroupName: isSet(object.consumerGroupName) ? globalThis.String(object.consumerGroupName) : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.consumerGroupName !== undefined) { + obj.consumerGroupName = message.consumerGroupName; + } + return obj; + }, + create: function (base) { + return exports.JoinControlPlane.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseJoinControlPlane(); + message.consumerGroupName = (_a = object.consumerGroupName) !== null && _a !== void 0 ? 
_a : undefined; + return message; + }, +}; +function createBaseControlCommand() { + return { initialJoin: undefined, commitOffset: undefined, pollHist: undefined, ping: undefined }; +} +exports.ControlCommand = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.initialJoin !== undefined) { + exports.JoinControlPlane.encode(message.initialJoin, writer.uint32(10).fork()).join(); + } + if (message.commitOffset !== undefined) { + exports.CommitOffset.encode(message.commitOffset, writer.uint32(18).fork()).join(); + } + if (message.pollHist !== undefined) { + exports.PollBlockchainHistory.encode(message.pollHist, writer.uint32(26).fork()).join(); + } + if (message.ping !== undefined) { + exports.Ping.encode(message.ping, writer.uint32(34).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseControlCommand(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.initialJoin = exports.JoinControlPlane.decode(reader, reader.uint32()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.commitOffset = exports.CommitOffset.decode(reader, reader.uint32()); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + message.pollHist = exports.PollBlockchainHistory.decode(reader, reader.uint32()); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + message.ping = exports.Ping.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + initialJoin: isSet(object.initialJoin) ? 
exports.JoinControlPlane.fromJSON(object.initialJoin) : undefined, + commitOffset: isSet(object.commitOffset) ? exports.CommitOffset.fromJSON(object.commitOffset) : undefined, + pollHist: isSet(object.pollHist) ? exports.PollBlockchainHistory.fromJSON(object.pollHist) : undefined, + ping: isSet(object.ping) ? exports.Ping.fromJSON(object.ping) : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.initialJoin !== undefined) { + obj.initialJoin = exports.JoinControlPlane.toJSON(message.initialJoin); + } + if (message.commitOffset !== undefined) { + obj.commitOffset = exports.CommitOffset.toJSON(message.commitOffset); + } + if (message.pollHist !== undefined) { + obj.pollHist = exports.PollBlockchainHistory.toJSON(message.pollHist); + } + if (message.ping !== undefined) { + obj.ping = exports.Ping.toJSON(message.ping); + } + return obj; + }, + create: function (base) { + return exports.ControlCommand.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var message = createBaseControlCommand(); + message.initialJoin = (object.initialJoin !== undefined && object.initialJoin !== null) + ? exports.JoinControlPlane.fromPartial(object.initialJoin) + : undefined; + message.commitOffset = (object.commitOffset !== undefined && object.commitOffset !== null) + ? exports.CommitOffset.fromPartial(object.commitOffset) + : undefined; + message.pollHist = (object.pollHist !== undefined && object.pollHist !== null) + ? exports.PollBlockchainHistory.fromPartial(object.pollHist) + : undefined; + message.ping = (object.ping !== undefined && object.ping !== null) ? 
exports.Ping.fromPartial(object.ping) : undefined; + return message; + }, +}; +function createBaseControlResponse() { + return { init: undefined, commitOffset: undefined, pollHist: undefined, pong: undefined }; +} +exports.ControlResponse = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.init !== undefined) { + exports.InitialConsumerGroupState.encode(message.init, writer.uint32(10).fork()).join(); + } + if (message.commitOffset !== undefined) { + exports.CommitOffsetResult.encode(message.commitOffset, writer.uint32(18).fork()).join(); + } + if (message.pollHist !== undefined) { + exports.BlockchainHistory.encode(message.pollHist, writer.uint32(26).fork()).join(); + } + if (message.pong !== undefined) { + exports.Pong.encode(message.pong, writer.uint32(34).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseControlResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.init = exports.InitialConsumerGroupState.decode(reader, reader.uint32()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.commitOffset = exports.CommitOffsetResult.decode(reader, reader.uint32()); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + message.pollHist = exports.BlockchainHistory.decode(reader, reader.uint32()); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + message.pong = exports.Pong.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + init: isSet(object.init) ? 
exports.InitialConsumerGroupState.fromJSON(object.init) : undefined, + commitOffset: isSet(object.commitOffset) ? exports.CommitOffsetResult.fromJSON(object.commitOffset) : undefined, + pollHist: isSet(object.pollHist) ? exports.BlockchainHistory.fromJSON(object.pollHist) : undefined, + pong: isSet(object.pong) ? exports.Pong.fromJSON(object.pong) : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.init !== undefined) { + obj.init = exports.InitialConsumerGroupState.toJSON(message.init); + } + if (message.commitOffset !== undefined) { + obj.commitOffset = exports.CommitOffsetResult.toJSON(message.commitOffset); + } + if (message.pollHist !== undefined) { + obj.pollHist = exports.BlockchainHistory.toJSON(message.pollHist); + } + if (message.pong !== undefined) { + obj.pong = exports.Pong.toJSON(message.pong); + } + return obj; + }, + create: function (base) { + return exports.ControlResponse.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var message = createBaseControlResponse(); + message.init = (object.init !== undefined && object.init !== null) + ? exports.InitialConsumerGroupState.fromPartial(object.init) + : undefined; + message.commitOffset = (object.commitOffset !== undefined && object.commitOffset !== null) + ? exports.CommitOffsetResult.fromPartial(object.commitOffset) + : undefined; + message.pollHist = (object.pollHist !== undefined && object.pollHist !== null) + ? exports.BlockchainHistory.fromPartial(object.pollHist) + : undefined; + message.pong = (object.pong !== undefined && object.pong !== null) ? 
exports.Pong.fromPartial(object.pong) : undefined; + return message; + }, +}; +function createBaseCommitOffsetResult() { + return { offset: "0", shardId: 0 }; +} +exports.CommitOffsetResult = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.offset !== "0") { + writer.uint32(8).int64(message.offset); + } + if (message.shardId !== 0) { + writer.uint32(16).int32(message.shardId); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseCommitOffsetResult(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.offset = reader.int64().toString(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + message.shardId = reader.int32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + offset: isSet(object.offset) ? globalThis.String(object.offset) : "0", + shardId: isSet(object.shardId) ? globalThis.Number(object.shardId) : 0, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.offset !== "0") { + obj.offset = message.offset; + } + if (message.shardId !== 0) { + obj.shardId = Math.round(message.shardId); + } + return obj; + }, + create: function (base) { + return exports.CommitOffsetResult.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b; + var message = createBaseCommitOffsetResult(); + message.offset = (_a = object.offset) !== null && _a !== void 0 ? _a : "0"; + message.shardId = (_b = object.shardId) !== null && _b !== void 0 ? 
_b : 0; + return message; + }, +}; +function createBaseInitialConsumerGroupState() { + return { blockchainId: new Uint8Array(0), lastCommittedOffsets: {} }; +} +exports.InitialConsumerGroupState = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.blockchainId.length !== 0) { + writer.uint32(10).bytes(message.blockchainId); + } + Object.entries(message.lastCommittedOffsets).forEach(function (_a) { + var key = _a[0], value = _a[1]; + exports.InitialConsumerGroupState_LastCommittedOffsetsEntry.encode({ key: key, value: value }, writer.uint32(18).fork()) + .join(); + }); + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseInitialConsumerGroupState(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.blockchainId = reader.bytes(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + var entry2 = exports.InitialConsumerGroupState_LastCommittedOffsetsEntry.decode(reader, reader.uint32()); + if (entry2.value !== undefined) { + message.lastCommittedOffsets[entry2.key] = entry2.value; + } + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + blockchainId: isSet(object.blockchainId) ? bytesFromBase64(object.blockchainId) : new Uint8Array(0), + lastCommittedOffsets: isObject(object.lastCommittedOffsets) + ? 
Object.entries(object.lastCommittedOffsets).reduce(function (acc, _a) { + var key = _a[0], value = _a[1]; + acc[globalThis.Number(key)] = String(value); + return acc; + }, {}) + : {}, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.blockchainId.length !== 0) { + obj.blockchainId = base64FromBytes(message.blockchainId); + } + if (message.lastCommittedOffsets) { + var entries = Object.entries(message.lastCommittedOffsets); + if (entries.length > 0) { + obj.lastCommittedOffsets = {}; + entries.forEach(function (_a) { + var k = _a[0], v = _a[1]; + obj.lastCommittedOffsets[k] = v; + }); + } + } + return obj; + }, + create: function (base) { + return exports.InitialConsumerGroupState.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b; + var message = createBaseInitialConsumerGroupState(); + message.blockchainId = (_a = object.blockchainId) !== null && _a !== void 0 ? _a : new Uint8Array(0); + message.lastCommittedOffsets = Object.entries((_b = object.lastCommittedOffsets) !== null && _b !== void 0 ? _b : {}).reduce(function (acc, _a) { + var key = _a[0], value = _a[1]; + if (value !== undefined) { + acc[globalThis.Number(key)] = globalThis.String(value); + } + return acc; + }, {}); + return message; + }, +}; +function createBaseInitialConsumerGroupState_LastCommittedOffsetsEntry() { + return { key: 0, value: "0" }; +} +exports.InitialConsumerGroupState_LastCommittedOffsetsEntry = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.key !== 0) { + writer.uint32(8).int32(message.key); + } + if (message.value !== "0") { + writer.uint32(16).int64(message.value); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseInitialConsumerGroupState_LastCommittedOffsetsEntry(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.key = reader.int32(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + message.value = reader.int64().toString(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + key: isSet(object.key) ? globalThis.Number(object.key) : 0, + value: isSet(object.value) ? globalThis.String(object.value) : "0", + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.key !== 0) { + obj.key = Math.round(message.key); + } + if (message.value !== "0") { + obj.value = message.value; + } + return obj; + }, + create: function (base) { + return exports.InitialConsumerGroupState_LastCommittedOffsetsEntry.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b; + var message = createBaseInitialConsumerGroupState_LastCommittedOffsetsEntry(); + message.key = (_a = object.key) !== null && _a !== void 0 ? _a : 0; + message.value = (_b = object.value) !== null && _b !== void 0 ? _b : "0"; + return message; + }, +}; +function createBaseCreateConsumerGroupResponse() { + return { consumerGroupId: "" }; +} +exports.CreateConsumerGroupResponse = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.consumerGroupId !== "") { + writer.uint32(10).string(message.consumerGroupId); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseCreateConsumerGroupResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.consumerGroupId = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { consumerGroupId: isSet(object.consumerGroupId) ? globalThis.String(object.consumerGroupId) : "" }; + }, + toJSON: function (message) { + var obj = {}; + if (message.consumerGroupId !== "") { + obj.consumerGroupId = message.consumerGroupId; + } + return obj; + }, + create: function (base) { + return exports.CreateConsumerGroupResponse.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseCreateConsumerGroupResponse(); + message.consumerGroupId = (_a = object.consumerGroupId) !== null && _a !== void 0 ? _a : ""; + return message; + }, +}; +function createBaseCreateConsumerGroupRequest() { + return { consumerGroupName: "", initialOffsetPolicy: 0 }; +} +exports.CreateConsumerGroupRequest = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.consumerGroupName !== "") { + writer.uint32(10).string(message.consumerGroupName); + } + if (message.initialOffsetPolicy !== 0) { + writer.uint32(16).int32(message.initialOffsetPolicy); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseCreateConsumerGroupRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.consumerGroupName = reader.string(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + message.initialOffsetPolicy = reader.int32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + consumerGroupName: isSet(object.consumerGroupName) ? globalThis.String(object.consumerGroupName) : "", + initialOffsetPolicy: isSet(object.initialOffsetPolicy) + ? initialOffsetPolicyFromJSON(object.initialOffsetPolicy) + : 0, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.consumerGroupName !== "") { + obj.consumerGroupName = message.consumerGroupName; + } + if (message.initialOffsetPolicy !== 0) { + obj.initialOffsetPolicy = initialOffsetPolicyToJSON(message.initialOffsetPolicy); + } + return obj; + }, + create: function (base) { + return exports.CreateConsumerGroupRequest.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b; + var message = createBaseCreateConsumerGroupRequest(); + message.consumerGroupName = (_a = object.consumerGroupName) !== null && _a !== void 0 ? _a : ""; + message.initialOffsetPolicy = (_b = object.initialOffsetPolicy) !== null && _b !== void 0 ? 
_b : 0; + return message; + }, +}; +exports.FumaroleService = { + getConsumerGroupInfo: { + path: "/fumarole.Fumarole/GetConsumerGroupInfo", + requestStream: false, + responseStream: false, + requestSerialize: function (value) { + return Buffer.from(exports.GetConsumerGroupInfoRequest.encode(value).finish()); + }, + requestDeserialize: function (value) { return exports.GetConsumerGroupInfoRequest.decode(value); }, + responseSerialize: function (value) { return Buffer.from(exports.ConsumerGroupInfo.encode(value).finish()); }, + responseDeserialize: function (value) { return exports.ConsumerGroupInfo.decode(value); }, + }, + listConsumerGroups: { + path: "/fumarole.Fumarole/ListConsumerGroups", + requestStream: false, + responseStream: false, + requestSerialize: function (value) { + return Buffer.from(exports.ListConsumerGroupsRequest.encode(value).finish()); + }, + requestDeserialize: function (value) { return exports.ListConsumerGroupsRequest.decode(value); }, + responseSerialize: function (value) { + return Buffer.from(exports.ListConsumerGroupsResponse.encode(value).finish()); + }, + responseDeserialize: function (value) { return exports.ListConsumerGroupsResponse.decode(value); }, + }, + deleteConsumerGroup: { + path: "/fumarole.Fumarole/DeleteConsumerGroup", + requestStream: false, + responseStream: false, + requestSerialize: function (value) { + return Buffer.from(exports.DeleteConsumerGroupRequest.encode(value).finish()); + }, + requestDeserialize: function (value) { return exports.DeleteConsumerGroupRequest.decode(value); }, + responseSerialize: function (value) { + return Buffer.from(exports.DeleteConsumerGroupResponse.encode(value).finish()); + }, + responseDeserialize: function (value) { return exports.DeleteConsumerGroupResponse.decode(value); }, + }, + createConsumerGroup: { + path: "/fumarole.Fumarole/CreateConsumerGroup", + requestStream: false, + responseStream: false, + requestSerialize: function (value) { + return 
Buffer.from(exports.CreateConsumerGroupRequest.encode(value).finish()); + }, + requestDeserialize: function (value) { return exports.CreateConsumerGroupRequest.decode(value); }, + responseSerialize: function (value) { + return Buffer.from(exports.CreateConsumerGroupResponse.encode(value).finish()); + }, + responseDeserialize: function (value) { return exports.CreateConsumerGroupResponse.decode(value); }, + }, + downloadBlock: { + path: "/fumarole.Fumarole/DownloadBlock", + requestStream: false, + responseStream: true, + requestSerialize: function (value) { return Buffer.from(exports.DownloadBlockShard.encode(value).finish()); }, + requestDeserialize: function (value) { return exports.DownloadBlockShard.decode(value); }, + responseSerialize: function (value) { return Buffer.from(exports.DataResponse.encode(value).finish()); }, + responseDeserialize: function (value) { return exports.DataResponse.decode(value); }, + }, + /** Represents subscription to the data plane */ + subscribeData: { + path: "/fumarole.Fumarole/SubscribeData", + requestStream: true, + responseStream: true, + requestSerialize: function (value) { return Buffer.from(exports.DataCommand.encode(value).finish()); }, + requestDeserialize: function (value) { return exports.DataCommand.decode(value); }, + responseSerialize: function (value) { return Buffer.from(exports.DataResponse.encode(value).finish()); }, + responseDeserialize: function (value) { return exports.DataResponse.decode(value); }, + }, + getChainTip: { + path: "/fumarole.Fumarole/GetChainTip", + requestStream: false, + responseStream: false, + requestSerialize: function (value) { return Buffer.from(exports.GetChainTipRequest.encode(value).finish()); }, + requestDeserialize: function (value) { return exports.GetChainTipRequest.decode(value); }, + responseSerialize: function (value) { return Buffer.from(exports.GetChainTipResponse.encode(value).finish()); }, + responseDeserialize: function (value) { return 
exports.GetChainTipResponse.decode(value); }, + }, + /** Represents subscription to the control plane */ + subscribe: { + path: "/fumarole.Fumarole/Subscribe", + requestStream: true, + responseStream: true, + requestSerialize: function (value) { return Buffer.from(exports.ControlCommand.encode(value).finish()); }, + requestDeserialize: function (value) { return exports.ControlCommand.decode(value); }, + responseSerialize: function (value) { return Buffer.from(exports.ControlResponse.encode(value).finish()); }, + responseDeserialize: function (value) { return exports.ControlResponse.decode(value); }, + }, + version: { + path: "/fumarole.Fumarole/Version", + requestStream: false, + responseStream: false, + requestSerialize: function (value) { return Buffer.from(exports.VersionRequest.encode(value).finish()); }, + requestDeserialize: function (value) { return exports.VersionRequest.decode(value); }, + responseSerialize: function (value) { return Buffer.from(exports.VersionResponse.encode(value).finish()); }, + responseDeserialize: function (value) { return exports.VersionResponse.decode(value); }, + }, +}; +exports.FumaroleClient = (0, grpc_js_1.makeGenericClientConstructor)(exports.FumaroleService, "fumarole.Fumarole"); +function bytesFromBase64(b64) { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } + else { + var bin = globalThis.atob(b64); + var arr = new Uint8Array(bin.length); + for (var i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } + else { + var bin_1 = []; + arr.forEach(function (byte) { + bin_1.push(globalThis.String.fromCharCode(byte)); + }); + return globalThis.btoa(bin_1.join("")); + } +} +function isObject(value) { + return typeof value === "object" && value !== null; +} +function isSet(value) { + return value !== null && value !== undefined; +} diff 
--git a/typescript-sdk/src/grpc/geyser.js b/typescript-sdk/src/grpc/geyser.js new file mode 100644 index 0000000..4563b91 --- /dev/null +++ b/typescript-sdk/src/grpc/geyser.js @@ -0,0 +1,4457 @@ +"use strict"; +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.7.7 +// protoc v3.12.4 +// source: geyser.proto +Object.defineProperty(exports, "__esModule", { value: true }); +exports.GeyserClient = exports.GeyserService = exports.IsBlockhashValidResponse = exports.IsBlockhashValidRequest = exports.GetVersionResponse = exports.GetVersionRequest = exports.GetSlotResponse = exports.GetSlotRequest = exports.GetBlockHeightResponse = exports.GetBlockHeightRequest = exports.GetLatestBlockhashResponse = exports.GetLatestBlockhashRequest = exports.PongResponse = exports.PingRequest = exports.SubscribeReplayInfoResponse = exports.SubscribeReplayInfoRequest = exports.SubscribeUpdatePong = exports.SubscribeUpdatePing = exports.SubscribeUpdateEntry = exports.SubscribeUpdateBlockMeta = exports.SubscribeUpdateBlock = exports.SubscribeUpdateTransactionStatus = exports.SubscribeUpdateTransactionInfo = exports.SubscribeUpdateTransaction = exports.SubscribeUpdateSlot = exports.SubscribeUpdateAccountInfo = exports.SubscribeUpdateAccount = exports.SubscribeUpdate = exports.SubscribeRequestPing = exports.SubscribeRequestAccountsDataSlice = exports.SubscribeRequestFilterEntry = exports.SubscribeRequestFilterBlocksMeta = exports.SubscribeRequestFilterBlocks = exports.SubscribeRequestFilterTransactions = exports.SubscribeRequestFilterSlots = exports.SubscribeRequestFilterAccountsFilterLamports = exports.SubscribeRequestFilterAccountsFilterMemcmp = exports.SubscribeRequestFilterAccountsFilter = exports.SubscribeRequestFilterAccounts = exports.SubscribeRequest_EntryEntry = exports.SubscribeRequest_BlocksMetaEntry = exports.SubscribeRequest_BlocksEntry = exports.SubscribeRequest_TransactionsStatusEntry = exports.SubscribeRequest_TransactionsEntry = 
exports.SubscribeRequest_SlotsEntry = exports.SubscribeRequest_AccountsEntry = exports.SubscribeRequest = exports.SlotStatus = exports.CommitmentLevel = exports.protobufPackage = void 0; +exports.commitmentLevelFromJSON = commitmentLevelFromJSON; +exports.commitmentLevelToJSON = commitmentLevelToJSON; +exports.slotStatusFromJSON = slotStatusFromJSON; +exports.slotStatusToJSON = slotStatusToJSON; +/* eslint-disable */ +var wire_1 = require("@bufbuild/protobuf/wire"); +var grpc_js_1 = require("@grpc/grpc-js"); +var timestamp_1 = require("./google/protobuf/timestamp"); +var solana_storage_1 = require("./solana-storage"); +exports.protobufPackage = "geyser"; +var CommitmentLevel; +(function (CommitmentLevel) { + CommitmentLevel[CommitmentLevel["PROCESSED"] = 0] = "PROCESSED"; + CommitmentLevel[CommitmentLevel["CONFIRMED"] = 1] = "CONFIRMED"; + CommitmentLevel[CommitmentLevel["FINALIZED"] = 2] = "FINALIZED"; + CommitmentLevel[CommitmentLevel["UNRECOGNIZED"] = -1] = "UNRECOGNIZED"; +})(CommitmentLevel || (exports.CommitmentLevel = CommitmentLevel = {})); +function commitmentLevelFromJSON(object) { + switch (object) { + case 0: + case "PROCESSED": + return CommitmentLevel.PROCESSED; + case 1: + case "CONFIRMED": + return CommitmentLevel.CONFIRMED; + case 2: + case "FINALIZED": + return CommitmentLevel.FINALIZED; + case -1: + case "UNRECOGNIZED": + default: + return CommitmentLevel.UNRECOGNIZED; + } +} +function commitmentLevelToJSON(object) { + switch (object) { + case CommitmentLevel.PROCESSED: + return "PROCESSED"; + case CommitmentLevel.CONFIRMED: + return "CONFIRMED"; + case CommitmentLevel.FINALIZED: + return "FINALIZED"; + case CommitmentLevel.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +var SlotStatus; +(function (SlotStatus) { + SlotStatus[SlotStatus["SLOT_PROCESSED"] = 0] = "SLOT_PROCESSED"; + SlotStatus[SlotStatus["SLOT_CONFIRMED"] = 1] = "SLOT_CONFIRMED"; + SlotStatus[SlotStatus["SLOT_FINALIZED"] = 2] = "SLOT_FINALIZED"; + 
SlotStatus[SlotStatus["SLOT_FIRST_SHRED_RECEIVED"] = 3] = "SLOT_FIRST_SHRED_RECEIVED"; + SlotStatus[SlotStatus["SLOT_COMPLETED"] = 4] = "SLOT_COMPLETED"; + SlotStatus[SlotStatus["SLOT_CREATED_BANK"] = 5] = "SLOT_CREATED_BANK"; + SlotStatus[SlotStatus["SLOT_DEAD"] = 6] = "SLOT_DEAD"; + SlotStatus[SlotStatus["UNRECOGNIZED"] = -1] = "UNRECOGNIZED"; +})(SlotStatus || (exports.SlotStatus = SlotStatus = {})); +function slotStatusFromJSON(object) { + switch (object) { + case 0: + case "SLOT_PROCESSED": + return SlotStatus.SLOT_PROCESSED; + case 1: + case "SLOT_CONFIRMED": + return SlotStatus.SLOT_CONFIRMED; + case 2: + case "SLOT_FINALIZED": + return SlotStatus.SLOT_FINALIZED; + case 3: + case "SLOT_FIRST_SHRED_RECEIVED": + return SlotStatus.SLOT_FIRST_SHRED_RECEIVED; + case 4: + case "SLOT_COMPLETED": + return SlotStatus.SLOT_COMPLETED; + case 5: + case "SLOT_CREATED_BANK": + return SlotStatus.SLOT_CREATED_BANK; + case 6: + case "SLOT_DEAD": + return SlotStatus.SLOT_DEAD; + case -1: + case "UNRECOGNIZED": + default: + return SlotStatus.UNRECOGNIZED; + } +} +function slotStatusToJSON(object) { + switch (object) { + case SlotStatus.SLOT_PROCESSED: + return "SLOT_PROCESSED"; + case SlotStatus.SLOT_CONFIRMED: + return "SLOT_CONFIRMED"; + case SlotStatus.SLOT_FINALIZED: + return "SLOT_FINALIZED"; + case SlotStatus.SLOT_FIRST_SHRED_RECEIVED: + return "SLOT_FIRST_SHRED_RECEIVED"; + case SlotStatus.SLOT_COMPLETED: + return "SLOT_COMPLETED"; + case SlotStatus.SLOT_CREATED_BANK: + return "SLOT_CREATED_BANK"; + case SlotStatus.SLOT_DEAD: + return "SLOT_DEAD"; + case SlotStatus.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +function createBaseSubscribeRequest() { + return { + accounts: {}, + slots: {}, + transactions: {}, + transactionsStatus: {}, + blocks: {}, + blocksMeta: {}, + entry: {}, + commitment: undefined, + accountsDataSlice: [], + ping: undefined, + fromSlot: undefined, + }; +} +exports.SubscribeRequest = { + encode: function (message, writer) { + if (writer 
=== void 0) { writer = new wire_1.BinaryWriter(); } + Object.entries(message.accounts).forEach(function (_a) { + var key = _a[0], value = _a[1]; + exports.SubscribeRequest_AccountsEntry.encode({ key: key, value: value }, writer.uint32(10).fork()).join(); + }); + Object.entries(message.slots).forEach(function (_a) { + var key = _a[0], value = _a[1]; + exports.SubscribeRequest_SlotsEntry.encode({ key: key, value: value }, writer.uint32(18).fork()).join(); + }); + Object.entries(message.transactions).forEach(function (_a) { + var key = _a[0], value = _a[1]; + exports.SubscribeRequest_TransactionsEntry.encode({ key: key, value: value }, writer.uint32(26).fork()).join(); + }); + Object.entries(message.transactionsStatus).forEach(function (_a) { + var key = _a[0], value = _a[1]; + exports.SubscribeRequest_TransactionsStatusEntry.encode({ key: key, value: value }, writer.uint32(82).fork()).join(); + }); + Object.entries(message.blocks).forEach(function (_a) { + var key = _a[0], value = _a[1]; + exports.SubscribeRequest_BlocksEntry.encode({ key: key, value: value }, writer.uint32(34).fork()).join(); + }); + Object.entries(message.blocksMeta).forEach(function (_a) { + var key = _a[0], value = _a[1]; + exports.SubscribeRequest_BlocksMetaEntry.encode({ key: key, value: value }, writer.uint32(42).fork()).join(); + }); + Object.entries(message.entry).forEach(function (_a) { + var key = _a[0], value = _a[1]; + exports.SubscribeRequest_EntryEntry.encode({ key: key, value: value }, writer.uint32(66).fork()).join(); + }); + if (message.commitment !== undefined) { + writer.uint32(48).int32(message.commitment); + } + for (var _i = 0, _a = message.accountsDataSlice; _i < _a.length; _i++) { + var v = _a[_i]; + exports.SubscribeRequestAccountsDataSlice.encode(v, writer.uint32(58).fork()).join(); + } + if (message.ping !== undefined) { + exports.SubscribeRequestPing.encode(message.ping, writer.uint32(74).fork()).join(); + } + if (message.fromSlot !== undefined) { + 
writer.uint32(88).uint64(message.fromSlot); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseSubscribeRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + var entry1 = exports.SubscribeRequest_AccountsEntry.decode(reader, reader.uint32()); + if (entry1.value !== undefined) { + message.accounts[entry1.key] = entry1.value; + } + continue; + } + case 2: { + if (tag !== 18) { + break; + } + var entry2 = exports.SubscribeRequest_SlotsEntry.decode(reader, reader.uint32()); + if (entry2.value !== undefined) { + message.slots[entry2.key] = entry2.value; + } + continue; + } + case 3: { + if (tag !== 26) { + break; + } + var entry3 = exports.SubscribeRequest_TransactionsEntry.decode(reader, reader.uint32()); + if (entry3.value !== undefined) { + message.transactions[entry3.key] = entry3.value; + } + continue; + } + case 10: { + if (tag !== 82) { + break; + } + var entry10 = exports.SubscribeRequest_TransactionsStatusEntry.decode(reader, reader.uint32()); + if (entry10.value !== undefined) { + message.transactionsStatus[entry10.key] = entry10.value; + } + continue; + } + case 4: { + if (tag !== 34) { + break; + } + var entry4 = exports.SubscribeRequest_BlocksEntry.decode(reader, reader.uint32()); + if (entry4.value !== undefined) { + message.blocks[entry4.key] = entry4.value; + } + continue; + } + case 5: { + if (tag !== 42) { + break; + } + var entry5 = exports.SubscribeRequest_BlocksMetaEntry.decode(reader, reader.uint32()); + if (entry5.value !== undefined) { + message.blocksMeta[entry5.key] = entry5.value; + } + continue; + } + case 8: { + if (tag !== 66) { + break; + } + var entry8 = exports.SubscribeRequest_EntryEntry.decode(reader, reader.uint32()); + if (entry8.value !== undefined) { 
+ message.entry[entry8.key] = entry8.value; + } + continue; + } + case 6: { + if (tag !== 48) { + break; + } + message.commitment = reader.int32(); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + message.accountsDataSlice.push(exports.SubscribeRequestAccountsDataSlice.decode(reader, reader.uint32())); + continue; + } + case 9: { + if (tag !== 74) { + break; + } + message.ping = exports.SubscribeRequestPing.decode(reader, reader.uint32()); + continue; + } + case 11: { + if (tag !== 88) { + break; + } + message.fromSlot = reader.uint64().toString(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + accounts: isObject(object.accounts) + ? Object.entries(object.accounts).reduce(function (acc, _a) { + var key = _a[0], value = _a[1]; + acc[key] = exports.SubscribeRequestFilterAccounts.fromJSON(value); + return acc; + }, {}) + : {}, + slots: isObject(object.slots) + ? Object.entries(object.slots).reduce(function (acc, _a) { + var key = _a[0], value = _a[1]; + acc[key] = exports.SubscribeRequestFilterSlots.fromJSON(value); + return acc; + }, {}) + : {}, + transactions: isObject(object.transactions) + ? Object.entries(object.transactions).reduce(function (acc, _a) { + var key = _a[0], value = _a[1]; + acc[key] = exports.SubscribeRequestFilterTransactions.fromJSON(value); + return acc; + }, {}) + : {}, + transactionsStatus: isObject(object.transactionsStatus) + ? Object.entries(object.transactionsStatus).reduce(function (acc, _a) { + var key = _a[0], value = _a[1]; + acc[key] = exports.SubscribeRequestFilterTransactions.fromJSON(value); + return acc; + }, {}) + : {}, + blocks: isObject(object.blocks) + ? Object.entries(object.blocks).reduce(function (acc, _a) { + var key = _a[0], value = _a[1]; + acc[key] = exports.SubscribeRequestFilterBlocks.fromJSON(value); + return acc; + }, {}) + : {}, + blocksMeta: isObject(object.blocksMeta) + ? 
Object.entries(object.blocksMeta).reduce(function (acc, _a) { + var key = _a[0], value = _a[1]; + acc[key] = exports.SubscribeRequestFilterBlocksMeta.fromJSON(value); + return acc; + }, {}) + : {}, + entry: isObject(object.entry) + ? Object.entries(object.entry).reduce(function (acc, _a) { + var key = _a[0], value = _a[1]; + acc[key] = exports.SubscribeRequestFilterEntry.fromJSON(value); + return acc; + }, {}) + : {}, + commitment: isSet(object.commitment) ? commitmentLevelFromJSON(object.commitment) : undefined, + accountsDataSlice: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.accountsDataSlice) + ? object.accountsDataSlice.map(function (e) { return exports.SubscribeRequestAccountsDataSlice.fromJSON(e); }) + : [], + ping: isSet(object.ping) ? exports.SubscribeRequestPing.fromJSON(object.ping) : undefined, + fromSlot: isSet(object.fromSlot) ? globalThis.String(object.fromSlot) : undefined, + }; + }, + toJSON: function (message) { + var _a; + var obj = {}; + if (message.accounts) { + var entries = Object.entries(message.accounts); + if (entries.length > 0) { + obj.accounts = {}; + entries.forEach(function (_a) { + var k = _a[0], v = _a[1]; + obj.accounts[k] = exports.SubscribeRequestFilterAccounts.toJSON(v); + }); + } + } + if (message.slots) { + var entries = Object.entries(message.slots); + if (entries.length > 0) { + obj.slots = {}; + entries.forEach(function (_a) { + var k = _a[0], v = _a[1]; + obj.slots[k] = exports.SubscribeRequestFilterSlots.toJSON(v); + }); + } + } + if (message.transactions) { + var entries = Object.entries(message.transactions); + if (entries.length > 0) { + obj.transactions = {}; + entries.forEach(function (_a) { + var k = _a[0], v = _a[1]; + obj.transactions[k] = exports.SubscribeRequestFilterTransactions.toJSON(v); + }); + } + } + if (message.transactionsStatus) { + var entries = Object.entries(message.transactionsStatus); + if (entries.length > 0) { + obj.transactionsStatus = {}; + 
entries.forEach(function (_a) { + var k = _a[0], v = _a[1]; + obj.transactionsStatus[k] = exports.SubscribeRequestFilterTransactions.toJSON(v); + }); + } + } + if (message.blocks) { + var entries = Object.entries(message.blocks); + if (entries.length > 0) { + obj.blocks = {}; + entries.forEach(function (_a) { + var k = _a[0], v = _a[1]; + obj.blocks[k] = exports.SubscribeRequestFilterBlocks.toJSON(v); + }); + } + } + if (message.blocksMeta) { + var entries = Object.entries(message.blocksMeta); + if (entries.length > 0) { + obj.blocksMeta = {}; + entries.forEach(function (_a) { + var k = _a[0], v = _a[1]; + obj.blocksMeta[k] = exports.SubscribeRequestFilterBlocksMeta.toJSON(v); + }); + } + } + if (message.entry) { + var entries = Object.entries(message.entry); + if (entries.length > 0) { + obj.entry = {}; + entries.forEach(function (_a) { + var k = _a[0], v = _a[1]; + obj.entry[k] = exports.SubscribeRequestFilterEntry.toJSON(v); + }); + } + } + if (message.commitment !== undefined) { + obj.commitment = commitmentLevelToJSON(message.commitment); + } + if ((_a = message.accountsDataSlice) === null || _a === void 0 ? void 0 : _a.length) { + obj.accountsDataSlice = message.accountsDataSlice.map(function (e) { return exports.SubscribeRequestAccountsDataSlice.toJSON(e); }); + } + if (message.ping !== undefined) { + obj.ping = exports.SubscribeRequestPing.toJSON(message.ping); + } + if (message.fromSlot !== undefined) { + obj.fromSlot = message.fromSlot; + } + return obj; + }, + create: function (base) { + return exports.SubscribeRequest.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k; + var message = createBaseSubscribeRequest(); + message.accounts = Object.entries((_a = object.accounts) !== null && _a !== void 0 ? 
_a : {}).reduce(function (acc, _a) { + var key = _a[0], value = _a[1]; + if (value !== undefined) { + acc[key] = exports.SubscribeRequestFilterAccounts.fromPartial(value); + } + return acc; + }, {}); + message.slots = Object.entries((_b = object.slots) !== null && _b !== void 0 ? _b : {}).reduce(function (acc, _a) { + var key = _a[0], value = _a[1]; + if (value !== undefined) { + acc[key] = exports.SubscribeRequestFilterSlots.fromPartial(value); + } + return acc; + }, {}); + message.transactions = Object.entries((_c = object.transactions) !== null && _c !== void 0 ? _c : {}).reduce(function (acc, _a) { + var key = _a[0], value = _a[1]; + if (value !== undefined) { + acc[key] = exports.SubscribeRequestFilterTransactions.fromPartial(value); + } + return acc; + }, {}); + message.transactionsStatus = Object.entries((_d = object.transactionsStatus) !== null && _d !== void 0 ? _d : {}).reduce(function (acc, _a) { + var key = _a[0], value = _a[1]; + if (value !== undefined) { + acc[key] = exports.SubscribeRequestFilterTransactions.fromPartial(value); + } + return acc; + }, {}); + message.blocks = Object.entries((_e = object.blocks) !== null && _e !== void 0 ? _e : {}).reduce(function (acc, _a) { + var key = _a[0], value = _a[1]; + if (value !== undefined) { + acc[key] = exports.SubscribeRequestFilterBlocks.fromPartial(value); + } + return acc; + }, {}); + message.blocksMeta = Object.entries((_f = object.blocksMeta) !== null && _f !== void 0 ? _f : {}).reduce(function (acc, _a) { + var key = _a[0], value = _a[1]; + if (value !== undefined) { + acc[key] = exports.SubscribeRequestFilterBlocksMeta.fromPartial(value); + } + return acc; + }, {}); + message.entry = Object.entries((_g = object.entry) !== null && _g !== void 0 ? 
_g : {}).reduce(function (acc, _a) { + var key = _a[0], value = _a[1]; + if (value !== undefined) { + acc[key] = exports.SubscribeRequestFilterEntry.fromPartial(value); + } + return acc; + }, {}); + message.commitment = (_h = object.commitment) !== null && _h !== void 0 ? _h : undefined; + message.accountsDataSlice = + ((_j = object.accountsDataSlice) === null || _j === void 0 ? void 0 : _j.map(function (e) { return exports.SubscribeRequestAccountsDataSlice.fromPartial(e); })) || []; + message.ping = (object.ping !== undefined && object.ping !== null) + ? exports.SubscribeRequestPing.fromPartial(object.ping) + : undefined; + message.fromSlot = (_k = object.fromSlot) !== null && _k !== void 0 ? _k : undefined; + return message; + }, +}; +function createBaseSubscribeRequest_AccountsEntry() { + return { key: "", value: undefined }; +} +exports.SubscribeRequest_AccountsEntry = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== undefined) { + exports.SubscribeRequestFilterAccounts.encode(message.value, writer.uint32(18).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseSubscribeRequest_AccountsEntry(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.value = exports.SubscribeRequestFilterAccounts.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + key: isSet(object.key) ? 
globalThis.String(object.key) : "", + value: isSet(object.value) ? exports.SubscribeRequestFilterAccounts.fromJSON(object.value) : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== undefined) { + obj.value = exports.SubscribeRequestFilterAccounts.toJSON(message.value); + } + return obj; + }, + create: function (base) { + return exports.SubscribeRequest_AccountsEntry.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseSubscribeRequest_AccountsEntry(); + message.key = (_a = object.key) !== null && _a !== void 0 ? _a : ""; + message.value = (object.value !== undefined && object.value !== null) + ? exports.SubscribeRequestFilterAccounts.fromPartial(object.value) + : undefined; + return message; + }, +}; +function createBaseSubscribeRequest_SlotsEntry() { + return { key: "", value: undefined }; +} +exports.SubscribeRequest_SlotsEntry = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== undefined) { + exports.SubscribeRequestFilterSlots.encode(message.value, writer.uint32(18).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseSubscribeRequest_SlotsEntry(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.value = exports.SubscribeRequestFilterSlots.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object.value) ? exports.SubscribeRequestFilterSlots.fromJSON(object.value) : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== undefined) { + obj.value = exports.SubscribeRequestFilterSlots.toJSON(message.value); + } + return obj; + }, + create: function (base) { + return exports.SubscribeRequest_SlotsEntry.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseSubscribeRequest_SlotsEntry(); + message.key = (_a = object.key) !== null && _a !== void 0 ? _a : ""; + message.value = (object.value !== undefined && object.value !== null) + ? 
exports.SubscribeRequestFilterSlots.fromPartial(object.value) + : undefined; + return message; + }, +}; +function createBaseSubscribeRequest_TransactionsEntry() { + return { key: "", value: undefined }; +} +exports.SubscribeRequest_TransactionsEntry = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== undefined) { + exports.SubscribeRequestFilterTransactions.encode(message.value, writer.uint32(18).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseSubscribeRequest_TransactionsEntry(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.value = exports.SubscribeRequestFilterTransactions.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object.value) ? exports.SubscribeRequestFilterTransactions.fromJSON(object.value) : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== undefined) { + obj.value = exports.SubscribeRequestFilterTransactions.toJSON(message.value); + } + return obj; + }, + create: function (base) { + return exports.SubscribeRequest_TransactionsEntry.fromPartial(base !== null && base !== void 0 ? 
base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseSubscribeRequest_TransactionsEntry(); + message.key = (_a = object.key) !== null && _a !== void 0 ? _a : ""; + message.value = (object.value !== undefined && object.value !== null) + ? exports.SubscribeRequestFilterTransactions.fromPartial(object.value) + : undefined; + return message; + }, +}; +function createBaseSubscribeRequest_TransactionsStatusEntry() { + return { key: "", value: undefined }; +} +exports.SubscribeRequest_TransactionsStatusEntry = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== undefined) { + exports.SubscribeRequestFilterTransactions.encode(message.value, writer.uint32(18).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseSubscribeRequest_TransactionsStatusEntry(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.value = exports.SubscribeRequestFilterTransactions.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object.value) ? 
exports.SubscribeRequestFilterTransactions.fromJSON(object.value) : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== undefined) { + obj.value = exports.SubscribeRequestFilterTransactions.toJSON(message.value); + } + return obj; + }, + create: function (base) { + return exports.SubscribeRequest_TransactionsStatusEntry.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseSubscribeRequest_TransactionsStatusEntry(); + message.key = (_a = object.key) !== null && _a !== void 0 ? _a : ""; + message.value = (object.value !== undefined && object.value !== null) + ? exports.SubscribeRequestFilterTransactions.fromPartial(object.value) + : undefined; + return message; + }, +}; +function createBaseSubscribeRequest_BlocksEntry() { + return { key: "", value: undefined }; +} +exports.SubscribeRequest_BlocksEntry = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== undefined) { + exports.SubscribeRequestFilterBlocks.encode(message.value, writer.uint32(18).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseSubscribeRequest_BlocksEntry(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.value = exports.SubscribeRequestFilterBlocks.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object.value) ? exports.SubscribeRequestFilterBlocks.fromJSON(object.value) : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== undefined) { + obj.value = exports.SubscribeRequestFilterBlocks.toJSON(message.value); + } + return obj; + }, + create: function (base) { + return exports.SubscribeRequest_BlocksEntry.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseSubscribeRequest_BlocksEntry(); + message.key = (_a = object.key) !== null && _a !== void 0 ? _a : ""; + message.value = (object.value !== undefined && object.value !== null) + ? 
exports.SubscribeRequestFilterBlocks.fromPartial(object.value) + : undefined; + return message; + }, +}; +function createBaseSubscribeRequest_BlocksMetaEntry() { + return { key: "", value: undefined }; +} +exports.SubscribeRequest_BlocksMetaEntry = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== undefined) { + exports.SubscribeRequestFilterBlocksMeta.encode(message.value, writer.uint32(18).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseSubscribeRequest_BlocksMetaEntry(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.value = exports.SubscribeRequestFilterBlocksMeta.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object.value) ? exports.SubscribeRequestFilterBlocksMeta.fromJSON(object.value) : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== undefined) { + obj.value = exports.SubscribeRequestFilterBlocksMeta.toJSON(message.value); + } + return obj; + }, + create: function (base) { + return exports.SubscribeRequest_BlocksMetaEntry.fromPartial(base !== null && base !== void 0 ? 
base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseSubscribeRequest_BlocksMetaEntry(); + message.key = (_a = object.key) !== null && _a !== void 0 ? _a : ""; + message.value = (object.value !== undefined && object.value !== null) + ? exports.SubscribeRequestFilterBlocksMeta.fromPartial(object.value) + : undefined; + return message; + }, +}; +function createBaseSubscribeRequest_EntryEntry() { + return { key: "", value: undefined }; +} +exports.SubscribeRequest_EntryEntry = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== undefined) { + exports.SubscribeRequestFilterEntry.encode(message.value, writer.uint32(18).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseSubscribeRequest_EntryEntry(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.value = exports.SubscribeRequestFilterEntry.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object.value) ? 
exports.SubscribeRequestFilterEntry.fromJSON(object.value) : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== undefined) { + obj.value = exports.SubscribeRequestFilterEntry.toJSON(message.value); + } + return obj; + }, + create: function (base) { + return exports.SubscribeRequest_EntryEntry.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseSubscribeRequest_EntryEntry(); + message.key = (_a = object.key) !== null && _a !== void 0 ? _a : ""; + message.value = (object.value !== undefined && object.value !== null) + ? exports.SubscribeRequestFilterEntry.fromPartial(object.value) + : undefined; + return message; + }, +}; +function createBaseSubscribeRequestFilterAccounts() { + return { account: [], owner: [], filters: [], nonemptyTxnSignature: undefined }; +} +exports.SubscribeRequestFilterAccounts = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + for (var _i = 0, _a = message.account; _i < _a.length; _i++) { + var v = _a[_i]; + writer.uint32(18).string(v); + } + for (var _b = 0, _c = message.owner; _b < _c.length; _b++) { + var v = _c[_b]; + writer.uint32(26).string(v); + } + for (var _d = 0, _e = message.filters; _d < _e.length; _d++) { + var v = _e[_d]; + exports.SubscribeRequestFilterAccountsFilter.encode(v, writer.uint32(34).fork()).join(); + } + if (message.nonemptyTxnSignature !== undefined) { + writer.uint32(40).bool(message.nonemptyTxnSignature); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseSubscribeRequestFilterAccounts(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 2: { + if (tag !== 18) { + break; + } + message.account.push(reader.string()); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + message.owner.push(reader.string()); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + message.filters.push(exports.SubscribeRequestFilterAccountsFilter.decode(reader, reader.uint32())); + continue; + } + case 5: { + if (tag !== 40) { + break; + } + message.nonemptyTxnSignature = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + account: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.account) ? object.account.map(function (e) { return globalThis.String(e); }) : [], + owner: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.owner) ? object.owner.map(function (e) { return globalThis.String(e); }) : [], + filters: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.filters) + ? object.filters.map(function (e) { return exports.SubscribeRequestFilterAccountsFilter.fromJSON(e); }) + : [], + nonemptyTxnSignature: isSet(object.nonemptyTxnSignature) + ? globalThis.Boolean(object.nonemptyTxnSignature) + : undefined, + }; + }, + toJSON: function (message) { + var _a, _b, _c; + var obj = {}; + if ((_a = message.account) === null || _a === void 0 ? void 0 : _a.length) { + obj.account = message.account; + } + if ((_b = message.owner) === null || _b === void 0 ? void 0 : _b.length) { + obj.owner = message.owner; + } + if ((_c = message.filters) === null || _c === void 0 ? 
void 0 : _c.length) { + obj.filters = message.filters.map(function (e) { return exports.SubscribeRequestFilterAccountsFilter.toJSON(e); }); + } + if (message.nonemptyTxnSignature !== undefined) { + obj.nonemptyTxnSignature = message.nonemptyTxnSignature; + } + return obj; + }, + create: function (base) { + return exports.SubscribeRequestFilterAccounts.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c, _d; + var message = createBaseSubscribeRequestFilterAccounts(); + message.account = ((_a = object.account) === null || _a === void 0 ? void 0 : _a.map(function (e) { return e; })) || []; + message.owner = ((_b = object.owner) === null || _b === void 0 ? void 0 : _b.map(function (e) { return e; })) || []; + message.filters = ((_c = object.filters) === null || _c === void 0 ? void 0 : _c.map(function (e) { return exports.SubscribeRequestFilterAccountsFilter.fromPartial(e); })) || []; + message.nonemptyTxnSignature = (_d = object.nonemptyTxnSignature) !== null && _d !== void 0 ? 
_d : undefined; + return message; + }, +}; +function createBaseSubscribeRequestFilterAccountsFilter() { + return { memcmp: undefined, datasize: undefined, tokenAccountState: undefined, lamports: undefined }; +} +exports.SubscribeRequestFilterAccountsFilter = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.memcmp !== undefined) { + exports.SubscribeRequestFilterAccountsFilterMemcmp.encode(message.memcmp, writer.uint32(10).fork()).join(); + } + if (message.datasize !== undefined) { + writer.uint32(16).uint64(message.datasize); + } + if (message.tokenAccountState !== undefined) { + writer.uint32(24).bool(message.tokenAccountState); + } + if (message.lamports !== undefined) { + exports.SubscribeRequestFilterAccountsFilterLamports.encode(message.lamports, writer.uint32(34).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseSubscribeRequestFilterAccountsFilter(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.memcmp = exports.SubscribeRequestFilterAccountsFilterMemcmp.decode(reader, reader.uint32()); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + message.datasize = reader.uint64().toString(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + message.tokenAccountState = reader.bool(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + message.lamports = exports.SubscribeRequestFilterAccountsFilterLamports.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + memcmp: isSet(object.memcmp) ? 
exports.SubscribeRequestFilterAccountsFilterMemcmp.fromJSON(object.memcmp) : undefined, + datasize: isSet(object.datasize) ? globalThis.String(object.datasize) : undefined, + tokenAccountState: isSet(object.tokenAccountState) ? globalThis.Boolean(object.tokenAccountState) : undefined, + lamports: isSet(object.lamports) + ? exports.SubscribeRequestFilterAccountsFilterLamports.fromJSON(object.lamports) + : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.memcmp !== undefined) { + obj.memcmp = exports.SubscribeRequestFilterAccountsFilterMemcmp.toJSON(message.memcmp); + } + if (message.datasize !== undefined) { + obj.datasize = message.datasize; + } + if (message.tokenAccountState !== undefined) { + obj.tokenAccountState = message.tokenAccountState; + } + if (message.lamports !== undefined) { + obj.lamports = exports.SubscribeRequestFilterAccountsFilterLamports.toJSON(message.lamports); + } + return obj; + }, + create: function (base) { + return exports.SubscribeRequestFilterAccountsFilter.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b; + var message = createBaseSubscribeRequestFilterAccountsFilter(); + message.memcmp = (object.memcmp !== undefined && object.memcmp !== null) + ? exports.SubscribeRequestFilterAccountsFilterMemcmp.fromPartial(object.memcmp) + : undefined; + message.datasize = (_a = object.datasize) !== null && _a !== void 0 ? _a : undefined; + message.tokenAccountState = (_b = object.tokenAccountState) !== null && _b !== void 0 ? _b : undefined; + message.lamports = (object.lamports !== undefined && object.lamports !== null) + ? 
exports.SubscribeRequestFilterAccountsFilterLamports.fromPartial(object.lamports) + : undefined; + return message; + }, +}; +function createBaseSubscribeRequestFilterAccountsFilterMemcmp() { + return { offset: "0", bytes: undefined, base58: undefined, base64: undefined }; +} +exports.SubscribeRequestFilterAccountsFilterMemcmp = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.offset !== "0") { + writer.uint32(8).uint64(message.offset); + } + if (message.bytes !== undefined) { + writer.uint32(18).bytes(message.bytes); + } + if (message.base58 !== undefined) { + writer.uint32(26).string(message.base58); + } + if (message.base64 !== undefined) { + writer.uint32(34).string(message.base64); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseSubscribeRequestFilterAccountsFilterMemcmp(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.offset = reader.uint64().toString(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.bytes = reader.bytes(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + message.base58 = reader.string(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + message.base64 = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + offset: isSet(object.offset) ? globalThis.String(object.offset) : "0", + bytes: isSet(object.bytes) ? bytesFromBase64(object.bytes) : undefined, + base58: isSet(object.base58) ? globalThis.String(object.base58) : undefined, + base64: isSet(object.base64) ? 
globalThis.String(object.base64) : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.offset !== "0") { + obj.offset = message.offset; + } + if (message.bytes !== undefined) { + obj.bytes = base64FromBytes(message.bytes); + } + if (message.base58 !== undefined) { + obj.base58 = message.base58; + } + if (message.base64 !== undefined) { + obj.base64 = message.base64; + } + return obj; + }, + create: function (base) { + return exports.SubscribeRequestFilterAccountsFilterMemcmp.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c, _d; + var message = createBaseSubscribeRequestFilterAccountsFilterMemcmp(); + message.offset = (_a = object.offset) !== null && _a !== void 0 ? _a : "0"; + message.bytes = (_b = object.bytes) !== null && _b !== void 0 ? _b : undefined; + message.base58 = (_c = object.base58) !== null && _c !== void 0 ? _c : undefined; + message.base64 = (_d = object.base64) !== null && _d !== void 0 ? _d : undefined; + return message; + }, +}; +function createBaseSubscribeRequestFilterAccountsFilterLamports() { + return { eq: undefined, ne: undefined, lt: undefined, gt: undefined }; +} +exports.SubscribeRequestFilterAccountsFilterLamports = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.eq !== undefined) { + writer.uint32(8).uint64(message.eq); + } + if (message.ne !== undefined) { + writer.uint32(16).uint64(message.ne); + } + if (message.lt !== undefined) { + writer.uint32(24).uint64(message.lt); + } + if (message.gt !== undefined) { + writer.uint32(32).uint64(message.gt); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseSubscribeRequestFilterAccountsFilterLamports(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.eq = reader.uint64().toString(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + message.ne = reader.uint64().toString(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + message.lt = reader.uint64().toString(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + message.gt = reader.uint64().toString(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + eq: isSet(object.eq) ? globalThis.String(object.eq) : undefined, + ne: isSet(object.ne) ? globalThis.String(object.ne) : undefined, + lt: isSet(object.lt) ? globalThis.String(object.lt) : undefined, + gt: isSet(object.gt) ? globalThis.String(object.gt) : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.eq !== undefined) { + obj.eq = message.eq; + } + if (message.ne !== undefined) { + obj.ne = message.ne; + } + if (message.lt !== undefined) { + obj.lt = message.lt; + } + if (message.gt !== undefined) { + obj.gt = message.gt; + } + return obj; + }, + create: function (base) { + return exports.SubscribeRequestFilterAccountsFilterLamports.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c, _d; + var message = createBaseSubscribeRequestFilterAccountsFilterLamports(); + message.eq = (_a = object.eq) !== null && _a !== void 0 ? _a : undefined; + message.ne = (_b = object.ne) !== null && _b !== void 0 ? _b : undefined; + message.lt = (_c = object.lt) !== null && _c !== void 0 ? _c : undefined; + message.gt = (_d = object.gt) !== null && _d !== void 0 ? 
_d : undefined; + return message; + }, +}; +function createBaseSubscribeRequestFilterSlots() { + return { filterByCommitment: undefined, interslotUpdates: undefined }; +} +exports.SubscribeRequestFilterSlots = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.filterByCommitment !== undefined) { + writer.uint32(8).bool(message.filterByCommitment); + } + if (message.interslotUpdates !== undefined) { + writer.uint32(16).bool(message.interslotUpdates); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseSubscribeRequestFilterSlots(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.filterByCommitment = reader.bool(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + message.interslotUpdates = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + filterByCommitment: isSet(object.filterByCommitment) ? globalThis.Boolean(object.filterByCommitment) : undefined, + interslotUpdates: isSet(object.interslotUpdates) ? globalThis.Boolean(object.interslotUpdates) : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.filterByCommitment !== undefined) { + obj.filterByCommitment = message.filterByCommitment; + } + if (message.interslotUpdates !== undefined) { + obj.interslotUpdates = message.interslotUpdates; + } + return obj; + }, + create: function (base) { + return exports.SubscribeRequestFilterSlots.fromPartial(base !== null && base !== void 0 ? 
base : {}); + }, + fromPartial: function (object) { + var _a, _b; + var message = createBaseSubscribeRequestFilterSlots(); + message.filterByCommitment = (_a = object.filterByCommitment) !== null && _a !== void 0 ? _a : undefined; + message.interslotUpdates = (_b = object.interslotUpdates) !== null && _b !== void 0 ? _b : undefined; + return message; + }, +}; +function createBaseSubscribeRequestFilterTransactions() { + return { + vote: undefined, + failed: undefined, + signature: undefined, + accountInclude: [], + accountExclude: [], + accountRequired: [], + }; +} +exports.SubscribeRequestFilterTransactions = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.vote !== undefined) { + writer.uint32(8).bool(message.vote); + } + if (message.failed !== undefined) { + writer.uint32(16).bool(message.failed); + } + if (message.signature !== undefined) { + writer.uint32(42).string(message.signature); + } + for (var _i = 0, _a = message.accountInclude; _i < _a.length; _i++) { + var v = _a[_i]; + writer.uint32(26).string(v); + } + for (var _b = 0, _c = message.accountExclude; _b < _c.length; _b++) { + var v = _c[_b]; + writer.uint32(34).string(v); + } + for (var _d = 0, _e = message.accountRequired; _d < _e.length; _d++) { + var v = _e[_d]; + writer.uint32(50).string(v); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseSubscribeRequestFilterTransactions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.vote = reader.bool(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + message.failed = reader.bool(); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + message.signature = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + message.accountInclude.push(reader.string()); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + message.accountExclude.push(reader.string()); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + message.accountRequired.push(reader.string()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + vote: isSet(object.vote) ? globalThis.Boolean(object.vote) : undefined, + failed: isSet(object.failed) ? globalThis.Boolean(object.failed) : undefined, + signature: isSet(object.signature) ? globalThis.String(object.signature) : undefined, + accountInclude: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.accountInclude) + ? object.accountInclude.map(function (e) { return globalThis.String(e); }) + : [], + accountExclude: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.accountExclude) + ? object.accountExclude.map(function (e) { return globalThis.String(e); }) + : [], + accountRequired: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.accountRequired) + ? 
object.accountRequired.map(function (e) { return globalThis.String(e); }) + : [], + }; + }, + toJSON: function (message) { + var _a, _b, _c; + var obj = {}; + if (message.vote !== undefined) { + obj.vote = message.vote; + } + if (message.failed !== undefined) { + obj.failed = message.failed; + } + if (message.signature !== undefined) { + obj.signature = message.signature; + } + if ((_a = message.accountInclude) === null || _a === void 0 ? void 0 : _a.length) { + obj.accountInclude = message.accountInclude; + } + if ((_b = message.accountExclude) === null || _b === void 0 ? void 0 : _b.length) { + obj.accountExclude = message.accountExclude; + } + if ((_c = message.accountRequired) === null || _c === void 0 ? void 0 : _c.length) { + obj.accountRequired = message.accountRequired; + } + return obj; + }, + create: function (base) { + return exports.SubscribeRequestFilterTransactions.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c, _d, _e, _f; + var message = createBaseSubscribeRequestFilterTransactions(); + message.vote = (_a = object.vote) !== null && _a !== void 0 ? _a : undefined; + message.failed = (_b = object.failed) !== null && _b !== void 0 ? _b : undefined; + message.signature = (_c = object.signature) !== null && _c !== void 0 ? _c : undefined; + message.accountInclude = ((_d = object.accountInclude) === null || _d === void 0 ? void 0 : _d.map(function (e) { return e; })) || []; + message.accountExclude = ((_e = object.accountExclude) === null || _e === void 0 ? void 0 : _e.map(function (e) { return e; })) || []; + message.accountRequired = ((_f = object.accountRequired) === null || _f === void 0 ? 
void 0 : _f.map(function (e) { return e; })) || []; + return message; + }, +}; +function createBaseSubscribeRequestFilterBlocks() { + return { accountInclude: [], includeTransactions: undefined, includeAccounts: undefined, includeEntries: undefined }; +} +exports.SubscribeRequestFilterBlocks = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + for (var _i = 0, _a = message.accountInclude; _i < _a.length; _i++) { + var v = _a[_i]; + writer.uint32(10).string(v); + } + if (message.includeTransactions !== undefined) { + writer.uint32(16).bool(message.includeTransactions); + } + if (message.includeAccounts !== undefined) { + writer.uint32(24).bool(message.includeAccounts); + } + if (message.includeEntries !== undefined) { + writer.uint32(32).bool(message.includeEntries); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseSubscribeRequestFilterBlocks(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.accountInclude.push(reader.string()); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + message.includeTransactions = reader.bool(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + message.includeAccounts = reader.bool(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + message.includeEntries = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + accountInclude: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.accountInclude) + ? 
object.accountInclude.map(function (e) { return globalThis.String(e); }) + : [], + includeTransactions: isSet(object.includeTransactions) + ? globalThis.Boolean(object.includeTransactions) + : undefined, + includeAccounts: isSet(object.includeAccounts) ? globalThis.Boolean(object.includeAccounts) : undefined, + includeEntries: isSet(object.includeEntries) ? globalThis.Boolean(object.includeEntries) : undefined, + }; + }, + toJSON: function (message) { + var _a; + var obj = {}; + if ((_a = message.accountInclude) === null || _a === void 0 ? void 0 : _a.length) { + obj.accountInclude = message.accountInclude; + } + if (message.includeTransactions !== undefined) { + obj.includeTransactions = message.includeTransactions; + } + if (message.includeAccounts !== undefined) { + obj.includeAccounts = message.includeAccounts; + } + if (message.includeEntries !== undefined) { + obj.includeEntries = message.includeEntries; + } + return obj; + }, + create: function (base) { + return exports.SubscribeRequestFilterBlocks.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c, _d; + var message = createBaseSubscribeRequestFilterBlocks(); + message.accountInclude = ((_a = object.accountInclude) === null || _a === void 0 ? void 0 : _a.map(function (e) { return e; })) || []; + message.includeTransactions = (_b = object.includeTransactions) !== null && _b !== void 0 ? _b : undefined; + message.includeAccounts = (_c = object.includeAccounts) !== null && _c !== void 0 ? _c : undefined; + message.includeEntries = (_d = object.includeEntries) !== null && _d !== void 0 ? 
_d : undefined; + return message; + }, +}; +function createBaseSubscribeRequestFilterBlocksMeta() { + return {}; +} +exports.SubscribeRequestFilterBlocksMeta = { + encode: function (_, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseSubscribeRequestFilterBlocksMeta(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (_) { + return {}; + }, + toJSON: function (_) { + var obj = {}; + return obj; + }, + create: function (base) { + return exports.SubscribeRequestFilterBlocksMeta.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (_) { + var message = createBaseSubscribeRequestFilterBlocksMeta(); + return message; + }, +}; +function createBaseSubscribeRequestFilterEntry() { + return {}; +} +exports.SubscribeRequestFilterEntry = { + encode: function (_, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseSubscribeRequestFilterEntry(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (_) { + return {}; + }, + toJSON: function (_) { + var obj = {}; + return obj; + }, + create: function (base) { + return exports.SubscribeRequestFilterEntry.fromPartial(base !== null && base !== void 0 ? 
base : {}); + }, + fromPartial: function (_) { + var message = createBaseSubscribeRequestFilterEntry(); + return message; + }, +}; +function createBaseSubscribeRequestAccountsDataSlice() { + return { offset: "0", length: "0" }; +} +exports.SubscribeRequestAccountsDataSlice = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.offset !== "0") { + writer.uint32(8).uint64(message.offset); + } + if (message.length !== "0") { + writer.uint32(16).uint64(message.length); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseSubscribeRequestAccountsDataSlice(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.offset = reader.uint64().toString(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + message.length = reader.uint64().toString(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + offset: isSet(object.offset) ? globalThis.String(object.offset) : "0", + length: isSet(object.length) ? globalThis.String(object.length) : "0", + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.offset !== "0") { + obj.offset = message.offset; + } + if (message.length !== "0") { + obj.length = message.length; + } + return obj; + }, + create: function (base) { + return exports.SubscribeRequestAccountsDataSlice.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b; + var message = createBaseSubscribeRequestAccountsDataSlice(); + message.offset = (_a = object.offset) !== null && _a !== void 0 ? 
_a : "0"; + message.length = (_b = object.length) !== null && _b !== void 0 ? _b : "0"; + return message; + }, +}; +function createBaseSubscribeRequestPing() { + return { id: 0 }; +} +exports.SubscribeRequestPing = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.id !== 0) { + writer.uint32(8).int32(message.id); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseSubscribeRequestPing(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.id = reader.int32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { id: isSet(object.id) ? globalThis.Number(object.id) : 0 }; + }, + toJSON: function (message) { + var obj = {}; + if (message.id !== 0) { + obj.id = Math.round(message.id); + } + return obj; + }, + create: function (base) { + return exports.SubscribeRequestPing.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseSubscribeRequestPing(); + message.id = (_a = object.id) !== null && _a !== void 0 ? 
_a : 0; + return message; + }, +}; +function createBaseSubscribeUpdate() { + return { + filters: [], + account: undefined, + slot: undefined, + transaction: undefined, + transactionStatus: undefined, + block: undefined, + ping: undefined, + pong: undefined, + blockMeta: undefined, + entry: undefined, + createdAt: undefined, + }; +} +exports.SubscribeUpdate = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + for (var _i = 0, _a = message.filters; _i < _a.length; _i++) { + var v = _a[_i]; + writer.uint32(10).string(v); + } + if (message.account !== undefined) { + exports.SubscribeUpdateAccount.encode(message.account, writer.uint32(18).fork()).join(); + } + if (message.slot !== undefined) { + exports.SubscribeUpdateSlot.encode(message.slot, writer.uint32(26).fork()).join(); + } + if (message.transaction !== undefined) { + exports.SubscribeUpdateTransaction.encode(message.transaction, writer.uint32(34).fork()).join(); + } + if (message.transactionStatus !== undefined) { + exports.SubscribeUpdateTransactionStatus.encode(message.transactionStatus, writer.uint32(82).fork()).join(); + } + if (message.block !== undefined) { + exports.SubscribeUpdateBlock.encode(message.block, writer.uint32(42).fork()).join(); + } + if (message.ping !== undefined) { + exports.SubscribeUpdatePing.encode(message.ping, writer.uint32(50).fork()).join(); + } + if (message.pong !== undefined) { + exports.SubscribeUpdatePong.encode(message.pong, writer.uint32(74).fork()).join(); + } + if (message.blockMeta !== undefined) { + exports.SubscribeUpdateBlockMeta.encode(message.blockMeta, writer.uint32(58).fork()).join(); + } + if (message.entry !== undefined) { + exports.SubscribeUpdateEntry.encode(message.entry, writer.uint32(66).fork()).join(); + } + if (message.createdAt !== undefined) { + timestamp_1.Timestamp.encode(toTimestamp(message.createdAt), writer.uint32(90).fork()).join(); + } + return writer; + }, + decode: function (input, length) 
{ + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseSubscribeUpdate(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.filters.push(reader.string()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.account = exports.SubscribeUpdateAccount.decode(reader, reader.uint32()); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + message.slot = exports.SubscribeUpdateSlot.decode(reader, reader.uint32()); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + message.transaction = exports.SubscribeUpdateTransaction.decode(reader, reader.uint32()); + continue; + } + case 10: { + if (tag !== 82) { + break; + } + message.transactionStatus = exports.SubscribeUpdateTransactionStatus.decode(reader, reader.uint32()); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + message.block = exports.SubscribeUpdateBlock.decode(reader, reader.uint32()); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + message.ping = exports.SubscribeUpdatePing.decode(reader, reader.uint32()); + continue; + } + case 9: { + if (tag !== 74) { + break; + } + message.pong = exports.SubscribeUpdatePong.decode(reader, reader.uint32()); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + message.blockMeta = exports.SubscribeUpdateBlockMeta.decode(reader, reader.uint32()); + continue; + } + case 8: { + if (tag !== 66) { + break; + } + message.entry = exports.SubscribeUpdateEntry.decode(reader, reader.uint32()); + continue; + } + case 11: { + if (tag !== 90) { + break; + } + message.createdAt = fromTimestamp(timestamp_1.Timestamp.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + 
return { + filters: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.filters) ? object.filters.map(function (e) { return globalThis.String(e); }) : [], + account: isSet(object.account) ? exports.SubscribeUpdateAccount.fromJSON(object.account) : undefined, + slot: isSet(object.slot) ? exports.SubscribeUpdateSlot.fromJSON(object.slot) : undefined, + transaction: isSet(object.transaction) ? exports.SubscribeUpdateTransaction.fromJSON(object.transaction) : undefined, + transactionStatus: isSet(object.transactionStatus) + ? exports.SubscribeUpdateTransactionStatus.fromJSON(object.transactionStatus) + : undefined, + block: isSet(object.block) ? exports.SubscribeUpdateBlock.fromJSON(object.block) : undefined, + ping: isSet(object.ping) ? exports.SubscribeUpdatePing.fromJSON(object.ping) : undefined, + pong: isSet(object.pong) ? exports.SubscribeUpdatePong.fromJSON(object.pong) : undefined, + blockMeta: isSet(object.blockMeta) ? exports.SubscribeUpdateBlockMeta.fromJSON(object.blockMeta) : undefined, + entry: isSet(object.entry) ? exports.SubscribeUpdateEntry.fromJSON(object.entry) : undefined, + createdAt: isSet(object.createdAt) ? fromJsonTimestamp(object.createdAt) : undefined, + }; + }, + toJSON: function (message) { + var _a; + var obj = {}; + if ((_a = message.filters) === null || _a === void 0 ? 
void 0 : _a.length) { + obj.filters = message.filters; + } + if (message.account !== undefined) { + obj.account = exports.SubscribeUpdateAccount.toJSON(message.account); + } + if (message.slot !== undefined) { + obj.slot = exports.SubscribeUpdateSlot.toJSON(message.slot); + } + if (message.transaction !== undefined) { + obj.transaction = exports.SubscribeUpdateTransaction.toJSON(message.transaction); + } + if (message.transactionStatus !== undefined) { + obj.transactionStatus = exports.SubscribeUpdateTransactionStatus.toJSON(message.transactionStatus); + } + if (message.block !== undefined) { + obj.block = exports.SubscribeUpdateBlock.toJSON(message.block); + } + if (message.ping !== undefined) { + obj.ping = exports.SubscribeUpdatePing.toJSON(message.ping); + } + if (message.pong !== undefined) { + obj.pong = exports.SubscribeUpdatePong.toJSON(message.pong); + } + if (message.blockMeta !== undefined) { + obj.blockMeta = exports.SubscribeUpdateBlockMeta.toJSON(message.blockMeta); + } + if (message.entry !== undefined) { + obj.entry = exports.SubscribeUpdateEntry.toJSON(message.entry); + } + if (message.createdAt !== undefined) { + obj.createdAt = message.createdAt.toISOString(); + } + return obj; + }, + create: function (base) { + return exports.SubscribeUpdate.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b; + var message = createBaseSubscribeUpdate(); + message.filters = ((_a = object.filters) === null || _a === void 0 ? void 0 : _a.map(function (e) { return e; })) || []; + message.account = (object.account !== undefined && object.account !== null) + ? exports.SubscribeUpdateAccount.fromPartial(object.account) + : undefined; + message.slot = (object.slot !== undefined && object.slot !== null) + ? exports.SubscribeUpdateSlot.fromPartial(object.slot) + : undefined; + message.transaction = (object.transaction !== undefined && object.transaction !== null) + ? 
exports.SubscribeUpdateTransaction.fromPartial(object.transaction) + : undefined; + message.transactionStatus = (object.transactionStatus !== undefined && object.transactionStatus !== null) + ? exports.SubscribeUpdateTransactionStatus.fromPartial(object.transactionStatus) + : undefined; + message.block = (object.block !== undefined && object.block !== null) + ? exports.SubscribeUpdateBlock.fromPartial(object.block) + : undefined; + message.ping = (object.ping !== undefined && object.ping !== null) + ? exports.SubscribeUpdatePing.fromPartial(object.ping) + : undefined; + message.pong = (object.pong !== undefined && object.pong !== null) + ? exports.SubscribeUpdatePong.fromPartial(object.pong) + : undefined; + message.blockMeta = (object.blockMeta !== undefined && object.blockMeta !== null) + ? exports.SubscribeUpdateBlockMeta.fromPartial(object.blockMeta) + : undefined; + message.entry = (object.entry !== undefined && object.entry !== null) + ? exports.SubscribeUpdateEntry.fromPartial(object.entry) + : undefined; + message.createdAt = (_b = object.createdAt) !== null && _b !== void 0 ? _b : undefined; + return message; + }, +}; +function createBaseSubscribeUpdateAccount() { + return { account: undefined, slot: "0", isStartup: false }; +} +exports.SubscribeUpdateAccount = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.account !== undefined) { + exports.SubscribeUpdateAccountInfo.encode(message.account, writer.uint32(10).fork()).join(); + } + if (message.slot !== "0") { + writer.uint32(16).uint64(message.slot); + } + if (message.isStartup !== false) { + writer.uint32(24).bool(message.isStartup); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseSubscribeUpdateAccount(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.account = exports.SubscribeUpdateAccountInfo.decode(reader, reader.uint32()); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + message.slot = reader.uint64().toString(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + message.isStartup = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + account: isSet(object.account) ? exports.SubscribeUpdateAccountInfo.fromJSON(object.account) : undefined, + slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", + isStartup: isSet(object.isStartup) ? globalThis.Boolean(object.isStartup) : false, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.account !== undefined) { + obj.account = exports.SubscribeUpdateAccountInfo.toJSON(message.account); + } + if (message.slot !== "0") { + obj.slot = message.slot; + } + if (message.isStartup !== false) { + obj.isStartup = message.isStartup; + } + return obj; + }, + create: function (base) { + return exports.SubscribeUpdateAccount.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b; + var message = createBaseSubscribeUpdateAccount(); + message.account = (object.account !== undefined && object.account !== null) + ? exports.SubscribeUpdateAccountInfo.fromPartial(object.account) + : undefined; + message.slot = (_a = object.slot) !== null && _a !== void 0 ? _a : "0"; + message.isStartup = (_b = object.isStartup) !== null && _b !== void 0 ? 
_b : false; + return message; + }, +}; +function createBaseSubscribeUpdateAccountInfo() { + return { + pubkey: new Uint8Array(0), + lamports: "0", + owner: new Uint8Array(0), + executable: false, + rentEpoch: "0", + data: new Uint8Array(0), + writeVersion: "0", + txnSignature: undefined, + }; +} +exports.SubscribeUpdateAccountInfo = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.pubkey.length !== 0) { + writer.uint32(10).bytes(message.pubkey); + } + if (message.lamports !== "0") { + writer.uint32(16).uint64(message.lamports); + } + if (message.owner.length !== 0) { + writer.uint32(26).bytes(message.owner); + } + if (message.executable !== false) { + writer.uint32(32).bool(message.executable); + } + if (message.rentEpoch !== "0") { + writer.uint32(40).uint64(message.rentEpoch); + } + if (message.data.length !== 0) { + writer.uint32(50).bytes(message.data); + } + if (message.writeVersion !== "0") { + writer.uint32(56).uint64(message.writeVersion); + } + if (message.txnSignature !== undefined) { + writer.uint32(66).bytes(message.txnSignature); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseSubscribeUpdateAccountInfo(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.pubkey = reader.bytes(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + message.lamports = reader.uint64().toString(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + message.owner = reader.bytes(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + message.executable = reader.bool(); + continue; + } + case 5: { + if (tag !== 40) { + break; + } + message.rentEpoch = reader.uint64().toString(); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + message.data = reader.bytes(); + continue; + } + case 7: { + if (tag !== 56) { + break; + } + message.writeVersion = reader.uint64().toString(); + continue; + } + case 8: { + if (tag !== 66) { + break; + } + message.txnSignature = reader.bytes(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + pubkey: isSet(object.pubkey) ? bytesFromBase64(object.pubkey) : new Uint8Array(0), + lamports: isSet(object.lamports) ? globalThis.String(object.lamports) : "0", + owner: isSet(object.owner) ? bytesFromBase64(object.owner) : new Uint8Array(0), + executable: isSet(object.executable) ? globalThis.Boolean(object.executable) : false, + rentEpoch: isSet(object.rentEpoch) ? globalThis.String(object.rentEpoch) : "0", + data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(0), + writeVersion: isSet(object.writeVersion) ? globalThis.String(object.writeVersion) : "0", + txnSignature: isSet(object.txnSignature) ? 
bytesFromBase64(object.txnSignature) : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.pubkey.length !== 0) { + obj.pubkey = base64FromBytes(message.pubkey); + } + if (message.lamports !== "0") { + obj.lamports = message.lamports; + } + if (message.owner.length !== 0) { + obj.owner = base64FromBytes(message.owner); + } + if (message.executable !== false) { + obj.executable = message.executable; + } + if (message.rentEpoch !== "0") { + obj.rentEpoch = message.rentEpoch; + } + if (message.data.length !== 0) { + obj.data = base64FromBytes(message.data); + } + if (message.writeVersion !== "0") { + obj.writeVersion = message.writeVersion; + } + if (message.txnSignature !== undefined) { + obj.txnSignature = base64FromBytes(message.txnSignature); + } + return obj; + }, + create: function (base) { + return exports.SubscribeUpdateAccountInfo.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c, _d, _e, _f, _g, _h; + var message = createBaseSubscribeUpdateAccountInfo(); + message.pubkey = (_a = object.pubkey) !== null && _a !== void 0 ? _a : new Uint8Array(0); + message.lamports = (_b = object.lamports) !== null && _b !== void 0 ? _b : "0"; + message.owner = (_c = object.owner) !== null && _c !== void 0 ? _c : new Uint8Array(0); + message.executable = (_d = object.executable) !== null && _d !== void 0 ? _d : false; + message.rentEpoch = (_e = object.rentEpoch) !== null && _e !== void 0 ? _e : "0"; + message.data = (_f = object.data) !== null && _f !== void 0 ? _f : new Uint8Array(0); + message.writeVersion = (_g = object.writeVersion) !== null && _g !== void 0 ? _g : "0"; + message.txnSignature = (_h = object.txnSignature) !== null && _h !== void 0 ? 
_h : undefined; + return message; + }, +}; +function createBaseSubscribeUpdateSlot() { + return { slot: "0", parent: undefined, status: 0, deadError: undefined }; +} +exports.SubscribeUpdateSlot = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.slot !== "0") { + writer.uint32(8).uint64(message.slot); + } + if (message.parent !== undefined) { + writer.uint32(16).uint64(message.parent); + } + if (message.status !== 0) { + writer.uint32(24).int32(message.status); + } + if (message.deadError !== undefined) { + writer.uint32(34).string(message.deadError); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseSubscribeUpdateSlot(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.slot = reader.uint64().toString(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + message.parent = reader.uint64().toString(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + message.status = reader.int32(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + message.deadError = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", + parent: isSet(object.parent) ? globalThis.String(object.parent) : undefined, + status: isSet(object.status) ? slotStatusFromJSON(object.status) : 0, + deadError: isSet(object.deadError) ? 
globalThis.String(object.deadError) : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.slot !== "0") { + obj.slot = message.slot; + } + if (message.parent !== undefined) { + obj.parent = message.parent; + } + if (message.status !== 0) { + obj.status = slotStatusToJSON(message.status); + } + if (message.deadError !== undefined) { + obj.deadError = message.deadError; + } + return obj; + }, + create: function (base) { + return exports.SubscribeUpdateSlot.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c, _d; + var message = createBaseSubscribeUpdateSlot(); + message.slot = (_a = object.slot) !== null && _a !== void 0 ? _a : "0"; + message.parent = (_b = object.parent) !== null && _b !== void 0 ? _b : undefined; + message.status = (_c = object.status) !== null && _c !== void 0 ? _c : 0; + message.deadError = (_d = object.deadError) !== null && _d !== void 0 ? _d : undefined; + return message; + }, +}; +function createBaseSubscribeUpdateTransaction() { + return { transaction: undefined, slot: "0" }; +} +exports.SubscribeUpdateTransaction = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.transaction !== undefined) { + exports.SubscribeUpdateTransactionInfo.encode(message.transaction, writer.uint32(10).fork()).join(); + } + if (message.slot !== "0") { + writer.uint32(16).uint64(message.slot); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseSubscribeUpdateTransaction(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.transaction = exports.SubscribeUpdateTransactionInfo.decode(reader, reader.uint32()); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + message.slot = reader.uint64().toString(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + transaction: isSet(object.transaction) ? exports.SubscribeUpdateTransactionInfo.fromJSON(object.transaction) : undefined, + slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.transaction !== undefined) { + obj.transaction = exports.SubscribeUpdateTransactionInfo.toJSON(message.transaction); + } + if (message.slot !== "0") { + obj.slot = message.slot; + } + return obj; + }, + create: function (base) { + return exports.SubscribeUpdateTransaction.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseSubscribeUpdateTransaction(); + message.transaction = (object.transaction !== undefined && object.transaction !== null) + ? exports.SubscribeUpdateTransactionInfo.fromPartial(object.transaction) + : undefined; + message.slot = (_a = object.slot) !== null && _a !== void 0 ? 
_a : "0"; + return message; + }, +}; +function createBaseSubscribeUpdateTransactionInfo() { + return { signature: new Uint8Array(0), isVote: false, transaction: undefined, meta: undefined, index: "0" }; +} +exports.SubscribeUpdateTransactionInfo = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.signature.length !== 0) { + writer.uint32(10).bytes(message.signature); + } + if (message.isVote !== false) { + writer.uint32(16).bool(message.isVote); + } + if (message.transaction !== undefined) { + solana_storage_1.Transaction.encode(message.transaction, writer.uint32(26).fork()).join(); + } + if (message.meta !== undefined) { + solana_storage_1.TransactionStatusMeta.encode(message.meta, writer.uint32(34).fork()).join(); + } + if (message.index !== "0") { + writer.uint32(40).uint64(message.index); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseSubscribeUpdateTransactionInfo(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.signature = reader.bytes(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + message.isVote = reader.bool(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + message.transaction = solana_storage_1.Transaction.decode(reader, reader.uint32()); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + message.meta = solana_storage_1.TransactionStatusMeta.decode(reader, reader.uint32()); + continue; + } + case 5: { + if (tag !== 40) { + break; + } + message.index = reader.uint64().toString(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + signature: isSet(object.signature) ? bytesFromBase64(object.signature) : new Uint8Array(0), + isVote: isSet(object.isVote) ? globalThis.Boolean(object.isVote) : false, + transaction: isSet(object.transaction) ? solana_storage_1.Transaction.fromJSON(object.transaction) : undefined, + meta: isSet(object.meta) ? solana_storage_1.TransactionStatusMeta.fromJSON(object.meta) : undefined, + index: isSet(object.index) ? 
globalThis.String(object.index) : "0", + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.signature.length !== 0) { + obj.signature = base64FromBytes(message.signature); + } + if (message.isVote !== false) { + obj.isVote = message.isVote; + } + if (message.transaction !== undefined) { + obj.transaction = solana_storage_1.Transaction.toJSON(message.transaction); + } + if (message.meta !== undefined) { + obj.meta = solana_storage_1.TransactionStatusMeta.toJSON(message.meta); + } + if (message.index !== "0") { + obj.index = message.index; + } + return obj; + }, + create: function (base) { + return exports.SubscribeUpdateTransactionInfo.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c; + var message = createBaseSubscribeUpdateTransactionInfo(); + message.signature = (_a = object.signature) !== null && _a !== void 0 ? _a : new Uint8Array(0); + message.isVote = (_b = object.isVote) !== null && _b !== void 0 ? _b : false; + message.transaction = (object.transaction !== undefined && object.transaction !== null) + ? solana_storage_1.Transaction.fromPartial(object.transaction) + : undefined; + message.meta = (object.meta !== undefined && object.meta !== null) + ? solana_storage_1.TransactionStatusMeta.fromPartial(object.meta) + : undefined; + message.index = (_c = object.index) !== null && _c !== void 0 ? 
_c : "0"; + return message; + }, +}; +function createBaseSubscribeUpdateTransactionStatus() { + return { slot: "0", signature: new Uint8Array(0), isVote: false, index: "0", err: undefined }; +} +exports.SubscribeUpdateTransactionStatus = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.slot !== "0") { + writer.uint32(8).uint64(message.slot); + } + if (message.signature.length !== 0) { + writer.uint32(18).bytes(message.signature); + } + if (message.isVote !== false) { + writer.uint32(24).bool(message.isVote); + } + if (message.index !== "0") { + writer.uint32(32).uint64(message.index); + } + if (message.err !== undefined) { + solana_storage_1.TransactionError.encode(message.err, writer.uint32(42).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseSubscribeUpdateTransactionStatus(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.slot = reader.uint64().toString(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.signature = reader.bytes(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + message.isVote = reader.bool(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + message.index = reader.uint64().toString(); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + message.err = solana_storage_1.TransactionError.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", + signature: isSet(object.signature) ? 
bytesFromBase64(object.signature) : new Uint8Array(0), + isVote: isSet(object.isVote) ? globalThis.Boolean(object.isVote) : false, + index: isSet(object.index) ? globalThis.String(object.index) : "0", + err: isSet(object.err) ? solana_storage_1.TransactionError.fromJSON(object.err) : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.slot !== "0") { + obj.slot = message.slot; + } + if (message.signature.length !== 0) { + obj.signature = base64FromBytes(message.signature); + } + if (message.isVote !== false) { + obj.isVote = message.isVote; + } + if (message.index !== "0") { + obj.index = message.index; + } + if (message.err !== undefined) { + obj.err = solana_storage_1.TransactionError.toJSON(message.err); + } + return obj; + }, + create: function (base) { + return exports.SubscribeUpdateTransactionStatus.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c, _d; + var message = createBaseSubscribeUpdateTransactionStatus(); + message.slot = (_a = object.slot) !== null && _a !== void 0 ? _a : "0"; + message.signature = (_b = object.signature) !== null && _b !== void 0 ? _b : new Uint8Array(0); + message.isVote = (_c = object.isVote) !== null && _c !== void 0 ? _c : false; + message.index = (_d = object.index) !== null && _d !== void 0 ? _d : "0"; + message.err = (object.err !== undefined && object.err !== null) + ? 
solana_storage_1.TransactionError.fromPartial(object.err) + : undefined; + return message; + }, +}; +function createBaseSubscribeUpdateBlock() { + return { + slot: "0", + blockhash: "", + rewards: undefined, + blockTime: undefined, + blockHeight: undefined, + parentSlot: "0", + parentBlockhash: "", + executedTransactionCount: "0", + transactions: [], + updatedAccountCount: "0", + accounts: [], + entriesCount: "0", + entries: [], + }; +} +exports.SubscribeUpdateBlock = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.slot !== "0") { + writer.uint32(8).uint64(message.slot); + } + if (message.blockhash !== "") { + writer.uint32(18).string(message.blockhash); + } + if (message.rewards !== undefined) { + solana_storage_1.Rewards.encode(message.rewards, writer.uint32(26).fork()).join(); + } + if (message.blockTime !== undefined) { + solana_storage_1.UnixTimestamp.encode(message.blockTime, writer.uint32(34).fork()).join(); + } + if (message.blockHeight !== undefined) { + solana_storage_1.BlockHeight.encode(message.blockHeight, writer.uint32(42).fork()).join(); + } + if (message.parentSlot !== "0") { + writer.uint32(56).uint64(message.parentSlot); + } + if (message.parentBlockhash !== "") { + writer.uint32(66).string(message.parentBlockhash); + } + if (message.executedTransactionCount !== "0") { + writer.uint32(72).uint64(message.executedTransactionCount); + } + for (var _i = 0, _a = message.transactions; _i < _a.length; _i++) { + var v = _a[_i]; + exports.SubscribeUpdateTransactionInfo.encode(v, writer.uint32(50).fork()).join(); + } + if (message.updatedAccountCount !== "0") { + writer.uint32(80).uint64(message.updatedAccountCount); + } + for (var _b = 0, _c = message.accounts; _b < _c.length; _b++) { + var v = _c[_b]; + exports.SubscribeUpdateAccountInfo.encode(v, writer.uint32(90).fork()).join(); + } + if (message.entriesCount !== "0") { + writer.uint32(96).uint64(message.entriesCount); + } + for 
(var _d = 0, _e = message.entries; _d < _e.length; _d++) { + var v = _e[_d]; + exports.SubscribeUpdateEntry.encode(v, writer.uint32(106).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseSubscribeUpdateBlock(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.slot = reader.uint64().toString(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.blockhash = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + message.rewards = solana_storage_1.Rewards.decode(reader, reader.uint32()); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + message.blockTime = solana_storage_1.UnixTimestamp.decode(reader, reader.uint32()); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + message.blockHeight = solana_storage_1.BlockHeight.decode(reader, reader.uint32()); + continue; + } + case 7: { + if (tag !== 56) { + break; + } + message.parentSlot = reader.uint64().toString(); + continue; + } + case 8: { + if (tag !== 66) { + break; + } + message.parentBlockhash = reader.string(); + continue; + } + case 9: { + if (tag !== 72) { + break; + } + message.executedTransactionCount = reader.uint64().toString(); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + message.transactions.push(exports.SubscribeUpdateTransactionInfo.decode(reader, reader.uint32())); + continue; + } + case 10: { + if (tag !== 80) { + break; + } + message.updatedAccountCount = reader.uint64().toString(); + continue; + } + case 11: { + if (tag !== 90) { + break; + } + message.accounts.push(exports.SubscribeUpdateAccountInfo.decode(reader, reader.uint32())); + continue; + } + case 12: { + if (tag !== 96) { + break; + } + message.entriesCount 
= reader.uint64().toString(); + continue; + } + case 13: { + if (tag !== 106) { + break; + } + message.entries.push(exports.SubscribeUpdateEntry.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", + blockhash: isSet(object.blockhash) ? globalThis.String(object.blockhash) : "", + rewards: isSet(object.rewards) ? solana_storage_1.Rewards.fromJSON(object.rewards) : undefined, + blockTime: isSet(object.blockTime) ? solana_storage_1.UnixTimestamp.fromJSON(object.blockTime) : undefined, + blockHeight: isSet(object.blockHeight) ? solana_storage_1.BlockHeight.fromJSON(object.blockHeight) : undefined, + parentSlot: isSet(object.parentSlot) ? globalThis.String(object.parentSlot) : "0", + parentBlockhash: isSet(object.parentBlockhash) ? globalThis.String(object.parentBlockhash) : "", + executedTransactionCount: isSet(object.executedTransactionCount) + ? globalThis.String(object.executedTransactionCount) + : "0", + transactions: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.transactions) + ? object.transactions.map(function (e) { return exports.SubscribeUpdateTransactionInfo.fromJSON(e); }) + : [], + updatedAccountCount: isSet(object.updatedAccountCount) ? globalThis.String(object.updatedAccountCount) : "0", + accounts: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.accounts) + ? object.accounts.map(function (e) { return exports.SubscribeUpdateAccountInfo.fromJSON(e); }) + : [], + entriesCount: isSet(object.entriesCount) ? globalThis.String(object.entriesCount) : "0", + entries: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.entries) + ? 
object.entries.map(function (e) { return exports.SubscribeUpdateEntry.fromJSON(e); }) + : [], + }; + }, + toJSON: function (message) { + var _a, _b, _c; + var obj = {}; + if (message.slot !== "0") { + obj.slot = message.slot; + } + if (message.blockhash !== "") { + obj.blockhash = message.blockhash; + } + if (message.rewards !== undefined) { + obj.rewards = solana_storage_1.Rewards.toJSON(message.rewards); + } + if (message.blockTime !== undefined) { + obj.blockTime = solana_storage_1.UnixTimestamp.toJSON(message.blockTime); + } + if (message.blockHeight !== undefined) { + obj.blockHeight = solana_storage_1.BlockHeight.toJSON(message.blockHeight); + } + if (message.parentSlot !== "0") { + obj.parentSlot = message.parentSlot; + } + if (message.parentBlockhash !== "") { + obj.parentBlockhash = message.parentBlockhash; + } + if (message.executedTransactionCount !== "0") { + obj.executedTransactionCount = message.executedTransactionCount; + } + if ((_a = message.transactions) === null || _a === void 0 ? void 0 : _a.length) { + obj.transactions = message.transactions.map(function (e) { return exports.SubscribeUpdateTransactionInfo.toJSON(e); }); + } + if (message.updatedAccountCount !== "0") { + obj.updatedAccountCount = message.updatedAccountCount; + } + if ((_b = message.accounts) === null || _b === void 0 ? void 0 : _b.length) { + obj.accounts = message.accounts.map(function (e) { return exports.SubscribeUpdateAccountInfo.toJSON(e); }); + } + if (message.entriesCount !== "0") { + obj.entriesCount = message.entriesCount; + } + if ((_c = message.entries) === null || _c === void 0 ? void 0 : _c.length) { + obj.entries = message.entries.map(function (e) { return exports.SubscribeUpdateEntry.toJSON(e); }); + } + return obj; + }, + create: function (base) { + return exports.SubscribeUpdateBlock.fromPartial(base !== null && base !== void 0 ? 
base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k; + var message = createBaseSubscribeUpdateBlock(); + message.slot = (_a = object.slot) !== null && _a !== void 0 ? _a : "0"; + message.blockhash = (_b = object.blockhash) !== null && _b !== void 0 ? _b : ""; + message.rewards = (object.rewards !== undefined && object.rewards !== null) + ? solana_storage_1.Rewards.fromPartial(object.rewards) + : undefined; + message.blockTime = (object.blockTime !== undefined && object.blockTime !== null) + ? solana_storage_1.UnixTimestamp.fromPartial(object.blockTime) + : undefined; + message.blockHeight = (object.blockHeight !== undefined && object.blockHeight !== null) + ? solana_storage_1.BlockHeight.fromPartial(object.blockHeight) + : undefined; + message.parentSlot = (_c = object.parentSlot) !== null && _c !== void 0 ? _c : "0"; + message.parentBlockhash = (_d = object.parentBlockhash) !== null && _d !== void 0 ? _d : ""; + message.executedTransactionCount = (_e = object.executedTransactionCount) !== null && _e !== void 0 ? _e : "0"; + message.transactions = ((_f = object.transactions) === null || _f === void 0 ? void 0 : _f.map(function (e) { return exports.SubscribeUpdateTransactionInfo.fromPartial(e); })) || []; + message.updatedAccountCount = (_g = object.updatedAccountCount) !== null && _g !== void 0 ? _g : "0"; + message.accounts = ((_h = object.accounts) === null || _h === void 0 ? void 0 : _h.map(function (e) { return exports.SubscribeUpdateAccountInfo.fromPartial(e); })) || []; + message.entriesCount = (_j = object.entriesCount) !== null && _j !== void 0 ? _j : "0"; + message.entries = ((_k = object.entries) === null || _k === void 0 ? 
void 0 : _k.map(function (e) { return exports.SubscribeUpdateEntry.fromPartial(e); })) || []; + return message; + }, +}; +function createBaseSubscribeUpdateBlockMeta() { + return { + slot: "0", + blockhash: "", + rewards: undefined, + blockTime: undefined, + blockHeight: undefined, + parentSlot: "0", + parentBlockhash: "", + executedTransactionCount: "0", + entriesCount: "0", + }; +} +exports.SubscribeUpdateBlockMeta = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.slot !== "0") { + writer.uint32(8).uint64(message.slot); + } + if (message.blockhash !== "") { + writer.uint32(18).string(message.blockhash); + } + if (message.rewards !== undefined) { + solana_storage_1.Rewards.encode(message.rewards, writer.uint32(26).fork()).join(); + } + if (message.blockTime !== undefined) { + solana_storage_1.UnixTimestamp.encode(message.blockTime, writer.uint32(34).fork()).join(); + } + if (message.blockHeight !== undefined) { + solana_storage_1.BlockHeight.encode(message.blockHeight, writer.uint32(42).fork()).join(); + } + if (message.parentSlot !== "0") { + writer.uint32(48).uint64(message.parentSlot); + } + if (message.parentBlockhash !== "") { + writer.uint32(58).string(message.parentBlockhash); + } + if (message.executedTransactionCount !== "0") { + writer.uint32(64).uint64(message.executedTransactionCount); + } + if (message.entriesCount !== "0") { + writer.uint32(72).uint64(message.entriesCount); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseSubscribeUpdateBlockMeta(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.slot = reader.uint64().toString(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.blockhash = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + message.rewards = solana_storage_1.Rewards.decode(reader, reader.uint32()); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + message.blockTime = solana_storage_1.UnixTimestamp.decode(reader, reader.uint32()); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + message.blockHeight = solana_storage_1.BlockHeight.decode(reader, reader.uint32()); + continue; + } + case 6: { + if (tag !== 48) { + break; + } + message.parentSlot = reader.uint64().toString(); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + message.parentBlockhash = reader.string(); + continue; + } + case 8: { + if (tag !== 64) { + break; + } + message.executedTransactionCount = reader.uint64().toString(); + continue; + } + case 9: { + if (tag !== 72) { + break; + } + message.entriesCount = reader.uint64().toString(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", + blockhash: isSet(object.blockhash) ? globalThis.String(object.blockhash) : "", + rewards: isSet(object.rewards) ? solana_storage_1.Rewards.fromJSON(object.rewards) : undefined, + blockTime: isSet(object.blockTime) ? solana_storage_1.UnixTimestamp.fromJSON(object.blockTime) : undefined, + blockHeight: isSet(object.blockHeight) ? solana_storage_1.BlockHeight.fromJSON(object.blockHeight) : undefined, + parentSlot: isSet(object.parentSlot) ? 
globalThis.String(object.parentSlot) : "0", + parentBlockhash: isSet(object.parentBlockhash) ? globalThis.String(object.parentBlockhash) : "", + executedTransactionCount: isSet(object.executedTransactionCount) + ? globalThis.String(object.executedTransactionCount) + : "0", + entriesCount: isSet(object.entriesCount) ? globalThis.String(object.entriesCount) : "0", + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.slot !== "0") { + obj.slot = message.slot; + } + if (message.blockhash !== "") { + obj.blockhash = message.blockhash; + } + if (message.rewards !== undefined) { + obj.rewards = solana_storage_1.Rewards.toJSON(message.rewards); + } + if (message.blockTime !== undefined) { + obj.blockTime = solana_storage_1.UnixTimestamp.toJSON(message.blockTime); + } + if (message.blockHeight !== undefined) { + obj.blockHeight = solana_storage_1.BlockHeight.toJSON(message.blockHeight); + } + if (message.parentSlot !== "0") { + obj.parentSlot = message.parentSlot; + } + if (message.parentBlockhash !== "") { + obj.parentBlockhash = message.parentBlockhash; + } + if (message.executedTransactionCount !== "0") { + obj.executedTransactionCount = message.executedTransactionCount; + } + if (message.entriesCount !== "0") { + obj.entriesCount = message.entriesCount; + } + return obj; + }, + create: function (base) { + return exports.SubscribeUpdateBlockMeta.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c, _d, _e, _f; + var message = createBaseSubscribeUpdateBlockMeta(); + message.slot = (_a = object.slot) !== null && _a !== void 0 ? _a : "0"; + message.blockhash = (_b = object.blockhash) !== null && _b !== void 0 ? _b : ""; + message.rewards = (object.rewards !== undefined && object.rewards !== null) + ? solana_storage_1.Rewards.fromPartial(object.rewards) + : undefined; + message.blockTime = (object.blockTime !== undefined && object.blockTime !== null) + ? 
solana_storage_1.UnixTimestamp.fromPartial(object.blockTime) + : undefined; + message.blockHeight = (object.blockHeight !== undefined && object.blockHeight !== null) + ? solana_storage_1.BlockHeight.fromPartial(object.blockHeight) + : undefined; + message.parentSlot = (_c = object.parentSlot) !== null && _c !== void 0 ? _c : "0"; + message.parentBlockhash = (_d = object.parentBlockhash) !== null && _d !== void 0 ? _d : ""; + message.executedTransactionCount = (_e = object.executedTransactionCount) !== null && _e !== void 0 ? _e : "0"; + message.entriesCount = (_f = object.entriesCount) !== null && _f !== void 0 ? _f : "0"; + return message; + }, +}; +function createBaseSubscribeUpdateEntry() { + return { + slot: "0", + index: "0", + numHashes: "0", + hash: new Uint8Array(0), + executedTransactionCount: "0", + startingTransactionIndex: "0", + }; +} +exports.SubscribeUpdateEntry = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.slot !== "0") { + writer.uint32(8).uint64(message.slot); + } + if (message.index !== "0") { + writer.uint32(16).uint64(message.index); + } + if (message.numHashes !== "0") { + writer.uint32(24).uint64(message.numHashes); + } + if (message.hash.length !== 0) { + writer.uint32(34).bytes(message.hash); + } + if (message.executedTransactionCount !== "0") { + writer.uint32(40).uint64(message.executedTransactionCount); + } + if (message.startingTransactionIndex !== "0") { + writer.uint32(48).uint64(message.startingTransactionIndex); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseSubscribeUpdateEntry(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.slot = reader.uint64().toString(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + message.index = reader.uint64().toString(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + message.numHashes = reader.uint64().toString(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + message.hash = reader.bytes(); + continue; + } + case 5: { + if (tag !== 40) { + break; + } + message.executedTransactionCount = reader.uint64().toString(); + continue; + } + case 6: { + if (tag !== 48) { + break; + } + message.startingTransactionIndex = reader.uint64().toString(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", + index: isSet(object.index) ? globalThis.String(object.index) : "0", + numHashes: isSet(object.numHashes) ? globalThis.String(object.numHashes) : "0", + hash: isSet(object.hash) ? bytesFromBase64(object.hash) : new Uint8Array(0), + executedTransactionCount: isSet(object.executedTransactionCount) + ? globalThis.String(object.executedTransactionCount) + : "0", + startingTransactionIndex: isSet(object.startingTransactionIndex) + ? 
globalThis.String(object.startingTransactionIndex) + : "0", + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.slot !== "0") { + obj.slot = message.slot; + } + if (message.index !== "0") { + obj.index = message.index; + } + if (message.numHashes !== "0") { + obj.numHashes = message.numHashes; + } + if (message.hash.length !== 0) { + obj.hash = base64FromBytes(message.hash); + } + if (message.executedTransactionCount !== "0") { + obj.executedTransactionCount = message.executedTransactionCount; + } + if (message.startingTransactionIndex !== "0") { + obj.startingTransactionIndex = message.startingTransactionIndex; + } + return obj; + }, + create: function (base) { + return exports.SubscribeUpdateEntry.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c, _d, _e, _f; + var message = createBaseSubscribeUpdateEntry(); + message.slot = (_a = object.slot) !== null && _a !== void 0 ? _a : "0"; + message.index = (_b = object.index) !== null && _b !== void 0 ? _b : "0"; + message.numHashes = (_c = object.numHashes) !== null && _c !== void 0 ? _c : "0"; + message.hash = (_d = object.hash) !== null && _d !== void 0 ? _d : new Uint8Array(0); + message.executedTransactionCount = (_e = object.executedTransactionCount) !== null && _e !== void 0 ? _e : "0"; + message.startingTransactionIndex = (_f = object.startingTransactionIndex) !== null && _f !== void 0 ? _f : "0"; + return message; + }, +}; +function createBaseSubscribeUpdatePing() { + return {}; +} +exports.SubscribeUpdatePing = { + encode: function (_, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseSubscribeUpdatePing(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (_) { + return {}; + }, + toJSON: function (_) { + var obj = {}; + return obj; + }, + create: function (base) { + return exports.SubscribeUpdatePing.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (_) { + var message = createBaseSubscribeUpdatePing(); + return message; + }, +}; +function createBaseSubscribeUpdatePong() { + return { id: 0 }; +} +exports.SubscribeUpdatePong = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.id !== 0) { + writer.uint32(8).int32(message.id); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseSubscribeUpdatePong(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.id = reader.int32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { id: isSet(object.id) ? globalThis.Number(object.id) : 0 }; + }, + toJSON: function (message) { + var obj = {}; + if (message.id !== 0) { + obj.id = Math.round(message.id); + } + return obj; + }, + create: function (base) { + return exports.SubscribeUpdatePong.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseSubscribeUpdatePong(); + message.id = (_a = object.id) !== null && _a !== void 0 ? 
_a : 0; + return message; + }, +}; +function createBaseSubscribeReplayInfoRequest() { + return {}; +} +exports.SubscribeReplayInfoRequest = { + encode: function (_, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseSubscribeReplayInfoRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (_) { + return {}; + }, + toJSON: function (_) { + var obj = {}; + return obj; + }, + create: function (base) { + return exports.SubscribeReplayInfoRequest.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (_) { + var message = createBaseSubscribeReplayInfoRequest(); + return message; + }, +}; +function createBaseSubscribeReplayInfoResponse() { + return { firstAvailable: undefined }; +} +exports.SubscribeReplayInfoResponse = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.firstAvailable !== undefined) { + writer.uint32(8).uint64(message.firstAvailable); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseSubscribeReplayInfoResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.firstAvailable = reader.uint64().toString(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { firstAvailable: isSet(object.firstAvailable) ? globalThis.String(object.firstAvailable) : undefined }; + }, + toJSON: function (message) { + var obj = {}; + if (message.firstAvailable !== undefined) { + obj.firstAvailable = message.firstAvailable; + } + return obj; + }, + create: function (base) { + return exports.SubscribeReplayInfoResponse.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseSubscribeReplayInfoResponse(); + message.firstAvailable = (_a = object.firstAvailable) !== null && _a !== void 0 ? _a : undefined; + return message; + }, +}; +function createBasePingRequest() { + return { count: 0 }; +} +exports.PingRequest = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.count !== 0) { + writer.uint32(8).int32(message.count); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBasePingRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.count = reader.int32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { count: isSet(object.count) ? 
globalThis.Number(object.count) : 0 }; + }, + toJSON: function (message) { + var obj = {}; + if (message.count !== 0) { + obj.count = Math.round(message.count); + } + return obj; + }, + create: function (base) { + return exports.PingRequest.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBasePingRequest(); + message.count = (_a = object.count) !== null && _a !== void 0 ? _a : 0; + return message; + }, +}; +function createBasePongResponse() { + return { count: 0 }; +} +exports.PongResponse = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.count !== 0) { + writer.uint32(8).int32(message.count); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBasePongResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.count = reader.int32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { count: isSet(object.count) ? globalThis.Number(object.count) : 0 }; + }, + toJSON: function (message) { + var obj = {}; + if (message.count !== 0) { + obj.count = Math.round(message.count); + } + return obj; + }, + create: function (base) { + return exports.PongResponse.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBasePongResponse(); + message.count = (_a = object.count) !== null && _a !== void 0 ? 
_a : 0; + return message; + }, +}; +function createBaseGetLatestBlockhashRequest() { + return { commitment: undefined }; +} +exports.GetLatestBlockhashRequest = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.commitment !== undefined) { + writer.uint32(8).int32(message.commitment); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseGetLatestBlockhashRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.commitment = reader.int32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { commitment: isSet(object.commitment) ? commitmentLevelFromJSON(object.commitment) : undefined }; + }, + toJSON: function (message) { + var obj = {}; + if (message.commitment !== undefined) { + obj.commitment = commitmentLevelToJSON(message.commitment); + } + return obj; + }, + create: function (base) { + return exports.GetLatestBlockhashRequest.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseGetLatestBlockhashRequest(); + message.commitment = (_a = object.commitment) !== null && _a !== void 0 ? 
_a : undefined; + return message; + }, +}; +function createBaseGetLatestBlockhashResponse() { + return { slot: "0", blockhash: "", lastValidBlockHeight: "0" }; +} +exports.GetLatestBlockhashResponse = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.slot !== "0") { + writer.uint32(8).uint64(message.slot); + } + if (message.blockhash !== "") { + writer.uint32(18).string(message.blockhash); + } + if (message.lastValidBlockHeight !== "0") { + writer.uint32(24).uint64(message.lastValidBlockHeight); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseGetLatestBlockhashResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.slot = reader.uint64().toString(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.blockhash = reader.string(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + message.lastValidBlockHeight = reader.uint64().toString(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", + blockhash: isSet(object.blockhash) ? globalThis.String(object.blockhash) : "", + lastValidBlockHeight: isSet(object.lastValidBlockHeight) ? 
globalThis.String(object.lastValidBlockHeight) : "0", + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.slot !== "0") { + obj.slot = message.slot; + } + if (message.blockhash !== "") { + obj.blockhash = message.blockhash; + } + if (message.lastValidBlockHeight !== "0") { + obj.lastValidBlockHeight = message.lastValidBlockHeight; + } + return obj; + }, + create: function (base) { + return exports.GetLatestBlockhashResponse.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c; + var message = createBaseGetLatestBlockhashResponse(); + message.slot = (_a = object.slot) !== null && _a !== void 0 ? _a : "0"; + message.blockhash = (_b = object.blockhash) !== null && _b !== void 0 ? _b : ""; + message.lastValidBlockHeight = (_c = object.lastValidBlockHeight) !== null && _c !== void 0 ? _c : "0"; + return message; + }, +}; +function createBaseGetBlockHeightRequest() { + return { commitment: undefined }; +} +exports.GetBlockHeightRequest = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.commitment !== undefined) { + writer.uint32(8).int32(message.commitment); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseGetBlockHeightRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.commitment = reader.int32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { commitment: isSet(object.commitment) ? 
commitmentLevelFromJSON(object.commitment) : undefined }; + }, + toJSON: function (message) { + var obj = {}; + if (message.commitment !== undefined) { + obj.commitment = commitmentLevelToJSON(message.commitment); + } + return obj; + }, + create: function (base) { + return exports.GetBlockHeightRequest.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseGetBlockHeightRequest(); + message.commitment = (_a = object.commitment) !== null && _a !== void 0 ? _a : undefined; + return message; + }, +}; +function createBaseGetBlockHeightResponse() { + return { blockHeight: "0" }; +} +exports.GetBlockHeightResponse = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.blockHeight !== "0") { + writer.uint32(8).uint64(message.blockHeight); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseGetBlockHeightResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.blockHeight = reader.uint64().toString(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { blockHeight: isSet(object.blockHeight) ? globalThis.String(object.blockHeight) : "0" }; + }, + toJSON: function (message) { + var obj = {}; + if (message.blockHeight !== "0") { + obj.blockHeight = message.blockHeight; + } + return obj; + }, + create: function (base) { + return exports.GetBlockHeightResponse.fromPartial(base !== null && base !== void 0 ? 
base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseGetBlockHeightResponse(); + message.blockHeight = (_a = object.blockHeight) !== null && _a !== void 0 ? _a : "0"; + return message; + }, +}; +function createBaseGetSlotRequest() { + return { commitment: undefined }; +} +exports.GetSlotRequest = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.commitment !== undefined) { + writer.uint32(8).int32(message.commitment); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseGetSlotRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.commitment = reader.int32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { commitment: isSet(object.commitment) ? commitmentLevelFromJSON(object.commitment) : undefined }; + }, + toJSON: function (message) { + var obj = {}; + if (message.commitment !== undefined) { + obj.commitment = commitmentLevelToJSON(message.commitment); + } + return obj; + }, + create: function (base) { + return exports.GetSlotRequest.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseGetSlotRequest(); + message.commitment = (_a = object.commitment) !== null && _a !== void 0 ? 
_a : undefined; + return message; + }, +}; +function createBaseGetSlotResponse() { + return { slot: "0" }; +} +exports.GetSlotResponse = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.slot !== "0") { + writer.uint32(8).uint64(message.slot); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseGetSlotResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.slot = reader.uint64().toString(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { slot: isSet(object.slot) ? globalThis.String(object.slot) : "0" }; + }, + toJSON: function (message) { + var obj = {}; + if (message.slot !== "0") { + obj.slot = message.slot; + } + return obj; + }, + create: function (base) { + return exports.GetSlotResponse.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseGetSlotResponse(); + message.slot = (_a = object.slot) !== null && _a !== void 0 ? _a : "0"; + return message; + }, +}; +function createBaseGetVersionRequest() { + return {}; +} +exports.GetVersionRequest = { + encode: function (_, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseGetVersionRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (_) { + return {}; + }, + toJSON: function (_) { + var obj = {}; + return obj; + }, + create: function (base) { + return exports.GetVersionRequest.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (_) { + var message = createBaseGetVersionRequest(); + return message; + }, +}; +function createBaseGetVersionResponse() { + return { version: "" }; +} +exports.GetVersionResponse = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.version !== "") { + writer.uint32(10).string(message.version); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseGetVersionResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.version = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { version: isSet(object.version) ? globalThis.String(object.version) : "" }; + }, + toJSON: function (message) { + var obj = {}; + if (message.version !== "") { + obj.version = message.version; + } + return obj; + }, + create: function (base) { + return exports.GetVersionResponse.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseGetVersionResponse(); + message.version = (_a = object.version) !== null && _a !== void 0 ? 
_a : ""; + return message; + }, +}; +function createBaseIsBlockhashValidRequest() { + return { blockhash: "", commitment: undefined }; +} +exports.IsBlockhashValidRequest = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.blockhash !== "") { + writer.uint32(10).string(message.blockhash); + } + if (message.commitment !== undefined) { + writer.uint32(16).int32(message.commitment); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseIsBlockhashValidRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.blockhash = reader.string(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + message.commitment = reader.int32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + blockhash: isSet(object.blockhash) ? globalThis.String(object.blockhash) : "", + commitment: isSet(object.commitment) ? commitmentLevelFromJSON(object.commitment) : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.blockhash !== "") { + obj.blockhash = message.blockhash; + } + if (message.commitment !== undefined) { + obj.commitment = commitmentLevelToJSON(message.commitment); + } + return obj; + }, + create: function (base) { + return exports.IsBlockhashValidRequest.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b; + var message = createBaseIsBlockhashValidRequest(); + message.blockhash = (_a = object.blockhash) !== null && _a !== void 0 ? _a : ""; + message.commitment = (_b = object.commitment) !== null && _b !== void 0 ? 
_b : undefined; + return message; + }, +}; +function createBaseIsBlockhashValidResponse() { + return { slot: "0", valid: false }; +} +exports.IsBlockhashValidResponse = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.slot !== "0") { + writer.uint32(8).uint64(message.slot); + } + if (message.valid !== false) { + writer.uint32(16).bool(message.valid); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseIsBlockhashValidResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.slot = reader.uint64().toString(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + message.valid = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", + valid: isSet(object.valid) ? globalThis.Boolean(object.valid) : false, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.slot !== "0") { + obj.slot = message.slot; + } + if (message.valid !== false) { + obj.valid = message.valid; + } + return obj; + }, + create: function (base) { + return exports.IsBlockhashValidResponse.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b; + var message = createBaseIsBlockhashValidResponse(); + message.slot = (_a = object.slot) !== null && _a !== void 0 ? _a : "0"; + message.valid = (_b = object.valid) !== null && _b !== void 0 ? 
_b : false; + return message; + }, +}; +exports.GeyserService = { + subscribe: { + path: "/geyser.Geyser/Subscribe", + requestStream: true, + responseStream: true, + requestSerialize: function (value) { return Buffer.from(exports.SubscribeRequest.encode(value).finish()); }, + requestDeserialize: function (value) { return exports.SubscribeRequest.decode(value); }, + responseSerialize: function (value) { return Buffer.from(exports.SubscribeUpdate.encode(value).finish()); }, + responseDeserialize: function (value) { return exports.SubscribeUpdate.decode(value); }, + }, + subscribeReplayInfo: { + path: "/geyser.Geyser/SubscribeReplayInfo", + requestStream: false, + responseStream: false, + requestSerialize: function (value) { + return Buffer.from(exports.SubscribeReplayInfoRequest.encode(value).finish()); + }, + requestDeserialize: function (value) { return exports.SubscribeReplayInfoRequest.decode(value); }, + responseSerialize: function (value) { + return Buffer.from(exports.SubscribeReplayInfoResponse.encode(value).finish()); + }, + responseDeserialize: function (value) { return exports.SubscribeReplayInfoResponse.decode(value); }, + }, + ping: { + path: "/geyser.Geyser/Ping", + requestStream: false, + responseStream: false, + requestSerialize: function (value) { return Buffer.from(exports.PingRequest.encode(value).finish()); }, + requestDeserialize: function (value) { return exports.PingRequest.decode(value); }, + responseSerialize: function (value) { return Buffer.from(exports.PongResponse.encode(value).finish()); }, + responseDeserialize: function (value) { return exports.PongResponse.decode(value); }, + }, + getLatestBlockhash: { + path: "/geyser.Geyser/GetLatestBlockhash", + requestStream: false, + responseStream: false, + requestSerialize: function (value) { + return Buffer.from(exports.GetLatestBlockhashRequest.encode(value).finish()); + }, + requestDeserialize: function (value) { return exports.GetLatestBlockhashRequest.decode(value); }, + responseSerialize: 
function (value) { + return Buffer.from(exports.GetLatestBlockhashResponse.encode(value).finish()); + }, + responseDeserialize: function (value) { return exports.GetLatestBlockhashResponse.decode(value); }, + }, + getBlockHeight: { + path: "/geyser.Geyser/GetBlockHeight", + requestStream: false, + responseStream: false, + requestSerialize: function (value) { + return Buffer.from(exports.GetBlockHeightRequest.encode(value).finish()); + }, + requestDeserialize: function (value) { return exports.GetBlockHeightRequest.decode(value); }, + responseSerialize: function (value) { + return Buffer.from(exports.GetBlockHeightResponse.encode(value).finish()); + }, + responseDeserialize: function (value) { return exports.GetBlockHeightResponse.decode(value); }, + }, + getSlot: { + path: "/geyser.Geyser/GetSlot", + requestStream: false, + responseStream: false, + requestSerialize: function (value) { return Buffer.from(exports.GetSlotRequest.encode(value).finish()); }, + requestDeserialize: function (value) { return exports.GetSlotRequest.decode(value); }, + responseSerialize: function (value) { return Buffer.from(exports.GetSlotResponse.encode(value).finish()); }, + responseDeserialize: function (value) { return exports.GetSlotResponse.decode(value); }, + }, + isBlockhashValid: { + path: "/geyser.Geyser/IsBlockhashValid", + requestStream: false, + responseStream: false, + requestSerialize: function (value) { + return Buffer.from(exports.IsBlockhashValidRequest.encode(value).finish()); + }, + requestDeserialize: function (value) { return exports.IsBlockhashValidRequest.decode(value); }, + responseSerialize: function (value) { + return Buffer.from(exports.IsBlockhashValidResponse.encode(value).finish()); + }, + responseDeserialize: function (value) { return exports.IsBlockhashValidResponse.decode(value); }, + }, + getVersion: { + path: "/geyser.Geyser/GetVersion", + requestStream: false, + responseStream: false, + requestSerialize: function (value) { return 
Buffer.from(exports.GetVersionRequest.encode(value).finish()); }, + requestDeserialize: function (value) { return exports.GetVersionRequest.decode(value); }, + responseSerialize: function (value) { return Buffer.from(exports.GetVersionResponse.encode(value).finish()); }, + responseDeserialize: function (value) { return exports.GetVersionResponse.decode(value); }, + }, +}; +exports.GeyserClient = (0, grpc_js_1.makeGenericClientConstructor)(exports.GeyserService, "geyser.Geyser"); +function bytesFromBase64(b64) { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } + else { + var bin = globalThis.atob(b64); + var arr = new Uint8Array(bin.length); + for (var i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } + else { + var bin_1 = []; + arr.forEach(function (byte) { + bin_1.push(globalThis.String.fromCharCode(byte)); + }); + return globalThis.btoa(bin_1.join("")); + } +} +function toTimestamp(date) { + var seconds = Math.trunc(date.getTime() / 1000).toString(); + var nanos = (date.getTime() % 1000) * 1000000; + return { seconds: seconds, nanos: nanos }; +} +function fromTimestamp(t) { + var millis = (globalThis.Number(t.seconds) || 0) * 1000; + millis += (t.nanos || 0) / 1000000; + return new globalThis.Date(millis); +} +function fromJsonTimestamp(o) { + if (o instanceof globalThis.Date) { + return o; + } + else if (typeof o === "string") { + return new globalThis.Date(o); + } + else { + return fromTimestamp(timestamp_1.Timestamp.fromJSON(o)); + } +} +function isObject(value) { + return typeof value === "object" && value !== null; +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/typescript-sdk/src/grpc/google/protobuf/timestamp.js b/typescript-sdk/src/grpc/google/protobuf/timestamp.js new file mode 100644 index 0000000..88c6225 --- 
/dev/null +++ b/typescript-sdk/src/grpc/google/protobuf/timestamp.js @@ -0,0 +1,84 @@ +"use strict"; +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.7.7 +// protoc v3.12.4 +// source: google/protobuf/timestamp.proto +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Timestamp = exports.protobufPackage = void 0; +/* eslint-disable */ +var wire_1 = require("@bufbuild/protobuf/wire"); +exports.protobufPackage = "google.protobuf"; +function createBaseTimestamp() { + return { seconds: "0", nanos: 0 }; +} +exports.Timestamp = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.seconds !== "0") { + writer.uint32(8).int64(message.seconds); + } + if (message.nanos !== 0) { + writer.uint32(16).int32(message.nanos); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseTimestamp(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.seconds = reader.int64().toString(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + message.nanos = reader.int32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + seconds: isSet(object.seconds) ? globalThis.String(object.seconds) : "0", + nanos: isSet(object.nanos) ? 
globalThis.Number(object.nanos) : 0, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.seconds !== "0") { + obj.seconds = message.seconds; + } + if (message.nanos !== 0) { + obj.nanos = Math.round(message.nanos); + } + return obj; + }, + create: function (base) { + return exports.Timestamp.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b; + var message = createBaseTimestamp(); + message.seconds = (_a = object.seconds) !== null && _a !== void 0 ? _a : "0"; + message.nanos = (_b = object.nanos) !== null && _b !== void 0 ? _b : 0; + return message; + }, +}; +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/typescript-sdk/src/grpc/solana-storage.js b/typescript-sdk/src/grpc/solana-storage.js new file mode 100644 index 0000000..416d842 --- /dev/null +++ b/typescript-sdk/src/grpc/solana-storage.js @@ -0,0 +1,2055 @@ +"use strict"; +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. 
+// versions: +// protoc-gen-ts_proto v2.7.7 +// protoc v3.12.4 +// source: solana-storage.proto +Object.defineProperty(exports, "__esModule", { value: true }); +exports.NumPartitions = exports.BlockHeight = exports.UnixTimestamp = exports.Rewards = exports.Reward = exports.ReturnData = exports.UiTokenAmount = exports.TokenBalance = exports.CompiledInstruction = exports.InnerInstruction = exports.InnerInstructions = exports.TransactionError = exports.TransactionStatusMeta = exports.MessageAddressTableLookup = exports.MessageHeader = exports.Message = exports.Transaction = exports.ConfirmedTransaction = exports.ConfirmedBlock = exports.RewardType = exports.protobufPackage = void 0; +exports.rewardTypeFromJSON = rewardTypeFromJSON; +exports.rewardTypeToJSON = rewardTypeToJSON; +/* eslint-disable */ +var wire_1 = require("@bufbuild/protobuf/wire"); +exports.protobufPackage = "solana.storage.ConfirmedBlock"; +var RewardType; +(function (RewardType) { + RewardType[RewardType["Unspecified"] = 0] = "Unspecified"; + RewardType[RewardType["Fee"] = 1] = "Fee"; + RewardType[RewardType["Rent"] = 2] = "Rent"; + RewardType[RewardType["Staking"] = 3] = "Staking"; + RewardType[RewardType["Voting"] = 4] = "Voting"; + RewardType[RewardType["UNRECOGNIZED"] = -1] = "UNRECOGNIZED"; +})(RewardType || (exports.RewardType = RewardType = {})); +function rewardTypeFromJSON(object) { + switch (object) { + case 0: + case "Unspecified": + return RewardType.Unspecified; + case 1: + case "Fee": + return RewardType.Fee; + case 2: + case "Rent": + return RewardType.Rent; + case 3: + case "Staking": + return RewardType.Staking; + case 4: + case "Voting": + return RewardType.Voting; + case -1: + case "UNRECOGNIZED": + default: + return RewardType.UNRECOGNIZED; + } +} +function rewardTypeToJSON(object) { + switch (object) { + case RewardType.Unspecified: + return "Unspecified"; + case RewardType.Fee: + return "Fee"; + case RewardType.Rent: + return "Rent"; + case RewardType.Staking: + return 
"Staking"; + case RewardType.Voting: + return "Voting"; + case RewardType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} +function createBaseConfirmedBlock() { + return { + previousBlockhash: "", + blockhash: "", + parentSlot: "0", + transactions: [], + rewards: [], + blockTime: undefined, + blockHeight: undefined, + numPartitions: undefined, + }; +} +exports.ConfirmedBlock = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.previousBlockhash !== "") { + writer.uint32(10).string(message.previousBlockhash); + } + if (message.blockhash !== "") { + writer.uint32(18).string(message.blockhash); + } + if (message.parentSlot !== "0") { + writer.uint32(24).uint64(message.parentSlot); + } + for (var _i = 0, _a = message.transactions; _i < _a.length; _i++) { + var v = _a[_i]; + exports.ConfirmedTransaction.encode(v, writer.uint32(34).fork()).join(); + } + for (var _b = 0, _c = message.rewards; _b < _c.length; _b++) { + var v = _c[_b]; + exports.Reward.encode(v, writer.uint32(42).fork()).join(); + } + if (message.blockTime !== undefined) { + exports.UnixTimestamp.encode(message.blockTime, writer.uint32(50).fork()).join(); + } + if (message.blockHeight !== undefined) { + exports.BlockHeight.encode(message.blockHeight, writer.uint32(58).fork()).join(); + } + if (message.numPartitions !== undefined) { + exports.NumPartitions.encode(message.numPartitions, writer.uint32(66).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseConfirmedBlock(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.previousBlockhash = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.blockhash = reader.string(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + message.parentSlot = reader.uint64().toString(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + message.transactions.push(exports.ConfirmedTransaction.decode(reader, reader.uint32())); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + message.rewards.push(exports.Reward.decode(reader, reader.uint32())); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + message.blockTime = exports.UnixTimestamp.decode(reader, reader.uint32()); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + message.blockHeight = exports.BlockHeight.decode(reader, reader.uint32()); + continue; + } + case 8: { + if (tag !== 66) { + break; + } + message.numPartitions = exports.NumPartitions.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + previousBlockhash: isSet(object.previousBlockhash) ? globalThis.String(object.previousBlockhash) : "", + blockhash: isSet(object.blockhash) ? globalThis.String(object.blockhash) : "", + parentSlot: isSet(object.parentSlot) ? globalThis.String(object.parentSlot) : "0", + transactions: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.transactions) + ? object.transactions.map(function (e) { return exports.ConfirmedTransaction.fromJSON(e); }) + : [], + rewards: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.rewards) ? 
object.rewards.map(function (e) { return exports.Reward.fromJSON(e); }) : [], + blockTime: isSet(object.blockTime) ? exports.UnixTimestamp.fromJSON(object.blockTime) : undefined, + blockHeight: isSet(object.blockHeight) ? exports.BlockHeight.fromJSON(object.blockHeight) : undefined, + numPartitions: isSet(object.numPartitions) ? exports.NumPartitions.fromJSON(object.numPartitions) : undefined, + }; + }, + toJSON: function (message) { + var _a, _b; + var obj = {}; + if (message.previousBlockhash !== "") { + obj.previousBlockhash = message.previousBlockhash; + } + if (message.blockhash !== "") { + obj.blockhash = message.blockhash; + } + if (message.parentSlot !== "0") { + obj.parentSlot = message.parentSlot; + } + if ((_a = message.transactions) === null || _a === void 0 ? void 0 : _a.length) { + obj.transactions = message.transactions.map(function (e) { return exports.ConfirmedTransaction.toJSON(e); }); + } + if ((_b = message.rewards) === null || _b === void 0 ? void 0 : _b.length) { + obj.rewards = message.rewards.map(function (e) { return exports.Reward.toJSON(e); }); + } + if (message.blockTime !== undefined) { + obj.blockTime = exports.UnixTimestamp.toJSON(message.blockTime); + } + if (message.blockHeight !== undefined) { + obj.blockHeight = exports.BlockHeight.toJSON(message.blockHeight); + } + if (message.numPartitions !== undefined) { + obj.numPartitions = exports.NumPartitions.toJSON(message.numPartitions); + } + return obj; + }, + create: function (base) { + return exports.ConfirmedBlock.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c, _d, _e; + var message = createBaseConfirmedBlock(); + message.previousBlockhash = (_a = object.previousBlockhash) !== null && _a !== void 0 ? _a : ""; + message.blockhash = (_b = object.blockhash) !== null && _b !== void 0 ? _b : ""; + message.parentSlot = (_c = object.parentSlot) !== null && _c !== void 0 ? 
_c : "0"; + message.transactions = ((_d = object.transactions) === null || _d === void 0 ? void 0 : _d.map(function (e) { return exports.ConfirmedTransaction.fromPartial(e); })) || []; + message.rewards = ((_e = object.rewards) === null || _e === void 0 ? void 0 : _e.map(function (e) { return exports.Reward.fromPartial(e); })) || []; + message.blockTime = (object.blockTime !== undefined && object.blockTime !== null) + ? exports.UnixTimestamp.fromPartial(object.blockTime) + : undefined; + message.blockHeight = (object.blockHeight !== undefined && object.blockHeight !== null) + ? exports.BlockHeight.fromPartial(object.blockHeight) + : undefined; + message.numPartitions = (object.numPartitions !== undefined && object.numPartitions !== null) + ? exports.NumPartitions.fromPartial(object.numPartitions) + : undefined; + return message; + }, +}; +function createBaseConfirmedTransaction() { + return { transaction: undefined, meta: undefined }; +} +exports.ConfirmedTransaction = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.transaction !== undefined) { + exports.Transaction.encode(message.transaction, writer.uint32(10).fork()).join(); + } + if (message.meta !== undefined) { + exports.TransactionStatusMeta.encode(message.meta, writer.uint32(18).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseConfirmedTransaction(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.transaction = exports.Transaction.decode(reader, reader.uint32()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.meta = exports.TransactionStatusMeta.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + transaction: isSet(object.transaction) ? exports.Transaction.fromJSON(object.transaction) : undefined, + meta: isSet(object.meta) ? exports.TransactionStatusMeta.fromJSON(object.meta) : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.transaction !== undefined) { + obj.transaction = exports.Transaction.toJSON(message.transaction); + } + if (message.meta !== undefined) { + obj.meta = exports.TransactionStatusMeta.toJSON(message.meta); + } + return obj; + }, + create: function (base) { + return exports.ConfirmedTransaction.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var message = createBaseConfirmedTransaction(); + message.transaction = (object.transaction !== undefined && object.transaction !== null) + ? exports.Transaction.fromPartial(object.transaction) + : undefined; + message.meta = (object.meta !== undefined && object.meta !== null) + ? 
exports.TransactionStatusMeta.fromPartial(object.meta) + : undefined; + return message; + }, +}; +function createBaseTransaction() { + return { signatures: [], message: undefined }; +} +exports.Transaction = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + for (var _i = 0, _a = message.signatures; _i < _a.length; _i++) { + var v = _a[_i]; + writer.uint32(10).bytes(v); + } + if (message.message !== undefined) { + exports.Message.encode(message.message, writer.uint32(18).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseTransaction(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.signatures.push(reader.bytes()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.message = exports.Message.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + signatures: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.signatures) + ? object.signatures.map(function (e) { return bytesFromBase64(e); }) + : [], + message: isSet(object.message) ? exports.Message.fromJSON(object.message) : undefined, + }; + }, + toJSON: function (message) { + var _a; + var obj = {}; + if ((_a = message.signatures) === null || _a === void 0 ? 
void 0 : _a.length) { + obj.signatures = message.signatures.map(function (e) { return base64FromBytes(e); }); + } + if (message.message !== undefined) { + obj.message = exports.Message.toJSON(message.message); + } + return obj; + }, + create: function (base) { + return exports.Transaction.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseTransaction(); + message.signatures = ((_a = object.signatures) === null || _a === void 0 ? void 0 : _a.map(function (e) { return e; })) || []; + message.message = (object.message !== undefined && object.message !== null) + ? exports.Message.fromPartial(object.message) + : undefined; + return message; + }, +}; +function createBaseMessage() { + return { + header: undefined, + accountKeys: [], + recentBlockhash: new Uint8Array(0), + instructions: [], + versioned: false, + addressTableLookups: [], + }; +} +exports.Message = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.header !== undefined) { + exports.MessageHeader.encode(message.header, writer.uint32(10).fork()).join(); + } + for (var _i = 0, _a = message.accountKeys; _i < _a.length; _i++) { + var v = _a[_i]; + writer.uint32(18).bytes(v); + } + if (message.recentBlockhash.length !== 0) { + writer.uint32(26).bytes(message.recentBlockhash); + } + for (var _b = 0, _c = message.instructions; _b < _c.length; _b++) { + var v = _c[_b]; + exports.CompiledInstruction.encode(v, writer.uint32(34).fork()).join(); + } + if (message.versioned !== false) { + writer.uint32(40).bool(message.versioned); + } + for (var _d = 0, _e = message.addressTableLookups; _d < _e.length; _d++) { + var v = _e[_d]; + exports.MessageAddressTableLookup.encode(v, writer.uint32(50).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? 
input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseMessage(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.header = exports.MessageHeader.decode(reader, reader.uint32()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.accountKeys.push(reader.bytes()); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + message.recentBlockhash = reader.bytes(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + message.instructions.push(exports.CompiledInstruction.decode(reader, reader.uint32())); + continue; + } + case 5: { + if (tag !== 40) { + break; + } + message.versioned = reader.bool(); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + message.addressTableLookups.push(exports.MessageAddressTableLookup.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + header: isSet(object.header) ? exports.MessageHeader.fromJSON(object.header) : undefined, + accountKeys: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.accountKeys) + ? object.accountKeys.map(function (e) { return bytesFromBase64(e); }) + : [], + recentBlockhash: isSet(object.recentBlockhash) ? bytesFromBase64(object.recentBlockhash) : new Uint8Array(0), + instructions: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.instructions) + ? object.instructions.map(function (e) { return exports.CompiledInstruction.fromJSON(e); }) + : [], + versioned: isSet(object.versioned) ? globalThis.Boolean(object.versioned) : false, + addressTableLookups: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.addressTableLookups) + ? 
object.addressTableLookups.map(function (e) { return exports.MessageAddressTableLookup.fromJSON(e); }) + : [], + }; + }, + toJSON: function (message) { + var _a, _b, _c; + var obj = {}; + if (message.header !== undefined) { + obj.header = exports.MessageHeader.toJSON(message.header); + } + if ((_a = message.accountKeys) === null || _a === void 0 ? void 0 : _a.length) { + obj.accountKeys = message.accountKeys.map(function (e) { return base64FromBytes(e); }); + } + if (message.recentBlockhash.length !== 0) { + obj.recentBlockhash = base64FromBytes(message.recentBlockhash); + } + if ((_b = message.instructions) === null || _b === void 0 ? void 0 : _b.length) { + obj.instructions = message.instructions.map(function (e) { return exports.CompiledInstruction.toJSON(e); }); + } + if (message.versioned !== false) { + obj.versioned = message.versioned; + } + if ((_c = message.addressTableLookups) === null || _c === void 0 ? void 0 : _c.length) { + obj.addressTableLookups = message.addressTableLookups.map(function (e) { return exports.MessageAddressTableLookup.toJSON(e); }); + } + return obj; + }, + create: function (base) { + return exports.Message.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c, _d, _e; + var message = createBaseMessage(); + message.header = (object.header !== undefined && object.header !== null) + ? exports.MessageHeader.fromPartial(object.header) + : undefined; + message.accountKeys = ((_a = object.accountKeys) === null || _a === void 0 ? void 0 : _a.map(function (e) { return e; })) || []; + message.recentBlockhash = (_b = object.recentBlockhash) !== null && _b !== void 0 ? _b : new Uint8Array(0); + message.instructions = ((_c = object.instructions) === null || _c === void 0 ? void 0 : _c.map(function (e) { return exports.CompiledInstruction.fromPartial(e); })) || []; + message.versioned = (_d = object.versioned) !== null && _d !== void 0 ? 
_d : false; + message.addressTableLookups = ((_e = object.addressTableLookups) === null || _e === void 0 ? void 0 : _e.map(function (e) { return exports.MessageAddressTableLookup.fromPartial(e); })) || + []; + return message; + }, +}; +function createBaseMessageHeader() { + return { numRequiredSignatures: 0, numReadonlySignedAccounts: 0, numReadonlyUnsignedAccounts: 0 }; +} +exports.MessageHeader = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.numRequiredSignatures !== 0) { + writer.uint32(8).uint32(message.numRequiredSignatures); + } + if (message.numReadonlySignedAccounts !== 0) { + writer.uint32(16).uint32(message.numReadonlySignedAccounts); + } + if (message.numReadonlyUnsignedAccounts !== 0) { + writer.uint32(24).uint32(message.numReadonlyUnsignedAccounts); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseMessageHeader(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.numRequiredSignatures = reader.uint32(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + message.numReadonlySignedAccounts = reader.uint32(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + message.numReadonlyUnsignedAccounts = reader.uint32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + numRequiredSignatures: isSet(object.numRequiredSignatures) ? globalThis.Number(object.numRequiredSignatures) : 0, + numReadonlySignedAccounts: isSet(object.numReadonlySignedAccounts) + ? 
globalThis.Number(object.numReadonlySignedAccounts) + : 0, + numReadonlyUnsignedAccounts: isSet(object.numReadonlyUnsignedAccounts) + ? globalThis.Number(object.numReadonlyUnsignedAccounts) + : 0, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.numRequiredSignatures !== 0) { + obj.numRequiredSignatures = Math.round(message.numRequiredSignatures); + } + if (message.numReadonlySignedAccounts !== 0) { + obj.numReadonlySignedAccounts = Math.round(message.numReadonlySignedAccounts); + } + if (message.numReadonlyUnsignedAccounts !== 0) { + obj.numReadonlyUnsignedAccounts = Math.round(message.numReadonlyUnsignedAccounts); + } + return obj; + }, + create: function (base) { + return exports.MessageHeader.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c; + var message = createBaseMessageHeader(); + message.numRequiredSignatures = (_a = object.numRequiredSignatures) !== null && _a !== void 0 ? _a : 0; + message.numReadonlySignedAccounts = (_b = object.numReadonlySignedAccounts) !== null && _b !== void 0 ? _b : 0; + message.numReadonlyUnsignedAccounts = (_c = object.numReadonlyUnsignedAccounts) !== null && _c !== void 0 ? _c : 0; + return message; + }, +}; +function createBaseMessageAddressTableLookup() { + return { accountKey: new Uint8Array(0), writableIndexes: new Uint8Array(0), readonlyIndexes: new Uint8Array(0) }; +} +exports.MessageAddressTableLookup = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.accountKey.length !== 0) { + writer.uint32(10).bytes(message.accountKey); + } + if (message.writableIndexes.length !== 0) { + writer.uint32(18).bytes(message.writableIndexes); + } + if (message.readonlyIndexes.length !== 0) { + writer.uint32(26).bytes(message.readonlyIndexes); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? 
input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseMessageAddressTableLookup(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.accountKey = reader.bytes(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.writableIndexes = reader.bytes(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + message.readonlyIndexes = reader.bytes(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + accountKey: isSet(object.accountKey) ? bytesFromBase64(object.accountKey) : new Uint8Array(0), + writableIndexes: isSet(object.writableIndexes) ? bytesFromBase64(object.writableIndexes) : new Uint8Array(0), + readonlyIndexes: isSet(object.readonlyIndexes) ? bytesFromBase64(object.readonlyIndexes) : new Uint8Array(0), + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.accountKey.length !== 0) { + obj.accountKey = base64FromBytes(message.accountKey); + } + if (message.writableIndexes.length !== 0) { + obj.writableIndexes = base64FromBytes(message.writableIndexes); + } + if (message.readonlyIndexes.length !== 0) { + obj.readonlyIndexes = base64FromBytes(message.readonlyIndexes); + } + return obj; + }, + create: function (base) { + return exports.MessageAddressTableLookup.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c; + var message = createBaseMessageAddressTableLookup(); + message.accountKey = (_a = object.accountKey) !== null && _a !== void 0 ? _a : new Uint8Array(0); + message.writableIndexes = (_b = object.writableIndexes) !== null && _b !== void 0 ? _b : new Uint8Array(0); + message.readonlyIndexes = (_c = object.readonlyIndexes) !== null && _c !== void 0 ? 
_c : new Uint8Array(0); + return message; + }, +}; +function createBaseTransactionStatusMeta() { + return { + err: undefined, + fee: "0", + preBalances: [], + postBalances: [], + innerInstructions: [], + innerInstructionsNone: false, + logMessages: [], + logMessagesNone: false, + preTokenBalances: [], + postTokenBalances: [], + rewards: [], + loadedWritableAddresses: [], + loadedReadonlyAddresses: [], + returnData: undefined, + returnDataNone: false, + computeUnitsConsumed: undefined, + costUnits: undefined, + }; +} +exports.TransactionStatusMeta = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.err !== undefined) { + exports.TransactionError.encode(message.err, writer.uint32(10).fork()).join(); + } + if (message.fee !== "0") { + writer.uint32(16).uint64(message.fee); + } + writer.uint32(26).fork(); + for (var _i = 0, _a = message.preBalances; _i < _a.length; _i++) { + var v = _a[_i]; + writer.uint64(v); + } + writer.join(); + writer.uint32(34).fork(); + for (var _b = 0, _c = message.postBalances; _b < _c.length; _b++) { + var v = _c[_b]; + writer.uint64(v); + } + writer.join(); + for (var _d = 0, _e = message.innerInstructions; _d < _e.length; _d++) { + var v = _e[_d]; + exports.InnerInstructions.encode(v, writer.uint32(42).fork()).join(); + } + if (message.innerInstructionsNone !== false) { + writer.uint32(80).bool(message.innerInstructionsNone); + } + for (var _f = 0, _g = message.logMessages; _f < _g.length; _f++) { + var v = _g[_f]; + writer.uint32(50).string(v); + } + if (message.logMessagesNone !== false) { + writer.uint32(88).bool(message.logMessagesNone); + } + for (var _h = 0, _j = message.preTokenBalances; _h < _j.length; _h++) { + var v = _j[_h]; + exports.TokenBalance.encode(v, writer.uint32(58).fork()).join(); + } + for (var _k = 0, _l = message.postTokenBalances; _k < _l.length; _k++) { + var v = _l[_k]; + exports.TokenBalance.encode(v, writer.uint32(66).fork()).join(); + } + for 
(var _m = 0, _o = message.rewards; _m < _o.length; _m++) { + var v = _o[_m]; + exports.Reward.encode(v, writer.uint32(74).fork()).join(); + } + for (var _p = 0, _q = message.loadedWritableAddresses; _p < _q.length; _p++) { + var v = _q[_p]; + writer.uint32(98).bytes(v); + } + for (var _r = 0, _s = message.loadedReadonlyAddresses; _r < _s.length; _r++) { + var v = _s[_r]; + writer.uint32(106).bytes(v); + } + if (message.returnData !== undefined) { + exports.ReturnData.encode(message.returnData, writer.uint32(114).fork()).join(); + } + if (message.returnDataNone !== false) { + writer.uint32(120).bool(message.returnDataNone); + } + if (message.computeUnitsConsumed !== undefined) { + writer.uint32(128).uint64(message.computeUnitsConsumed); + } + if (message.costUnits !== undefined) { + writer.uint32(136).uint64(message.costUnits); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseTransactionStatusMeta(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.err = exports.TransactionError.decode(reader, reader.uint32()); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + message.fee = reader.uint64().toString(); + continue; + } + case 3: { + if (tag === 24) { + message.preBalances.push(reader.uint64().toString()); + continue; + } + if (tag === 26) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.preBalances.push(reader.uint64().toString()); + } + continue; + } + break; + } + case 4: { + if (tag === 32) { + message.postBalances.push(reader.uint64().toString()); + continue; + } + if (tag === 34) { + var end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.postBalances.push(reader.uint64().toString()); + } + continue; + } + break; + } + case 5: { + if (tag !== 42) { + break; + } + message.innerInstructions.push(exports.InnerInstructions.decode(reader, reader.uint32())); + continue; + } + case 10: { + if (tag !== 80) { + break; + } + message.innerInstructionsNone = reader.bool(); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + message.logMessages.push(reader.string()); + continue; + } + case 11: { + if (tag !== 88) { + break; + } + message.logMessagesNone = reader.bool(); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + message.preTokenBalances.push(exports.TokenBalance.decode(reader, reader.uint32())); + continue; + } + case 8: { + if (tag !== 66) { + break; + } + message.postTokenBalances.push(exports.TokenBalance.decode(reader, reader.uint32())); + continue; + } + case 9: { + if (tag !== 74) { + break; + } + message.rewards.push(exports.Reward.decode(reader, reader.uint32())); + continue; + } + case 12: { + if (tag !== 98) { + break; + } + message.loadedWritableAddresses.push(reader.bytes()); + continue; 
+ } + case 13: { + if (tag !== 106) { + break; + } + message.loadedReadonlyAddresses.push(reader.bytes()); + continue; + } + case 14: { + if (tag !== 114) { + break; + } + message.returnData = exports.ReturnData.decode(reader, reader.uint32()); + continue; + } + case 15: { + if (tag !== 120) { + break; + } + message.returnDataNone = reader.bool(); + continue; + } + case 16: { + if (tag !== 128) { + break; + } + message.computeUnitsConsumed = reader.uint64().toString(); + continue; + } + case 17: { + if (tag !== 136) { + break; + } + message.costUnits = reader.uint64().toString(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + err: isSet(object.err) ? exports.TransactionError.fromJSON(object.err) : undefined, + fee: isSet(object.fee) ? globalThis.String(object.fee) : "0", + preBalances: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.preBalances) + ? object.preBalances.map(function (e) { return globalThis.String(e); }) + : [], + postBalances: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.postBalances) + ? object.postBalances.map(function (e) { return globalThis.String(e); }) + : [], + innerInstructions: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.innerInstructions) + ? object.innerInstructions.map(function (e) { return exports.InnerInstructions.fromJSON(e); }) + : [], + innerInstructionsNone: isSet(object.innerInstructionsNone) + ? globalThis.Boolean(object.innerInstructionsNone) + : false, + logMessages: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.logMessages) + ? object.logMessages.map(function (e) { return globalThis.String(e); }) + : [], + logMessagesNone: isSet(object.logMessagesNone) ? 
globalThis.Boolean(object.logMessagesNone) : false, + preTokenBalances: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.preTokenBalances) + ? object.preTokenBalances.map(function (e) { return exports.TokenBalance.fromJSON(e); }) + : [], + postTokenBalances: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.postTokenBalances) + ? object.postTokenBalances.map(function (e) { return exports.TokenBalance.fromJSON(e); }) + : [], + rewards: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.rewards) ? object.rewards.map(function (e) { return exports.Reward.fromJSON(e); }) : [], + loadedWritableAddresses: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.loadedWritableAddresses) + ? object.loadedWritableAddresses.map(function (e) { return bytesFromBase64(e); }) + : [], + loadedReadonlyAddresses: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.loadedReadonlyAddresses) + ? object.loadedReadonlyAddresses.map(function (e) { return bytesFromBase64(e); }) + : [], + returnData: isSet(object.returnData) ? exports.ReturnData.fromJSON(object.returnData) : undefined, + returnDataNone: isSet(object.returnDataNone) ? globalThis.Boolean(object.returnDataNone) : false, + computeUnitsConsumed: isSet(object.computeUnitsConsumed) + ? globalThis.String(object.computeUnitsConsumed) + : undefined, + costUnits: isSet(object.costUnits) ? globalThis.String(object.costUnits) : undefined, + }; + }, + toJSON: function (message) { + var _a, _b, _c, _d, _e, _f, _g, _h, _j; + var obj = {}; + if (message.err !== undefined) { + obj.err = exports.TransactionError.toJSON(message.err); + } + if (message.fee !== "0") { + obj.fee = message.fee; + } + if ((_a = message.preBalances) === null || _a === void 0 ? void 0 : _a.length) { + obj.preBalances = message.preBalances; + } + if ((_b = message.postBalances) === null || _b === void 0 ? 
void 0 : _b.length) { + obj.postBalances = message.postBalances; + } + if ((_c = message.innerInstructions) === null || _c === void 0 ? void 0 : _c.length) { + obj.innerInstructions = message.innerInstructions.map(function (e) { return exports.InnerInstructions.toJSON(e); }); + } + if (message.innerInstructionsNone !== false) { + obj.innerInstructionsNone = message.innerInstructionsNone; + } + if ((_d = message.logMessages) === null || _d === void 0 ? void 0 : _d.length) { + obj.logMessages = message.logMessages; + } + if (message.logMessagesNone !== false) { + obj.logMessagesNone = message.logMessagesNone; + } + if ((_e = message.preTokenBalances) === null || _e === void 0 ? void 0 : _e.length) { + obj.preTokenBalances = message.preTokenBalances.map(function (e) { return exports.TokenBalance.toJSON(e); }); + } + if ((_f = message.postTokenBalances) === null || _f === void 0 ? void 0 : _f.length) { + obj.postTokenBalances = message.postTokenBalances.map(function (e) { return exports.TokenBalance.toJSON(e); }); + } + if ((_g = message.rewards) === null || _g === void 0 ? void 0 : _g.length) { + obj.rewards = message.rewards.map(function (e) { return exports.Reward.toJSON(e); }); + } + if ((_h = message.loadedWritableAddresses) === null || _h === void 0 ? void 0 : _h.length) { + obj.loadedWritableAddresses = message.loadedWritableAddresses.map(function (e) { return base64FromBytes(e); }); + } + if ((_j = message.loadedReadonlyAddresses) === null || _j === void 0 ? 
void 0 : _j.length) { + obj.loadedReadonlyAddresses = message.loadedReadonlyAddresses.map(function (e) { return base64FromBytes(e); }); + } + if (message.returnData !== undefined) { + obj.returnData = exports.ReturnData.toJSON(message.returnData); + } + if (message.returnDataNone !== false) { + obj.returnDataNone = message.returnDataNone; + } + if (message.computeUnitsConsumed !== undefined) { + obj.computeUnitsConsumed = message.computeUnitsConsumed; + } + if (message.costUnits !== undefined) { + obj.costUnits = message.costUnits; + } + return obj; + }, + create: function (base) { + return exports.TransactionStatusMeta.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q; + var message = createBaseTransactionStatusMeta(); + message.err = (object.err !== undefined && object.err !== null) + ? exports.TransactionError.fromPartial(object.err) + : undefined; + message.fee = (_a = object.fee) !== null && _a !== void 0 ? _a : "0"; + message.preBalances = ((_b = object.preBalances) === null || _b === void 0 ? void 0 : _b.map(function (e) { return e; })) || []; + message.postBalances = ((_c = object.postBalances) === null || _c === void 0 ? void 0 : _c.map(function (e) { return e; })) || []; + message.innerInstructions = ((_d = object.innerInstructions) === null || _d === void 0 ? void 0 : _d.map(function (e) { return exports.InnerInstructions.fromPartial(e); })) || []; + message.innerInstructionsNone = (_e = object.innerInstructionsNone) !== null && _e !== void 0 ? _e : false; + message.logMessages = ((_f = object.logMessages) === null || _f === void 0 ? void 0 : _f.map(function (e) { return e; })) || []; + message.logMessagesNone = (_g = object.logMessagesNone) !== null && _g !== void 0 ? _g : false; + message.preTokenBalances = ((_h = object.preTokenBalances) === null || _h === void 0 ? 
void 0 : _h.map(function (e) { return exports.TokenBalance.fromPartial(e); })) || []; + message.postTokenBalances = ((_j = object.postTokenBalances) === null || _j === void 0 ? void 0 : _j.map(function (e) { return exports.TokenBalance.fromPartial(e); })) || []; + message.rewards = ((_k = object.rewards) === null || _k === void 0 ? void 0 : _k.map(function (e) { return exports.Reward.fromPartial(e); })) || []; + message.loadedWritableAddresses = ((_l = object.loadedWritableAddresses) === null || _l === void 0 ? void 0 : _l.map(function (e) { return e; })) || []; + message.loadedReadonlyAddresses = ((_m = object.loadedReadonlyAddresses) === null || _m === void 0 ? void 0 : _m.map(function (e) { return e; })) || []; + message.returnData = (object.returnData !== undefined && object.returnData !== null) + ? exports.ReturnData.fromPartial(object.returnData) + : undefined; + message.returnDataNone = (_o = object.returnDataNone) !== null && _o !== void 0 ? _o : false; + message.computeUnitsConsumed = (_p = object.computeUnitsConsumed) !== null && _p !== void 0 ? _p : undefined; + message.costUnits = (_q = object.costUnits) !== null && _q !== void 0 ? _q : undefined; + return message; + }, +}; +function createBaseTransactionError() { + return { err: new Uint8Array(0) }; +} +exports.TransactionError = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.err.length !== 0) { + writer.uint32(10).bytes(message.err); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseTransactionError(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.err = reader.bytes(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { err: isSet(object.err) ? bytesFromBase64(object.err) : new Uint8Array(0) }; + }, + toJSON: function (message) { + var obj = {}; + if (message.err.length !== 0) { + obj.err = base64FromBytes(message.err); + } + return obj; + }, + create: function (base) { + return exports.TransactionError.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseTransactionError(); + message.err = (_a = object.err) !== null && _a !== void 0 ? _a : new Uint8Array(0); + return message; + }, +}; +function createBaseInnerInstructions() { + return { index: 0, instructions: [] }; +} +exports.InnerInstructions = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.index !== 0) { + writer.uint32(8).uint32(message.index); + } + for (var _i = 0, _a = message.instructions; _i < _a.length; _i++) { + var v = _a[_i]; + exports.InnerInstruction.encode(v, writer.uint32(18).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseInnerInstructions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.index = reader.uint32(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.instructions.push(exports.InnerInstruction.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + index: isSet(object.index) ? globalThis.Number(object.index) : 0, + instructions: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.instructions) + ? object.instructions.map(function (e) { return exports.InnerInstruction.fromJSON(e); }) + : [], + }; + }, + toJSON: function (message) { + var _a; + var obj = {}; + if (message.index !== 0) { + obj.index = Math.round(message.index); + } + if ((_a = message.instructions) === null || _a === void 0 ? void 0 : _a.length) { + obj.instructions = message.instructions.map(function (e) { return exports.InnerInstruction.toJSON(e); }); + } + return obj; + }, + create: function (base) { + return exports.InnerInstructions.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b; + var message = createBaseInnerInstructions(); + message.index = (_a = object.index) !== null && _a !== void 0 ? _a : 0; + message.instructions = ((_b = object.instructions) === null || _b === void 0 ? 
void 0 : _b.map(function (e) { return exports.InnerInstruction.fromPartial(e); })) || []; + return message; + }, +}; +function createBaseInnerInstruction() { + return { programIdIndex: 0, accounts: new Uint8Array(0), data: new Uint8Array(0), stackHeight: undefined }; +} +exports.InnerInstruction = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.programIdIndex !== 0) { + writer.uint32(8).uint32(message.programIdIndex); + } + if (message.accounts.length !== 0) { + writer.uint32(18).bytes(message.accounts); + } + if (message.data.length !== 0) { + writer.uint32(26).bytes(message.data); + } + if (message.stackHeight !== undefined) { + writer.uint32(32).uint32(message.stackHeight); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseInnerInstruction(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.programIdIndex = reader.uint32(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.accounts = reader.bytes(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + message.data = reader.bytes(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + message.stackHeight = reader.uint32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + programIdIndex: isSet(object.programIdIndex) ? globalThis.Number(object.programIdIndex) : 0, + accounts: isSet(object.accounts) ? bytesFromBase64(object.accounts) : new Uint8Array(0), + data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(0), + stackHeight: isSet(object.stackHeight) ? 
globalThis.Number(object.stackHeight) : undefined, + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.programIdIndex !== 0) { + obj.programIdIndex = Math.round(message.programIdIndex); + } + if (message.accounts.length !== 0) { + obj.accounts = base64FromBytes(message.accounts); + } + if (message.data.length !== 0) { + obj.data = base64FromBytes(message.data); + } + if (message.stackHeight !== undefined) { + obj.stackHeight = Math.round(message.stackHeight); + } + return obj; + }, + create: function (base) { + return exports.InnerInstruction.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c, _d; + var message = createBaseInnerInstruction(); + message.programIdIndex = (_a = object.programIdIndex) !== null && _a !== void 0 ? _a : 0; + message.accounts = (_b = object.accounts) !== null && _b !== void 0 ? _b : new Uint8Array(0); + message.data = (_c = object.data) !== null && _c !== void 0 ? _c : new Uint8Array(0); + message.stackHeight = (_d = object.stackHeight) !== null && _d !== void 0 ? _d : undefined; + return message; + }, +}; +function createBaseCompiledInstruction() { + return { programIdIndex: 0, accounts: new Uint8Array(0), data: new Uint8Array(0) }; +} +exports.CompiledInstruction = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.programIdIndex !== 0) { + writer.uint32(8).uint32(message.programIdIndex); + } + if (message.accounts.length !== 0) { + writer.uint32(18).bytes(message.accounts); + } + if (message.data.length !== 0) { + writer.uint32(26).bytes(message.data); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseCompiledInstruction(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.programIdIndex = reader.uint32(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.accounts = reader.bytes(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + message.data = reader.bytes(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + programIdIndex: isSet(object.programIdIndex) ? globalThis.Number(object.programIdIndex) : 0, + accounts: isSet(object.accounts) ? bytesFromBase64(object.accounts) : new Uint8Array(0), + data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(0), + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.programIdIndex !== 0) { + obj.programIdIndex = Math.round(message.programIdIndex); + } + if (message.accounts.length !== 0) { + obj.accounts = base64FromBytes(message.accounts); + } + if (message.data.length !== 0) { + obj.data = base64FromBytes(message.data); + } + return obj; + }, + create: function (base) { + return exports.CompiledInstruction.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c; + var message = createBaseCompiledInstruction(); + message.programIdIndex = (_a = object.programIdIndex) !== null && _a !== void 0 ? _a : 0; + message.accounts = (_b = object.accounts) !== null && _b !== void 0 ? _b : new Uint8Array(0); + message.data = (_c = object.data) !== null && _c !== void 0 ? 
_c : new Uint8Array(0); + return message; + }, +}; +function createBaseTokenBalance() { + return { accountIndex: 0, mint: "", uiTokenAmount: undefined, owner: "", programId: "" }; +} +exports.TokenBalance = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.accountIndex !== 0) { + writer.uint32(8).uint32(message.accountIndex); + } + if (message.mint !== "") { + writer.uint32(18).string(message.mint); + } + if (message.uiTokenAmount !== undefined) { + exports.UiTokenAmount.encode(message.uiTokenAmount, writer.uint32(26).fork()).join(); + } + if (message.owner !== "") { + writer.uint32(34).string(message.owner); + } + if (message.programId !== "") { + writer.uint32(42).string(message.programId); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseTokenBalance(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.accountIndex = reader.uint32(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.mint = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + message.uiTokenAmount = exports.UiTokenAmount.decode(reader, reader.uint32()); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + message.owner = reader.string(); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + message.programId = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + accountIndex: isSet(object.accountIndex) ? globalThis.Number(object.accountIndex) : 0, + mint: isSet(object.mint) ? 
globalThis.String(object.mint) : "", + uiTokenAmount: isSet(object.uiTokenAmount) ? exports.UiTokenAmount.fromJSON(object.uiTokenAmount) : undefined, + owner: isSet(object.owner) ? globalThis.String(object.owner) : "", + programId: isSet(object.programId) ? globalThis.String(object.programId) : "", + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.accountIndex !== 0) { + obj.accountIndex = Math.round(message.accountIndex); + } + if (message.mint !== "") { + obj.mint = message.mint; + } + if (message.uiTokenAmount !== undefined) { + obj.uiTokenAmount = exports.UiTokenAmount.toJSON(message.uiTokenAmount); + } + if (message.owner !== "") { + obj.owner = message.owner; + } + if (message.programId !== "") { + obj.programId = message.programId; + } + return obj; + }, + create: function (base) { + return exports.TokenBalance.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c, _d; + var message = createBaseTokenBalance(); + message.accountIndex = (_a = object.accountIndex) !== null && _a !== void 0 ? _a : 0; + message.mint = (_b = object.mint) !== null && _b !== void 0 ? _b : ""; + message.uiTokenAmount = (object.uiTokenAmount !== undefined && object.uiTokenAmount !== null) + ? exports.UiTokenAmount.fromPartial(object.uiTokenAmount) + : undefined; + message.owner = (_c = object.owner) !== null && _c !== void 0 ? _c : ""; + message.programId = (_d = object.programId) !== null && _d !== void 0 ? 
_d : ""; + return message; + }, +}; +function createBaseUiTokenAmount() { + return { uiAmount: 0, decimals: 0, amount: "", uiAmountString: "" }; +} +exports.UiTokenAmount = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.uiAmount !== 0) { + writer.uint32(9).double(message.uiAmount); + } + if (message.decimals !== 0) { + writer.uint32(16).uint32(message.decimals); + } + if (message.amount !== "") { + writer.uint32(26).string(message.amount); + } + if (message.uiAmountString !== "") { + writer.uint32(34).string(message.uiAmountString); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseUiTokenAmount(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 9) { + break; + } + message.uiAmount = reader.double(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + message.decimals = reader.uint32(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + message.amount = reader.string(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + message.uiAmountString = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + uiAmount: isSet(object.uiAmount) ? globalThis.Number(object.uiAmount) : 0, + decimals: isSet(object.decimals) ? globalThis.Number(object.decimals) : 0, + amount: isSet(object.amount) ? globalThis.String(object.amount) : "", + uiAmountString: isSet(object.uiAmountString) ? 
globalThis.String(object.uiAmountString) : "", + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.uiAmount !== 0) { + obj.uiAmount = message.uiAmount; + } + if (message.decimals !== 0) { + obj.decimals = Math.round(message.decimals); + } + if (message.amount !== "") { + obj.amount = message.amount; + } + if (message.uiAmountString !== "") { + obj.uiAmountString = message.uiAmountString; + } + return obj; + }, + create: function (base) { + return exports.UiTokenAmount.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c, _d; + var message = createBaseUiTokenAmount(); + message.uiAmount = (_a = object.uiAmount) !== null && _a !== void 0 ? _a : 0; + message.decimals = (_b = object.decimals) !== null && _b !== void 0 ? _b : 0; + message.amount = (_c = object.amount) !== null && _c !== void 0 ? _c : ""; + message.uiAmountString = (_d = object.uiAmountString) !== null && _d !== void 0 ? _d : ""; + return message; + }, +}; +function createBaseReturnData() { + return { programId: new Uint8Array(0), data: new Uint8Array(0) }; +} +exports.ReturnData = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.programId.length !== 0) { + writer.uint32(10).bytes(message.programId); + } + if (message.data.length !== 0) { + writer.uint32(18).bytes(message.data); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseReturnData(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.programId = reader.bytes(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.data = reader.bytes(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + programId: isSet(object.programId) ? bytesFromBase64(object.programId) : new Uint8Array(0), + data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(0), + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.programId.length !== 0) { + obj.programId = base64FromBytes(message.programId); + } + if (message.data.length !== 0) { + obj.data = base64FromBytes(message.data); + } + return obj; + }, + create: function (base) { + return exports.ReturnData.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b; + var message = createBaseReturnData(); + message.programId = (_a = object.programId) !== null && _a !== void 0 ? _a : new Uint8Array(0); + message.data = (_b = object.data) !== null && _b !== void 0 ? 
_b : new Uint8Array(0); + return message; + }, +}; +function createBaseReward() { + return { pubkey: "", lamports: "0", postBalance: "0", rewardType: 0, commission: "" }; +} +exports.Reward = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.pubkey !== "") { + writer.uint32(10).string(message.pubkey); + } + if (message.lamports !== "0") { + writer.uint32(16).int64(message.lamports); + } + if (message.postBalance !== "0") { + writer.uint32(24).uint64(message.postBalance); + } + if (message.rewardType !== 0) { + writer.uint32(32).int32(message.rewardType); + } + if (message.commission !== "") { + writer.uint32(42).string(message.commission); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseReward(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.pubkey = reader.string(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + message.lamports = reader.int64().toString(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + message.postBalance = reader.uint64().toString(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + message.rewardType = reader.int32(); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + message.commission = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + pubkey: isSet(object.pubkey) ? globalThis.String(object.pubkey) : "", + lamports: isSet(object.lamports) ? globalThis.String(object.lamports) : "0", + postBalance: isSet(object.postBalance) ? 
globalThis.String(object.postBalance) : "0", + rewardType: isSet(object.rewardType) ? rewardTypeFromJSON(object.rewardType) : 0, + commission: isSet(object.commission) ? globalThis.String(object.commission) : "", + }; + }, + toJSON: function (message) { + var obj = {}; + if (message.pubkey !== "") { + obj.pubkey = message.pubkey; + } + if (message.lamports !== "0") { + obj.lamports = message.lamports; + } + if (message.postBalance !== "0") { + obj.postBalance = message.postBalance; + } + if (message.rewardType !== 0) { + obj.rewardType = rewardTypeToJSON(message.rewardType); + } + if (message.commission !== "") { + obj.commission = message.commission; + } + return obj; + }, + create: function (base) { + return exports.Reward.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a, _b, _c, _d, _e; + var message = createBaseReward(); + message.pubkey = (_a = object.pubkey) !== null && _a !== void 0 ? _a : ""; + message.lamports = (_b = object.lamports) !== null && _b !== void 0 ? _b : "0"; + message.postBalance = (_c = object.postBalance) !== null && _c !== void 0 ? _c : "0"; + message.rewardType = (_d = object.rewardType) !== null && _d !== void 0 ? _d : 0; + message.commission = (_e = object.commission) !== null && _e !== void 0 ? _e : ""; + return message; + }, +}; +function createBaseRewards() { + return { rewards: [], numPartitions: undefined }; +} +exports.Rewards = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + for (var _i = 0, _a = message.rewards; _i < _a.length; _i++) { + var v = _a[_i]; + exports.Reward.encode(v, writer.uint32(10).fork()).join(); + } + if (message.numPartitions !== undefined) { + exports.NumPartitions.encode(message.numPartitions, writer.uint32(18).fork()).join(); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? 
input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseRewards(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + message.rewards.push(exports.Reward.decode(reader, reader.uint32())); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + message.numPartitions = exports.NumPartitions.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { + rewards: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.rewards) ? object.rewards.map(function (e) { return exports.Reward.fromJSON(e); }) : [], + numPartitions: isSet(object.numPartitions) ? exports.NumPartitions.fromJSON(object.numPartitions) : undefined, + }; + }, + toJSON: function (message) { + var _a; + var obj = {}; + if ((_a = message.rewards) === null || _a === void 0 ? void 0 : _a.length) { + obj.rewards = message.rewards.map(function (e) { return exports.Reward.toJSON(e); }); + } + if (message.numPartitions !== undefined) { + obj.numPartitions = exports.NumPartitions.toJSON(message.numPartitions); + } + return obj; + }, + create: function (base) { + return exports.Rewards.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseRewards(); + message.rewards = ((_a = object.rewards) === null || _a === void 0 ? void 0 : _a.map(function (e) { return exports.Reward.fromPartial(e); })) || []; + message.numPartitions = (object.numPartitions !== undefined && object.numPartitions !== null) + ? 
exports.NumPartitions.fromPartial(object.numPartitions) + : undefined; + return message; + }, +}; +function createBaseUnixTimestamp() { + return { timestamp: "0" }; +} +exports.UnixTimestamp = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.timestamp !== "0") { + writer.uint32(8).int64(message.timestamp); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseUnixTimestamp(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.timestamp = reader.int64().toString(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { timestamp: isSet(object.timestamp) ? globalThis.String(object.timestamp) : "0" }; + }, + toJSON: function (message) { + var obj = {}; + if (message.timestamp !== "0") { + obj.timestamp = message.timestamp; + } + return obj; + }, + create: function (base) { + return exports.UnixTimestamp.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseUnixTimestamp(); + message.timestamp = (_a = object.timestamp) !== null && _a !== void 0 ? _a : "0"; + return message; + }, +}; +function createBaseBlockHeight() { + return { blockHeight: "0" }; +} +exports.BlockHeight = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.blockHeight !== "0") { + writer.uint32(8).uint64(message.blockHeight); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? 
input : new wire_1.BinaryReader(input); + var end = length === undefined ? reader.len : reader.pos + length; + var message = createBaseBlockHeight(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.blockHeight = reader.uint64().toString(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { blockHeight: isSet(object.blockHeight) ? globalThis.String(object.blockHeight) : "0" }; + }, + toJSON: function (message) { + var obj = {}; + if (message.blockHeight !== "0") { + obj.blockHeight = message.blockHeight; + } + return obj; + }, + create: function (base) { + return exports.BlockHeight.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseBlockHeight(); + message.blockHeight = (_a = object.blockHeight) !== null && _a !== void 0 ? _a : "0"; + return message; + }, +}; +function createBaseNumPartitions() { + return { numPartitions: "0" }; +} +exports.NumPartitions = { + encode: function (message, writer) { + if (writer === void 0) { writer = new wire_1.BinaryWriter(); } + if (message.numPartitions !== "0") { + writer.uint32(8).uint64(message.numPartitions); + } + return writer; + }, + decode: function (input, length) { + var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); + var end = length === undefined ? 
reader.len : reader.pos + length; + var message = createBaseNumPartitions(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + message.numPartitions = reader.uint64().toString(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + fromJSON: function (object) { + return { numPartitions: isSet(object.numPartitions) ? globalThis.String(object.numPartitions) : "0" }; + }, + toJSON: function (message) { + var obj = {}; + if (message.numPartitions !== "0") { + obj.numPartitions = message.numPartitions; + } + return obj; + }, + create: function (base) { + return exports.NumPartitions.fromPartial(base !== null && base !== void 0 ? base : {}); + }, + fromPartial: function (object) { + var _a; + var message = createBaseNumPartitions(); + message.numPartitions = (_a = object.numPartitions) !== null && _a !== void 0 ? _a : "0"; + return message; + }, +}; +function bytesFromBase64(b64) { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } + else { + var bin = globalThis.atob(b64); + var arr = new Uint8Array(bin.length); + for (var i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} +function base64FromBytes(arr) { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } + else { + var bin_1 = []; + arr.forEach(function (byte) { + bin_1.push(globalThis.String.fromCharCode(byte)); + }); + return globalThis.btoa(bin_1.join("")); + } +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/typescript-sdk/src/index.js b/typescript-sdk/src/index.js new file mode 100644 index 0000000..3473f2f --- /dev/null +++ b/typescript-sdk/src/index.js @@ -0,0 +1,448 @@ +"use strict"; +var __assign = (this && this.__assign) || function () { + __assign = Object.assign || function(t) { + for (var s, i = 1, n = 
arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; + }; + return __assign.apply(this, arguments); +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __generator = (this && this.__generator) || function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype); + return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +}; +var __asyncValues = (this && this.__asyncValues) || function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP = exports.DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT = exports.DEFAULT_COMMIT_INTERVAL = exports.DEFAULT_DRAGONSMOUTH_CAPACITY = exports.FumaroleConfig = exports.FumaroleClient = void 0; +var grpc_js_1 = require("@grpc/grpc-js"); +var config_1 = require("./config/config"); +Object.defineProperty(exports, "FumaroleConfig", { enumerable: true, get: function () { return config_1.FumaroleConfig; } }); +var connectivity_1 = require("./connectivity"); +var types_1 = require("./types"); +Object.defineProperty(exports, "DEFAULT_DRAGONSMOUTH_CAPACITY", { enumerable: true, get: function () { return types_1.DEFAULT_DRAGONSMOUTH_CAPACITY; } }); +Object.defineProperty(exports, "DEFAULT_COMMIT_INTERVAL", { enumerable: true, get: function () { return types_1.DEFAULT_COMMIT_INTERVAL; } }); +Object.defineProperty(exports, "DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT", { enumerable: true, get: function () { return types_1.DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT; } }); +Object.defineProperty(exports, "DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP", { enumerable: true, get: function () { return types_1.DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP; } }); +var FumaroleClient = /** @class */ (function () { + function FumaroleClient(connector, stub) { + this.connector = connector; + this.stub = stub; + } + FumaroleClient.connect = function (config) { + return __awaiter(this, void 0, void 0, function () { + var endpoint, connector, client, methods; + return 
__generator(this, function (_a) { + switch (_a.label) { + case 0: + endpoint = config.endpoint; + connector = new connectivity_1.FumaroleGrpcConnector(config, endpoint); + FumaroleClient.logger.debug("Connecting to ".concat(endpoint)); + FumaroleClient.logger.debug("Connection config:", { + endpoint: config.endpoint, + xToken: config.xToken ? "***" : "none", + maxDecodingMessageSizeBytes: config.maxDecodingMessageSizeBytes, + }); + return [4 /*yield*/, connector.connect()]; + case 1: + client = _a.sent(); + FumaroleClient.logger.debug("Connected to ".concat(endpoint, ", testing stub...")); + // Wait for client to be ready + return [4 /*yield*/, new Promise(function (resolve, reject) { + var deadline = new Date().getTime() + 5000; // 5 second timeout + client.waitForReady(deadline, function (error) { + if (error) { + FumaroleClient.logger.error("Client failed to become ready:", error); + reject(error); + } + else { + FumaroleClient.logger.debug("Client is ready"); + resolve(undefined); + } + }); + })]; + case 2: + // Wait for client to be ready + _a.sent(); + // Verify client methods + if (!client || typeof client.listConsumerGroups !== "function") { + methods = client + ? 
Object.getOwnPropertyNames(Object.getPrototypeOf(client)) + : []; + FumaroleClient.logger.error("Available methods:", methods); + throw new Error("gRPC client or listConsumerGroups method not available"); + } + FumaroleClient.logger.debug("gRPC client initialized successfully"); + return [2 /*return*/, new FumaroleClient(connector, client)]; + } + }); + }); + }; + FumaroleClient.prototype.version = function () { + return __awaiter(this, void 0, void 0, function () { + var request; + var _this = this; + return __generator(this, function (_a) { + FumaroleClient.logger.debug("Sending version request"); + request = {}; + return [2 /*return*/, new Promise(function (resolve, reject) { + _this.stub.version(request, function (error, response) { + if (error) { + FumaroleClient.logger.error("Version request failed:", error); + reject(error); + } + else { + FumaroleClient.logger.debug("Version response:", response); + resolve(response); + } + }); + })]; + }); + }); + }; + FumaroleClient.prototype.dragonsmouthSubscribe = function (consumerGroupName, request) { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + return [2 /*return*/, this.dragonsmouthSubscribeWithConfig(consumerGroupName, request, {})]; + }); + }); + }; + FumaroleClient.prototype.dragonsmouthSubscribeWithConfig = function (consumerGroupName, request, config) { + return __awaiter(this, void 0, void 0, function () { + var finalConfig, dragonsmouthOutlet, fumeControlPlaneQ, initialJoin, initialJoinCommand, controlPlaneStream, subscribeRequestQueue, fumeControlPlaneRxQ, controlPlaneSourceTask, controlResponse, init, lastCommittedOffsetStr, lastCommittedOffset, dataPlaneClient, runtimeTask; + var _this = this; + var _a; + return __generator(this, function (_b) { + switch (_b.label) { + case 0: + finalConfig = __assign({ concurrentDownloadLimit: types_1.DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP, commitInterval: types_1.DEFAULT_COMMIT_INTERVAL, 
maxFailedSlotDownloadAttempt: types_1.DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT, dataChannelCapacity: types_1.DEFAULT_DRAGONSMOUTH_CAPACITY, gcInterval: types_1.DEFAULT_GC_INTERVAL, slotMemoryRetention: types_1.DEFAULT_SLOT_MEMORY_RETENTION }, config); + dragonsmouthOutlet = new types_1.AsyncQueue(finalConfig.dataChannelCapacity); + fumeControlPlaneQ = new types_1.AsyncQueue(100); + initialJoin = { consumerGroupName: consumerGroupName }; + initialJoinCommand = { initialJoin: initialJoin }; + return [4 /*yield*/, fumeControlPlaneQ.put(initialJoinCommand)]; + case 1: + _b.sent(); + FumaroleClient.logger.debug("Sent initial join command: ".concat(JSON.stringify(initialJoinCommand))); + controlPlaneStream = this.stub.subscribe(); + subscribeRequestQueue = new types_1.AsyncQueue(100); + fumeControlPlaneRxQ = new types_1.AsyncQueue(100); + controlPlaneSourceTask = (function () { return __awaiter(_this, void 0, void 0, function () { + var _a, controlPlaneStream_1, controlPlaneStream_1_1, update, e_1_1, error_1; + var _b, e_1, _c, _d; + return __generator(this, function (_e) { + switch (_e.label) { + case 0: + _e.trys.push([0, 14, , 15]); + _e.label = 1; + case 1: + _e.trys.push([1, 7, 8, 13]); + _a = true, controlPlaneStream_1 = __asyncValues(controlPlaneStream); + _e.label = 2; + case 2: return [4 /*yield*/, controlPlaneStream_1.next()]; + case 3: + if (!(controlPlaneStream_1_1 = _e.sent(), _b = controlPlaneStream_1_1.done, !_b)) return [3 /*break*/, 6]; + _d = controlPlaneStream_1_1.value; + _a = false; + update = _d; + return [4 /*yield*/, fumeControlPlaneRxQ.put(update)]; + case 4: + _e.sent(); + _e.label = 5; + case 5: + _a = true; + return [3 /*break*/, 2]; + case 6: return [3 /*break*/, 13]; + case 7: + e_1_1 = _e.sent(); + e_1 = { error: e_1_1 }; + return [3 /*break*/, 13]; + case 8: + _e.trys.push([8, , 11, 12]); + if (!(!_a && !_b && (_c = controlPlaneStream_1.return))) return [3 /*break*/, 10]; + return [4 /*yield*/, _c.call(controlPlaneStream_1)]; + case 9: + 
_e.sent(); + _e.label = 10; + case 10: return [3 /*break*/, 12]; + case 11: + if (e_1) throw e_1.error; + return [7 /*endfinally*/]; + case 12: return [7 /*endfinally*/]; + case 13: return [3 /*break*/, 15]; + case 14: + error_1 = _e.sent(); + if (error_1.code !== "CANCELLED") { + throw error_1; + } + return [3 /*break*/, 15]; + case 15: return [2 /*return*/]; + } + }); + }); })(); + return [4 /*yield*/, fumeControlPlaneRxQ.get()]; + case 2: + controlResponse = (_b.sent()); + init = controlResponse.init; + if (!init) { + throw new Error("Unexpected initial response: ".concat(JSON.stringify(controlResponse))); + } + FumaroleClient.logger.debug("Control response: ".concat(JSON.stringify(controlResponse))); + lastCommittedOffsetStr = (_a = init.lastCommittedOffsets) === null || _a === void 0 ? void 0 : _a[0]; + if (!lastCommittedOffsetStr) { + throw new Error("No last committed offset"); + } + lastCommittedOffset = BigInt(lastCommittedOffsetStr); + return [4 /*yield*/, this.connector.connect()]; + case 3: + dataPlaneClient = _b.sent(); + runtimeTask = this.startRuntime(subscribeRequestQueue, fumeControlPlaneQ, fumeControlPlaneRxQ, dragonsmouthOutlet, request, consumerGroupName, lastCommittedOffset, finalConfig, dataPlaneClient); + FumaroleClient.logger.debug("Fumarole handle created: ".concat(runtimeTask)); + return [2 /*return*/, { + sink: subscribeRequestQueue, + source: dragonsmouthOutlet, + fumaroleHandle: runtimeTask, + }]; + } + }); + }); + }; + FumaroleClient.prototype.startRuntime = function (subscribeRequestQueue, controlPlaneTxQ, controlPlaneRxQ, dragonsmouthOutlet, request, consumerGroupName, lastCommittedOffset, config, dataPlaneClient) { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + // Implementation of runtime task here + // This would be equivalent to AsyncioFumeDragonsmouthRuntime in Python + // For brevity, this is a placeholder implementation + return [2 /*return*/, Promise.resolve()]; + }); + }); 
+ }; + FumaroleClient.prototype.listConsumerGroups = function () { + return __awaiter(this, void 0, void 0, function () { + var request, metadata; + var _this = this; + return __generator(this, function (_a) { + if (!this.stub) { + throw new Error("gRPC stub not initialized"); + } + if (!this.stub.listConsumerGroups) { + throw new Error("listConsumerGroups method not available on stub"); + } + FumaroleClient.logger.debug("Preparing listConsumerGroups request"); + request = {}; + metadata = new grpc_js_1.Metadata(); + return [2 /*return*/, new Promise(function (resolve, reject) { + var hasResponded = false; + var timeout = setTimeout(function () { + if (!hasResponded) { + FumaroleClient.logger.error("ListConsumerGroups timeout after 30s"); + if (call) { + try { + call.cancel(); + } + catch (e) { + FumaroleClient.logger.error("Error cancelling call:", e); + } + } + reject(new Error("gRPC call timed out after 30 seconds")); + } + }, 30000); // 30 second timeout + var call; + try { + FumaroleClient.logger.debug("Starting gRPC listConsumerGroups call"); + call = _this.stub.listConsumerGroups(request, metadata, { + deadline: Date.now() + 30000, // 30 second deadline + }, function (error, response) { + var _a; + hasResponded = true; + clearTimeout(timeout); + if (error) { + var errorDetails = { + code: error.code, + details: error.details, + metadata: (_a = error.metadata) === null || _a === void 0 ? 
void 0 : _a.getMap(), + stack: error.stack, + message: error.message, + name: error.name, + }; + FumaroleClient.logger.error("ListConsumerGroups error:", errorDetails); + reject(error); + } + else { + FumaroleClient.logger.debug("ListConsumerGroups success - Response:", JSON.stringify(response, null, 2)); + resolve(response); + } + }); + // Monitor call state + if (call) { + call.on("metadata", function (metadata) { + FumaroleClient.logger.debug("Received metadata:", metadata.getMap()); + }); + call.on("status", function (status) { + FumaroleClient.logger.debug("Call status:", status); + }); + call.on("error", function (error) { + FumaroleClient.logger.error("Call stream error:", error); + if (!hasResponded) { + hasResponded = true; + clearTimeout(timeout); + reject(error); + } + }); + } + else { + FumaroleClient.logger.error("Failed to create gRPC call object"); + hasResponded = true; + clearTimeout(timeout); + reject(new Error("Failed to create gRPC call")); + } + } + catch (setupError) { + hasResponded = true; + clearTimeout(timeout); + FumaroleClient.logger.error("Error setting up gRPC call:", setupError); + reject(setupError); + } + })]; + }); + }); + }; + FumaroleClient.prototype.getConsumerGroupInfo = function (consumerGroupName) { + return __awaiter(this, void 0, void 0, function () { + var request; + var _this = this; + return __generator(this, function (_a) { + FumaroleClient.logger.debug("Sending getConsumerGroupInfo request:", consumerGroupName); + request = { consumerGroupName: consumerGroupName }; + return [2 /*return*/, new Promise(function (resolve, reject) { + _this.stub.getConsumerGroupInfo(request, function (error, response) { + if (error) { + if (error.code === 14) { + // grpc.status.NOT_FOUND + FumaroleClient.logger.debug("Consumer group not found:", consumerGroupName); + resolve(null); + } + else { + FumaroleClient.logger.error("GetConsumerGroupInfo error:", error); + reject(error); + } + } + else { + 
FumaroleClient.logger.debug("GetConsumerGroupInfo response:", response); + resolve(response); + } + }); + })]; + }); + }); + }; + FumaroleClient.prototype.deleteConsumerGroup = function (consumerGroupName) { + return __awaiter(this, void 0, void 0, function () { + var request; + var _this = this; + return __generator(this, function (_a) { + FumaroleClient.logger.debug("Sending deleteConsumerGroup request:", consumerGroupName); + request = { consumerGroupName: consumerGroupName }; + return [2 /*return*/, new Promise(function (resolve, reject) { + _this.stub.deleteConsumerGroup(request, function (error, response) { + if (error) { + FumaroleClient.logger.error("DeleteConsumerGroup error:", error); + reject(error); + } + else { + FumaroleClient.logger.debug("DeleteConsumerGroup response:", response); + resolve(response); + } + }); + })]; + }); + }); + }; + FumaroleClient.prototype.deleteAllConsumerGroups = function () { + return __awaiter(this, void 0, void 0, function () { + var response, deletePromises, results, failures; + var _this = this; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, this.listConsumerGroups()]; + case 1: + response = _a.sent(); + deletePromises = response.consumerGroups.map(function (group) { + return _this.deleteConsumerGroup(group.consumerGroupName); + }); + return [4 /*yield*/, Promise.all(deletePromises)]; + case 2: + results = _a.sent(); + failures = results.filter(function (result) { return !result.success; }); + if (failures.length > 0) { + throw new Error("Failed to delete some consumer groups: ".concat(JSON.stringify(failures))); + } + return [2 /*return*/]; + } + }); + }); + }; + FumaroleClient.prototype.createConsumerGroup = function (request) { + return __awaiter(this, void 0, void 0, function () { + var _this = this; + return __generator(this, function (_a) { + FumaroleClient.logger.debug("Sending createConsumerGroup request:", request); + return [2 /*return*/, new Promise(function 
(resolve, reject) { + _this.stub.createConsumerGroup(request, function (error, response) { + if (error) { + FumaroleClient.logger.error("CreateConsumerGroup error:", error); + reject(error); + } + else { + FumaroleClient.logger.debug("CreateConsumerGroup response:", response); + resolve(response); + } + }); + })]; + }); + }); + }; + FumaroleClient.logger = console; + return FumaroleClient; +}()); +exports.FumaroleClient = FumaroleClient; diff --git a/typescript-sdk/src/index.ts b/typescript-sdk/src/index.ts index 1511974..c7c55af 100644 --- a/typescript-sdk/src/index.ts +++ b/typescript-sdk/src/index.ts @@ -1,4 +1,4 @@ -import { ServiceError } from "@grpc/grpc-js"; +import { Metadata, ServiceError } from "@grpc/grpc-js"; import { FumaroleConfig } from "./config/config"; import { FumaroleClient as GrpcClient } from "./grpc/fumarole"; import { FumaroleGrpcConnector } from "./connectivity"; @@ -18,10 +18,12 @@ import { CreateConsumerGroupResponse, } from "./grpc/fumarole"; import { SubscribeRequest, SubscribeUpdate } from "./grpc/geyser"; -import { - AsyncQueue, +import type { DragonsmouthAdapterSession, FumaroleSubscribeConfig, +} from "./types"; +import { + AsyncQueue, DEFAULT_DRAGONSMOUTH_CAPACITY, DEFAULT_COMMIT_INTERVAL, DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT, @@ -43,25 +45,57 @@ export class FumaroleClient { static async connect(config: FumaroleConfig): Promise { const endpoint = config.endpoint; const connector = new FumaroleGrpcConnector(config, endpoint); + FumaroleClient.logger.debug(`Connecting to ${endpoint}`); + FumaroleClient.logger.debug("Connection config:", { + endpoint: config.endpoint, + xToken: config.xToken ? 
"***" : "none", + maxDecodingMessageSizeBytes: config.maxDecodingMessageSizeBytes, + }); + const client = await connector.connect(); - FumaroleClient.logger.debug(`Connected to ${endpoint}`); + FumaroleClient.logger.debug(`Connected to ${endpoint}, testing stub...`); + + // Wait for client to be ready + await new Promise((resolve, reject) => { + const deadline = new Date().getTime() + 5000; // 5 second timeout + client.waitForReady(deadline, (error) => { + if (error) { + FumaroleClient.logger.error("Client failed to become ready:", error); + reject(error); + } else { + FumaroleClient.logger.debug("Client is ready"); + resolve(undefined); + } + }); + }); + + // Verify client methods + if (!client || typeof client.listConsumerGroups !== "function") { + const methods = client + ? Object.getOwnPropertyNames(Object.getPrototypeOf(client)) + : []; + FumaroleClient.logger.error("Available methods:", methods); + throw new Error("gRPC client or listConsumerGroups method not available"); + } + + FumaroleClient.logger.debug("gRPC client initialized successfully"); return new FumaroleClient(connector, client); } async version(): Promise { + FumaroleClient.logger.debug("Sending version request"); const request = {} as VersionRequest; return new Promise((resolve, reject) => { - this.stub.version( - request, - (error: ServiceError | null, response: VersionResponse) => { - if (error) { - reject(error); - } else { - resolve(response); - } + this.stub.version(request, (error, response) => { + if (error) { + FumaroleClient.logger.error("Version request failed:", error); + reject(error); + } else { + FumaroleClient.logger.debug("Version response:", response); + resolve(response); } - ); + }); }); } @@ -180,24 +214,116 @@ export class FumaroleClient { } async listConsumerGroups(): Promise { + if (!this.stub) { + throw new Error("gRPC stub not initialized"); + } + if (!this.stub.listConsumerGroups) { + throw new Error("listConsumerGroups method not available on stub"); + } + + 
FumaroleClient.logger.debug("Preparing listConsumerGroups request"); const request = {} as ListConsumerGroupsRequest; + const metadata = new Metadata(); + return new Promise((resolve, reject) => { - this.stub.listConsumerGroups( - request, - (error: ServiceError | null, response: ListConsumerGroupsResponse) => { - if (error) { - reject(error); - } else { - resolve(response); + let hasResponded = false; + const timeout = setTimeout(() => { + if (!hasResponded) { + FumaroleClient.logger.error("ListConsumerGroups timeout after 30s"); + if (call) { + try { + call.cancel(); + } catch (e) { + FumaroleClient.logger.error("Error cancelling call:", e); + } } + reject(new Error("gRPC call timed out after 30 seconds")); } - ); + }, 30000); // 30 second timeout + + let call: any; + try { + FumaroleClient.logger.debug("Starting gRPC listConsumerGroups call"); + call = this.stub.listConsumerGroups( + request, + metadata, + { + deadline: Date.now() + 30000, // 30 second deadline + }, + ( + error: ServiceError | null, + response: ListConsumerGroupsResponse + ) => { + hasResponded = true; + clearTimeout(timeout); + + if (error) { + const errorDetails = { + code: error.code, + details: error.details, + metadata: error.metadata?.getMap(), + stack: error.stack, + message: error.message, + name: error.name, + }; + FumaroleClient.logger.error( + "ListConsumerGroups error:", + errorDetails + ); + reject(error); + } else { + FumaroleClient.logger.debug( + "ListConsumerGroups success - Response:", + JSON.stringify(response, null, 2) + ); + resolve(response); + } + } + ); + + // Monitor call state + if (call) { + call.on("metadata", (metadata: Metadata) => { + FumaroleClient.logger.debug( + "Received metadata:", + metadata.getMap() + ); + }); + + call.on("status", (status: any) => { + FumaroleClient.logger.debug("Call status:", status); + }); + + call.on("error", (error: Error) => { + FumaroleClient.logger.error("Call stream error:", error); + if (!hasResponded) { + hasResponded = true; + 
clearTimeout(timeout); + reject(error); + } + }); + } else { + FumaroleClient.logger.error("Failed to create gRPC call object"); + hasResponded = true; + clearTimeout(timeout); + reject(new Error("Failed to create gRPC call")); + } + } catch (setupError) { + hasResponded = true; + clearTimeout(timeout); + FumaroleClient.logger.error("Error setting up gRPC call:", setupError); + reject(setupError); + } }); } async getConsumerGroupInfo( consumerGroupName: string ): Promise { + FumaroleClient.logger.debug( + "Sending getConsumerGroupInfo request:", + consumerGroupName + ); const request = { consumerGroupName } as GetConsumerGroupInfoRequest; return new Promise((resolve, reject) => { this.stub.getConsumerGroupInfo( @@ -206,11 +332,20 @@ export class FumaroleClient { if (error) { if (error.code === 14) { // grpc.status.NOT_FOUND + FumaroleClient.logger.debug( + "Consumer group not found:", + consumerGroupName + ); resolve(null); } else { + FumaroleClient.logger.error("GetConsumerGroupInfo error:", error); reject(error); } } else { + FumaroleClient.logger.debug( + "GetConsumerGroupInfo response:", + response + ); resolve(response); } } @@ -221,14 +356,23 @@ export class FumaroleClient { async deleteConsumerGroup( consumerGroupName: string ): Promise { + FumaroleClient.logger.debug( + "Sending deleteConsumerGroup request:", + consumerGroupName + ); const request = { consumerGroupName } as DeleteConsumerGroupRequest; return new Promise((resolve, reject) => { this.stub.deleteConsumerGroup( request, (error: ServiceError | null, response: DeleteConsumerGroupResponse) => { if (error) { + FumaroleClient.logger.error("DeleteConsumerGroup error:", error); reject(error); } else { + FumaroleClient.logger.debug( + "DeleteConsumerGroup response:", + response + ); resolve(response); } } @@ -256,13 +400,22 @@ export class FumaroleClient { async createConsumerGroup( request: CreateConsumerGroupRequest ): Promise { + FumaroleClient.logger.debug( + "Sending createConsumerGroup request:", 
+ request + ); return new Promise((resolve, reject) => { this.stub.createConsumerGroup( request, (error: ServiceError | null, response: CreateConsumerGroupResponse) => { if (error) { + FumaroleClient.logger.error("CreateConsumerGroup error:", error); reject(error); } else { + FumaroleClient.logger.debug( + "CreateConsumerGroup response:", + response + ); resolve(response); } } @@ -273,10 +426,10 @@ export class FumaroleClient { export { FumaroleConfig, - FumaroleSubscribeConfig, - DragonsmouthAdapterSession, DEFAULT_DRAGONSMOUTH_CAPACITY, DEFAULT_COMMIT_INTERVAL, DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT, DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP, }; + +export type { DragonsmouthAdapterSession, FumaroleSubscribeConfig }; diff --git a/typescript-sdk/src/runtime/aio.js b/typescript-sdk/src/runtime/aio.js new file mode 100644 index 0000000..7bced3d --- /dev/null +++ b/typescript-sdk/src/runtime/aio.js @@ -0,0 +1,593 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __generator = (this && this.__generator) || function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? 
Iterator : Object).prototype); + return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? 
op[1] : void 0, done: true }; + } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.GrpcDownloadBlockTaskRun = exports.GrpcSlotDownloader = exports.AsyncioFumeDragonsmouthRuntime = exports.DEFAULT_SLOT_MEMORY_RETENTION = exports.DEFAULT_GC_INTERVAL = void 0; +var grpc_js_1 = require("@grpc/grpc-js"); +var aio_1 = require("../utils/aio"); +// Constants +exports.DEFAULT_GC_INTERVAL = 5; +exports.DEFAULT_SLOT_MEMORY_RETENTION = 10000; +var LOGGER = console; +var AsyncioFumeDragonsmouthRuntime = /** @class */ (function () { + function AsyncioFumeDragonsmouthRuntime(sm, slotDownloader, subscribeRequestUpdateQ, subscribeRequest, consumerGroupName, controlPlaneTxQ, controlPlaneRxQ, dragonsmouthOutlet, commitInterval, gcInterval, maxConcurrentDownload) { + if (maxConcurrentDownload === void 0) { maxConcurrentDownload = 10; } + this.sm = sm; + this.slotDownloader = slotDownloader; + this.subscribeRequestUpdateQ = subscribeRequestUpdateQ; + this.subscribeRequest = subscribeRequest; + this.consumerGroupName = consumerGroupName; + this.controlPlaneTx = controlPlaneTxQ; + this.controlPlaneRx = controlPlaneRxQ; + this.dragonsmouthOutlet = dragonsmouthOutlet; + this.commitInterval = commitInterval; + this.gcInterval = gcInterval; + this.maxConcurrentDownload = maxConcurrentDownload; + this.downloadTasks = new Map(); + this.lastCommit = Date.now(); + } + AsyncioFumeDragonsmouthRuntime.prototype.buildPollHistoryCmd = function (fromOffset) { + return { pollHist: { shardId: 0 } }; + }; + AsyncioFumeDragonsmouthRuntime.prototype.buildCommitOffsetCmd = function (offset) { + return { commitOffset: { offset: offset, shardId: 0 } }; + }; + AsyncioFumeDragonsmouthRuntime.prototype.handleControlResponse = function (controlResponse) { + var _a; + // Get first defined property from controlResponse + var responseField = Object.keys(controlResponse).find(function (key) { return controlResponse[key] !== undefined && key !== "response"; }); + if (!responseField) { + 
throw new Error("Control response is empty"); + } + switch (responseField) { + case "pollHist": { + var pollHist = controlResponse.pollHist; + LOGGER.debug("Received poll history ".concat((_a = pollHist.events) === null || _a === void 0 ? void 0 : _a.length, " events")); + // Convert string slots to numbers and map commitment levels + var convertedEvents = (pollHist.events || []).map(function (event) { return ({ + offset: event.offset, + slot: Number(event.slot), + parentSlot: event.parentSlot ? Number(event.parentSlot) : undefined, + commitmentLevel: event.commitmentLevel, + deadError: event.deadError, + blockchainId: event.blockchainId, + blockUid: event.blockUid, + numShards: Number(event.numShards), + }); }); + this.sm.queueBlockchainEvent(convertedEvents); + break; + } + case "commitOffset": { + var commitOffset = controlResponse.commitOffset; + LOGGER.debug("Received commit offset: ".concat(commitOffset)); + this.sm.updateCommittedOffset(commitOffset.offset); + break; + } + case "pong": + LOGGER.debug("Received pong"); + break; + default: + throw new Error("Unexpected control response"); + } + }; + AsyncioFumeDragonsmouthRuntime.prototype.pollHistoryIfNeeded = function () { + return __awaiter(this, void 0, void 0, function () { + var cmd; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + if (!this.sm.needNewBlockchainEvents()) return [3 /*break*/, 2]; + cmd = this.buildPollHistoryCmd(this.sm.committableOffset); + return [4 /*yield*/, this.controlPlaneTx.put(cmd)]; + case 1: + _a.sent(); + _a.label = 2; + case 2: return [2 /*return*/]; + } + }); + }); + }; + AsyncioFumeDragonsmouthRuntime.prototype.commitmentLevel = function () { + return this.subscribeRequest.commitment || 0; + }; + AsyncioFumeDragonsmouthRuntime.prototype.scheduleDownloadTaskIfAny = function () { + return __awaiter(this, void 0, void 0, function () { + var downloadRequest, downloadTaskArgs, downloadPromise; + return __generator(this, function (_a) { + switch 
(_a.label) { + case 0: + if (!true) return [3 /*break*/, 2]; + LOGGER.debug("Checking for download tasks to schedule"); + if (this.downloadTasks.size >= this.maxConcurrentDownload) { + return [3 /*break*/, 2]; + } + LOGGER.debug("Popping slot to download"); + return [4 /*yield*/, this.sm.popSlotToDownload(this.commitmentLevel())]; + case 1: + downloadRequest = _a.sent(); + if (!downloadRequest) { + LOGGER.debug("No download request available"); + return [3 /*break*/, 2]; + } + LOGGER.debug("Download request for slot ".concat(downloadRequest.slot, " popped")); + if (!downloadRequest.blockchainId) { + throw new Error("Download request must have a blockchain ID"); + } + downloadTaskArgs = { + downloadRequest: downloadRequest, + dragonsmouthOutlet: this.dragonsmouthOutlet, + }; + downloadPromise = this.slotDownloader.runDownload(this.subscribeRequest, downloadTaskArgs); + this.downloadTasks.set(downloadPromise, downloadRequest); + LOGGER.debug("Scheduling download task for slot ".concat(downloadRequest.slot)); + return [3 /*break*/, 0]; + case 2: return [2 /*return*/]; + } + }); + }); + }; + AsyncioFumeDragonsmouthRuntime.prototype.handleDownloadResult = function (downloadResult) { + if (downloadResult.kind === "Ok") { + var completed = downloadResult.completed; + LOGGER.debug("Download completed for slot ".concat(completed.slot, ", shard ").concat(completed.shardIdx, ", ").concat(completed.totalEventDownloaded, " total events")); + this.sm.makeSlotDownloadProgress(completed.slot, completed.shardIdx); + } + else { + var slot = downloadResult.slot; + var err = downloadResult.err; + throw new Error("Failed to download slot ".concat(slot, ": ").concat(err.message)); + } + }; + AsyncioFumeDragonsmouthRuntime.prototype.forceCommitOffset = function () { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + LOGGER.debug("Force committing offset ".concat(this.sm.committableOffset)); + return [4 
/*yield*/, this.controlPlaneTx.put(this.buildCommitOffsetCmd(this.sm.committableOffset))]; + case 1: + _a.sent(); + return [2 /*return*/]; + } + }); + }); + }; + AsyncioFumeDragonsmouthRuntime.prototype.commitOffset = function () { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + if (!(this.sm.lastCommittedOffset < this.sm.committableOffset)) return [3 /*break*/, 2]; + LOGGER.debug("Committing offset ".concat(this.sm.committableOffset)); + return [4 /*yield*/, this.forceCommitOffset()]; + case 1: + _a.sent(); + _a.label = 2; + case 2: + this.lastCommit = Date.now(); + return [2 /*return*/]; + } + }); + }); + }; + AsyncioFumeDragonsmouthRuntime.prototype.drainSlotStatus = function () { + return __awaiter(this, void 0, void 0, function () { + var commitment, slotStatusVec, slotStatus, _i, slotStatusVec_1, slotStatus, matchedFilters, _a, _b, _c, filterName, filter, update, error_1; + return __generator(this, function (_d) { + switch (_d.label) { + case 0: + commitment = this.subscribeRequest.commitment || 0; + slotStatusVec = []; + while (true) { + slotStatus = this.sm.popNextSlotStatus(); + if (!slotStatus) + break; + slotStatusVec.push(slotStatus); + } + if (!slotStatusVec.length) + return [2 /*return*/]; + LOGGER.debug("Draining ".concat(slotStatusVec.length, " slot status")); + _i = 0, slotStatusVec_1 = slotStatusVec; + _d.label = 1; + case 1: + if (!(_i < slotStatusVec_1.length)) return [3 /*break*/, 7]; + slotStatus = slotStatusVec_1[_i]; + matchedFilters = []; + for (_a = 0, _b = Object.entries(this.subscribeRequest.slots || {}); _a < _b.length; _a++) { + _c = _b[_a], filterName = _c[0], filter = _c[1]; + if (filter.filterByCommitment && + slotStatus.commitmentLevel === commitment) { + matchedFilters.push(filterName); + } + else if (!filter.filterByCommitment) { + matchedFilters.push(filterName); + } + } + if (!matchedFilters.length) return [3 /*break*/, 5]; + update = { + 
filters: matchedFilters, + createdAt: undefined, + slot: { + slot: slotStatus.slot, + parent: slotStatus.parentSlot, + status: slotStatus.commitmentLevel, + deadError: slotStatus.deadError, + }, + }; + _d.label = 2; + case 2: + _d.trys.push([2, 4, , 5]); + return [4 /*yield*/, this.dragonsmouthOutlet.put(update)]; + case 3: + _d.sent(); + return [3 /*break*/, 5]; + case 4: + error_1 = _d.sent(); + if (error_1.message === "Queue full") + return [2 /*return*/]; + throw error_1; + case 5: + this.sm.markEventAsProcessed(slotStatus.sessionSequence); + _d.label = 6; + case 6: + _i++; + return [3 /*break*/, 1]; + case 7: return [2 /*return*/]; + } + }); + }); + }; + AsyncioFumeDragonsmouthRuntime.prototype.handleControlPlaneResp = function (result) { + return __awaiter(this, void 0, void 0, function () { + var errorUpdate; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + if (!(result instanceof Error)) return [3 /*break*/, 2]; + errorUpdate = { + filters: [], + createdAt: undefined, + slot: { + slot: "0", + parent: "0", + status: 0, // Using 0 as default status for error case + deadError: result.message, + }, + }; + return [4 /*yield*/, this.dragonsmouthOutlet.put(errorUpdate)]; + case 1: + _a.sent(); + LOGGER.error("Control plane error: ".concat(result.message)); + return [2 /*return*/, false]; + case 2: + this.handleControlResponse(result); + return [2 /*return*/, true]; + } + }); + }); + }; + AsyncioFumeDragonsmouthRuntime.prototype.handleNewSubscribeRequest = function (subscribeRequest) { + this.subscribeRequest = subscribeRequest; + }; + AsyncioFumeDragonsmouthRuntime.prototype.run = function () { + return __awaiter(this, void 0, void 0, function () { + var ticks, taskMap, downloadTasks, _i, downloadTasks_1, task, downloadTaskInFlight, promises, done, taskName, _a, result, newTask, newTask, newTask; + return __generator(this, function (_b) { + switch (_b.label) { + case 0: + LOGGER.debug("Fumarole runtime starting..."); + return [4 
/*yield*/, this.controlPlaneTx.put(this.buildPollHistoryCmd())]; + case 1: + _b.sent(); + LOGGER.debug("Initial poll history command sent"); + return [4 /*yield*/, this.forceCommitOffset()]; + case 2: + _b.sent(); + LOGGER.debug("Initial commit offset command sent"); + ticks = 0; + taskMap = new Map(); + // Initial tasks + taskMap.set(this.subscribeRequestUpdateQ.get(), "dragonsmouth_bidi"); + taskMap.set(this.controlPlaneRx.get(), "control_plane_rx"); + taskMap.set(new aio_1.Interval(this.commitInterval).tick(), "commit_tick"); + _b.label = 3; + case 3: + if (!(taskMap.size > 0)) return [3 /*break*/, 16]; + ticks++; + LOGGER.debug("Runtime loop tick"); + if (ticks % this.gcInterval === 0) { + LOGGER.debug("Running garbage collection"); + this.sm.gc(); + ticks = 0; + } + LOGGER.debug("Polling history if needed"); + return [4 /*yield*/, this.pollHistoryIfNeeded()]; + case 4: + _b.sent(); + LOGGER.debug("Scheduling download tasks if any"); + return [4 /*yield*/, this.scheduleDownloadTaskIfAny()]; + case 5: + _b.sent(); + downloadTasks = Array.from(this.downloadTasks.keys()); + for (_i = 0, downloadTasks_1 = downloadTasks; _i < downloadTasks_1.length; _i++) { + task = downloadTasks_1[_i]; + taskMap.set(task, "download_task"); + } + downloadTaskInFlight = this.downloadTasks.size; + LOGGER.debug("Current download tasks in flight: ".concat(downloadTaskInFlight, " / ").concat(this.maxConcurrentDownload)); + promises = Array.from(taskMap.keys()); + return [4 /*yield*/, Promise.race(promises.map(function (p) { return p.then(function (result) { return ({ promise: p, result: result }); }); }))]; + case 6: + done = _b.sent(); + taskName = taskMap.get(done.promise); + taskMap.delete(done.promise); + _a = taskName; + switch (_a) { + case "dragonsmouth_bidi": return [3 /*break*/, 7]; + case "control_plane_rx": return [3 /*break*/, 8]; + case "download_task": return [3 /*break*/, 10]; + case "commit_tick": return [3 /*break*/, 11]; + } + return [3 /*break*/, 13]; + case 7: + { + 
LOGGER.debug("Dragonsmouth subscribe request received"); + result = done.result; + this.handleNewSubscribeRequest(result); + newTask = this.subscribeRequestUpdateQ.get(); + taskMap.set(newTask, "dragonsmouth_bidi"); + return [3 /*break*/, 14]; + } + _b.label = 8; + case 8: + LOGGER.debug("Control plane response received"); + return [4 /*yield*/, this.handleControlPlaneResp(done.result)]; + case 9: + if (!(_b.sent())) { + LOGGER.debug("Control plane error"); + return [2 /*return*/]; + } + newTask = this.controlPlaneRx.get(); + taskMap.set(newTask, "control_plane_rx"); + return [3 /*break*/, 14]; + case 10: + { + LOGGER.debug("Download task result received"); + this.downloadTasks.delete(done.promise); + this.handleDownloadResult(done.result); + return [3 /*break*/, 14]; + } + _b.label = 11; + case 11: + LOGGER.debug("Commit tick reached"); + return [4 /*yield*/, this.commitOffset()]; + case 12: + _b.sent(); + newTask = new aio_1.Interval(this.commitInterval).tick(); + taskMap.set(newTask, "commit_tick"); + return [3 /*break*/, 14]; + case 13: throw new Error("Unexpected task name: ".concat(taskName)); + case 14: return [4 /*yield*/, this.drainSlotStatus()]; + case 15: + _b.sent(); + return [3 /*break*/, 3]; + case 16: + LOGGER.debug("Fumarole runtime exiting"); + return [2 /*return*/]; + } + }); + }); + }; + return AsyncioFumeDragonsmouthRuntime; +}()); +exports.AsyncioFumeDragonsmouthRuntime = AsyncioFumeDragonsmouthRuntime; +var GrpcSlotDownloader = /** @class */ (function () { + function GrpcSlotDownloader(client) { + this.client = client; + } + GrpcSlotDownloader.prototype.runDownload = function (subscribeRequest, spec) { + return __awaiter(this, void 0, void 0, function () { + var downloadTask; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + downloadTask = new GrpcDownloadBlockTaskRun(spec.downloadRequest, this.client, { + accounts: subscribeRequest.accounts, + transactions: subscribeRequest.transactions, + entries: 
subscribeRequest.entry, + blocksMeta: subscribeRequest.blocksMeta, + }, spec.dragonsmouthOutlet); + LOGGER.debug("Running download task for slot ".concat(spec.downloadRequest.slot)); + return [4 /*yield*/, downloadTask.run()]; + case 1: return [2 /*return*/, _a.sent()]; + } + }); + }); + }; + return GrpcSlotDownloader; +}()); +exports.GrpcSlotDownloader = GrpcSlotDownloader; +var GrpcDownloadBlockTaskRun = /** @class */ (function () { + function GrpcDownloadBlockTaskRun(downloadRequest, client, filters, dragonsmouthOutlet) { + this.downloadRequest = downloadRequest; + this.client = client; + this.filters = filters; + this.dragonsmouthOutlet = dragonsmouthOutlet; + } + GrpcDownloadBlockTaskRun.prototype.mapTonicErrorCodeToDownloadBlockError = function (error) { + switch (error.code) { + case grpc_js_1.status.NOT_FOUND: + return { + kind: "BlockShardNotFound", + message: "Block shard not found", + }; + case grpc_js_1.status.UNAVAILABLE: + return { + kind: "Disconnected", + message: "Disconnected", + }; + case grpc_js_1.status.INTERNAL: + case grpc_js_1.status.ABORTED: + case grpc_js_1.status.DATA_LOSS: + case grpc_js_1.status.RESOURCE_EXHAUSTED: + case grpc_js_1.status.UNKNOWN: + case grpc_js_1.status.CANCELLED: + case grpc_js_1.status.DEADLINE_EXCEEDED: + return { + kind: "FailedDownload", + message: "Failed download", + }; + case grpc_js_1.status.INVALID_ARGUMENT: + throw new Error("Invalid argument"); + default: + return { + kind: "Fatal", + message: "Unknown error: ".concat(error.code), + }; + } + }; + GrpcDownloadBlockTaskRun.prototype.run = function () { + return __awaiter(this, void 0, void 0, function () { + var request, totalEventDownloaded_1, stream_1; + var _this = this; + return __generator(this, function (_a) { + request = { + blockchainId: this.downloadRequest.blockchainId, + blockUid: this.downloadRequest.blockUid, + shardIdx: 0, + blockFilters: this.filters, + }; + try { + LOGGER.debug("Requesting download for block 
".concat(Buffer.from(this.downloadRequest.blockUid).toString("hex"), " at slot ").concat(this.downloadRequest.slot)); + totalEventDownloaded_1 = 0; + stream_1 = this.client.downloadBlock(request); + return [2 /*return*/, new Promise(function (resolve, reject) { + stream_1.on("data", function (data) { return __awaiter(_this, void 0, void 0, function () { + var kind, _a, update, error_2; + return __generator(this, function (_b) { + switch (_b.label) { + case 0: + kind = Object.keys(data).find(function (k) { return data[k] !== undefined && k !== "response"; }); + if (!kind) + return [2 /*return*/]; + _a = kind; + switch (_a) { + case "update": return [3 /*break*/, 1]; + case "blockShardDownloadFinish": return [3 /*break*/, 6]; + } + return [3 /*break*/, 7]; + case 1: + update = data.update; + if (!update) + throw new Error("Update is null"); + totalEventDownloaded_1++; + _b.label = 2; + case 2: + _b.trys.push([2, 4, , 5]); + return [4 /*yield*/, this.dragonsmouthOutlet.put(update)]; + case 3: + _b.sent(); + return [3 /*break*/, 5]; + case 4: + error_2 = _b.sent(); + if (error_2.message === "Queue shutdown") { + LOGGER.error("Dragonsmouth outlet is disconnected"); + resolve({ + kind: "Err", + slot: this.downloadRequest.slot, + err: { + kind: "OutletDisconnected", + message: "Outlet disconnected", + }, + }); + } + return [3 /*break*/, 5]; + case 5: return [3 /*break*/, 8]; + case 6: + LOGGER.debug("Download finished for block ".concat(Buffer.from(this.downloadRequest.blockUid).toString("hex"), " at slot ").concat(this.downloadRequest.slot)); + resolve({ + kind: "Ok", + completed: { + slot: this.downloadRequest.slot, + blockUid: this.downloadRequest.blockUid, + shardIdx: 0, + totalEventDownloaded: totalEventDownloaded_1, + }, + }); + return [3 /*break*/, 8]; + case 7: + reject(new Error("Unexpected response kind: ".concat(kind))); + _b.label = 8; + case 8: return [2 /*return*/]; + } + }); + }); }); + stream_1.on("error", function (error) { + LOGGER.error("Download block 
error: ".concat(error)); + resolve({ + kind: "Err", + slot: _this.downloadRequest.slot, + err: _this.mapTonicErrorCodeToDownloadBlockError(error), + }); + }); + stream_1.on("end", function () { + resolve({ + kind: "Err", + slot: _this.downloadRequest.slot, + err: { + kind: "FailedDownload", + message: "Failed download", + }, + }); + }); + })]; + } + catch (error) { + LOGGER.error("Download block error: ".concat(error)); + return [2 /*return*/, { + kind: "Err", + slot: this.downloadRequest.slot, + err: this.mapTonicErrorCodeToDownloadBlockError(error), + }]; + } + return [2 /*return*/]; + }); + }); + }; + return GrpcDownloadBlockTaskRun; +}()); +exports.GrpcDownloadBlockTaskRun = GrpcDownloadBlockTaskRun; diff --git a/typescript-sdk/src/runtime/aio.ts b/typescript-sdk/src/runtime/aio.ts index 1b28c6c..a241918 100644 --- a/typescript-sdk/src/runtime/aio.ts +++ b/typescript-sdk/src/runtime/aio.ts @@ -6,6 +6,7 @@ import { FumeDownloadRequest, FumeOffset, FumeShardIdx, + CommitmentLevel, } from "./state-machine"; import { SubscribeRequest, @@ -129,7 +130,18 @@ export class AsyncioFumeDragonsmouthRuntime { case "pollHist": { const pollHist = controlResponse.pollHist!; LOGGER.debug(`Received poll history ${pollHist.events?.length} events`); - this.sm.queueBlockchainEvent(pollHist.events); + // Convert string slots to numbers and map commitment levels + const convertedEvents = (pollHist.events || []).map((event) => ({ + offset: event.offset, + slot: Number(event.slot), + parentSlot: event.parentSlot ? 
Number(event.parentSlot) : undefined, + commitmentLevel: event.commitmentLevel as unknown as CommitmentLevel, + deadError: event.deadError, + blockchainId: event.blockchainId, + blockUid: event.blockUid, + numShards: Number(event.numShards), + })); + this.sm.queueBlockchainEvent(convertedEvents); break; } case "commitOffset": { @@ -157,7 +169,7 @@ export class AsyncioFumeDragonsmouthRuntime { return this.subscribeRequest.commitment || 0; } - private scheduleDownloadTaskIfAny(): void { + private async scheduleDownloadTaskIfAny(): Promise { while (true) { LOGGER.debug("Checking for download tasks to schedule"); if (this.downloadTasks.size >= this.maxConcurrentDownload) { @@ -165,7 +177,9 @@ export class AsyncioFumeDragonsmouthRuntime { } LOGGER.debug("Popping slot to download"); - const downloadRequest = this.sm.popSlotToDownload(this.commitmentLevel()); + const downloadRequest = await this.sm.popSlotToDownload( + this.commitmentLevel() + ); if (!downloadRequest) { LOGGER.debug("No download request available"); break; @@ -328,9 +342,11 @@ export class AsyncioFumeDragonsmouthRuntime { await this.pollHistoryIfNeeded(); LOGGER.debug("Scheduling download tasks if any"); - this.scheduleDownloadTaskIfAny(); + await this.scheduleDownloadTaskIfAny(); - for (const [task] of this.downloadTasks) { + // Convert iterator to array to avoid --downlevelIteration requirement + const downloadTasks = Array.from(this.downloadTasks.keys()); + for (const task of downloadTasks) { taskMap.set(task, "download_task"); } diff --git a/typescript-sdk/src/runtime/queue.js b/typescript-sdk/src/runtime/queue.js new file mode 100644 index 0000000..e35acb5 --- /dev/null +++ b/typescript-sdk/src/runtime/queue.js @@ -0,0 +1,125 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __generator = (this && this.__generator) || function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype); + return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Queue = void 0; +var Queue = /** @class */ (function () { + function Queue(maxSize) { + if (maxSize === void 0) { maxSize = Infinity; } + this.items = []; + this.closed = false; + this.maxSize = maxSize; + } + Queue.prototype.put = function (item) { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + if (this.closed) { + throw new Error("Queue shutdown"); + } + if (this.items.length >= this.maxSize) { + throw new Error("Queue full"); + } + this.items.push(item); + return [2 /*return*/]; + }); + }); + }; + Queue.prototype.get = function () { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + if (this.closed && this.items.length === 0) { + throw new Error("Queue shutdown"); + } + _a.label = 1; + case 1: + if (!(this.items.length === 0)) return [3 /*break*/, 3]; + return [4 /*yield*/, new Promise(function (resolve) { return 
setTimeout(resolve, 10); })]; + case 2: + _a.sent(); + return [3 /*break*/, 1]; + case 3: return [2 /*return*/, this.items.shift()]; + } + }); + }); + }; + Queue.prototype.isEmpty = function () { + return this.items.length === 0; + }; + Queue.prototype.isFull = function () { + return this.items.length >= this.maxSize; + }; + Queue.prototype.size = function () { + return this.items.length; + }; + Queue.prototype.close = function () { + this.closed = true; + }; + Queue.prototype[Symbol.asyncIterator] = function () { + var _this = this; + return { + next: function () { return __awaiter(_this, void 0, void 0, function () { + var value, error_1; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + if (this.closed && this.isEmpty()) { + return [2 /*return*/, { done: true, value: undefined }]; + } + _a.label = 1; + case 1: + _a.trys.push([1, 3, , 4]); + return [4 /*yield*/, this.get()]; + case 2: + value = _a.sent(); + return [2 /*return*/, { done: false, value: value }]; + case 3: + error_1 = _a.sent(); + if (error_1.message === "Queue shutdown") { + return [2 /*return*/, { done: true, value: undefined }]; + } + throw error_1; + case 4: return [2 /*return*/]; + } + }); + }); }, + }; + }; + return Queue; +}()); +exports.Queue = Queue; diff --git a/typescript-sdk/src/runtime/state-machine.js b/typescript-sdk/src/runtime/state-machine.js new file mode 100644 index 0000000..22be87d --- /dev/null +++ b/typescript-sdk/src/runtime/state-machine.js @@ -0,0 +1,369 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __generator = (this && this.__generator) || function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype); + return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.FumaroleSM = exports.SlotDownloadState = exports.SlotDownloadProgress = exports.SlotCommitmentProgression = exports.FumeSlotStatus = exports.FumeDownloadRequest = exports.CommitmentLevel = exports.DEFAULT_SLOT_MEMORY_RETENTION = void 0; +var queue_1 = require("./queue"); +// Constants +exports.DEFAULT_SLOT_MEMORY_RETENTION = 10000; +// Solana commitment levels +var CommitmentLevel; +(function (CommitmentLevel) { + CommitmentLevel[CommitmentLevel["PROCESSED"] = 0] = "PROCESSED"; + CommitmentLevel[CommitmentLevel["CONFIRMED"] = 1] = "CONFIRMED"; + CommitmentLevel[CommitmentLevel["FINALIZED"] = 2] = "FINALIZED"; +})(CommitmentLevel || (exports.CommitmentLevel = CommitmentLevel = {})); +// Data structures +var FumeDownloadRequest = /** @class */ (function () { + function FumeDownloadRequest(slot, blockchainId, blockUid, numShards, commitmentLevel) { + this.slot = slot; + this.blockchainId = blockchainId; + this.blockUid = blockUid; + this.numShards = numShards; + 
this.commitmentLevel = commitmentLevel; + } + return FumeDownloadRequest; +}()); +exports.FumeDownloadRequest = FumeDownloadRequest; +var FumeSlotStatus = /** @class */ (function () { + function FumeSlotStatus(sessionSequence, offset, slot, parentSlot, commitmentLevel, deadError) { + this.sessionSequence = sessionSequence; + this.offset = offset; + this.slot = slot; + this.parentSlot = parentSlot; + this.commitmentLevel = commitmentLevel; + this.deadError = deadError; + } + return FumeSlotStatus; +}()); +exports.FumeSlotStatus = FumeSlotStatus; +var SlotCommitmentProgression = /** @class */ (function () { + function SlotCommitmentProgression() { + this.processedCommitmentLevels = new Set(); + } + SlotCommitmentProgression.prototype.hasProcessedCommitment = function (level) { + return this.processedCommitmentLevels.has(level); + }; + SlotCommitmentProgression.prototype.addProcessedCommitment = function (level) { + this.processedCommitmentLevels.add(level); + }; + return SlotCommitmentProgression; +}()); +exports.SlotCommitmentProgression = SlotCommitmentProgression; +var SlotDownloadProgress = /** @class */ (function () { + function SlotDownloadProgress(numShards) { + this.numShards = numShards; + this.shardRemaining = new Array(numShards).fill(false); + } + SlotDownloadProgress.prototype.doProgress = function (shardIdx) { + this.shardRemaining[shardIdx % this.numShards] = true; + return this.shardRemaining.every(function (x) { return x; }) + ? 
SlotDownloadState.Done + : SlotDownloadState.Downloading; + }; + return SlotDownloadProgress; +}()); +exports.SlotDownloadProgress = SlotDownloadProgress; +var SlotDownloadState; +(function (SlotDownloadState) { + SlotDownloadState["Downloading"] = "Downloading"; + SlotDownloadState["Done"] = "Done"; +})(SlotDownloadState || (exports.SlotDownloadState = SlotDownloadState = {})); +var FumaroleSM = /** @class */ (function () { + function FumaroleSM(lastCommittedOffset, slotMemoryRetention) { + this.slotMemoryRetention = slotMemoryRetention; + this.slotCommitmentProgression = new Map(); + this.downloadedSlot = new Set(); + this.inflightSlotShardDownload = new Map(); + this.blockedSlotStatusUpdate = new Map(); + this.slotStatusUpdateQueue = new queue_1.Queue(); + this.processedOffset = []; // Min-heap for (sequence, offset) + this.maxSlotDetected = 0; + this.unprocessedBlockchainEvent = new queue_1.Queue(); + this.sequence = 1; + this.lastProcessedFumeSequence = 0; + this.sequenceToOffset = new Map(); + this._lastCommittedOffset = lastCommittedOffset; + this._committableOffset = lastCommittedOffset; + } + Object.defineProperty(FumaroleSM.prototype, "lastCommittedOffset", { + get: function () { + return this._lastCommittedOffset; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(FumaroleSM.prototype, "committableOffset", { + get: function () { + return this._committableOffset; + }, + enumerable: false, + configurable: true + }); + FumaroleSM.prototype.updateCommittedOffset = function (offset) { + if (BigInt(offset) < BigInt(this._lastCommittedOffset)) { + throw new Error("Offset must be >= last committed offset"); + } + this._lastCommittedOffset = offset; + }; + FumaroleSM.prototype.nextSequence = function () { + var ret = this.sequence; + this.sequence += 1; + return ret; + }; + FumaroleSM.prototype.gc = function () { + while (this.downloadedSlot.size > this.slotMemoryRetention) { + // Get the first slot (oldest) from the set + var slot = 
this.downloadedSlot.values().next().value; + if (!slot) + break; + this.downloadedSlot.delete(slot); + this.slotCommitmentProgression.delete(slot); + this.inflightSlotShardDownload.delete(slot); + this.blockedSlotStatusUpdate.delete(slot); + } + }; + FumaroleSM.prototype.queueBlockchainEvent = function (events) { + return __awaiter(this, void 0, void 0, function () { + var _i, events_1, event_1, sequence, fumeStatus, blockedQueue; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + _i = 0, events_1 = events; + _a.label = 1; + case 1: + if (!(_i < events_1.length)) return [3 /*break*/, 9]; + event_1 = events_1[_i]; + if (BigInt(event_1.offset) < BigInt(this._lastCommittedOffset)) { + return [3 /*break*/, 8]; + } + if (event_1.slot > this.maxSlotDetected) { + this.maxSlotDetected = event_1.slot; + } + sequence = this.nextSequence(); + this.sequenceToOffset.set(sequence, event_1.offset); + if (!this.downloadedSlot.has(event_1.slot)) return [3 /*break*/, 6]; + fumeStatus = new FumeSlotStatus(sequence, event_1.offset, event_1.slot, event_1.parentSlot, event_1.commitmentLevel, event_1.deadError); + if (!this.inflightSlotShardDownload.has(event_1.slot)) return [3 /*break*/, 3]; + blockedQueue = this.blockedSlotStatusUpdate.get(event_1.slot); + if (!blockedQueue) { + blockedQueue = new queue_1.Queue(); + this.blockedSlotStatusUpdate.set(event_1.slot, blockedQueue); + } + return [4 /*yield*/, blockedQueue.put(fumeStatus)]; + case 2: + _a.sent(); + return [3 /*break*/, 5]; + case 3: return [4 /*yield*/, this.slotStatusUpdateQueue.put(fumeStatus)]; + case 4: + _a.sent(); + _a.label = 5; + case 5: return [3 /*break*/, 8]; + case 6: return [4 /*yield*/, this.unprocessedBlockchainEvent.put([sequence, event_1])]; + case 7: + _a.sent(); + _a.label = 8; + case 8: + _i++; + return [3 /*break*/, 1]; + case 9: return [2 /*return*/]; + } + }); + }); + }; + FumaroleSM.prototype.makeSlotDownloadProgress = function (slot, shardIdx) { + return __awaiter(this, void 
0, void 0, function () { + var downloadProgress, downloadState, blockedStatuses, status_1; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + downloadProgress = this.inflightSlotShardDownload.get(slot); + if (!downloadProgress) { + throw new Error("Slot not in download"); + } + downloadState = downloadProgress.doProgress(shardIdx); + if (!(downloadState === SlotDownloadState.Done)) return [3 /*break*/, 6]; + this.inflightSlotShardDownload.delete(slot); + this.downloadedSlot.add(slot); + if (!this.slotCommitmentProgression.has(slot)) { + this.slotCommitmentProgression.set(slot, new SlotCommitmentProgression()); + } + blockedStatuses = this.blockedSlotStatusUpdate.get(slot); + if (!blockedStatuses) return [3 /*break*/, 6]; + _a.label = 1; + case 1: + if (!!blockedStatuses.isEmpty()) return [3 /*break*/, 5]; + return [4 /*yield*/, blockedStatuses.get()]; + case 2: + status_1 = _a.sent(); + if (!status_1) return [3 /*break*/, 4]; + return [4 /*yield*/, this.slotStatusUpdateQueue.put(status_1)]; + case 3: + _a.sent(); + _a.label = 4; + case 4: return [3 /*break*/, 1]; + case 5: + this.blockedSlotStatusUpdate.delete(slot); + _a.label = 6; + case 6: return [2 /*return*/, downloadState]; + } + }); + }); + }; + FumaroleSM.prototype.popNextSlotStatus = function () { + return __awaiter(this, void 0, void 0, function () { + var slotStatus, commitmentHistory; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + if (!!this.slotStatusUpdateQueue.isEmpty()) return [3 /*break*/, 2]; + return [4 /*yield*/, this.slotStatusUpdateQueue.get()]; + case 1: + slotStatus = _a.sent(); + if (!slotStatus) + return [3 /*break*/, 0]; + commitmentHistory = this.slotCommitmentProgression.get(slotStatus.slot); + if (commitmentHistory && + !commitmentHistory.hasProcessedCommitment(slotStatus.commitmentLevel)) { + commitmentHistory.addProcessedCommitment(slotStatus.commitmentLevel); + return [2 /*return*/, slotStatus]; + } + else if 
(!commitmentHistory) { + throw new Error("Slot status should not be available here"); + } + return [3 /*break*/, 0]; + case 2: return [2 /*return*/, null]; + } + }); + }); + }; + FumaroleSM.prototype.makeSureSlotCommitmentProgressionExists = function (slot) { + var progression = this.slotCommitmentProgression.get(slot); + if (!progression) { + progression = new SlotCommitmentProgression(); + this.slotCommitmentProgression.set(slot, progression); + } + return progression; + }; + FumaroleSM.prototype.popSlotToDownload = function () { + return __awaiter(this, arguments, void 0, function (commitment) { + var eventPair, sessionSequence, blockchainEvent, eventCl, progression, blockchainId, blockUid, downloadRequest, downloadProgress, blockedQueue; + if (commitment === void 0) { commitment = CommitmentLevel.PROCESSED; } + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + if (!!this.unprocessedBlockchainEvent.isEmpty()) return [3 /*break*/, 8]; + return [4 /*yield*/, this.unprocessedBlockchainEvent.get()]; + case 1: + eventPair = _a.sent(); + if (!eventPair) + return [3 /*break*/, 0]; + sessionSequence = eventPair[0], blockchainEvent = eventPair[1]; + eventCl = blockchainEvent.commitmentLevel; + if (!(eventCl < commitment)) return [3 /*break*/, 3]; + return [4 /*yield*/, this.slotStatusUpdateQueue.put(new FumeSlotStatus(sessionSequence, blockchainEvent.offset, blockchainEvent.slot, blockchainEvent.parentSlot, eventCl, blockchainEvent.deadError))]; + case 2: + _a.sent(); + this.makeSureSlotCommitmentProgressionExists(blockchainEvent.slot); + return [3 /*break*/, 0]; + case 3: + if (!this.downloadedSlot.has(blockchainEvent.slot)) return [3 /*break*/, 5]; + this.makeSureSlotCommitmentProgressionExists(blockchainEvent.slot); + progression = this.slotCommitmentProgression.get(blockchainEvent.slot); + if (progression && progression.hasProcessedCommitment(eventCl)) { + this.markEventAsProcessed(sessionSequence); + return [3 /*break*/, 0]; + } + return [4 
/*yield*/, this.slotStatusUpdateQueue.put(new FumeSlotStatus(sessionSequence, blockchainEvent.offset, blockchainEvent.slot, blockchainEvent.parentSlot, eventCl, blockchainEvent.deadError))]; + case 4: + _a.sent(); + return [3 /*break*/, 7]; + case 5: + blockchainId = new Uint8Array(blockchainEvent.blockchainId); + blockUid = new Uint8Array(blockchainEvent.blockUid); + if (!!this.inflightSlotShardDownload.has(blockchainEvent.slot)) return [3 /*break*/, 7]; + downloadRequest = new FumeDownloadRequest(blockchainEvent.slot, blockchainId, blockUid, blockchainEvent.numShards, eventCl); + downloadProgress = new SlotDownloadProgress(blockchainEvent.numShards); + this.inflightSlotShardDownload.set(blockchainEvent.slot, downloadProgress); + blockedQueue = this.blockedSlotStatusUpdate.get(blockchainEvent.slot); + if (!blockedQueue) { + blockedQueue = new queue_1.Queue(); + this.blockedSlotStatusUpdate.set(blockchainEvent.slot, blockedQueue); + } + return [4 /*yield*/, blockedQueue.put(new FumeSlotStatus(sessionSequence, blockchainEvent.offset, blockchainEvent.slot, blockchainEvent.parentSlot, eventCl, blockchainEvent.deadError))]; + case 6: + _a.sent(); + return [2 /*return*/, downloadRequest]; + case 7: return [3 /*break*/, 0]; + case 8: return [2 /*return*/, null]; + } + }); + }); + }; + FumaroleSM.prototype.markEventAsProcessed = function (eventSeqNumber) { + var fumeOffset = this.sequenceToOffset.get(eventSeqNumber); + if (!fumeOffset) { + throw new Error("Event sequence number not found"); + } + this.sequenceToOffset.delete(eventSeqNumber); + // Use negative values for the min-heap (to simulate max-heap behavior) + this.processedOffset.push([-eventSeqNumber, fumeOffset]); + this.processedOffset.sort(function (a, b) { return a[0] - b[0]; }); // Keep sorted as a min-heap + while (this.processedOffset.length > 0) { + var _a = this.processedOffset[0], seq = _a[0], offset = _a[1]; + var positiveSeq = -seq; // Convert back to positive + if (positiveSeq !== 
this.lastProcessedFumeSequence + 1) { + break; + } + this.processedOffset.shift(); + this._committableOffset = offset; + this.lastProcessedFumeSequence = positiveSeq; + } + }; + FumaroleSM.prototype.slotStatusUpdateQueueLen = function () { + return this.slotStatusUpdateQueue.size(); + }; + FumaroleSM.prototype.processedOffsetQueueLen = function () { + return this.processedOffset.length; + }; + FumaroleSM.prototype.needNewBlockchainEvents = function () { + return (this.slotStatusUpdateQueue.isEmpty() && + this.blockedSlotStatusUpdate.size === 0); + }; + return FumaroleSM; +}()); +exports.FumaroleSM = FumaroleSM; diff --git a/typescript-sdk/src/runtime/state-machine.ts b/typescript-sdk/src/runtime/state-machine.ts index 807017c..5f6002e 100644 --- a/typescript-sdk/src/runtime/state-machine.ts +++ b/typescript-sdk/src/runtime/state-machine.ts @@ -1,23 +1,116 @@ -export type FumeShardIdx = number; -export type FumeOffset = string; +import { Queue as Deque } from "./queue"; -export interface FumeDownloadRequest { +// Constants +export const DEFAULT_SLOT_MEMORY_RETENTION = 10000; + +// Solana commitment levels +export enum CommitmentLevel { + PROCESSED = 0, + CONFIRMED = 1, + FINALIZED = 2, +} + +// Interface matching the gRPC BlockchainEvent type +export interface BlockchainEvent { + offset: string; slot: number; + parentSlot?: number; + commitmentLevel: CommitmentLevel; + deadError?: string; blockchainId: Uint8Array; blockUid: Uint8Array; + numShards: number; +} + +// Type aliases +export type FumeBlockchainId = Uint8Array; // Equivalent to [u8; 16] +export type FumeBlockUID = Uint8Array; // Equivalent to [u8; 16] +export type FumeNumShards = number; // Equivalent to u32 +export type FumeShardIdx = number; // Equivalent to u32 +export type FumeOffset = string; // Equivalent to i64 as string for large numbers +export type FumeSessionSequence = number; // Equivalent to u64 +export type Slot = number; // From solana_sdk::clock::Slot + +// Data structures +export class 
FumeDownloadRequest { + constructor( + public readonly slot: Slot, + public readonly blockchainId: FumeBlockchainId, + public readonly blockUid: FumeBlockUID, + public readonly numShards: FumeNumShards, + public readonly commitmentLevel: CommitmentLevel + ) {} +} + +export class FumeSlotStatus { + constructor( + public readonly sessionSequence: FumeSessionSequence, + public readonly offset: FumeOffset, + public readonly slot: Slot, + public readonly parentSlot: Slot | undefined, + public readonly commitmentLevel: CommitmentLevel, + public readonly deadError: string | undefined + ) {} +} + +export class SlotCommitmentProgression { + private processedCommitmentLevels = new Set(); + + public hasProcessedCommitment(level: CommitmentLevel): boolean { + return this.processedCommitmentLevels.has(level); + } + + public addProcessedCommitment(level: CommitmentLevel): void { + this.processedCommitmentLevels.add(level); + } +} + +export class SlotDownloadProgress { + private shardRemaining: boolean[]; + + constructor(public readonly numShards: FumeNumShards) { + this.shardRemaining = new Array(numShards).fill(false); + } + + public doProgress(shardIdx: FumeShardIdx): SlotDownloadState { + this.shardRemaining[shardIdx % this.numShards] = true; + return this.shardRemaining.every((x) => x) + ? 
SlotDownloadState.Done + : SlotDownloadState.Downloading; + } +} + +export enum SlotDownloadState { + Downloading = "Downloading", + Done = "Done", } export class FumaroleSM { - private _lastCommittedOffset: FumeOffset; + private slotCommitmentProgression = new Map< + Slot, + SlotCommitmentProgression + >(); + private downloadedSlot = new Set(); + private inflightSlotShardDownload = new Map(); + private blockedSlotStatusUpdate = new Map>(); + private slotStatusUpdateQueue = new Deque(); + private processedOffset: [number, string][] = []; // Min-heap for (sequence, offset) + private maxSlotDetected = 0; + private unprocessedBlockchainEvent = new Deque< + [FumeSessionSequence, BlockchainEvent] + >(); + private sequence = 1; + private lastProcessedFumeSequence = 0; + private sequenceToOffset = new Map(); private _committableOffset: FumeOffset; - private _slotStatusQueue: any[]; - private _needNewEvents: boolean; + private _lastCommittedOffset: FumeOffset; - constructor() { - this._lastCommittedOffset = "0"; - this._committableOffset = "0"; - this._slotStatusQueue = []; - this._needNewEvents = true; + constructor( + lastCommittedOffset: FumeOffset, + private readonly slotMemoryRetention: number + ) { + this._lastCommittedOffset = lastCommittedOffset; + this._committableOffset = lastCommittedOffset; } get lastCommittedOffset(): FumeOffset { @@ -28,37 +121,270 @@ export class FumaroleSM { return this._committableOffset; } - needNewBlockchainEvents(): boolean { - return this._needNewEvents; + public updateCommittedOffset(offset: FumeOffset): void { + if (BigInt(offset) < BigInt(this._lastCommittedOffset)) { + throw new Error("Offset must be >= last committed offset"); + } + this._lastCommittedOffset = offset; } - updateCommittedOffset(offset: FumeOffset): void { - this._lastCommittedOffset = offset; + private nextSequence(): number { + const ret = this.sequence; + this.sequence += 1; + return ret; + } + + public gc(): void { + while (this.downloadedSlot.size > 
this.slotMemoryRetention) { + // Get the first slot (oldest) from the set + const slot = this.downloadedSlot.values().next().value; + if (!slot) break; + + this.downloadedSlot.delete(slot); + this.slotCommitmentProgression.delete(slot); + this.inflightSlotShardDownload.delete(slot); + this.blockedSlotStatusUpdate.delete(slot); + } } - queueBlockchainEvent(events: any[]): void { - // Implementation would go here - this._needNewEvents = false; + public async queueBlockchainEvent(events: BlockchainEvent[]): Promise { + for (const event of events) { + if (BigInt(event.offset) < BigInt(this._lastCommittedOffset)) { + continue; + } + + if (event.slot > this.maxSlotDetected) { + this.maxSlotDetected = event.slot; + } + + const sequence = this.nextSequence(); + this.sequenceToOffset.set(sequence, event.offset); + + if (this.downloadedSlot.has(event.slot)) { + const fumeStatus = new FumeSlotStatus( + sequence, + event.offset, + event.slot, + event.parentSlot, + event.commitmentLevel, + event.deadError + ); + + if (this.inflightSlotShardDownload.has(event.slot)) { + let blockedQueue = this.blockedSlotStatusUpdate.get(event.slot); + if (!blockedQueue) { + blockedQueue = new Deque(); + this.blockedSlotStatusUpdate.set(event.slot, blockedQueue); + } + await blockedQueue.put(fumeStatus); + } else { + await this.slotStatusUpdateQueue.put(fumeStatus); + } + } else { + await this.unprocessedBlockchainEvent.put([sequence, event]); + } + } } - gc(): void { - // Implementation of garbage collection + public async makeSlotDownloadProgress( + slot: Slot, + shardIdx: FumeShardIdx + ): Promise { + const downloadProgress = this.inflightSlotShardDownload.get(slot); + if (!downloadProgress) { + throw new Error("Slot not in download"); + } + + const downloadState = downloadProgress.doProgress(shardIdx); + + if (downloadState === SlotDownloadState.Done) { + this.inflightSlotShardDownload.delete(slot); + this.downloadedSlot.add(slot); + + if (!this.slotCommitmentProgression.has(slot)) { + 
this.slotCommitmentProgression.set( + slot, + new SlotCommitmentProgression() + ); + } + + const blockedStatuses = this.blockedSlotStatusUpdate.get(slot); + if (blockedStatuses) { + // Move all blocked statuses to the main queue + while (!blockedStatuses.isEmpty()) { + const status = await blockedStatuses.get(); + if (status) await this.slotStatusUpdateQueue.put(status); + } + this.blockedSlotStatusUpdate.delete(slot); + } + } + + return downloadState; } - popSlotToDownload(commitment: number): FumeDownloadRequest | null { - // Implementation would go here + public async popNextSlotStatus(): Promise { + while (!this.slotStatusUpdateQueue.isEmpty()) { + const slotStatus = await this.slotStatusUpdateQueue.get(); + if (!slotStatus) continue; + + const commitmentHistory = this.slotCommitmentProgression.get( + slotStatus.slot + ); + if ( + commitmentHistory && + !commitmentHistory.hasProcessedCommitment(slotStatus.commitmentLevel) + ) { + commitmentHistory.addProcessedCommitment(slotStatus.commitmentLevel); + return slotStatus; + } else if (!commitmentHistory) { + throw new Error("Slot status should not be available here"); + } + } return null; } - makeSlotDownloadProgress(slot: number, shardIdx: FumeShardIdx): void { - // Implementation would go here + private makeSureSlotCommitmentProgressionExists( + slot: Slot + ): SlotCommitmentProgression { + let progression = this.slotCommitmentProgression.get(slot); + if (!progression) { + progression = new SlotCommitmentProgression(); + this.slotCommitmentProgression.set(slot, progression); + } + return progression; + } + + public async popSlotToDownload( + commitment = CommitmentLevel.PROCESSED + ): Promise { + while (!this.unprocessedBlockchainEvent.isEmpty()) { + const eventPair = await this.unprocessedBlockchainEvent.get(); + if (!eventPair) continue; + + const [sessionSequence, blockchainEvent] = eventPair; + const eventCl = blockchainEvent.commitmentLevel; + + if (eventCl < commitment) { + await 
this.slotStatusUpdateQueue.put( + new FumeSlotStatus( + sessionSequence, + blockchainEvent.offset, + blockchainEvent.slot, + blockchainEvent.parentSlot, + eventCl, + blockchainEvent.deadError + ) + ); + this.makeSureSlotCommitmentProgressionExists(blockchainEvent.slot); + continue; + } + + if (this.downloadedSlot.has(blockchainEvent.slot)) { + this.makeSureSlotCommitmentProgressionExists(blockchainEvent.slot); + const progression = this.slotCommitmentProgression.get( + blockchainEvent.slot + ); + if (progression && progression.hasProcessedCommitment(eventCl)) { + this.markEventAsProcessed(sessionSequence); + continue; + } + + await this.slotStatusUpdateQueue.put( + new FumeSlotStatus( + sessionSequence, + blockchainEvent.offset, + blockchainEvent.slot, + blockchainEvent.parentSlot, + eventCl, + blockchainEvent.deadError + ) + ); + } else { + const blockchainId = new Uint8Array(blockchainEvent.blockchainId); + const blockUid = new Uint8Array(blockchainEvent.blockUid); + if (!this.inflightSlotShardDownload.has(blockchainEvent.slot)) { + const downloadRequest = new FumeDownloadRequest( + blockchainEvent.slot, + blockchainId, + blockUid, + blockchainEvent.numShards, + eventCl + ); + + const downloadProgress = new SlotDownloadProgress( + blockchainEvent.numShards + ); + this.inflightSlotShardDownload.set( + blockchainEvent.slot, + downloadProgress + ); + + let blockedQueue = this.blockedSlotStatusUpdate.get( + blockchainEvent.slot + ); + if (!blockedQueue) { + blockedQueue = new Deque(); + this.blockedSlotStatusUpdate.set( + blockchainEvent.slot, + blockedQueue + ); + } + + await blockedQueue.put( + new FumeSlotStatus( + sessionSequence, + blockchainEvent.offset, + blockchainEvent.slot, + blockchainEvent.parentSlot, + eventCl, + blockchainEvent.deadError + ) + ); + + return downloadRequest; + } + } + } + return null; + } + + public markEventAsProcessed(eventSeqNumber: FumeSessionSequence): void { + const fumeOffset = this.sequenceToOffset.get(eventSeqNumber); + if 
(!fumeOffset) { + throw new Error("Event sequence number not found"); + } + this.sequenceToOffset.delete(eventSeqNumber); + + // Use negative values for the min-heap (to simulate max-heap behavior) + this.processedOffset.push([-eventSeqNumber, fumeOffset]); + this.processedOffset.sort((a, b) => a[0] - b[0]); // Keep sorted as a min-heap + + while (this.processedOffset.length > 0) { + const [seq, offset] = this.processedOffset[0]; + const positiveSeq = -seq; // Convert back to positive + + if (positiveSeq !== this.lastProcessedFumeSequence + 1) { + break; + } + + this.processedOffset.shift(); + this._committableOffset = offset; + this.lastProcessedFumeSequence = positiveSeq; + } + } + + public slotStatusUpdateQueueLen(): number { + return this.slotStatusUpdateQueue.size(); } - popNextSlotStatus(): any | null { - return this._slotStatusQueue.shift() || null; + public processedOffsetQueueLen(): number { + return this.processedOffset.length; } - markEventAsProcessed(sessionSequence: number): void { - // Implementation would go here + public needNewBlockchainEvents(): boolean { + return ( + this.slotStatusUpdateQueue.isEmpty() && + this.blockedSlotStatusUpdate.size === 0 + ); } } diff --git a/typescript-sdk/src/types.js b/typescript-sdk/src/types.js new file mode 100644 index 0000000..43a657a --- /dev/null +++ b/typescript-sdk/src/types.js @@ -0,0 +1,111 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __generator = (this && this.__generator) || function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype); + return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? 
op[1] : void 0, done: true }; + } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AsyncQueue = exports.DEFAULT_SLOT_MEMORY_RETENTION = exports.DEFAULT_GC_INTERVAL = exports.DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP = exports.DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT = exports.DEFAULT_COMMIT_INTERVAL = exports.DEFAULT_DRAGONSMOUTH_CAPACITY = void 0; +// Constants +exports.DEFAULT_DRAGONSMOUTH_CAPACITY = 10000; +exports.DEFAULT_COMMIT_INTERVAL = 5.0; // seconds +exports.DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT = 3; +exports.DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP = 10; +exports.DEFAULT_GC_INTERVAL = 60; // seconds +exports.DEFAULT_SLOT_MEMORY_RETENTION = 300; // seconds +// Generic async queue interface to mimic Python's asyncio.Queue +var AsyncQueue = /** @class */ (function () { + function AsyncQueue(maxSize) { + if (maxSize === void 0) { maxSize = 0; } + this.queue = []; + this.resolvers = []; + this.full_resolvers = []; + this.closed = false; + this.maxSize = maxSize; + } + AsyncQueue.prototype.put = function (item) { + return __awaiter(this, void 0, void 0, function () { + var resolver; + var _this = this; + return __generator(this, function (_a) { + if (this.closed) { + throw new Error("Queue is closed"); + } + if (this.maxSize > 0 && this.queue.length >= this.maxSize) { + return [2 /*return*/, new Promise(function (resolve) { + _this.full_resolvers.push(resolve); + })]; + } + this.queue.push(item); + resolver = this.resolvers.shift(); + if (resolver) { + resolver(this.queue.shift()); + } + return [2 /*return*/]; + }); + }); + }; + AsyncQueue.prototype.get = function () { + return __awaiter(this, void 0, void 0, function () { + var item, full_resolver; + var _this = this; + return __generator(this, function (_a) { + if (this.closed && this.queue.length === 0) { + throw new Error("Queue is closed"); + } + if (this.queue.length === 0) { + return [2 /*return*/, new Promise(function (resolve) { + _this.resolvers.push(resolve); + })]; + } + item = 
this.queue.shift(); + full_resolver = this.full_resolvers.shift(); + if (full_resolver) { + full_resolver(); + } + return [2 /*return*/, item]; + }); + }); + }; + AsyncQueue.prototype.close = function () { + this.closed = true; + // Resolve all pending gets with an error + this.resolvers.forEach(function (resolve) { + resolve(undefined); + }); + this.resolvers = []; + }; + return AsyncQueue; +}()); +exports.AsyncQueue = AsyncQueue; diff --git a/typescript-sdk/src/utils/aio.js b/typescript-sdk/src/utils/aio.js new file mode 100644 index 0000000..bf9d66a --- /dev/null +++ b/typescript-sdk/src/utils/aio.js @@ -0,0 +1,126 @@ +"use strict"; +/** + * Asynchronous utilities for TypeScript + */ +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __generator = (this && this.__generator) || function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? 
Iterator : Object).prototype); + return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +}; +var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.logger = exports.Interval = void 0; +exports.never = never; +/** + * Create a forever pending promise. This promise is not resolved and will never be resolved. 
+ * This is useful for testing purposes. + * @returns A promise that never resolves + */ +function never() { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + return [2 /*return*/, new Promise(function () { + // This promise intentionally never resolves + })]; + }); + }); +} +/** + * A class that represents an interval that can be used to run async operations periodically + */ +var Interval = /** @class */ (function () { + /** + * Create an interval that will run every `interval` seconds. + * @param interval The interval in seconds + */ + function Interval(interval) { + this.interval = interval; + } + /** + * Wait for the interval duration + * @returns A promise that resolves after the interval duration + */ + Interval.prototype.tick = function () { + return __awaiter(this, void 0, void 0, function () { + var _this = this; + return __generator(this, function (_a) { + // Convert seconds to milliseconds for setTimeout + return [2 /*return*/, new Promise(function (resolve) { return setTimeout(resolve, _this.interval * 1000); })]; + }); + }); + }; + return Interval; +}()); +exports.Interval = Interval; +/** + * Helper functions and utilities for logging + */ +exports.logger = { + debug: function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return console.debug.apply(console, __spreadArray(["[DEBUG]"], args, false)); + }, + info: function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return console.info.apply(console, __spreadArray(["[INFO]"], args, false)); + }, + warn: function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return console.warn.apply(console, __spreadArray(["[WARN]"], args, false)); + }, + error: function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return 
console.error.apply(console, __spreadArray(["[ERROR]"], args, false)); + }, +}; diff --git a/typescript-sdk/tsconfig.cjs.json b/typescript-sdk/tsconfig.cjs.json new file mode 100644 index 0000000..e69de29 diff --git a/typescript-sdk/tsconfig.esm.json b/typescript-sdk/tsconfig.esm.json new file mode 100644 index 0000000..e69de29 diff --git a/typescript-sdk/tsconfig.json b/typescript-sdk/tsconfig.json new file mode 100644 index 0000000..e69de29 From b27b4b914b4c07f5a4c45fec8a912c7cf03cd4c3 Mon Sep 17 00:00:00 2001 From: Wilfred Almeida <60785452+WilfredAlmeida@users.noreply.github.com> Date: Mon, 11 Aug 2025 12:50:13 +0000 Subject: [PATCH 45/56] refactor: move .js files to dist dir Signed-off-by: GitHub --- typescript-sdk/package.json | 2 + typescript-sdk/src/config/config.js | 27 - typescript-sdk/src/connectivity.js | 170 - typescript-sdk/src/grpc/connectivity.js | 0 typescript-sdk/src/grpc/fumarole.js | 2927 ----------- typescript-sdk/src/grpc/geyser.js | 4457 ----------------- .../src/grpc/google/protobuf/timestamp.js | 84 - typescript-sdk/src/grpc/solana-storage.js | 2055 -------- typescript-sdk/src/index.js | 448 -- typescript-sdk/src/index.ts | 2 +- typescript-sdk/src/runtime/aio.js | 593 --- typescript-sdk/src/runtime/aio.ts | 11 +- typescript-sdk/src/runtime/queue.js | 125 - typescript-sdk/src/runtime/queue.ts | 2 +- typescript-sdk/src/runtime/state-machine.js | 369 -- typescript-sdk/src/types.js | 111 - typescript-sdk/src/utils/aio.js | 126 - typescript-sdk/tsconfig.cjs.json | 7 + typescript-sdk/tsconfig.esm.json | 6 + typescript-sdk/tsconfig.json | 20 + 20 files changed, 45 insertions(+), 11497 deletions(-) delete mode 100644 typescript-sdk/src/config/config.js delete mode 100644 typescript-sdk/src/connectivity.js delete mode 100644 typescript-sdk/src/grpc/connectivity.js delete mode 100644 typescript-sdk/src/grpc/fumarole.js delete mode 100644 typescript-sdk/src/grpc/geyser.js delete mode 100644 typescript-sdk/src/grpc/google/protobuf/timestamp.js delete 
mode 100644 typescript-sdk/src/grpc/solana-storage.js delete mode 100644 typescript-sdk/src/index.js delete mode 100644 typescript-sdk/src/runtime/aio.js delete mode 100644 typescript-sdk/src/runtime/queue.js delete mode 100644 typescript-sdk/src/runtime/state-machine.js delete mode 100644 typescript-sdk/src/types.js delete mode 100644 typescript-sdk/src/utils/aio.js diff --git a/typescript-sdk/package.json b/typescript-sdk/package.json index c52b5cb..d2ebe8c 100644 --- a/typescript-sdk/package.json +++ b/typescript-sdk/package.json @@ -9,6 +9,8 @@ "module": "./dist/esm/index.js", "types": "./dist/types/index.d.ts", "scripts": { + "clean": "rm -rf dist", + "prebuild": "npm run clean", "build": "npm run grpc-generate && tsc --project tsconfig.esm.json && tsc --project tsconfig.cjs.json && node add-js-extensions.mjs", "grpc-generate": "mkdir -p src/grpc && protoc -I../yellowstone-grpc/yellowstone-grpc-proto/proto -I../proto --plugin=node_modules/.bin/protoc-gen-ts_proto --ts_proto_opt=forceLong=string --ts_proto_opt=outputServices=grpc-js --experimental_allow_proto3_optional --ts_proto_out=src/grpc fumarole.proto --ts_proto_opt=esModuleInterop=true" }, diff --git a/typescript-sdk/src/config/config.js b/typescript-sdk/src/config/config.js deleted file mode 100644 index 3679d89..0000000 --- a/typescript-sdk/src/config/config.js +++ /dev/null @@ -1,27 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.FumaroleConfig = void 0; -var yaml = require("js-yaml"); -var FumaroleConfig = /** @class */ (function () { - function FumaroleConfig(options) { - var _a, _b; - this.endpoint = options.endpoint; - this.xToken = options.xToken; - this.maxDecodingMessageSizeBytes = - (_a = options.maxDecodingMessageSizeBytes) !== null && _a !== void 0 ? _a : FumaroleConfig.DEFAULT_MAX_DECODING_MESSAGE_SIZE; - this.xMetadata = (_b = options.xMetadata) !== null && _b !== void 0 ? 
_b : {}; - } - FumaroleConfig.fromYaml = function (yamlContent) { - var _a, _b; - var data = yaml.load(yamlContent); - return new FumaroleConfig({ - endpoint: data.endpoint, - xToken: data["x-token"] || data.x_token, - maxDecodingMessageSizeBytes: (_a = data.max_decoding_message_size_bytes) !== null && _a !== void 0 ? _a : FumaroleConfig.DEFAULT_MAX_DECODING_MESSAGE_SIZE, - xMetadata: (_b = data["x-metadata"]) !== null && _b !== void 0 ? _b : {}, - }); - }; - FumaroleConfig.DEFAULT_MAX_DECODING_MESSAGE_SIZE = 512000000; - return FumaroleConfig; -}()); -exports.FumaroleConfig = FumaroleConfig; diff --git a/typescript-sdk/src/connectivity.js b/typescript-sdk/src/connectivity.js deleted file mode 100644 index ad5f135..0000000 --- a/typescript-sdk/src/connectivity.js +++ /dev/null @@ -1,170 +0,0 @@ -"use strict"; -var __assign = (this && this.__assign) || function () { - __assign = Object.assign || function(t) { - for (var s, i = 1, n = arguments.length; i < n; i++) { - s = arguments[i]; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) - t[p] = s[p]; - } - return t; - }; - return __assign.apply(this, arguments); -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __generator = (this && this.__generator) || function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype); - return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (g && (g = 0, op[0] && (_ = 0)), _) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? 
op[1] : void 0, done: true }; - } -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.FumaroleGrpcConnector = void 0; -var grpc_js_1 = require("@grpc/grpc-js"); -var fumarole_1 = require("./grpc/fumarole"); -var X_TOKEN_HEADER = "x-token"; -var TritonAuthMetadataGenerator = /** @class */ (function () { - function TritonAuthMetadataGenerator(xToken) { - this.xToken = xToken; - } - TritonAuthMetadataGenerator.prototype.generateMetadata = function () { - var metadata = new grpc_js_1.Metadata(); - metadata.set(X_TOKEN_HEADER, this.xToken); - return Promise.resolve(metadata); - }; - return TritonAuthMetadataGenerator; -}()); -var MetadataProvider = /** @class */ (function () { - function MetadataProvider(metadata) { - var _this = this; - this.metadata = new grpc_js_1.Metadata(); - Object.entries(metadata).forEach(function (_a) { - var key = _a[0], value = _a[1]; - _this.metadata.set(key, value); - }); - } - MetadataProvider.prototype.getMetadata = function () { - return Promise.resolve(this.metadata); - }; - return MetadataProvider; -}()); -var FumaroleGrpcConnector = /** @class */ (function () { - function FumaroleGrpcConnector(config, endpoint) { - this.config = config; - this.endpoint = endpoint; - } - FumaroleGrpcConnector.prototype.connect = function () { - return __awaiter(this, arguments, void 0, function (grpcOptions) { - var options, channelCredentials, insecureXToken, endpointURL, port, address, clientOptions, client, error_1; - var _this = this; - if (grpcOptions === void 0) { grpcOptions = {}; } - return __generator(this, function (_a) { - switch (_a.label) { - case 0: - options = __assign({ "grpc.max_receive_message_length": 111111110 }, grpcOptions); - endpointURL = new URL(this.endpoint); - port = endpointURL.port; - if (port === "") { - port = endpointURL.protocol === "https:" ? 
"443" : "80"; - } - address = "".concat(endpointURL.hostname, ":").concat(port); - // Handle credentials based on protocol - if (endpointURL.protocol === "https:") { - channelCredentials = grpc_js_1.credentials.combineChannelCredentials(grpc_js_1.credentials.createSsl(), grpc_js_1.credentials.createFromMetadataGenerator(function (_params, callback) { - var metadata = new grpc_js_1.Metadata(); - if (_this.config.xToken) { - metadata.add("x-token", _this.config.xToken); - } - if (_this.config.xMetadata) { - Object.entries(_this.config.xMetadata).forEach(function (_a) { - var key = _a[0], value = _a[1]; - metadata.add(key, value); - }); - } - callback(null, metadata); - })); - } - else { - channelCredentials = grpc_js_1.credentials.createInsecure(); - if (this.config.xToken) { - insecureXToken = this.config.xToken; - } - } - clientOptions = __assign(__assign({}, options), { "grpc.enable_http_proxy": 0, - // Basic keepalive settings - "grpc.keepalive_time_ms": 20000, "grpc.keepalive_timeout_ms": 10000, "grpc.http2.min_time_between_pings_ms": 10000, - // Connection settings - "grpc.initial_reconnect_backoff_ms": 100, "grpc.max_reconnect_backoff_ms": 3000, "grpc.min_reconnect_backoff_ms": 100, - // Enable retries - "grpc.enable_retries": 1, "grpc.service_config": JSON.stringify({ - methodConfig: [ - { - name: [{}], // Apply to all methods - retryPolicy: { - maxAttempts: 5, - initialBackoff: "0.1s", - maxBackoff: "3s", - backoffMultiplier: 2, - retryableStatusCodes: ["UNAVAILABLE", "DEADLINE_EXCEEDED"], - }, - }, - ], - }) }); - client = new fumarole_1.FumaroleClient(address, channelCredentials, clientOptions); - _a.label = 1; - case 1: - _a.trys.push([1, 3, , 4]); - return [4 /*yield*/, new Promise(function (resolve, reject) { - var deadline = Date.now() + 5000; // 5 second timeout - client.waitForReady(deadline, function (err) { - if (err) { - reject(err); - } - else { - resolve(); - } - }); - })]; - case 2: - _a.sent(); - return [3 /*break*/, 4]; - case 3: - error_1 = 
_a.sent(); - throw error_1; - case 4: return [2 /*return*/, client]; - } - }); - }); - }; - FumaroleGrpcConnector.logger = console; - return FumaroleGrpcConnector; -}()); -exports.FumaroleGrpcConnector = FumaroleGrpcConnector; diff --git a/typescript-sdk/src/grpc/connectivity.js b/typescript-sdk/src/grpc/connectivity.js deleted file mode 100644 index e69de29..0000000 diff --git a/typescript-sdk/src/grpc/fumarole.js b/typescript-sdk/src/grpc/fumarole.js deleted file mode 100644 index ffcea80..0000000 --- a/typescript-sdk/src/grpc/fumarole.js +++ /dev/null @@ -1,2927 +0,0 @@ -"use strict"; -// Code generated by protoc-gen-ts_proto. DO NOT EDIT. -// versions: -// protoc-gen-ts_proto v2.7.7 -// protoc v3.12.4 -// source: fumarole.proto -Object.defineProperty(exports, "__esModule", { value: true }); -exports.FumaroleClient = exports.FumaroleService = exports.CreateConsumerGroupRequest = exports.CreateConsumerGroupResponse = exports.InitialConsumerGroupState_LastCommittedOffsetsEntry = exports.InitialConsumerGroupState = exports.CommitOffsetResult = exports.ControlResponse = exports.ControlCommand = exports.JoinControlPlane = exports.BlockchainHistory = exports.BlockchainEvent = exports.PollBlockchainHistory = exports.CommitOffset = exports.DataResponse = exports.DataError = exports.BlockNotFound = exports.BlockShardDownloadFinish = exports.DataCommand = exports.Pong = exports.Ping = exports.DownloadBlockShard = exports.BlockFilters_BlocksMetaEntry = exports.BlockFilters_EntriesEntry = exports.BlockFilters_TransactionsEntry = exports.BlockFilters_AccountsEntry = exports.BlockFilters = exports.GetSlotLagInfoRequest = exports.ConsumerGroupInfo = exports.ListConsumerGroupsResponse = exports.ListConsumerGroupsRequest = exports.DeleteConsumerGroupResponse = exports.DeleteConsumerGroupRequest = exports.GetConsumerGroupInfoRequest = exports.VersionResponse = exports.VersionRequest = exports.GetChainTipResponse_ShardToMaxOffsetMapEntry = exports.GetChainTipResponse = 
exports.GetChainTipRequest = exports.InitialOffsetPolicy = exports.protobufPackage = void 0; -exports.initialOffsetPolicyFromJSON = initialOffsetPolicyFromJSON; -exports.initialOffsetPolicyToJSON = initialOffsetPolicyToJSON; -/* eslint-disable */ -var wire_1 = require("@bufbuild/protobuf/wire"); -var grpc_js_1 = require("@grpc/grpc-js"); -var geyser_1 = require("./geyser"); -exports.protobufPackage = "fumarole"; -var InitialOffsetPolicy; -(function (InitialOffsetPolicy) { - /** LATEST - FROM_SLOT = 1; */ - InitialOffsetPolicy[InitialOffsetPolicy["LATEST"] = 0] = "LATEST"; - InitialOffsetPolicy[InitialOffsetPolicy["UNRECOGNIZED"] = -1] = "UNRECOGNIZED"; -})(InitialOffsetPolicy || (exports.InitialOffsetPolicy = InitialOffsetPolicy = {})); -function initialOffsetPolicyFromJSON(object) { - switch (object) { - case 0: - case "LATEST": - return InitialOffsetPolicy.LATEST; - case -1: - case "UNRECOGNIZED": - default: - return InitialOffsetPolicy.UNRECOGNIZED; - } -} -function initialOffsetPolicyToJSON(object) { - switch (object) { - case InitialOffsetPolicy.LATEST: - return "LATEST"; - case InitialOffsetPolicy.UNRECOGNIZED: - default: - return "UNRECOGNIZED"; - } -} -function createBaseGetChainTipRequest() { - return { blockchainId: new Uint8Array(0) }; -} -exports.GetChainTipRequest = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.blockchainId.length !== 0) { - writer.uint32(10).bytes(message.blockchainId); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseGetChainTipRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.blockchainId = reader.bytes(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { blockchainId: isSet(object.blockchainId) ? bytesFromBase64(object.blockchainId) : new Uint8Array(0) }; - }, - toJSON: function (message) { - var obj = {}; - if (message.blockchainId.length !== 0) { - obj.blockchainId = base64FromBytes(message.blockchainId); - } - return obj; - }, - create: function (base) { - return exports.GetChainTipRequest.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseGetChainTipRequest(); - message.blockchainId = (_a = object.blockchainId) !== null && _a !== void 0 ? _a : new Uint8Array(0); - return message; - }, -}; -function createBaseGetChainTipResponse() { - return { blockchainId: new Uint8Array(0), shardToMaxOffsetMap: {} }; -} -exports.GetChainTipResponse = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.blockchainId.length !== 0) { - writer.uint32(10).bytes(message.blockchainId); - } - Object.entries(message.shardToMaxOffsetMap).forEach(function (_a) { - var key = _a[0], value = _a[1]; - exports.GetChainTipResponse_ShardToMaxOffsetMapEntry.encode({ key: key, value: value }, writer.uint32(18).fork()).join(); - }); - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseGetChainTipResponse(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.blockchainId = reader.bytes(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - var entry2 = exports.GetChainTipResponse_ShardToMaxOffsetMapEntry.decode(reader, reader.uint32()); - if (entry2.value !== undefined) { - message.shardToMaxOffsetMap[entry2.key] = entry2.value; - } - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - blockchainId: isSet(object.blockchainId) ? bytesFromBase64(object.blockchainId) : new Uint8Array(0), - shardToMaxOffsetMap: isObject(object.shardToMaxOffsetMap) - ? Object.entries(object.shardToMaxOffsetMap).reduce(function (acc, _a) { - var key = _a[0], value = _a[1]; - acc[globalThis.Number(key)] = String(value); - return acc; - }, {}) - : {}, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.blockchainId.length !== 0) { - obj.blockchainId = base64FromBytes(message.blockchainId); - } - if (message.shardToMaxOffsetMap) { - var entries = Object.entries(message.shardToMaxOffsetMap); - if (entries.length > 0) { - obj.shardToMaxOffsetMap = {}; - entries.forEach(function (_a) { - var k = _a[0], v = _a[1]; - obj.shardToMaxOffsetMap[k] = v; - }); - } - } - return obj; - }, - create: function (base) { - return exports.GetChainTipResponse.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b; - var message = createBaseGetChainTipResponse(); - message.blockchainId = (_a = object.blockchainId) !== null && _a !== void 0 ? _a : new Uint8Array(0); - message.shardToMaxOffsetMap = Object.entries((_b = object.shardToMaxOffsetMap) !== null && _b !== void 0 ? 
_b : {}).reduce(function (acc, _a) { - var key = _a[0], value = _a[1]; - if (value !== undefined) { - acc[globalThis.Number(key)] = globalThis.String(value); - } - return acc; - }, {}); - return message; - }, -}; -function createBaseGetChainTipResponse_ShardToMaxOffsetMapEntry() { - return { key: 0, value: "0" }; -} -exports.GetChainTipResponse_ShardToMaxOffsetMapEntry = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.key !== 0) { - writer.uint32(8).int32(message.key); - } - if (message.value !== "0") { - writer.uint32(16).int64(message.value); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseGetChainTipResponse_ShardToMaxOffsetMapEntry(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.key = reader.int32(); - continue; - } - case 2: { - if (tag !== 16) { - break; - } - message.value = reader.int64().toString(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - key: isSet(object.key) ? globalThis.Number(object.key) : 0, - value: isSet(object.value) ? globalThis.String(object.value) : "0", - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.key !== 0) { - obj.key = Math.round(message.key); - } - if (message.value !== "0") { - obj.value = message.value; - } - return obj; - }, - create: function (base) { - return exports.GetChainTipResponse_ShardToMaxOffsetMapEntry.fromPartial(base !== null && base !== void 0 ? 
base : {}); - }, - fromPartial: function (object) { - var _a, _b; - var message = createBaseGetChainTipResponse_ShardToMaxOffsetMapEntry(); - message.key = (_a = object.key) !== null && _a !== void 0 ? _a : 0; - message.value = (_b = object.value) !== null && _b !== void 0 ? _b : "0"; - return message; - }, -}; -function createBaseVersionRequest() { - return {}; -} -exports.VersionRequest = { - encode: function (_, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseVersionRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (_) { - return {}; - }, - toJSON: function (_) { - var obj = {}; - return obj; - }, - create: function (base) { - return exports.VersionRequest.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (_) { - var message = createBaseVersionRequest(); - return message; - }, -}; -function createBaseVersionResponse() { - return { version: "" }; -} -exports.VersionResponse = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.version !== "") { - writer.uint32(10).string(message.version); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseVersionResponse(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.version = reader.string(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { version: isSet(object.version) ? globalThis.String(object.version) : "" }; - }, - toJSON: function (message) { - var obj = {}; - if (message.version !== "") { - obj.version = message.version; - } - return obj; - }, - create: function (base) { - return exports.VersionResponse.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseVersionResponse(); - message.version = (_a = object.version) !== null && _a !== void 0 ? _a : ""; - return message; - }, -}; -function createBaseGetConsumerGroupInfoRequest() { - return { consumerGroupName: "" }; -} -exports.GetConsumerGroupInfoRequest = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.consumerGroupName !== "") { - writer.uint32(10).string(message.consumerGroupName); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseGetConsumerGroupInfoRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.consumerGroupName = reader.string(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { consumerGroupName: isSet(object.consumerGroupName) ? 
globalThis.String(object.consumerGroupName) : "" }; - }, - toJSON: function (message) { - var obj = {}; - if (message.consumerGroupName !== "") { - obj.consumerGroupName = message.consumerGroupName; - } - return obj; - }, - create: function (base) { - return exports.GetConsumerGroupInfoRequest.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseGetConsumerGroupInfoRequest(); - message.consumerGroupName = (_a = object.consumerGroupName) !== null && _a !== void 0 ? _a : ""; - return message; - }, -}; -function createBaseDeleteConsumerGroupRequest() { - return { consumerGroupName: "" }; -} -exports.DeleteConsumerGroupRequest = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.consumerGroupName !== "") { - writer.uint32(10).string(message.consumerGroupName); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseDeleteConsumerGroupRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.consumerGroupName = reader.string(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { consumerGroupName: isSet(object.consumerGroupName) ? globalThis.String(object.consumerGroupName) : "" }; - }, - toJSON: function (message) { - var obj = {}; - if (message.consumerGroupName !== "") { - obj.consumerGroupName = message.consumerGroupName; - } - return obj; - }, - create: function (base) { - return exports.DeleteConsumerGroupRequest.fromPartial(base !== null && base !== void 0 ? 
base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseDeleteConsumerGroupRequest(); - message.consumerGroupName = (_a = object.consumerGroupName) !== null && _a !== void 0 ? _a : ""; - return message; - }, -}; -function createBaseDeleteConsumerGroupResponse() { - return { success: false }; -} -exports.DeleteConsumerGroupResponse = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.success !== false) { - writer.uint32(8).bool(message.success); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseDeleteConsumerGroupResponse(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.success = reader.bool(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { success: isSet(object.success) ? globalThis.Boolean(object.success) : false }; - }, - toJSON: function (message) { - var obj = {}; - if (message.success !== false) { - obj.success = message.success; - } - return obj; - }, - create: function (base) { - return exports.DeleteConsumerGroupResponse.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseDeleteConsumerGroupResponse(); - message.success = (_a = object.success) !== null && _a !== void 0 ? 
_a : false; - return message; - }, -}; -function createBaseListConsumerGroupsRequest() { - return {}; -} -exports.ListConsumerGroupsRequest = { - encode: function (_, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseListConsumerGroupsRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (_) { - return {}; - }, - toJSON: function (_) { - var obj = {}; - return obj; - }, - create: function (base) { - return exports.ListConsumerGroupsRequest.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (_) { - var message = createBaseListConsumerGroupsRequest(); - return message; - }, -}; -function createBaseListConsumerGroupsResponse() { - return { consumerGroups: [] }; -} -exports.ListConsumerGroupsResponse = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - for (var _i = 0, _a = message.consumerGroups; _i < _a.length; _i++) { - var v = _a[_i]; - exports.ConsumerGroupInfo.encode(v, writer.uint32(10).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseListConsumerGroupsResponse(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.consumerGroups.push(exports.ConsumerGroupInfo.decode(reader, reader.uint32())); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - consumerGroups: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.consumerGroups) - ? object.consumerGroups.map(function (e) { return exports.ConsumerGroupInfo.fromJSON(e); }) - : [], - }; - }, - toJSON: function (message) { - var _a; - var obj = {}; - if ((_a = message.consumerGroups) === null || _a === void 0 ? void 0 : _a.length) { - obj.consumerGroups = message.consumerGroups.map(function (e) { return exports.ConsumerGroupInfo.toJSON(e); }); - } - return obj; - }, - create: function (base) { - return exports.ListConsumerGroupsResponse.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseListConsumerGroupsResponse(); - message.consumerGroups = ((_a = object.consumerGroups) === null || _a === void 0 ? 
void 0 : _a.map(function (e) { return exports.ConsumerGroupInfo.fromPartial(e); })) || []; - return message; - }, -}; -function createBaseConsumerGroupInfo() { - return { id: "", consumerGroupName: "", isStale: false, blockchainId: new Uint8Array(0) }; -} -exports.ConsumerGroupInfo = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.id !== "") { - writer.uint32(10).string(message.id); - } - if (message.consumerGroupName !== "") { - writer.uint32(18).string(message.consumerGroupName); - } - if (message.isStale !== false) { - writer.uint32(24).bool(message.isStale); - } - if (message.blockchainId.length !== 0) { - writer.uint32(34).bytes(message.blockchainId); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseConsumerGroupInfo(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.id = reader.string(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.consumerGroupName = reader.string(); - continue; - } - case 3: { - if (tag !== 24) { - break; - } - message.isStale = reader.bool(); - continue; - } - case 4: { - if (tag !== 34) { - break; - } - message.blockchainId = reader.bytes(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - id: isSet(object.id) ? globalThis.String(object.id) : "", - consumerGroupName: isSet(object.consumerGroupName) ? globalThis.String(object.consumerGroupName) : "", - isStale: isSet(object.isStale) ? globalThis.Boolean(object.isStale) : false, - blockchainId: isSet(object.blockchainId) ? 
bytesFromBase64(object.blockchainId) : new Uint8Array(0), - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.id !== "") { - obj.id = message.id; - } - if (message.consumerGroupName !== "") { - obj.consumerGroupName = message.consumerGroupName; - } - if (message.isStale !== false) { - obj.isStale = message.isStale; - } - if (message.blockchainId.length !== 0) { - obj.blockchainId = base64FromBytes(message.blockchainId); - } - return obj; - }, - create: function (base) { - return exports.ConsumerGroupInfo.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c, _d; - var message = createBaseConsumerGroupInfo(); - message.id = (_a = object.id) !== null && _a !== void 0 ? _a : ""; - message.consumerGroupName = (_b = object.consumerGroupName) !== null && _b !== void 0 ? _b : ""; - message.isStale = (_c = object.isStale) !== null && _c !== void 0 ? _c : false; - message.blockchainId = (_d = object.blockchainId) !== null && _d !== void 0 ? _d : new Uint8Array(0); - return message; - }, -}; -function createBaseGetSlotLagInfoRequest() { - return { consumerGroupName: "" }; -} -exports.GetSlotLagInfoRequest = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.consumerGroupName !== "") { - writer.uint32(10).string(message.consumerGroupName); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseGetSlotLagInfoRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.consumerGroupName = reader.string(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { consumerGroupName: isSet(object.consumerGroupName) ? globalThis.String(object.consumerGroupName) : "" }; - }, - toJSON: function (message) { - var obj = {}; - if (message.consumerGroupName !== "") { - obj.consumerGroupName = message.consumerGroupName; - } - return obj; - }, - create: function (base) { - return exports.GetSlotLagInfoRequest.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseGetSlotLagInfoRequest(); - message.consumerGroupName = (_a = object.consumerGroupName) !== null && _a !== void 0 ? 
_a : ""; - return message; - }, -}; -function createBaseBlockFilters() { - return { accounts: {}, transactions: {}, entries: {}, blocksMeta: {} }; -} -exports.BlockFilters = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - Object.entries(message.accounts).forEach(function (_a) { - var key = _a[0], value = _a[1]; - exports.BlockFilters_AccountsEntry.encode({ key: key, value: value }, writer.uint32(10).fork()).join(); - }); - Object.entries(message.transactions).forEach(function (_a) { - var key = _a[0], value = _a[1]; - exports.BlockFilters_TransactionsEntry.encode({ key: key, value: value }, writer.uint32(18).fork()).join(); - }); - Object.entries(message.entries).forEach(function (_a) { - var key = _a[0], value = _a[1]; - exports.BlockFilters_EntriesEntry.encode({ key: key, value: value }, writer.uint32(26).fork()).join(); - }); - Object.entries(message.blocksMeta).forEach(function (_a) { - var key = _a[0], value = _a[1]; - exports.BlockFilters_BlocksMetaEntry.encode({ key: key, value: value }, writer.uint32(34).fork()).join(); - }); - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseBlockFilters(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - var entry1 = exports.BlockFilters_AccountsEntry.decode(reader, reader.uint32()); - if (entry1.value !== undefined) { - message.accounts[entry1.key] = entry1.value; - } - continue; - } - case 2: { - if (tag !== 18) { - break; - } - var entry2 = exports.BlockFilters_TransactionsEntry.decode(reader, reader.uint32()); - if (entry2.value !== undefined) { - message.transactions[entry2.key] = entry2.value; - } - continue; - } - case 3: { - if (tag !== 26) { - break; - } - var entry3 = exports.BlockFilters_EntriesEntry.decode(reader, reader.uint32()); - if (entry3.value !== undefined) { - message.entries[entry3.key] = entry3.value; - } - continue; - } - case 4: { - if (tag !== 34) { - break; - } - var entry4 = exports.BlockFilters_BlocksMetaEntry.decode(reader, reader.uint32()); - if (entry4.value !== undefined) { - message.blocksMeta[entry4.key] = entry4.value; - } - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - accounts: isObject(object.accounts) - ? Object.entries(object.accounts).reduce(function (acc, _a) { - var key = _a[0], value = _a[1]; - acc[key] = geyser_1.SubscribeRequestFilterAccounts.fromJSON(value); - return acc; - }, {}) - : {}, - transactions: isObject(object.transactions) - ? Object.entries(object.transactions).reduce(function (acc, _a) { - var key = _a[0], value = _a[1]; - acc[key] = geyser_1.SubscribeRequestFilterTransactions.fromJSON(value); - return acc; - }, {}) - : {}, - entries: isObject(object.entries) - ? 
Object.entries(object.entries).reduce(function (acc, _a) { - var key = _a[0], value = _a[1]; - acc[key] = geyser_1.SubscribeRequestFilterEntry.fromJSON(value); - return acc; - }, {}) - : {}, - blocksMeta: isObject(object.blocksMeta) - ? Object.entries(object.blocksMeta).reduce(function (acc, _a) { - var key = _a[0], value = _a[1]; - acc[key] = geyser_1.SubscribeRequestFilterBlocksMeta.fromJSON(value); - return acc; - }, {}) - : {}, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.accounts) { - var entries = Object.entries(message.accounts); - if (entries.length > 0) { - obj.accounts = {}; - entries.forEach(function (_a) { - var k = _a[0], v = _a[1]; - obj.accounts[k] = geyser_1.SubscribeRequestFilterAccounts.toJSON(v); - }); - } - } - if (message.transactions) { - var entries = Object.entries(message.transactions); - if (entries.length > 0) { - obj.transactions = {}; - entries.forEach(function (_a) { - var k = _a[0], v = _a[1]; - obj.transactions[k] = geyser_1.SubscribeRequestFilterTransactions.toJSON(v); - }); - } - } - if (message.entries) { - var entries = Object.entries(message.entries); - if (entries.length > 0) { - obj.entries = {}; - entries.forEach(function (_a) { - var k = _a[0], v = _a[1]; - obj.entries[k] = geyser_1.SubscribeRequestFilterEntry.toJSON(v); - }); - } - } - if (message.blocksMeta) { - var entries = Object.entries(message.blocksMeta); - if (entries.length > 0) { - obj.blocksMeta = {}; - entries.forEach(function (_a) { - var k = _a[0], v = _a[1]; - obj.blocksMeta[k] = geyser_1.SubscribeRequestFilterBlocksMeta.toJSON(v); - }); - } - } - return obj; - }, - create: function (base) { - return exports.BlockFilters.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c, _d; - var message = createBaseBlockFilters(); - message.accounts = Object.entries((_a = object.accounts) !== null && _a !== void 0 ? 
_a : {}).reduce(function (acc, _a) { - var key = _a[0], value = _a[1]; - if (value !== undefined) { - acc[key] = geyser_1.SubscribeRequestFilterAccounts.fromPartial(value); - } - return acc; - }, {}); - message.transactions = Object.entries((_b = object.transactions) !== null && _b !== void 0 ? _b : {}).reduce(function (acc, _a) { - var key = _a[0], value = _a[1]; - if (value !== undefined) { - acc[key] = geyser_1.SubscribeRequestFilterTransactions.fromPartial(value); - } - return acc; - }, {}); - message.entries = Object.entries((_c = object.entries) !== null && _c !== void 0 ? _c : {}).reduce(function (acc, _a) { - var key = _a[0], value = _a[1]; - if (value !== undefined) { - acc[key] = geyser_1.SubscribeRequestFilterEntry.fromPartial(value); - } - return acc; - }, {}); - message.blocksMeta = Object.entries((_d = object.blocksMeta) !== null && _d !== void 0 ? _d : {}).reduce(function (acc, _a) { - var key = _a[0], value = _a[1]; - if (value !== undefined) { - acc[key] = geyser_1.SubscribeRequestFilterBlocksMeta.fromPartial(value); - } - return acc; - }, {}); - return message; - }, -}; -function createBaseBlockFilters_AccountsEntry() { - return { key: "", value: undefined }; -} -exports.BlockFilters_AccountsEntry = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.key !== "") { - writer.uint32(10).string(message.key); - } - if (message.value !== undefined) { - geyser_1.SubscribeRequestFilterAccounts.encode(message.value, writer.uint32(18).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseBlockFilters_AccountsEntry(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.key = reader.string(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.value = geyser_1.SubscribeRequestFilterAccounts.decode(reader, reader.uint32()); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - key: isSet(object.key) ? globalThis.String(object.key) : "", - value: isSet(object.value) ? geyser_1.SubscribeRequestFilterAccounts.fromJSON(object.value) : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.key !== "") { - obj.key = message.key; - } - if (message.value !== undefined) { - obj.value = geyser_1.SubscribeRequestFilterAccounts.toJSON(message.value); - } - return obj; - }, - create: function (base) { - return exports.BlockFilters_AccountsEntry.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseBlockFilters_AccountsEntry(); - message.key = (_a = object.key) !== null && _a !== void 0 ? _a : ""; - message.value = (object.value !== undefined && object.value !== null) - ? 
geyser_1.SubscribeRequestFilterAccounts.fromPartial(object.value) - : undefined; - return message; - }, -}; -function createBaseBlockFilters_TransactionsEntry() { - return { key: "", value: undefined }; -} -exports.BlockFilters_TransactionsEntry = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.key !== "") { - writer.uint32(10).string(message.key); - } - if (message.value !== undefined) { - geyser_1.SubscribeRequestFilterTransactions.encode(message.value, writer.uint32(18).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseBlockFilters_TransactionsEntry(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.key = reader.string(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.value = geyser_1.SubscribeRequestFilterTransactions.decode(reader, reader.uint32()); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - key: isSet(object.key) ? globalThis.String(object.key) : "", - value: isSet(object.value) ? geyser_1.SubscribeRequestFilterTransactions.fromJSON(object.value) : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.key !== "") { - obj.key = message.key; - } - if (message.value !== undefined) { - obj.value = geyser_1.SubscribeRequestFilterTransactions.toJSON(message.value); - } - return obj; - }, - create: function (base) { - return exports.BlockFilters_TransactionsEntry.fromPartial(base !== null && base !== void 0 ? 
base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseBlockFilters_TransactionsEntry(); - message.key = (_a = object.key) !== null && _a !== void 0 ? _a : ""; - message.value = (object.value !== undefined && object.value !== null) - ? geyser_1.SubscribeRequestFilterTransactions.fromPartial(object.value) - : undefined; - return message; - }, -}; -function createBaseBlockFilters_EntriesEntry() { - return { key: "", value: undefined }; -} -exports.BlockFilters_EntriesEntry = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.key !== "") { - writer.uint32(10).string(message.key); - } - if (message.value !== undefined) { - geyser_1.SubscribeRequestFilterEntry.encode(message.value, writer.uint32(18).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseBlockFilters_EntriesEntry(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.key = reader.string(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.value = geyser_1.SubscribeRequestFilterEntry.decode(reader, reader.uint32()); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - key: isSet(object.key) ? globalThis.String(object.key) : "", - value: isSet(object.value) ? 
geyser_1.SubscribeRequestFilterEntry.fromJSON(object.value) : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.key !== "") { - obj.key = message.key; - } - if (message.value !== undefined) { - obj.value = geyser_1.SubscribeRequestFilterEntry.toJSON(message.value); - } - return obj; - }, - create: function (base) { - return exports.BlockFilters_EntriesEntry.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseBlockFilters_EntriesEntry(); - message.key = (_a = object.key) !== null && _a !== void 0 ? _a : ""; - message.value = (object.value !== undefined && object.value !== null) - ? geyser_1.SubscribeRequestFilterEntry.fromPartial(object.value) - : undefined; - return message; - }, -}; -function createBaseBlockFilters_BlocksMetaEntry() { - return { key: "", value: undefined }; -} -exports.BlockFilters_BlocksMetaEntry = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.key !== "") { - writer.uint32(10).string(message.key); - } - if (message.value !== undefined) { - geyser_1.SubscribeRequestFilterBlocksMeta.encode(message.value, writer.uint32(18).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseBlockFilters_BlocksMetaEntry(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.key = reader.string(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.value = geyser_1.SubscribeRequestFilterBlocksMeta.decode(reader, reader.uint32()); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - key: isSet(object.key) ? globalThis.String(object.key) : "", - value: isSet(object.value) ? geyser_1.SubscribeRequestFilterBlocksMeta.fromJSON(object.value) : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.key !== "") { - obj.key = message.key; - } - if (message.value !== undefined) { - obj.value = geyser_1.SubscribeRequestFilterBlocksMeta.toJSON(message.value); - } - return obj; - }, - create: function (base) { - return exports.BlockFilters_BlocksMetaEntry.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseBlockFilters_BlocksMetaEntry(); - message.key = (_a = object.key) !== null && _a !== void 0 ? _a : ""; - message.value = (object.value !== undefined && object.value !== null) - ? 
geyser_1.SubscribeRequestFilterBlocksMeta.fromPartial(object.value) - : undefined; - return message; - }, -}; -function createBaseDownloadBlockShard() { - return { blockchainId: new Uint8Array(0), blockUid: new Uint8Array(0), shardIdx: 0, blockFilters: undefined }; -} -exports.DownloadBlockShard = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.blockchainId.length !== 0) { - writer.uint32(10).bytes(message.blockchainId); - } - if (message.blockUid.length !== 0) { - writer.uint32(18).bytes(message.blockUid); - } - if (message.shardIdx !== 0) { - writer.uint32(24).int32(message.shardIdx); - } - if (message.blockFilters !== undefined) { - exports.BlockFilters.encode(message.blockFilters, writer.uint32(34).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseDownloadBlockShard(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.blockchainId = reader.bytes(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.blockUid = reader.bytes(); - continue; - } - case 3: { - if (tag !== 24) { - break; - } - message.shardIdx = reader.int32(); - continue; - } - case 4: { - if (tag !== 34) { - break; - } - message.blockFilters = exports.BlockFilters.decode(reader, reader.uint32()); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - blockchainId: isSet(object.blockchainId) ? bytesFromBase64(object.blockchainId) : new Uint8Array(0), - blockUid: isSet(object.blockUid) ? bytesFromBase64(object.blockUid) : new Uint8Array(0), - shardIdx: isSet(object.shardIdx) ? 
globalThis.Number(object.shardIdx) : 0, - blockFilters: isSet(object.blockFilters) ? exports.BlockFilters.fromJSON(object.blockFilters) : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.blockchainId.length !== 0) { - obj.blockchainId = base64FromBytes(message.blockchainId); - } - if (message.blockUid.length !== 0) { - obj.blockUid = base64FromBytes(message.blockUid); - } - if (message.shardIdx !== 0) { - obj.shardIdx = Math.round(message.shardIdx); - } - if (message.blockFilters !== undefined) { - obj.blockFilters = exports.BlockFilters.toJSON(message.blockFilters); - } - return obj; - }, - create: function (base) { - return exports.DownloadBlockShard.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c; - var message = createBaseDownloadBlockShard(); - message.blockchainId = (_a = object.blockchainId) !== null && _a !== void 0 ? _a : new Uint8Array(0); - message.blockUid = (_b = object.blockUid) !== null && _b !== void 0 ? _b : new Uint8Array(0); - message.shardIdx = (_c = object.shardIdx) !== null && _c !== void 0 ? _c : 0; - message.blockFilters = (object.blockFilters !== undefined && object.blockFilters !== null) - ? exports.BlockFilters.fromPartial(object.blockFilters) - : undefined; - return message; - }, -}; -function createBasePing() { - return { pingId: 0 }; -} -exports.Ping = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.pingId !== 0) { - writer.uint32(8).uint32(message.pingId); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBasePing(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.pingId = reader.uint32(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { pingId: isSet(object.pingId) ? globalThis.Number(object.pingId) : 0 }; - }, - toJSON: function (message) { - var obj = {}; - if (message.pingId !== 0) { - obj.pingId = Math.round(message.pingId); - } - return obj; - }, - create: function (base) { - return exports.Ping.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBasePing(); - message.pingId = (_a = object.pingId) !== null && _a !== void 0 ? _a : 0; - return message; - }, -}; -function createBasePong() { - return { pingId: 0 }; -} -exports.Pong = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.pingId !== 0) { - writer.uint32(8).uint32(message.pingId); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBasePong(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.pingId = reader.uint32(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { pingId: isSet(object.pingId) ? 
globalThis.Number(object.pingId) : 0 }; - }, - toJSON: function (message) { - var obj = {}; - if (message.pingId !== 0) { - obj.pingId = Math.round(message.pingId); - } - return obj; - }, - create: function (base) { - return exports.Pong.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBasePong(); - message.pingId = (_a = object.pingId) !== null && _a !== void 0 ? _a : 0; - return message; - }, -}; -function createBaseDataCommand() { - return { downloadBlockShard: undefined, filterUpdate: undefined }; -} -exports.DataCommand = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.downloadBlockShard !== undefined) { - exports.DownloadBlockShard.encode(message.downloadBlockShard, writer.uint32(10).fork()).join(); - } - if (message.filterUpdate !== undefined) { - exports.BlockFilters.encode(message.filterUpdate, writer.uint32(18).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseDataCommand(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.downloadBlockShard = exports.DownloadBlockShard.decode(reader, reader.uint32()); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.filterUpdate = exports.BlockFilters.decode(reader, reader.uint32()); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - downloadBlockShard: isSet(object.downloadBlockShard) - ? exports.DownloadBlockShard.fromJSON(object.downloadBlockShard) - : undefined, - filterUpdate: isSet(object.filterUpdate) ? 
exports.BlockFilters.fromJSON(object.filterUpdate) : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.downloadBlockShard !== undefined) { - obj.downloadBlockShard = exports.DownloadBlockShard.toJSON(message.downloadBlockShard); - } - if (message.filterUpdate !== undefined) { - obj.filterUpdate = exports.BlockFilters.toJSON(message.filterUpdate); - } - return obj; - }, - create: function (base) { - return exports.DataCommand.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var message = createBaseDataCommand(); - message.downloadBlockShard = (object.downloadBlockShard !== undefined && object.downloadBlockShard !== null) - ? exports.DownloadBlockShard.fromPartial(object.downloadBlockShard) - : undefined; - message.filterUpdate = (object.filterUpdate !== undefined && object.filterUpdate !== null) - ? exports.BlockFilters.fromPartial(object.filterUpdate) - : undefined; - return message; - }, -}; -function createBaseBlockShardDownloadFinish() { - return {}; -} -exports.BlockShardDownloadFinish = { - encode: function (_, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseBlockShardDownloadFinish(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (_) { - return {}; - }, - toJSON: function (_) { - var obj = {}; - return obj; - }, - create: function (base) { - return exports.BlockShardDownloadFinish.fromPartial(base !== null && base !== void 0 ? 
base : {}); - }, - fromPartial: function (_) { - var message = createBaseBlockShardDownloadFinish(); - return message; - }, -}; -function createBaseBlockNotFound() { - return { blockchainId: new Uint8Array(0), blockUid: new Uint8Array(0), shardIdx: 0 }; -} -exports.BlockNotFound = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.blockchainId.length !== 0) { - writer.uint32(10).bytes(message.blockchainId); - } - if (message.blockUid.length !== 0) { - writer.uint32(18).bytes(message.blockUid); - } - if (message.shardIdx !== 0) { - writer.uint32(24).int32(message.shardIdx); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseBlockNotFound(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.blockchainId = reader.bytes(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.blockUid = reader.bytes(); - continue; - } - case 3: { - if (tag !== 24) { - break; - } - message.shardIdx = reader.int32(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - blockchainId: isSet(object.blockchainId) ? bytesFromBase64(object.blockchainId) : new Uint8Array(0), - blockUid: isSet(object.blockUid) ? bytesFromBase64(object.blockUid) : new Uint8Array(0), - shardIdx: isSet(object.shardIdx) ? 
globalThis.Number(object.shardIdx) : 0, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.blockchainId.length !== 0) { - obj.blockchainId = base64FromBytes(message.blockchainId); - } - if (message.blockUid.length !== 0) { - obj.blockUid = base64FromBytes(message.blockUid); - } - if (message.shardIdx !== 0) { - obj.shardIdx = Math.round(message.shardIdx); - } - return obj; - }, - create: function (base) { - return exports.BlockNotFound.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c; - var message = createBaseBlockNotFound(); - message.blockchainId = (_a = object.blockchainId) !== null && _a !== void 0 ? _a : new Uint8Array(0); - message.blockUid = (_b = object.blockUid) !== null && _b !== void 0 ? _b : new Uint8Array(0); - message.shardIdx = (_c = object.shardIdx) !== null && _c !== void 0 ? _c : 0; - return message; - }, -}; -function createBaseDataError() { - return { notFound: undefined }; -} -exports.DataError = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.notFound !== undefined) { - exports.BlockNotFound.encode(message.notFound, writer.uint32(10).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseDataError(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.notFound = exports.BlockNotFound.decode(reader, reader.uint32()); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { notFound: isSet(object.notFound) ? 
exports.BlockNotFound.fromJSON(object.notFound) : undefined }; - }, - toJSON: function (message) { - var obj = {}; - if (message.notFound !== undefined) { - obj.notFound = exports.BlockNotFound.toJSON(message.notFound); - } - return obj; - }, - create: function (base) { - return exports.DataError.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var message = createBaseDataError(); - message.notFound = (object.notFound !== undefined && object.notFound !== null) - ? exports.BlockNotFound.fromPartial(object.notFound) - : undefined; - return message; - }, -}; -function createBaseDataResponse() { - return { update: undefined, blockShardDownloadFinish: undefined }; -} -exports.DataResponse = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.update !== undefined) { - geyser_1.SubscribeUpdate.encode(message.update, writer.uint32(10).fork()).join(); - } - if (message.blockShardDownloadFinish !== undefined) { - exports.BlockShardDownloadFinish.encode(message.blockShardDownloadFinish, writer.uint32(18).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseDataResponse(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.update = geyser_1.SubscribeUpdate.decode(reader, reader.uint32()); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.blockShardDownloadFinish = exports.BlockShardDownloadFinish.decode(reader, reader.uint32()); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - update: isSet(object.update) ? 
geyser_1.SubscribeUpdate.fromJSON(object.update) : undefined, - blockShardDownloadFinish: isSet(object.blockShardDownloadFinish) - ? exports.BlockShardDownloadFinish.fromJSON(object.blockShardDownloadFinish) - : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.update !== undefined) { - obj.update = geyser_1.SubscribeUpdate.toJSON(message.update); - } - if (message.blockShardDownloadFinish !== undefined) { - obj.blockShardDownloadFinish = exports.BlockShardDownloadFinish.toJSON(message.blockShardDownloadFinish); - } - return obj; - }, - create: function (base) { - return exports.DataResponse.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var message = createBaseDataResponse(); - message.update = (object.update !== undefined && object.update !== null) - ? geyser_1.SubscribeUpdate.fromPartial(object.update) - : undefined; - message.blockShardDownloadFinish = - (object.blockShardDownloadFinish !== undefined && object.blockShardDownloadFinish !== null) - ? exports.BlockShardDownloadFinish.fromPartial(object.blockShardDownloadFinish) - : undefined; - return message; - }, -}; -function createBaseCommitOffset() { - return { offset: "0", shardId: 0 }; -} -exports.CommitOffset = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.offset !== "0") { - writer.uint32(8).int64(message.offset); - } - if (message.shardId !== 0) { - writer.uint32(16).int32(message.shardId); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseCommitOffset(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.offset = reader.int64().toString(); - continue; - } - case 2: { - if (tag !== 16) { - break; - } - message.shardId = reader.int32(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - offset: isSet(object.offset) ? globalThis.String(object.offset) : "0", - shardId: isSet(object.shardId) ? globalThis.Number(object.shardId) : 0, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.offset !== "0") { - obj.offset = message.offset; - } - if (message.shardId !== 0) { - obj.shardId = Math.round(message.shardId); - } - return obj; - }, - create: function (base) { - return exports.CommitOffset.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b; - var message = createBaseCommitOffset(); - message.offset = (_a = object.offset) !== null && _a !== void 0 ? _a : "0"; - message.shardId = (_b = object.shardId) !== null && _b !== void 0 ? _b : 0; - return message; - }, -}; -function createBasePollBlockchainHistory() { - return { shardId: 0, from: undefined, limit: undefined }; -} -exports.PollBlockchainHistory = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.shardId !== 0) { - writer.uint32(8).int32(message.shardId); - } - if (message.from !== undefined) { - writer.uint32(16).int64(message.from); - } - if (message.limit !== undefined) { - writer.uint32(24).int64(message.limit); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBasePollBlockchainHistory(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.shardId = reader.int32(); - continue; - } - case 2: { - if (tag !== 16) { - break; - } - message.from = reader.int64().toString(); - continue; - } - case 3: { - if (tag !== 24) { - break; - } - message.limit = reader.int64().toString(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - shardId: isSet(object.shardId) ? globalThis.Number(object.shardId) : 0, - from: isSet(object.from) ? globalThis.String(object.from) : undefined, - limit: isSet(object.limit) ? globalThis.String(object.limit) : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.shardId !== 0) { - obj.shardId = Math.round(message.shardId); - } - if (message.from !== undefined) { - obj.from = message.from; - } - if (message.limit !== undefined) { - obj.limit = message.limit; - } - return obj; - }, - create: function (base) { - return exports.PollBlockchainHistory.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c; - var message = createBasePollBlockchainHistory(); - message.shardId = (_a = object.shardId) !== null && _a !== void 0 ? _a : 0; - message.from = (_b = object.from) !== null && _b !== void 0 ? _b : undefined; - message.limit = (_c = object.limit) !== null && _c !== void 0 ? 
_c : undefined; - return message; - }, -}; -function createBaseBlockchainEvent() { - return { - offset: "0", - blockchainId: new Uint8Array(0), - blockUid: new Uint8Array(0), - numShards: 0, - slot: "0", - parentSlot: undefined, - commitmentLevel: 0, - blockchainShardId: 0, - deadError: undefined, - }; -} -exports.BlockchainEvent = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.offset !== "0") { - writer.uint32(8).int64(message.offset); - } - if (message.blockchainId.length !== 0) { - writer.uint32(18).bytes(message.blockchainId); - } - if (message.blockUid.length !== 0) { - writer.uint32(26).bytes(message.blockUid); - } - if (message.numShards !== 0) { - writer.uint32(32).uint32(message.numShards); - } - if (message.slot !== "0") { - writer.uint32(40).uint64(message.slot); - } - if (message.parentSlot !== undefined) { - writer.uint32(48).uint64(message.parentSlot); - } - if (message.commitmentLevel !== 0) { - writer.uint32(56).int32(message.commitmentLevel); - } - if (message.blockchainShardId !== 0) { - writer.uint32(64).int32(message.blockchainShardId); - } - if (message.deadError !== undefined) { - writer.uint32(74).string(message.deadError); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseBlockchainEvent(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.offset = reader.int64().toString(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.blockchainId = reader.bytes(); - continue; - } - case 3: { - if (tag !== 26) { - break; - } - message.blockUid = reader.bytes(); - continue; - } - case 4: { - if (tag !== 32) { - break; - } - message.numShards = reader.uint32(); - continue; - } - case 5: { - if (tag !== 40) { - break; - } - message.slot = reader.uint64().toString(); - continue; - } - case 6: { - if (tag !== 48) { - break; - } - message.parentSlot = reader.uint64().toString(); - continue; - } - case 7: { - if (tag !== 56) { - break; - } - message.commitmentLevel = reader.int32(); - continue; - } - case 8: { - if (tag !== 64) { - break; - } - message.blockchainShardId = reader.int32(); - continue; - } - case 9: { - if (tag !== 74) { - break; - } - message.deadError = reader.string(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - offset: isSet(object.offset) ? globalThis.String(object.offset) : "0", - blockchainId: isSet(object.blockchainId) ? bytesFromBase64(object.blockchainId) : new Uint8Array(0), - blockUid: isSet(object.blockUid) ? bytesFromBase64(object.blockUid) : new Uint8Array(0), - numShards: isSet(object.numShards) ? globalThis.Number(object.numShards) : 0, - slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", - parentSlot: isSet(object.parentSlot) ? globalThis.String(object.parentSlot) : undefined, - commitmentLevel: isSet(object.commitmentLevel) ? (0, geyser_1.commitmentLevelFromJSON)(object.commitmentLevel) : 0, - blockchainShardId: isSet(object.blockchainShardId) ? 
globalThis.Number(object.blockchainShardId) : 0, - deadError: isSet(object.deadError) ? globalThis.String(object.deadError) : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.offset !== "0") { - obj.offset = message.offset; - } - if (message.blockchainId.length !== 0) { - obj.blockchainId = base64FromBytes(message.blockchainId); - } - if (message.blockUid.length !== 0) { - obj.blockUid = base64FromBytes(message.blockUid); - } - if (message.numShards !== 0) { - obj.numShards = Math.round(message.numShards); - } - if (message.slot !== "0") { - obj.slot = message.slot; - } - if (message.parentSlot !== undefined) { - obj.parentSlot = message.parentSlot; - } - if (message.commitmentLevel !== 0) { - obj.commitmentLevel = (0, geyser_1.commitmentLevelToJSON)(message.commitmentLevel); - } - if (message.blockchainShardId !== 0) { - obj.blockchainShardId = Math.round(message.blockchainShardId); - } - if (message.deadError !== undefined) { - obj.deadError = message.deadError; - } - return obj; - }, - create: function (base) { - return exports.BlockchainEvent.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c, _d, _e, _f, _g, _h, _j; - var message = createBaseBlockchainEvent(); - message.offset = (_a = object.offset) !== null && _a !== void 0 ? _a : "0"; - message.blockchainId = (_b = object.blockchainId) !== null && _b !== void 0 ? _b : new Uint8Array(0); - message.blockUid = (_c = object.blockUid) !== null && _c !== void 0 ? _c : new Uint8Array(0); - message.numShards = (_d = object.numShards) !== null && _d !== void 0 ? _d : 0; - message.slot = (_e = object.slot) !== null && _e !== void 0 ? _e : "0"; - message.parentSlot = (_f = object.parentSlot) !== null && _f !== void 0 ? _f : undefined; - message.commitmentLevel = (_g = object.commitmentLevel) !== null && _g !== void 0 ? _g : 0; - message.blockchainShardId = (_h = object.blockchainShardId) !== null && _h !== void 0 ? 
_h : 0; - message.deadError = (_j = object.deadError) !== null && _j !== void 0 ? _j : undefined; - return message; - }, -}; -function createBaseBlockchainHistory() { - return { events: [] }; -} -exports.BlockchainHistory = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - for (var _i = 0, _a = message.events; _i < _a.length; _i++) { - var v = _a[_i]; - exports.BlockchainEvent.encode(v, writer.uint32(10).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseBlockchainHistory(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.events.push(exports.BlockchainEvent.decode(reader, reader.uint32())); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - events: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.events) - ? object.events.map(function (e) { return exports.BlockchainEvent.fromJSON(e); }) - : [], - }; - }, - toJSON: function (message) { - var _a; - var obj = {}; - if ((_a = message.events) === null || _a === void 0 ? void 0 : _a.length) { - obj.events = message.events.map(function (e) { return exports.BlockchainEvent.toJSON(e); }); - } - return obj; - }, - create: function (base) { - return exports.BlockchainHistory.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseBlockchainHistory(); - message.events = ((_a = object.events) === null || _a === void 0 ? 
void 0 : _a.map(function (e) { return exports.BlockchainEvent.fromPartial(e); })) || []; - return message; - }, -}; -function createBaseJoinControlPlane() { - return { consumerGroupName: undefined }; -} -exports.JoinControlPlane = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.consumerGroupName !== undefined) { - writer.uint32(10).string(message.consumerGroupName); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseJoinControlPlane(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.consumerGroupName = reader.string(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - consumerGroupName: isSet(object.consumerGroupName) ? globalThis.String(object.consumerGroupName) : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.consumerGroupName !== undefined) { - obj.consumerGroupName = message.consumerGroupName; - } - return obj; - }, - create: function (base) { - return exports.JoinControlPlane.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseJoinControlPlane(); - message.consumerGroupName = (_a = object.consumerGroupName) !== null && _a !== void 0 ? 
_a : undefined; - return message; - }, -}; -function createBaseControlCommand() { - return { initialJoin: undefined, commitOffset: undefined, pollHist: undefined, ping: undefined }; -} -exports.ControlCommand = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.initialJoin !== undefined) { - exports.JoinControlPlane.encode(message.initialJoin, writer.uint32(10).fork()).join(); - } - if (message.commitOffset !== undefined) { - exports.CommitOffset.encode(message.commitOffset, writer.uint32(18).fork()).join(); - } - if (message.pollHist !== undefined) { - exports.PollBlockchainHistory.encode(message.pollHist, writer.uint32(26).fork()).join(); - } - if (message.ping !== undefined) { - exports.Ping.encode(message.ping, writer.uint32(34).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseControlCommand(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.initialJoin = exports.JoinControlPlane.decode(reader, reader.uint32()); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.commitOffset = exports.CommitOffset.decode(reader, reader.uint32()); - continue; - } - case 3: { - if (tag !== 26) { - break; - } - message.pollHist = exports.PollBlockchainHistory.decode(reader, reader.uint32()); - continue; - } - case 4: { - if (tag !== 34) { - break; - } - message.ping = exports.Ping.decode(reader, reader.uint32()); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - initialJoin: isSet(object.initialJoin) ? 
exports.JoinControlPlane.fromJSON(object.initialJoin) : undefined, - commitOffset: isSet(object.commitOffset) ? exports.CommitOffset.fromJSON(object.commitOffset) : undefined, - pollHist: isSet(object.pollHist) ? exports.PollBlockchainHistory.fromJSON(object.pollHist) : undefined, - ping: isSet(object.ping) ? exports.Ping.fromJSON(object.ping) : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.initialJoin !== undefined) { - obj.initialJoin = exports.JoinControlPlane.toJSON(message.initialJoin); - } - if (message.commitOffset !== undefined) { - obj.commitOffset = exports.CommitOffset.toJSON(message.commitOffset); - } - if (message.pollHist !== undefined) { - obj.pollHist = exports.PollBlockchainHistory.toJSON(message.pollHist); - } - if (message.ping !== undefined) { - obj.ping = exports.Ping.toJSON(message.ping); - } - return obj; - }, - create: function (base) { - return exports.ControlCommand.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var message = createBaseControlCommand(); - message.initialJoin = (object.initialJoin !== undefined && object.initialJoin !== null) - ? exports.JoinControlPlane.fromPartial(object.initialJoin) - : undefined; - message.commitOffset = (object.commitOffset !== undefined && object.commitOffset !== null) - ? exports.CommitOffset.fromPartial(object.commitOffset) - : undefined; - message.pollHist = (object.pollHist !== undefined && object.pollHist !== null) - ? exports.PollBlockchainHistory.fromPartial(object.pollHist) - : undefined; - message.ping = (object.ping !== undefined && object.ping !== null) ? 
exports.Ping.fromPartial(object.ping) : undefined; - return message; - }, -}; -function createBaseControlResponse() { - return { init: undefined, commitOffset: undefined, pollHist: undefined, pong: undefined }; -} -exports.ControlResponse = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.init !== undefined) { - exports.InitialConsumerGroupState.encode(message.init, writer.uint32(10).fork()).join(); - } - if (message.commitOffset !== undefined) { - exports.CommitOffsetResult.encode(message.commitOffset, writer.uint32(18).fork()).join(); - } - if (message.pollHist !== undefined) { - exports.BlockchainHistory.encode(message.pollHist, writer.uint32(26).fork()).join(); - } - if (message.pong !== undefined) { - exports.Pong.encode(message.pong, writer.uint32(34).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseControlResponse(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.init = exports.InitialConsumerGroupState.decode(reader, reader.uint32()); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.commitOffset = exports.CommitOffsetResult.decode(reader, reader.uint32()); - continue; - } - case 3: { - if (tag !== 26) { - break; - } - message.pollHist = exports.BlockchainHistory.decode(reader, reader.uint32()); - continue; - } - case 4: { - if (tag !== 34) { - break; - } - message.pong = exports.Pong.decode(reader, reader.uint32()); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - init: isSet(object.init) ? 
exports.InitialConsumerGroupState.fromJSON(object.init) : undefined, - commitOffset: isSet(object.commitOffset) ? exports.CommitOffsetResult.fromJSON(object.commitOffset) : undefined, - pollHist: isSet(object.pollHist) ? exports.BlockchainHistory.fromJSON(object.pollHist) : undefined, - pong: isSet(object.pong) ? exports.Pong.fromJSON(object.pong) : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.init !== undefined) { - obj.init = exports.InitialConsumerGroupState.toJSON(message.init); - } - if (message.commitOffset !== undefined) { - obj.commitOffset = exports.CommitOffsetResult.toJSON(message.commitOffset); - } - if (message.pollHist !== undefined) { - obj.pollHist = exports.BlockchainHistory.toJSON(message.pollHist); - } - if (message.pong !== undefined) { - obj.pong = exports.Pong.toJSON(message.pong); - } - return obj; - }, - create: function (base) { - return exports.ControlResponse.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var message = createBaseControlResponse(); - message.init = (object.init !== undefined && object.init !== null) - ? exports.InitialConsumerGroupState.fromPartial(object.init) - : undefined; - message.commitOffset = (object.commitOffset !== undefined && object.commitOffset !== null) - ? exports.CommitOffsetResult.fromPartial(object.commitOffset) - : undefined; - message.pollHist = (object.pollHist !== undefined && object.pollHist !== null) - ? exports.BlockchainHistory.fromPartial(object.pollHist) - : undefined; - message.pong = (object.pong !== undefined && object.pong !== null) ? 
exports.Pong.fromPartial(object.pong) : undefined; - return message; - }, -}; -function createBaseCommitOffsetResult() { - return { offset: "0", shardId: 0 }; -} -exports.CommitOffsetResult = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.offset !== "0") { - writer.uint32(8).int64(message.offset); - } - if (message.shardId !== 0) { - writer.uint32(16).int32(message.shardId); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseCommitOffsetResult(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.offset = reader.int64().toString(); - continue; - } - case 2: { - if (tag !== 16) { - break; - } - message.shardId = reader.int32(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - offset: isSet(object.offset) ? globalThis.String(object.offset) : "0", - shardId: isSet(object.shardId) ? globalThis.Number(object.shardId) : 0, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.offset !== "0") { - obj.offset = message.offset; - } - if (message.shardId !== 0) { - obj.shardId = Math.round(message.shardId); - } - return obj; - }, - create: function (base) { - return exports.CommitOffsetResult.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b; - var message = createBaseCommitOffsetResult(); - message.offset = (_a = object.offset) !== null && _a !== void 0 ? _a : "0"; - message.shardId = (_b = object.shardId) !== null && _b !== void 0 ? 
_b : 0; - return message; - }, -}; -function createBaseInitialConsumerGroupState() { - return { blockchainId: new Uint8Array(0), lastCommittedOffsets: {} }; -} -exports.InitialConsumerGroupState = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.blockchainId.length !== 0) { - writer.uint32(10).bytes(message.blockchainId); - } - Object.entries(message.lastCommittedOffsets).forEach(function (_a) { - var key = _a[0], value = _a[1]; - exports.InitialConsumerGroupState_LastCommittedOffsetsEntry.encode({ key: key, value: value }, writer.uint32(18).fork()) - .join(); - }); - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseInitialConsumerGroupState(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.blockchainId = reader.bytes(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - var entry2 = exports.InitialConsumerGroupState_LastCommittedOffsetsEntry.decode(reader, reader.uint32()); - if (entry2.value !== undefined) { - message.lastCommittedOffsets[entry2.key] = entry2.value; - } - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - blockchainId: isSet(object.blockchainId) ? bytesFromBase64(object.blockchainId) : new Uint8Array(0), - lastCommittedOffsets: isObject(object.lastCommittedOffsets) - ? 
Object.entries(object.lastCommittedOffsets).reduce(function (acc, _a) { - var key = _a[0], value = _a[1]; - acc[globalThis.Number(key)] = String(value); - return acc; - }, {}) - : {}, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.blockchainId.length !== 0) { - obj.blockchainId = base64FromBytes(message.blockchainId); - } - if (message.lastCommittedOffsets) { - var entries = Object.entries(message.lastCommittedOffsets); - if (entries.length > 0) { - obj.lastCommittedOffsets = {}; - entries.forEach(function (_a) { - var k = _a[0], v = _a[1]; - obj.lastCommittedOffsets[k] = v; - }); - } - } - return obj; - }, - create: function (base) { - return exports.InitialConsumerGroupState.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b; - var message = createBaseInitialConsumerGroupState(); - message.blockchainId = (_a = object.blockchainId) !== null && _a !== void 0 ? _a : new Uint8Array(0); - message.lastCommittedOffsets = Object.entries((_b = object.lastCommittedOffsets) !== null && _b !== void 0 ? _b : {}).reduce(function (acc, _a) { - var key = _a[0], value = _a[1]; - if (value !== undefined) { - acc[globalThis.Number(key)] = globalThis.String(value); - } - return acc; - }, {}); - return message; - }, -}; -function createBaseInitialConsumerGroupState_LastCommittedOffsetsEntry() { - return { key: 0, value: "0" }; -} -exports.InitialConsumerGroupState_LastCommittedOffsetsEntry = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.key !== 0) { - writer.uint32(8).int32(message.key); - } - if (message.value !== "0") { - writer.uint32(16).int64(message.value); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseInitialConsumerGroupState_LastCommittedOffsetsEntry(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.key = reader.int32(); - continue; - } - case 2: { - if (tag !== 16) { - break; - } - message.value = reader.int64().toString(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - key: isSet(object.key) ? globalThis.Number(object.key) : 0, - value: isSet(object.value) ? globalThis.String(object.value) : "0", - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.key !== 0) { - obj.key = Math.round(message.key); - } - if (message.value !== "0") { - obj.value = message.value; - } - return obj; - }, - create: function (base) { - return exports.InitialConsumerGroupState_LastCommittedOffsetsEntry.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b; - var message = createBaseInitialConsumerGroupState_LastCommittedOffsetsEntry(); - message.key = (_a = object.key) !== null && _a !== void 0 ? _a : 0; - message.value = (_b = object.value) !== null && _b !== void 0 ? _b : "0"; - return message; - }, -}; -function createBaseCreateConsumerGroupResponse() { - return { consumerGroupId: "" }; -} -exports.CreateConsumerGroupResponse = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.consumerGroupId !== "") { - writer.uint32(10).string(message.consumerGroupId); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseCreateConsumerGroupResponse(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.consumerGroupId = reader.string(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { consumerGroupId: isSet(object.consumerGroupId) ? globalThis.String(object.consumerGroupId) : "" }; - }, - toJSON: function (message) { - var obj = {}; - if (message.consumerGroupId !== "") { - obj.consumerGroupId = message.consumerGroupId; - } - return obj; - }, - create: function (base) { - return exports.CreateConsumerGroupResponse.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseCreateConsumerGroupResponse(); - message.consumerGroupId = (_a = object.consumerGroupId) !== null && _a !== void 0 ? _a : ""; - return message; - }, -}; -function createBaseCreateConsumerGroupRequest() { - return { consumerGroupName: "", initialOffsetPolicy: 0 }; -} -exports.CreateConsumerGroupRequest = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.consumerGroupName !== "") { - writer.uint32(10).string(message.consumerGroupName); - } - if (message.initialOffsetPolicy !== 0) { - writer.uint32(16).int32(message.initialOffsetPolicy); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseCreateConsumerGroupRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.consumerGroupName = reader.string(); - continue; - } - case 2: { - if (tag !== 16) { - break; - } - message.initialOffsetPolicy = reader.int32(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - consumerGroupName: isSet(object.consumerGroupName) ? globalThis.String(object.consumerGroupName) : "", - initialOffsetPolicy: isSet(object.initialOffsetPolicy) - ? initialOffsetPolicyFromJSON(object.initialOffsetPolicy) - : 0, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.consumerGroupName !== "") { - obj.consumerGroupName = message.consumerGroupName; - } - if (message.initialOffsetPolicy !== 0) { - obj.initialOffsetPolicy = initialOffsetPolicyToJSON(message.initialOffsetPolicy); - } - return obj; - }, - create: function (base) { - return exports.CreateConsumerGroupRequest.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b; - var message = createBaseCreateConsumerGroupRequest(); - message.consumerGroupName = (_a = object.consumerGroupName) !== null && _a !== void 0 ? _a : ""; - message.initialOffsetPolicy = (_b = object.initialOffsetPolicy) !== null && _b !== void 0 ? 
_b : 0; - return message; - }, -}; -exports.FumaroleService = { - getConsumerGroupInfo: { - path: "/fumarole.Fumarole/GetConsumerGroupInfo", - requestStream: false, - responseStream: false, - requestSerialize: function (value) { - return Buffer.from(exports.GetConsumerGroupInfoRequest.encode(value).finish()); - }, - requestDeserialize: function (value) { return exports.GetConsumerGroupInfoRequest.decode(value); }, - responseSerialize: function (value) { return Buffer.from(exports.ConsumerGroupInfo.encode(value).finish()); }, - responseDeserialize: function (value) { return exports.ConsumerGroupInfo.decode(value); }, - }, - listConsumerGroups: { - path: "/fumarole.Fumarole/ListConsumerGroups", - requestStream: false, - responseStream: false, - requestSerialize: function (value) { - return Buffer.from(exports.ListConsumerGroupsRequest.encode(value).finish()); - }, - requestDeserialize: function (value) { return exports.ListConsumerGroupsRequest.decode(value); }, - responseSerialize: function (value) { - return Buffer.from(exports.ListConsumerGroupsResponse.encode(value).finish()); - }, - responseDeserialize: function (value) { return exports.ListConsumerGroupsResponse.decode(value); }, - }, - deleteConsumerGroup: { - path: "/fumarole.Fumarole/DeleteConsumerGroup", - requestStream: false, - responseStream: false, - requestSerialize: function (value) { - return Buffer.from(exports.DeleteConsumerGroupRequest.encode(value).finish()); - }, - requestDeserialize: function (value) { return exports.DeleteConsumerGroupRequest.decode(value); }, - responseSerialize: function (value) { - return Buffer.from(exports.DeleteConsumerGroupResponse.encode(value).finish()); - }, - responseDeserialize: function (value) { return exports.DeleteConsumerGroupResponse.decode(value); }, - }, - createConsumerGroup: { - path: "/fumarole.Fumarole/CreateConsumerGroup", - requestStream: false, - responseStream: false, - requestSerialize: function (value) { - return 
Buffer.from(exports.CreateConsumerGroupRequest.encode(value).finish()); - }, - requestDeserialize: function (value) { return exports.CreateConsumerGroupRequest.decode(value); }, - responseSerialize: function (value) { - return Buffer.from(exports.CreateConsumerGroupResponse.encode(value).finish()); - }, - responseDeserialize: function (value) { return exports.CreateConsumerGroupResponse.decode(value); }, - }, - downloadBlock: { - path: "/fumarole.Fumarole/DownloadBlock", - requestStream: false, - responseStream: true, - requestSerialize: function (value) { return Buffer.from(exports.DownloadBlockShard.encode(value).finish()); }, - requestDeserialize: function (value) { return exports.DownloadBlockShard.decode(value); }, - responseSerialize: function (value) { return Buffer.from(exports.DataResponse.encode(value).finish()); }, - responseDeserialize: function (value) { return exports.DataResponse.decode(value); }, - }, - /** Represents subscription to the data plane */ - subscribeData: { - path: "/fumarole.Fumarole/SubscribeData", - requestStream: true, - responseStream: true, - requestSerialize: function (value) { return Buffer.from(exports.DataCommand.encode(value).finish()); }, - requestDeserialize: function (value) { return exports.DataCommand.decode(value); }, - responseSerialize: function (value) { return Buffer.from(exports.DataResponse.encode(value).finish()); }, - responseDeserialize: function (value) { return exports.DataResponse.decode(value); }, - }, - getChainTip: { - path: "/fumarole.Fumarole/GetChainTip", - requestStream: false, - responseStream: false, - requestSerialize: function (value) { return Buffer.from(exports.GetChainTipRequest.encode(value).finish()); }, - requestDeserialize: function (value) { return exports.GetChainTipRequest.decode(value); }, - responseSerialize: function (value) { return Buffer.from(exports.GetChainTipResponse.encode(value).finish()); }, - responseDeserialize: function (value) { return 
exports.GetChainTipResponse.decode(value); }, - }, - /** Represents subscription to the control plane */ - subscribe: { - path: "/fumarole.Fumarole/Subscribe", - requestStream: true, - responseStream: true, - requestSerialize: function (value) { return Buffer.from(exports.ControlCommand.encode(value).finish()); }, - requestDeserialize: function (value) { return exports.ControlCommand.decode(value); }, - responseSerialize: function (value) { return Buffer.from(exports.ControlResponse.encode(value).finish()); }, - responseDeserialize: function (value) { return exports.ControlResponse.decode(value); }, - }, - version: { - path: "/fumarole.Fumarole/Version", - requestStream: false, - responseStream: false, - requestSerialize: function (value) { return Buffer.from(exports.VersionRequest.encode(value).finish()); }, - requestDeserialize: function (value) { return exports.VersionRequest.decode(value); }, - responseSerialize: function (value) { return Buffer.from(exports.VersionResponse.encode(value).finish()); }, - responseDeserialize: function (value) { return exports.VersionResponse.decode(value); }, - }, -}; -exports.FumaroleClient = (0, grpc_js_1.makeGenericClientConstructor)(exports.FumaroleService, "fumarole.Fumarole"); -function bytesFromBase64(b64) { - if (globalThis.Buffer) { - return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); - } - else { - var bin = globalThis.atob(b64); - var arr = new Uint8Array(bin.length); - for (var i = 0; i < bin.length; ++i) { - arr[i] = bin.charCodeAt(i); - } - return arr; - } -} -function base64FromBytes(arr) { - if (globalThis.Buffer) { - return globalThis.Buffer.from(arr).toString("base64"); - } - else { - var bin_1 = []; - arr.forEach(function (byte) { - bin_1.push(globalThis.String.fromCharCode(byte)); - }); - return globalThis.btoa(bin_1.join("")); - } -} -function isObject(value) { - return typeof value === "object" && value !== null; -} -function isSet(value) { - return value !== null && value !== undefined; -} diff 
--git a/typescript-sdk/src/grpc/geyser.js b/typescript-sdk/src/grpc/geyser.js deleted file mode 100644 index 4563b91..0000000 --- a/typescript-sdk/src/grpc/geyser.js +++ /dev/null @@ -1,4457 +0,0 @@ -"use strict"; -// Code generated by protoc-gen-ts_proto. DO NOT EDIT. -// versions: -// protoc-gen-ts_proto v2.7.7 -// protoc v3.12.4 -// source: geyser.proto -Object.defineProperty(exports, "__esModule", { value: true }); -exports.GeyserClient = exports.GeyserService = exports.IsBlockhashValidResponse = exports.IsBlockhashValidRequest = exports.GetVersionResponse = exports.GetVersionRequest = exports.GetSlotResponse = exports.GetSlotRequest = exports.GetBlockHeightResponse = exports.GetBlockHeightRequest = exports.GetLatestBlockhashResponse = exports.GetLatestBlockhashRequest = exports.PongResponse = exports.PingRequest = exports.SubscribeReplayInfoResponse = exports.SubscribeReplayInfoRequest = exports.SubscribeUpdatePong = exports.SubscribeUpdatePing = exports.SubscribeUpdateEntry = exports.SubscribeUpdateBlockMeta = exports.SubscribeUpdateBlock = exports.SubscribeUpdateTransactionStatus = exports.SubscribeUpdateTransactionInfo = exports.SubscribeUpdateTransaction = exports.SubscribeUpdateSlot = exports.SubscribeUpdateAccountInfo = exports.SubscribeUpdateAccount = exports.SubscribeUpdate = exports.SubscribeRequestPing = exports.SubscribeRequestAccountsDataSlice = exports.SubscribeRequestFilterEntry = exports.SubscribeRequestFilterBlocksMeta = exports.SubscribeRequestFilterBlocks = exports.SubscribeRequestFilterTransactions = exports.SubscribeRequestFilterSlots = exports.SubscribeRequestFilterAccountsFilterLamports = exports.SubscribeRequestFilterAccountsFilterMemcmp = exports.SubscribeRequestFilterAccountsFilter = exports.SubscribeRequestFilterAccounts = exports.SubscribeRequest_EntryEntry = exports.SubscribeRequest_BlocksMetaEntry = exports.SubscribeRequest_BlocksEntry = exports.SubscribeRequest_TransactionsStatusEntry = exports.SubscribeRequest_TransactionsEntry = 
exports.SubscribeRequest_SlotsEntry = exports.SubscribeRequest_AccountsEntry = exports.SubscribeRequest = exports.SlotStatus = exports.CommitmentLevel = exports.protobufPackage = void 0; -exports.commitmentLevelFromJSON = commitmentLevelFromJSON; -exports.commitmentLevelToJSON = commitmentLevelToJSON; -exports.slotStatusFromJSON = slotStatusFromJSON; -exports.slotStatusToJSON = slotStatusToJSON; -/* eslint-disable */ -var wire_1 = require("@bufbuild/protobuf/wire"); -var grpc_js_1 = require("@grpc/grpc-js"); -var timestamp_1 = require("./google/protobuf/timestamp"); -var solana_storage_1 = require("./solana-storage"); -exports.protobufPackage = "geyser"; -var CommitmentLevel; -(function (CommitmentLevel) { - CommitmentLevel[CommitmentLevel["PROCESSED"] = 0] = "PROCESSED"; - CommitmentLevel[CommitmentLevel["CONFIRMED"] = 1] = "CONFIRMED"; - CommitmentLevel[CommitmentLevel["FINALIZED"] = 2] = "FINALIZED"; - CommitmentLevel[CommitmentLevel["UNRECOGNIZED"] = -1] = "UNRECOGNIZED"; -})(CommitmentLevel || (exports.CommitmentLevel = CommitmentLevel = {})); -function commitmentLevelFromJSON(object) { - switch (object) { - case 0: - case "PROCESSED": - return CommitmentLevel.PROCESSED; - case 1: - case "CONFIRMED": - return CommitmentLevel.CONFIRMED; - case 2: - case "FINALIZED": - return CommitmentLevel.FINALIZED; - case -1: - case "UNRECOGNIZED": - default: - return CommitmentLevel.UNRECOGNIZED; - } -} -function commitmentLevelToJSON(object) { - switch (object) { - case CommitmentLevel.PROCESSED: - return "PROCESSED"; - case CommitmentLevel.CONFIRMED: - return "CONFIRMED"; - case CommitmentLevel.FINALIZED: - return "FINALIZED"; - case CommitmentLevel.UNRECOGNIZED: - default: - return "UNRECOGNIZED"; - } -} -var SlotStatus; -(function (SlotStatus) { - SlotStatus[SlotStatus["SLOT_PROCESSED"] = 0] = "SLOT_PROCESSED"; - SlotStatus[SlotStatus["SLOT_CONFIRMED"] = 1] = "SLOT_CONFIRMED"; - SlotStatus[SlotStatus["SLOT_FINALIZED"] = 2] = "SLOT_FINALIZED"; - 
SlotStatus[SlotStatus["SLOT_FIRST_SHRED_RECEIVED"] = 3] = "SLOT_FIRST_SHRED_RECEIVED"; - SlotStatus[SlotStatus["SLOT_COMPLETED"] = 4] = "SLOT_COMPLETED"; - SlotStatus[SlotStatus["SLOT_CREATED_BANK"] = 5] = "SLOT_CREATED_BANK"; - SlotStatus[SlotStatus["SLOT_DEAD"] = 6] = "SLOT_DEAD"; - SlotStatus[SlotStatus["UNRECOGNIZED"] = -1] = "UNRECOGNIZED"; -})(SlotStatus || (exports.SlotStatus = SlotStatus = {})); -function slotStatusFromJSON(object) { - switch (object) { - case 0: - case "SLOT_PROCESSED": - return SlotStatus.SLOT_PROCESSED; - case 1: - case "SLOT_CONFIRMED": - return SlotStatus.SLOT_CONFIRMED; - case 2: - case "SLOT_FINALIZED": - return SlotStatus.SLOT_FINALIZED; - case 3: - case "SLOT_FIRST_SHRED_RECEIVED": - return SlotStatus.SLOT_FIRST_SHRED_RECEIVED; - case 4: - case "SLOT_COMPLETED": - return SlotStatus.SLOT_COMPLETED; - case 5: - case "SLOT_CREATED_BANK": - return SlotStatus.SLOT_CREATED_BANK; - case 6: - case "SLOT_DEAD": - return SlotStatus.SLOT_DEAD; - case -1: - case "UNRECOGNIZED": - default: - return SlotStatus.UNRECOGNIZED; - } -} -function slotStatusToJSON(object) { - switch (object) { - case SlotStatus.SLOT_PROCESSED: - return "SLOT_PROCESSED"; - case SlotStatus.SLOT_CONFIRMED: - return "SLOT_CONFIRMED"; - case SlotStatus.SLOT_FINALIZED: - return "SLOT_FINALIZED"; - case SlotStatus.SLOT_FIRST_SHRED_RECEIVED: - return "SLOT_FIRST_SHRED_RECEIVED"; - case SlotStatus.SLOT_COMPLETED: - return "SLOT_COMPLETED"; - case SlotStatus.SLOT_CREATED_BANK: - return "SLOT_CREATED_BANK"; - case SlotStatus.SLOT_DEAD: - return "SLOT_DEAD"; - case SlotStatus.UNRECOGNIZED: - default: - return "UNRECOGNIZED"; - } -} -function createBaseSubscribeRequest() { - return { - accounts: {}, - slots: {}, - transactions: {}, - transactionsStatus: {}, - blocks: {}, - blocksMeta: {}, - entry: {}, - commitment: undefined, - accountsDataSlice: [], - ping: undefined, - fromSlot: undefined, - }; -} -exports.SubscribeRequest = { - encode: function (message, writer) { - if (writer 
=== void 0) { writer = new wire_1.BinaryWriter(); } - Object.entries(message.accounts).forEach(function (_a) { - var key = _a[0], value = _a[1]; - exports.SubscribeRequest_AccountsEntry.encode({ key: key, value: value }, writer.uint32(10).fork()).join(); - }); - Object.entries(message.slots).forEach(function (_a) { - var key = _a[0], value = _a[1]; - exports.SubscribeRequest_SlotsEntry.encode({ key: key, value: value }, writer.uint32(18).fork()).join(); - }); - Object.entries(message.transactions).forEach(function (_a) { - var key = _a[0], value = _a[1]; - exports.SubscribeRequest_TransactionsEntry.encode({ key: key, value: value }, writer.uint32(26).fork()).join(); - }); - Object.entries(message.transactionsStatus).forEach(function (_a) { - var key = _a[0], value = _a[1]; - exports.SubscribeRequest_TransactionsStatusEntry.encode({ key: key, value: value }, writer.uint32(82).fork()).join(); - }); - Object.entries(message.blocks).forEach(function (_a) { - var key = _a[0], value = _a[1]; - exports.SubscribeRequest_BlocksEntry.encode({ key: key, value: value }, writer.uint32(34).fork()).join(); - }); - Object.entries(message.blocksMeta).forEach(function (_a) { - var key = _a[0], value = _a[1]; - exports.SubscribeRequest_BlocksMetaEntry.encode({ key: key, value: value }, writer.uint32(42).fork()).join(); - }); - Object.entries(message.entry).forEach(function (_a) { - var key = _a[0], value = _a[1]; - exports.SubscribeRequest_EntryEntry.encode({ key: key, value: value }, writer.uint32(66).fork()).join(); - }); - if (message.commitment !== undefined) { - writer.uint32(48).int32(message.commitment); - } - for (var _i = 0, _a = message.accountsDataSlice; _i < _a.length; _i++) { - var v = _a[_i]; - exports.SubscribeRequestAccountsDataSlice.encode(v, writer.uint32(58).fork()).join(); - } - if (message.ping !== undefined) { - exports.SubscribeRequestPing.encode(message.ping, writer.uint32(74).fork()).join(); - } - if (message.fromSlot !== undefined) { - 
writer.uint32(88).uint64(message.fromSlot); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseSubscribeRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - var entry1 = exports.SubscribeRequest_AccountsEntry.decode(reader, reader.uint32()); - if (entry1.value !== undefined) { - message.accounts[entry1.key] = entry1.value; - } - continue; - } - case 2: { - if (tag !== 18) { - break; - } - var entry2 = exports.SubscribeRequest_SlotsEntry.decode(reader, reader.uint32()); - if (entry2.value !== undefined) { - message.slots[entry2.key] = entry2.value; - } - continue; - } - case 3: { - if (tag !== 26) { - break; - } - var entry3 = exports.SubscribeRequest_TransactionsEntry.decode(reader, reader.uint32()); - if (entry3.value !== undefined) { - message.transactions[entry3.key] = entry3.value; - } - continue; - } - case 10: { - if (tag !== 82) { - break; - } - var entry10 = exports.SubscribeRequest_TransactionsStatusEntry.decode(reader, reader.uint32()); - if (entry10.value !== undefined) { - message.transactionsStatus[entry10.key] = entry10.value; - } - continue; - } - case 4: { - if (tag !== 34) { - break; - } - var entry4 = exports.SubscribeRequest_BlocksEntry.decode(reader, reader.uint32()); - if (entry4.value !== undefined) { - message.blocks[entry4.key] = entry4.value; - } - continue; - } - case 5: { - if (tag !== 42) { - break; - } - var entry5 = exports.SubscribeRequest_BlocksMetaEntry.decode(reader, reader.uint32()); - if (entry5.value !== undefined) { - message.blocksMeta[entry5.key] = entry5.value; - } - continue; - } - case 8: { - if (tag !== 66) { - break; - } - var entry8 = exports.SubscribeRequest_EntryEntry.decode(reader, reader.uint32()); - if (entry8.value !== undefined) { 
- message.entry[entry8.key] = entry8.value; - } - continue; - } - case 6: { - if (tag !== 48) { - break; - } - message.commitment = reader.int32(); - continue; - } - case 7: { - if (tag !== 58) { - break; - } - message.accountsDataSlice.push(exports.SubscribeRequestAccountsDataSlice.decode(reader, reader.uint32())); - continue; - } - case 9: { - if (tag !== 74) { - break; - } - message.ping = exports.SubscribeRequestPing.decode(reader, reader.uint32()); - continue; - } - case 11: { - if (tag !== 88) { - break; - } - message.fromSlot = reader.uint64().toString(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - accounts: isObject(object.accounts) - ? Object.entries(object.accounts).reduce(function (acc, _a) { - var key = _a[0], value = _a[1]; - acc[key] = exports.SubscribeRequestFilterAccounts.fromJSON(value); - return acc; - }, {}) - : {}, - slots: isObject(object.slots) - ? Object.entries(object.slots).reduce(function (acc, _a) { - var key = _a[0], value = _a[1]; - acc[key] = exports.SubscribeRequestFilterSlots.fromJSON(value); - return acc; - }, {}) - : {}, - transactions: isObject(object.transactions) - ? Object.entries(object.transactions).reduce(function (acc, _a) { - var key = _a[0], value = _a[1]; - acc[key] = exports.SubscribeRequestFilterTransactions.fromJSON(value); - return acc; - }, {}) - : {}, - transactionsStatus: isObject(object.transactionsStatus) - ? Object.entries(object.transactionsStatus).reduce(function (acc, _a) { - var key = _a[0], value = _a[1]; - acc[key] = exports.SubscribeRequestFilterTransactions.fromJSON(value); - return acc; - }, {}) - : {}, - blocks: isObject(object.blocks) - ? Object.entries(object.blocks).reduce(function (acc, _a) { - var key = _a[0], value = _a[1]; - acc[key] = exports.SubscribeRequestFilterBlocks.fromJSON(value); - return acc; - }, {}) - : {}, - blocksMeta: isObject(object.blocksMeta) - ? 
Object.entries(object.blocksMeta).reduce(function (acc, _a) { - var key = _a[0], value = _a[1]; - acc[key] = exports.SubscribeRequestFilterBlocksMeta.fromJSON(value); - return acc; - }, {}) - : {}, - entry: isObject(object.entry) - ? Object.entries(object.entry).reduce(function (acc, _a) { - var key = _a[0], value = _a[1]; - acc[key] = exports.SubscribeRequestFilterEntry.fromJSON(value); - return acc; - }, {}) - : {}, - commitment: isSet(object.commitment) ? commitmentLevelFromJSON(object.commitment) : undefined, - accountsDataSlice: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.accountsDataSlice) - ? object.accountsDataSlice.map(function (e) { return exports.SubscribeRequestAccountsDataSlice.fromJSON(e); }) - : [], - ping: isSet(object.ping) ? exports.SubscribeRequestPing.fromJSON(object.ping) : undefined, - fromSlot: isSet(object.fromSlot) ? globalThis.String(object.fromSlot) : undefined, - }; - }, - toJSON: function (message) { - var _a; - var obj = {}; - if (message.accounts) { - var entries = Object.entries(message.accounts); - if (entries.length > 0) { - obj.accounts = {}; - entries.forEach(function (_a) { - var k = _a[0], v = _a[1]; - obj.accounts[k] = exports.SubscribeRequestFilterAccounts.toJSON(v); - }); - } - } - if (message.slots) { - var entries = Object.entries(message.slots); - if (entries.length > 0) { - obj.slots = {}; - entries.forEach(function (_a) { - var k = _a[0], v = _a[1]; - obj.slots[k] = exports.SubscribeRequestFilterSlots.toJSON(v); - }); - } - } - if (message.transactions) { - var entries = Object.entries(message.transactions); - if (entries.length > 0) { - obj.transactions = {}; - entries.forEach(function (_a) { - var k = _a[0], v = _a[1]; - obj.transactions[k] = exports.SubscribeRequestFilterTransactions.toJSON(v); - }); - } - } - if (message.transactionsStatus) { - var entries = Object.entries(message.transactionsStatus); - if (entries.length > 0) { - obj.transactionsStatus = {}; - 
entries.forEach(function (_a) { - var k = _a[0], v = _a[1]; - obj.transactionsStatus[k] = exports.SubscribeRequestFilterTransactions.toJSON(v); - }); - } - } - if (message.blocks) { - var entries = Object.entries(message.blocks); - if (entries.length > 0) { - obj.blocks = {}; - entries.forEach(function (_a) { - var k = _a[0], v = _a[1]; - obj.blocks[k] = exports.SubscribeRequestFilterBlocks.toJSON(v); - }); - } - } - if (message.blocksMeta) { - var entries = Object.entries(message.blocksMeta); - if (entries.length > 0) { - obj.blocksMeta = {}; - entries.forEach(function (_a) { - var k = _a[0], v = _a[1]; - obj.blocksMeta[k] = exports.SubscribeRequestFilterBlocksMeta.toJSON(v); - }); - } - } - if (message.entry) { - var entries = Object.entries(message.entry); - if (entries.length > 0) { - obj.entry = {}; - entries.forEach(function (_a) { - var k = _a[0], v = _a[1]; - obj.entry[k] = exports.SubscribeRequestFilterEntry.toJSON(v); - }); - } - } - if (message.commitment !== undefined) { - obj.commitment = commitmentLevelToJSON(message.commitment); - } - if ((_a = message.accountsDataSlice) === null || _a === void 0 ? void 0 : _a.length) { - obj.accountsDataSlice = message.accountsDataSlice.map(function (e) { return exports.SubscribeRequestAccountsDataSlice.toJSON(e); }); - } - if (message.ping !== undefined) { - obj.ping = exports.SubscribeRequestPing.toJSON(message.ping); - } - if (message.fromSlot !== undefined) { - obj.fromSlot = message.fromSlot; - } - return obj; - }, - create: function (base) { - return exports.SubscribeRequest.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k; - var message = createBaseSubscribeRequest(); - message.accounts = Object.entries((_a = object.accounts) !== null && _a !== void 0 ? 
_a : {}).reduce(function (acc, _a) { - var key = _a[0], value = _a[1]; - if (value !== undefined) { - acc[key] = exports.SubscribeRequestFilterAccounts.fromPartial(value); - } - return acc; - }, {}); - message.slots = Object.entries((_b = object.slots) !== null && _b !== void 0 ? _b : {}).reduce(function (acc, _a) { - var key = _a[0], value = _a[1]; - if (value !== undefined) { - acc[key] = exports.SubscribeRequestFilterSlots.fromPartial(value); - } - return acc; - }, {}); - message.transactions = Object.entries((_c = object.transactions) !== null && _c !== void 0 ? _c : {}).reduce(function (acc, _a) { - var key = _a[0], value = _a[1]; - if (value !== undefined) { - acc[key] = exports.SubscribeRequestFilterTransactions.fromPartial(value); - } - return acc; - }, {}); - message.transactionsStatus = Object.entries((_d = object.transactionsStatus) !== null && _d !== void 0 ? _d : {}).reduce(function (acc, _a) { - var key = _a[0], value = _a[1]; - if (value !== undefined) { - acc[key] = exports.SubscribeRequestFilterTransactions.fromPartial(value); - } - return acc; - }, {}); - message.blocks = Object.entries((_e = object.blocks) !== null && _e !== void 0 ? _e : {}).reduce(function (acc, _a) { - var key = _a[0], value = _a[1]; - if (value !== undefined) { - acc[key] = exports.SubscribeRequestFilterBlocks.fromPartial(value); - } - return acc; - }, {}); - message.blocksMeta = Object.entries((_f = object.blocksMeta) !== null && _f !== void 0 ? _f : {}).reduce(function (acc, _a) { - var key = _a[0], value = _a[1]; - if (value !== undefined) { - acc[key] = exports.SubscribeRequestFilterBlocksMeta.fromPartial(value); - } - return acc; - }, {}); - message.entry = Object.entries((_g = object.entry) !== null && _g !== void 0 ? 
_g : {}).reduce(function (acc, _a) { - var key = _a[0], value = _a[1]; - if (value !== undefined) { - acc[key] = exports.SubscribeRequestFilterEntry.fromPartial(value); - } - return acc; - }, {}); - message.commitment = (_h = object.commitment) !== null && _h !== void 0 ? _h : undefined; - message.accountsDataSlice = - ((_j = object.accountsDataSlice) === null || _j === void 0 ? void 0 : _j.map(function (e) { return exports.SubscribeRequestAccountsDataSlice.fromPartial(e); })) || []; - message.ping = (object.ping !== undefined && object.ping !== null) - ? exports.SubscribeRequestPing.fromPartial(object.ping) - : undefined; - message.fromSlot = (_k = object.fromSlot) !== null && _k !== void 0 ? _k : undefined; - return message; - }, -}; -function createBaseSubscribeRequest_AccountsEntry() { - return { key: "", value: undefined }; -} -exports.SubscribeRequest_AccountsEntry = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.key !== "") { - writer.uint32(10).string(message.key); - } - if (message.value !== undefined) { - exports.SubscribeRequestFilterAccounts.encode(message.value, writer.uint32(18).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseSubscribeRequest_AccountsEntry(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.key = reader.string(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.value = exports.SubscribeRequestFilterAccounts.decode(reader, reader.uint32()); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - key: isSet(object.key) ? 
globalThis.String(object.key) : "", - value: isSet(object.value) ? exports.SubscribeRequestFilterAccounts.fromJSON(object.value) : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.key !== "") { - obj.key = message.key; - } - if (message.value !== undefined) { - obj.value = exports.SubscribeRequestFilterAccounts.toJSON(message.value); - } - return obj; - }, - create: function (base) { - return exports.SubscribeRequest_AccountsEntry.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseSubscribeRequest_AccountsEntry(); - message.key = (_a = object.key) !== null && _a !== void 0 ? _a : ""; - message.value = (object.value !== undefined && object.value !== null) - ? exports.SubscribeRequestFilterAccounts.fromPartial(object.value) - : undefined; - return message; - }, -}; -function createBaseSubscribeRequest_SlotsEntry() { - return { key: "", value: undefined }; -} -exports.SubscribeRequest_SlotsEntry = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.key !== "") { - writer.uint32(10).string(message.key); - } - if (message.value !== undefined) { - exports.SubscribeRequestFilterSlots.encode(message.value, writer.uint32(18).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseSubscribeRequest_SlotsEntry(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.key = reader.string(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.value = exports.SubscribeRequestFilterSlots.decode(reader, reader.uint32()); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - key: isSet(object.key) ? globalThis.String(object.key) : "", - value: isSet(object.value) ? exports.SubscribeRequestFilterSlots.fromJSON(object.value) : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.key !== "") { - obj.key = message.key; - } - if (message.value !== undefined) { - obj.value = exports.SubscribeRequestFilterSlots.toJSON(message.value); - } - return obj; - }, - create: function (base) { - return exports.SubscribeRequest_SlotsEntry.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseSubscribeRequest_SlotsEntry(); - message.key = (_a = object.key) !== null && _a !== void 0 ? _a : ""; - message.value = (object.value !== undefined && object.value !== null) - ? 
exports.SubscribeRequestFilterSlots.fromPartial(object.value) - : undefined; - return message; - }, -}; -function createBaseSubscribeRequest_TransactionsEntry() { - return { key: "", value: undefined }; -} -exports.SubscribeRequest_TransactionsEntry = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.key !== "") { - writer.uint32(10).string(message.key); - } - if (message.value !== undefined) { - exports.SubscribeRequestFilterTransactions.encode(message.value, writer.uint32(18).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseSubscribeRequest_TransactionsEntry(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.key = reader.string(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.value = exports.SubscribeRequestFilterTransactions.decode(reader, reader.uint32()); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - key: isSet(object.key) ? globalThis.String(object.key) : "", - value: isSet(object.value) ? exports.SubscribeRequestFilterTransactions.fromJSON(object.value) : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.key !== "") { - obj.key = message.key; - } - if (message.value !== undefined) { - obj.value = exports.SubscribeRequestFilterTransactions.toJSON(message.value); - } - return obj; - }, - create: function (base) { - return exports.SubscribeRequest_TransactionsEntry.fromPartial(base !== null && base !== void 0 ? 
base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseSubscribeRequest_TransactionsEntry(); - message.key = (_a = object.key) !== null && _a !== void 0 ? _a : ""; - message.value = (object.value !== undefined && object.value !== null) - ? exports.SubscribeRequestFilterTransactions.fromPartial(object.value) - : undefined; - return message; - }, -}; -function createBaseSubscribeRequest_TransactionsStatusEntry() { - return { key: "", value: undefined }; -} -exports.SubscribeRequest_TransactionsStatusEntry = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.key !== "") { - writer.uint32(10).string(message.key); - } - if (message.value !== undefined) { - exports.SubscribeRequestFilterTransactions.encode(message.value, writer.uint32(18).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseSubscribeRequest_TransactionsStatusEntry(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.key = reader.string(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.value = exports.SubscribeRequestFilterTransactions.decode(reader, reader.uint32()); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - key: isSet(object.key) ? globalThis.String(object.key) : "", - value: isSet(object.value) ? 
exports.SubscribeRequestFilterTransactions.fromJSON(object.value) : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.key !== "") { - obj.key = message.key; - } - if (message.value !== undefined) { - obj.value = exports.SubscribeRequestFilterTransactions.toJSON(message.value); - } - return obj; - }, - create: function (base) { - return exports.SubscribeRequest_TransactionsStatusEntry.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseSubscribeRequest_TransactionsStatusEntry(); - message.key = (_a = object.key) !== null && _a !== void 0 ? _a : ""; - message.value = (object.value !== undefined && object.value !== null) - ? exports.SubscribeRequestFilterTransactions.fromPartial(object.value) - : undefined; - return message; - }, -}; -function createBaseSubscribeRequest_BlocksEntry() { - return { key: "", value: undefined }; -} -exports.SubscribeRequest_BlocksEntry = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.key !== "") { - writer.uint32(10).string(message.key); - } - if (message.value !== undefined) { - exports.SubscribeRequestFilterBlocks.encode(message.value, writer.uint32(18).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseSubscribeRequest_BlocksEntry(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.key = reader.string(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.value = exports.SubscribeRequestFilterBlocks.decode(reader, reader.uint32()); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - key: isSet(object.key) ? globalThis.String(object.key) : "", - value: isSet(object.value) ? exports.SubscribeRequestFilterBlocks.fromJSON(object.value) : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.key !== "") { - obj.key = message.key; - } - if (message.value !== undefined) { - obj.value = exports.SubscribeRequestFilterBlocks.toJSON(message.value); - } - return obj; - }, - create: function (base) { - return exports.SubscribeRequest_BlocksEntry.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseSubscribeRequest_BlocksEntry(); - message.key = (_a = object.key) !== null && _a !== void 0 ? _a : ""; - message.value = (object.value !== undefined && object.value !== null) - ? 
exports.SubscribeRequestFilterBlocks.fromPartial(object.value) - : undefined; - return message; - }, -}; -function createBaseSubscribeRequest_BlocksMetaEntry() { - return { key: "", value: undefined }; -} -exports.SubscribeRequest_BlocksMetaEntry = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.key !== "") { - writer.uint32(10).string(message.key); - } - if (message.value !== undefined) { - exports.SubscribeRequestFilterBlocksMeta.encode(message.value, writer.uint32(18).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseSubscribeRequest_BlocksMetaEntry(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.key = reader.string(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.value = exports.SubscribeRequestFilterBlocksMeta.decode(reader, reader.uint32()); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - key: isSet(object.key) ? globalThis.String(object.key) : "", - value: isSet(object.value) ? exports.SubscribeRequestFilterBlocksMeta.fromJSON(object.value) : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.key !== "") { - obj.key = message.key; - } - if (message.value !== undefined) { - obj.value = exports.SubscribeRequestFilterBlocksMeta.toJSON(message.value); - } - return obj; - }, - create: function (base) { - return exports.SubscribeRequest_BlocksMetaEntry.fromPartial(base !== null && base !== void 0 ? 
base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseSubscribeRequest_BlocksMetaEntry(); - message.key = (_a = object.key) !== null && _a !== void 0 ? _a : ""; - message.value = (object.value !== undefined && object.value !== null) - ? exports.SubscribeRequestFilterBlocksMeta.fromPartial(object.value) - : undefined; - return message; - }, -}; -function createBaseSubscribeRequest_EntryEntry() { - return { key: "", value: undefined }; -} -exports.SubscribeRequest_EntryEntry = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.key !== "") { - writer.uint32(10).string(message.key); - } - if (message.value !== undefined) { - exports.SubscribeRequestFilterEntry.encode(message.value, writer.uint32(18).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseSubscribeRequest_EntryEntry(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.key = reader.string(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.value = exports.SubscribeRequestFilterEntry.decode(reader, reader.uint32()); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - key: isSet(object.key) ? globalThis.String(object.key) : "", - value: isSet(object.value) ? 
exports.SubscribeRequestFilterEntry.fromJSON(object.value) : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.key !== "") { - obj.key = message.key; - } - if (message.value !== undefined) { - obj.value = exports.SubscribeRequestFilterEntry.toJSON(message.value); - } - return obj; - }, - create: function (base) { - return exports.SubscribeRequest_EntryEntry.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseSubscribeRequest_EntryEntry(); - message.key = (_a = object.key) !== null && _a !== void 0 ? _a : ""; - message.value = (object.value !== undefined && object.value !== null) - ? exports.SubscribeRequestFilterEntry.fromPartial(object.value) - : undefined; - return message; - }, -}; -function createBaseSubscribeRequestFilterAccounts() { - return { account: [], owner: [], filters: [], nonemptyTxnSignature: undefined }; -} -exports.SubscribeRequestFilterAccounts = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - for (var _i = 0, _a = message.account; _i < _a.length; _i++) { - var v = _a[_i]; - writer.uint32(18).string(v); - } - for (var _b = 0, _c = message.owner; _b < _c.length; _b++) { - var v = _c[_b]; - writer.uint32(26).string(v); - } - for (var _d = 0, _e = message.filters; _d < _e.length; _d++) { - var v = _e[_d]; - exports.SubscribeRequestFilterAccountsFilter.encode(v, writer.uint32(34).fork()).join(); - } - if (message.nonemptyTxnSignature !== undefined) { - writer.uint32(40).bool(message.nonemptyTxnSignature); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseSubscribeRequestFilterAccounts(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 2: { - if (tag !== 18) { - break; - } - message.account.push(reader.string()); - continue; - } - case 3: { - if (tag !== 26) { - break; - } - message.owner.push(reader.string()); - continue; - } - case 4: { - if (tag !== 34) { - break; - } - message.filters.push(exports.SubscribeRequestFilterAccountsFilter.decode(reader, reader.uint32())); - continue; - } - case 5: { - if (tag !== 40) { - break; - } - message.nonemptyTxnSignature = reader.bool(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - account: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.account) ? object.account.map(function (e) { return globalThis.String(e); }) : [], - owner: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.owner) ? object.owner.map(function (e) { return globalThis.String(e); }) : [], - filters: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.filters) - ? object.filters.map(function (e) { return exports.SubscribeRequestFilterAccountsFilter.fromJSON(e); }) - : [], - nonemptyTxnSignature: isSet(object.nonemptyTxnSignature) - ? globalThis.Boolean(object.nonemptyTxnSignature) - : undefined, - }; - }, - toJSON: function (message) { - var _a, _b, _c; - var obj = {}; - if ((_a = message.account) === null || _a === void 0 ? void 0 : _a.length) { - obj.account = message.account; - } - if ((_b = message.owner) === null || _b === void 0 ? void 0 : _b.length) { - obj.owner = message.owner; - } - if ((_c = message.filters) === null || _c === void 0 ? 
void 0 : _c.length) { - obj.filters = message.filters.map(function (e) { return exports.SubscribeRequestFilterAccountsFilter.toJSON(e); }); - } - if (message.nonemptyTxnSignature !== undefined) { - obj.nonemptyTxnSignature = message.nonemptyTxnSignature; - } - return obj; - }, - create: function (base) { - return exports.SubscribeRequestFilterAccounts.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c, _d; - var message = createBaseSubscribeRequestFilterAccounts(); - message.account = ((_a = object.account) === null || _a === void 0 ? void 0 : _a.map(function (e) { return e; })) || []; - message.owner = ((_b = object.owner) === null || _b === void 0 ? void 0 : _b.map(function (e) { return e; })) || []; - message.filters = ((_c = object.filters) === null || _c === void 0 ? void 0 : _c.map(function (e) { return exports.SubscribeRequestFilterAccountsFilter.fromPartial(e); })) || []; - message.nonemptyTxnSignature = (_d = object.nonemptyTxnSignature) !== null && _d !== void 0 ? 
_d : undefined; - return message; - }, -}; -function createBaseSubscribeRequestFilterAccountsFilter() { - return { memcmp: undefined, datasize: undefined, tokenAccountState: undefined, lamports: undefined }; -} -exports.SubscribeRequestFilterAccountsFilter = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.memcmp !== undefined) { - exports.SubscribeRequestFilterAccountsFilterMemcmp.encode(message.memcmp, writer.uint32(10).fork()).join(); - } - if (message.datasize !== undefined) { - writer.uint32(16).uint64(message.datasize); - } - if (message.tokenAccountState !== undefined) { - writer.uint32(24).bool(message.tokenAccountState); - } - if (message.lamports !== undefined) { - exports.SubscribeRequestFilterAccountsFilterLamports.encode(message.lamports, writer.uint32(34).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseSubscribeRequestFilterAccountsFilter(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.memcmp = exports.SubscribeRequestFilterAccountsFilterMemcmp.decode(reader, reader.uint32()); - continue; - } - case 2: { - if (tag !== 16) { - break; - } - message.datasize = reader.uint64().toString(); - continue; - } - case 3: { - if (tag !== 24) { - break; - } - message.tokenAccountState = reader.bool(); - continue; - } - case 4: { - if (tag !== 34) { - break; - } - message.lamports = exports.SubscribeRequestFilterAccountsFilterLamports.decode(reader, reader.uint32()); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - memcmp: isSet(object.memcmp) ? 
exports.SubscribeRequestFilterAccountsFilterMemcmp.fromJSON(object.memcmp) : undefined, - datasize: isSet(object.datasize) ? globalThis.String(object.datasize) : undefined, - tokenAccountState: isSet(object.tokenAccountState) ? globalThis.Boolean(object.tokenAccountState) : undefined, - lamports: isSet(object.lamports) - ? exports.SubscribeRequestFilterAccountsFilterLamports.fromJSON(object.lamports) - : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.memcmp !== undefined) { - obj.memcmp = exports.SubscribeRequestFilterAccountsFilterMemcmp.toJSON(message.memcmp); - } - if (message.datasize !== undefined) { - obj.datasize = message.datasize; - } - if (message.tokenAccountState !== undefined) { - obj.tokenAccountState = message.tokenAccountState; - } - if (message.lamports !== undefined) { - obj.lamports = exports.SubscribeRequestFilterAccountsFilterLamports.toJSON(message.lamports); - } - return obj; - }, - create: function (base) { - return exports.SubscribeRequestFilterAccountsFilter.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b; - var message = createBaseSubscribeRequestFilterAccountsFilter(); - message.memcmp = (object.memcmp !== undefined && object.memcmp !== null) - ? exports.SubscribeRequestFilterAccountsFilterMemcmp.fromPartial(object.memcmp) - : undefined; - message.datasize = (_a = object.datasize) !== null && _a !== void 0 ? _a : undefined; - message.tokenAccountState = (_b = object.tokenAccountState) !== null && _b !== void 0 ? _b : undefined; - message.lamports = (object.lamports !== undefined && object.lamports !== null) - ? 
exports.SubscribeRequestFilterAccountsFilterLamports.fromPartial(object.lamports) - : undefined; - return message; - }, -}; -function createBaseSubscribeRequestFilterAccountsFilterMemcmp() { - return { offset: "0", bytes: undefined, base58: undefined, base64: undefined }; -} -exports.SubscribeRequestFilterAccountsFilterMemcmp = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.offset !== "0") { - writer.uint32(8).uint64(message.offset); - } - if (message.bytes !== undefined) { - writer.uint32(18).bytes(message.bytes); - } - if (message.base58 !== undefined) { - writer.uint32(26).string(message.base58); - } - if (message.base64 !== undefined) { - writer.uint32(34).string(message.base64); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseSubscribeRequestFilterAccountsFilterMemcmp(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.offset = reader.uint64().toString(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.bytes = reader.bytes(); - continue; - } - case 3: { - if (tag !== 26) { - break; - } - message.base58 = reader.string(); - continue; - } - case 4: { - if (tag !== 34) { - break; - } - message.base64 = reader.string(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - offset: isSet(object.offset) ? globalThis.String(object.offset) : "0", - bytes: isSet(object.bytes) ? bytesFromBase64(object.bytes) : undefined, - base58: isSet(object.base58) ? globalThis.String(object.base58) : undefined, - base64: isSet(object.base64) ? 
globalThis.String(object.base64) : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.offset !== "0") { - obj.offset = message.offset; - } - if (message.bytes !== undefined) { - obj.bytes = base64FromBytes(message.bytes); - } - if (message.base58 !== undefined) { - obj.base58 = message.base58; - } - if (message.base64 !== undefined) { - obj.base64 = message.base64; - } - return obj; - }, - create: function (base) { - return exports.SubscribeRequestFilterAccountsFilterMemcmp.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c, _d; - var message = createBaseSubscribeRequestFilterAccountsFilterMemcmp(); - message.offset = (_a = object.offset) !== null && _a !== void 0 ? _a : "0"; - message.bytes = (_b = object.bytes) !== null && _b !== void 0 ? _b : undefined; - message.base58 = (_c = object.base58) !== null && _c !== void 0 ? _c : undefined; - message.base64 = (_d = object.base64) !== null && _d !== void 0 ? _d : undefined; - return message; - }, -}; -function createBaseSubscribeRequestFilterAccountsFilterLamports() { - return { eq: undefined, ne: undefined, lt: undefined, gt: undefined }; -} -exports.SubscribeRequestFilterAccountsFilterLamports = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.eq !== undefined) { - writer.uint32(8).uint64(message.eq); - } - if (message.ne !== undefined) { - writer.uint32(16).uint64(message.ne); - } - if (message.lt !== undefined) { - writer.uint32(24).uint64(message.lt); - } - if (message.gt !== undefined) { - writer.uint32(32).uint64(message.gt); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseSubscribeRequestFilterAccountsFilterLamports(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.eq = reader.uint64().toString(); - continue; - } - case 2: { - if (tag !== 16) { - break; - } - message.ne = reader.uint64().toString(); - continue; - } - case 3: { - if (tag !== 24) { - break; - } - message.lt = reader.uint64().toString(); - continue; - } - case 4: { - if (tag !== 32) { - break; - } - message.gt = reader.uint64().toString(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - eq: isSet(object.eq) ? globalThis.String(object.eq) : undefined, - ne: isSet(object.ne) ? globalThis.String(object.ne) : undefined, - lt: isSet(object.lt) ? globalThis.String(object.lt) : undefined, - gt: isSet(object.gt) ? globalThis.String(object.gt) : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.eq !== undefined) { - obj.eq = message.eq; - } - if (message.ne !== undefined) { - obj.ne = message.ne; - } - if (message.lt !== undefined) { - obj.lt = message.lt; - } - if (message.gt !== undefined) { - obj.gt = message.gt; - } - return obj; - }, - create: function (base) { - return exports.SubscribeRequestFilterAccountsFilterLamports.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c, _d; - var message = createBaseSubscribeRequestFilterAccountsFilterLamports(); - message.eq = (_a = object.eq) !== null && _a !== void 0 ? _a : undefined; - message.ne = (_b = object.ne) !== null && _b !== void 0 ? _b : undefined; - message.lt = (_c = object.lt) !== null && _c !== void 0 ? _c : undefined; - message.gt = (_d = object.gt) !== null && _d !== void 0 ? 
_d : undefined; - return message; - }, -}; -function createBaseSubscribeRequestFilterSlots() { - return { filterByCommitment: undefined, interslotUpdates: undefined }; -} -exports.SubscribeRequestFilterSlots = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.filterByCommitment !== undefined) { - writer.uint32(8).bool(message.filterByCommitment); - } - if (message.interslotUpdates !== undefined) { - writer.uint32(16).bool(message.interslotUpdates); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseSubscribeRequestFilterSlots(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.filterByCommitment = reader.bool(); - continue; - } - case 2: { - if (tag !== 16) { - break; - } - message.interslotUpdates = reader.bool(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - filterByCommitment: isSet(object.filterByCommitment) ? globalThis.Boolean(object.filterByCommitment) : undefined, - interslotUpdates: isSet(object.interslotUpdates) ? globalThis.Boolean(object.interslotUpdates) : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.filterByCommitment !== undefined) { - obj.filterByCommitment = message.filterByCommitment; - } - if (message.interslotUpdates !== undefined) { - obj.interslotUpdates = message.interslotUpdates; - } - return obj; - }, - create: function (base) { - return exports.SubscribeRequestFilterSlots.fromPartial(base !== null && base !== void 0 ? 
base : {}); - }, - fromPartial: function (object) { - var _a, _b; - var message = createBaseSubscribeRequestFilterSlots(); - message.filterByCommitment = (_a = object.filterByCommitment) !== null && _a !== void 0 ? _a : undefined; - message.interslotUpdates = (_b = object.interslotUpdates) !== null && _b !== void 0 ? _b : undefined; - return message; - }, -}; -function createBaseSubscribeRequestFilterTransactions() { - return { - vote: undefined, - failed: undefined, - signature: undefined, - accountInclude: [], - accountExclude: [], - accountRequired: [], - }; -} -exports.SubscribeRequestFilterTransactions = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.vote !== undefined) { - writer.uint32(8).bool(message.vote); - } - if (message.failed !== undefined) { - writer.uint32(16).bool(message.failed); - } - if (message.signature !== undefined) { - writer.uint32(42).string(message.signature); - } - for (var _i = 0, _a = message.accountInclude; _i < _a.length; _i++) { - var v = _a[_i]; - writer.uint32(26).string(v); - } - for (var _b = 0, _c = message.accountExclude; _b < _c.length; _b++) { - var v = _c[_b]; - writer.uint32(34).string(v); - } - for (var _d = 0, _e = message.accountRequired; _d < _e.length; _d++) { - var v = _e[_d]; - writer.uint32(50).string(v); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseSubscribeRequestFilterTransactions(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.vote = reader.bool(); - continue; - } - case 2: { - if (tag !== 16) { - break; - } - message.failed = reader.bool(); - continue; - } - case 5: { - if (tag !== 42) { - break; - } - message.signature = reader.string(); - continue; - } - case 3: { - if (tag !== 26) { - break; - } - message.accountInclude.push(reader.string()); - continue; - } - case 4: { - if (tag !== 34) { - break; - } - message.accountExclude.push(reader.string()); - continue; - } - case 6: { - if (tag !== 50) { - break; - } - message.accountRequired.push(reader.string()); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - vote: isSet(object.vote) ? globalThis.Boolean(object.vote) : undefined, - failed: isSet(object.failed) ? globalThis.Boolean(object.failed) : undefined, - signature: isSet(object.signature) ? globalThis.String(object.signature) : undefined, - accountInclude: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.accountInclude) - ? object.accountInclude.map(function (e) { return globalThis.String(e); }) - : [], - accountExclude: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.accountExclude) - ? object.accountExclude.map(function (e) { return globalThis.String(e); }) - : [], - accountRequired: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.accountRequired) - ? 
object.accountRequired.map(function (e) { return globalThis.String(e); }) - : [], - }; - }, - toJSON: function (message) { - var _a, _b, _c; - var obj = {}; - if (message.vote !== undefined) { - obj.vote = message.vote; - } - if (message.failed !== undefined) { - obj.failed = message.failed; - } - if (message.signature !== undefined) { - obj.signature = message.signature; - } - if ((_a = message.accountInclude) === null || _a === void 0 ? void 0 : _a.length) { - obj.accountInclude = message.accountInclude; - } - if ((_b = message.accountExclude) === null || _b === void 0 ? void 0 : _b.length) { - obj.accountExclude = message.accountExclude; - } - if ((_c = message.accountRequired) === null || _c === void 0 ? void 0 : _c.length) { - obj.accountRequired = message.accountRequired; - } - return obj; - }, - create: function (base) { - return exports.SubscribeRequestFilterTransactions.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c, _d, _e, _f; - var message = createBaseSubscribeRequestFilterTransactions(); - message.vote = (_a = object.vote) !== null && _a !== void 0 ? _a : undefined; - message.failed = (_b = object.failed) !== null && _b !== void 0 ? _b : undefined; - message.signature = (_c = object.signature) !== null && _c !== void 0 ? _c : undefined; - message.accountInclude = ((_d = object.accountInclude) === null || _d === void 0 ? void 0 : _d.map(function (e) { return e; })) || []; - message.accountExclude = ((_e = object.accountExclude) === null || _e === void 0 ? void 0 : _e.map(function (e) { return e; })) || []; - message.accountRequired = ((_f = object.accountRequired) === null || _f === void 0 ? 
void 0 : _f.map(function (e) { return e; })) || []; - return message; - }, -}; -function createBaseSubscribeRequestFilterBlocks() { - return { accountInclude: [], includeTransactions: undefined, includeAccounts: undefined, includeEntries: undefined }; -} -exports.SubscribeRequestFilterBlocks = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - for (var _i = 0, _a = message.accountInclude; _i < _a.length; _i++) { - var v = _a[_i]; - writer.uint32(10).string(v); - } - if (message.includeTransactions !== undefined) { - writer.uint32(16).bool(message.includeTransactions); - } - if (message.includeAccounts !== undefined) { - writer.uint32(24).bool(message.includeAccounts); - } - if (message.includeEntries !== undefined) { - writer.uint32(32).bool(message.includeEntries); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseSubscribeRequestFilterBlocks(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.accountInclude.push(reader.string()); - continue; - } - case 2: { - if (tag !== 16) { - break; - } - message.includeTransactions = reader.bool(); - continue; - } - case 3: { - if (tag !== 24) { - break; - } - message.includeAccounts = reader.bool(); - continue; - } - case 4: { - if (tag !== 32) { - break; - } - message.includeEntries = reader.bool(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - accountInclude: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.accountInclude) - ? 
object.accountInclude.map(function (e) { return globalThis.String(e); }) - : [], - includeTransactions: isSet(object.includeTransactions) - ? globalThis.Boolean(object.includeTransactions) - : undefined, - includeAccounts: isSet(object.includeAccounts) ? globalThis.Boolean(object.includeAccounts) : undefined, - includeEntries: isSet(object.includeEntries) ? globalThis.Boolean(object.includeEntries) : undefined, - }; - }, - toJSON: function (message) { - var _a; - var obj = {}; - if ((_a = message.accountInclude) === null || _a === void 0 ? void 0 : _a.length) { - obj.accountInclude = message.accountInclude; - } - if (message.includeTransactions !== undefined) { - obj.includeTransactions = message.includeTransactions; - } - if (message.includeAccounts !== undefined) { - obj.includeAccounts = message.includeAccounts; - } - if (message.includeEntries !== undefined) { - obj.includeEntries = message.includeEntries; - } - return obj; - }, - create: function (base) { - return exports.SubscribeRequestFilterBlocks.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c, _d; - var message = createBaseSubscribeRequestFilterBlocks(); - message.accountInclude = ((_a = object.accountInclude) === null || _a === void 0 ? void 0 : _a.map(function (e) { return e; })) || []; - message.includeTransactions = (_b = object.includeTransactions) !== null && _b !== void 0 ? _b : undefined; - message.includeAccounts = (_c = object.includeAccounts) !== null && _c !== void 0 ? _c : undefined; - message.includeEntries = (_d = object.includeEntries) !== null && _d !== void 0 ? 
_d : undefined; - return message; - }, -}; -function createBaseSubscribeRequestFilterBlocksMeta() { - return {}; -} -exports.SubscribeRequestFilterBlocksMeta = { - encode: function (_, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseSubscribeRequestFilterBlocksMeta(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (_) { - return {}; - }, - toJSON: function (_) { - var obj = {}; - return obj; - }, - create: function (base) { - return exports.SubscribeRequestFilterBlocksMeta.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (_) { - var message = createBaseSubscribeRequestFilterBlocksMeta(); - return message; - }, -}; -function createBaseSubscribeRequestFilterEntry() { - return {}; -} -exports.SubscribeRequestFilterEntry = { - encode: function (_, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseSubscribeRequestFilterEntry(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (_) { - return {}; - }, - toJSON: function (_) { - var obj = {}; - return obj; - }, - create: function (base) { - return exports.SubscribeRequestFilterEntry.fromPartial(base !== null && base !== void 0 ? 
base : {}); - }, - fromPartial: function (_) { - var message = createBaseSubscribeRequestFilterEntry(); - return message; - }, -}; -function createBaseSubscribeRequestAccountsDataSlice() { - return { offset: "0", length: "0" }; -} -exports.SubscribeRequestAccountsDataSlice = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.offset !== "0") { - writer.uint32(8).uint64(message.offset); - } - if (message.length !== "0") { - writer.uint32(16).uint64(message.length); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseSubscribeRequestAccountsDataSlice(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.offset = reader.uint64().toString(); - continue; - } - case 2: { - if (tag !== 16) { - break; - } - message.length = reader.uint64().toString(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - offset: isSet(object.offset) ? globalThis.String(object.offset) : "0", - length: isSet(object.length) ? globalThis.String(object.length) : "0", - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.offset !== "0") { - obj.offset = message.offset; - } - if (message.length !== "0") { - obj.length = message.length; - } - return obj; - }, - create: function (base) { - return exports.SubscribeRequestAccountsDataSlice.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b; - var message = createBaseSubscribeRequestAccountsDataSlice(); - message.offset = (_a = object.offset) !== null && _a !== void 0 ? 
_a : "0"; - message.length = (_b = object.length) !== null && _b !== void 0 ? _b : "0"; - return message; - }, -}; -function createBaseSubscribeRequestPing() { - return { id: 0 }; -} -exports.SubscribeRequestPing = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.id !== 0) { - writer.uint32(8).int32(message.id); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseSubscribeRequestPing(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.id = reader.int32(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { id: isSet(object.id) ? globalThis.Number(object.id) : 0 }; - }, - toJSON: function (message) { - var obj = {}; - if (message.id !== 0) { - obj.id = Math.round(message.id); - } - return obj; - }, - create: function (base) { - return exports.SubscribeRequestPing.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseSubscribeRequestPing(); - message.id = (_a = object.id) !== null && _a !== void 0 ? 
_a : 0; - return message; - }, -}; -function createBaseSubscribeUpdate() { - return { - filters: [], - account: undefined, - slot: undefined, - transaction: undefined, - transactionStatus: undefined, - block: undefined, - ping: undefined, - pong: undefined, - blockMeta: undefined, - entry: undefined, - createdAt: undefined, - }; -} -exports.SubscribeUpdate = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - for (var _i = 0, _a = message.filters; _i < _a.length; _i++) { - var v = _a[_i]; - writer.uint32(10).string(v); - } - if (message.account !== undefined) { - exports.SubscribeUpdateAccount.encode(message.account, writer.uint32(18).fork()).join(); - } - if (message.slot !== undefined) { - exports.SubscribeUpdateSlot.encode(message.slot, writer.uint32(26).fork()).join(); - } - if (message.transaction !== undefined) { - exports.SubscribeUpdateTransaction.encode(message.transaction, writer.uint32(34).fork()).join(); - } - if (message.transactionStatus !== undefined) { - exports.SubscribeUpdateTransactionStatus.encode(message.transactionStatus, writer.uint32(82).fork()).join(); - } - if (message.block !== undefined) { - exports.SubscribeUpdateBlock.encode(message.block, writer.uint32(42).fork()).join(); - } - if (message.ping !== undefined) { - exports.SubscribeUpdatePing.encode(message.ping, writer.uint32(50).fork()).join(); - } - if (message.pong !== undefined) { - exports.SubscribeUpdatePong.encode(message.pong, writer.uint32(74).fork()).join(); - } - if (message.blockMeta !== undefined) { - exports.SubscribeUpdateBlockMeta.encode(message.blockMeta, writer.uint32(58).fork()).join(); - } - if (message.entry !== undefined) { - exports.SubscribeUpdateEntry.encode(message.entry, writer.uint32(66).fork()).join(); - } - if (message.createdAt !== undefined) { - timestamp_1.Timestamp.encode(toTimestamp(message.createdAt), writer.uint32(90).fork()).join(); - } - return writer; - }, - decode: function (input, length) 
{ - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseSubscribeUpdate(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.filters.push(reader.string()); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.account = exports.SubscribeUpdateAccount.decode(reader, reader.uint32()); - continue; - } - case 3: { - if (tag !== 26) { - break; - } - message.slot = exports.SubscribeUpdateSlot.decode(reader, reader.uint32()); - continue; - } - case 4: { - if (tag !== 34) { - break; - } - message.transaction = exports.SubscribeUpdateTransaction.decode(reader, reader.uint32()); - continue; - } - case 10: { - if (tag !== 82) { - break; - } - message.transactionStatus = exports.SubscribeUpdateTransactionStatus.decode(reader, reader.uint32()); - continue; - } - case 5: { - if (tag !== 42) { - break; - } - message.block = exports.SubscribeUpdateBlock.decode(reader, reader.uint32()); - continue; - } - case 6: { - if (tag !== 50) { - break; - } - message.ping = exports.SubscribeUpdatePing.decode(reader, reader.uint32()); - continue; - } - case 9: { - if (tag !== 74) { - break; - } - message.pong = exports.SubscribeUpdatePong.decode(reader, reader.uint32()); - continue; - } - case 7: { - if (tag !== 58) { - break; - } - message.blockMeta = exports.SubscribeUpdateBlockMeta.decode(reader, reader.uint32()); - continue; - } - case 8: { - if (tag !== 66) { - break; - } - message.entry = exports.SubscribeUpdateEntry.decode(reader, reader.uint32()); - continue; - } - case 11: { - if (tag !== 90) { - break; - } - message.createdAt = fromTimestamp(timestamp_1.Timestamp.decode(reader, reader.uint32())); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - 
return { - filters: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.filters) ? object.filters.map(function (e) { return globalThis.String(e); }) : [], - account: isSet(object.account) ? exports.SubscribeUpdateAccount.fromJSON(object.account) : undefined, - slot: isSet(object.slot) ? exports.SubscribeUpdateSlot.fromJSON(object.slot) : undefined, - transaction: isSet(object.transaction) ? exports.SubscribeUpdateTransaction.fromJSON(object.transaction) : undefined, - transactionStatus: isSet(object.transactionStatus) - ? exports.SubscribeUpdateTransactionStatus.fromJSON(object.transactionStatus) - : undefined, - block: isSet(object.block) ? exports.SubscribeUpdateBlock.fromJSON(object.block) : undefined, - ping: isSet(object.ping) ? exports.SubscribeUpdatePing.fromJSON(object.ping) : undefined, - pong: isSet(object.pong) ? exports.SubscribeUpdatePong.fromJSON(object.pong) : undefined, - blockMeta: isSet(object.blockMeta) ? exports.SubscribeUpdateBlockMeta.fromJSON(object.blockMeta) : undefined, - entry: isSet(object.entry) ? exports.SubscribeUpdateEntry.fromJSON(object.entry) : undefined, - createdAt: isSet(object.createdAt) ? fromJsonTimestamp(object.createdAt) : undefined, - }; - }, - toJSON: function (message) { - var _a; - var obj = {}; - if ((_a = message.filters) === null || _a === void 0 ? 
void 0 : _a.length) { - obj.filters = message.filters; - } - if (message.account !== undefined) { - obj.account = exports.SubscribeUpdateAccount.toJSON(message.account); - } - if (message.slot !== undefined) { - obj.slot = exports.SubscribeUpdateSlot.toJSON(message.slot); - } - if (message.transaction !== undefined) { - obj.transaction = exports.SubscribeUpdateTransaction.toJSON(message.transaction); - } - if (message.transactionStatus !== undefined) { - obj.transactionStatus = exports.SubscribeUpdateTransactionStatus.toJSON(message.transactionStatus); - } - if (message.block !== undefined) { - obj.block = exports.SubscribeUpdateBlock.toJSON(message.block); - } - if (message.ping !== undefined) { - obj.ping = exports.SubscribeUpdatePing.toJSON(message.ping); - } - if (message.pong !== undefined) { - obj.pong = exports.SubscribeUpdatePong.toJSON(message.pong); - } - if (message.blockMeta !== undefined) { - obj.blockMeta = exports.SubscribeUpdateBlockMeta.toJSON(message.blockMeta); - } - if (message.entry !== undefined) { - obj.entry = exports.SubscribeUpdateEntry.toJSON(message.entry); - } - if (message.createdAt !== undefined) { - obj.createdAt = message.createdAt.toISOString(); - } - return obj; - }, - create: function (base) { - return exports.SubscribeUpdate.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b; - var message = createBaseSubscribeUpdate(); - message.filters = ((_a = object.filters) === null || _a === void 0 ? void 0 : _a.map(function (e) { return e; })) || []; - message.account = (object.account !== undefined && object.account !== null) - ? exports.SubscribeUpdateAccount.fromPartial(object.account) - : undefined; - message.slot = (object.slot !== undefined && object.slot !== null) - ? exports.SubscribeUpdateSlot.fromPartial(object.slot) - : undefined; - message.transaction = (object.transaction !== undefined && object.transaction !== null) - ? 
exports.SubscribeUpdateTransaction.fromPartial(object.transaction) - : undefined; - message.transactionStatus = (object.transactionStatus !== undefined && object.transactionStatus !== null) - ? exports.SubscribeUpdateTransactionStatus.fromPartial(object.transactionStatus) - : undefined; - message.block = (object.block !== undefined && object.block !== null) - ? exports.SubscribeUpdateBlock.fromPartial(object.block) - : undefined; - message.ping = (object.ping !== undefined && object.ping !== null) - ? exports.SubscribeUpdatePing.fromPartial(object.ping) - : undefined; - message.pong = (object.pong !== undefined && object.pong !== null) - ? exports.SubscribeUpdatePong.fromPartial(object.pong) - : undefined; - message.blockMeta = (object.blockMeta !== undefined && object.blockMeta !== null) - ? exports.SubscribeUpdateBlockMeta.fromPartial(object.blockMeta) - : undefined; - message.entry = (object.entry !== undefined && object.entry !== null) - ? exports.SubscribeUpdateEntry.fromPartial(object.entry) - : undefined; - message.createdAt = (_b = object.createdAt) !== null && _b !== void 0 ? _b : undefined; - return message; - }, -}; -function createBaseSubscribeUpdateAccount() { - return { account: undefined, slot: "0", isStartup: false }; -} -exports.SubscribeUpdateAccount = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.account !== undefined) { - exports.SubscribeUpdateAccountInfo.encode(message.account, writer.uint32(10).fork()).join(); - } - if (message.slot !== "0") { - writer.uint32(16).uint64(message.slot); - } - if (message.isStartup !== false) { - writer.uint32(24).bool(message.isStartup); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseSubscribeUpdateAccount(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.account = exports.SubscribeUpdateAccountInfo.decode(reader, reader.uint32()); - continue; - } - case 2: { - if (tag !== 16) { - break; - } - message.slot = reader.uint64().toString(); - continue; - } - case 3: { - if (tag !== 24) { - break; - } - message.isStartup = reader.bool(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - account: isSet(object.account) ? exports.SubscribeUpdateAccountInfo.fromJSON(object.account) : undefined, - slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", - isStartup: isSet(object.isStartup) ? globalThis.Boolean(object.isStartup) : false, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.account !== undefined) { - obj.account = exports.SubscribeUpdateAccountInfo.toJSON(message.account); - } - if (message.slot !== "0") { - obj.slot = message.slot; - } - if (message.isStartup !== false) { - obj.isStartup = message.isStartup; - } - return obj; - }, - create: function (base) { - return exports.SubscribeUpdateAccount.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b; - var message = createBaseSubscribeUpdateAccount(); - message.account = (object.account !== undefined && object.account !== null) - ? exports.SubscribeUpdateAccountInfo.fromPartial(object.account) - : undefined; - message.slot = (_a = object.slot) !== null && _a !== void 0 ? _a : "0"; - message.isStartup = (_b = object.isStartup) !== null && _b !== void 0 ? 
_b : false; - return message; - }, -}; -function createBaseSubscribeUpdateAccountInfo() { - return { - pubkey: new Uint8Array(0), - lamports: "0", - owner: new Uint8Array(0), - executable: false, - rentEpoch: "0", - data: new Uint8Array(0), - writeVersion: "0", - txnSignature: undefined, - }; -} -exports.SubscribeUpdateAccountInfo = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.pubkey.length !== 0) { - writer.uint32(10).bytes(message.pubkey); - } - if (message.lamports !== "0") { - writer.uint32(16).uint64(message.lamports); - } - if (message.owner.length !== 0) { - writer.uint32(26).bytes(message.owner); - } - if (message.executable !== false) { - writer.uint32(32).bool(message.executable); - } - if (message.rentEpoch !== "0") { - writer.uint32(40).uint64(message.rentEpoch); - } - if (message.data.length !== 0) { - writer.uint32(50).bytes(message.data); - } - if (message.writeVersion !== "0") { - writer.uint32(56).uint64(message.writeVersion); - } - if (message.txnSignature !== undefined) { - writer.uint32(66).bytes(message.txnSignature); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseSubscribeUpdateAccountInfo(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.pubkey = reader.bytes(); - continue; - } - case 2: { - if (tag !== 16) { - break; - } - message.lamports = reader.uint64().toString(); - continue; - } - case 3: { - if (tag !== 26) { - break; - } - message.owner = reader.bytes(); - continue; - } - case 4: { - if (tag !== 32) { - break; - } - message.executable = reader.bool(); - continue; - } - case 5: { - if (tag !== 40) { - break; - } - message.rentEpoch = reader.uint64().toString(); - continue; - } - case 6: { - if (tag !== 50) { - break; - } - message.data = reader.bytes(); - continue; - } - case 7: { - if (tag !== 56) { - break; - } - message.writeVersion = reader.uint64().toString(); - continue; - } - case 8: { - if (tag !== 66) { - break; - } - message.txnSignature = reader.bytes(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - pubkey: isSet(object.pubkey) ? bytesFromBase64(object.pubkey) : new Uint8Array(0), - lamports: isSet(object.lamports) ? globalThis.String(object.lamports) : "0", - owner: isSet(object.owner) ? bytesFromBase64(object.owner) : new Uint8Array(0), - executable: isSet(object.executable) ? globalThis.Boolean(object.executable) : false, - rentEpoch: isSet(object.rentEpoch) ? globalThis.String(object.rentEpoch) : "0", - data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(0), - writeVersion: isSet(object.writeVersion) ? globalThis.String(object.writeVersion) : "0", - txnSignature: isSet(object.txnSignature) ? 
bytesFromBase64(object.txnSignature) : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.pubkey.length !== 0) { - obj.pubkey = base64FromBytes(message.pubkey); - } - if (message.lamports !== "0") { - obj.lamports = message.lamports; - } - if (message.owner.length !== 0) { - obj.owner = base64FromBytes(message.owner); - } - if (message.executable !== false) { - obj.executable = message.executable; - } - if (message.rentEpoch !== "0") { - obj.rentEpoch = message.rentEpoch; - } - if (message.data.length !== 0) { - obj.data = base64FromBytes(message.data); - } - if (message.writeVersion !== "0") { - obj.writeVersion = message.writeVersion; - } - if (message.txnSignature !== undefined) { - obj.txnSignature = base64FromBytes(message.txnSignature); - } - return obj; - }, - create: function (base) { - return exports.SubscribeUpdateAccountInfo.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c, _d, _e, _f, _g, _h; - var message = createBaseSubscribeUpdateAccountInfo(); - message.pubkey = (_a = object.pubkey) !== null && _a !== void 0 ? _a : new Uint8Array(0); - message.lamports = (_b = object.lamports) !== null && _b !== void 0 ? _b : "0"; - message.owner = (_c = object.owner) !== null && _c !== void 0 ? _c : new Uint8Array(0); - message.executable = (_d = object.executable) !== null && _d !== void 0 ? _d : false; - message.rentEpoch = (_e = object.rentEpoch) !== null && _e !== void 0 ? _e : "0"; - message.data = (_f = object.data) !== null && _f !== void 0 ? _f : new Uint8Array(0); - message.writeVersion = (_g = object.writeVersion) !== null && _g !== void 0 ? _g : "0"; - message.txnSignature = (_h = object.txnSignature) !== null && _h !== void 0 ? 
_h : undefined; - return message; - }, -}; -function createBaseSubscribeUpdateSlot() { - return { slot: "0", parent: undefined, status: 0, deadError: undefined }; -} -exports.SubscribeUpdateSlot = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.slot !== "0") { - writer.uint32(8).uint64(message.slot); - } - if (message.parent !== undefined) { - writer.uint32(16).uint64(message.parent); - } - if (message.status !== 0) { - writer.uint32(24).int32(message.status); - } - if (message.deadError !== undefined) { - writer.uint32(34).string(message.deadError); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseSubscribeUpdateSlot(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.slot = reader.uint64().toString(); - continue; - } - case 2: { - if (tag !== 16) { - break; - } - message.parent = reader.uint64().toString(); - continue; - } - case 3: { - if (tag !== 24) { - break; - } - message.status = reader.int32(); - continue; - } - case 4: { - if (tag !== 34) { - break; - } - message.deadError = reader.string(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", - parent: isSet(object.parent) ? globalThis.String(object.parent) : undefined, - status: isSet(object.status) ? slotStatusFromJSON(object.status) : 0, - deadError: isSet(object.deadError) ? 
globalThis.String(object.deadError) : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.slot !== "0") { - obj.slot = message.slot; - } - if (message.parent !== undefined) { - obj.parent = message.parent; - } - if (message.status !== 0) { - obj.status = slotStatusToJSON(message.status); - } - if (message.deadError !== undefined) { - obj.deadError = message.deadError; - } - return obj; - }, - create: function (base) { - return exports.SubscribeUpdateSlot.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c, _d; - var message = createBaseSubscribeUpdateSlot(); - message.slot = (_a = object.slot) !== null && _a !== void 0 ? _a : "0"; - message.parent = (_b = object.parent) !== null && _b !== void 0 ? _b : undefined; - message.status = (_c = object.status) !== null && _c !== void 0 ? _c : 0; - message.deadError = (_d = object.deadError) !== null && _d !== void 0 ? _d : undefined; - return message; - }, -}; -function createBaseSubscribeUpdateTransaction() { - return { transaction: undefined, slot: "0" }; -} -exports.SubscribeUpdateTransaction = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.transaction !== undefined) { - exports.SubscribeUpdateTransactionInfo.encode(message.transaction, writer.uint32(10).fork()).join(); - } - if (message.slot !== "0") { - writer.uint32(16).uint64(message.slot); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseSubscribeUpdateTransaction(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.transaction = exports.SubscribeUpdateTransactionInfo.decode(reader, reader.uint32()); - continue; - } - case 2: { - if (tag !== 16) { - break; - } - message.slot = reader.uint64().toString(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - transaction: isSet(object.transaction) ? exports.SubscribeUpdateTransactionInfo.fromJSON(object.transaction) : undefined, - slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.transaction !== undefined) { - obj.transaction = exports.SubscribeUpdateTransactionInfo.toJSON(message.transaction); - } - if (message.slot !== "0") { - obj.slot = message.slot; - } - return obj; - }, - create: function (base) { - return exports.SubscribeUpdateTransaction.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseSubscribeUpdateTransaction(); - message.transaction = (object.transaction !== undefined && object.transaction !== null) - ? exports.SubscribeUpdateTransactionInfo.fromPartial(object.transaction) - : undefined; - message.slot = (_a = object.slot) !== null && _a !== void 0 ? 
_a : "0"; - return message; - }, -}; -function createBaseSubscribeUpdateTransactionInfo() { - return { signature: new Uint8Array(0), isVote: false, transaction: undefined, meta: undefined, index: "0" }; -} -exports.SubscribeUpdateTransactionInfo = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.signature.length !== 0) { - writer.uint32(10).bytes(message.signature); - } - if (message.isVote !== false) { - writer.uint32(16).bool(message.isVote); - } - if (message.transaction !== undefined) { - solana_storage_1.Transaction.encode(message.transaction, writer.uint32(26).fork()).join(); - } - if (message.meta !== undefined) { - solana_storage_1.TransactionStatusMeta.encode(message.meta, writer.uint32(34).fork()).join(); - } - if (message.index !== "0") { - writer.uint32(40).uint64(message.index); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseSubscribeUpdateTransactionInfo(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.signature = reader.bytes(); - continue; - } - case 2: { - if (tag !== 16) { - break; - } - message.isVote = reader.bool(); - continue; - } - case 3: { - if (tag !== 26) { - break; - } - message.transaction = solana_storage_1.Transaction.decode(reader, reader.uint32()); - continue; - } - case 4: { - if (tag !== 34) { - break; - } - message.meta = solana_storage_1.TransactionStatusMeta.decode(reader, reader.uint32()); - continue; - } - case 5: { - if (tag !== 40) { - break; - } - message.index = reader.uint64().toString(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - signature: isSet(object.signature) ? bytesFromBase64(object.signature) : new Uint8Array(0), - isVote: isSet(object.isVote) ? globalThis.Boolean(object.isVote) : false, - transaction: isSet(object.transaction) ? solana_storage_1.Transaction.fromJSON(object.transaction) : undefined, - meta: isSet(object.meta) ? solana_storage_1.TransactionStatusMeta.fromJSON(object.meta) : undefined, - index: isSet(object.index) ? 
globalThis.String(object.index) : "0", - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.signature.length !== 0) { - obj.signature = base64FromBytes(message.signature); - } - if (message.isVote !== false) { - obj.isVote = message.isVote; - } - if (message.transaction !== undefined) { - obj.transaction = solana_storage_1.Transaction.toJSON(message.transaction); - } - if (message.meta !== undefined) { - obj.meta = solana_storage_1.TransactionStatusMeta.toJSON(message.meta); - } - if (message.index !== "0") { - obj.index = message.index; - } - return obj; - }, - create: function (base) { - return exports.SubscribeUpdateTransactionInfo.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c; - var message = createBaseSubscribeUpdateTransactionInfo(); - message.signature = (_a = object.signature) !== null && _a !== void 0 ? _a : new Uint8Array(0); - message.isVote = (_b = object.isVote) !== null && _b !== void 0 ? _b : false; - message.transaction = (object.transaction !== undefined && object.transaction !== null) - ? solana_storage_1.Transaction.fromPartial(object.transaction) - : undefined; - message.meta = (object.meta !== undefined && object.meta !== null) - ? solana_storage_1.TransactionStatusMeta.fromPartial(object.meta) - : undefined; - message.index = (_c = object.index) !== null && _c !== void 0 ? 
_c : "0"; - return message; - }, -}; -function createBaseSubscribeUpdateTransactionStatus() { - return { slot: "0", signature: new Uint8Array(0), isVote: false, index: "0", err: undefined }; -} -exports.SubscribeUpdateTransactionStatus = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.slot !== "0") { - writer.uint32(8).uint64(message.slot); - } - if (message.signature.length !== 0) { - writer.uint32(18).bytes(message.signature); - } - if (message.isVote !== false) { - writer.uint32(24).bool(message.isVote); - } - if (message.index !== "0") { - writer.uint32(32).uint64(message.index); - } - if (message.err !== undefined) { - solana_storage_1.TransactionError.encode(message.err, writer.uint32(42).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseSubscribeUpdateTransactionStatus(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.slot = reader.uint64().toString(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.signature = reader.bytes(); - continue; - } - case 3: { - if (tag !== 24) { - break; - } - message.isVote = reader.bool(); - continue; - } - case 4: { - if (tag !== 32) { - break; - } - message.index = reader.uint64().toString(); - continue; - } - case 5: { - if (tag !== 42) { - break; - } - message.err = solana_storage_1.TransactionError.decode(reader, reader.uint32()); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", - signature: isSet(object.signature) ? 
bytesFromBase64(object.signature) : new Uint8Array(0), - isVote: isSet(object.isVote) ? globalThis.Boolean(object.isVote) : false, - index: isSet(object.index) ? globalThis.String(object.index) : "0", - err: isSet(object.err) ? solana_storage_1.TransactionError.fromJSON(object.err) : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.slot !== "0") { - obj.slot = message.slot; - } - if (message.signature.length !== 0) { - obj.signature = base64FromBytes(message.signature); - } - if (message.isVote !== false) { - obj.isVote = message.isVote; - } - if (message.index !== "0") { - obj.index = message.index; - } - if (message.err !== undefined) { - obj.err = solana_storage_1.TransactionError.toJSON(message.err); - } - return obj; - }, - create: function (base) { - return exports.SubscribeUpdateTransactionStatus.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c, _d; - var message = createBaseSubscribeUpdateTransactionStatus(); - message.slot = (_a = object.slot) !== null && _a !== void 0 ? _a : "0"; - message.signature = (_b = object.signature) !== null && _b !== void 0 ? _b : new Uint8Array(0); - message.isVote = (_c = object.isVote) !== null && _c !== void 0 ? _c : false; - message.index = (_d = object.index) !== null && _d !== void 0 ? _d : "0"; - message.err = (object.err !== undefined && object.err !== null) - ? 
solana_storage_1.TransactionError.fromPartial(object.err) - : undefined; - return message; - }, -}; -function createBaseSubscribeUpdateBlock() { - return { - slot: "0", - blockhash: "", - rewards: undefined, - blockTime: undefined, - blockHeight: undefined, - parentSlot: "0", - parentBlockhash: "", - executedTransactionCount: "0", - transactions: [], - updatedAccountCount: "0", - accounts: [], - entriesCount: "0", - entries: [], - }; -} -exports.SubscribeUpdateBlock = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.slot !== "0") { - writer.uint32(8).uint64(message.slot); - } - if (message.blockhash !== "") { - writer.uint32(18).string(message.blockhash); - } - if (message.rewards !== undefined) { - solana_storage_1.Rewards.encode(message.rewards, writer.uint32(26).fork()).join(); - } - if (message.blockTime !== undefined) { - solana_storage_1.UnixTimestamp.encode(message.blockTime, writer.uint32(34).fork()).join(); - } - if (message.blockHeight !== undefined) { - solana_storage_1.BlockHeight.encode(message.blockHeight, writer.uint32(42).fork()).join(); - } - if (message.parentSlot !== "0") { - writer.uint32(56).uint64(message.parentSlot); - } - if (message.parentBlockhash !== "") { - writer.uint32(66).string(message.parentBlockhash); - } - if (message.executedTransactionCount !== "0") { - writer.uint32(72).uint64(message.executedTransactionCount); - } - for (var _i = 0, _a = message.transactions; _i < _a.length; _i++) { - var v = _a[_i]; - exports.SubscribeUpdateTransactionInfo.encode(v, writer.uint32(50).fork()).join(); - } - if (message.updatedAccountCount !== "0") { - writer.uint32(80).uint64(message.updatedAccountCount); - } - for (var _b = 0, _c = message.accounts; _b < _c.length; _b++) { - var v = _c[_b]; - exports.SubscribeUpdateAccountInfo.encode(v, writer.uint32(90).fork()).join(); - } - if (message.entriesCount !== "0") { - writer.uint32(96).uint64(message.entriesCount); - } - for 
(var _d = 0, _e = message.entries; _d < _e.length; _d++) { - var v = _e[_d]; - exports.SubscribeUpdateEntry.encode(v, writer.uint32(106).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseSubscribeUpdateBlock(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.slot = reader.uint64().toString(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.blockhash = reader.string(); - continue; - } - case 3: { - if (tag !== 26) { - break; - } - message.rewards = solana_storage_1.Rewards.decode(reader, reader.uint32()); - continue; - } - case 4: { - if (tag !== 34) { - break; - } - message.blockTime = solana_storage_1.UnixTimestamp.decode(reader, reader.uint32()); - continue; - } - case 5: { - if (tag !== 42) { - break; - } - message.blockHeight = solana_storage_1.BlockHeight.decode(reader, reader.uint32()); - continue; - } - case 7: { - if (tag !== 56) { - break; - } - message.parentSlot = reader.uint64().toString(); - continue; - } - case 8: { - if (tag !== 66) { - break; - } - message.parentBlockhash = reader.string(); - continue; - } - case 9: { - if (tag !== 72) { - break; - } - message.executedTransactionCount = reader.uint64().toString(); - continue; - } - case 6: { - if (tag !== 50) { - break; - } - message.transactions.push(exports.SubscribeUpdateTransactionInfo.decode(reader, reader.uint32())); - continue; - } - case 10: { - if (tag !== 80) { - break; - } - message.updatedAccountCount = reader.uint64().toString(); - continue; - } - case 11: { - if (tag !== 90) { - break; - } - message.accounts.push(exports.SubscribeUpdateAccountInfo.decode(reader, reader.uint32())); - continue; - } - case 12: { - if (tag !== 96) { - break; - } - message.entriesCount 
= reader.uint64().toString(); - continue; - } - case 13: { - if (tag !== 106) { - break; - } - message.entries.push(exports.SubscribeUpdateEntry.decode(reader, reader.uint32())); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", - blockhash: isSet(object.blockhash) ? globalThis.String(object.blockhash) : "", - rewards: isSet(object.rewards) ? solana_storage_1.Rewards.fromJSON(object.rewards) : undefined, - blockTime: isSet(object.blockTime) ? solana_storage_1.UnixTimestamp.fromJSON(object.blockTime) : undefined, - blockHeight: isSet(object.blockHeight) ? solana_storage_1.BlockHeight.fromJSON(object.blockHeight) : undefined, - parentSlot: isSet(object.parentSlot) ? globalThis.String(object.parentSlot) : "0", - parentBlockhash: isSet(object.parentBlockhash) ? globalThis.String(object.parentBlockhash) : "", - executedTransactionCount: isSet(object.executedTransactionCount) - ? globalThis.String(object.executedTransactionCount) - : "0", - transactions: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.transactions) - ? object.transactions.map(function (e) { return exports.SubscribeUpdateTransactionInfo.fromJSON(e); }) - : [], - updatedAccountCount: isSet(object.updatedAccountCount) ? globalThis.String(object.updatedAccountCount) : "0", - accounts: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.accounts) - ? object.accounts.map(function (e) { return exports.SubscribeUpdateAccountInfo.fromJSON(e); }) - : [], - entriesCount: isSet(object.entriesCount) ? globalThis.String(object.entriesCount) : "0", - entries: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.entries) - ? 
object.entries.map(function (e) { return exports.SubscribeUpdateEntry.fromJSON(e); }) - : [], - }; - }, - toJSON: function (message) { - var _a, _b, _c; - var obj = {}; - if (message.slot !== "0") { - obj.slot = message.slot; - } - if (message.blockhash !== "") { - obj.blockhash = message.blockhash; - } - if (message.rewards !== undefined) { - obj.rewards = solana_storage_1.Rewards.toJSON(message.rewards); - } - if (message.blockTime !== undefined) { - obj.blockTime = solana_storage_1.UnixTimestamp.toJSON(message.blockTime); - } - if (message.blockHeight !== undefined) { - obj.blockHeight = solana_storage_1.BlockHeight.toJSON(message.blockHeight); - } - if (message.parentSlot !== "0") { - obj.parentSlot = message.parentSlot; - } - if (message.parentBlockhash !== "") { - obj.parentBlockhash = message.parentBlockhash; - } - if (message.executedTransactionCount !== "0") { - obj.executedTransactionCount = message.executedTransactionCount; - } - if ((_a = message.transactions) === null || _a === void 0 ? void 0 : _a.length) { - obj.transactions = message.transactions.map(function (e) { return exports.SubscribeUpdateTransactionInfo.toJSON(e); }); - } - if (message.updatedAccountCount !== "0") { - obj.updatedAccountCount = message.updatedAccountCount; - } - if ((_b = message.accounts) === null || _b === void 0 ? void 0 : _b.length) { - obj.accounts = message.accounts.map(function (e) { return exports.SubscribeUpdateAccountInfo.toJSON(e); }); - } - if (message.entriesCount !== "0") { - obj.entriesCount = message.entriesCount; - } - if ((_c = message.entries) === null || _c === void 0 ? void 0 : _c.length) { - obj.entries = message.entries.map(function (e) { return exports.SubscribeUpdateEntry.toJSON(e); }); - } - return obj; - }, - create: function (base) { - return exports.SubscribeUpdateBlock.fromPartial(base !== null && base !== void 0 ? 
base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k; - var message = createBaseSubscribeUpdateBlock(); - message.slot = (_a = object.slot) !== null && _a !== void 0 ? _a : "0"; - message.blockhash = (_b = object.blockhash) !== null && _b !== void 0 ? _b : ""; - message.rewards = (object.rewards !== undefined && object.rewards !== null) - ? solana_storage_1.Rewards.fromPartial(object.rewards) - : undefined; - message.blockTime = (object.blockTime !== undefined && object.blockTime !== null) - ? solana_storage_1.UnixTimestamp.fromPartial(object.blockTime) - : undefined; - message.blockHeight = (object.blockHeight !== undefined && object.blockHeight !== null) - ? solana_storage_1.BlockHeight.fromPartial(object.blockHeight) - : undefined; - message.parentSlot = (_c = object.parentSlot) !== null && _c !== void 0 ? _c : "0"; - message.parentBlockhash = (_d = object.parentBlockhash) !== null && _d !== void 0 ? _d : ""; - message.executedTransactionCount = (_e = object.executedTransactionCount) !== null && _e !== void 0 ? _e : "0"; - message.transactions = ((_f = object.transactions) === null || _f === void 0 ? void 0 : _f.map(function (e) { return exports.SubscribeUpdateTransactionInfo.fromPartial(e); })) || []; - message.updatedAccountCount = (_g = object.updatedAccountCount) !== null && _g !== void 0 ? _g : "0"; - message.accounts = ((_h = object.accounts) === null || _h === void 0 ? void 0 : _h.map(function (e) { return exports.SubscribeUpdateAccountInfo.fromPartial(e); })) || []; - message.entriesCount = (_j = object.entriesCount) !== null && _j !== void 0 ? _j : "0"; - message.entries = ((_k = object.entries) === null || _k === void 0 ? 
void 0 : _k.map(function (e) { return exports.SubscribeUpdateEntry.fromPartial(e); })) || []; - return message; - }, -}; -function createBaseSubscribeUpdateBlockMeta() { - return { - slot: "0", - blockhash: "", - rewards: undefined, - blockTime: undefined, - blockHeight: undefined, - parentSlot: "0", - parentBlockhash: "", - executedTransactionCount: "0", - entriesCount: "0", - }; -} -exports.SubscribeUpdateBlockMeta = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.slot !== "0") { - writer.uint32(8).uint64(message.slot); - } - if (message.blockhash !== "") { - writer.uint32(18).string(message.blockhash); - } - if (message.rewards !== undefined) { - solana_storage_1.Rewards.encode(message.rewards, writer.uint32(26).fork()).join(); - } - if (message.blockTime !== undefined) { - solana_storage_1.UnixTimestamp.encode(message.blockTime, writer.uint32(34).fork()).join(); - } - if (message.blockHeight !== undefined) { - solana_storage_1.BlockHeight.encode(message.blockHeight, writer.uint32(42).fork()).join(); - } - if (message.parentSlot !== "0") { - writer.uint32(48).uint64(message.parentSlot); - } - if (message.parentBlockhash !== "") { - writer.uint32(58).string(message.parentBlockhash); - } - if (message.executedTransactionCount !== "0") { - writer.uint32(64).uint64(message.executedTransactionCount); - } - if (message.entriesCount !== "0") { - writer.uint32(72).uint64(message.entriesCount); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseSubscribeUpdateBlockMeta(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.slot = reader.uint64().toString(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.blockhash = reader.string(); - continue; - } - case 3: { - if (tag !== 26) { - break; - } - message.rewards = solana_storage_1.Rewards.decode(reader, reader.uint32()); - continue; - } - case 4: { - if (tag !== 34) { - break; - } - message.blockTime = solana_storage_1.UnixTimestamp.decode(reader, reader.uint32()); - continue; - } - case 5: { - if (tag !== 42) { - break; - } - message.blockHeight = solana_storage_1.BlockHeight.decode(reader, reader.uint32()); - continue; - } - case 6: { - if (tag !== 48) { - break; - } - message.parentSlot = reader.uint64().toString(); - continue; - } - case 7: { - if (tag !== 58) { - break; - } - message.parentBlockhash = reader.string(); - continue; - } - case 8: { - if (tag !== 64) { - break; - } - message.executedTransactionCount = reader.uint64().toString(); - continue; - } - case 9: { - if (tag !== 72) { - break; - } - message.entriesCount = reader.uint64().toString(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", - blockhash: isSet(object.blockhash) ? globalThis.String(object.blockhash) : "", - rewards: isSet(object.rewards) ? solana_storage_1.Rewards.fromJSON(object.rewards) : undefined, - blockTime: isSet(object.blockTime) ? solana_storage_1.UnixTimestamp.fromJSON(object.blockTime) : undefined, - blockHeight: isSet(object.blockHeight) ? solana_storage_1.BlockHeight.fromJSON(object.blockHeight) : undefined, - parentSlot: isSet(object.parentSlot) ? 
globalThis.String(object.parentSlot) : "0", - parentBlockhash: isSet(object.parentBlockhash) ? globalThis.String(object.parentBlockhash) : "", - executedTransactionCount: isSet(object.executedTransactionCount) - ? globalThis.String(object.executedTransactionCount) - : "0", - entriesCount: isSet(object.entriesCount) ? globalThis.String(object.entriesCount) : "0", - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.slot !== "0") { - obj.slot = message.slot; - } - if (message.blockhash !== "") { - obj.blockhash = message.blockhash; - } - if (message.rewards !== undefined) { - obj.rewards = solana_storage_1.Rewards.toJSON(message.rewards); - } - if (message.blockTime !== undefined) { - obj.blockTime = solana_storage_1.UnixTimestamp.toJSON(message.blockTime); - } - if (message.blockHeight !== undefined) { - obj.blockHeight = solana_storage_1.BlockHeight.toJSON(message.blockHeight); - } - if (message.parentSlot !== "0") { - obj.parentSlot = message.parentSlot; - } - if (message.parentBlockhash !== "") { - obj.parentBlockhash = message.parentBlockhash; - } - if (message.executedTransactionCount !== "0") { - obj.executedTransactionCount = message.executedTransactionCount; - } - if (message.entriesCount !== "0") { - obj.entriesCount = message.entriesCount; - } - return obj; - }, - create: function (base) { - return exports.SubscribeUpdateBlockMeta.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c, _d, _e, _f; - var message = createBaseSubscribeUpdateBlockMeta(); - message.slot = (_a = object.slot) !== null && _a !== void 0 ? _a : "0"; - message.blockhash = (_b = object.blockhash) !== null && _b !== void 0 ? _b : ""; - message.rewards = (object.rewards !== undefined && object.rewards !== null) - ? solana_storage_1.Rewards.fromPartial(object.rewards) - : undefined; - message.blockTime = (object.blockTime !== undefined && object.blockTime !== null) - ? 
solana_storage_1.UnixTimestamp.fromPartial(object.blockTime) - : undefined; - message.blockHeight = (object.blockHeight !== undefined && object.blockHeight !== null) - ? solana_storage_1.BlockHeight.fromPartial(object.blockHeight) - : undefined; - message.parentSlot = (_c = object.parentSlot) !== null && _c !== void 0 ? _c : "0"; - message.parentBlockhash = (_d = object.parentBlockhash) !== null && _d !== void 0 ? _d : ""; - message.executedTransactionCount = (_e = object.executedTransactionCount) !== null && _e !== void 0 ? _e : "0"; - message.entriesCount = (_f = object.entriesCount) !== null && _f !== void 0 ? _f : "0"; - return message; - }, -}; -function createBaseSubscribeUpdateEntry() { - return { - slot: "0", - index: "0", - numHashes: "0", - hash: new Uint8Array(0), - executedTransactionCount: "0", - startingTransactionIndex: "0", - }; -} -exports.SubscribeUpdateEntry = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.slot !== "0") { - writer.uint32(8).uint64(message.slot); - } - if (message.index !== "0") { - writer.uint32(16).uint64(message.index); - } - if (message.numHashes !== "0") { - writer.uint32(24).uint64(message.numHashes); - } - if (message.hash.length !== 0) { - writer.uint32(34).bytes(message.hash); - } - if (message.executedTransactionCount !== "0") { - writer.uint32(40).uint64(message.executedTransactionCount); - } - if (message.startingTransactionIndex !== "0") { - writer.uint32(48).uint64(message.startingTransactionIndex); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseSubscribeUpdateEntry(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.slot = reader.uint64().toString(); - continue; - } - case 2: { - if (tag !== 16) { - break; - } - message.index = reader.uint64().toString(); - continue; - } - case 3: { - if (tag !== 24) { - break; - } - message.numHashes = reader.uint64().toString(); - continue; - } - case 4: { - if (tag !== 34) { - break; - } - message.hash = reader.bytes(); - continue; - } - case 5: { - if (tag !== 40) { - break; - } - message.executedTransactionCount = reader.uint64().toString(); - continue; - } - case 6: { - if (tag !== 48) { - break; - } - message.startingTransactionIndex = reader.uint64().toString(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", - index: isSet(object.index) ? globalThis.String(object.index) : "0", - numHashes: isSet(object.numHashes) ? globalThis.String(object.numHashes) : "0", - hash: isSet(object.hash) ? bytesFromBase64(object.hash) : new Uint8Array(0), - executedTransactionCount: isSet(object.executedTransactionCount) - ? globalThis.String(object.executedTransactionCount) - : "0", - startingTransactionIndex: isSet(object.startingTransactionIndex) - ? 
globalThis.String(object.startingTransactionIndex) - : "0", - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.slot !== "0") { - obj.slot = message.slot; - } - if (message.index !== "0") { - obj.index = message.index; - } - if (message.numHashes !== "0") { - obj.numHashes = message.numHashes; - } - if (message.hash.length !== 0) { - obj.hash = base64FromBytes(message.hash); - } - if (message.executedTransactionCount !== "0") { - obj.executedTransactionCount = message.executedTransactionCount; - } - if (message.startingTransactionIndex !== "0") { - obj.startingTransactionIndex = message.startingTransactionIndex; - } - return obj; - }, - create: function (base) { - return exports.SubscribeUpdateEntry.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c, _d, _e, _f; - var message = createBaseSubscribeUpdateEntry(); - message.slot = (_a = object.slot) !== null && _a !== void 0 ? _a : "0"; - message.index = (_b = object.index) !== null && _b !== void 0 ? _b : "0"; - message.numHashes = (_c = object.numHashes) !== null && _c !== void 0 ? _c : "0"; - message.hash = (_d = object.hash) !== null && _d !== void 0 ? _d : new Uint8Array(0); - message.executedTransactionCount = (_e = object.executedTransactionCount) !== null && _e !== void 0 ? _e : "0"; - message.startingTransactionIndex = (_f = object.startingTransactionIndex) !== null && _f !== void 0 ? _f : "0"; - return message; - }, -}; -function createBaseSubscribeUpdatePing() { - return {}; -} -exports.SubscribeUpdatePing = { - encode: function (_, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseSubscribeUpdatePing(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (_) { - return {}; - }, - toJSON: function (_) { - var obj = {}; - return obj; - }, - create: function (base) { - return exports.SubscribeUpdatePing.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (_) { - var message = createBaseSubscribeUpdatePing(); - return message; - }, -}; -function createBaseSubscribeUpdatePong() { - return { id: 0 }; -} -exports.SubscribeUpdatePong = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.id !== 0) { - writer.uint32(8).int32(message.id); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseSubscribeUpdatePong(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.id = reader.int32(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { id: isSet(object.id) ? globalThis.Number(object.id) : 0 }; - }, - toJSON: function (message) { - var obj = {}; - if (message.id !== 0) { - obj.id = Math.round(message.id); - } - return obj; - }, - create: function (base) { - return exports.SubscribeUpdatePong.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseSubscribeUpdatePong(); - message.id = (_a = object.id) !== null && _a !== void 0 ? 
_a : 0; - return message; - }, -}; -function createBaseSubscribeReplayInfoRequest() { - return {}; -} -exports.SubscribeReplayInfoRequest = { - encode: function (_, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseSubscribeReplayInfoRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (_) { - return {}; - }, - toJSON: function (_) { - var obj = {}; - return obj; - }, - create: function (base) { - return exports.SubscribeReplayInfoRequest.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (_) { - var message = createBaseSubscribeReplayInfoRequest(); - return message; - }, -}; -function createBaseSubscribeReplayInfoResponse() { - return { firstAvailable: undefined }; -} -exports.SubscribeReplayInfoResponse = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.firstAvailable !== undefined) { - writer.uint32(8).uint64(message.firstAvailable); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseSubscribeReplayInfoResponse(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.firstAvailable = reader.uint64().toString(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { firstAvailable: isSet(object.firstAvailable) ? globalThis.String(object.firstAvailable) : undefined }; - }, - toJSON: function (message) { - var obj = {}; - if (message.firstAvailable !== undefined) { - obj.firstAvailable = message.firstAvailable; - } - return obj; - }, - create: function (base) { - return exports.SubscribeReplayInfoResponse.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseSubscribeReplayInfoResponse(); - message.firstAvailable = (_a = object.firstAvailable) !== null && _a !== void 0 ? _a : undefined; - return message; - }, -}; -function createBasePingRequest() { - return { count: 0 }; -} -exports.PingRequest = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.count !== 0) { - writer.uint32(8).int32(message.count); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBasePingRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.count = reader.int32(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { count: isSet(object.count) ? 
globalThis.Number(object.count) : 0 }; - }, - toJSON: function (message) { - var obj = {}; - if (message.count !== 0) { - obj.count = Math.round(message.count); - } - return obj; - }, - create: function (base) { - return exports.PingRequest.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBasePingRequest(); - message.count = (_a = object.count) !== null && _a !== void 0 ? _a : 0; - return message; - }, -}; -function createBasePongResponse() { - return { count: 0 }; -} -exports.PongResponse = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.count !== 0) { - writer.uint32(8).int32(message.count); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBasePongResponse(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.count = reader.int32(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { count: isSet(object.count) ? globalThis.Number(object.count) : 0 }; - }, - toJSON: function (message) { - var obj = {}; - if (message.count !== 0) { - obj.count = Math.round(message.count); - } - return obj; - }, - create: function (base) { - return exports.PongResponse.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBasePongResponse(); - message.count = (_a = object.count) !== null && _a !== void 0 ? 
_a : 0; - return message; - }, -}; -function createBaseGetLatestBlockhashRequest() { - return { commitment: undefined }; -} -exports.GetLatestBlockhashRequest = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.commitment !== undefined) { - writer.uint32(8).int32(message.commitment); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseGetLatestBlockhashRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.commitment = reader.int32(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { commitment: isSet(object.commitment) ? commitmentLevelFromJSON(object.commitment) : undefined }; - }, - toJSON: function (message) { - var obj = {}; - if (message.commitment !== undefined) { - obj.commitment = commitmentLevelToJSON(message.commitment); - } - return obj; - }, - create: function (base) { - return exports.GetLatestBlockhashRequest.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseGetLatestBlockhashRequest(); - message.commitment = (_a = object.commitment) !== null && _a !== void 0 ? 
_a : undefined; - return message; - }, -}; -function createBaseGetLatestBlockhashResponse() { - return { slot: "0", blockhash: "", lastValidBlockHeight: "0" }; -} -exports.GetLatestBlockhashResponse = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.slot !== "0") { - writer.uint32(8).uint64(message.slot); - } - if (message.blockhash !== "") { - writer.uint32(18).string(message.blockhash); - } - if (message.lastValidBlockHeight !== "0") { - writer.uint32(24).uint64(message.lastValidBlockHeight); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseGetLatestBlockhashResponse(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.slot = reader.uint64().toString(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.blockhash = reader.string(); - continue; - } - case 3: { - if (tag !== 24) { - break; - } - message.lastValidBlockHeight = reader.uint64().toString(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", - blockhash: isSet(object.blockhash) ? globalThis.String(object.blockhash) : "", - lastValidBlockHeight: isSet(object.lastValidBlockHeight) ? 
globalThis.String(object.lastValidBlockHeight) : "0", - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.slot !== "0") { - obj.slot = message.slot; - } - if (message.blockhash !== "") { - obj.blockhash = message.blockhash; - } - if (message.lastValidBlockHeight !== "0") { - obj.lastValidBlockHeight = message.lastValidBlockHeight; - } - return obj; - }, - create: function (base) { - return exports.GetLatestBlockhashResponse.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c; - var message = createBaseGetLatestBlockhashResponse(); - message.slot = (_a = object.slot) !== null && _a !== void 0 ? _a : "0"; - message.blockhash = (_b = object.blockhash) !== null && _b !== void 0 ? _b : ""; - message.lastValidBlockHeight = (_c = object.lastValidBlockHeight) !== null && _c !== void 0 ? _c : "0"; - return message; - }, -}; -function createBaseGetBlockHeightRequest() { - return { commitment: undefined }; -} -exports.GetBlockHeightRequest = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.commitment !== undefined) { - writer.uint32(8).int32(message.commitment); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseGetBlockHeightRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.commitment = reader.int32(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { commitment: isSet(object.commitment) ? 
commitmentLevelFromJSON(object.commitment) : undefined }; - }, - toJSON: function (message) { - var obj = {}; - if (message.commitment !== undefined) { - obj.commitment = commitmentLevelToJSON(message.commitment); - } - return obj; - }, - create: function (base) { - return exports.GetBlockHeightRequest.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseGetBlockHeightRequest(); - message.commitment = (_a = object.commitment) !== null && _a !== void 0 ? _a : undefined; - return message; - }, -}; -function createBaseGetBlockHeightResponse() { - return { blockHeight: "0" }; -} -exports.GetBlockHeightResponse = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.blockHeight !== "0") { - writer.uint32(8).uint64(message.blockHeight); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseGetBlockHeightResponse(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.blockHeight = reader.uint64().toString(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { blockHeight: isSet(object.blockHeight) ? globalThis.String(object.blockHeight) : "0" }; - }, - toJSON: function (message) { - var obj = {}; - if (message.blockHeight !== "0") { - obj.blockHeight = message.blockHeight; - } - return obj; - }, - create: function (base) { - return exports.GetBlockHeightResponse.fromPartial(base !== null && base !== void 0 ? 
base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseGetBlockHeightResponse(); - message.blockHeight = (_a = object.blockHeight) !== null && _a !== void 0 ? _a : "0"; - return message; - }, -}; -function createBaseGetSlotRequest() { - return { commitment: undefined }; -} -exports.GetSlotRequest = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.commitment !== undefined) { - writer.uint32(8).int32(message.commitment); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseGetSlotRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.commitment = reader.int32(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { commitment: isSet(object.commitment) ? commitmentLevelFromJSON(object.commitment) : undefined }; - }, - toJSON: function (message) { - var obj = {}; - if (message.commitment !== undefined) { - obj.commitment = commitmentLevelToJSON(message.commitment); - } - return obj; - }, - create: function (base) { - return exports.GetSlotRequest.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseGetSlotRequest(); - message.commitment = (_a = object.commitment) !== null && _a !== void 0 ? 
_a : undefined; - return message; - }, -}; -function createBaseGetSlotResponse() { - return { slot: "0" }; -} -exports.GetSlotResponse = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.slot !== "0") { - writer.uint32(8).uint64(message.slot); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseGetSlotResponse(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.slot = reader.uint64().toString(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { slot: isSet(object.slot) ? globalThis.String(object.slot) : "0" }; - }, - toJSON: function (message) { - var obj = {}; - if (message.slot !== "0") { - obj.slot = message.slot; - } - return obj; - }, - create: function (base) { - return exports.GetSlotResponse.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseGetSlotResponse(); - message.slot = (_a = object.slot) !== null && _a !== void 0 ? _a : "0"; - return message; - }, -}; -function createBaseGetVersionRequest() { - return {}; -} -exports.GetVersionRequest = { - encode: function (_, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseGetVersionRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (_) { - return {}; - }, - toJSON: function (_) { - var obj = {}; - return obj; - }, - create: function (base) { - return exports.GetVersionRequest.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (_) { - var message = createBaseGetVersionRequest(); - return message; - }, -}; -function createBaseGetVersionResponse() { - return { version: "" }; -} -exports.GetVersionResponse = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.version !== "") { - writer.uint32(10).string(message.version); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseGetVersionResponse(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.version = reader.string(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { version: isSet(object.version) ? globalThis.String(object.version) : "" }; - }, - toJSON: function (message) { - var obj = {}; - if (message.version !== "") { - obj.version = message.version; - } - return obj; - }, - create: function (base) { - return exports.GetVersionResponse.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseGetVersionResponse(); - message.version = (_a = object.version) !== null && _a !== void 0 ? 
_a : ""; - return message; - }, -}; -function createBaseIsBlockhashValidRequest() { - return { blockhash: "", commitment: undefined }; -} -exports.IsBlockhashValidRequest = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.blockhash !== "") { - writer.uint32(10).string(message.blockhash); - } - if (message.commitment !== undefined) { - writer.uint32(16).int32(message.commitment); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseIsBlockhashValidRequest(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.blockhash = reader.string(); - continue; - } - case 2: { - if (tag !== 16) { - break; - } - message.commitment = reader.int32(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - blockhash: isSet(object.blockhash) ? globalThis.String(object.blockhash) : "", - commitment: isSet(object.commitment) ? commitmentLevelFromJSON(object.commitment) : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.blockhash !== "") { - obj.blockhash = message.blockhash; - } - if (message.commitment !== undefined) { - obj.commitment = commitmentLevelToJSON(message.commitment); - } - return obj; - }, - create: function (base) { - return exports.IsBlockhashValidRequest.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b; - var message = createBaseIsBlockhashValidRequest(); - message.blockhash = (_a = object.blockhash) !== null && _a !== void 0 ? _a : ""; - message.commitment = (_b = object.commitment) !== null && _b !== void 0 ? 
_b : undefined; - return message; - }, -}; -function createBaseIsBlockhashValidResponse() { - return { slot: "0", valid: false }; -} -exports.IsBlockhashValidResponse = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.slot !== "0") { - writer.uint32(8).uint64(message.slot); - } - if (message.valid !== false) { - writer.uint32(16).bool(message.valid); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseIsBlockhashValidResponse(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.slot = reader.uint64().toString(); - continue; - } - case 2: { - if (tag !== 16) { - break; - } - message.valid = reader.bool(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", - valid: isSet(object.valid) ? globalThis.Boolean(object.valid) : false, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.slot !== "0") { - obj.slot = message.slot; - } - if (message.valid !== false) { - obj.valid = message.valid; - } - return obj; - }, - create: function (base) { - return exports.IsBlockhashValidResponse.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b; - var message = createBaseIsBlockhashValidResponse(); - message.slot = (_a = object.slot) !== null && _a !== void 0 ? _a : "0"; - message.valid = (_b = object.valid) !== null && _b !== void 0 ? 
_b : false; - return message; - }, -}; -exports.GeyserService = { - subscribe: { - path: "/geyser.Geyser/Subscribe", - requestStream: true, - responseStream: true, - requestSerialize: function (value) { return Buffer.from(exports.SubscribeRequest.encode(value).finish()); }, - requestDeserialize: function (value) { return exports.SubscribeRequest.decode(value); }, - responseSerialize: function (value) { return Buffer.from(exports.SubscribeUpdate.encode(value).finish()); }, - responseDeserialize: function (value) { return exports.SubscribeUpdate.decode(value); }, - }, - subscribeReplayInfo: { - path: "/geyser.Geyser/SubscribeReplayInfo", - requestStream: false, - responseStream: false, - requestSerialize: function (value) { - return Buffer.from(exports.SubscribeReplayInfoRequest.encode(value).finish()); - }, - requestDeserialize: function (value) { return exports.SubscribeReplayInfoRequest.decode(value); }, - responseSerialize: function (value) { - return Buffer.from(exports.SubscribeReplayInfoResponse.encode(value).finish()); - }, - responseDeserialize: function (value) { return exports.SubscribeReplayInfoResponse.decode(value); }, - }, - ping: { - path: "/geyser.Geyser/Ping", - requestStream: false, - responseStream: false, - requestSerialize: function (value) { return Buffer.from(exports.PingRequest.encode(value).finish()); }, - requestDeserialize: function (value) { return exports.PingRequest.decode(value); }, - responseSerialize: function (value) { return Buffer.from(exports.PongResponse.encode(value).finish()); }, - responseDeserialize: function (value) { return exports.PongResponse.decode(value); }, - }, - getLatestBlockhash: { - path: "/geyser.Geyser/GetLatestBlockhash", - requestStream: false, - responseStream: false, - requestSerialize: function (value) { - return Buffer.from(exports.GetLatestBlockhashRequest.encode(value).finish()); - }, - requestDeserialize: function (value) { return exports.GetLatestBlockhashRequest.decode(value); }, - responseSerialize: 
function (value) { - return Buffer.from(exports.GetLatestBlockhashResponse.encode(value).finish()); - }, - responseDeserialize: function (value) { return exports.GetLatestBlockhashResponse.decode(value); }, - }, - getBlockHeight: { - path: "/geyser.Geyser/GetBlockHeight", - requestStream: false, - responseStream: false, - requestSerialize: function (value) { - return Buffer.from(exports.GetBlockHeightRequest.encode(value).finish()); - }, - requestDeserialize: function (value) { return exports.GetBlockHeightRequest.decode(value); }, - responseSerialize: function (value) { - return Buffer.from(exports.GetBlockHeightResponse.encode(value).finish()); - }, - responseDeserialize: function (value) { return exports.GetBlockHeightResponse.decode(value); }, - }, - getSlot: { - path: "/geyser.Geyser/GetSlot", - requestStream: false, - responseStream: false, - requestSerialize: function (value) { return Buffer.from(exports.GetSlotRequest.encode(value).finish()); }, - requestDeserialize: function (value) { return exports.GetSlotRequest.decode(value); }, - responseSerialize: function (value) { return Buffer.from(exports.GetSlotResponse.encode(value).finish()); }, - responseDeserialize: function (value) { return exports.GetSlotResponse.decode(value); }, - }, - isBlockhashValid: { - path: "/geyser.Geyser/IsBlockhashValid", - requestStream: false, - responseStream: false, - requestSerialize: function (value) { - return Buffer.from(exports.IsBlockhashValidRequest.encode(value).finish()); - }, - requestDeserialize: function (value) { return exports.IsBlockhashValidRequest.decode(value); }, - responseSerialize: function (value) { - return Buffer.from(exports.IsBlockhashValidResponse.encode(value).finish()); - }, - responseDeserialize: function (value) { return exports.IsBlockhashValidResponse.decode(value); }, - }, - getVersion: { - path: "/geyser.Geyser/GetVersion", - requestStream: false, - responseStream: false, - requestSerialize: function (value) { return 
Buffer.from(exports.GetVersionRequest.encode(value).finish()); }, - requestDeserialize: function (value) { return exports.GetVersionRequest.decode(value); }, - responseSerialize: function (value) { return Buffer.from(exports.GetVersionResponse.encode(value).finish()); }, - responseDeserialize: function (value) { return exports.GetVersionResponse.decode(value); }, - }, -}; -exports.GeyserClient = (0, grpc_js_1.makeGenericClientConstructor)(exports.GeyserService, "geyser.Geyser"); -function bytesFromBase64(b64) { - if (globalThis.Buffer) { - return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); - } - else { - var bin = globalThis.atob(b64); - var arr = new Uint8Array(bin.length); - for (var i = 0; i < bin.length; ++i) { - arr[i] = bin.charCodeAt(i); - } - return arr; - } -} -function base64FromBytes(arr) { - if (globalThis.Buffer) { - return globalThis.Buffer.from(arr).toString("base64"); - } - else { - var bin_1 = []; - arr.forEach(function (byte) { - bin_1.push(globalThis.String.fromCharCode(byte)); - }); - return globalThis.btoa(bin_1.join("")); - } -} -function toTimestamp(date) { - var seconds = Math.trunc(date.getTime() / 1000).toString(); - var nanos = (date.getTime() % 1000) * 1000000; - return { seconds: seconds, nanos: nanos }; -} -function fromTimestamp(t) { - var millis = (globalThis.Number(t.seconds) || 0) * 1000; - millis += (t.nanos || 0) / 1000000; - return new globalThis.Date(millis); -} -function fromJsonTimestamp(o) { - if (o instanceof globalThis.Date) { - return o; - } - else if (typeof o === "string") { - return new globalThis.Date(o); - } - else { - return fromTimestamp(timestamp_1.Timestamp.fromJSON(o)); - } -} -function isObject(value) { - return typeof value === "object" && value !== null; -} -function isSet(value) { - return value !== null && value !== undefined; -} diff --git a/typescript-sdk/src/grpc/google/protobuf/timestamp.js b/typescript-sdk/src/grpc/google/protobuf/timestamp.js deleted file mode 100644 index 88c6225..0000000 
--- a/typescript-sdk/src/grpc/google/protobuf/timestamp.js +++ /dev/null @@ -1,84 +0,0 @@ -"use strict"; -// Code generated by protoc-gen-ts_proto. DO NOT EDIT. -// versions: -// protoc-gen-ts_proto v2.7.7 -// protoc v3.12.4 -// source: google/protobuf/timestamp.proto -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Timestamp = exports.protobufPackage = void 0; -/* eslint-disable */ -var wire_1 = require("@bufbuild/protobuf/wire"); -exports.protobufPackage = "google.protobuf"; -function createBaseTimestamp() { - return { seconds: "0", nanos: 0 }; -} -exports.Timestamp = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.seconds !== "0") { - writer.uint32(8).int64(message.seconds); - } - if (message.nanos !== 0) { - writer.uint32(16).int32(message.nanos); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseTimestamp(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.seconds = reader.int64().toString(); - continue; - } - case 2: { - if (tag !== 16) { - break; - } - message.nanos = reader.int32(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - seconds: isSet(object.seconds) ? globalThis.String(object.seconds) : "0", - nanos: isSet(object.nanos) ? 
globalThis.Number(object.nanos) : 0, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.seconds !== "0") { - obj.seconds = message.seconds; - } - if (message.nanos !== 0) { - obj.nanos = Math.round(message.nanos); - } - return obj; - }, - create: function (base) { - return exports.Timestamp.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b; - var message = createBaseTimestamp(); - message.seconds = (_a = object.seconds) !== null && _a !== void 0 ? _a : "0"; - message.nanos = (_b = object.nanos) !== null && _b !== void 0 ? _b : 0; - return message; - }, -}; -function isSet(value) { - return value !== null && value !== undefined; -} diff --git a/typescript-sdk/src/grpc/solana-storage.js b/typescript-sdk/src/grpc/solana-storage.js deleted file mode 100644 index 416d842..0000000 --- a/typescript-sdk/src/grpc/solana-storage.js +++ /dev/null @@ -1,2055 +0,0 @@ -"use strict"; -// Code generated by protoc-gen-ts_proto. DO NOT EDIT. 
-// versions: -// protoc-gen-ts_proto v2.7.7 -// protoc v3.12.4 -// source: solana-storage.proto -Object.defineProperty(exports, "__esModule", { value: true }); -exports.NumPartitions = exports.BlockHeight = exports.UnixTimestamp = exports.Rewards = exports.Reward = exports.ReturnData = exports.UiTokenAmount = exports.TokenBalance = exports.CompiledInstruction = exports.InnerInstruction = exports.InnerInstructions = exports.TransactionError = exports.TransactionStatusMeta = exports.MessageAddressTableLookup = exports.MessageHeader = exports.Message = exports.Transaction = exports.ConfirmedTransaction = exports.ConfirmedBlock = exports.RewardType = exports.protobufPackage = void 0; -exports.rewardTypeFromJSON = rewardTypeFromJSON; -exports.rewardTypeToJSON = rewardTypeToJSON; -/* eslint-disable */ -var wire_1 = require("@bufbuild/protobuf/wire"); -exports.protobufPackage = "solana.storage.ConfirmedBlock"; -var RewardType; -(function (RewardType) { - RewardType[RewardType["Unspecified"] = 0] = "Unspecified"; - RewardType[RewardType["Fee"] = 1] = "Fee"; - RewardType[RewardType["Rent"] = 2] = "Rent"; - RewardType[RewardType["Staking"] = 3] = "Staking"; - RewardType[RewardType["Voting"] = 4] = "Voting"; - RewardType[RewardType["UNRECOGNIZED"] = -1] = "UNRECOGNIZED"; -})(RewardType || (exports.RewardType = RewardType = {})); -function rewardTypeFromJSON(object) { - switch (object) { - case 0: - case "Unspecified": - return RewardType.Unspecified; - case 1: - case "Fee": - return RewardType.Fee; - case 2: - case "Rent": - return RewardType.Rent; - case 3: - case "Staking": - return RewardType.Staking; - case 4: - case "Voting": - return RewardType.Voting; - case -1: - case "UNRECOGNIZED": - default: - return RewardType.UNRECOGNIZED; - } -} -function rewardTypeToJSON(object) { - switch (object) { - case RewardType.Unspecified: - return "Unspecified"; - case RewardType.Fee: - return "Fee"; - case RewardType.Rent: - return "Rent"; - case RewardType.Staking: - return 
"Staking"; - case RewardType.Voting: - return "Voting"; - case RewardType.UNRECOGNIZED: - default: - return "UNRECOGNIZED"; - } -} -function createBaseConfirmedBlock() { - return { - previousBlockhash: "", - blockhash: "", - parentSlot: "0", - transactions: [], - rewards: [], - blockTime: undefined, - blockHeight: undefined, - numPartitions: undefined, - }; -} -exports.ConfirmedBlock = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.previousBlockhash !== "") { - writer.uint32(10).string(message.previousBlockhash); - } - if (message.blockhash !== "") { - writer.uint32(18).string(message.blockhash); - } - if (message.parentSlot !== "0") { - writer.uint32(24).uint64(message.parentSlot); - } - for (var _i = 0, _a = message.transactions; _i < _a.length; _i++) { - var v = _a[_i]; - exports.ConfirmedTransaction.encode(v, writer.uint32(34).fork()).join(); - } - for (var _b = 0, _c = message.rewards; _b < _c.length; _b++) { - var v = _c[_b]; - exports.Reward.encode(v, writer.uint32(42).fork()).join(); - } - if (message.blockTime !== undefined) { - exports.UnixTimestamp.encode(message.blockTime, writer.uint32(50).fork()).join(); - } - if (message.blockHeight !== undefined) { - exports.BlockHeight.encode(message.blockHeight, writer.uint32(58).fork()).join(); - } - if (message.numPartitions !== undefined) { - exports.NumPartitions.encode(message.numPartitions, writer.uint32(66).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseConfirmedBlock(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.previousBlockhash = reader.string(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.blockhash = reader.string(); - continue; - } - case 3: { - if (tag !== 24) { - break; - } - message.parentSlot = reader.uint64().toString(); - continue; - } - case 4: { - if (tag !== 34) { - break; - } - message.transactions.push(exports.ConfirmedTransaction.decode(reader, reader.uint32())); - continue; - } - case 5: { - if (tag !== 42) { - break; - } - message.rewards.push(exports.Reward.decode(reader, reader.uint32())); - continue; - } - case 6: { - if (tag !== 50) { - break; - } - message.blockTime = exports.UnixTimestamp.decode(reader, reader.uint32()); - continue; - } - case 7: { - if (tag !== 58) { - break; - } - message.blockHeight = exports.BlockHeight.decode(reader, reader.uint32()); - continue; - } - case 8: { - if (tag !== 66) { - break; - } - message.numPartitions = exports.NumPartitions.decode(reader, reader.uint32()); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - previousBlockhash: isSet(object.previousBlockhash) ? globalThis.String(object.previousBlockhash) : "", - blockhash: isSet(object.blockhash) ? globalThis.String(object.blockhash) : "", - parentSlot: isSet(object.parentSlot) ? globalThis.String(object.parentSlot) : "0", - transactions: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.transactions) - ? object.transactions.map(function (e) { return exports.ConfirmedTransaction.fromJSON(e); }) - : [], - rewards: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.rewards) ? 
object.rewards.map(function (e) { return exports.Reward.fromJSON(e); }) : [], - blockTime: isSet(object.blockTime) ? exports.UnixTimestamp.fromJSON(object.blockTime) : undefined, - blockHeight: isSet(object.blockHeight) ? exports.BlockHeight.fromJSON(object.blockHeight) : undefined, - numPartitions: isSet(object.numPartitions) ? exports.NumPartitions.fromJSON(object.numPartitions) : undefined, - }; - }, - toJSON: function (message) { - var _a, _b; - var obj = {}; - if (message.previousBlockhash !== "") { - obj.previousBlockhash = message.previousBlockhash; - } - if (message.blockhash !== "") { - obj.blockhash = message.blockhash; - } - if (message.parentSlot !== "0") { - obj.parentSlot = message.parentSlot; - } - if ((_a = message.transactions) === null || _a === void 0 ? void 0 : _a.length) { - obj.transactions = message.transactions.map(function (e) { return exports.ConfirmedTransaction.toJSON(e); }); - } - if ((_b = message.rewards) === null || _b === void 0 ? void 0 : _b.length) { - obj.rewards = message.rewards.map(function (e) { return exports.Reward.toJSON(e); }); - } - if (message.blockTime !== undefined) { - obj.blockTime = exports.UnixTimestamp.toJSON(message.blockTime); - } - if (message.blockHeight !== undefined) { - obj.blockHeight = exports.BlockHeight.toJSON(message.blockHeight); - } - if (message.numPartitions !== undefined) { - obj.numPartitions = exports.NumPartitions.toJSON(message.numPartitions); - } - return obj; - }, - create: function (base) { - return exports.ConfirmedBlock.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c, _d, _e; - var message = createBaseConfirmedBlock(); - message.previousBlockhash = (_a = object.previousBlockhash) !== null && _a !== void 0 ? _a : ""; - message.blockhash = (_b = object.blockhash) !== null && _b !== void 0 ? _b : ""; - message.parentSlot = (_c = object.parentSlot) !== null && _c !== void 0 ? 
_c : "0"; - message.transactions = ((_d = object.transactions) === null || _d === void 0 ? void 0 : _d.map(function (e) { return exports.ConfirmedTransaction.fromPartial(e); })) || []; - message.rewards = ((_e = object.rewards) === null || _e === void 0 ? void 0 : _e.map(function (e) { return exports.Reward.fromPartial(e); })) || []; - message.blockTime = (object.blockTime !== undefined && object.blockTime !== null) - ? exports.UnixTimestamp.fromPartial(object.blockTime) - : undefined; - message.blockHeight = (object.blockHeight !== undefined && object.blockHeight !== null) - ? exports.BlockHeight.fromPartial(object.blockHeight) - : undefined; - message.numPartitions = (object.numPartitions !== undefined && object.numPartitions !== null) - ? exports.NumPartitions.fromPartial(object.numPartitions) - : undefined; - return message; - }, -}; -function createBaseConfirmedTransaction() { - return { transaction: undefined, meta: undefined }; -} -exports.ConfirmedTransaction = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.transaction !== undefined) { - exports.Transaction.encode(message.transaction, writer.uint32(10).fork()).join(); - } - if (message.meta !== undefined) { - exports.TransactionStatusMeta.encode(message.meta, writer.uint32(18).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseConfirmedTransaction(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.transaction = exports.Transaction.decode(reader, reader.uint32()); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.meta = exports.TransactionStatusMeta.decode(reader, reader.uint32()); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - transaction: isSet(object.transaction) ? exports.Transaction.fromJSON(object.transaction) : undefined, - meta: isSet(object.meta) ? exports.TransactionStatusMeta.fromJSON(object.meta) : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.transaction !== undefined) { - obj.transaction = exports.Transaction.toJSON(message.transaction); - } - if (message.meta !== undefined) { - obj.meta = exports.TransactionStatusMeta.toJSON(message.meta); - } - return obj; - }, - create: function (base) { - return exports.ConfirmedTransaction.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var message = createBaseConfirmedTransaction(); - message.transaction = (object.transaction !== undefined && object.transaction !== null) - ? exports.Transaction.fromPartial(object.transaction) - : undefined; - message.meta = (object.meta !== undefined && object.meta !== null) - ? 
exports.TransactionStatusMeta.fromPartial(object.meta) - : undefined; - return message; - }, -}; -function createBaseTransaction() { - return { signatures: [], message: undefined }; -} -exports.Transaction = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - for (var _i = 0, _a = message.signatures; _i < _a.length; _i++) { - var v = _a[_i]; - writer.uint32(10).bytes(v); - } - if (message.message !== undefined) { - exports.Message.encode(message.message, writer.uint32(18).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseTransaction(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.signatures.push(reader.bytes()); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.message = exports.Message.decode(reader, reader.uint32()); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - signatures: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.signatures) - ? object.signatures.map(function (e) { return bytesFromBase64(e); }) - : [], - message: isSet(object.message) ? exports.Message.fromJSON(object.message) : undefined, - }; - }, - toJSON: function (message) { - var _a; - var obj = {}; - if ((_a = message.signatures) === null || _a === void 0 ? 
void 0 : _a.length) { - obj.signatures = message.signatures.map(function (e) { return base64FromBytes(e); }); - } - if (message.message !== undefined) { - obj.message = exports.Message.toJSON(message.message); - } - return obj; - }, - create: function (base) { - return exports.Transaction.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseTransaction(); - message.signatures = ((_a = object.signatures) === null || _a === void 0 ? void 0 : _a.map(function (e) { return e; })) || []; - message.message = (object.message !== undefined && object.message !== null) - ? exports.Message.fromPartial(object.message) - : undefined; - return message; - }, -}; -function createBaseMessage() { - return { - header: undefined, - accountKeys: [], - recentBlockhash: new Uint8Array(0), - instructions: [], - versioned: false, - addressTableLookups: [], - }; -} -exports.Message = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.header !== undefined) { - exports.MessageHeader.encode(message.header, writer.uint32(10).fork()).join(); - } - for (var _i = 0, _a = message.accountKeys; _i < _a.length; _i++) { - var v = _a[_i]; - writer.uint32(18).bytes(v); - } - if (message.recentBlockhash.length !== 0) { - writer.uint32(26).bytes(message.recentBlockhash); - } - for (var _b = 0, _c = message.instructions; _b < _c.length; _b++) { - var v = _c[_b]; - exports.CompiledInstruction.encode(v, writer.uint32(34).fork()).join(); - } - if (message.versioned !== false) { - writer.uint32(40).bool(message.versioned); - } - for (var _d = 0, _e = message.addressTableLookups; _d < _e.length; _d++) { - var v = _e[_d]; - exports.MessageAddressTableLookup.encode(v, writer.uint32(50).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? 
input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseMessage(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.header = exports.MessageHeader.decode(reader, reader.uint32()); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.accountKeys.push(reader.bytes()); - continue; - } - case 3: { - if (tag !== 26) { - break; - } - message.recentBlockhash = reader.bytes(); - continue; - } - case 4: { - if (tag !== 34) { - break; - } - message.instructions.push(exports.CompiledInstruction.decode(reader, reader.uint32())); - continue; - } - case 5: { - if (tag !== 40) { - break; - } - message.versioned = reader.bool(); - continue; - } - case 6: { - if (tag !== 50) { - break; - } - message.addressTableLookups.push(exports.MessageAddressTableLookup.decode(reader, reader.uint32())); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - header: isSet(object.header) ? exports.MessageHeader.fromJSON(object.header) : undefined, - accountKeys: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.accountKeys) - ? object.accountKeys.map(function (e) { return bytesFromBase64(e); }) - : [], - recentBlockhash: isSet(object.recentBlockhash) ? bytesFromBase64(object.recentBlockhash) : new Uint8Array(0), - instructions: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.instructions) - ? object.instructions.map(function (e) { return exports.CompiledInstruction.fromJSON(e); }) - : [], - versioned: isSet(object.versioned) ? globalThis.Boolean(object.versioned) : false, - addressTableLookups: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.addressTableLookups) - ? 
object.addressTableLookups.map(function (e) { return exports.MessageAddressTableLookup.fromJSON(e); }) - : [], - }; - }, - toJSON: function (message) { - var _a, _b, _c; - var obj = {}; - if (message.header !== undefined) { - obj.header = exports.MessageHeader.toJSON(message.header); - } - if ((_a = message.accountKeys) === null || _a === void 0 ? void 0 : _a.length) { - obj.accountKeys = message.accountKeys.map(function (e) { return base64FromBytes(e); }); - } - if (message.recentBlockhash.length !== 0) { - obj.recentBlockhash = base64FromBytes(message.recentBlockhash); - } - if ((_b = message.instructions) === null || _b === void 0 ? void 0 : _b.length) { - obj.instructions = message.instructions.map(function (e) { return exports.CompiledInstruction.toJSON(e); }); - } - if (message.versioned !== false) { - obj.versioned = message.versioned; - } - if ((_c = message.addressTableLookups) === null || _c === void 0 ? void 0 : _c.length) { - obj.addressTableLookups = message.addressTableLookups.map(function (e) { return exports.MessageAddressTableLookup.toJSON(e); }); - } - return obj; - }, - create: function (base) { - return exports.Message.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c, _d, _e; - var message = createBaseMessage(); - message.header = (object.header !== undefined && object.header !== null) - ? exports.MessageHeader.fromPartial(object.header) - : undefined; - message.accountKeys = ((_a = object.accountKeys) === null || _a === void 0 ? void 0 : _a.map(function (e) { return e; })) || []; - message.recentBlockhash = (_b = object.recentBlockhash) !== null && _b !== void 0 ? _b : new Uint8Array(0); - message.instructions = ((_c = object.instructions) === null || _c === void 0 ? void 0 : _c.map(function (e) { return exports.CompiledInstruction.fromPartial(e); })) || []; - message.versioned = (_d = object.versioned) !== null && _d !== void 0 ? 
_d : false; - message.addressTableLookups = ((_e = object.addressTableLookups) === null || _e === void 0 ? void 0 : _e.map(function (e) { return exports.MessageAddressTableLookup.fromPartial(e); })) || - []; - return message; - }, -}; -function createBaseMessageHeader() { - return { numRequiredSignatures: 0, numReadonlySignedAccounts: 0, numReadonlyUnsignedAccounts: 0 }; -} -exports.MessageHeader = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.numRequiredSignatures !== 0) { - writer.uint32(8).uint32(message.numRequiredSignatures); - } - if (message.numReadonlySignedAccounts !== 0) { - writer.uint32(16).uint32(message.numReadonlySignedAccounts); - } - if (message.numReadonlyUnsignedAccounts !== 0) { - writer.uint32(24).uint32(message.numReadonlyUnsignedAccounts); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseMessageHeader(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.numRequiredSignatures = reader.uint32(); - continue; - } - case 2: { - if (tag !== 16) { - break; - } - message.numReadonlySignedAccounts = reader.uint32(); - continue; - } - case 3: { - if (tag !== 24) { - break; - } - message.numReadonlyUnsignedAccounts = reader.uint32(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - numRequiredSignatures: isSet(object.numRequiredSignatures) ? globalThis.Number(object.numRequiredSignatures) : 0, - numReadonlySignedAccounts: isSet(object.numReadonlySignedAccounts) - ? 
globalThis.Number(object.numReadonlySignedAccounts) - : 0, - numReadonlyUnsignedAccounts: isSet(object.numReadonlyUnsignedAccounts) - ? globalThis.Number(object.numReadonlyUnsignedAccounts) - : 0, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.numRequiredSignatures !== 0) { - obj.numRequiredSignatures = Math.round(message.numRequiredSignatures); - } - if (message.numReadonlySignedAccounts !== 0) { - obj.numReadonlySignedAccounts = Math.round(message.numReadonlySignedAccounts); - } - if (message.numReadonlyUnsignedAccounts !== 0) { - obj.numReadonlyUnsignedAccounts = Math.round(message.numReadonlyUnsignedAccounts); - } - return obj; - }, - create: function (base) { - return exports.MessageHeader.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c; - var message = createBaseMessageHeader(); - message.numRequiredSignatures = (_a = object.numRequiredSignatures) !== null && _a !== void 0 ? _a : 0; - message.numReadonlySignedAccounts = (_b = object.numReadonlySignedAccounts) !== null && _b !== void 0 ? _b : 0; - message.numReadonlyUnsignedAccounts = (_c = object.numReadonlyUnsignedAccounts) !== null && _c !== void 0 ? _c : 0; - return message; - }, -}; -function createBaseMessageAddressTableLookup() { - return { accountKey: new Uint8Array(0), writableIndexes: new Uint8Array(0), readonlyIndexes: new Uint8Array(0) }; -} -exports.MessageAddressTableLookup = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.accountKey.length !== 0) { - writer.uint32(10).bytes(message.accountKey); - } - if (message.writableIndexes.length !== 0) { - writer.uint32(18).bytes(message.writableIndexes); - } - if (message.readonlyIndexes.length !== 0) { - writer.uint32(26).bytes(message.readonlyIndexes); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? 
input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseMessageAddressTableLookup(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.accountKey = reader.bytes(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.writableIndexes = reader.bytes(); - continue; - } - case 3: { - if (tag !== 26) { - break; - } - message.readonlyIndexes = reader.bytes(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - accountKey: isSet(object.accountKey) ? bytesFromBase64(object.accountKey) : new Uint8Array(0), - writableIndexes: isSet(object.writableIndexes) ? bytesFromBase64(object.writableIndexes) : new Uint8Array(0), - readonlyIndexes: isSet(object.readonlyIndexes) ? bytesFromBase64(object.readonlyIndexes) : new Uint8Array(0), - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.accountKey.length !== 0) { - obj.accountKey = base64FromBytes(message.accountKey); - } - if (message.writableIndexes.length !== 0) { - obj.writableIndexes = base64FromBytes(message.writableIndexes); - } - if (message.readonlyIndexes.length !== 0) { - obj.readonlyIndexes = base64FromBytes(message.readonlyIndexes); - } - return obj; - }, - create: function (base) { - return exports.MessageAddressTableLookup.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c; - var message = createBaseMessageAddressTableLookup(); - message.accountKey = (_a = object.accountKey) !== null && _a !== void 0 ? _a : new Uint8Array(0); - message.writableIndexes = (_b = object.writableIndexes) !== null && _b !== void 0 ? _b : new Uint8Array(0); - message.readonlyIndexes = (_c = object.readonlyIndexes) !== null && _c !== void 0 ? 
_c : new Uint8Array(0); - return message; - }, -}; -function createBaseTransactionStatusMeta() { - return { - err: undefined, - fee: "0", - preBalances: [], - postBalances: [], - innerInstructions: [], - innerInstructionsNone: false, - logMessages: [], - logMessagesNone: false, - preTokenBalances: [], - postTokenBalances: [], - rewards: [], - loadedWritableAddresses: [], - loadedReadonlyAddresses: [], - returnData: undefined, - returnDataNone: false, - computeUnitsConsumed: undefined, - costUnits: undefined, - }; -} -exports.TransactionStatusMeta = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.err !== undefined) { - exports.TransactionError.encode(message.err, writer.uint32(10).fork()).join(); - } - if (message.fee !== "0") { - writer.uint32(16).uint64(message.fee); - } - writer.uint32(26).fork(); - for (var _i = 0, _a = message.preBalances; _i < _a.length; _i++) { - var v = _a[_i]; - writer.uint64(v); - } - writer.join(); - writer.uint32(34).fork(); - for (var _b = 0, _c = message.postBalances; _b < _c.length; _b++) { - var v = _c[_b]; - writer.uint64(v); - } - writer.join(); - for (var _d = 0, _e = message.innerInstructions; _d < _e.length; _d++) { - var v = _e[_d]; - exports.InnerInstructions.encode(v, writer.uint32(42).fork()).join(); - } - if (message.innerInstructionsNone !== false) { - writer.uint32(80).bool(message.innerInstructionsNone); - } - for (var _f = 0, _g = message.logMessages; _f < _g.length; _f++) { - var v = _g[_f]; - writer.uint32(50).string(v); - } - if (message.logMessagesNone !== false) { - writer.uint32(88).bool(message.logMessagesNone); - } - for (var _h = 0, _j = message.preTokenBalances; _h < _j.length; _h++) { - var v = _j[_h]; - exports.TokenBalance.encode(v, writer.uint32(58).fork()).join(); - } - for (var _k = 0, _l = message.postTokenBalances; _k < _l.length; _k++) { - var v = _l[_k]; - exports.TokenBalance.encode(v, writer.uint32(66).fork()).join(); - } - for 
(var _m = 0, _o = message.rewards; _m < _o.length; _m++) { - var v = _o[_m]; - exports.Reward.encode(v, writer.uint32(74).fork()).join(); - } - for (var _p = 0, _q = message.loadedWritableAddresses; _p < _q.length; _p++) { - var v = _q[_p]; - writer.uint32(98).bytes(v); - } - for (var _r = 0, _s = message.loadedReadonlyAddresses; _r < _s.length; _r++) { - var v = _s[_r]; - writer.uint32(106).bytes(v); - } - if (message.returnData !== undefined) { - exports.ReturnData.encode(message.returnData, writer.uint32(114).fork()).join(); - } - if (message.returnDataNone !== false) { - writer.uint32(120).bool(message.returnDataNone); - } - if (message.computeUnitsConsumed !== undefined) { - writer.uint32(128).uint64(message.computeUnitsConsumed); - } - if (message.costUnits !== undefined) { - writer.uint32(136).uint64(message.costUnits); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseTransactionStatusMeta(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.err = exports.TransactionError.decode(reader, reader.uint32()); - continue; - } - case 2: { - if (tag !== 16) { - break; - } - message.fee = reader.uint64().toString(); - continue; - } - case 3: { - if (tag === 24) { - message.preBalances.push(reader.uint64().toString()); - continue; - } - if (tag === 26) { - var end2 = reader.uint32() + reader.pos; - while (reader.pos < end2) { - message.preBalances.push(reader.uint64().toString()); - } - continue; - } - break; - } - case 4: { - if (tag === 32) { - message.postBalances.push(reader.uint64().toString()); - continue; - } - if (tag === 34) { - var end2 = reader.uint32() + reader.pos; - while (reader.pos < end2) { - message.postBalances.push(reader.uint64().toString()); - } - continue; - } - break; - } - case 5: { - if (tag !== 42) { - break; - } - message.innerInstructions.push(exports.InnerInstructions.decode(reader, reader.uint32())); - continue; - } - case 10: { - if (tag !== 80) { - break; - } - message.innerInstructionsNone = reader.bool(); - continue; - } - case 6: { - if (tag !== 50) { - break; - } - message.logMessages.push(reader.string()); - continue; - } - case 11: { - if (tag !== 88) { - break; - } - message.logMessagesNone = reader.bool(); - continue; - } - case 7: { - if (tag !== 58) { - break; - } - message.preTokenBalances.push(exports.TokenBalance.decode(reader, reader.uint32())); - continue; - } - case 8: { - if (tag !== 66) { - break; - } - message.postTokenBalances.push(exports.TokenBalance.decode(reader, reader.uint32())); - continue; - } - case 9: { - if (tag !== 74) { - break; - } - message.rewards.push(exports.Reward.decode(reader, reader.uint32())); - continue; - } - case 12: { - if (tag !== 98) { - break; - } - message.loadedWritableAddresses.push(reader.bytes()); - continue; 
- } - case 13: { - if (tag !== 106) { - break; - } - message.loadedReadonlyAddresses.push(reader.bytes()); - continue; - } - case 14: { - if (tag !== 114) { - break; - } - message.returnData = exports.ReturnData.decode(reader, reader.uint32()); - continue; - } - case 15: { - if (tag !== 120) { - break; - } - message.returnDataNone = reader.bool(); - continue; - } - case 16: { - if (tag !== 128) { - break; - } - message.computeUnitsConsumed = reader.uint64().toString(); - continue; - } - case 17: { - if (tag !== 136) { - break; - } - message.costUnits = reader.uint64().toString(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - err: isSet(object.err) ? exports.TransactionError.fromJSON(object.err) : undefined, - fee: isSet(object.fee) ? globalThis.String(object.fee) : "0", - preBalances: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.preBalances) - ? object.preBalances.map(function (e) { return globalThis.String(e); }) - : [], - postBalances: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.postBalances) - ? object.postBalances.map(function (e) { return globalThis.String(e); }) - : [], - innerInstructions: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.innerInstructions) - ? object.innerInstructions.map(function (e) { return exports.InnerInstructions.fromJSON(e); }) - : [], - innerInstructionsNone: isSet(object.innerInstructionsNone) - ? globalThis.Boolean(object.innerInstructionsNone) - : false, - logMessages: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.logMessages) - ? object.logMessages.map(function (e) { return globalThis.String(e); }) - : [], - logMessagesNone: isSet(object.logMessagesNone) ? 
globalThis.Boolean(object.logMessagesNone) : false, - preTokenBalances: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.preTokenBalances) - ? object.preTokenBalances.map(function (e) { return exports.TokenBalance.fromJSON(e); }) - : [], - postTokenBalances: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.postTokenBalances) - ? object.postTokenBalances.map(function (e) { return exports.TokenBalance.fromJSON(e); }) - : [], - rewards: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.rewards) ? object.rewards.map(function (e) { return exports.Reward.fromJSON(e); }) : [], - loadedWritableAddresses: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.loadedWritableAddresses) - ? object.loadedWritableAddresses.map(function (e) { return bytesFromBase64(e); }) - : [], - loadedReadonlyAddresses: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.loadedReadonlyAddresses) - ? object.loadedReadonlyAddresses.map(function (e) { return bytesFromBase64(e); }) - : [], - returnData: isSet(object.returnData) ? exports.ReturnData.fromJSON(object.returnData) : undefined, - returnDataNone: isSet(object.returnDataNone) ? globalThis.Boolean(object.returnDataNone) : false, - computeUnitsConsumed: isSet(object.computeUnitsConsumed) - ? globalThis.String(object.computeUnitsConsumed) - : undefined, - costUnits: isSet(object.costUnits) ? globalThis.String(object.costUnits) : undefined, - }; - }, - toJSON: function (message) { - var _a, _b, _c, _d, _e, _f, _g, _h, _j; - var obj = {}; - if (message.err !== undefined) { - obj.err = exports.TransactionError.toJSON(message.err); - } - if (message.fee !== "0") { - obj.fee = message.fee; - } - if ((_a = message.preBalances) === null || _a === void 0 ? void 0 : _a.length) { - obj.preBalances = message.preBalances; - } - if ((_b = message.postBalances) === null || _b === void 0 ? 
void 0 : _b.length) { - obj.postBalances = message.postBalances; - } - if ((_c = message.innerInstructions) === null || _c === void 0 ? void 0 : _c.length) { - obj.innerInstructions = message.innerInstructions.map(function (e) { return exports.InnerInstructions.toJSON(e); }); - } - if (message.innerInstructionsNone !== false) { - obj.innerInstructionsNone = message.innerInstructionsNone; - } - if ((_d = message.logMessages) === null || _d === void 0 ? void 0 : _d.length) { - obj.logMessages = message.logMessages; - } - if (message.logMessagesNone !== false) { - obj.logMessagesNone = message.logMessagesNone; - } - if ((_e = message.preTokenBalances) === null || _e === void 0 ? void 0 : _e.length) { - obj.preTokenBalances = message.preTokenBalances.map(function (e) { return exports.TokenBalance.toJSON(e); }); - } - if ((_f = message.postTokenBalances) === null || _f === void 0 ? void 0 : _f.length) { - obj.postTokenBalances = message.postTokenBalances.map(function (e) { return exports.TokenBalance.toJSON(e); }); - } - if ((_g = message.rewards) === null || _g === void 0 ? void 0 : _g.length) { - obj.rewards = message.rewards.map(function (e) { return exports.Reward.toJSON(e); }); - } - if ((_h = message.loadedWritableAddresses) === null || _h === void 0 ? void 0 : _h.length) { - obj.loadedWritableAddresses = message.loadedWritableAddresses.map(function (e) { return base64FromBytes(e); }); - } - if ((_j = message.loadedReadonlyAddresses) === null || _j === void 0 ? 
void 0 : _j.length) { - obj.loadedReadonlyAddresses = message.loadedReadonlyAddresses.map(function (e) { return base64FromBytes(e); }); - } - if (message.returnData !== undefined) { - obj.returnData = exports.ReturnData.toJSON(message.returnData); - } - if (message.returnDataNone !== false) { - obj.returnDataNone = message.returnDataNone; - } - if (message.computeUnitsConsumed !== undefined) { - obj.computeUnitsConsumed = message.computeUnitsConsumed; - } - if (message.costUnits !== undefined) { - obj.costUnits = message.costUnits; - } - return obj; - }, - create: function (base) { - return exports.TransactionStatusMeta.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q; - var message = createBaseTransactionStatusMeta(); - message.err = (object.err !== undefined && object.err !== null) - ? exports.TransactionError.fromPartial(object.err) - : undefined; - message.fee = (_a = object.fee) !== null && _a !== void 0 ? _a : "0"; - message.preBalances = ((_b = object.preBalances) === null || _b === void 0 ? void 0 : _b.map(function (e) { return e; })) || []; - message.postBalances = ((_c = object.postBalances) === null || _c === void 0 ? void 0 : _c.map(function (e) { return e; })) || []; - message.innerInstructions = ((_d = object.innerInstructions) === null || _d === void 0 ? void 0 : _d.map(function (e) { return exports.InnerInstructions.fromPartial(e); })) || []; - message.innerInstructionsNone = (_e = object.innerInstructionsNone) !== null && _e !== void 0 ? _e : false; - message.logMessages = ((_f = object.logMessages) === null || _f === void 0 ? void 0 : _f.map(function (e) { return e; })) || []; - message.logMessagesNone = (_g = object.logMessagesNone) !== null && _g !== void 0 ? _g : false; - message.preTokenBalances = ((_h = object.preTokenBalances) === null || _h === void 0 ? 
void 0 : _h.map(function (e) { return exports.TokenBalance.fromPartial(e); })) || []; - message.postTokenBalances = ((_j = object.postTokenBalances) === null || _j === void 0 ? void 0 : _j.map(function (e) { return exports.TokenBalance.fromPartial(e); })) || []; - message.rewards = ((_k = object.rewards) === null || _k === void 0 ? void 0 : _k.map(function (e) { return exports.Reward.fromPartial(e); })) || []; - message.loadedWritableAddresses = ((_l = object.loadedWritableAddresses) === null || _l === void 0 ? void 0 : _l.map(function (e) { return e; })) || []; - message.loadedReadonlyAddresses = ((_m = object.loadedReadonlyAddresses) === null || _m === void 0 ? void 0 : _m.map(function (e) { return e; })) || []; - message.returnData = (object.returnData !== undefined && object.returnData !== null) - ? exports.ReturnData.fromPartial(object.returnData) - : undefined; - message.returnDataNone = (_o = object.returnDataNone) !== null && _o !== void 0 ? _o : false; - message.computeUnitsConsumed = (_p = object.computeUnitsConsumed) !== null && _p !== void 0 ? _p : undefined; - message.costUnits = (_q = object.costUnits) !== null && _q !== void 0 ? _q : undefined; - return message; - }, -}; -function createBaseTransactionError() { - return { err: new Uint8Array(0) }; -} -exports.TransactionError = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.err.length !== 0) { - writer.uint32(10).bytes(message.err); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseTransactionError(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.err = reader.bytes(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { err: isSet(object.err) ? bytesFromBase64(object.err) : new Uint8Array(0) }; - }, - toJSON: function (message) { - var obj = {}; - if (message.err.length !== 0) { - obj.err = base64FromBytes(message.err); - } - return obj; - }, - create: function (base) { - return exports.TransactionError.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseTransactionError(); - message.err = (_a = object.err) !== null && _a !== void 0 ? _a : new Uint8Array(0); - return message; - }, -}; -function createBaseInnerInstructions() { - return { index: 0, instructions: [] }; -} -exports.InnerInstructions = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.index !== 0) { - writer.uint32(8).uint32(message.index); - } - for (var _i = 0, _a = message.instructions; _i < _a.length; _i++) { - var v = _a[_i]; - exports.InnerInstruction.encode(v, writer.uint32(18).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseInnerInstructions(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.index = reader.uint32(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.instructions.push(exports.InnerInstruction.decode(reader, reader.uint32())); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - index: isSet(object.index) ? globalThis.Number(object.index) : 0, - instructions: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.instructions) - ? object.instructions.map(function (e) { return exports.InnerInstruction.fromJSON(e); }) - : [], - }; - }, - toJSON: function (message) { - var _a; - var obj = {}; - if (message.index !== 0) { - obj.index = Math.round(message.index); - } - if ((_a = message.instructions) === null || _a === void 0 ? void 0 : _a.length) { - obj.instructions = message.instructions.map(function (e) { return exports.InnerInstruction.toJSON(e); }); - } - return obj; - }, - create: function (base) { - return exports.InnerInstructions.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b; - var message = createBaseInnerInstructions(); - message.index = (_a = object.index) !== null && _a !== void 0 ? _a : 0; - message.instructions = ((_b = object.instructions) === null || _b === void 0 ? 
void 0 : _b.map(function (e) { return exports.InnerInstruction.fromPartial(e); })) || []; - return message; - }, -}; -function createBaseInnerInstruction() { - return { programIdIndex: 0, accounts: new Uint8Array(0), data: new Uint8Array(0), stackHeight: undefined }; -} -exports.InnerInstruction = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.programIdIndex !== 0) { - writer.uint32(8).uint32(message.programIdIndex); - } - if (message.accounts.length !== 0) { - writer.uint32(18).bytes(message.accounts); - } - if (message.data.length !== 0) { - writer.uint32(26).bytes(message.data); - } - if (message.stackHeight !== undefined) { - writer.uint32(32).uint32(message.stackHeight); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseInnerInstruction(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.programIdIndex = reader.uint32(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.accounts = reader.bytes(); - continue; - } - case 3: { - if (tag !== 26) { - break; - } - message.data = reader.bytes(); - continue; - } - case 4: { - if (tag !== 32) { - break; - } - message.stackHeight = reader.uint32(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - programIdIndex: isSet(object.programIdIndex) ? globalThis.Number(object.programIdIndex) : 0, - accounts: isSet(object.accounts) ? bytesFromBase64(object.accounts) : new Uint8Array(0), - data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(0), - stackHeight: isSet(object.stackHeight) ? 
globalThis.Number(object.stackHeight) : undefined, - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.programIdIndex !== 0) { - obj.programIdIndex = Math.round(message.programIdIndex); - } - if (message.accounts.length !== 0) { - obj.accounts = base64FromBytes(message.accounts); - } - if (message.data.length !== 0) { - obj.data = base64FromBytes(message.data); - } - if (message.stackHeight !== undefined) { - obj.stackHeight = Math.round(message.stackHeight); - } - return obj; - }, - create: function (base) { - return exports.InnerInstruction.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c, _d; - var message = createBaseInnerInstruction(); - message.programIdIndex = (_a = object.programIdIndex) !== null && _a !== void 0 ? _a : 0; - message.accounts = (_b = object.accounts) !== null && _b !== void 0 ? _b : new Uint8Array(0); - message.data = (_c = object.data) !== null && _c !== void 0 ? _c : new Uint8Array(0); - message.stackHeight = (_d = object.stackHeight) !== null && _d !== void 0 ? _d : undefined; - return message; - }, -}; -function createBaseCompiledInstruction() { - return { programIdIndex: 0, accounts: new Uint8Array(0), data: new Uint8Array(0) }; -} -exports.CompiledInstruction = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.programIdIndex !== 0) { - writer.uint32(8).uint32(message.programIdIndex); - } - if (message.accounts.length !== 0) { - writer.uint32(18).bytes(message.accounts); - } - if (message.data.length !== 0) { - writer.uint32(26).bytes(message.data); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseCompiledInstruction(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.programIdIndex = reader.uint32(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.accounts = reader.bytes(); - continue; - } - case 3: { - if (tag !== 26) { - break; - } - message.data = reader.bytes(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - programIdIndex: isSet(object.programIdIndex) ? globalThis.Number(object.programIdIndex) : 0, - accounts: isSet(object.accounts) ? bytesFromBase64(object.accounts) : new Uint8Array(0), - data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(0), - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.programIdIndex !== 0) { - obj.programIdIndex = Math.round(message.programIdIndex); - } - if (message.accounts.length !== 0) { - obj.accounts = base64FromBytes(message.accounts); - } - if (message.data.length !== 0) { - obj.data = base64FromBytes(message.data); - } - return obj; - }, - create: function (base) { - return exports.CompiledInstruction.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c; - var message = createBaseCompiledInstruction(); - message.programIdIndex = (_a = object.programIdIndex) !== null && _a !== void 0 ? _a : 0; - message.accounts = (_b = object.accounts) !== null && _b !== void 0 ? _b : new Uint8Array(0); - message.data = (_c = object.data) !== null && _c !== void 0 ? 
_c : new Uint8Array(0); - return message; - }, -}; -function createBaseTokenBalance() { - return { accountIndex: 0, mint: "", uiTokenAmount: undefined, owner: "", programId: "" }; -} -exports.TokenBalance = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.accountIndex !== 0) { - writer.uint32(8).uint32(message.accountIndex); - } - if (message.mint !== "") { - writer.uint32(18).string(message.mint); - } - if (message.uiTokenAmount !== undefined) { - exports.UiTokenAmount.encode(message.uiTokenAmount, writer.uint32(26).fork()).join(); - } - if (message.owner !== "") { - writer.uint32(34).string(message.owner); - } - if (message.programId !== "") { - writer.uint32(42).string(message.programId); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseTokenBalance(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.accountIndex = reader.uint32(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.mint = reader.string(); - continue; - } - case 3: { - if (tag !== 26) { - break; - } - message.uiTokenAmount = exports.UiTokenAmount.decode(reader, reader.uint32()); - continue; - } - case 4: { - if (tag !== 34) { - break; - } - message.owner = reader.string(); - continue; - } - case 5: { - if (tag !== 42) { - break; - } - message.programId = reader.string(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - accountIndex: isSet(object.accountIndex) ? globalThis.Number(object.accountIndex) : 0, - mint: isSet(object.mint) ? 
globalThis.String(object.mint) : "", - uiTokenAmount: isSet(object.uiTokenAmount) ? exports.UiTokenAmount.fromJSON(object.uiTokenAmount) : undefined, - owner: isSet(object.owner) ? globalThis.String(object.owner) : "", - programId: isSet(object.programId) ? globalThis.String(object.programId) : "", - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.accountIndex !== 0) { - obj.accountIndex = Math.round(message.accountIndex); - } - if (message.mint !== "") { - obj.mint = message.mint; - } - if (message.uiTokenAmount !== undefined) { - obj.uiTokenAmount = exports.UiTokenAmount.toJSON(message.uiTokenAmount); - } - if (message.owner !== "") { - obj.owner = message.owner; - } - if (message.programId !== "") { - obj.programId = message.programId; - } - return obj; - }, - create: function (base) { - return exports.TokenBalance.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c, _d; - var message = createBaseTokenBalance(); - message.accountIndex = (_a = object.accountIndex) !== null && _a !== void 0 ? _a : 0; - message.mint = (_b = object.mint) !== null && _b !== void 0 ? _b : ""; - message.uiTokenAmount = (object.uiTokenAmount !== undefined && object.uiTokenAmount !== null) - ? exports.UiTokenAmount.fromPartial(object.uiTokenAmount) - : undefined; - message.owner = (_c = object.owner) !== null && _c !== void 0 ? _c : ""; - message.programId = (_d = object.programId) !== null && _d !== void 0 ? 
_d : ""; - return message; - }, -}; -function createBaseUiTokenAmount() { - return { uiAmount: 0, decimals: 0, amount: "", uiAmountString: "" }; -} -exports.UiTokenAmount = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.uiAmount !== 0) { - writer.uint32(9).double(message.uiAmount); - } - if (message.decimals !== 0) { - writer.uint32(16).uint32(message.decimals); - } - if (message.amount !== "") { - writer.uint32(26).string(message.amount); - } - if (message.uiAmountString !== "") { - writer.uint32(34).string(message.uiAmountString); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseUiTokenAmount(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 9) { - break; - } - message.uiAmount = reader.double(); - continue; - } - case 2: { - if (tag !== 16) { - break; - } - message.decimals = reader.uint32(); - continue; - } - case 3: { - if (tag !== 26) { - break; - } - message.amount = reader.string(); - continue; - } - case 4: { - if (tag !== 34) { - break; - } - message.uiAmountString = reader.string(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - uiAmount: isSet(object.uiAmount) ? globalThis.Number(object.uiAmount) : 0, - decimals: isSet(object.decimals) ? globalThis.Number(object.decimals) : 0, - amount: isSet(object.amount) ? globalThis.String(object.amount) : "", - uiAmountString: isSet(object.uiAmountString) ? 
globalThis.String(object.uiAmountString) : "", - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.uiAmount !== 0) { - obj.uiAmount = message.uiAmount; - } - if (message.decimals !== 0) { - obj.decimals = Math.round(message.decimals); - } - if (message.amount !== "") { - obj.amount = message.amount; - } - if (message.uiAmountString !== "") { - obj.uiAmountString = message.uiAmountString; - } - return obj; - }, - create: function (base) { - return exports.UiTokenAmount.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c, _d; - var message = createBaseUiTokenAmount(); - message.uiAmount = (_a = object.uiAmount) !== null && _a !== void 0 ? _a : 0; - message.decimals = (_b = object.decimals) !== null && _b !== void 0 ? _b : 0; - message.amount = (_c = object.amount) !== null && _c !== void 0 ? _c : ""; - message.uiAmountString = (_d = object.uiAmountString) !== null && _d !== void 0 ? _d : ""; - return message; - }, -}; -function createBaseReturnData() { - return { programId: new Uint8Array(0), data: new Uint8Array(0) }; -} -exports.ReturnData = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.programId.length !== 0) { - writer.uint32(10).bytes(message.programId); - } - if (message.data.length !== 0) { - writer.uint32(18).bytes(message.data); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseReturnData(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.programId = reader.bytes(); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.data = reader.bytes(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - programId: isSet(object.programId) ? bytesFromBase64(object.programId) : new Uint8Array(0), - data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(0), - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.programId.length !== 0) { - obj.programId = base64FromBytes(message.programId); - } - if (message.data.length !== 0) { - obj.data = base64FromBytes(message.data); - } - return obj; - }, - create: function (base) { - return exports.ReturnData.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b; - var message = createBaseReturnData(); - message.programId = (_a = object.programId) !== null && _a !== void 0 ? _a : new Uint8Array(0); - message.data = (_b = object.data) !== null && _b !== void 0 ? 
_b : new Uint8Array(0); - return message; - }, -}; -function createBaseReward() { - return { pubkey: "", lamports: "0", postBalance: "0", rewardType: 0, commission: "" }; -} -exports.Reward = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.pubkey !== "") { - writer.uint32(10).string(message.pubkey); - } - if (message.lamports !== "0") { - writer.uint32(16).int64(message.lamports); - } - if (message.postBalance !== "0") { - writer.uint32(24).uint64(message.postBalance); - } - if (message.rewardType !== 0) { - writer.uint32(32).int32(message.rewardType); - } - if (message.commission !== "") { - writer.uint32(42).string(message.commission); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseReward(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.pubkey = reader.string(); - continue; - } - case 2: { - if (tag !== 16) { - break; - } - message.lamports = reader.int64().toString(); - continue; - } - case 3: { - if (tag !== 24) { - break; - } - message.postBalance = reader.uint64().toString(); - continue; - } - case 4: { - if (tag !== 32) { - break; - } - message.rewardType = reader.int32(); - continue; - } - case 5: { - if (tag !== 42) { - break; - } - message.commission = reader.string(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - pubkey: isSet(object.pubkey) ? globalThis.String(object.pubkey) : "", - lamports: isSet(object.lamports) ? globalThis.String(object.lamports) : "0", - postBalance: isSet(object.postBalance) ? 
globalThis.String(object.postBalance) : "0", - rewardType: isSet(object.rewardType) ? rewardTypeFromJSON(object.rewardType) : 0, - commission: isSet(object.commission) ? globalThis.String(object.commission) : "", - }; - }, - toJSON: function (message) { - var obj = {}; - if (message.pubkey !== "") { - obj.pubkey = message.pubkey; - } - if (message.lamports !== "0") { - obj.lamports = message.lamports; - } - if (message.postBalance !== "0") { - obj.postBalance = message.postBalance; - } - if (message.rewardType !== 0) { - obj.rewardType = rewardTypeToJSON(message.rewardType); - } - if (message.commission !== "") { - obj.commission = message.commission; - } - return obj; - }, - create: function (base) { - return exports.Reward.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a, _b, _c, _d, _e; - var message = createBaseReward(); - message.pubkey = (_a = object.pubkey) !== null && _a !== void 0 ? _a : ""; - message.lamports = (_b = object.lamports) !== null && _b !== void 0 ? _b : "0"; - message.postBalance = (_c = object.postBalance) !== null && _c !== void 0 ? _c : "0"; - message.rewardType = (_d = object.rewardType) !== null && _d !== void 0 ? _d : 0; - message.commission = (_e = object.commission) !== null && _e !== void 0 ? _e : ""; - return message; - }, -}; -function createBaseRewards() { - return { rewards: [], numPartitions: undefined }; -} -exports.Rewards = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - for (var _i = 0, _a = message.rewards; _i < _a.length; _i++) { - var v = _a[_i]; - exports.Reward.encode(v, writer.uint32(10).fork()).join(); - } - if (message.numPartitions !== undefined) { - exports.NumPartitions.encode(message.numPartitions, writer.uint32(18).fork()).join(); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? 
input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseRewards(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 10) { - break; - } - message.rewards.push(exports.Reward.decode(reader, reader.uint32())); - continue; - } - case 2: { - if (tag !== 18) { - break; - } - message.numPartitions = exports.NumPartitions.decode(reader, reader.uint32()); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { - rewards: globalThis.Array.isArray(object === null || object === void 0 ? void 0 : object.rewards) ? object.rewards.map(function (e) { return exports.Reward.fromJSON(e); }) : [], - numPartitions: isSet(object.numPartitions) ? exports.NumPartitions.fromJSON(object.numPartitions) : undefined, - }; - }, - toJSON: function (message) { - var _a; - var obj = {}; - if ((_a = message.rewards) === null || _a === void 0 ? void 0 : _a.length) { - obj.rewards = message.rewards.map(function (e) { return exports.Reward.toJSON(e); }); - } - if (message.numPartitions !== undefined) { - obj.numPartitions = exports.NumPartitions.toJSON(message.numPartitions); - } - return obj; - }, - create: function (base) { - return exports.Rewards.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseRewards(); - message.rewards = ((_a = object.rewards) === null || _a === void 0 ? void 0 : _a.map(function (e) { return exports.Reward.fromPartial(e); })) || []; - message.numPartitions = (object.numPartitions !== undefined && object.numPartitions !== null) - ? 
exports.NumPartitions.fromPartial(object.numPartitions) - : undefined; - return message; - }, -}; -function createBaseUnixTimestamp() { - return { timestamp: "0" }; -} -exports.UnixTimestamp = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.timestamp !== "0") { - writer.uint32(8).int64(message.timestamp); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseUnixTimestamp(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.timestamp = reader.int64().toString(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { timestamp: isSet(object.timestamp) ? globalThis.String(object.timestamp) : "0" }; - }, - toJSON: function (message) { - var obj = {}; - if (message.timestamp !== "0") { - obj.timestamp = message.timestamp; - } - return obj; - }, - create: function (base) { - return exports.UnixTimestamp.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseUnixTimestamp(); - message.timestamp = (_a = object.timestamp) !== null && _a !== void 0 ? _a : "0"; - return message; - }, -}; -function createBaseBlockHeight() { - return { blockHeight: "0" }; -} -exports.BlockHeight = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.blockHeight !== "0") { - writer.uint32(8).uint64(message.blockHeight); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? 
input : new wire_1.BinaryReader(input); - var end = length === undefined ? reader.len : reader.pos + length; - var message = createBaseBlockHeight(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.blockHeight = reader.uint64().toString(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { blockHeight: isSet(object.blockHeight) ? globalThis.String(object.blockHeight) : "0" }; - }, - toJSON: function (message) { - var obj = {}; - if (message.blockHeight !== "0") { - obj.blockHeight = message.blockHeight; - } - return obj; - }, - create: function (base) { - return exports.BlockHeight.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseBlockHeight(); - message.blockHeight = (_a = object.blockHeight) !== null && _a !== void 0 ? _a : "0"; - return message; - }, -}; -function createBaseNumPartitions() { - return { numPartitions: "0" }; -} -exports.NumPartitions = { - encode: function (message, writer) { - if (writer === void 0) { writer = new wire_1.BinaryWriter(); } - if (message.numPartitions !== "0") { - writer.uint32(8).uint64(message.numPartitions); - } - return writer; - }, - decode: function (input, length) { - var reader = input instanceof wire_1.BinaryReader ? input : new wire_1.BinaryReader(input); - var end = length === undefined ? 
reader.len : reader.pos + length; - var message = createBaseNumPartitions(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: { - if (tag !== 8) { - break; - } - message.numPartitions = reader.uint64().toString(); - continue; - } - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skip(tag & 7); - } - return message; - }, - fromJSON: function (object) { - return { numPartitions: isSet(object.numPartitions) ? globalThis.String(object.numPartitions) : "0" }; - }, - toJSON: function (message) { - var obj = {}; - if (message.numPartitions !== "0") { - obj.numPartitions = message.numPartitions; - } - return obj; - }, - create: function (base) { - return exports.NumPartitions.fromPartial(base !== null && base !== void 0 ? base : {}); - }, - fromPartial: function (object) { - var _a; - var message = createBaseNumPartitions(); - message.numPartitions = (_a = object.numPartitions) !== null && _a !== void 0 ? _a : "0"; - return message; - }, -}; -function bytesFromBase64(b64) { - if (globalThis.Buffer) { - return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); - } - else { - var bin = globalThis.atob(b64); - var arr = new Uint8Array(bin.length); - for (var i = 0; i < bin.length; ++i) { - arr[i] = bin.charCodeAt(i); - } - return arr; - } -} -function base64FromBytes(arr) { - if (globalThis.Buffer) { - return globalThis.Buffer.from(arr).toString("base64"); - } - else { - var bin_1 = []; - arr.forEach(function (byte) { - bin_1.push(globalThis.String.fromCharCode(byte)); - }); - return globalThis.btoa(bin_1.join("")); - } -} -function isSet(value) { - return value !== null && value !== undefined; -} diff --git a/typescript-sdk/src/index.js b/typescript-sdk/src/index.js deleted file mode 100644 index 3473f2f..0000000 --- a/typescript-sdk/src/index.js +++ /dev/null @@ -1,448 +0,0 @@ -"use strict"; -var __assign = (this && this.__assign) || function () { - __assign = Object.assign || function(t) { - for (var s, i = 1, n 
= arguments.length; i < n; i++) { - s = arguments[i]; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) - t[p] = s[p]; - } - return t; - }; - return __assign.apply(this, arguments); -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __generator = (this && this.__generator) || function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype); - return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (g && (g = 0, op[0] && (_ = 0)), _) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; - } -}; -var __asyncValues = (this && this.__asyncValues) || function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP = exports.DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT = exports.DEFAULT_COMMIT_INTERVAL = exports.DEFAULT_DRAGONSMOUTH_CAPACITY = exports.FumaroleConfig = exports.FumaroleClient = void 0; -var grpc_js_1 = require("@grpc/grpc-js"); -var config_1 = require("./config/config"); -Object.defineProperty(exports, "FumaroleConfig", { enumerable: true, get: function () { return config_1.FumaroleConfig; } }); -var connectivity_1 = require("./connectivity"); -var types_1 = require("./types"); -Object.defineProperty(exports, "DEFAULT_DRAGONSMOUTH_CAPACITY", { enumerable: true, get: function () { return types_1.DEFAULT_DRAGONSMOUTH_CAPACITY; } }); -Object.defineProperty(exports, "DEFAULT_COMMIT_INTERVAL", { enumerable: true, get: function () { return types_1.DEFAULT_COMMIT_INTERVAL; } }); -Object.defineProperty(exports, "DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT", { enumerable: true, get: function () { return types_1.DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT; } }); -Object.defineProperty(exports, "DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP", { enumerable: true, get: function () { return types_1.DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP; } }); -var FumaroleClient = /** @class */ (function () { - function FumaroleClient(connector, stub) { - this.connector = connector; - this.stub = stub; - } - FumaroleClient.connect = function (config) { - return __awaiter(this, void 0, void 0, function () { - var endpoint, connector, client, methods; - return 
__generator(this, function (_a) { - switch (_a.label) { - case 0: - endpoint = config.endpoint; - connector = new connectivity_1.FumaroleGrpcConnector(config, endpoint); - FumaroleClient.logger.debug("Connecting to ".concat(endpoint)); - FumaroleClient.logger.debug("Connection config:", { - endpoint: config.endpoint, - xToken: config.xToken ? "***" : "none", - maxDecodingMessageSizeBytes: config.maxDecodingMessageSizeBytes, - }); - return [4 /*yield*/, connector.connect()]; - case 1: - client = _a.sent(); - FumaroleClient.logger.debug("Connected to ".concat(endpoint, ", testing stub...")); - // Wait for client to be ready - return [4 /*yield*/, new Promise(function (resolve, reject) { - var deadline = new Date().getTime() + 5000; // 5 second timeout - client.waitForReady(deadline, function (error) { - if (error) { - FumaroleClient.logger.error("Client failed to become ready:", error); - reject(error); - } - else { - FumaroleClient.logger.debug("Client is ready"); - resolve(undefined); - } - }); - })]; - case 2: - // Wait for client to be ready - _a.sent(); - // Verify client methods - if (!client || typeof client.listConsumerGroups !== "function") { - methods = client - ? 
Object.getOwnPropertyNames(Object.getPrototypeOf(client)) - : []; - FumaroleClient.logger.error("Available methods:", methods); - throw new Error("gRPC client or listConsumerGroups method not available"); - } - FumaroleClient.logger.debug("gRPC client initialized successfully"); - return [2 /*return*/, new FumaroleClient(connector, client)]; - } - }); - }); - }; - FumaroleClient.prototype.version = function () { - return __awaiter(this, void 0, void 0, function () { - var request; - var _this = this; - return __generator(this, function (_a) { - FumaroleClient.logger.debug("Sending version request"); - request = {}; - return [2 /*return*/, new Promise(function (resolve, reject) { - _this.stub.version(request, function (error, response) { - if (error) { - FumaroleClient.logger.error("Version request failed:", error); - reject(error); - } - else { - FumaroleClient.logger.debug("Version response:", response); - resolve(response); - } - }); - })]; - }); - }); - }; - FumaroleClient.prototype.dragonsmouthSubscribe = function (consumerGroupName, request) { - return __awaiter(this, void 0, void 0, function () { - return __generator(this, function (_a) { - return [2 /*return*/, this.dragonsmouthSubscribeWithConfig(consumerGroupName, request, {})]; - }); - }); - }; - FumaroleClient.prototype.dragonsmouthSubscribeWithConfig = function (consumerGroupName, request, config) { - return __awaiter(this, void 0, void 0, function () { - var finalConfig, dragonsmouthOutlet, fumeControlPlaneQ, initialJoin, initialJoinCommand, controlPlaneStream, subscribeRequestQueue, fumeControlPlaneRxQ, controlPlaneSourceTask, controlResponse, init, lastCommittedOffsetStr, lastCommittedOffset, dataPlaneClient, runtimeTask; - var _this = this; - var _a; - return __generator(this, function (_b) { - switch (_b.label) { - case 0: - finalConfig = __assign({ concurrentDownloadLimit: types_1.DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP, commitInterval: types_1.DEFAULT_COMMIT_INTERVAL, 
maxFailedSlotDownloadAttempt: types_1.DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT, dataChannelCapacity: types_1.DEFAULT_DRAGONSMOUTH_CAPACITY, gcInterval: types_1.DEFAULT_GC_INTERVAL, slotMemoryRetention: types_1.DEFAULT_SLOT_MEMORY_RETENTION }, config); - dragonsmouthOutlet = new types_1.AsyncQueue(finalConfig.dataChannelCapacity); - fumeControlPlaneQ = new types_1.AsyncQueue(100); - initialJoin = { consumerGroupName: consumerGroupName }; - initialJoinCommand = { initialJoin: initialJoin }; - return [4 /*yield*/, fumeControlPlaneQ.put(initialJoinCommand)]; - case 1: - _b.sent(); - FumaroleClient.logger.debug("Sent initial join command: ".concat(JSON.stringify(initialJoinCommand))); - controlPlaneStream = this.stub.subscribe(); - subscribeRequestQueue = new types_1.AsyncQueue(100); - fumeControlPlaneRxQ = new types_1.AsyncQueue(100); - controlPlaneSourceTask = (function () { return __awaiter(_this, void 0, void 0, function () { - var _a, controlPlaneStream_1, controlPlaneStream_1_1, update, e_1_1, error_1; - var _b, e_1, _c, _d; - return __generator(this, function (_e) { - switch (_e.label) { - case 0: - _e.trys.push([0, 14, , 15]); - _e.label = 1; - case 1: - _e.trys.push([1, 7, 8, 13]); - _a = true, controlPlaneStream_1 = __asyncValues(controlPlaneStream); - _e.label = 2; - case 2: return [4 /*yield*/, controlPlaneStream_1.next()]; - case 3: - if (!(controlPlaneStream_1_1 = _e.sent(), _b = controlPlaneStream_1_1.done, !_b)) return [3 /*break*/, 6]; - _d = controlPlaneStream_1_1.value; - _a = false; - update = _d; - return [4 /*yield*/, fumeControlPlaneRxQ.put(update)]; - case 4: - _e.sent(); - _e.label = 5; - case 5: - _a = true; - return [3 /*break*/, 2]; - case 6: return [3 /*break*/, 13]; - case 7: - e_1_1 = _e.sent(); - e_1 = { error: e_1_1 }; - return [3 /*break*/, 13]; - case 8: - _e.trys.push([8, , 11, 12]); - if (!(!_a && !_b && (_c = controlPlaneStream_1.return))) return [3 /*break*/, 10]; - return [4 /*yield*/, _c.call(controlPlaneStream_1)]; - case 9: - 
_e.sent(); - _e.label = 10; - case 10: return [3 /*break*/, 12]; - case 11: - if (e_1) throw e_1.error; - return [7 /*endfinally*/]; - case 12: return [7 /*endfinally*/]; - case 13: return [3 /*break*/, 15]; - case 14: - error_1 = _e.sent(); - if (error_1.code !== "CANCELLED") { - throw error_1; - } - return [3 /*break*/, 15]; - case 15: return [2 /*return*/]; - } - }); - }); })(); - return [4 /*yield*/, fumeControlPlaneRxQ.get()]; - case 2: - controlResponse = (_b.sent()); - init = controlResponse.init; - if (!init) { - throw new Error("Unexpected initial response: ".concat(JSON.stringify(controlResponse))); - } - FumaroleClient.logger.debug("Control response: ".concat(JSON.stringify(controlResponse))); - lastCommittedOffsetStr = (_a = init.lastCommittedOffsets) === null || _a === void 0 ? void 0 : _a[0]; - if (!lastCommittedOffsetStr) { - throw new Error("No last committed offset"); - } - lastCommittedOffset = BigInt(lastCommittedOffsetStr); - return [4 /*yield*/, this.connector.connect()]; - case 3: - dataPlaneClient = _b.sent(); - runtimeTask = this.startRuntime(subscribeRequestQueue, fumeControlPlaneQ, fumeControlPlaneRxQ, dragonsmouthOutlet, request, consumerGroupName, lastCommittedOffset, finalConfig, dataPlaneClient); - FumaroleClient.logger.debug("Fumarole handle created: ".concat(runtimeTask)); - return [2 /*return*/, { - sink: subscribeRequestQueue, - source: dragonsmouthOutlet, - fumaroleHandle: runtimeTask, - }]; - } - }); - }); - }; - FumaroleClient.prototype.startRuntime = function (subscribeRequestQueue, controlPlaneTxQ, controlPlaneRxQ, dragonsmouthOutlet, request, consumerGroupName, lastCommittedOffset, config, dataPlaneClient) { - return __awaiter(this, void 0, void 0, function () { - return __generator(this, function (_a) { - // Implementation of runtime task here - // This would be equivalent to AsyncioFumeDragonsmouthRuntime in Python - // For brevity, this is a placeholder implementation - return [2 /*return*/, Promise.resolve()]; - }); - }); 
- }; - FumaroleClient.prototype.listConsumerGroups = function () { - return __awaiter(this, void 0, void 0, function () { - var request, metadata; - var _this = this; - return __generator(this, function (_a) { - if (!this.stub) { - throw new Error("gRPC stub not initialized"); - } - if (!this.stub.listConsumerGroups) { - throw new Error("listConsumerGroups method not available on stub"); - } - FumaroleClient.logger.debug("Preparing listConsumerGroups request"); - request = {}; - metadata = new grpc_js_1.Metadata(); - return [2 /*return*/, new Promise(function (resolve, reject) { - var hasResponded = false; - var timeout = setTimeout(function () { - if (!hasResponded) { - FumaroleClient.logger.error("ListConsumerGroups timeout after 30s"); - if (call) { - try { - call.cancel(); - } - catch (e) { - FumaroleClient.logger.error("Error cancelling call:", e); - } - } - reject(new Error("gRPC call timed out after 30 seconds")); - } - }, 30000); // 30 second timeout - var call; - try { - FumaroleClient.logger.debug("Starting gRPC listConsumerGroups call"); - call = _this.stub.listConsumerGroups(request, metadata, { - deadline: Date.now() + 30000, // 30 second deadline - }, function (error, response) { - var _a; - hasResponded = true; - clearTimeout(timeout); - if (error) { - var errorDetails = { - code: error.code, - details: error.details, - metadata: (_a = error.metadata) === null || _a === void 0 ? 
void 0 : _a.getMap(), - stack: error.stack, - message: error.message, - name: error.name, - }; - FumaroleClient.logger.error("ListConsumerGroups error:", errorDetails); - reject(error); - } - else { - FumaroleClient.logger.debug("ListConsumerGroups success - Response:", JSON.stringify(response, null, 2)); - resolve(response); - } - }); - // Monitor call state - if (call) { - call.on("metadata", function (metadata) { - FumaroleClient.logger.debug("Received metadata:", metadata.getMap()); - }); - call.on("status", function (status) { - FumaroleClient.logger.debug("Call status:", status); - }); - call.on("error", function (error) { - FumaroleClient.logger.error("Call stream error:", error); - if (!hasResponded) { - hasResponded = true; - clearTimeout(timeout); - reject(error); - } - }); - } - else { - FumaroleClient.logger.error("Failed to create gRPC call object"); - hasResponded = true; - clearTimeout(timeout); - reject(new Error("Failed to create gRPC call")); - } - } - catch (setupError) { - hasResponded = true; - clearTimeout(timeout); - FumaroleClient.logger.error("Error setting up gRPC call:", setupError); - reject(setupError); - } - })]; - }); - }); - }; - FumaroleClient.prototype.getConsumerGroupInfo = function (consumerGroupName) { - return __awaiter(this, void 0, void 0, function () { - var request; - var _this = this; - return __generator(this, function (_a) { - FumaroleClient.logger.debug("Sending getConsumerGroupInfo request:", consumerGroupName); - request = { consumerGroupName: consumerGroupName }; - return [2 /*return*/, new Promise(function (resolve, reject) { - _this.stub.getConsumerGroupInfo(request, function (error, response) { - if (error) { - if (error.code === 14) { - // grpc.status.NOT_FOUND - FumaroleClient.logger.debug("Consumer group not found:", consumerGroupName); - resolve(null); - } - else { - FumaroleClient.logger.error("GetConsumerGroupInfo error:", error); - reject(error); - } - } - else { - 
FumaroleClient.logger.debug("GetConsumerGroupInfo response:", response); - resolve(response); - } - }); - })]; - }); - }); - }; - FumaroleClient.prototype.deleteConsumerGroup = function (consumerGroupName) { - return __awaiter(this, void 0, void 0, function () { - var request; - var _this = this; - return __generator(this, function (_a) { - FumaroleClient.logger.debug("Sending deleteConsumerGroup request:", consumerGroupName); - request = { consumerGroupName: consumerGroupName }; - return [2 /*return*/, new Promise(function (resolve, reject) { - _this.stub.deleteConsumerGroup(request, function (error, response) { - if (error) { - FumaroleClient.logger.error("DeleteConsumerGroup error:", error); - reject(error); - } - else { - FumaroleClient.logger.debug("DeleteConsumerGroup response:", response); - resolve(response); - } - }); - })]; - }); - }); - }; - FumaroleClient.prototype.deleteAllConsumerGroups = function () { - return __awaiter(this, void 0, void 0, function () { - var response, deletePromises, results, failures; - var _this = this; - return __generator(this, function (_a) { - switch (_a.label) { - case 0: return [4 /*yield*/, this.listConsumerGroups()]; - case 1: - response = _a.sent(); - deletePromises = response.consumerGroups.map(function (group) { - return _this.deleteConsumerGroup(group.consumerGroupName); - }); - return [4 /*yield*/, Promise.all(deletePromises)]; - case 2: - results = _a.sent(); - failures = results.filter(function (result) { return !result.success; }); - if (failures.length > 0) { - throw new Error("Failed to delete some consumer groups: ".concat(JSON.stringify(failures))); - } - return [2 /*return*/]; - } - }); - }); - }; - FumaroleClient.prototype.createConsumerGroup = function (request) { - return __awaiter(this, void 0, void 0, function () { - var _this = this; - return __generator(this, function (_a) { - FumaroleClient.logger.debug("Sending createConsumerGroup request:", request); - return [2 /*return*/, new Promise(function 
(resolve, reject) { - _this.stub.createConsumerGroup(request, function (error, response) { - if (error) { - FumaroleClient.logger.error("CreateConsumerGroup error:", error); - reject(error); - } - else { - FumaroleClient.logger.debug("CreateConsumerGroup response:", response); - resolve(response); - } - }); - })]; - }); - }); - }; - FumaroleClient.logger = console; - return FumaroleClient; -}()); -exports.FumaroleClient = FumaroleClient; diff --git a/typescript-sdk/src/index.ts b/typescript-sdk/src/index.ts index c7c55af..d751ef3 100644 --- a/typescript-sdk/src/index.ts +++ b/typescript-sdk/src/index.ts @@ -144,7 +144,7 @@ export class FumaroleClient { for await (const update of controlPlaneStream) { await fumeControlPlaneRxQ.put(update); } - } catch (error) { + } catch (error: any) { if (error.code !== "CANCELLED") { throw error; } diff --git a/typescript-sdk/src/runtime/aio.js b/typescript-sdk/src/runtime/aio.js deleted file mode 100644 index 7bced3d..0000000 --- a/typescript-sdk/src/runtime/aio.js +++ /dev/null @@ -1,593 +0,0 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __generator = (this && this.__generator) || function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? 
Iterator : Object).prototype); - return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (g && (g = 0, op[0] && (_ = 0)), _) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? 
op[1] : void 0, done: true }; - } -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.GrpcDownloadBlockTaskRun = exports.GrpcSlotDownloader = exports.AsyncioFumeDragonsmouthRuntime = exports.DEFAULT_SLOT_MEMORY_RETENTION = exports.DEFAULT_GC_INTERVAL = void 0; -var grpc_js_1 = require("@grpc/grpc-js"); -var aio_1 = require("../utils/aio"); -// Constants -exports.DEFAULT_GC_INTERVAL = 5; -exports.DEFAULT_SLOT_MEMORY_RETENTION = 10000; -var LOGGER = console; -var AsyncioFumeDragonsmouthRuntime = /** @class */ (function () { - function AsyncioFumeDragonsmouthRuntime(sm, slotDownloader, subscribeRequestUpdateQ, subscribeRequest, consumerGroupName, controlPlaneTxQ, controlPlaneRxQ, dragonsmouthOutlet, commitInterval, gcInterval, maxConcurrentDownload) { - if (maxConcurrentDownload === void 0) { maxConcurrentDownload = 10; } - this.sm = sm; - this.slotDownloader = slotDownloader; - this.subscribeRequestUpdateQ = subscribeRequestUpdateQ; - this.subscribeRequest = subscribeRequest; - this.consumerGroupName = consumerGroupName; - this.controlPlaneTx = controlPlaneTxQ; - this.controlPlaneRx = controlPlaneRxQ; - this.dragonsmouthOutlet = dragonsmouthOutlet; - this.commitInterval = commitInterval; - this.gcInterval = gcInterval; - this.maxConcurrentDownload = maxConcurrentDownload; - this.downloadTasks = new Map(); - this.lastCommit = Date.now(); - } - AsyncioFumeDragonsmouthRuntime.prototype.buildPollHistoryCmd = function (fromOffset) { - return { pollHist: { shardId: 0 } }; - }; - AsyncioFumeDragonsmouthRuntime.prototype.buildCommitOffsetCmd = function (offset) { - return { commitOffset: { offset: offset, shardId: 0 } }; - }; - AsyncioFumeDragonsmouthRuntime.prototype.handleControlResponse = function (controlResponse) { - var _a; - // Get first defined property from controlResponse - var responseField = Object.keys(controlResponse).find(function (key) { return controlResponse[key] !== undefined && key !== "response"; }); - if (!responseField) { - 
throw new Error("Control response is empty"); - } - switch (responseField) { - case "pollHist": { - var pollHist = controlResponse.pollHist; - LOGGER.debug("Received poll history ".concat((_a = pollHist.events) === null || _a === void 0 ? void 0 : _a.length, " events")); - // Convert string slots to numbers and map commitment levels - var convertedEvents = (pollHist.events || []).map(function (event) { return ({ - offset: event.offset, - slot: Number(event.slot), - parentSlot: event.parentSlot ? Number(event.parentSlot) : undefined, - commitmentLevel: event.commitmentLevel, - deadError: event.deadError, - blockchainId: event.blockchainId, - blockUid: event.blockUid, - numShards: Number(event.numShards), - }); }); - this.sm.queueBlockchainEvent(convertedEvents); - break; - } - case "commitOffset": { - var commitOffset = controlResponse.commitOffset; - LOGGER.debug("Received commit offset: ".concat(commitOffset)); - this.sm.updateCommittedOffset(commitOffset.offset); - break; - } - case "pong": - LOGGER.debug("Received pong"); - break; - default: - throw new Error("Unexpected control response"); - } - }; - AsyncioFumeDragonsmouthRuntime.prototype.pollHistoryIfNeeded = function () { - return __awaiter(this, void 0, void 0, function () { - var cmd; - return __generator(this, function (_a) { - switch (_a.label) { - case 0: - if (!this.sm.needNewBlockchainEvents()) return [3 /*break*/, 2]; - cmd = this.buildPollHistoryCmd(this.sm.committableOffset); - return [4 /*yield*/, this.controlPlaneTx.put(cmd)]; - case 1: - _a.sent(); - _a.label = 2; - case 2: return [2 /*return*/]; - } - }); - }); - }; - AsyncioFumeDragonsmouthRuntime.prototype.commitmentLevel = function () { - return this.subscribeRequest.commitment || 0; - }; - AsyncioFumeDragonsmouthRuntime.prototype.scheduleDownloadTaskIfAny = function () { - return __awaiter(this, void 0, void 0, function () { - var downloadRequest, downloadTaskArgs, downloadPromise; - return __generator(this, function (_a) { - switch 
(_a.label) { - case 0: - if (!true) return [3 /*break*/, 2]; - LOGGER.debug("Checking for download tasks to schedule"); - if (this.downloadTasks.size >= this.maxConcurrentDownload) { - return [3 /*break*/, 2]; - } - LOGGER.debug("Popping slot to download"); - return [4 /*yield*/, this.sm.popSlotToDownload(this.commitmentLevel())]; - case 1: - downloadRequest = _a.sent(); - if (!downloadRequest) { - LOGGER.debug("No download request available"); - return [3 /*break*/, 2]; - } - LOGGER.debug("Download request for slot ".concat(downloadRequest.slot, " popped")); - if (!downloadRequest.blockchainId) { - throw new Error("Download request must have a blockchain ID"); - } - downloadTaskArgs = { - downloadRequest: downloadRequest, - dragonsmouthOutlet: this.dragonsmouthOutlet, - }; - downloadPromise = this.slotDownloader.runDownload(this.subscribeRequest, downloadTaskArgs); - this.downloadTasks.set(downloadPromise, downloadRequest); - LOGGER.debug("Scheduling download task for slot ".concat(downloadRequest.slot)); - return [3 /*break*/, 0]; - case 2: return [2 /*return*/]; - } - }); - }); - }; - AsyncioFumeDragonsmouthRuntime.prototype.handleDownloadResult = function (downloadResult) { - if (downloadResult.kind === "Ok") { - var completed = downloadResult.completed; - LOGGER.debug("Download completed for slot ".concat(completed.slot, ", shard ").concat(completed.shardIdx, ", ").concat(completed.totalEventDownloaded, " total events")); - this.sm.makeSlotDownloadProgress(completed.slot, completed.shardIdx); - } - else { - var slot = downloadResult.slot; - var err = downloadResult.err; - throw new Error("Failed to download slot ".concat(slot, ": ").concat(err.message)); - } - }; - AsyncioFumeDragonsmouthRuntime.prototype.forceCommitOffset = function () { - return __awaiter(this, void 0, void 0, function () { - return __generator(this, function (_a) { - switch (_a.label) { - case 0: - LOGGER.debug("Force committing offset ".concat(this.sm.committableOffset)); - return [4 
/*yield*/, this.controlPlaneTx.put(this.buildCommitOffsetCmd(this.sm.committableOffset))]; - case 1: - _a.sent(); - return [2 /*return*/]; - } - }); - }); - }; - AsyncioFumeDragonsmouthRuntime.prototype.commitOffset = function () { - return __awaiter(this, void 0, void 0, function () { - return __generator(this, function (_a) { - switch (_a.label) { - case 0: - if (!(this.sm.lastCommittedOffset < this.sm.committableOffset)) return [3 /*break*/, 2]; - LOGGER.debug("Committing offset ".concat(this.sm.committableOffset)); - return [4 /*yield*/, this.forceCommitOffset()]; - case 1: - _a.sent(); - _a.label = 2; - case 2: - this.lastCommit = Date.now(); - return [2 /*return*/]; - } - }); - }); - }; - AsyncioFumeDragonsmouthRuntime.prototype.drainSlotStatus = function () { - return __awaiter(this, void 0, void 0, function () { - var commitment, slotStatusVec, slotStatus, _i, slotStatusVec_1, slotStatus, matchedFilters, _a, _b, _c, filterName, filter, update, error_1; - return __generator(this, function (_d) { - switch (_d.label) { - case 0: - commitment = this.subscribeRequest.commitment || 0; - slotStatusVec = []; - while (true) { - slotStatus = this.sm.popNextSlotStatus(); - if (!slotStatus) - break; - slotStatusVec.push(slotStatus); - } - if (!slotStatusVec.length) - return [2 /*return*/]; - LOGGER.debug("Draining ".concat(slotStatusVec.length, " slot status")); - _i = 0, slotStatusVec_1 = slotStatusVec; - _d.label = 1; - case 1: - if (!(_i < slotStatusVec_1.length)) return [3 /*break*/, 7]; - slotStatus = slotStatusVec_1[_i]; - matchedFilters = []; - for (_a = 0, _b = Object.entries(this.subscribeRequest.slots || {}); _a < _b.length; _a++) { - _c = _b[_a], filterName = _c[0], filter = _c[1]; - if (filter.filterByCommitment && - slotStatus.commitmentLevel === commitment) { - matchedFilters.push(filterName); - } - else if (!filter.filterByCommitment) { - matchedFilters.push(filterName); - } - } - if (!matchedFilters.length) return [3 /*break*/, 5]; - update = { - 
filters: matchedFilters, - createdAt: undefined, - slot: { - slot: slotStatus.slot, - parent: slotStatus.parentSlot, - status: slotStatus.commitmentLevel, - deadError: slotStatus.deadError, - }, - }; - _d.label = 2; - case 2: - _d.trys.push([2, 4, , 5]); - return [4 /*yield*/, this.dragonsmouthOutlet.put(update)]; - case 3: - _d.sent(); - return [3 /*break*/, 5]; - case 4: - error_1 = _d.sent(); - if (error_1.message === "Queue full") - return [2 /*return*/]; - throw error_1; - case 5: - this.sm.markEventAsProcessed(slotStatus.sessionSequence); - _d.label = 6; - case 6: - _i++; - return [3 /*break*/, 1]; - case 7: return [2 /*return*/]; - } - }); - }); - }; - AsyncioFumeDragonsmouthRuntime.prototype.handleControlPlaneResp = function (result) { - return __awaiter(this, void 0, void 0, function () { - var errorUpdate; - return __generator(this, function (_a) { - switch (_a.label) { - case 0: - if (!(result instanceof Error)) return [3 /*break*/, 2]; - errorUpdate = { - filters: [], - createdAt: undefined, - slot: { - slot: "0", - parent: "0", - status: 0, // Using 0 as default status for error case - deadError: result.message, - }, - }; - return [4 /*yield*/, this.dragonsmouthOutlet.put(errorUpdate)]; - case 1: - _a.sent(); - LOGGER.error("Control plane error: ".concat(result.message)); - return [2 /*return*/, false]; - case 2: - this.handleControlResponse(result); - return [2 /*return*/, true]; - } - }); - }); - }; - AsyncioFumeDragonsmouthRuntime.prototype.handleNewSubscribeRequest = function (subscribeRequest) { - this.subscribeRequest = subscribeRequest; - }; - AsyncioFumeDragonsmouthRuntime.prototype.run = function () { - return __awaiter(this, void 0, void 0, function () { - var ticks, taskMap, downloadTasks, _i, downloadTasks_1, task, downloadTaskInFlight, promises, done, taskName, _a, result, newTask, newTask, newTask; - return __generator(this, function (_b) { - switch (_b.label) { - case 0: - LOGGER.debug("Fumarole runtime starting..."); - return [4 
/*yield*/, this.controlPlaneTx.put(this.buildPollHistoryCmd())]; - case 1: - _b.sent(); - LOGGER.debug("Initial poll history command sent"); - return [4 /*yield*/, this.forceCommitOffset()]; - case 2: - _b.sent(); - LOGGER.debug("Initial commit offset command sent"); - ticks = 0; - taskMap = new Map(); - // Initial tasks - taskMap.set(this.subscribeRequestUpdateQ.get(), "dragonsmouth_bidi"); - taskMap.set(this.controlPlaneRx.get(), "control_plane_rx"); - taskMap.set(new aio_1.Interval(this.commitInterval).tick(), "commit_tick"); - _b.label = 3; - case 3: - if (!(taskMap.size > 0)) return [3 /*break*/, 16]; - ticks++; - LOGGER.debug("Runtime loop tick"); - if (ticks % this.gcInterval === 0) { - LOGGER.debug("Running garbage collection"); - this.sm.gc(); - ticks = 0; - } - LOGGER.debug("Polling history if needed"); - return [4 /*yield*/, this.pollHistoryIfNeeded()]; - case 4: - _b.sent(); - LOGGER.debug("Scheduling download tasks if any"); - return [4 /*yield*/, this.scheduleDownloadTaskIfAny()]; - case 5: - _b.sent(); - downloadTasks = Array.from(this.downloadTasks.keys()); - for (_i = 0, downloadTasks_1 = downloadTasks; _i < downloadTasks_1.length; _i++) { - task = downloadTasks_1[_i]; - taskMap.set(task, "download_task"); - } - downloadTaskInFlight = this.downloadTasks.size; - LOGGER.debug("Current download tasks in flight: ".concat(downloadTaskInFlight, " / ").concat(this.maxConcurrentDownload)); - promises = Array.from(taskMap.keys()); - return [4 /*yield*/, Promise.race(promises.map(function (p) { return p.then(function (result) { return ({ promise: p, result: result }); }); }))]; - case 6: - done = _b.sent(); - taskName = taskMap.get(done.promise); - taskMap.delete(done.promise); - _a = taskName; - switch (_a) { - case "dragonsmouth_bidi": return [3 /*break*/, 7]; - case "control_plane_rx": return [3 /*break*/, 8]; - case "download_task": return [3 /*break*/, 10]; - case "commit_tick": return [3 /*break*/, 11]; - } - return [3 /*break*/, 13]; - case 7: - { - 
LOGGER.debug("Dragonsmouth subscribe request received"); - result = done.result; - this.handleNewSubscribeRequest(result); - newTask = this.subscribeRequestUpdateQ.get(); - taskMap.set(newTask, "dragonsmouth_bidi"); - return [3 /*break*/, 14]; - } - _b.label = 8; - case 8: - LOGGER.debug("Control plane response received"); - return [4 /*yield*/, this.handleControlPlaneResp(done.result)]; - case 9: - if (!(_b.sent())) { - LOGGER.debug("Control plane error"); - return [2 /*return*/]; - } - newTask = this.controlPlaneRx.get(); - taskMap.set(newTask, "control_plane_rx"); - return [3 /*break*/, 14]; - case 10: - { - LOGGER.debug("Download task result received"); - this.downloadTasks.delete(done.promise); - this.handleDownloadResult(done.result); - return [3 /*break*/, 14]; - } - _b.label = 11; - case 11: - LOGGER.debug("Commit tick reached"); - return [4 /*yield*/, this.commitOffset()]; - case 12: - _b.sent(); - newTask = new aio_1.Interval(this.commitInterval).tick(); - taskMap.set(newTask, "commit_tick"); - return [3 /*break*/, 14]; - case 13: throw new Error("Unexpected task name: ".concat(taskName)); - case 14: return [4 /*yield*/, this.drainSlotStatus()]; - case 15: - _b.sent(); - return [3 /*break*/, 3]; - case 16: - LOGGER.debug("Fumarole runtime exiting"); - return [2 /*return*/]; - } - }); - }); - }; - return AsyncioFumeDragonsmouthRuntime; -}()); -exports.AsyncioFumeDragonsmouthRuntime = AsyncioFumeDragonsmouthRuntime; -var GrpcSlotDownloader = /** @class */ (function () { - function GrpcSlotDownloader(client) { - this.client = client; - } - GrpcSlotDownloader.prototype.runDownload = function (subscribeRequest, spec) { - return __awaiter(this, void 0, void 0, function () { - var downloadTask; - return __generator(this, function (_a) { - switch (_a.label) { - case 0: - downloadTask = new GrpcDownloadBlockTaskRun(spec.downloadRequest, this.client, { - accounts: subscribeRequest.accounts, - transactions: subscribeRequest.transactions, - entries: 
subscribeRequest.entry, - blocksMeta: subscribeRequest.blocksMeta, - }, spec.dragonsmouthOutlet); - LOGGER.debug("Running download task for slot ".concat(spec.downloadRequest.slot)); - return [4 /*yield*/, downloadTask.run()]; - case 1: return [2 /*return*/, _a.sent()]; - } - }); - }); - }; - return GrpcSlotDownloader; -}()); -exports.GrpcSlotDownloader = GrpcSlotDownloader; -var GrpcDownloadBlockTaskRun = /** @class */ (function () { - function GrpcDownloadBlockTaskRun(downloadRequest, client, filters, dragonsmouthOutlet) { - this.downloadRequest = downloadRequest; - this.client = client; - this.filters = filters; - this.dragonsmouthOutlet = dragonsmouthOutlet; - } - GrpcDownloadBlockTaskRun.prototype.mapTonicErrorCodeToDownloadBlockError = function (error) { - switch (error.code) { - case grpc_js_1.status.NOT_FOUND: - return { - kind: "BlockShardNotFound", - message: "Block shard not found", - }; - case grpc_js_1.status.UNAVAILABLE: - return { - kind: "Disconnected", - message: "Disconnected", - }; - case grpc_js_1.status.INTERNAL: - case grpc_js_1.status.ABORTED: - case grpc_js_1.status.DATA_LOSS: - case grpc_js_1.status.RESOURCE_EXHAUSTED: - case grpc_js_1.status.UNKNOWN: - case grpc_js_1.status.CANCELLED: - case grpc_js_1.status.DEADLINE_EXCEEDED: - return { - kind: "FailedDownload", - message: "Failed download", - }; - case grpc_js_1.status.INVALID_ARGUMENT: - throw new Error("Invalid argument"); - default: - return { - kind: "Fatal", - message: "Unknown error: ".concat(error.code), - }; - } - }; - GrpcDownloadBlockTaskRun.prototype.run = function () { - return __awaiter(this, void 0, void 0, function () { - var request, totalEventDownloaded_1, stream_1; - var _this = this; - return __generator(this, function (_a) { - request = { - blockchainId: this.downloadRequest.blockchainId, - blockUid: this.downloadRequest.blockUid, - shardIdx: 0, - blockFilters: this.filters, - }; - try { - LOGGER.debug("Requesting download for block 
".concat(Buffer.from(this.downloadRequest.blockUid).toString("hex"), " at slot ").concat(this.downloadRequest.slot)); - totalEventDownloaded_1 = 0; - stream_1 = this.client.downloadBlock(request); - return [2 /*return*/, new Promise(function (resolve, reject) { - stream_1.on("data", function (data) { return __awaiter(_this, void 0, void 0, function () { - var kind, _a, update, error_2; - return __generator(this, function (_b) { - switch (_b.label) { - case 0: - kind = Object.keys(data).find(function (k) { return data[k] !== undefined && k !== "response"; }); - if (!kind) - return [2 /*return*/]; - _a = kind; - switch (_a) { - case "update": return [3 /*break*/, 1]; - case "blockShardDownloadFinish": return [3 /*break*/, 6]; - } - return [3 /*break*/, 7]; - case 1: - update = data.update; - if (!update) - throw new Error("Update is null"); - totalEventDownloaded_1++; - _b.label = 2; - case 2: - _b.trys.push([2, 4, , 5]); - return [4 /*yield*/, this.dragonsmouthOutlet.put(update)]; - case 3: - _b.sent(); - return [3 /*break*/, 5]; - case 4: - error_2 = _b.sent(); - if (error_2.message === "Queue shutdown") { - LOGGER.error("Dragonsmouth outlet is disconnected"); - resolve({ - kind: "Err", - slot: this.downloadRequest.slot, - err: { - kind: "OutletDisconnected", - message: "Outlet disconnected", - }, - }); - } - return [3 /*break*/, 5]; - case 5: return [3 /*break*/, 8]; - case 6: - LOGGER.debug("Download finished for block ".concat(Buffer.from(this.downloadRequest.blockUid).toString("hex"), " at slot ").concat(this.downloadRequest.slot)); - resolve({ - kind: "Ok", - completed: { - slot: this.downloadRequest.slot, - blockUid: this.downloadRequest.blockUid, - shardIdx: 0, - totalEventDownloaded: totalEventDownloaded_1, - }, - }); - return [3 /*break*/, 8]; - case 7: - reject(new Error("Unexpected response kind: ".concat(kind))); - _b.label = 8; - case 8: return [2 /*return*/]; - } - }); - }); }); - stream_1.on("error", function (error) { - LOGGER.error("Download block 
error: ".concat(error)); - resolve({ - kind: "Err", - slot: _this.downloadRequest.slot, - err: _this.mapTonicErrorCodeToDownloadBlockError(error), - }); - }); - stream_1.on("end", function () { - resolve({ - kind: "Err", - slot: _this.downloadRequest.slot, - err: { - kind: "FailedDownload", - message: "Failed download", - }, - }); - }); - })]; - } - catch (error) { - LOGGER.error("Download block error: ".concat(error)); - return [2 /*return*/, { - kind: "Err", - slot: this.downloadRequest.slot, - err: this.mapTonicErrorCodeToDownloadBlockError(error), - }]; - } - return [2 /*return*/]; - }); - }); - }; - return GrpcDownloadBlockTaskRun; -}()); -exports.GrpcDownloadBlockTaskRun = GrpcDownloadBlockTaskRun; diff --git a/typescript-sdk/src/runtime/aio.ts b/typescript-sdk/src/runtime/aio.ts index a241918..b1fb175 100644 --- a/typescript-sdk/src/runtime/aio.ts +++ b/typescript-sdk/src/runtime/aio.ts @@ -119,7 +119,12 @@ export class AsyncioFumeDragonsmouthRuntime { private handleControlResponse(controlResponse: ControlResponse): void { // Get first defined property from controlResponse const responseField = Object.keys(controlResponse).find( - (key) => controlResponse[key] !== undefined && key !== "response" + (key): key is keyof ControlResponse => { + return ( + controlResponse[key as keyof ControlResponse] !== undefined && + key !== "response" + ); + } ); if (!responseField) { @@ -276,7 +281,7 @@ export class AsyncioFumeDragonsmouthRuntime { try { await this.dragonsmouthOutlet.put(update); - } catch (error) { + } catch (error: any) { if (error.message === "Queue full") return; throw error; } @@ -535,7 +540,7 @@ export class GrpcDownloadBlockTaskRun { totalEventDownloaded++; try { await this.dragonsmouthOutlet.put(update); - } catch (error) { + } catch (error: any) { if (error.message === "Queue shutdown") { LOGGER.error("Dragonsmouth outlet is disconnected"); resolve({ diff --git a/typescript-sdk/src/runtime/queue.js b/typescript-sdk/src/runtime/queue.js deleted file 
mode 100644 index e35acb5..0000000 --- a/typescript-sdk/src/runtime/queue.js +++ /dev/null @@ -1,125 +0,0 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __generator = (this && this.__generator) || function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype); - return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (g && (g = 0, op[0] && (_ = 0)), _) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; - } -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Queue = void 0; -var Queue = /** @class */ (function () { - function Queue(maxSize) { - if (maxSize === void 0) { maxSize = Infinity; } - this.items = []; - this.closed = false; - this.maxSize = maxSize; - } - Queue.prototype.put = function (item) { - return __awaiter(this, void 0, void 0, function () { - return __generator(this, function (_a) { - if (this.closed) { - throw new Error("Queue shutdown"); - } - if (this.items.length >= this.maxSize) { - throw new Error("Queue full"); - } - this.items.push(item); - return [2 /*return*/]; - }); - }); - }; - Queue.prototype.get = function () { - return __awaiter(this, void 0, void 0, function () { - return __generator(this, function (_a) { - switch (_a.label) { - case 0: - if (this.closed && this.items.length === 0) { - throw new Error("Queue shutdown"); - } - _a.label = 1; - case 1: - if (!(this.items.length === 0)) return [3 /*break*/, 3]; - return [4 /*yield*/, new Promise(function (resolve) { return 
setTimeout(resolve, 10); })]; - case 2: - _a.sent(); - return [3 /*break*/, 1]; - case 3: return [2 /*return*/, this.items.shift()]; - } - }); - }); - }; - Queue.prototype.isEmpty = function () { - return this.items.length === 0; - }; - Queue.prototype.isFull = function () { - return this.items.length >= this.maxSize; - }; - Queue.prototype.size = function () { - return this.items.length; - }; - Queue.prototype.close = function () { - this.closed = true; - }; - Queue.prototype[Symbol.asyncIterator] = function () { - var _this = this; - return { - next: function () { return __awaiter(_this, void 0, void 0, function () { - var value, error_1; - return __generator(this, function (_a) { - switch (_a.label) { - case 0: - if (this.closed && this.isEmpty()) { - return [2 /*return*/, { done: true, value: undefined }]; - } - _a.label = 1; - case 1: - _a.trys.push([1, 3, , 4]); - return [4 /*yield*/, this.get()]; - case 2: - value = _a.sent(); - return [2 /*return*/, { done: false, value: value }]; - case 3: - error_1 = _a.sent(); - if (error_1.message === "Queue shutdown") { - return [2 /*return*/, { done: true, value: undefined }]; - } - throw error_1; - case 4: return [2 /*return*/]; - } - }); - }); }, - }; - }; - return Queue; -}()); -exports.Queue = Queue; diff --git a/typescript-sdk/src/runtime/queue.ts b/typescript-sdk/src/runtime/queue.ts index 48fc2f9..1d7cc8f 100644 --- a/typescript-sdk/src/runtime/queue.ts +++ b/typescript-sdk/src/runtime/queue.ts @@ -58,7 +58,7 @@ export class Queue { try { const value = await this.get(); return { done: false, value }; - } catch (error) { + } catch (error: any) { if (error.message === "Queue shutdown") { return { done: true, value: undefined }; } diff --git a/typescript-sdk/src/runtime/state-machine.js b/typescript-sdk/src/runtime/state-machine.js deleted file mode 100644 index 22be87d..0000000 --- a/typescript-sdk/src/runtime/state-machine.js +++ /dev/null @@ -1,369 +0,0 @@ -"use strict"; -var __awaiter = (this && 
this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __generator = (this && this.__generator) || function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype); - return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (g && (g = 0, op[0] && (_ = 0)), _) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; - } -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.FumaroleSM = exports.SlotDownloadState = exports.SlotDownloadProgress = exports.SlotCommitmentProgression = exports.FumeSlotStatus = exports.FumeDownloadRequest = exports.CommitmentLevel = exports.DEFAULT_SLOT_MEMORY_RETENTION = void 0; -var queue_1 = require("./queue"); -// Constants -exports.DEFAULT_SLOT_MEMORY_RETENTION = 10000; -// Solana commitment levels -var CommitmentLevel; -(function (CommitmentLevel) { - CommitmentLevel[CommitmentLevel["PROCESSED"] = 0] = "PROCESSED"; - CommitmentLevel[CommitmentLevel["CONFIRMED"] = 1] = "CONFIRMED"; - CommitmentLevel[CommitmentLevel["FINALIZED"] = 2] = "FINALIZED"; -})(CommitmentLevel || (exports.CommitmentLevel = CommitmentLevel = {})); -// Data structures -var FumeDownloadRequest = /** @class */ (function () { - function FumeDownloadRequest(slot, blockchainId, blockUid, numShards, commitmentLevel) { - this.slot = slot; - this.blockchainId = blockchainId; - this.blockUid = blockUid; - this.numShards = numShards; - 
this.commitmentLevel = commitmentLevel; - } - return FumeDownloadRequest; -}()); -exports.FumeDownloadRequest = FumeDownloadRequest; -var FumeSlotStatus = /** @class */ (function () { - function FumeSlotStatus(sessionSequence, offset, slot, parentSlot, commitmentLevel, deadError) { - this.sessionSequence = sessionSequence; - this.offset = offset; - this.slot = slot; - this.parentSlot = parentSlot; - this.commitmentLevel = commitmentLevel; - this.deadError = deadError; - } - return FumeSlotStatus; -}()); -exports.FumeSlotStatus = FumeSlotStatus; -var SlotCommitmentProgression = /** @class */ (function () { - function SlotCommitmentProgression() { - this.processedCommitmentLevels = new Set(); - } - SlotCommitmentProgression.prototype.hasProcessedCommitment = function (level) { - return this.processedCommitmentLevels.has(level); - }; - SlotCommitmentProgression.prototype.addProcessedCommitment = function (level) { - this.processedCommitmentLevels.add(level); - }; - return SlotCommitmentProgression; -}()); -exports.SlotCommitmentProgression = SlotCommitmentProgression; -var SlotDownloadProgress = /** @class */ (function () { - function SlotDownloadProgress(numShards) { - this.numShards = numShards; - this.shardRemaining = new Array(numShards).fill(false); - } - SlotDownloadProgress.prototype.doProgress = function (shardIdx) { - this.shardRemaining[shardIdx % this.numShards] = true; - return this.shardRemaining.every(function (x) { return x; }) - ? 
SlotDownloadState.Done - : SlotDownloadState.Downloading; - }; - return SlotDownloadProgress; -}()); -exports.SlotDownloadProgress = SlotDownloadProgress; -var SlotDownloadState; -(function (SlotDownloadState) { - SlotDownloadState["Downloading"] = "Downloading"; - SlotDownloadState["Done"] = "Done"; -})(SlotDownloadState || (exports.SlotDownloadState = SlotDownloadState = {})); -var FumaroleSM = /** @class */ (function () { - function FumaroleSM(lastCommittedOffset, slotMemoryRetention) { - this.slotMemoryRetention = slotMemoryRetention; - this.slotCommitmentProgression = new Map(); - this.downloadedSlot = new Set(); - this.inflightSlotShardDownload = new Map(); - this.blockedSlotStatusUpdate = new Map(); - this.slotStatusUpdateQueue = new queue_1.Queue(); - this.processedOffset = []; // Min-heap for (sequence, offset) - this.maxSlotDetected = 0; - this.unprocessedBlockchainEvent = new queue_1.Queue(); - this.sequence = 1; - this.lastProcessedFumeSequence = 0; - this.sequenceToOffset = new Map(); - this._lastCommittedOffset = lastCommittedOffset; - this._committableOffset = lastCommittedOffset; - } - Object.defineProperty(FumaroleSM.prototype, "lastCommittedOffset", { - get: function () { - return this._lastCommittedOffset; - }, - enumerable: false, - configurable: true - }); - Object.defineProperty(FumaroleSM.prototype, "committableOffset", { - get: function () { - return this._committableOffset; - }, - enumerable: false, - configurable: true - }); - FumaroleSM.prototype.updateCommittedOffset = function (offset) { - if (BigInt(offset) < BigInt(this._lastCommittedOffset)) { - throw new Error("Offset must be >= last committed offset"); - } - this._lastCommittedOffset = offset; - }; - FumaroleSM.prototype.nextSequence = function () { - var ret = this.sequence; - this.sequence += 1; - return ret; - }; - FumaroleSM.prototype.gc = function () { - while (this.downloadedSlot.size > this.slotMemoryRetention) { - // Get the first slot (oldest) from the set - var slot = 
this.downloadedSlot.values().next().value; - if (!slot) - break; - this.downloadedSlot.delete(slot); - this.slotCommitmentProgression.delete(slot); - this.inflightSlotShardDownload.delete(slot); - this.blockedSlotStatusUpdate.delete(slot); - } - }; - FumaroleSM.prototype.queueBlockchainEvent = function (events) { - return __awaiter(this, void 0, void 0, function () { - var _i, events_1, event_1, sequence, fumeStatus, blockedQueue; - return __generator(this, function (_a) { - switch (_a.label) { - case 0: - _i = 0, events_1 = events; - _a.label = 1; - case 1: - if (!(_i < events_1.length)) return [3 /*break*/, 9]; - event_1 = events_1[_i]; - if (BigInt(event_1.offset) < BigInt(this._lastCommittedOffset)) { - return [3 /*break*/, 8]; - } - if (event_1.slot > this.maxSlotDetected) { - this.maxSlotDetected = event_1.slot; - } - sequence = this.nextSequence(); - this.sequenceToOffset.set(sequence, event_1.offset); - if (!this.downloadedSlot.has(event_1.slot)) return [3 /*break*/, 6]; - fumeStatus = new FumeSlotStatus(sequence, event_1.offset, event_1.slot, event_1.parentSlot, event_1.commitmentLevel, event_1.deadError); - if (!this.inflightSlotShardDownload.has(event_1.slot)) return [3 /*break*/, 3]; - blockedQueue = this.blockedSlotStatusUpdate.get(event_1.slot); - if (!blockedQueue) { - blockedQueue = new queue_1.Queue(); - this.blockedSlotStatusUpdate.set(event_1.slot, blockedQueue); - } - return [4 /*yield*/, blockedQueue.put(fumeStatus)]; - case 2: - _a.sent(); - return [3 /*break*/, 5]; - case 3: return [4 /*yield*/, this.slotStatusUpdateQueue.put(fumeStatus)]; - case 4: - _a.sent(); - _a.label = 5; - case 5: return [3 /*break*/, 8]; - case 6: return [4 /*yield*/, this.unprocessedBlockchainEvent.put([sequence, event_1])]; - case 7: - _a.sent(); - _a.label = 8; - case 8: - _i++; - return [3 /*break*/, 1]; - case 9: return [2 /*return*/]; - } - }); - }); - }; - FumaroleSM.prototype.makeSlotDownloadProgress = function (slot, shardIdx) { - return __awaiter(this, void 
0, void 0, function () { - var downloadProgress, downloadState, blockedStatuses, status_1; - return __generator(this, function (_a) { - switch (_a.label) { - case 0: - downloadProgress = this.inflightSlotShardDownload.get(slot); - if (!downloadProgress) { - throw new Error("Slot not in download"); - } - downloadState = downloadProgress.doProgress(shardIdx); - if (!(downloadState === SlotDownloadState.Done)) return [3 /*break*/, 6]; - this.inflightSlotShardDownload.delete(slot); - this.downloadedSlot.add(slot); - if (!this.slotCommitmentProgression.has(slot)) { - this.slotCommitmentProgression.set(slot, new SlotCommitmentProgression()); - } - blockedStatuses = this.blockedSlotStatusUpdate.get(slot); - if (!blockedStatuses) return [3 /*break*/, 6]; - _a.label = 1; - case 1: - if (!!blockedStatuses.isEmpty()) return [3 /*break*/, 5]; - return [4 /*yield*/, blockedStatuses.get()]; - case 2: - status_1 = _a.sent(); - if (!status_1) return [3 /*break*/, 4]; - return [4 /*yield*/, this.slotStatusUpdateQueue.put(status_1)]; - case 3: - _a.sent(); - _a.label = 4; - case 4: return [3 /*break*/, 1]; - case 5: - this.blockedSlotStatusUpdate.delete(slot); - _a.label = 6; - case 6: return [2 /*return*/, downloadState]; - } - }); - }); - }; - FumaroleSM.prototype.popNextSlotStatus = function () { - return __awaiter(this, void 0, void 0, function () { - var slotStatus, commitmentHistory; - return __generator(this, function (_a) { - switch (_a.label) { - case 0: - if (!!this.slotStatusUpdateQueue.isEmpty()) return [3 /*break*/, 2]; - return [4 /*yield*/, this.slotStatusUpdateQueue.get()]; - case 1: - slotStatus = _a.sent(); - if (!slotStatus) - return [3 /*break*/, 0]; - commitmentHistory = this.slotCommitmentProgression.get(slotStatus.slot); - if (commitmentHistory && - !commitmentHistory.hasProcessedCommitment(slotStatus.commitmentLevel)) { - commitmentHistory.addProcessedCommitment(slotStatus.commitmentLevel); - return [2 /*return*/, slotStatus]; - } - else if 
(!commitmentHistory) { - throw new Error("Slot status should not be available here"); - } - return [3 /*break*/, 0]; - case 2: return [2 /*return*/, null]; - } - }); - }); - }; - FumaroleSM.prototype.makeSureSlotCommitmentProgressionExists = function (slot) { - var progression = this.slotCommitmentProgression.get(slot); - if (!progression) { - progression = new SlotCommitmentProgression(); - this.slotCommitmentProgression.set(slot, progression); - } - return progression; - }; - FumaroleSM.prototype.popSlotToDownload = function () { - return __awaiter(this, arguments, void 0, function (commitment) { - var eventPair, sessionSequence, blockchainEvent, eventCl, progression, blockchainId, blockUid, downloadRequest, downloadProgress, blockedQueue; - if (commitment === void 0) { commitment = CommitmentLevel.PROCESSED; } - return __generator(this, function (_a) { - switch (_a.label) { - case 0: - if (!!this.unprocessedBlockchainEvent.isEmpty()) return [3 /*break*/, 8]; - return [4 /*yield*/, this.unprocessedBlockchainEvent.get()]; - case 1: - eventPair = _a.sent(); - if (!eventPair) - return [3 /*break*/, 0]; - sessionSequence = eventPair[0], blockchainEvent = eventPair[1]; - eventCl = blockchainEvent.commitmentLevel; - if (!(eventCl < commitment)) return [3 /*break*/, 3]; - return [4 /*yield*/, this.slotStatusUpdateQueue.put(new FumeSlotStatus(sessionSequence, blockchainEvent.offset, blockchainEvent.slot, blockchainEvent.parentSlot, eventCl, blockchainEvent.deadError))]; - case 2: - _a.sent(); - this.makeSureSlotCommitmentProgressionExists(blockchainEvent.slot); - return [3 /*break*/, 0]; - case 3: - if (!this.downloadedSlot.has(blockchainEvent.slot)) return [3 /*break*/, 5]; - this.makeSureSlotCommitmentProgressionExists(blockchainEvent.slot); - progression = this.slotCommitmentProgression.get(blockchainEvent.slot); - if (progression && progression.hasProcessedCommitment(eventCl)) { - this.markEventAsProcessed(sessionSequence); - return [3 /*break*/, 0]; - } - return [4 
/*yield*/, this.slotStatusUpdateQueue.put(new FumeSlotStatus(sessionSequence, blockchainEvent.offset, blockchainEvent.slot, blockchainEvent.parentSlot, eventCl, blockchainEvent.deadError))]; - case 4: - _a.sent(); - return [3 /*break*/, 7]; - case 5: - blockchainId = new Uint8Array(blockchainEvent.blockchainId); - blockUid = new Uint8Array(blockchainEvent.blockUid); - if (!!this.inflightSlotShardDownload.has(blockchainEvent.slot)) return [3 /*break*/, 7]; - downloadRequest = new FumeDownloadRequest(blockchainEvent.slot, blockchainId, blockUid, blockchainEvent.numShards, eventCl); - downloadProgress = new SlotDownloadProgress(blockchainEvent.numShards); - this.inflightSlotShardDownload.set(blockchainEvent.slot, downloadProgress); - blockedQueue = this.blockedSlotStatusUpdate.get(blockchainEvent.slot); - if (!blockedQueue) { - blockedQueue = new queue_1.Queue(); - this.blockedSlotStatusUpdate.set(blockchainEvent.slot, blockedQueue); - } - return [4 /*yield*/, blockedQueue.put(new FumeSlotStatus(sessionSequence, blockchainEvent.offset, blockchainEvent.slot, blockchainEvent.parentSlot, eventCl, blockchainEvent.deadError))]; - case 6: - _a.sent(); - return [2 /*return*/, downloadRequest]; - case 7: return [3 /*break*/, 0]; - case 8: return [2 /*return*/, null]; - } - }); - }); - }; - FumaroleSM.prototype.markEventAsProcessed = function (eventSeqNumber) { - var fumeOffset = this.sequenceToOffset.get(eventSeqNumber); - if (!fumeOffset) { - throw new Error("Event sequence number not found"); - } - this.sequenceToOffset.delete(eventSeqNumber); - // Use negative values for the min-heap (to simulate max-heap behavior) - this.processedOffset.push([-eventSeqNumber, fumeOffset]); - this.processedOffset.sort(function (a, b) { return a[0] - b[0]; }); // Keep sorted as a min-heap - while (this.processedOffset.length > 0) { - var _a = this.processedOffset[0], seq = _a[0], offset = _a[1]; - var positiveSeq = -seq; // Convert back to positive - if (positiveSeq !== 
this.lastProcessedFumeSequence + 1) { - break; - } - this.processedOffset.shift(); - this._committableOffset = offset; - this.lastProcessedFumeSequence = positiveSeq; - } - }; - FumaroleSM.prototype.slotStatusUpdateQueueLen = function () { - return this.slotStatusUpdateQueue.size(); - }; - FumaroleSM.prototype.processedOffsetQueueLen = function () { - return this.processedOffset.length; - }; - FumaroleSM.prototype.needNewBlockchainEvents = function () { - return (this.slotStatusUpdateQueue.isEmpty() && - this.blockedSlotStatusUpdate.size === 0); - }; - return FumaroleSM; -}()); -exports.FumaroleSM = FumaroleSM; diff --git a/typescript-sdk/src/types.js b/typescript-sdk/src/types.js deleted file mode 100644 index 43a657a..0000000 --- a/typescript-sdk/src/types.js +++ /dev/null @@ -1,111 +0,0 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __generator = (this && this.__generator) || function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? 
Iterator : Object).prototype); - return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (g && (g = 0, op[0] && (_ = 0)), _) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? 
op[1] : void 0, done: true }; - } -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.AsyncQueue = exports.DEFAULT_SLOT_MEMORY_RETENTION = exports.DEFAULT_GC_INTERVAL = exports.DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP = exports.DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT = exports.DEFAULT_COMMIT_INTERVAL = exports.DEFAULT_DRAGONSMOUTH_CAPACITY = void 0; -// Constants -exports.DEFAULT_DRAGONSMOUTH_CAPACITY = 10000; -exports.DEFAULT_COMMIT_INTERVAL = 5.0; // seconds -exports.DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT = 3; -exports.DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP = 10; -exports.DEFAULT_GC_INTERVAL = 60; // seconds -exports.DEFAULT_SLOT_MEMORY_RETENTION = 300; // seconds -// Generic async queue interface to mimic Python's asyncio.Queue -var AsyncQueue = /** @class */ (function () { - function AsyncQueue(maxSize) { - if (maxSize === void 0) { maxSize = 0; } - this.queue = []; - this.resolvers = []; - this.full_resolvers = []; - this.closed = false; - this.maxSize = maxSize; - } - AsyncQueue.prototype.put = function (item) { - return __awaiter(this, void 0, void 0, function () { - var resolver; - var _this = this; - return __generator(this, function (_a) { - if (this.closed) { - throw new Error("Queue is closed"); - } - if (this.maxSize > 0 && this.queue.length >= this.maxSize) { - return [2 /*return*/, new Promise(function (resolve) { - _this.full_resolvers.push(resolve); - })]; - } - this.queue.push(item); - resolver = this.resolvers.shift(); - if (resolver) { - resolver(this.queue.shift()); - } - return [2 /*return*/]; - }); - }); - }; - AsyncQueue.prototype.get = function () { - return __awaiter(this, void 0, void 0, function () { - var item, full_resolver; - var _this = this; - return __generator(this, function (_a) { - if (this.closed && this.queue.length === 0) { - throw new Error("Queue is closed"); - } - if (this.queue.length === 0) { - return [2 /*return*/, new Promise(function (resolve) { - _this.resolvers.push(resolve); - })]; - } - item = 
this.queue.shift(); - full_resolver = this.full_resolvers.shift(); - if (full_resolver) { - full_resolver(); - } - return [2 /*return*/, item]; - }); - }); - }; - AsyncQueue.prototype.close = function () { - this.closed = true; - // Resolve all pending gets with an error - this.resolvers.forEach(function (resolve) { - resolve(undefined); - }); - this.resolvers = []; - }; - return AsyncQueue; -}()); -exports.AsyncQueue = AsyncQueue; diff --git a/typescript-sdk/src/utils/aio.js b/typescript-sdk/src/utils/aio.js deleted file mode 100644 index bf9d66a..0000000 --- a/typescript-sdk/src/utils/aio.js +++ /dev/null @@ -1,126 +0,0 @@ -"use strict"; -/** - * Asynchronous utilities for TypeScript - */ -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __generator = (this && this.__generator) || function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? 
Iterator : Object).prototype); - return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (g && (g = 0, op[0] && (_ = 0)), _) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; - } -}; -var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) { - if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { - if (ar || !(i in from)) { - if (!ar) ar = Array.prototype.slice.call(from, 0, i); - ar[i] = from[i]; - } - } - return to.concat(ar || Array.prototype.slice.call(from)); -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.logger = exports.Interval = void 0; -exports.never = never; -/** - * Create a forever pending promise. This promise is not resolved and will never be resolved. 
- * This is useful for testing purposes. - * @returns A promise that never resolves - */ -function never() { - return __awaiter(this, void 0, void 0, function () { - return __generator(this, function (_a) { - return [2 /*return*/, new Promise(function () { - // This promise intentionally never resolves - })]; - }); - }); -} -/** - * A class that represents an interval that can be used to run async operations periodically - */ -var Interval = /** @class */ (function () { - /** - * Create an interval that will run every `interval` seconds. - * @param interval The interval in seconds - */ - function Interval(interval) { - this.interval = interval; - } - /** - * Wait for the interval duration - * @returns A promise that resolves after the interval duration - */ - Interval.prototype.tick = function () { - return __awaiter(this, void 0, void 0, function () { - var _this = this; - return __generator(this, function (_a) { - // Convert seconds to milliseconds for setTimeout - return [2 /*return*/, new Promise(function (resolve) { return setTimeout(resolve, _this.interval * 1000); })]; - }); - }); - }; - return Interval; -}()); -exports.Interval = Interval; -/** - * Helper functions and utilities for logging - */ -exports.logger = { - debug: function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - return console.debug.apply(console, __spreadArray(["[DEBUG]"], args, false)); - }, - info: function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - return console.info.apply(console, __spreadArray(["[INFO]"], args, false)); - }, - warn: function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - return console.warn.apply(console, __spreadArray(["[WARN]"], args, false)); - }, - error: function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - return 
console.error.apply(console, __spreadArray(["[ERROR]"], args, false)); - }, -}; diff --git a/typescript-sdk/tsconfig.cjs.json b/typescript-sdk/tsconfig.cjs.json index e69de29..8ab4792 100644 --- a/typescript-sdk/tsconfig.cjs.json +++ b/typescript-sdk/tsconfig.cjs.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "module": "CommonJS", + "outDir": "./dist/cjs" + } +} diff --git a/typescript-sdk/tsconfig.esm.json b/typescript-sdk/tsconfig.esm.json index e69de29..c571fac 100644 --- a/typescript-sdk/tsconfig.esm.json +++ b/typescript-sdk/tsconfig.esm.json @@ -0,0 +1,6 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "outDir": "./dist/esm" + } +} diff --git a/typescript-sdk/tsconfig.json b/typescript-sdk/tsconfig.json index e69de29..489f30c 100644 --- a/typescript-sdk/tsconfig.json +++ b/typescript-sdk/tsconfig.json @@ -0,0 +1,20 @@ +{ + "compilerOptions": { + "target": "ES2020", + "module": "ES2020", + "lib": ["ES2020"], + "declaration": true, + "declarationDir": "./dist/types", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "moduleResolution": "node", + "resolveJsonModule": true, + "isolatedModules": true, + "outDir": "./dist/esm", + "rootDir": "./src" + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist"] +} From 3909dbc446a06d5a519392099ad6b46da6d6da79 Mon Sep 17 00:00:00 2001 From: Wilfred Almeida <60785452+WilfredAlmeida@users.noreply.github.com> Date: Sat, 16 Aug 2025 07:51:43 +0000 Subject: [PATCH 46/56] refactor: handle uint64 and bigint Signed-off-by: GitHub --- typescript-sdk/package.json | 2 +- typescript-sdk/src/grpc/connectivity.ts | 0 typescript-sdk/src/grpc/fumarole.ts | 173 +++-- typescript-sdk/src/grpc/geyser.ts | 693 ++++++++++-------- .../src/grpc/google/protobuf/timestamp.ts | 21 +- typescript-sdk/src/grpc/solana-storage.ts | 189 +++-- typescript-sdk/src/runtime/state-machine.ts | 47 +- 7 files changed, 645 insertions(+), 
480 deletions(-) delete mode 100644 typescript-sdk/src/grpc/connectivity.ts diff --git a/typescript-sdk/package.json b/typescript-sdk/package.json index d2ebe8c..ee1de75 100644 --- a/typescript-sdk/package.json +++ b/typescript-sdk/package.json @@ -12,7 +12,7 @@ "clean": "rm -rf dist", "prebuild": "npm run clean", "build": "npm run grpc-generate && tsc --project tsconfig.esm.json && tsc --project tsconfig.cjs.json && node add-js-extensions.mjs", - "grpc-generate": "mkdir -p src/grpc && protoc -I../yellowstone-grpc/yellowstone-grpc-proto/proto -I../proto --plugin=node_modules/.bin/protoc-gen-ts_proto --ts_proto_opt=forceLong=string --ts_proto_opt=outputServices=grpc-js --experimental_allow_proto3_optional --ts_proto_out=src/grpc fumarole.proto --ts_proto_opt=esModuleInterop=true" + "grpc-generate": "mkdir -p src/grpc && protoc -I../yellowstone-grpc/yellowstone-grpc-proto/proto -I../proto --plugin=node_modules/.bin/protoc-gen-ts_proto --ts_proto_opt=forceLong=bigint --ts_proto_opt=outputServices=grpc-js --experimental_allow_proto3_optional --ts_proto_out=src/grpc fumarole.proto --ts_proto_opt=esModuleInterop=true" }, "repository": { "type": "git", diff --git a/typescript-sdk/src/grpc/connectivity.ts b/typescript-sdk/src/grpc/connectivity.ts deleted file mode 100644 index e69de29..0000000 diff --git a/typescript-sdk/src/grpc/fumarole.ts b/typescript-sdk/src/grpc/fumarole.ts index da5e989..4c941d5 100644 --- a/typescript-sdk/src/grpc/fumarole.ts +++ b/typescript-sdk/src/grpc/fumarole.ts @@ -69,12 +69,12 @@ export interface GetChainTipRequest { export interface GetChainTipResponse { blockchainId: Uint8Array; - shardToMaxOffsetMap: { [key: number]: string }; + shardToMaxOffsetMap: { [key: number]: bigint }; } export interface GetChainTipResponse_ShardToMaxOffsetMapEntry { key: number; - value: string; + value: bigint; } export interface VersionRequest { @@ -180,23 +180,23 @@ export interface DataResponse { } export interface CommitOffset { - offset: string; + offset: 
bigint; shardId: number; } export interface PollBlockchainHistory { shardId: number; - from?: string | undefined; - limit?: string | undefined; + from?: bigint | undefined; + limit?: bigint | undefined; } export interface BlockchainEvent { - offset: string; + offset: bigint; blockchainId: Uint8Array; blockUid: Uint8Array; numShards: number; - slot: string; - parentSlot?: string | undefined; + slot: bigint; + parentSlot?: bigint | undefined; commitmentLevel: CommitmentLevel; blockchainShardId: number; deadError?: string | undefined; @@ -225,18 +225,18 @@ export interface ControlResponse { } export interface CommitOffsetResult { - offset: string; + offset: bigint; shardId: number; } export interface InitialConsumerGroupState { blockchainId: Uint8Array; - lastCommittedOffsets: { [key: number]: string }; + lastCommittedOffsets: { [key: number]: bigint }; } export interface InitialConsumerGroupState_LastCommittedOffsetsEntry { key: number; - value: string; + value: bigint; } export interface CreateConsumerGroupResponse { @@ -361,8 +361,8 @@ export const GetChainTipResponse: MessageFns = { return { blockchainId: isSet(object.blockchainId) ? bytesFromBase64(object.blockchainId) : new Uint8Array(0), shardToMaxOffsetMap: isObject(object.shardToMaxOffsetMap) - ? Object.entries(object.shardToMaxOffsetMap).reduce<{ [key: number]: string }>((acc, [key, value]) => { - acc[globalThis.Number(key)] = String(value); + ? 
Object.entries(object.shardToMaxOffsetMap).reduce<{ [key: number]: bigint }>((acc, [key, value]) => { + acc[globalThis.Number(key)] = BigInt(value as string | number | bigint | boolean); return acc; }, {}) : {}, @@ -379,7 +379,7 @@ export const GetChainTipResponse: MessageFns = { if (entries.length > 0) { obj.shardToMaxOffsetMap = {}; entries.forEach(([k, v]) => { - obj.shardToMaxOffsetMap[k] = v; + obj.shardToMaxOffsetMap[k] = v.toString(); }); } } @@ -392,10 +392,10 @@ export const GetChainTipResponse: MessageFns = { fromPartial, I>>(object: I): GetChainTipResponse { const message = createBaseGetChainTipResponse(); message.blockchainId = object.blockchainId ?? new Uint8Array(0); - message.shardToMaxOffsetMap = Object.entries(object.shardToMaxOffsetMap ?? {}).reduce<{ [key: number]: string }>( + message.shardToMaxOffsetMap = Object.entries(object.shardToMaxOffsetMap ?? {}).reduce<{ [key: number]: bigint }>( (acc, [key, value]) => { if (value !== undefined) { - acc[globalThis.Number(key)] = globalThis.String(value); + acc[globalThis.Number(key)] = BigInt(value as string | number | bigint | boolean); } return acc; }, @@ -406,7 +406,7 @@ export const GetChainTipResponse: MessageFns = { }; function createBaseGetChainTipResponse_ShardToMaxOffsetMapEntry(): GetChainTipResponse_ShardToMaxOffsetMapEntry { - return { key: 0, value: "0" }; + return { key: 0, value: 0n }; } export const GetChainTipResponse_ShardToMaxOffsetMapEntry: MessageFns = { @@ -417,7 +417,10 @@ export const GetChainTipResponse_ShardToMaxOffsetMapEntry: MessageFns = { }; function createBaseCommitOffset(): CommitOffset { - return { offset: "0", shardId: 0 }; + return { offset: 0n, shardId: 0 }; } export const CommitOffset: MessageFns = { encode(message: CommitOffset, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { - if (message.offset !== "0") { + if (message.offset !== 0n) { + if (BigInt.asIntN(64, message.offset) !== message.offset) { + throw new globalThis.Error("value provided for field 
message.offset of type int64 too large"); + } writer.uint32(8).int64(message.offset); } if (message.shardId !== 0) { @@ -2164,7 +2170,7 @@ export const CommitOffset: MessageFns = { break; } - message.offset = reader.int64().toString(); + message.offset = reader.int64() as bigint; continue; } case 2: { @@ -2186,15 +2192,15 @@ export const CommitOffset: MessageFns = { fromJSON(object: any): CommitOffset { return { - offset: isSet(object.offset) ? globalThis.String(object.offset) : "0", + offset: isSet(object.offset) ? BigInt(object.offset) : 0n, shardId: isSet(object.shardId) ? globalThis.Number(object.shardId) : 0, }; }, toJSON(message: CommitOffset): unknown { const obj: any = {}; - if (message.offset !== "0") { - obj.offset = message.offset; + if (message.offset !== 0n) { + obj.offset = message.offset.toString(); } if (message.shardId !== 0) { obj.shardId = Math.round(message.shardId); @@ -2207,7 +2213,7 @@ export const CommitOffset: MessageFns = { }, fromPartial, I>>(object: I): CommitOffset { const message = createBaseCommitOffset(); - message.offset = object.offset ?? "0"; + message.offset = object.offset ?? 0n; message.shardId = object.shardId ?? 
0; return message; }, @@ -2223,9 +2229,15 @@ export const PollBlockchainHistory: MessageFns = { writer.uint32(8).int32(message.shardId); } if (message.from !== undefined) { + if (BigInt.asIntN(64, message.from) !== message.from) { + throw new globalThis.Error("value provided for field message.from of type int64 too large"); + } writer.uint32(16).int64(message.from); } if (message.limit !== undefined) { + if (BigInt.asIntN(64, message.limit) !== message.limit) { + throw new globalThis.Error("value provided for field message.limit of type int64 too large"); + } writer.uint32(24).int64(message.limit); } return writer; @@ -2251,7 +2263,7 @@ export const PollBlockchainHistory: MessageFns = { break; } - message.from = reader.int64().toString(); + message.from = reader.int64() as bigint; continue; } case 3: { @@ -2259,7 +2271,7 @@ export const PollBlockchainHistory: MessageFns = { break; } - message.limit = reader.int64().toString(); + message.limit = reader.int64() as bigint; continue; } } @@ -2274,8 +2286,8 @@ export const PollBlockchainHistory: MessageFns = { fromJSON(object: any): PollBlockchainHistory { return { shardId: isSet(object.shardId) ? globalThis.Number(object.shardId) : 0, - from: isSet(object.from) ? globalThis.String(object.from) : undefined, - limit: isSet(object.limit) ? globalThis.String(object.limit) : undefined, + from: isSet(object.from) ? BigInt(object.from) : undefined, + limit: isSet(object.limit) ? 
BigInt(object.limit) : undefined, }; }, @@ -2285,10 +2297,10 @@ export const PollBlockchainHistory: MessageFns = { obj.shardId = Math.round(message.shardId); } if (message.from !== undefined) { - obj.from = message.from; + obj.from = message.from.toString(); } if (message.limit !== undefined) { - obj.limit = message.limit; + obj.limit = message.limit.toString(); } return obj; }, @@ -2307,11 +2319,11 @@ export const PollBlockchainHistory: MessageFns = { function createBaseBlockchainEvent(): BlockchainEvent { return { - offset: "0", + offset: 0n, blockchainId: new Uint8Array(0), blockUid: new Uint8Array(0), numShards: 0, - slot: "0", + slot: 0n, parentSlot: undefined, commitmentLevel: 0, blockchainShardId: 0, @@ -2321,7 +2333,10 @@ function createBaseBlockchainEvent(): BlockchainEvent { export const BlockchainEvent: MessageFns = { encode(message: BlockchainEvent, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { - if (message.offset !== "0") { + if (message.offset !== 0n) { + if (BigInt.asIntN(64, message.offset) !== message.offset) { + throw new globalThis.Error("value provided for field message.offset of type int64 too large"); + } writer.uint32(8).int64(message.offset); } if (message.blockchainId.length !== 0) { @@ -2333,10 +2348,16 @@ export const BlockchainEvent: MessageFns = { if (message.numShards !== 0) { writer.uint32(32).uint32(message.numShards); } - if (message.slot !== "0") { + if (message.slot !== 0n) { + if (BigInt.asUintN(64, message.slot) !== message.slot) { + throw new globalThis.Error("value provided for field message.slot of type uint64 too large"); + } writer.uint32(40).uint64(message.slot); } if (message.parentSlot !== undefined) { + if (BigInt.asUintN(64, message.parentSlot) !== message.parentSlot) { + throw new globalThis.Error("value provided for field message.parentSlot of type uint64 too large"); + } writer.uint32(48).uint64(message.parentSlot); } if (message.commitmentLevel !== 0) { @@ -2363,7 +2384,7 @@ export const 
BlockchainEvent: MessageFns = { break; } - message.offset = reader.int64().toString(); + message.offset = reader.int64() as bigint; continue; } case 2: { @@ -2395,7 +2416,7 @@ export const BlockchainEvent: MessageFns = { break; } - message.slot = reader.uint64().toString(); + message.slot = reader.uint64() as bigint; continue; } case 6: { @@ -2403,7 +2424,7 @@ export const BlockchainEvent: MessageFns = { break; } - message.parentSlot = reader.uint64().toString(); + message.parentSlot = reader.uint64() as bigint; continue; } case 7: { @@ -2441,12 +2462,12 @@ export const BlockchainEvent: MessageFns = { fromJSON(object: any): BlockchainEvent { return { - offset: isSet(object.offset) ? globalThis.String(object.offset) : "0", + offset: isSet(object.offset) ? BigInt(object.offset) : 0n, blockchainId: isSet(object.blockchainId) ? bytesFromBase64(object.blockchainId) : new Uint8Array(0), blockUid: isSet(object.blockUid) ? bytesFromBase64(object.blockUid) : new Uint8Array(0), numShards: isSet(object.numShards) ? globalThis.Number(object.numShards) : 0, - slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", - parentSlot: isSet(object.parentSlot) ? globalThis.String(object.parentSlot) : undefined, + slot: isSet(object.slot) ? BigInt(object.slot) : 0n, + parentSlot: isSet(object.parentSlot) ? BigInt(object.parentSlot) : undefined, commitmentLevel: isSet(object.commitmentLevel) ? commitmentLevelFromJSON(object.commitmentLevel) : 0, blockchainShardId: isSet(object.blockchainShardId) ? globalThis.Number(object.blockchainShardId) : 0, deadError: isSet(object.deadError) ? 
globalThis.String(object.deadError) : undefined, @@ -2455,8 +2476,8 @@ export const BlockchainEvent: MessageFns = { toJSON(message: BlockchainEvent): unknown { const obj: any = {}; - if (message.offset !== "0") { - obj.offset = message.offset; + if (message.offset !== 0n) { + obj.offset = message.offset.toString(); } if (message.blockchainId.length !== 0) { obj.blockchainId = base64FromBytes(message.blockchainId); @@ -2467,11 +2488,11 @@ export const BlockchainEvent: MessageFns = { if (message.numShards !== 0) { obj.numShards = Math.round(message.numShards); } - if (message.slot !== "0") { - obj.slot = message.slot; + if (message.slot !== 0n) { + obj.slot = message.slot.toString(); } if (message.parentSlot !== undefined) { - obj.parentSlot = message.parentSlot; + obj.parentSlot = message.parentSlot.toString(); } if (message.commitmentLevel !== 0) { obj.commitmentLevel = commitmentLevelToJSON(message.commitmentLevel); @@ -2490,11 +2511,11 @@ export const BlockchainEvent: MessageFns = { }, fromPartial, I>>(object: I): BlockchainEvent { const message = createBaseBlockchainEvent(); - message.offset = object.offset ?? "0"; + message.offset = object.offset ?? 0n; message.blockchainId = object.blockchainId ?? new Uint8Array(0); message.blockUid = object.blockUid ?? new Uint8Array(0); message.numShards = object.numShards ?? 0; - message.slot = object.slot ?? "0"; + message.slot = object.slot ?? 0n; message.parentSlot = object.parentSlot ?? undefined; message.commitmentLevel = object.commitmentLevel ?? 0; message.blockchainShardId = object.blockchainShardId ?? 
0; @@ -2854,12 +2875,15 @@ export const ControlResponse: MessageFns = { }; function createBaseCommitOffsetResult(): CommitOffsetResult { - return { offset: "0", shardId: 0 }; + return { offset: 0n, shardId: 0 }; } export const CommitOffsetResult: MessageFns = { encode(message: CommitOffsetResult, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { - if (message.offset !== "0") { + if (message.offset !== 0n) { + if (BigInt.asIntN(64, message.offset) !== message.offset) { + throw new globalThis.Error("value provided for field message.offset of type int64 too large"); + } writer.uint32(8).int64(message.offset); } if (message.shardId !== 0) { @@ -2880,7 +2904,7 @@ export const CommitOffsetResult: MessageFns = { break; } - message.offset = reader.int64().toString(); + message.offset = reader.int64() as bigint; continue; } case 2: { @@ -2902,15 +2926,15 @@ export const CommitOffsetResult: MessageFns = { fromJSON(object: any): CommitOffsetResult { return { - offset: isSet(object.offset) ? globalThis.String(object.offset) : "0", + offset: isSet(object.offset) ? BigInt(object.offset) : 0n, shardId: isSet(object.shardId) ? globalThis.Number(object.shardId) : 0, }; }, toJSON(message: CommitOffsetResult): unknown { const obj: any = {}; - if (message.offset !== "0") { - obj.offset = message.offset; + if (message.offset !== 0n) { + obj.offset = message.offset.toString(); } if (message.shardId !== 0) { obj.shardId = Math.round(message.shardId); @@ -2923,7 +2947,7 @@ export const CommitOffsetResult: MessageFns = { }, fromPartial, I>>(object: I): CommitOffsetResult { const message = createBaseCommitOffsetResult(); - message.offset = object.offset ?? "0"; + message.offset = object.offset ?? 0n; message.shardId = object.shardId ?? 0; return message; }, @@ -2984,8 +3008,8 @@ export const InitialConsumerGroupState: MessageFns = return { blockchainId: isSet(object.blockchainId) ? 
bytesFromBase64(object.blockchainId) : new Uint8Array(0), lastCommittedOffsets: isObject(object.lastCommittedOffsets) - ? Object.entries(object.lastCommittedOffsets).reduce<{ [key: number]: string }>((acc, [key, value]) => { - acc[globalThis.Number(key)] = String(value); + ? Object.entries(object.lastCommittedOffsets).reduce<{ [key: number]: bigint }>((acc, [key, value]) => { + acc[globalThis.Number(key)] = BigInt(value as string | number | bigint | boolean); return acc; }, {}) : {}, @@ -3002,7 +3026,7 @@ export const InitialConsumerGroupState: MessageFns = if (entries.length > 0) { obj.lastCommittedOffsets = {}; entries.forEach(([k, v]) => { - obj.lastCommittedOffsets[k] = v; + obj.lastCommittedOffsets[k] = v.toString(); }); } } @@ -3015,10 +3039,10 @@ export const InitialConsumerGroupState: MessageFns = fromPartial, I>>(object: I): InitialConsumerGroupState { const message = createBaseInitialConsumerGroupState(); message.blockchainId = object.blockchainId ?? new Uint8Array(0); - message.lastCommittedOffsets = Object.entries(object.lastCommittedOffsets ?? {}).reduce<{ [key: number]: string }>( + message.lastCommittedOffsets = Object.entries(object.lastCommittedOffsets ?? 
{}).reduce<{ [key: number]: bigint }>( (acc, [key, value]) => { if (value !== undefined) { - acc[globalThis.Number(key)] = globalThis.String(value); + acc[globalThis.Number(key)] = BigInt(value as string | number | bigint | boolean); } return acc; }, @@ -3029,7 +3053,7 @@ export const InitialConsumerGroupState: MessageFns = }; function createBaseInitialConsumerGroupState_LastCommittedOffsetsEntry(): InitialConsumerGroupState_LastCommittedOffsetsEntry { - return { key: 0, value: "0" }; + return { key: 0, value: 0n }; } export const InitialConsumerGroupState_LastCommittedOffsetsEntry: MessageFns< @@ -3042,7 +3066,10 @@ export const InitialConsumerGroupState_LastCommittedOffsetsEntry: MessageFns< if (message.key !== 0) { writer.uint32(8).int32(message.key); } - if (message.value !== "0") { + if (message.value !== 0n) { + if (BigInt.asIntN(64, message.value) !== message.value) { + throw new globalThis.Error("value provided for field message.value of type int64 too large"); + } writer.uint32(16).int64(message.value); } return writer; @@ -3068,7 +3095,7 @@ export const InitialConsumerGroupState_LastCommittedOffsetsEntry: MessageFns< break; } - message.value = reader.int64().toString(); + message.value = reader.int64() as bigint; continue; } } @@ -3083,7 +3110,7 @@ export const InitialConsumerGroupState_LastCommittedOffsetsEntry: MessageFns< fromJSON(object: any): InitialConsumerGroupState_LastCommittedOffsetsEntry { return { key: isSet(object.key) ? globalThis.Number(object.key) : 0, - value: isSet(object.value) ? globalThis.String(object.value) : "0", + value: isSet(object.value) ? 
BigInt(object.value) : 0n, }; }, @@ -3092,8 +3119,8 @@ export const InitialConsumerGroupState_LastCommittedOffsetsEntry: MessageFns< if (message.key !== 0) { obj.key = Math.round(message.key); } - if (message.value !== "0") { - obj.value = message.value; + if (message.value !== 0n) { + obj.value = message.value.toString(); } return obj; }, @@ -3108,7 +3135,7 @@ export const InitialConsumerGroupState_LastCommittedOffsetsEntry: MessageFns< ): InitialConsumerGroupState_LastCommittedOffsetsEntry { const message = createBaseInitialConsumerGroupState_LastCommittedOffsetsEntry(); message.key = object.key ?? 0; - message.value = object.value ?? "0"; + message.value = object.value ?? 0n; return message; }, }; @@ -3495,7 +3522,7 @@ function base64FromBytes(arr: Uint8Array): string { } } -type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; +type Builtin = Date | Function | Uint8Array | string | number | boolean | bigint | undefined; export type DeepPartial = T extends Builtin ? T : T extends globalThis.Array ? 
globalThis.Array> diff --git a/typescript-sdk/src/grpc/geyser.ts b/typescript-sdk/src/grpc/geyser.ts index 6bd7f3f..35b5510 100644 --- a/typescript-sdk/src/grpc/geyser.ts +++ b/typescript-sdk/src/grpc/geyser.ts @@ -145,7 +145,7 @@ export interface SubscribeRequest { commitment?: CommitmentLevel | undefined; accountsDataSlice: SubscribeRequestAccountsDataSlice[]; ping?: SubscribeRequestPing | undefined; - fromSlot?: string | undefined; + fromSlot?: bigint | undefined; } export interface SubscribeRequest_AccountsEntry { @@ -192,23 +192,23 @@ export interface SubscribeRequestFilterAccounts { export interface SubscribeRequestFilterAccountsFilter { memcmp?: SubscribeRequestFilterAccountsFilterMemcmp | undefined; - datasize?: string | undefined; + datasize?: bigint | undefined; tokenAccountState?: boolean | undefined; lamports?: SubscribeRequestFilterAccountsFilterLamports | undefined; } export interface SubscribeRequestFilterAccountsFilterMemcmp { - offset: string; + offset: bigint; bytes?: Uint8Array | undefined; base58?: string | undefined; base64?: string | undefined; } export interface SubscribeRequestFilterAccountsFilterLamports { - eq?: string | undefined; - ne?: string | undefined; - lt?: string | undefined; - gt?: string | undefined; + eq?: bigint | undefined; + ne?: bigint | undefined; + lt?: bigint | undefined; + gt?: bigint | undefined; } export interface SubscribeRequestFilterSlots { @@ -239,8 +239,8 @@ export interface SubscribeRequestFilterEntry { } export interface SubscribeRequestAccountsDataSlice { - offset: string; - length: string; + offset: bigint; + length: bigint; } export interface SubscribeRequestPing { @@ -263,31 +263,31 @@ export interface SubscribeUpdate { export interface SubscribeUpdateAccount { account: SubscribeUpdateAccountInfo | undefined; - slot: string; + slot: bigint; isStartup: boolean; } export interface SubscribeUpdateAccountInfo { pubkey: Uint8Array; - lamports: string; + lamports: bigint; owner: Uint8Array; executable: boolean; - 
rentEpoch: string; + rentEpoch: bigint; data: Uint8Array; - writeVersion: string; + writeVersion: bigint; txnSignature?: Uint8Array | undefined; } export interface SubscribeUpdateSlot { - slot: string; - parent?: string | undefined; + slot: bigint; + parent?: bigint | undefined; status: SlotStatus; deadError?: string | undefined; } export interface SubscribeUpdateTransaction { transaction: SubscribeUpdateTransactionInfo | undefined; - slot: string; + slot: bigint; } export interface SubscribeUpdateTransactionInfo { @@ -295,53 +295,53 @@ export interface SubscribeUpdateTransactionInfo { isVote: boolean; transaction: Transaction | undefined; meta: TransactionStatusMeta | undefined; - index: string; + index: bigint; } export interface SubscribeUpdateTransactionStatus { - slot: string; + slot: bigint; signature: Uint8Array; isVote: boolean; - index: string; + index: bigint; err: TransactionError | undefined; } export interface SubscribeUpdateBlock { - slot: string; + slot: bigint; blockhash: string; rewards: Rewards | undefined; blockTime: UnixTimestamp | undefined; blockHeight: BlockHeight | undefined; - parentSlot: string; + parentSlot: bigint; parentBlockhash: string; - executedTransactionCount: string; + executedTransactionCount: bigint; transactions: SubscribeUpdateTransactionInfo[]; - updatedAccountCount: string; + updatedAccountCount: bigint; accounts: SubscribeUpdateAccountInfo[]; - entriesCount: string; + entriesCount: bigint; entries: SubscribeUpdateEntry[]; } export interface SubscribeUpdateBlockMeta { - slot: string; + slot: bigint; blockhash: string; rewards: Rewards | undefined; blockTime: UnixTimestamp | undefined; blockHeight: BlockHeight | undefined; - parentSlot: string; + parentSlot: bigint; parentBlockhash: string; - executedTransactionCount: string; - entriesCount: string; + executedTransactionCount: bigint; + entriesCount: bigint; } export interface SubscribeUpdateEntry { - slot: string; - index: string; - numHashes: string; + slot: bigint; + 
index: bigint; + numHashes: bigint; hash: Uint8Array; - executedTransactionCount: string; + executedTransactionCount: bigint; /** added in v1.18, for solana 1.17 value is always 0 */ - startingTransactionIndex: string; + startingTransactionIndex: bigint; } export interface SubscribeUpdatePing { @@ -355,7 +355,7 @@ export interface SubscribeReplayInfoRequest { } export interface SubscribeReplayInfoResponse { - firstAvailable?: string | undefined; + firstAvailable?: bigint | undefined; } export interface PingRequest { @@ -371,9 +371,9 @@ export interface GetLatestBlockhashRequest { } export interface GetLatestBlockhashResponse { - slot: string; + slot: bigint; blockhash: string; - lastValidBlockHeight: string; + lastValidBlockHeight: bigint; } export interface GetBlockHeightRequest { @@ -381,7 +381,7 @@ export interface GetBlockHeightRequest { } export interface GetBlockHeightResponse { - blockHeight: string; + blockHeight: bigint; } export interface GetSlotRequest { @@ -389,7 +389,7 @@ export interface GetSlotRequest { } export interface GetSlotResponse { - slot: string; + slot: bigint; } export interface GetVersionRequest { @@ -405,7 +405,7 @@ export interface IsBlockhashValidRequest { } export interface IsBlockhashValidResponse { - slot: string; + slot: bigint; valid: boolean; } @@ -458,6 +458,9 @@ export const SubscribeRequest: MessageFns = { SubscribeRequestPing.encode(message.ping, writer.uint32(74).fork()).join(); } if (message.fromSlot !== undefined) { + if (BigInt.asUintN(64, message.fromSlot) !== message.fromSlot) { + throw new globalThis.Error("value provided for field message.fromSlot of type uint64 too large"); + } writer.uint32(88).uint64(message.fromSlot); } return writer; @@ -576,7 +579,7 @@ export const SubscribeRequest: MessageFns = { break; } - message.fromSlot = reader.uint64().toString(); + message.fromSlot = reader.uint64() as bigint; continue; } } @@ -649,7 +652,7 @@ export const SubscribeRequest: MessageFns = { ? 
object.accountsDataSlice.map((e: any) => SubscribeRequestAccountsDataSlice.fromJSON(e)) : [], ping: isSet(object.ping) ? SubscribeRequestPing.fromJSON(object.ping) : undefined, - fromSlot: isSet(object.fromSlot) ? globalThis.String(object.fromSlot) : undefined, + fromSlot: isSet(object.fromSlot) ? BigInt(object.fromSlot) : undefined, }; }, @@ -728,7 +731,7 @@ export const SubscribeRequest: MessageFns = { obj.ping = SubscribeRequestPing.toJSON(message.ping); } if (message.fromSlot !== undefined) { - obj.fromSlot = message.fromSlot; + obj.fromSlot = message.fromSlot.toString(); } return obj; }, @@ -1493,6 +1496,9 @@ export const SubscribeRequestFilterAccountsFilter: MessageFns = { encode(message: SubscribeRequestFilterAccountsFilterMemcmp, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { - if (message.offset !== "0") { + if (message.offset !== 0n) { + if (BigInt.asUintN(64, message.offset) !== message.offset) { + throw new globalThis.Error("value provided for field message.offset of type uint64 too large"); + } writer.uint32(8).uint64(message.offset); } if (message.bytes !== undefined) { @@ -1634,7 +1643,7 @@ export const SubscribeRequestFilterAccountsFilterMemcmp: MessageFns = { encode(message: SubscribeRequestAccountsDataSlice, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { - if (message.offset !== "0") { + if (message.offset !== 0n) { + if (BigInt.asUintN(64, message.offset) !== message.offset) { + throw new globalThis.Error("value provided for field message.offset of type uint64 too large"); + } writer.uint32(8).uint64(message.offset); } - if (message.length !== "0") { + if (message.length !== 0n) { + if (BigInt.asUintN(64, message.length) !== message.length) { + throw new globalThis.Error("value provided for field message.length of type uint64 too large"); + } writer.uint32(16).uint64(message.length); } return writer; @@ -2290,7 +2317,7 @@ export const SubscribeRequestAccountsDataSlice: MessageFns = { }; function 
createBaseSubscribeUpdateAccount(): SubscribeUpdateAccount { - return { account: undefined, slot: "0", isStartup: false }; + return { account: undefined, slot: 0n, isStartup: false }; } export const SubscribeUpdateAccount: MessageFns = { @@ -2662,7 +2689,10 @@ export const SubscribeUpdateAccount: MessageFns = { if (message.account !== undefined) { SubscribeUpdateAccountInfo.encode(message.account, writer.uint32(10).fork()).join(); } - if (message.slot !== "0") { + if (message.slot !== 0n) { + if (BigInt.asUintN(64, message.slot) !== message.slot) { + throw new globalThis.Error("value provided for field message.slot of type uint64 too large"); + } writer.uint32(16).uint64(message.slot); } if (message.isStartup !== false) { @@ -2691,7 +2721,7 @@ export const SubscribeUpdateAccount: MessageFns = { break; } - message.slot = reader.uint64().toString(); + message.slot = reader.uint64() as bigint; continue; } case 3: { @@ -2714,7 +2744,7 @@ export const SubscribeUpdateAccount: MessageFns = { fromJSON(object: any): SubscribeUpdateAccount { return { account: isSet(object.account) ? SubscribeUpdateAccountInfo.fromJSON(object.account) : undefined, - slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", + slot: isSet(object.slot) ? BigInt(object.slot) : 0n, isStartup: isSet(object.isStartup) ? globalThis.Boolean(object.isStartup) : false, }; }, @@ -2724,8 +2754,8 @@ export const SubscribeUpdateAccount: MessageFns = { if (message.account !== undefined) { obj.account = SubscribeUpdateAccountInfo.toJSON(message.account); } - if (message.slot !== "0") { - obj.slot = message.slot; + if (message.slot !== 0n) { + obj.slot = message.slot.toString(); } if (message.isStartup !== false) { obj.isStartup = message.isStartup; @@ -2741,7 +2771,7 @@ export const SubscribeUpdateAccount: MessageFns = { message.account = (object.account !== undefined && object.account !== null) ? SubscribeUpdateAccountInfo.fromPartial(object.account) : undefined; - message.slot = object.slot ?? 
"0"; + message.slot = object.slot ?? 0n; message.isStartup = object.isStartup ?? false; return message; }, @@ -2750,12 +2780,12 @@ export const SubscribeUpdateAccount: MessageFns = { function createBaseSubscribeUpdateAccountInfo(): SubscribeUpdateAccountInfo { return { pubkey: new Uint8Array(0), - lamports: "0", + lamports: 0n, owner: new Uint8Array(0), executable: false, - rentEpoch: "0", + rentEpoch: 0n, data: new Uint8Array(0), - writeVersion: "0", + writeVersion: 0n, txnSignature: undefined, }; } @@ -2765,7 +2795,10 @@ export const SubscribeUpdateAccountInfo: MessageFns if (message.pubkey.length !== 0) { writer.uint32(10).bytes(message.pubkey); } - if (message.lamports !== "0") { + if (message.lamports !== 0n) { + if (BigInt.asUintN(64, message.lamports) !== message.lamports) { + throw new globalThis.Error("value provided for field message.lamports of type uint64 too large"); + } writer.uint32(16).uint64(message.lamports); } if (message.owner.length !== 0) { @@ -2774,13 +2807,19 @@ export const SubscribeUpdateAccountInfo: MessageFns if (message.executable !== false) { writer.uint32(32).bool(message.executable); } - if (message.rentEpoch !== "0") { + if (message.rentEpoch !== 0n) { + if (BigInt.asUintN(64, message.rentEpoch) !== message.rentEpoch) { + throw new globalThis.Error("value provided for field message.rentEpoch of type uint64 too large"); + } writer.uint32(40).uint64(message.rentEpoch); } if (message.data.length !== 0) { writer.uint32(50).bytes(message.data); } - if (message.writeVersion !== "0") { + if (message.writeVersion !== 0n) { + if (BigInt.asUintN(64, message.writeVersion) !== message.writeVersion) { + throw new globalThis.Error("value provided for field message.writeVersion of type uint64 too large"); + } writer.uint32(56).uint64(message.writeVersion); } if (message.txnSignature !== undefined) { @@ -2809,7 +2848,7 @@ export const SubscribeUpdateAccountInfo: MessageFns break; } - message.lamports = reader.uint64().toString(); + message.lamports 
= reader.uint64() as bigint; continue; } case 3: { @@ -2833,7 +2872,7 @@ export const SubscribeUpdateAccountInfo: MessageFns break; } - message.rentEpoch = reader.uint64().toString(); + message.rentEpoch = reader.uint64() as bigint; continue; } case 6: { @@ -2849,7 +2888,7 @@ export const SubscribeUpdateAccountInfo: MessageFns break; } - message.writeVersion = reader.uint64().toString(); + message.writeVersion = reader.uint64() as bigint; continue; } case 8: { @@ -2872,12 +2911,12 @@ export const SubscribeUpdateAccountInfo: MessageFns fromJSON(object: any): SubscribeUpdateAccountInfo { return { pubkey: isSet(object.pubkey) ? bytesFromBase64(object.pubkey) : new Uint8Array(0), - lamports: isSet(object.lamports) ? globalThis.String(object.lamports) : "0", + lamports: isSet(object.lamports) ? BigInt(object.lamports) : 0n, owner: isSet(object.owner) ? bytesFromBase64(object.owner) : new Uint8Array(0), executable: isSet(object.executable) ? globalThis.Boolean(object.executable) : false, - rentEpoch: isSet(object.rentEpoch) ? globalThis.String(object.rentEpoch) : "0", + rentEpoch: isSet(object.rentEpoch) ? BigInt(object.rentEpoch) : 0n, data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(0), - writeVersion: isSet(object.writeVersion) ? globalThis.String(object.writeVersion) : "0", + writeVersion: isSet(object.writeVersion) ? BigInt(object.writeVersion) : 0n, txnSignature: isSet(object.txnSignature) ? 
bytesFromBase64(object.txnSignature) : undefined, }; }, @@ -2887,8 +2926,8 @@ export const SubscribeUpdateAccountInfo: MessageFns if (message.pubkey.length !== 0) { obj.pubkey = base64FromBytes(message.pubkey); } - if (message.lamports !== "0") { - obj.lamports = message.lamports; + if (message.lamports !== 0n) { + obj.lamports = message.lamports.toString(); } if (message.owner.length !== 0) { obj.owner = base64FromBytes(message.owner); @@ -2896,14 +2935,14 @@ export const SubscribeUpdateAccountInfo: MessageFns if (message.executable !== false) { obj.executable = message.executable; } - if (message.rentEpoch !== "0") { - obj.rentEpoch = message.rentEpoch; + if (message.rentEpoch !== 0n) { + obj.rentEpoch = message.rentEpoch.toString(); } if (message.data.length !== 0) { obj.data = base64FromBytes(message.data); } - if (message.writeVersion !== "0") { - obj.writeVersion = message.writeVersion; + if (message.writeVersion !== 0n) { + obj.writeVersion = message.writeVersion.toString(); } if (message.txnSignature !== undefined) { obj.txnSignature = base64FromBytes(message.txnSignature); @@ -2917,27 +2956,33 @@ export const SubscribeUpdateAccountInfo: MessageFns fromPartial, I>>(object: I): SubscribeUpdateAccountInfo { const message = createBaseSubscribeUpdateAccountInfo(); message.pubkey = object.pubkey ?? new Uint8Array(0); - message.lamports = object.lamports ?? "0"; + message.lamports = object.lamports ?? 0n; message.owner = object.owner ?? new Uint8Array(0); message.executable = object.executable ?? false; - message.rentEpoch = object.rentEpoch ?? "0"; + message.rentEpoch = object.rentEpoch ?? 0n; message.data = object.data ?? new Uint8Array(0); - message.writeVersion = object.writeVersion ?? "0"; + message.writeVersion = object.writeVersion ?? 0n; message.txnSignature = object.txnSignature ?? 
undefined; return message; }, }; function createBaseSubscribeUpdateSlot(): SubscribeUpdateSlot { - return { slot: "0", parent: undefined, status: 0, deadError: undefined }; + return { slot: 0n, parent: undefined, status: 0, deadError: undefined }; } export const SubscribeUpdateSlot: MessageFns = { encode(message: SubscribeUpdateSlot, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { - if (message.slot !== "0") { + if (message.slot !== 0n) { + if (BigInt.asUintN(64, message.slot) !== message.slot) { + throw new globalThis.Error("value provided for field message.slot of type uint64 too large"); + } writer.uint32(8).uint64(message.slot); } if (message.parent !== undefined) { + if (BigInt.asUintN(64, message.parent) !== message.parent) { + throw new globalThis.Error("value provided for field message.parent of type uint64 too large"); + } writer.uint32(16).uint64(message.parent); } if (message.status !== 0) { @@ -2961,7 +3006,7 @@ export const SubscribeUpdateSlot: MessageFns = { break; } - message.slot = reader.uint64().toString(); + message.slot = reader.uint64() as bigint; continue; } case 2: { @@ -2969,7 +3014,7 @@ export const SubscribeUpdateSlot: MessageFns = { break; } - message.parent = reader.uint64().toString(); + message.parent = reader.uint64() as bigint; continue; } case 3: { @@ -2999,8 +3044,8 @@ export const SubscribeUpdateSlot: MessageFns = { fromJSON(object: any): SubscribeUpdateSlot { return { - slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", - parent: isSet(object.parent) ? globalThis.String(object.parent) : undefined, + slot: isSet(object.slot) ? BigInt(object.slot) : 0n, + parent: isSet(object.parent) ? BigInt(object.parent) : undefined, status: isSet(object.status) ? slotStatusFromJSON(object.status) : 0, deadError: isSet(object.deadError) ? 
globalThis.String(object.deadError) : undefined, }; @@ -3008,11 +3053,11 @@ export const SubscribeUpdateSlot: MessageFns = { toJSON(message: SubscribeUpdateSlot): unknown { const obj: any = {}; - if (message.slot !== "0") { - obj.slot = message.slot; + if (message.slot !== 0n) { + obj.slot = message.slot.toString(); } if (message.parent !== undefined) { - obj.parent = message.parent; + obj.parent = message.parent.toString(); } if (message.status !== 0) { obj.status = slotStatusToJSON(message.status); @@ -3028,7 +3073,7 @@ export const SubscribeUpdateSlot: MessageFns = { }, fromPartial, I>>(object: I): SubscribeUpdateSlot { const message = createBaseSubscribeUpdateSlot(); - message.slot = object.slot ?? "0"; + message.slot = object.slot ?? 0n; message.parent = object.parent ?? undefined; message.status = object.status ?? 0; message.deadError = object.deadError ?? undefined; @@ -3037,7 +3082,7 @@ export const SubscribeUpdateSlot: MessageFns = { }; function createBaseSubscribeUpdateTransaction(): SubscribeUpdateTransaction { - return { transaction: undefined, slot: "0" }; + return { transaction: undefined, slot: 0n }; } export const SubscribeUpdateTransaction: MessageFns = { @@ -3045,7 +3090,10 @@ export const SubscribeUpdateTransaction: MessageFns if (message.transaction !== undefined) { SubscribeUpdateTransactionInfo.encode(message.transaction, writer.uint32(10).fork()).join(); } - if (message.slot !== "0") { + if (message.slot !== 0n) { + if (BigInt.asUintN(64, message.slot) !== message.slot) { + throw new globalThis.Error("value provided for field message.slot of type uint64 too large"); + } writer.uint32(16).uint64(message.slot); } return writer; @@ -3071,7 +3119,7 @@ export const SubscribeUpdateTransaction: MessageFns break; } - message.slot = reader.uint64().toString(); + message.slot = reader.uint64() as bigint; continue; } } @@ -3086,7 +3134,7 @@ export const SubscribeUpdateTransaction: MessageFns fromJSON(object: any): SubscribeUpdateTransaction { return { 
transaction: isSet(object.transaction) ? SubscribeUpdateTransactionInfo.fromJSON(object.transaction) : undefined, - slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", + slot: isSet(object.slot) ? BigInt(object.slot) : 0n, }; }, @@ -3095,8 +3143,8 @@ export const SubscribeUpdateTransaction: MessageFns if (message.transaction !== undefined) { obj.transaction = SubscribeUpdateTransactionInfo.toJSON(message.transaction); } - if (message.slot !== "0") { - obj.slot = message.slot; + if (message.slot !== 0n) { + obj.slot = message.slot.toString(); } return obj; }, @@ -3109,13 +3157,13 @@ export const SubscribeUpdateTransaction: MessageFns message.transaction = (object.transaction !== undefined && object.transaction !== null) ? SubscribeUpdateTransactionInfo.fromPartial(object.transaction) : undefined; - message.slot = object.slot ?? "0"; + message.slot = object.slot ?? 0n; return message; }, }; function createBaseSubscribeUpdateTransactionInfo(): SubscribeUpdateTransactionInfo { - return { signature: new Uint8Array(0), isVote: false, transaction: undefined, meta: undefined, index: "0" }; + return { signature: new Uint8Array(0), isVote: false, transaction: undefined, meta: undefined, index: 0n }; } export const SubscribeUpdateTransactionInfo: MessageFns = { @@ -3132,7 +3180,10 @@ export const SubscribeUpdateTransactionInfo: MessageFns = { encode(message: SubscribeUpdateTransactionStatus, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { - if (message.slot !== "0") { + if (message.slot !== 0n) { + if (BigInt.asUintN(64, message.slot) !== message.slot) { + throw new globalThis.Error("value provided for field message.slot of type uint64 too large"); + } writer.uint32(8).uint64(message.slot); } if (message.signature.length !== 0) { @@ -3259,7 +3313,10 @@ export const SubscribeUpdateTransactionStatus: MessageFns = { encode(message: SubscribeUpdateBlock, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { - if (message.slot !== "0") { + if 
(message.slot !== 0n) { + if (BigInt.asUintN(64, message.slot) !== message.slot) { + throw new globalThis.Error("value provided for field message.slot of type uint64 too large"); + } writer.uint32(8).uint64(message.slot); } if (message.blockhash !== "") { @@ -3409,25 +3469,39 @@ export const SubscribeUpdateBlock: MessageFns = { if (message.blockHeight !== undefined) { BlockHeight.encode(message.blockHeight, writer.uint32(42).fork()).join(); } - if (message.parentSlot !== "0") { + if (message.parentSlot !== 0n) { + if (BigInt.asUintN(64, message.parentSlot) !== message.parentSlot) { + throw new globalThis.Error("value provided for field message.parentSlot of type uint64 too large"); + } writer.uint32(56).uint64(message.parentSlot); } if (message.parentBlockhash !== "") { writer.uint32(66).string(message.parentBlockhash); } - if (message.executedTransactionCount !== "0") { + if (message.executedTransactionCount !== 0n) { + if (BigInt.asUintN(64, message.executedTransactionCount) !== message.executedTransactionCount) { + throw new globalThis.Error( + "value provided for field message.executedTransactionCount of type uint64 too large", + ); + } writer.uint32(72).uint64(message.executedTransactionCount); } for (const v of message.transactions) { SubscribeUpdateTransactionInfo.encode(v!, writer.uint32(50).fork()).join(); } - if (message.updatedAccountCount !== "0") { + if (message.updatedAccountCount !== 0n) { + if (BigInt.asUintN(64, message.updatedAccountCount) !== message.updatedAccountCount) { + throw new globalThis.Error("value provided for field message.updatedAccountCount of type uint64 too large"); + } writer.uint32(80).uint64(message.updatedAccountCount); } for (const v of message.accounts) { SubscribeUpdateAccountInfo.encode(v!, writer.uint32(90).fork()).join(); } - if (message.entriesCount !== "0") { + if (message.entriesCount !== 0n) { + if (BigInt.asUintN(64, message.entriesCount) !== message.entriesCount) { + throw new globalThis.Error("value provided for 
field message.entriesCount of type uint64 too large"); + } writer.uint32(96).uint64(message.entriesCount); } for (const v of message.entries) { @@ -3448,7 +3522,7 @@ export const SubscribeUpdateBlock: MessageFns = { break; } - message.slot = reader.uint64().toString(); + message.slot = reader.uint64() as bigint; continue; } case 2: { @@ -3488,7 +3562,7 @@ export const SubscribeUpdateBlock: MessageFns = { break; } - message.parentSlot = reader.uint64().toString(); + message.parentSlot = reader.uint64() as bigint; continue; } case 8: { @@ -3504,7 +3578,7 @@ export const SubscribeUpdateBlock: MessageFns = { break; } - message.executedTransactionCount = reader.uint64().toString(); + message.executedTransactionCount = reader.uint64() as bigint; continue; } case 6: { @@ -3520,7 +3594,7 @@ export const SubscribeUpdateBlock: MessageFns = { break; } - message.updatedAccountCount = reader.uint64().toString(); + message.updatedAccountCount = reader.uint64() as bigint; continue; } case 11: { @@ -3536,7 +3610,7 @@ export const SubscribeUpdateBlock: MessageFns = { break; } - message.entriesCount = reader.uint64().toString(); + message.entriesCount = reader.uint64() as bigint; continue; } case 13: { @@ -3558,24 +3632,22 @@ export const SubscribeUpdateBlock: MessageFns = { fromJSON(object: any): SubscribeUpdateBlock { return { - slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", + slot: isSet(object.slot) ? BigInt(object.slot) : 0n, blockhash: isSet(object.blockhash) ? globalThis.String(object.blockhash) : "", rewards: isSet(object.rewards) ? Rewards.fromJSON(object.rewards) : undefined, blockTime: isSet(object.blockTime) ? UnixTimestamp.fromJSON(object.blockTime) : undefined, blockHeight: isSet(object.blockHeight) ? BlockHeight.fromJSON(object.blockHeight) : undefined, - parentSlot: isSet(object.parentSlot) ? globalThis.String(object.parentSlot) : "0", + parentSlot: isSet(object.parentSlot) ? 
BigInt(object.parentSlot) : 0n, parentBlockhash: isSet(object.parentBlockhash) ? globalThis.String(object.parentBlockhash) : "", - executedTransactionCount: isSet(object.executedTransactionCount) - ? globalThis.String(object.executedTransactionCount) - : "0", + executedTransactionCount: isSet(object.executedTransactionCount) ? BigInt(object.executedTransactionCount) : 0n, transactions: globalThis.Array.isArray(object?.transactions) ? object.transactions.map((e: any) => SubscribeUpdateTransactionInfo.fromJSON(e)) : [], - updatedAccountCount: isSet(object.updatedAccountCount) ? globalThis.String(object.updatedAccountCount) : "0", + updatedAccountCount: isSet(object.updatedAccountCount) ? BigInt(object.updatedAccountCount) : 0n, accounts: globalThis.Array.isArray(object?.accounts) ? object.accounts.map((e: any) => SubscribeUpdateAccountInfo.fromJSON(e)) : [], - entriesCount: isSet(object.entriesCount) ? globalThis.String(object.entriesCount) : "0", + entriesCount: isSet(object.entriesCount) ? BigInt(object.entriesCount) : 0n, entries: globalThis.Array.isArray(object?.entries) ? 
object.entries.map((e: any) => SubscribeUpdateEntry.fromJSON(e)) : [], @@ -3584,8 +3656,8 @@ export const SubscribeUpdateBlock: MessageFns = { toJSON(message: SubscribeUpdateBlock): unknown { const obj: any = {}; - if (message.slot !== "0") { - obj.slot = message.slot; + if (message.slot !== 0n) { + obj.slot = message.slot.toString(); } if (message.blockhash !== "") { obj.blockhash = message.blockhash; @@ -3599,26 +3671,26 @@ export const SubscribeUpdateBlock: MessageFns = { if (message.blockHeight !== undefined) { obj.blockHeight = BlockHeight.toJSON(message.blockHeight); } - if (message.parentSlot !== "0") { - obj.parentSlot = message.parentSlot; + if (message.parentSlot !== 0n) { + obj.parentSlot = message.parentSlot.toString(); } if (message.parentBlockhash !== "") { obj.parentBlockhash = message.parentBlockhash; } - if (message.executedTransactionCount !== "0") { - obj.executedTransactionCount = message.executedTransactionCount; + if (message.executedTransactionCount !== 0n) { + obj.executedTransactionCount = message.executedTransactionCount.toString(); } if (message.transactions?.length) { obj.transactions = message.transactions.map((e) => SubscribeUpdateTransactionInfo.toJSON(e)); } - if (message.updatedAccountCount !== "0") { - obj.updatedAccountCount = message.updatedAccountCount; + if (message.updatedAccountCount !== 0n) { + obj.updatedAccountCount = message.updatedAccountCount.toString(); } if (message.accounts?.length) { obj.accounts = message.accounts.map((e) => SubscribeUpdateAccountInfo.toJSON(e)); } - if (message.entriesCount !== "0") { - obj.entriesCount = message.entriesCount; + if (message.entriesCount !== 0n) { + obj.entriesCount = message.entriesCount.toString(); } if (message.entries?.length) { obj.entries = message.entries.map((e) => SubscribeUpdateEntry.toJSON(e)); @@ -3631,7 +3703,7 @@ export const SubscribeUpdateBlock: MessageFns = { }, fromPartial, I>>(object: I): SubscribeUpdateBlock { const message = createBaseSubscribeUpdateBlock(); - 
message.slot = object.slot ?? "0"; + message.slot = object.slot ?? 0n; message.blockhash = object.blockhash ?? ""; message.rewards = (object.rewards !== undefined && object.rewards !== null) ? Rewards.fromPartial(object.rewards) @@ -3642,13 +3714,13 @@ export const SubscribeUpdateBlock: MessageFns = { message.blockHeight = (object.blockHeight !== undefined && object.blockHeight !== null) ? BlockHeight.fromPartial(object.blockHeight) : undefined; - message.parentSlot = object.parentSlot ?? "0"; + message.parentSlot = object.parentSlot ?? 0n; message.parentBlockhash = object.parentBlockhash ?? ""; - message.executedTransactionCount = object.executedTransactionCount ?? "0"; + message.executedTransactionCount = object.executedTransactionCount ?? 0n; message.transactions = object.transactions?.map((e) => SubscribeUpdateTransactionInfo.fromPartial(e)) || []; - message.updatedAccountCount = object.updatedAccountCount ?? "0"; + message.updatedAccountCount = object.updatedAccountCount ?? 0n; message.accounts = object.accounts?.map((e) => SubscribeUpdateAccountInfo.fromPartial(e)) || []; - message.entriesCount = object.entriesCount ?? "0"; + message.entriesCount = object.entriesCount ?? 
0n; message.entries = object.entries?.map((e) => SubscribeUpdateEntry.fromPartial(e)) || []; return message; }, @@ -3656,21 +3728,24 @@ export const SubscribeUpdateBlock: MessageFns = { function createBaseSubscribeUpdateBlockMeta(): SubscribeUpdateBlockMeta { return { - slot: "0", + slot: 0n, blockhash: "", rewards: undefined, blockTime: undefined, blockHeight: undefined, - parentSlot: "0", + parentSlot: 0n, parentBlockhash: "", - executedTransactionCount: "0", - entriesCount: "0", + executedTransactionCount: 0n, + entriesCount: 0n, }; } export const SubscribeUpdateBlockMeta: MessageFns = { encode(message: SubscribeUpdateBlockMeta, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { - if (message.slot !== "0") { + if (message.slot !== 0n) { + if (BigInt.asUintN(64, message.slot) !== message.slot) { + throw new globalThis.Error("value provided for field message.slot of type uint64 too large"); + } writer.uint32(8).uint64(message.slot); } if (message.blockhash !== "") { @@ -3685,16 +3760,27 @@ export const SubscribeUpdateBlockMeta: MessageFns = { if (message.blockHeight !== undefined) { BlockHeight.encode(message.blockHeight, writer.uint32(42).fork()).join(); } - if (message.parentSlot !== "0") { + if (message.parentSlot !== 0n) { + if (BigInt.asUintN(64, message.parentSlot) !== message.parentSlot) { + throw new globalThis.Error("value provided for field message.parentSlot of type uint64 too large"); + } writer.uint32(48).uint64(message.parentSlot); } if (message.parentBlockhash !== "") { writer.uint32(58).string(message.parentBlockhash); } - if (message.executedTransactionCount !== "0") { + if (message.executedTransactionCount !== 0n) { + if (BigInt.asUintN(64, message.executedTransactionCount) !== message.executedTransactionCount) { + throw new globalThis.Error( + "value provided for field message.executedTransactionCount of type uint64 too large", + ); + } writer.uint32(64).uint64(message.executedTransactionCount); } - if (message.entriesCount !== "0") { + 
if (message.entriesCount !== 0n) { + if (BigInt.asUintN(64, message.entriesCount) !== message.entriesCount) { + throw new globalThis.Error("value provided for field message.entriesCount of type uint64 too large"); + } writer.uint32(72).uint64(message.entriesCount); } return writer; @@ -3712,7 +3798,7 @@ export const SubscribeUpdateBlockMeta: MessageFns = { break; } - message.slot = reader.uint64().toString(); + message.slot = reader.uint64() as bigint; continue; } case 2: { @@ -3752,7 +3838,7 @@ export const SubscribeUpdateBlockMeta: MessageFns = { break; } - message.parentSlot = reader.uint64().toString(); + message.parentSlot = reader.uint64() as bigint; continue; } case 7: { @@ -3768,7 +3854,7 @@ export const SubscribeUpdateBlockMeta: MessageFns = { break; } - message.executedTransactionCount = reader.uint64().toString(); + message.executedTransactionCount = reader.uint64() as bigint; continue; } case 9: { @@ -3776,7 +3862,7 @@ export const SubscribeUpdateBlockMeta: MessageFns = { break; } - message.entriesCount = reader.uint64().toString(); + message.entriesCount = reader.uint64() as bigint; continue; } } @@ -3790,24 +3876,22 @@ export const SubscribeUpdateBlockMeta: MessageFns = { fromJSON(object: any): SubscribeUpdateBlockMeta { return { - slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", + slot: isSet(object.slot) ? BigInt(object.slot) : 0n, blockhash: isSet(object.blockhash) ? globalThis.String(object.blockhash) : "", rewards: isSet(object.rewards) ? Rewards.fromJSON(object.rewards) : undefined, blockTime: isSet(object.blockTime) ? UnixTimestamp.fromJSON(object.blockTime) : undefined, blockHeight: isSet(object.blockHeight) ? BlockHeight.fromJSON(object.blockHeight) : undefined, - parentSlot: isSet(object.parentSlot) ? globalThis.String(object.parentSlot) : "0", + parentSlot: isSet(object.parentSlot) ? BigInt(object.parentSlot) : 0n, parentBlockhash: isSet(object.parentBlockhash) ? 
globalThis.String(object.parentBlockhash) : "", - executedTransactionCount: isSet(object.executedTransactionCount) - ? globalThis.String(object.executedTransactionCount) - : "0", - entriesCount: isSet(object.entriesCount) ? globalThis.String(object.entriesCount) : "0", + executedTransactionCount: isSet(object.executedTransactionCount) ? BigInt(object.executedTransactionCount) : 0n, + entriesCount: isSet(object.entriesCount) ? BigInt(object.entriesCount) : 0n, }; }, toJSON(message: SubscribeUpdateBlockMeta): unknown { const obj: any = {}; - if (message.slot !== "0") { - obj.slot = message.slot; + if (message.slot !== 0n) { + obj.slot = message.slot.toString(); } if (message.blockhash !== "") { obj.blockhash = message.blockhash; @@ -3821,17 +3905,17 @@ export const SubscribeUpdateBlockMeta: MessageFns = { if (message.blockHeight !== undefined) { obj.blockHeight = BlockHeight.toJSON(message.blockHeight); } - if (message.parentSlot !== "0") { - obj.parentSlot = message.parentSlot; + if (message.parentSlot !== 0n) { + obj.parentSlot = message.parentSlot.toString(); } if (message.parentBlockhash !== "") { obj.parentBlockhash = message.parentBlockhash; } - if (message.executedTransactionCount !== "0") { - obj.executedTransactionCount = message.executedTransactionCount; + if (message.executedTransactionCount !== 0n) { + obj.executedTransactionCount = message.executedTransactionCount.toString(); } - if (message.entriesCount !== "0") { - obj.entriesCount = message.entriesCount; + if (message.entriesCount !== 0n) { + obj.entriesCount = message.entriesCount.toString(); } return obj; }, @@ -3841,7 +3925,7 @@ export const SubscribeUpdateBlockMeta: MessageFns = { }, fromPartial, I>>(object: I): SubscribeUpdateBlockMeta { const message = createBaseSubscribeUpdateBlockMeta(); - message.slot = object.slot ?? "0"; + message.slot = object.slot ?? 0n; message.blockhash = object.blockhash ?? ""; message.rewards = (object.rewards !== undefined && object.rewards !== null) ? 
Rewards.fromPartial(object.rewards) @@ -3852,43 +3936,62 @@ export const SubscribeUpdateBlockMeta: MessageFns = { message.blockHeight = (object.blockHeight !== undefined && object.blockHeight !== null) ? BlockHeight.fromPartial(object.blockHeight) : undefined; - message.parentSlot = object.parentSlot ?? "0"; + message.parentSlot = object.parentSlot ?? 0n; message.parentBlockhash = object.parentBlockhash ?? ""; - message.executedTransactionCount = object.executedTransactionCount ?? "0"; - message.entriesCount = object.entriesCount ?? "0"; + message.executedTransactionCount = object.executedTransactionCount ?? 0n; + message.entriesCount = object.entriesCount ?? 0n; return message; }, }; function createBaseSubscribeUpdateEntry(): SubscribeUpdateEntry { return { - slot: "0", - index: "0", - numHashes: "0", + slot: 0n, + index: 0n, + numHashes: 0n, hash: new Uint8Array(0), - executedTransactionCount: "0", - startingTransactionIndex: "0", + executedTransactionCount: 0n, + startingTransactionIndex: 0n, }; } export const SubscribeUpdateEntry: MessageFns = { encode(message: SubscribeUpdateEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { - if (message.slot !== "0") { + if (message.slot !== 0n) { + if (BigInt.asUintN(64, message.slot) !== message.slot) { + throw new globalThis.Error("value provided for field message.slot of type uint64 too large"); + } writer.uint32(8).uint64(message.slot); } - if (message.index !== "0") { + if (message.index !== 0n) { + if (BigInt.asUintN(64, message.index) !== message.index) { + throw new globalThis.Error("value provided for field message.index of type uint64 too large"); + } writer.uint32(16).uint64(message.index); } - if (message.numHashes !== "0") { + if (message.numHashes !== 0n) { + if (BigInt.asUintN(64, message.numHashes) !== message.numHashes) { + throw new globalThis.Error("value provided for field message.numHashes of type uint64 too large"); + } writer.uint32(24).uint64(message.numHashes); } if 
(message.hash.length !== 0) { writer.uint32(34).bytes(message.hash); } - if (message.executedTransactionCount !== "0") { + if (message.executedTransactionCount !== 0n) { + if (BigInt.asUintN(64, message.executedTransactionCount) !== message.executedTransactionCount) { + throw new globalThis.Error( + "value provided for field message.executedTransactionCount of type uint64 too large", + ); + } writer.uint32(40).uint64(message.executedTransactionCount); } - if (message.startingTransactionIndex !== "0") { + if (message.startingTransactionIndex !== 0n) { + if (BigInt.asUintN(64, message.startingTransactionIndex) !== message.startingTransactionIndex) { + throw new globalThis.Error( + "value provided for field message.startingTransactionIndex of type uint64 too large", + ); + } writer.uint32(48).uint64(message.startingTransactionIndex); } return writer; @@ -3906,7 +4009,7 @@ export const SubscribeUpdateEntry: MessageFns = { break; } - message.slot = reader.uint64().toString(); + message.slot = reader.uint64() as bigint; continue; } case 2: { @@ -3914,7 +4017,7 @@ export const SubscribeUpdateEntry: MessageFns = { break; } - message.index = reader.uint64().toString(); + message.index = reader.uint64() as bigint; continue; } case 3: { @@ -3922,7 +4025,7 @@ export const SubscribeUpdateEntry: MessageFns = { break; } - message.numHashes = reader.uint64().toString(); + message.numHashes = reader.uint64() as bigint; continue; } case 4: { @@ -3938,7 +4041,7 @@ export const SubscribeUpdateEntry: MessageFns = { break; } - message.executedTransactionCount = reader.uint64().toString(); + message.executedTransactionCount = reader.uint64() as bigint; continue; } case 6: { @@ -3946,7 +4049,7 @@ export const SubscribeUpdateEntry: MessageFns = { break; } - message.startingTransactionIndex = reader.uint64().toString(); + message.startingTransactionIndex = reader.uint64() as bigint; continue; } } @@ -3960,38 +4063,34 @@ export const SubscribeUpdateEntry: MessageFns = { fromJSON(object: 
any): SubscribeUpdateEntry { return { - slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", - index: isSet(object.index) ? globalThis.String(object.index) : "0", - numHashes: isSet(object.numHashes) ? globalThis.String(object.numHashes) : "0", + slot: isSet(object.slot) ? BigInt(object.slot) : 0n, + index: isSet(object.index) ? BigInt(object.index) : 0n, + numHashes: isSet(object.numHashes) ? BigInt(object.numHashes) : 0n, hash: isSet(object.hash) ? bytesFromBase64(object.hash) : new Uint8Array(0), - executedTransactionCount: isSet(object.executedTransactionCount) - ? globalThis.String(object.executedTransactionCount) - : "0", - startingTransactionIndex: isSet(object.startingTransactionIndex) - ? globalThis.String(object.startingTransactionIndex) - : "0", + executedTransactionCount: isSet(object.executedTransactionCount) ? BigInt(object.executedTransactionCount) : 0n, + startingTransactionIndex: isSet(object.startingTransactionIndex) ? BigInt(object.startingTransactionIndex) : 0n, }; }, toJSON(message: SubscribeUpdateEntry): unknown { const obj: any = {}; - if (message.slot !== "0") { - obj.slot = message.slot; + if (message.slot !== 0n) { + obj.slot = message.slot.toString(); } - if (message.index !== "0") { - obj.index = message.index; + if (message.index !== 0n) { + obj.index = message.index.toString(); } - if (message.numHashes !== "0") { - obj.numHashes = message.numHashes; + if (message.numHashes !== 0n) { + obj.numHashes = message.numHashes.toString(); } if (message.hash.length !== 0) { obj.hash = base64FromBytes(message.hash); } - if (message.executedTransactionCount !== "0") { - obj.executedTransactionCount = message.executedTransactionCount; + if (message.executedTransactionCount !== 0n) { + obj.executedTransactionCount = message.executedTransactionCount.toString(); } - if (message.startingTransactionIndex !== "0") { - obj.startingTransactionIndex = message.startingTransactionIndex; + if (message.startingTransactionIndex !== 0n) { + 
obj.startingTransactionIndex = message.startingTransactionIndex.toString(); } return obj; }, @@ -4001,12 +4100,12 @@ export const SubscribeUpdateEntry: MessageFns = { }, fromPartial, I>>(object: I): SubscribeUpdateEntry { const message = createBaseSubscribeUpdateEntry(); - message.slot = object.slot ?? "0"; - message.index = object.index ?? "0"; - message.numHashes = object.numHashes ?? "0"; + message.slot = object.slot ?? 0n; + message.index = object.index ?? 0n; + message.numHashes = object.numHashes ?? 0n; message.hash = object.hash ?? new Uint8Array(0); - message.executedTransactionCount = object.executedTransactionCount ?? "0"; - message.startingTransactionIndex = object.startingTransactionIndex ?? "0"; + message.executedTransactionCount = object.executedTransactionCount ?? 0n; + message.startingTransactionIndex = object.startingTransactionIndex ?? 0n; return message; }, }; @@ -4162,6 +4261,9 @@ function createBaseSubscribeReplayInfoResponse(): SubscribeReplayInfoResponse { export const SubscribeReplayInfoResponse: MessageFns = { encode(message: SubscribeReplayInfoResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { if (message.firstAvailable !== undefined) { + if (BigInt.asUintN(64, message.firstAvailable) !== message.firstAvailable) { + throw new globalThis.Error("value provided for field message.firstAvailable of type uint64 too large"); + } writer.uint32(8).uint64(message.firstAvailable); } return writer; @@ -4179,7 +4281,7 @@ export const SubscribeReplayInfoResponse: MessageFns = }; function createBaseGetLatestBlockhashResponse(): GetLatestBlockhashResponse { - return { slot: "0", blockhash: "", lastValidBlockHeight: "0" }; + return { slot: 0n, blockhash: "", lastValidBlockHeight: 0n }; } export const GetLatestBlockhashResponse: MessageFns = { encode(message: GetLatestBlockhashResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { - if (message.slot !== "0") { + if (message.slot !== 0n) { + if (BigInt.asUintN(64, 
message.slot) !== message.slot) { + throw new globalThis.Error("value provided for field message.slot of type uint64 too large"); + } writer.uint32(8).uint64(message.slot); } if (message.blockhash !== "") { writer.uint32(18).string(message.blockhash); } - if (message.lastValidBlockHeight !== "0") { + if (message.lastValidBlockHeight !== 0n) { + if (BigInt.asUintN(64, message.lastValidBlockHeight) !== message.lastValidBlockHeight) { + throw new globalThis.Error("value provided for field message.lastValidBlockHeight of type uint64 too large"); + } writer.uint32(24).uint64(message.lastValidBlockHeight); } return writer; @@ -4417,7 +4525,7 @@ export const GetLatestBlockhashResponse: MessageFns break; } - message.slot = reader.uint64().toString(); + message.slot = reader.uint64() as bigint; continue; } case 2: { @@ -4433,7 +4541,7 @@ export const GetLatestBlockhashResponse: MessageFns break; } - message.lastValidBlockHeight = reader.uint64().toString(); + message.lastValidBlockHeight = reader.uint64() as bigint; continue; } } @@ -4447,22 +4555,22 @@ export const GetLatestBlockhashResponse: MessageFns fromJSON(object: any): GetLatestBlockhashResponse { return { - slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", + slot: isSet(object.slot) ? BigInt(object.slot) : 0n, blockhash: isSet(object.blockhash) ? globalThis.String(object.blockhash) : "", - lastValidBlockHeight: isSet(object.lastValidBlockHeight) ? globalThis.String(object.lastValidBlockHeight) : "0", + lastValidBlockHeight: isSet(object.lastValidBlockHeight) ? 
BigInt(object.lastValidBlockHeight) : 0n, }; }, toJSON(message: GetLatestBlockhashResponse): unknown { const obj: any = {}; - if (message.slot !== "0") { - obj.slot = message.slot; + if (message.slot !== 0n) { + obj.slot = message.slot.toString(); } if (message.blockhash !== "") { obj.blockhash = message.blockhash; } - if (message.lastValidBlockHeight !== "0") { - obj.lastValidBlockHeight = message.lastValidBlockHeight; + if (message.lastValidBlockHeight !== 0n) { + obj.lastValidBlockHeight = message.lastValidBlockHeight.toString(); } return obj; }, @@ -4472,9 +4580,9 @@ export const GetLatestBlockhashResponse: MessageFns }, fromPartial, I>>(object: I): GetLatestBlockhashResponse { const message = createBaseGetLatestBlockhashResponse(); - message.slot = object.slot ?? "0"; + message.slot = object.slot ?? 0n; message.blockhash = object.blockhash ?? ""; - message.lastValidBlockHeight = object.lastValidBlockHeight ?? "0"; + message.lastValidBlockHeight = object.lastValidBlockHeight ?? 0n; return message; }, }; @@ -4538,12 +4646,15 @@ export const GetBlockHeightRequest: MessageFns = { }; function createBaseGetBlockHeightResponse(): GetBlockHeightResponse { - return { blockHeight: "0" }; + return { blockHeight: 0n }; } export const GetBlockHeightResponse: MessageFns = { encode(message: GetBlockHeightResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { - if (message.blockHeight !== "0") { + if (message.blockHeight !== 0n) { + if (BigInt.asUintN(64, message.blockHeight) !== message.blockHeight) { + throw new globalThis.Error("value provided for field message.blockHeight of type uint64 too large"); + } writer.uint32(8).uint64(message.blockHeight); } return writer; @@ -4561,7 +4672,7 @@ export const GetBlockHeightResponse: MessageFns = { break; } - message.blockHeight = reader.uint64().toString(); + message.blockHeight = reader.uint64() as bigint; continue; } } @@ -4574,13 +4685,13 @@ export const GetBlockHeightResponse: MessageFns = { }, fromJSON(object: 
any): GetBlockHeightResponse { - return { blockHeight: isSet(object.blockHeight) ? globalThis.String(object.blockHeight) : "0" }; + return { blockHeight: isSet(object.blockHeight) ? BigInt(object.blockHeight) : 0n }; }, toJSON(message: GetBlockHeightResponse): unknown { const obj: any = {}; - if (message.blockHeight !== "0") { - obj.blockHeight = message.blockHeight; + if (message.blockHeight !== 0n) { + obj.blockHeight = message.blockHeight.toString(); } return obj; }, @@ -4590,7 +4701,7 @@ export const GetBlockHeightResponse: MessageFns = { }, fromPartial, I>>(object: I): GetBlockHeightResponse { const message = createBaseGetBlockHeightResponse(); - message.blockHeight = object.blockHeight ?? "0"; + message.blockHeight = object.blockHeight ?? 0n; return message; }, }; @@ -4654,12 +4765,15 @@ export const GetSlotRequest: MessageFns = { }; function createBaseGetSlotResponse(): GetSlotResponse { - return { slot: "0" }; + return { slot: 0n }; } export const GetSlotResponse: MessageFns = { encode(message: GetSlotResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { - if (message.slot !== "0") { + if (message.slot !== 0n) { + if (BigInt.asUintN(64, message.slot) !== message.slot) { + throw new globalThis.Error("value provided for field message.slot of type uint64 too large"); + } writer.uint32(8).uint64(message.slot); } return writer; @@ -4677,7 +4791,7 @@ export const GetSlotResponse: MessageFns = { break; } - message.slot = reader.uint64().toString(); + message.slot = reader.uint64() as bigint; continue; } } @@ -4690,13 +4804,13 @@ export const GetSlotResponse: MessageFns = { }, fromJSON(object: any): GetSlotResponse { - return { slot: isSet(object.slot) ? globalThis.String(object.slot) : "0" }; + return { slot: isSet(object.slot) ? 
BigInt(object.slot) : 0n }; }, toJSON(message: GetSlotResponse): unknown { const obj: any = {}; - if (message.slot !== "0") { - obj.slot = message.slot; + if (message.slot !== 0n) { + obj.slot = message.slot.toString(); } return obj; }, @@ -4706,7 +4820,7 @@ export const GetSlotResponse: MessageFns = { }, fromPartial, I>>(object: I): GetSlotResponse { const message = createBaseGetSlotResponse(); - message.slot = object.slot ?? "0"; + message.slot = object.slot ?? 0n; return message; }, }; @@ -4889,12 +5003,15 @@ export const IsBlockhashValidRequest: MessageFns = { }; function createBaseIsBlockhashValidResponse(): IsBlockhashValidResponse { - return { slot: "0", valid: false }; + return { slot: 0n, valid: false }; } export const IsBlockhashValidResponse: MessageFns = { encode(message: IsBlockhashValidResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { - if (message.slot !== "0") { + if (message.slot !== 0n) { + if (BigInt.asUintN(64, message.slot) !== message.slot) { + throw new globalThis.Error("value provided for field message.slot of type uint64 too large"); + } writer.uint32(8).uint64(message.slot); } if (message.valid !== false) { @@ -4915,7 +5032,7 @@ export const IsBlockhashValidResponse: MessageFns = { break; } - message.slot = reader.uint64().toString(); + message.slot = reader.uint64() as bigint; continue; } case 2: { @@ -4937,15 +5054,15 @@ export const IsBlockhashValidResponse: MessageFns = { fromJSON(object: any): IsBlockhashValidResponse { return { - slot: isSet(object.slot) ? globalThis.String(object.slot) : "0", + slot: isSet(object.slot) ? BigInt(object.slot) : 0n, valid: isSet(object.valid) ? 
globalThis.Boolean(object.valid) : false, }; }, toJSON(message: IsBlockhashValidResponse): unknown { const obj: any = {}; - if (message.slot !== "0") { - obj.slot = message.slot; + if (message.slot !== 0n) { + obj.slot = message.slot.toString(); } if (message.valid !== false) { obj.valid = message.valid; @@ -4958,7 +5075,7 @@ export const IsBlockhashValidResponse: MessageFns = { }, fromPartial, I>>(object: I): IsBlockhashValidResponse { const message = createBaseIsBlockhashValidResponse(); - message.slot = object.slot ?? "0"; + message.slot = object.slot ?? 0n; message.valid = object.valid ?? false; return message; }, @@ -5198,7 +5315,7 @@ function base64FromBytes(arr: Uint8Array): string { } } -type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; +type Builtin = Date | Function | Uint8Array | string | number | boolean | bigint | undefined; export type DeepPartial = T extends Builtin ? T : T extends globalThis.Array ? globalThis.Array> @@ -5211,13 +5328,13 @@ export type Exact = P extends Builtin ? P : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; function toTimestamp(date: Date): Timestamp { - const seconds = Math.trunc(date.getTime() / 1_000).toString(); + const seconds = BigInt(Math.trunc(date.getTime() / 1_000)); const nanos = (date.getTime() % 1_000) * 1_000_000; return { seconds, nanos }; } function fromTimestamp(t: Timestamp): Date { - let millis = (globalThis.Number(t.seconds) || 0) * 1_000; + let millis = (globalThis.Number(t.seconds.toString()) || 0) * 1_000; millis += (t.nanos || 0) / 1_000_000; return new globalThis.Date(millis); } diff --git a/typescript-sdk/src/grpc/google/protobuf/timestamp.ts b/typescript-sdk/src/grpc/google/protobuf/timestamp.ts index e116ef7..45ee7c1 100644 --- a/typescript-sdk/src/grpc/google/protobuf/timestamp.ts +++ b/typescript-sdk/src/grpc/google/protobuf/timestamp.ts @@ -98,7 +98,7 @@ export interface Timestamp { * 1970-01-01T00:00:00Z. 
Must be from 0001-01-01T00:00:00Z to * 9999-12-31T23:59:59Z inclusive. */ - seconds: string; + seconds: bigint; /** * Non-negative fractions of a second at nanosecond resolution. Negative * second values with fractions must still have non-negative nanos values @@ -109,12 +109,15 @@ export interface Timestamp { } function createBaseTimestamp(): Timestamp { - return { seconds: "0", nanos: 0 }; + return { seconds: 0n, nanos: 0 }; } export const Timestamp: MessageFns = { encode(message: Timestamp, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { - if (message.seconds !== "0") { + if (message.seconds !== 0n) { + if (BigInt.asIntN(64, message.seconds) !== message.seconds) { + throw new globalThis.Error("value provided for field message.seconds of type int64 too large"); + } writer.uint32(8).int64(message.seconds); } if (message.nanos !== 0) { @@ -135,7 +138,7 @@ export const Timestamp: MessageFns = { break; } - message.seconds = reader.int64().toString(); + message.seconds = reader.int64() as bigint; continue; } case 2: { @@ -157,15 +160,15 @@ export const Timestamp: MessageFns = { fromJSON(object: any): Timestamp { return { - seconds: isSet(object.seconds) ? globalThis.String(object.seconds) : "0", + seconds: isSet(object.seconds) ? BigInt(object.seconds) : 0n, nanos: isSet(object.nanos) ? globalThis.Number(object.nanos) : 0, }; }, toJSON(message: Timestamp): unknown { const obj: any = {}; - if (message.seconds !== "0") { - obj.seconds = message.seconds; + if (message.seconds !== 0n) { + obj.seconds = message.seconds.toString(); } if (message.nanos !== 0) { obj.nanos = Math.round(message.nanos); @@ -178,13 +181,13 @@ export const Timestamp: MessageFns = { }, fromPartial, I>>(object: I): Timestamp { const message = createBaseTimestamp(); - message.seconds = object.seconds ?? "0"; + message.seconds = object.seconds ?? 0n; message.nanos = object.nanos ?? 
0; return message; }, }; -type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; +type Builtin = Date | Function | Uint8Array | string | number | boolean | bigint | undefined; export type DeepPartial = T extends Builtin ? T : T extends globalThis.Array ? globalThis.Array> diff --git a/typescript-sdk/src/grpc/solana-storage.ts b/typescript-sdk/src/grpc/solana-storage.ts index 276b24f..6291afb 100644 --- a/typescript-sdk/src/grpc/solana-storage.ts +++ b/typescript-sdk/src/grpc/solana-storage.ts @@ -63,7 +63,7 @@ export function rewardTypeToJSON(object: RewardType): string { export interface ConfirmedBlock { previousBlockhash: string; blockhash: string; - parentSlot: string; + parentSlot: bigint; transactions: ConfirmedTransaction[]; rewards: Reward[]; blockTime: UnixTimestamp | undefined; @@ -104,9 +104,9 @@ export interface MessageAddressTableLookup { export interface TransactionStatusMeta { err: TransactionError | undefined; - fee: string; - preBalances: string[]; - postBalances: string[]; + fee: bigint; + preBalances: bigint[]; + postBalances: bigint[]; innerInstructions: InnerInstructions[]; innerInstructionsNone: boolean; logMessages: string[]; @@ -124,10 +124,10 @@ export interface TransactionStatusMeta { * Set to `None` for txs executed on earlier versions. 
*/ computeUnitsConsumed?: - | string + | bigint | undefined; /** Total transaction cost */ - costUnits?: string | undefined; + costUnits?: bigint | undefined; } export interface TransactionError { @@ -179,8 +179,8 @@ export interface ReturnData { export interface Reward { pubkey: string; - lamports: string; - postBalance: string; + lamports: bigint; + postBalance: bigint; rewardType: RewardType; commission: string; } @@ -191,22 +191,22 @@ export interface Rewards { } export interface UnixTimestamp { - timestamp: string; + timestamp: bigint; } export interface BlockHeight { - blockHeight: string; + blockHeight: bigint; } export interface NumPartitions { - numPartitions: string; + numPartitions: bigint; } function createBaseConfirmedBlock(): ConfirmedBlock { return { previousBlockhash: "", blockhash: "", - parentSlot: "0", + parentSlot: 0n, transactions: [], rewards: [], blockTime: undefined, @@ -223,7 +223,10 @@ export const ConfirmedBlock: MessageFns = { if (message.blockhash !== "") { writer.uint32(18).string(message.blockhash); } - if (message.parentSlot !== "0") { + if (message.parentSlot !== 0n) { + if (BigInt.asUintN(64, message.parentSlot) !== message.parentSlot) { + throw new globalThis.Error("value provided for field message.parentSlot of type uint64 too large"); + } writer.uint32(24).uint64(message.parentSlot); } for (const v of message.transactions) { @@ -272,7 +275,7 @@ export const ConfirmedBlock: MessageFns = { break; } - message.parentSlot = reader.uint64().toString(); + message.parentSlot = reader.uint64() as bigint; continue; } case 4: { @@ -328,7 +331,7 @@ export const ConfirmedBlock: MessageFns = { return { previousBlockhash: isSet(object.previousBlockhash) ? globalThis.String(object.previousBlockhash) : "", blockhash: isSet(object.blockhash) ? globalThis.String(object.blockhash) : "", - parentSlot: isSet(object.parentSlot) ? globalThis.String(object.parentSlot) : "0", + parentSlot: isSet(object.parentSlot) ? 
BigInt(object.parentSlot) : 0n, transactions: globalThis.Array.isArray(object?.transactions) ? object.transactions.map((e: any) => ConfirmedTransaction.fromJSON(e)) : [], @@ -347,8 +350,8 @@ export const ConfirmedBlock: MessageFns = { if (message.blockhash !== "") { obj.blockhash = message.blockhash; } - if (message.parentSlot !== "0") { - obj.parentSlot = message.parentSlot; + if (message.parentSlot !== 0n) { + obj.parentSlot = message.parentSlot.toString(); } if (message.transactions?.length) { obj.transactions = message.transactions.map((e) => ConfirmedTransaction.toJSON(e)); @@ -375,7 +378,7 @@ export const ConfirmedBlock: MessageFns = { const message = createBaseConfirmedBlock(); message.previousBlockhash = object.previousBlockhash ?? ""; message.blockhash = object.blockhash ?? ""; - message.parentSlot = object.parentSlot ?? "0"; + message.parentSlot = object.parentSlot ?? 0n; message.transactions = object.transactions?.map((e) => ConfirmedTransaction.fromPartial(e)) || []; message.rewards = object.rewards?.map((e) => Reward.fromPartial(e)) || []; message.blockTime = (object.blockTime !== undefined && object.blockTime !== null) @@ -898,7 +901,7 @@ export const MessageAddressTableLookup: MessageFns = function createBaseTransactionStatusMeta(): TransactionStatusMeta { return { err: undefined, - fee: "0", + fee: 0n, preBalances: [], postBalances: [], innerInstructions: [], @@ -922,16 +925,25 @@ export const TransactionStatusMeta: MessageFns = { if (message.err !== undefined) { TransactionError.encode(message.err, writer.uint32(10).fork()).join(); } - if (message.fee !== "0") { + if (message.fee !== 0n) { + if (BigInt.asUintN(64, message.fee) !== message.fee) { + throw new globalThis.Error("value provided for field message.fee of type uint64 too large"); + } writer.uint32(16).uint64(message.fee); } writer.uint32(26).fork(); for (const v of message.preBalances) { + if (BigInt.asUintN(64, v) !== v) { + throw new globalThis.Error("a value provided in array field 
preBalances of type uint64 is too large"); + } writer.uint64(v); } writer.join(); writer.uint32(34).fork(); for (const v of message.postBalances) { + if (BigInt.asUintN(64, v) !== v) { + throw new globalThis.Error("a value provided in array field postBalances of type uint64 is too large"); + } writer.uint64(v); } writer.join(); @@ -969,9 +981,15 @@ export const TransactionStatusMeta: MessageFns = { writer.uint32(120).bool(message.returnDataNone); } if (message.computeUnitsConsumed !== undefined) { + if (BigInt.asUintN(64, message.computeUnitsConsumed) !== message.computeUnitsConsumed) { + throw new globalThis.Error("value provided for field message.computeUnitsConsumed of type uint64 too large"); + } writer.uint32(128).uint64(message.computeUnitsConsumed); } if (message.costUnits !== undefined) { + if (BigInt.asUintN(64, message.costUnits) !== message.costUnits) { + throw new globalThis.Error("value provided for field message.costUnits of type uint64 too large"); + } writer.uint32(136).uint64(message.costUnits); } return writer; @@ -997,12 +1015,12 @@ export const TransactionStatusMeta: MessageFns = { break; } - message.fee = reader.uint64().toString(); + message.fee = reader.uint64() as bigint; continue; } case 3: { if (tag === 24) { - message.preBalances.push(reader.uint64().toString()); + message.preBalances.push(reader.uint64() as bigint); continue; } @@ -1010,7 +1028,7 @@ export const TransactionStatusMeta: MessageFns = { if (tag === 26) { const end2 = reader.uint32() + reader.pos; while (reader.pos < end2) { - message.preBalances.push(reader.uint64().toString()); + message.preBalances.push(reader.uint64() as bigint); } continue; @@ -1020,7 +1038,7 @@ export const TransactionStatusMeta: MessageFns = { } case 4: { if (tag === 32) { - message.postBalances.push(reader.uint64().toString()); + message.postBalances.push(reader.uint64() as bigint); continue; } @@ -1028,7 +1046,7 @@ export const TransactionStatusMeta: MessageFns = { if (tag === 34) { const end2 = 
reader.uint32() + reader.pos; while (reader.pos < end2) { - message.postBalances.push(reader.uint64().toString()); + message.postBalances.push(reader.uint64() as bigint); } continue; @@ -1129,7 +1147,7 @@ export const TransactionStatusMeta: MessageFns = { break; } - message.computeUnitsConsumed = reader.uint64().toString(); + message.computeUnitsConsumed = reader.uint64() as bigint; continue; } case 17: { @@ -1137,7 +1155,7 @@ export const TransactionStatusMeta: MessageFns = { break; } - message.costUnits = reader.uint64().toString(); + message.costUnits = reader.uint64() as bigint; continue; } } @@ -1152,12 +1170,10 @@ export const TransactionStatusMeta: MessageFns = { fromJSON(object: any): TransactionStatusMeta { return { err: isSet(object.err) ? TransactionError.fromJSON(object.err) : undefined, - fee: isSet(object.fee) ? globalThis.String(object.fee) : "0", - preBalances: globalThis.Array.isArray(object?.preBalances) - ? object.preBalances.map((e: any) => globalThis.String(e)) - : [], + fee: isSet(object.fee) ? BigInt(object.fee) : 0n, + preBalances: globalThis.Array.isArray(object?.preBalances) ? object.preBalances.map((e: any) => BigInt(e)) : [], postBalances: globalThis.Array.isArray(object?.postBalances) - ? object.postBalances.map((e: any) => globalThis.String(e)) + ? object.postBalances.map((e: any) => BigInt(e)) : [], innerInstructions: globalThis.Array.isArray(object?.innerInstructions) ? object.innerInstructions.map((e: any) => InnerInstructions.fromJSON(e)) @@ -1175,7 +1191,9 @@ export const TransactionStatusMeta: MessageFns = { postTokenBalances: globalThis.Array.isArray(object?.postTokenBalances) ? object.postTokenBalances.map((e: any) => TokenBalance.fromJSON(e)) : [], - rewards: globalThis.Array.isArray(object?.rewards) ? object.rewards.map((e: any) => Reward.fromJSON(e)) : [], + rewards: globalThis.Array.isArray(object?.rewards) + ? 
object.rewards.map((e: any) => Reward.fromJSON(e)) + : [], loadedWritableAddresses: globalThis.Array.isArray(object?.loadedWritableAddresses) ? object.loadedWritableAddresses.map((e: any) => bytesFromBase64(e)) : [], @@ -1184,10 +1202,8 @@ export const TransactionStatusMeta: MessageFns = { : [], returnData: isSet(object.returnData) ? ReturnData.fromJSON(object.returnData) : undefined, returnDataNone: isSet(object.returnDataNone) ? globalThis.Boolean(object.returnDataNone) : false, - computeUnitsConsumed: isSet(object.computeUnitsConsumed) - ? globalThis.String(object.computeUnitsConsumed) - : undefined, - costUnits: isSet(object.costUnits) ? globalThis.String(object.costUnits) : undefined, + computeUnitsConsumed: isSet(object.computeUnitsConsumed) ? BigInt(object.computeUnitsConsumed) : undefined, + costUnits: isSet(object.costUnits) ? BigInt(object.costUnits) : undefined, }; }, @@ -1196,14 +1212,14 @@ export const TransactionStatusMeta: MessageFns = { if (message.err !== undefined) { obj.err = TransactionError.toJSON(message.err); } - if (message.fee !== "0") { - obj.fee = message.fee; + if (message.fee !== 0n) { + obj.fee = message.fee.toString(); } if (message.preBalances?.length) { - obj.preBalances = message.preBalances; + obj.preBalances = message.preBalances.map((e) => e.toString()); } if (message.postBalances?.length) { - obj.postBalances = message.postBalances; + obj.postBalances = message.postBalances.map((e) => e.toString()); } if (message.innerInstructions?.length) { obj.innerInstructions = message.innerInstructions.map((e) => InnerInstructions.toJSON(e)); @@ -1239,10 +1255,10 @@ export const TransactionStatusMeta: MessageFns = { obj.returnDataNone = message.returnDataNone; } if (message.computeUnitsConsumed !== undefined) { - obj.computeUnitsConsumed = message.computeUnitsConsumed; + obj.computeUnitsConsumed = message.computeUnitsConsumed.toString(); } if (message.costUnits !== undefined) { - obj.costUnits = message.costUnits; + obj.costUnits = 
message.costUnits.toString(); } return obj; }, @@ -1255,7 +1271,7 @@ export const TransactionStatusMeta: MessageFns = { message.err = (object.err !== undefined && object.err !== null) ? TransactionError.fromPartial(object.err) : undefined; - message.fee = object.fee ?? "0"; + message.fee = object.fee ?? 0n; message.preBalances = object.preBalances?.map((e) => e) || []; message.postBalances = object.postBalances?.map((e) => e) || []; message.innerInstructions = object.innerInstructions?.map((e) => InnerInstructions.fromPartial(e)) || []; @@ -1924,7 +1940,7 @@ export const ReturnData: MessageFns = { }; function createBaseReward(): Reward { - return { pubkey: "", lamports: "0", postBalance: "0", rewardType: 0, commission: "" }; + return { pubkey: "", lamports: 0n, postBalance: 0n, rewardType: 0, commission: "" }; } export const Reward: MessageFns = { @@ -1932,10 +1948,16 @@ export const Reward: MessageFns = { if (message.pubkey !== "") { writer.uint32(10).string(message.pubkey); } - if (message.lamports !== "0") { + if (message.lamports !== 0n) { + if (BigInt.asIntN(64, message.lamports) !== message.lamports) { + throw new globalThis.Error("value provided for field message.lamports of type int64 too large"); + } writer.uint32(16).int64(message.lamports); } - if (message.postBalance !== "0") { + if (message.postBalance !== 0n) { + if (BigInt.asUintN(64, message.postBalance) !== message.postBalance) { + throw new globalThis.Error("value provided for field message.postBalance of type uint64 too large"); + } writer.uint32(24).uint64(message.postBalance); } if (message.rewardType !== 0) { @@ -1967,7 +1989,7 @@ export const Reward: MessageFns = { break; } - message.lamports = reader.int64().toString(); + message.lamports = reader.int64() as bigint; continue; } case 3: { @@ -1975,7 +1997,7 @@ export const Reward: MessageFns = { break; } - message.postBalance = reader.uint64().toString(); + message.postBalance = reader.uint64() as bigint; continue; } case 4: { @@ -2006,8 
+2028,8 @@ export const Reward: MessageFns = { fromJSON(object: any): Reward { return { pubkey: isSet(object.pubkey) ? globalThis.String(object.pubkey) : "", - lamports: isSet(object.lamports) ? globalThis.String(object.lamports) : "0", - postBalance: isSet(object.postBalance) ? globalThis.String(object.postBalance) : "0", + lamports: isSet(object.lamports) ? BigInt(object.lamports) : 0n, + postBalance: isSet(object.postBalance) ? BigInt(object.postBalance) : 0n, rewardType: isSet(object.rewardType) ? rewardTypeFromJSON(object.rewardType) : 0, commission: isSet(object.commission) ? globalThis.String(object.commission) : "", }; @@ -2018,11 +2040,11 @@ export const Reward: MessageFns = { if (message.pubkey !== "") { obj.pubkey = message.pubkey; } - if (message.lamports !== "0") { - obj.lamports = message.lamports; + if (message.lamports !== 0n) { + obj.lamports = message.lamports.toString(); } - if (message.postBalance !== "0") { - obj.postBalance = message.postBalance; + if (message.postBalance !== 0n) { + obj.postBalance = message.postBalance.toString(); } if (message.rewardType !== 0) { obj.rewardType = rewardTypeToJSON(message.rewardType); @@ -2039,8 +2061,8 @@ export const Reward: MessageFns = { fromPartial, I>>(object: I): Reward { const message = createBaseReward(); message.pubkey = object.pubkey ?? ""; - message.lamports = object.lamports ?? "0"; - message.postBalance = object.postBalance ?? "0"; + message.lamports = object.lamports ?? 0n; + message.postBalance = object.postBalance ?? 0n; message.rewardType = object.rewardType ?? 0; message.commission = object.commission ?? 
""; return message; @@ -2126,12 +2148,15 @@ export const Rewards: MessageFns = { }; function createBaseUnixTimestamp(): UnixTimestamp { - return { timestamp: "0" }; + return { timestamp: 0n }; } export const UnixTimestamp: MessageFns = { encode(message: UnixTimestamp, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { - if (message.timestamp !== "0") { + if (message.timestamp !== 0n) { + if (BigInt.asIntN(64, message.timestamp) !== message.timestamp) { + throw new globalThis.Error("value provided for field message.timestamp of type int64 too large"); + } writer.uint32(8).int64(message.timestamp); } return writer; @@ -2149,7 +2174,7 @@ export const UnixTimestamp: MessageFns = { break; } - message.timestamp = reader.int64().toString(); + message.timestamp = reader.int64() as bigint; continue; } } @@ -2162,13 +2187,13 @@ export const UnixTimestamp: MessageFns = { }, fromJSON(object: any): UnixTimestamp { - return { timestamp: isSet(object.timestamp) ? globalThis.String(object.timestamp) : "0" }; + return { timestamp: isSet(object.timestamp) ? BigInt(object.timestamp) : 0n }; }, toJSON(message: UnixTimestamp): unknown { const obj: any = {}; - if (message.timestamp !== "0") { - obj.timestamp = message.timestamp; + if (message.timestamp !== 0n) { + obj.timestamp = message.timestamp.toString(); } return obj; }, @@ -2178,18 +2203,21 @@ export const UnixTimestamp: MessageFns = { }, fromPartial, I>>(object: I): UnixTimestamp { const message = createBaseUnixTimestamp(); - message.timestamp = object.timestamp ?? "0"; + message.timestamp = object.timestamp ?? 
0n; return message; }, }; function createBaseBlockHeight(): BlockHeight { - return { blockHeight: "0" }; + return { blockHeight: 0n }; } export const BlockHeight: MessageFns = { encode(message: BlockHeight, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { - if (message.blockHeight !== "0") { + if (message.blockHeight !== 0n) { + if (BigInt.asUintN(64, message.blockHeight) !== message.blockHeight) { + throw new globalThis.Error("value provided for field message.blockHeight of type uint64 too large"); + } writer.uint32(8).uint64(message.blockHeight); } return writer; @@ -2207,7 +2235,7 @@ export const BlockHeight: MessageFns = { break; } - message.blockHeight = reader.uint64().toString(); + message.blockHeight = reader.uint64() as bigint; continue; } } @@ -2220,13 +2248,13 @@ export const BlockHeight: MessageFns = { }, fromJSON(object: any): BlockHeight { - return { blockHeight: isSet(object.blockHeight) ? globalThis.String(object.blockHeight) : "0" }; + return { blockHeight: isSet(object.blockHeight) ? BigInt(object.blockHeight) : 0n }; }, toJSON(message: BlockHeight): unknown { const obj: any = {}; - if (message.blockHeight !== "0") { - obj.blockHeight = message.blockHeight; + if (message.blockHeight !== 0n) { + obj.blockHeight = message.blockHeight.toString(); } return obj; }, @@ -2236,18 +2264,21 @@ export const BlockHeight: MessageFns = { }, fromPartial, I>>(object: I): BlockHeight { const message = createBaseBlockHeight(); - message.blockHeight = object.blockHeight ?? "0"; + message.blockHeight = object.blockHeight ?? 
0n; return message; }, }; function createBaseNumPartitions(): NumPartitions { - return { numPartitions: "0" }; + return { numPartitions: 0n }; } export const NumPartitions: MessageFns = { encode(message: NumPartitions, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { - if (message.numPartitions !== "0") { + if (message.numPartitions !== 0n) { + if (BigInt.asUintN(64, message.numPartitions) !== message.numPartitions) { + throw new globalThis.Error("value provided for field message.numPartitions of type uint64 too large"); + } writer.uint32(8).uint64(message.numPartitions); } return writer; @@ -2265,7 +2296,7 @@ export const NumPartitions: MessageFns = { break; } - message.numPartitions = reader.uint64().toString(); + message.numPartitions = reader.uint64() as bigint; continue; } } @@ -2278,13 +2309,13 @@ export const NumPartitions: MessageFns = { }, fromJSON(object: any): NumPartitions { - return { numPartitions: isSet(object.numPartitions) ? globalThis.String(object.numPartitions) : "0" }; + return { numPartitions: isSet(object.numPartitions) ? BigInt(object.numPartitions) : 0n }; }, toJSON(message: NumPartitions): unknown { const obj: any = {}; - if (message.numPartitions !== "0") { - obj.numPartitions = message.numPartitions; + if (message.numPartitions !== 0n) { + obj.numPartitions = message.numPartitions.toString(); } return obj; }, @@ -2294,7 +2325,7 @@ export const NumPartitions: MessageFns = { }, fromPartial, I>>(object: I): NumPartitions { const message = createBaseNumPartitions(); - message.numPartitions = object.numPartitions ?? "0"; + message.numPartitions = object.numPartitions ?? 0n; return message; }, }; @@ -2324,7 +2355,7 @@ function base64FromBytes(arr: Uint8Array): string { } } -type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; +type Builtin = Date | Function | Uint8Array | string | number | boolean | bigint | undefined; export type DeepPartial = T extends Builtin ? T : T extends globalThis.Array ? 
globalThis.Array> diff --git a/typescript-sdk/src/runtime/state-machine.ts b/typescript-sdk/src/runtime/state-machine.ts index 5f6002e..cdb4cba 100644 --- a/typescript-sdk/src/runtime/state-machine.ts +++ b/typescript-sdk/src/runtime/state-machine.ts @@ -1,35 +1,18 @@ +import { BlockchainEvent } from "../grpc/fumarole"; +import { CommitmentLevel } from "../grpc/geyser"; import { Queue as Deque } from "./queue"; // Constants export const DEFAULT_SLOT_MEMORY_RETENTION = 10000; -// Solana commitment levels -export enum CommitmentLevel { - PROCESSED = 0, - CONFIRMED = 1, - FINALIZED = 2, -} - -// Interface matching the gRPC BlockchainEvent type -export interface BlockchainEvent { - offset: string; - slot: number; - parentSlot?: number; - commitmentLevel: CommitmentLevel; - deadError?: string; - blockchainId: Uint8Array; - blockUid: Uint8Array; - numShards: number; -} - // Type aliases export type FumeBlockchainId = Uint8Array; // Equivalent to [u8; 16] export type FumeBlockUID = Uint8Array; // Equivalent to [u8; 16] export type FumeNumShards = number; // Equivalent to u32 export type FumeShardIdx = number; // Equivalent to u32 -export type FumeOffset = string; // Equivalent to i64 as string for large numbers -export type FumeSessionSequence = number; // Equivalent to u64 -export type Slot = number; // From solana_sdk::clock::Slot +export type FumeOffset = bigint; // Equivalent to i64 as string for large numbers +export type FumeSessionSequence = bigint; // Equivalent to u64 +export type Slot = bigint; // From solana_sdk::clock::Slot // Data structures export class FumeDownloadRequest { @@ -94,13 +77,13 @@ export class FumaroleSM { private inflightSlotShardDownload = new Map(); private blockedSlotStatusUpdate = new Map>(); private slotStatusUpdateQueue = new Deque(); - private processedOffset: [number, string][] = []; // Min-heap for (sequence, offset) - private maxSlotDetected = 0; + private processedOffset: [bigint, bigint][] = []; // Min-heap for (sequence, offset) + 
private maxSlotDetected = 0n; private unprocessedBlockchainEvent = new Deque< [FumeSessionSequence, BlockchainEvent] >(); - private sequence = 1; - private lastProcessedFumeSequence = 0; + private sequence = 1n; + private lastProcessedFumeSequence = 0n; private sequenceToOffset = new Map(); private _committableOffset: FumeOffset; private _lastCommittedOffset: FumeOffset; @@ -128,9 +111,9 @@ export class FumaroleSM { this._lastCommittedOffset = offset; } - private nextSequence(): number { + private nextSequence(): bigint { const ret = this.sequence; - this.sequence += 1; + this.sequence += 1n; return ret; } @@ -357,13 +340,17 @@ export class FumaroleSM { // Use negative values for the min-heap (to simulate max-heap behavior) this.processedOffset.push([-eventSeqNumber, fumeOffset]); - this.processedOffset.sort((a, b) => a[0] - b[0]); // Keep sorted as a min-heap + this.processedOffset.sort((a, b) => { + if (a[0] < b[0]) return -1; + if (a[0] > b[0]) return 1; + return 0; + });// Keep sorted as a min-heap while (this.processedOffset.length > 0) { const [seq, offset] = this.processedOffset[0]; const positiveSeq = -seq; // Convert back to positive - if (positiveSeq !== this.lastProcessedFumeSequence + 1) { + if (positiveSeq !== this.lastProcessedFumeSequence + 1n) { break; } From c1b9a941f803017a3a8361c1202ee331d5b422de Mon Sep 17 00:00:00 2001 From: Wilfred Almeida <60785452+WilfredAlmeida@users.noreply.github.com> Date: Sat, 16 Aug 2025 11:12:27 +0000 Subject: [PATCH 47/56] refactor: implement state-machine as per rust implementation Signed-off-by: GitHub --- typescript-sdk/src/runtime/binary-heap.ts | 76 ++++ typescript-sdk/src/runtime/state-machine.ts | 369 ++++++++++---------- 2 files changed, 263 insertions(+), 182 deletions(-) create mode 100644 typescript-sdk/src/runtime/binary-heap.ts diff --git a/typescript-sdk/src/runtime/binary-heap.ts b/typescript-sdk/src/runtime/binary-heap.ts new file mode 100644 index 0000000..df15f81 --- /dev/null +++ 
b/typescript-sdk/src/runtime/binary-heap.ts @@ -0,0 +1,76 @@ +export class BinaryHeap { + private heap: T[]; + private compare: (a: T, b: T) => number; + + constructor(compare: (a: T, b: T) => number) { + this.heap = []; + this.compare = compare; + } + + public push(value: T): void { + this.heap.push(value); + this.bubbleUp(this.heap.length - 1); + } + + public pop(): T | undefined { + if (this.heap.length === 0) return undefined; + const result = this.heap[0]; + const last = this.heap.pop()!; + if (this.heap.length > 0) { + this.heap[0] = last; + this.bubbleDown(0); + } + return result; + } + + public peek(): T | undefined { + return this.heap[0]; + } + + public get length(): number { + return this.heap.length; + } + + private bubbleUp(index: number): void { + while (index > 0) { + const parentIndex = Math.floor((index - 1) / 2); + if (this.compare(this.heap[index], this.heap[parentIndex]) >= 0) { + break; + } + [this.heap[index], this.heap[parentIndex]] = [ + this.heap[parentIndex], + this.heap[index], + ]; + index = parentIndex; + } + } + + private bubbleDown(index: number): void { + while (true) { + let smallest = index; + const leftChild = 2 * index + 1; + const rightChild = 2 * index + 2; + + if ( + leftChild < this.heap.length && + this.compare(this.heap[leftChild], this.heap[smallest]) < 0 + ) { + smallest = leftChild; + } + if ( + rightChild < this.heap.length && + this.compare(this.heap[rightChild], this.heap[smallest]) < 0 + ) { + smallest = rightChild; + } + + if (smallest === index) break; + + [this.heap[index], this.heap[smallest]] = [ + this.heap[smallest], + this.heap[index], + ]; + index = smallest; + } + } +} diff --git a/typescript-sdk/src/runtime/state-machine.ts b/typescript-sdk/src/runtime/state-machine.ts index cdb4cba..6167b1b 100644 --- a/typescript-sdk/src/runtime/state-machine.ts +++ b/typescript-sdk/src/runtime/state-machine.ts @@ -1,9 +1,26 @@ import { BlockchainEvent } from "../grpc/fumarole"; import { CommitmentLevel } from 
"../grpc/geyser"; -import { Queue as Deque } from "./queue"; +import { BinaryHeap } from "./binary-heap"; + +class Queue { + private items: T[] = []; + + push(item: T): void { + this.items.push(item); + } + + shift(): T | undefined { + return this.items.shift(); + } + + get length(): number { + return this.items.length; + } +} // Constants export const DEFAULT_SLOT_MEMORY_RETENTION = 10000; +export const MINIMUM_UNPROCESSED_BLOCKCHAIN_EVENT = 10; // Type aliases export type FumeBlockchainId = Uint8Array; // Equivalent to [u8; 16] @@ -69,309 +86,297 @@ export enum SlotDownloadState { } export class FumaroleSM { - private slotCommitmentProgression = new Map< + private slot_commitment_progression = new Map< Slot, SlotCommitmentProgression >(); - private downloadedSlot = new Set(); - private inflightSlotShardDownload = new Map(); - private blockedSlotStatusUpdate = new Map>(); - private slotStatusUpdateQueue = new Deque(); - private processedOffset: [bigint, bigint][] = []; // Min-heap for (sequence, offset) - private maxSlotDetected = 0n; - private unprocessedBlockchainEvent = new Deque< + private downloaded_slot = new Set(); + private inflight_slot_shard_download = new Map(); + private blocked_slot_status_update = new Map>(); + private slot_status_update_queue = new Queue(); + private processed_offset = new BinaryHeap<[FumeSessionSequence, FumeOffset]>( + ( + a: [FumeSessionSequence, FumeOffset], + b: [FumeSessionSequence, FumeOffset] + ) => { + // Implementing Reverse ordering as in Rust + if (a[0] === b[0]) return 0; + return a[0] > b[0] ? 
1 : -1; + } + ); + private unprocessed_blockchain_event = new Queue< [FumeSessionSequence, BlockchainEvent] >(); private sequence = 1n; - private lastProcessedFumeSequence = 0n; - private sequenceToOffset = new Map(); - private _committableOffset: FumeOffset; - private _lastCommittedOffset: FumeOffset; + private sequence_to_offset = new Map(); + public max_slot_detected: Slot = 0n; + private last_processed_fume_sequence = 0n; + public committable_offset: FumeOffset; constructor( - lastCommittedOffset: FumeOffset, - private readonly slotMemoryRetention: number + public last_committed_offset: FumeOffset, + private slot_memory_retention: number = DEFAULT_SLOT_MEMORY_RETENTION ) { - this._lastCommittedOffset = lastCommittedOffset; - this._committableOffset = lastCommittedOffset; - } - - get lastCommittedOffset(): FumeOffset { - return this._lastCommittedOffset; - } - - get committableOffset(): FumeOffset { - return this._committableOffset; + this.committable_offset = last_committed_offset; } public updateCommittedOffset(offset: FumeOffset): void { - if (BigInt(offset) < BigInt(this._lastCommittedOffset)) { - throw new Error("Offset must be >= last committed offset"); + if (offset < this.last_committed_offset) { + throw new Error( + "offset must be greater than or equal to last committed offset" + ); } - this._lastCommittedOffset = offset; + this.last_committed_offset = offset; } - private nextSequence(): bigint { + private nextSequence(): FumeSessionSequence { const ret = this.sequence; - this.sequence += 1n; + this.sequence = this.sequence + 1n; return ret; } public gc(): void { - while (this.downloadedSlot.size > this.slotMemoryRetention) { - // Get the first slot (oldest) from the set - const slot = this.downloadedSlot.values().next().value; - if (!slot) break; - - this.downloadedSlot.delete(slot); - this.slotCommitmentProgression.delete(slot); - this.inflightSlotShardDownload.delete(slot); - this.blockedSlotStatusUpdate.delete(slot); + while 
(this.downloaded_slot.size > this.slot_memory_retention) { + const firstSlot = Array.from(this.downloaded_slot)[0]; + if (!firstSlot) break; + + this.downloaded_slot.delete(firstSlot); + this.slot_commitment_progression.delete(firstSlot); + this.inflight_slot_shard_download.delete(firstSlot); + this.blocked_slot_status_update.delete(firstSlot); } } - public async queueBlockchainEvent(events: BlockchainEvent[]): Promise { + public queueBlockchainEvent(events: BlockchainEvent[]): void { for (const event of events) { - if (BigInt(event.offset) < BigInt(this._lastCommittedOffset)) { + if (event.offset < this.last_committed_offset) { continue; } - if (event.slot > this.maxSlotDetected) { - this.maxSlotDetected = event.slot; + if (event.slot > this.max_slot_detected) { + this.max_slot_detected = event.slot; } const sequence = this.nextSequence(); - this.sequenceToOffset.set(sequence, event.offset); + this.sequence_to_offset.set(sequence, event.offset); - if (this.downloadedSlot.has(event.slot)) { + if (this.downloaded_slot.has(event.slot)) { const fumeStatus = new FumeSlotStatus( sequence, event.offset, event.slot, event.parentSlot, - event.commitmentLevel, + event.commitmentLevel as CommitmentLevel, event.deadError ); - if (this.inflightSlotShardDownload.has(event.slot)) { - let blockedQueue = this.blockedSlotStatusUpdate.get(event.slot); - if (!blockedQueue) { - blockedQueue = new Deque(); - this.blockedSlotStatusUpdate.set(event.slot, blockedQueue); + if (this.inflight_slot_shard_download.has(event.slot)) { + // This event is blocked by a slot download currently in progress + let queue = this.blocked_slot_status_update.get(event.slot); + if (!queue) { + queue = new Queue(); + this.blocked_slot_status_update.set(event.slot, queue); } - await blockedQueue.put(fumeStatus); + queue.push(fumeStatus); } else { - await this.slotStatusUpdateQueue.put(fumeStatus); + // Fast track this event + this.slot_status_update_queue.push(fumeStatus); } } else { - await 
this.unprocessedBlockchainEvent.put([sequence, event]); + this.unprocessed_blockchain_event.push([sequence, event]); } } } - public async makeSlotDownloadProgress( + public makeSlotDownloadProgress( slot: Slot, shardIdx: FumeShardIdx - ): Promise { - const downloadProgress = this.inflightSlotShardDownload.get(slot); + ): SlotDownloadState { + const downloadProgress = this.inflight_slot_shard_download.get(slot); if (!downloadProgress) { - throw new Error("Slot not in download"); + throw new Error("slot not in download"); } const downloadState = downloadProgress.doProgress(shardIdx); if (downloadState === SlotDownloadState.Done) { - this.inflightSlotShardDownload.delete(slot); - this.downloadedSlot.add(slot); - - if (!this.slotCommitmentProgression.has(slot)) { - this.slotCommitmentProgression.set( + this.inflight_slot_shard_download.delete(slot); + this.downloaded_slot.add(slot); + if (!this.slot_commitment_progression.has(slot)) { + this.slot_commitment_progression.set( slot, new SlotCommitmentProgression() ); } - const blockedStatuses = this.blockedSlotStatusUpdate.get(slot); - if (blockedStatuses) { - // Move all blocked statuses to the main queue - while (!blockedStatuses.isEmpty()) { - const status = await blockedStatuses.get(); - if (status) await this.slotStatusUpdateQueue.put(status); - } - this.blockedSlotStatusUpdate.delete(slot); + const blockedSlotStatus = + this.blocked_slot_status_update.get(slot) ?? 
+ new Queue(); + this.blocked_slot_status_update.delete(slot); + while (blockedSlotStatus.length > 0) { + const status = blockedSlotStatus.shift(); + if (status) this.slot_status_update_queue.push(status); } } - return downloadState; } - public async popNextSlotStatus(): Promise { - while (!this.slotStatusUpdateQueue.isEmpty()) { - const slotStatus = await this.slotStatusUpdateQueue.get(); - if (!slotStatus) continue; + public popNextSlotStatus(): FumeSlotStatus | undefined { + while (this.slot_status_update_queue.length > 0) { + const slotStatus = this.slot_status_update_queue.shift(); + if (!slotStatus) return undefined; - const commitmentHistory = this.slotCommitmentProgression.get( + const commitmentHistory = this.slot_commitment_progression.get( slotStatus.slot ); - if ( - commitmentHistory && - !commitmentHistory.hasProcessedCommitment(slotStatus.commitmentLevel) - ) { - commitmentHistory.addProcessedCommitment(slotStatus.commitmentLevel); - return slotStatus; - } else if (!commitmentHistory) { - throw new Error("Slot status should not be available here"); + if (commitmentHistory) { + if ( + !commitmentHistory.hasProcessedCommitment(slotStatus.commitmentLevel) + ) { + commitmentHistory.addProcessedCommitment(slotStatus.commitmentLevel); + return slotStatus; + } + // We already processed this commitment level + continue; + } else { + // This should be unreachable as per Rust implementation + throw new Error("slot status should not be available here"); } } - return null; + return undefined; } - private makeSureSlotCommitmentProgressionExists( + private makeSlotCommitmentProgressionExists( slot: Slot ): SlotCommitmentProgression { - let progression = this.slotCommitmentProgression.get(slot); + let progression = this.slot_commitment_progression.get(slot); if (!progression) { progression = new SlotCommitmentProgression(); - this.slotCommitmentProgression.set(slot, progression); + this.slot_commitment_progression.set(slot, progression); } return progression; } - 
public async popSlotToDownload( - commitment = CommitmentLevel.PROCESSED - ): Promise { - while (!this.unprocessedBlockchainEvent.isEmpty()) { - const eventPair = await this.unprocessedBlockchainEvent.get(); - if (!eventPair) continue; + public popSlotToDownload( + commitment?: CommitmentLevel + ): FumeDownloadRequest | undefined { + while (this.unprocessed_blockchain_event.length > 0) { + const minCommitment = commitment ?? CommitmentLevel.PROCESSED; + const next = this.unprocessed_blockchain_event.shift(); + if (!next) return undefined; + const [sessionSequence, event] = next; + if (!event) return undefined; - const [sessionSequence, blockchainEvent] = eventPair; - const eventCl = blockchainEvent.commitmentLevel; + const eventCommitmentLevel = event.commitmentLevel as CommitmentLevel; - if (eventCl < commitment) { - await this.slotStatusUpdateQueue.put( + if (eventCommitmentLevel !== minCommitment) { + this.slot_status_update_queue.push( new FumeSlotStatus( sessionSequence, - blockchainEvent.offset, - blockchainEvent.slot, - blockchainEvent.parentSlot, - eventCl, - blockchainEvent.deadError + event.offset, + event.slot, + event.parentSlot, + eventCommitmentLevel, + event.deadError ) ); - this.makeSureSlotCommitmentProgressionExists(blockchainEvent.slot); + this.makeSlotCommitmentProgressionExists(event.slot); continue; } - if (this.downloadedSlot.has(blockchainEvent.slot)) { - this.makeSureSlotCommitmentProgressionExists(blockchainEvent.slot); - const progression = this.slotCommitmentProgression.get( - blockchainEvent.slot - ); - if (progression && progression.hasProcessedCommitment(eventCl)) { + if (this.downloaded_slot.has(event.slot)) { + this.makeSlotCommitmentProgressionExists(event.slot); + const progression = this.slot_commitment_progression.get(event.slot); + if (!progression) { + throw new Error("slot status should not be available here"); + } + + if (progression.hasProcessedCommitment(eventCommitmentLevel)) { this.markEventAsProcessed(sessionSequence); 
continue; } - await this.slotStatusUpdateQueue.put( + this.slot_status_update_queue.push( new FumeSlotStatus( sessionSequence, - blockchainEvent.offset, - blockchainEvent.slot, - blockchainEvent.parentSlot, - eventCl, - blockchainEvent.deadError + event.offset, + event.slot, + event.parentSlot, + eventCommitmentLevel, + event.deadError ) ); } else { - const blockchainId = new Uint8Array(blockchainEvent.blockchainId); - const blockUid = new Uint8Array(blockchainEvent.blockUid); - if (!this.inflightSlotShardDownload.has(blockchainEvent.slot)) { - const downloadRequest = new FumeDownloadRequest( - blockchainEvent.slot, - blockchainId, - blockUid, - blockchainEvent.numShards, - eventCl - ); - - const downloadProgress = new SlotDownloadProgress( - blockchainEvent.numShards - ); - this.inflightSlotShardDownload.set( - blockchainEvent.slot, - downloadProgress - ); - - let blockedQueue = this.blockedSlotStatusUpdate.get( - blockchainEvent.slot - ); - if (!blockedQueue) { - blockedQueue = new Deque(); - this.blockedSlotStatusUpdate.set( - blockchainEvent.slot, - blockedQueue - ); - } + let queue = this.blocked_slot_status_update.get(event.slot); + if (!queue) { + queue = new Queue(); + this.blocked_slot_status_update.set(event.slot, queue); + } + queue.push( + new FumeSlotStatus( + sessionSequence, + event.offset, + event.slot, + event.parentSlot, + eventCommitmentLevel, + event.deadError + ) + ); - await blockedQueue.put( - new FumeSlotStatus( - sessionSequence, - blockchainEvent.offset, - blockchainEvent.slot, - blockchainEvent.parentSlot, - eventCl, - blockchainEvent.deadError - ) + if (!this.inflight_slot_shard_download.has(event.slot)) { + const downloadRequest = new FumeDownloadRequest( + event.slot, + event.blockchainId, + event.blockUid, + event.numShards, + eventCommitmentLevel ); - + const downloadProgress = new SlotDownloadProgress(event.numShards); + this.inflight_slot_shard_download.set(event.slot, downloadProgress); return downloadRequest; } } } - return null; 
+ return undefined; + } + + public slotStatusUpdateQueueLen(): number { + return this.slot_status_update_queue.length; } public markEventAsProcessed(eventSeqNumber: FumeSessionSequence): void { - const fumeOffset = this.sequenceToOffset.get(eventSeqNumber); + const fumeOffset = this.sequence_to_offset.get(eventSeqNumber); if (!fumeOffset) { - throw new Error("Event sequence number not found"); + throw new Error("event sequence number not found"); } - this.sequenceToOffset.delete(eventSeqNumber); - - // Use negative values for the min-heap (to simulate max-heap behavior) - this.processedOffset.push([-eventSeqNumber, fumeOffset]); - this.processedOffset.sort((a, b) => { - if (a[0] < b[0]) return -1; - if (a[0] > b[0]) return 1; - return 0; - });// Keep sorted as a min-heap + this.sequence_to_offset.delete(eventSeqNumber); + this.processed_offset.push([eventSeqNumber, fumeOffset]); - while (this.processedOffset.length > 0) { - const [seq, offset] = this.processedOffset[0]; - const positiveSeq = -seq; // Convert back to positive + while (true) { + const tuple = this.processed_offset.peek(); + if (!tuple) break; - if (positiveSeq !== this.lastProcessedFumeSequence + 1n) { + const [blockedEventSeqNumber2, fumeOffset2] = tuple; + if (blockedEventSeqNumber2 !== this.last_processed_fume_sequence + 1n) { break; } - this.processedOffset.shift(); - this._committableOffset = offset; - this.lastProcessedFumeSequence = positiveSeq; + this.processed_offset.pop(); + this.committable_offset = fumeOffset2; + this.last_processed_fume_sequence = blockedEventSeqNumber2; } } - public slotStatusUpdateQueueLen(): number { - return this.slotStatusUpdateQueue.size(); - } - public processedOffsetQueueLen(): number { - return this.processedOffset.length; + return this.processed_offset.length; } public needNewBlockchainEvents(): boolean { return ( - this.slotStatusUpdateQueue.isEmpty() && - this.blockedSlotStatusUpdate.size === 0 + this.unprocessed_blockchain_event.length < + 
MINIMUM_UNPROCESSED_BLOCKCHAIN_EVENT || + (this.slot_status_update_queue.length === 0 && + this.blocked_slot_status_update.size === 0) ); } } From 8d8638e18c73eef8b100beffe8743905a23f0967 Mon Sep 17 00:00:00 2001 From: Wilfred Almeida <60785452+WilfredAlmeida@users.noreply.github.com> Date: Sat, 16 Aug 2025 15:18:45 +0000 Subject: [PATCH 48/56] refactor: added tests for state-machine.ts Signed-off-by: GitHub --- .gitignore | 7 +- typescript-sdk/jest.config.js | 14 ++ typescript-sdk/jest.config.mjs | 18 +++ typescript-sdk/package.json | 10 +- .../src/tests/state-machine.test.ts | 131 ++++++++++++++++++ 5 files changed, 176 insertions(+), 4 deletions(-) create mode 100644 typescript-sdk/jest.config.js create mode 100644 typescript-sdk/jest.config.mjs create mode 100644 typescript-sdk/src/tests/state-machine.test.ts diff --git a/.gitignore b/.gitignore index b77ad32..c5586bb 100644 --- a/.gitignore +++ b/.gitignore @@ -16,4 +16,9 @@ dist target node_modules dist -*.txt \ No newline at end of file +*.txt + +.vscode +yellowstone-grpc + +.env \ No newline at end of file diff --git a/typescript-sdk/jest.config.js b/typescript-sdk/jest.config.js new file mode 100644 index 0000000..e5f1d35 --- /dev/null +++ b/typescript-sdk/jest.config.js @@ -0,0 +1,14 @@ +/** @type {import('ts-jest').JestConfigWithTsJest} */ +module.exports = { + preset: "ts-jest", + testEnvironment: "node", + testMatch: ["**/src/tests/**/*.test.ts"], + transform: { + "^.+\\.tsx?$": [ + "ts-jest", + { + tsconfig: "tsconfig.json", + }, + ], + }, +}; diff --git a/typescript-sdk/jest.config.mjs b/typescript-sdk/jest.config.mjs new file mode 100644 index 0000000..bcd1f22 --- /dev/null +++ b/typescript-sdk/jest.config.mjs @@ -0,0 +1,18 @@ +export default { + preset: "ts-jest/presets/default-esm", + testEnvironment: "node", + testMatch: ["**/src/tests/**/*.test.ts"], + extensionsToTreatAsEsm: [".ts"], + transform: { + "^.+\\.tsx?$": [ + "ts-jest", + { + useESM: true, + tsconfig: "tsconfig.json", + }, + ], + }, + 
moduleNameMapper: { + "^(\\.{1,2}/.*)\\.js$": "$1", + }, +}; diff --git a/typescript-sdk/package.json b/typescript-sdk/package.json index ee1de75..2c76d32 100644 --- a/typescript-sdk/package.json +++ b/typescript-sdk/package.json @@ -12,7 +12,8 @@ "clean": "rm -rf dist", "prebuild": "npm run clean", "build": "npm run grpc-generate && tsc --project tsconfig.esm.json && tsc --project tsconfig.cjs.json && node add-js-extensions.mjs", - "grpc-generate": "mkdir -p src/grpc && protoc -I../yellowstone-grpc/yellowstone-grpc-proto/proto -I../proto --plugin=node_modules/.bin/protoc-gen-ts_proto --ts_proto_opt=forceLong=bigint --ts_proto_opt=outputServices=grpc-js --experimental_allow_proto3_optional --ts_proto_out=src/grpc fumarole.proto --ts_proto_opt=esModuleInterop=true" + "grpc-generate": "mkdir -p src/grpc && protoc -I../yellowstone-grpc/yellowstone-grpc-proto/proto -I../proto --plugin=node_modules/.bin/protoc-gen-ts_proto --ts_proto_opt=forceLong=bigint --ts_proto_opt=outputServices=grpc-js --experimental_allow_proto3_optional --ts_proto_out=src/grpc fumarole.proto --ts_proto_opt=esModuleInterop=true", + "test": "NODE_OPTIONS=--experimental-vm-modules jest --config jest.config.mjs" }, "repository": { "type": "git", @@ -32,9 +33,12 @@ ], "homepage": "https://triton.one", "devDependencies": { + "@types/jest": "^30.0.0", + "@types/node": "^22.17.1", + "jest": "^30.0.5", + "ts-jest": "^29.4.1", "ts-proto": "^2.7.7", - "typescript": "^5.2.2", - "@types/node": "^22.17.1" + "typescript": "^5.2.2" }, "dependencies": { "@bufbuild/protobuf": "^2.6.3", diff --git a/typescript-sdk/src/tests/state-machine.test.ts b/typescript-sdk/src/tests/state-machine.test.ts new file mode 100644 index 0000000..6bd882f --- /dev/null +++ b/typescript-sdk/src/tests/state-machine.test.ts @@ -0,0 +1,131 @@ +import { CommitmentLevel } from '../grpc/geyser'; +import { BlockchainEvent } from '../grpc/fumarole'; +import { FumaroleSM, DEFAULT_SLOT_MEMORY_RETENTION, SlotDownloadState } from 
'../runtime/state-machine'; +import { randomBytes } from 'crypto'; + +function randomBlockchainEvent( + offset: bigint, + slot: bigint, + commitment_level: CommitmentLevel, +): BlockchainEvent { + const nilBlockchainId = new Uint8Array(16); // UUID.nil() + const randomBlockUid = randomBytes(16); // UUID.v4() + + return { + offset, + blockchainId: nilBlockchainId, + blockUid: randomBlockUid, + numShards: 1, + slot, + parentSlot: undefined, + commitmentLevel: commitment_level as number, + blockchainShardId: 0, + deadError: undefined, + }; +} + +describe('FumaroleSM', () => { + describe('happy path', () => { + it('should handle basic flow correctly', () => { + const sm = new FumaroleSM(0n, DEFAULT_SLOT_MEMORY_RETENTION); + + const event = randomBlockchainEvent(1n, 1n, CommitmentLevel.PROCESSED); + sm.queueBlockchainEvent([event]); + + // Slot status should not be available, since we didn't download it yet + const downloadReq = sm.popSlotToDownload(); + expect(downloadReq).toBeDefined(); + expect(downloadReq?.slot).toBe(1n); + + expect(sm.popSlotToDownload()).toBeUndefined(); + expect(sm.popNextSlotStatus()).toBeUndefined(); + + const downloadState = sm.makeSlotDownloadProgress(1n, 0); + expect(downloadState).toBe(SlotDownloadState.Done); + + const status = sm.popNextSlotStatus(); + expect(status).toBeDefined(); + expect(status?.slot).toBe(1n); + expect(status?.commitmentLevel).toBe(CommitmentLevel.PROCESSED); + if (status) { + sm.markEventAsProcessed(status.sessionSequence); + } + + // All subsequent commitment level should be available right away + const event2 = { + ...event, + offset: event.offset + 1n, + commitmentLevel: CommitmentLevel.CONFIRMED as number, + }; + sm.queueBlockchainEvent([event2]); + + // It should not cause new slot download request + expect(sm.popSlotToDownload()).toBeUndefined(); + + const status2 = sm.popNextSlotStatus(); + expect(status2).toBeDefined(); + expect(status2?.slot).toBe(1n); + 
expect(status2?.commitmentLevel).toBe(CommitmentLevel.CONFIRMED); + if (status2) { + sm.markEventAsProcessed(status2.sessionSequence); + } + + expect(sm.committable_offset).toBe(event2.offset); + }); + }); + + describe('slot status deduplication', () => { + it('should deduplicate slot status', () => { + const sm = new FumaroleSM(0n, DEFAULT_SLOT_MEMORY_RETENTION); + + const event = randomBlockchainEvent(1n, 1n, CommitmentLevel.PROCESSED); + sm.queueBlockchainEvent([event]); + + // Slot status should not be available, since we didn't download it yet + expect(sm.popNextSlotStatus()).toBeUndefined(); + + const downloadReq = sm.popSlotToDownload(); + expect(downloadReq).toBeDefined(); + expect(downloadReq?.slot).toBe(1n); + + expect(sm.popSlotToDownload()).toBeUndefined(); + + sm.makeSlotDownloadProgress(1n, 0); + + const status = sm.popNextSlotStatus(); + expect(status).toBeDefined(); + expect(status?.slot).toBe(1n); + expect(status?.commitmentLevel).toBe(CommitmentLevel.PROCESSED); + + // Putting the same event back should be ignored + sm.queueBlockchainEvent([event]); + + expect(sm.popSlotToDownload()).toBeUndefined(); + expect(sm.popNextSlotStatus()).toBeUndefined(); + }); + }); + + describe('minimum commitment level', () => { + it('should handle min commitment level correctly', () => { + const sm = new FumaroleSM(0n, DEFAULT_SLOT_MEMORY_RETENTION); + + const event = randomBlockchainEvent(1n, 1n, CommitmentLevel.PROCESSED); + sm.queueBlockchainEvent([event]); + + // Slot status should not be available, since we didn't download it yet + expect(sm.popNextSlotStatus()).toBeUndefined(); + + // Use finalized commitment level here + const downloadReq = sm.popSlotToDownload(CommitmentLevel.FINALIZED); + expect(downloadReq).toBeUndefined(); + + expect(sm.popSlotToDownload()).toBeUndefined(); + + // It should not cause the slot status to be available here even if we have a finalized commitment level filtered out before + const status = sm.popNextSlotStatus(); + 
expect(status).toBeDefined(); + expect(status?.slot).toBe(1n); + expect(status?.commitmentLevel).toBe(CommitmentLevel.PROCESSED); + }); + }); +}); From 0cc1e936b84ef7666d2cbe8b06880374e5b72fe0 Mon Sep 17 00:00:00 2001 From: Wilfred Almeida <60785452+WilfredAlmeida@users.noreply.github.com> Date: Wed, 20 Aug 2025 07:17:02 +0000 Subject: [PATCH 49/56] refactor: remove slop Signed-off-by: GitHub --- .../examples/list-consumer-groups.js | 138 - .../examples/list-consumer-groups.ts | 90 - typescript-sdk/pnpm-lock.yaml | 2953 ++++++++++++++++- typescript-sdk/src/connectivity.ts | 344 +- typescript-sdk/src/grpc/fumarole.ts | 222 +- typescript-sdk/src/index.ts | 184 +- typescript-sdk/src/runtime/aio.ts | 608 ---- typescript-sdk/src/runtime/queue.ts | 70 - typescript-sdk/src/types.ts | 99 - typescript-sdk/src/types/index.ts | 166 + typescript-sdk/src/utils/aio.ts | 53 - typescript-sdk/tsconfig.json | 5 +- 12 files changed, 3538 insertions(+), 1394 deletions(-) delete mode 100644 typescript-sdk/examples/list-consumer-groups.js delete mode 100644 typescript-sdk/src/runtime/aio.ts delete mode 100644 typescript-sdk/src/runtime/queue.ts delete mode 100644 typescript-sdk/src/types.ts create mode 100644 typescript-sdk/src/types/index.ts delete mode 100644 typescript-sdk/src/utils/aio.ts diff --git a/typescript-sdk/examples/list-consumer-groups.js b/typescript-sdk/examples/list-consumer-groups.js deleted file mode 100644 index ce736af..0000000 --- a/typescript-sdk/examples/list-consumer-groups.js +++ /dev/null @@ -1,138 +0,0 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __generator = (this && this.__generator) || function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype); - return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (g && (g = 0, op[0] && (_ = 0)), _) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; - } -}; -Object.defineProperty(exports, "__esModule", { value: true }); -var src_1 = require("../src"); -function main() { - return __awaiter(this, void 0, void 0, function () { - var config, client, response, _i, _a, group, info, err_1, error_1, error_2; - return __generator(this, function (_b) { - switch (_b.label) { - case 0: - _b.trys.push([0, 13, , 14]); - config = { - endpoint: "https://fra141.nodes.rpcpool.com", // Replace with your Fumarole endpoint - xToken: "7b042cd6-ea1e-46af-b46b-653bdce119f6", - maxDecodingMessageSizeBytes: 100 * 1024 * 1024, // 100MB max message size - xMetadata: {}, // Additional metadata if needed - }; - // Connect to the Fumarole server - console.log("Connecting to Fumarole server..."); - return [4 /*yield*/, src_1.FumaroleClient.connect(config)]; - case 1: - client = _b.sent(); - console.log("Connected successfully"); - // List all consumer groups - console.log("\nFetching consumer groups..."); - _b.label = 2; - case 2: - _b.trys.push([2, 11, , 12]); - console.log("Sending listConsumerGroups request to 
server..."); - process.on("unhandledRejection", function (reason, promise) { - console.error("Unhandled Rejection at:", promise, "reason:", reason); - }); - return [4 /*yield*/, client.listConsumerGroups().catch(function (error) { - console.error("Caught error during listConsumerGroups:", error); - if (error.code) - console.error("Error code:", error.code); - if (error.details) - console.error("Error details:", error.details); - if (error.metadata) - console.error("Error metadata:", error.metadata); - if (error.stack) - console.error("Error stack:", error.stack); - throw error; - })]; - case 3: - response = _b.sent(); - console.log("\n=== ListConsumerGroups Response ==="); - console.log(JSON.stringify(response, null, 2)); - console.log("=====================================\n"); - if (!(!response.consumerGroups || response.consumerGroups.length === 0)) return [3 /*break*/, 4]; - console.log("No consumer groups found on server"); - return [3 /*break*/, 10]; - case 4: - console.log("Found ".concat(response.consumerGroups.length, " consumer groups. 
Fetching details...\n")); - _i = 0, _a = response.consumerGroups; - _b.label = 5; - case 5: - if (!(_i < _a.length)) return [3 /*break*/, 10]; - group = _a[_i]; - console.log("=== Consumer Group: ".concat(group.consumerGroupName, " ===")); - console.log("Basic info:", JSON.stringify(group, null, 2)); - _b.label = 6; - case 6: - _b.trys.push([6, 8, , 9]); - console.log("\nFetching detailed info for group: ".concat(group.consumerGroupName)); - return [4 /*yield*/, client.getConsumerGroupInfo(group.consumerGroupName)]; - case 7: - info = _b.sent(); - if (info) { - console.log("\nDetailed Group Info:"); - console.log("Status: Active"); - console.log("Server Response:", JSON.stringify(info, null, 2)); - } - else { - console.log("\nGroup Status: Not found or inactive"); - } - console.log("===============================\n"); - return [3 /*break*/, 9]; - case 8: - err_1 = _b.sent(); - console.error("\nError fetching group info from server: ".concat(err_1 instanceof Error ? err_1.message : String(err_1))); - return [3 /*break*/, 9]; - case 9: - _i++; - return [3 /*break*/, 5]; - case 10: return [3 /*break*/, 12]; - case 11: - error_1 = _b.sent(); - console.error("Error:", error_1 instanceof Error ? error_1.message : String(error_1)); - process.exit(1); - return [3 /*break*/, 12]; - case 12: return [3 /*break*/, 14]; - case 13: - error_2 = _b.sent(); - console.error("Error:", error_2 instanceof Error ? 
error_2.message : String(error_2)); - process.exit(1); - return [3 /*break*/, 14]; - case 14: return [2 /*return*/]; - } - }); - }); -} -main().catch(console.error); diff --git a/typescript-sdk/examples/list-consumer-groups.ts b/typescript-sdk/examples/list-consumer-groups.ts index 93cd678..e69de29 100644 --- a/typescript-sdk/examples/list-consumer-groups.ts +++ b/typescript-sdk/examples/list-consumer-groups.ts @@ -1,90 +0,0 @@ -import { FumaroleClient, FumaroleConfig } from "../src"; - -async function main() { - try { - // Configure the client - const config: FumaroleConfig = { - endpoint: "https://fra141.nodes.rpcpool.com", // Replace with your Fumarole endpoint - xToken: "7b042cd6-ea1e-46af-b46b-653bdce119f6", - maxDecodingMessageSizeBytes: 100 * 1024 * 1024, // 100MB max message size - xMetadata: {}, // Additional metadata if needed - }; - - // Connect to the Fumarole server - console.log("Connecting to Fumarole server..."); - const client = await FumaroleClient.connect(config); - console.log("Connected successfully"); - - // List all consumer groups - console.log("\nFetching consumer groups..."); - try { - console.log("Sending listConsumerGroups request to server..."); - process.on("unhandledRejection", (reason, promise) => { - console.error("Unhandled Rejection at:", promise, "reason:", reason); - }); - - const response = await client.listConsumerGroups().catch((error) => { - console.error("Caught error during listConsumerGroups:", error); - if (error.code) console.error("Error code:", error.code); - if (error.details) console.error("Error details:", error.details); - if (error.metadata) console.error("Error metadata:", error.metadata); - if (error.stack) console.error("Error stack:", error.stack); - throw error; - }); - - console.log("\n=== ListConsumerGroups Response ==="); - console.log(JSON.stringify(response, null, 2)); - console.log("=====================================\n"); - - if (!response.consumerGroups || response.consumerGroups.length === 0) { - 
console.log("No consumer groups found on server"); - } else { - console.log( - `Found ${response.consumerGroups.length} consumer groups. Fetching details...\n` - ); - for (const group of response.consumerGroups) { - console.log(`=== Consumer Group: ${group.consumerGroupName} ===`); - console.log("Basic info:", JSON.stringify(group, null, 2)); - - // Get detailed info for the group - try { - console.log( - `\nFetching detailed info for group: ${group.consumerGroupName}` - ); - const info = await client.getConsumerGroupInfo( - group.consumerGroupName - ); - if (info) { - console.log("\nDetailed Group Info:"); - console.log("Status: Active"); - console.log("Server Response:", JSON.stringify(info, null, 2)); - } else { - console.log("\nGroup Status: Not found or inactive"); - } - console.log("===============================\n"); - } catch (err) { - console.error( - `\nError fetching group info from server: ${ - err instanceof Error ? err.message : String(err) - }` - ); - } - } - } - } catch (error) { - console.error( - "Error:", - error instanceof Error ? error.message : String(error) - ); - process.exit(1); - } - } catch (error) { - console.error( - "Error:", - error instanceof Error ? 
error.message : String(error) - ); - process.exit(1); - } -} - -main().catch(console.error); diff --git a/typescript-sdk/pnpm-lock.yaml b/typescript-sdk/pnpm-lock.yaml index 4cd325f..95e0c97 100644 --- a/typescript-sdk/pnpm-lock.yaml +++ b/typescript-sdk/pnpm-lock.yaml @@ -21,9 +21,18 @@ importers: specifier: ^4.1.0 version: 4.1.0 devDependencies: + '@types/jest': + specifier: ^30.0.0 + version: 30.0.0 '@types/node': specifier: ^22.17.1 version: 22.17.1 + jest: + specifier: ^30.0.5 + version: 30.0.5(@types/node@22.17.1) + ts-jest: + specifier: ^29.4.1 + version: 29.4.1(@babel/core@7.28.3)(@jest/transform@30.0.5)(@jest/types@30.0.5)(babel-jest@30.0.5(@babel/core@7.28.3))(jest-util@30.0.5)(jest@30.0.5(@types/node@22.17.1))(typescript@5.9.2) ts-proto: specifier: ^2.7.7 version: 2.7.7 @@ -33,9 +42,187 @@ importers: packages: + '@ampproject/remapping@2.3.0': + resolution: {integrity: sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==} + engines: {node: '>=6.0.0'} + + '@babel/code-frame@7.27.1': + resolution: {integrity: sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==} + engines: {node: '>=6.9.0'} + + '@babel/compat-data@7.28.0': + resolution: {integrity: sha512-60X7qkglvrap8mn1lh2ebxXdZYtUcpd7gsmy9kLaBJ4i/WdY8PqTSdxyA8qraikqKQK5C1KRBKXqznrVapyNaw==} + engines: {node: '>=6.9.0'} + + '@babel/core@7.28.3': + resolution: {integrity: sha512-yDBHV9kQNcr2/sUr9jghVyz9C3Y5G2zUM2H2lo+9mKv4sFgbA8s8Z9t8D1jiTkGoO/NoIfKMyKWr4s6CN23ZwQ==} + engines: {node: '>=6.9.0'} + + '@babel/generator@7.28.3': + resolution: {integrity: sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==} + engines: {node: '>=6.9.0'} + + '@babel/helper-compilation-targets@7.27.2': + resolution: {integrity: sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==} + engines: {node: '>=6.9.0'} + + '@babel/helper-globals@7.28.0': + resolution: 
{integrity: sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==} + engines: {node: '>=6.9.0'} + + '@babel/helper-module-imports@7.27.1': + resolution: {integrity: sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==} + engines: {node: '>=6.9.0'} + + '@babel/helper-module-transforms@7.28.3': + resolution: {integrity: sha512-gytXUbs8k2sXS9PnQptz5o0QnpLL51SwASIORY6XaBKF88nsOT0Zw9szLqlSGQDP/4TljBAD5y98p2U1fqkdsw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + + '@babel/helper-plugin-utils@7.27.1': + resolution: {integrity: sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw==} + engines: {node: '>=6.9.0'} + + '@babel/helper-string-parser@7.27.1': + resolution: {integrity: sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==} + engines: {node: '>=6.9.0'} + + '@babel/helper-validator-identifier@7.27.1': + resolution: {integrity: sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==} + engines: {node: '>=6.9.0'} + + '@babel/helper-validator-option@7.27.1': + resolution: {integrity: sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==} + engines: {node: '>=6.9.0'} + + '@babel/helpers@7.28.3': + resolution: {integrity: sha512-PTNtvUQihsAsDHMOP5pfobP8C6CM4JWXmP8DrEIt46c3r2bf87Ua1zoqevsMo9g+tWDwgWrFP5EIxuBx5RudAw==} + engines: {node: '>=6.9.0'} + + '@babel/parser@7.28.3': + resolution: {integrity: sha512-7+Ey1mAgYqFAx2h0RuoxcQT5+MlG3GTV0TQrgr7/ZliKsm/MNDxVVutlWaziMq7wJNAz8MTqz55XLpWvva6StA==} + engines: {node: '>=6.0.0'} + hasBin: true + + '@babel/plugin-syntax-async-generators@7.8.4': + resolution: {integrity: sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-bigint@7.8.3': + 
resolution: {integrity: sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-class-properties@7.12.13': + resolution: {integrity: sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-class-static-block@7.14.5': + resolution: {integrity: sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-import-attributes@7.27.1': + resolution: {integrity: sha512-oFT0FrKHgF53f4vOsZGi2Hh3I35PfSmVs4IBFLFj4dnafP+hIWDLg3VyKmUHfLoLHlyxY4C7DGtmHuJgn+IGww==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-import-meta@7.10.4': + resolution: {integrity: sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-json-strings@7.8.3': + resolution: {integrity: sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-jsx@7.27.1': + resolution: {integrity: sha512-y8YTNIeKoyhGd9O0Jiyzyyqk8gdjnumGTQPsz0xOZOQ2RmkVJeZ1vmmfIvFEKqucBG6axJGBZDE/7iI5suUI/w==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-logical-assignment-operators@7.10.4': + resolution: {integrity: sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-nullish-coalescing-operator@7.8.3': + resolution: {integrity: sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==} + peerDependencies: 
+ '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-numeric-separator@7.10.4': + resolution: {integrity: sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-object-rest-spread@7.8.3': + resolution: {integrity: sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-optional-catch-binding@7.8.3': + resolution: {integrity: sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-optional-chaining@7.8.3': + resolution: {integrity: sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-private-property-in-object@7.14.5': + resolution: {integrity: sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-top-level-await@7.14.5': + resolution: {integrity: sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-typescript@7.27.1': + resolution: {integrity: sha512-xfYCBMxveHrRMnAWl1ZlPXOZjzkN82THFvLhQhFXFt81Z5HnN+EtUkZhv/zcKpmT3fzmWZB0ywiBrbC3vogbwQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/template@7.27.2': + resolution: {integrity: sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==} + engines: {node: '>=6.9.0'} + + '@babel/traverse@7.28.3': + resolution: {integrity: 
sha512-7w4kZYHneL3A6NP2nxzHvT3HCZ7puDZZjFMqDpBPECub79sTtSO5CGXDkKrTQq8ksAwfD/XI2MRFX23njdDaIQ==} + engines: {node: '>=6.9.0'} + + '@babel/types@7.28.2': + resolution: {integrity: sha512-ruv7Ae4J5dUYULmeXw1gmb7rYRz57OWCPM57pHojnLq/3Z1CK2lNSLTCVjxVk1F/TZHwOZZrOWi0ur95BbLxNQ==} + engines: {node: '>=6.9.0'} + + '@bcoe/v8-coverage@0.2.3': + resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==} + '@bufbuild/protobuf@2.6.3': resolution: {integrity: sha512-w/gJKME9mYN7ZoUAmSMAWXk4hkVpxRKvEJCb3dV5g9wwWdxTJJ0ayOJAVcNxtdqaxDyFuC0uz4RSGVacJ030PQ==} + '@emnapi/core@1.4.5': + resolution: {integrity: sha512-XsLw1dEOpkSX/WucdqUhPWP7hDxSvZiY+fsUC14h+FtQ2Ifni4znbBt8punRX+Uj2JG/uDb8nEHVKvrVlvdZ5Q==} + + '@emnapi/runtime@1.4.5': + resolution: {integrity: sha512-++LApOtY0pEEz1zrd9vy1/zXVaVJJ/EbAF3u0fXIzPJEDtnITsBGbbK0EkM72amhl/R5b+5xx0Y/QhcVOpuulg==} + + '@emnapi/wasi-threads@1.0.4': + resolution: {integrity: sha512-PJR+bOmMOPH8AtcTGAyYNiuJ3/Fcoj2XN/gBEWzDIKh254XO+mM9XoXHk5GNEhodxeMznbg7BlRojVbKN+gC6g==} + '@grpc/grpc-js@1.13.4': resolution: {integrity: sha512-GsFaMXCkMqkKIvwCQjCrwH+GHbPKBjhwo/8ZuUkWHqbI73Kky9I+pQltrlT0+MWpedCoosda53lgjYfyEPgxBg==} engines: {node: '>=12.10.0'} @@ -45,9 +232,127 @@ packages: engines: {node: '>=6'} hasBin: true + '@isaacs/cliui@8.0.2': + resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} + engines: {node: '>=12'} + + '@istanbuljs/load-nyc-config@1.1.0': + resolution: {integrity: sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==} + engines: {node: '>=8'} + + '@istanbuljs/schema@0.1.3': + resolution: {integrity: sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==} + engines: {node: '>=8'} + + '@jest/console@30.0.5': + resolution: {integrity: 
sha512-xY6b0XiL0Nav3ReresUarwl2oIz1gTnxGbGpho9/rbUWsLH0f1OD/VT84xs8c7VmH7MChnLb0pag6PhZhAdDiA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/core@30.0.5': + resolution: {integrity: sha512-fKD0OulvRsXF1hmaFgHhVJzczWzA1RXMMo9LTPuFXo9q/alDbME3JIyWYqovWsUBWSoBcsHaGPSLF9rz4l9Qeg==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + + '@jest/diff-sequences@30.0.1': + resolution: {integrity: sha512-n5H8QLDJ47QqbCNn5SuFjCRDrOLEZ0h8vAHCK5RL9Ls7Xa8AQLa/YxAc9UjFqoEDM48muwtBGjtMY5cr0PLDCw==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/environment@30.0.5': + resolution: {integrity: sha512-aRX7WoaWx1oaOkDQvCWImVQ8XNtdv5sEWgk4gxR6NXb7WBUnL5sRak4WRzIQRZ1VTWPvV4VI4mgGjNL9TeKMYA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/expect-utils@30.0.5': + resolution: {integrity: sha512-F3lmTT7CXWYywoVUGTCmom0vXq3HTTkaZyTAzIy+bXSBizB7o5qzlC9VCtq0arOa8GqmNsbg/cE9C6HLn7Szew==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/expect@30.0.5': + resolution: {integrity: sha512-6udac8KKrtTtC+AXZ2iUN/R7dp7Ydry+Fo6FPFnDG54wjVMnb6vW/XNlf7Xj8UDjAE3aAVAsR4KFyKk3TCXmTA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/fake-timers@30.0.5': + resolution: {integrity: sha512-ZO5DHfNV+kgEAeP3gK3XlpJLL4U3Sz6ebl/n68Uwt64qFFs5bv4bfEEjyRGK5uM0C90ewooNgFuKMdkbEoMEXw==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/get-type@30.0.1': + resolution: {integrity: sha512-AyYdemXCptSRFirI5EPazNxyPwAL0jXt3zceFjaj8NFiKP9pOi0bfXonf6qkf82z2t3QWPeLCWWw4stPBzctLw==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/globals@30.0.5': + resolution: {integrity: sha512-7oEJT19WW4oe6HR7oLRvHxwlJk2gev0U9px3ufs8sX9PoD1Eza68KF0/tlN7X0dq/WVsBScXQGgCldA1V9Y/jA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || 
>=24.0.0} + + '@jest/pattern@30.0.1': + resolution: {integrity: sha512-gWp7NfQW27LaBQz3TITS8L7ZCQ0TLvtmI//4OwlQRx4rnWxcPNIYjxZpDcN4+UlGxgm3jS5QPz8IPTCkb59wZA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/reporters@30.0.5': + resolution: {integrity: sha512-mafft7VBX4jzED1FwGC1o/9QUM2xebzavImZMeqnsklgcyxBto8mV4HzNSzUrryJ+8R9MFOM3HgYuDradWR+4g==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + + '@jest/schemas@30.0.5': + resolution: {integrity: sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/snapshot-utils@30.0.5': + resolution: {integrity: sha512-XcCQ5qWHLvi29UUrowgDFvV4t7ETxX91CbDczMnoqXPOIcZOxyNdSjm6kV5XMc8+HkxfRegU/MUmnTbJRzGrUQ==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/source-map@30.0.1': + resolution: {integrity: sha512-MIRWMUUR3sdbP36oyNyhbThLHyJ2eEDClPCiHVbrYAe5g3CHRArIVpBw7cdSB5fr+ofSfIb2Tnsw8iEHL0PYQg==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/test-result@30.0.5': + resolution: {integrity: sha512-wPyztnK0gbDMQAJZ43tdMro+qblDHH1Ru/ylzUo21TBKqt88ZqnKKK2m30LKmLLoKtR2lxdpCC/P3g1vfKcawQ==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/test-sequencer@30.0.5': + resolution: {integrity: sha512-Aea/G1egWoIIozmDD7PBXUOxkekXl7ueGzrsGGi1SbeKgQqCYCIf+wfbflEbf2LiPxL8j2JZGLyrzZagjvW4YQ==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/transform@30.0.5': + resolution: {integrity: sha512-Vk8amLQCmuZyy6GbBht1Jfo9RSdBtg7Lks+B0PecnjI8J+PCLQPGh7uI8Q/2wwpW2gLdiAfiHNsmekKlywULqg==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jest/types@30.0.5': + resolution: {integrity: 
sha512-aREYa3aku9SSnea4aX6bhKn4bgv3AXkgijoQgbYV3yvbiGt6z+MQ85+6mIhx9DsKW2BuB/cLR/A+tcMThx+KLQ==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + '@jridgewell/gen-mapping@0.3.13': + resolution: {integrity: sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==} + + '@jridgewell/resolve-uri@3.1.2': + resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==} + engines: {node: '>=6.0.0'} + + '@jridgewell/sourcemap-codec@1.5.5': + resolution: {integrity: sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==} + + '@jridgewell/trace-mapping@0.3.30': + resolution: {integrity: sha512-GQ7Nw5G2lTu/BtHTKfXhKHok2WGetd4XYcVKGx00SjAk8GMwgJM3zr6zORiPGuOE+/vkc90KtTosSSvaCjKb2Q==} + '@js-sdsl/ordered-map@4.4.2': resolution: {integrity: sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==} + '@napi-rs/wasm-runtime@0.2.12': + resolution: {integrity: sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==} + + '@pkgjs/parseargs@0.11.0': + resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} + engines: {node: '>=14'} + + '@pkgr/core@0.2.9': + resolution: {integrity: sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==} + engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} + '@protobufjs/aspromise@1.1.2': resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==} @@ -78,31 +383,287 @@ packages: '@protobufjs/utf8@1.1.0': resolution: {integrity: sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==} + '@sinclair/typebox@0.34.39': + resolution: {integrity: 
sha512-keEoFsevmLwAedzacnTVmra66GViRH3fhWO1M+nZ8rUgpPJyN4mcvqlGr3QMrQXx4L8KNwW0q9/BeHSEoO4teg==} + + '@sinonjs/commons@3.0.1': + resolution: {integrity: sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==} + + '@sinonjs/fake-timers@13.0.5': + resolution: {integrity: sha512-36/hTbH2uaWuGVERyC6da9YwGWnzUZXuPro/F2LfsdOsLnCojz/iSH8MxUt/FD2S5XBSVPhmArFUXcpCQ2Hkiw==} + + '@tybys/wasm-util@0.10.0': + resolution: {integrity: sha512-VyyPYFlOMNylG45GoAe0xDoLwWuowvf92F9kySqzYh8vmYm7D2u4iUJKa1tOUpS70Ku13ASrOkS4ScXFsTaCNQ==} + + '@types/babel__core@7.20.5': + resolution: {integrity: sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==} + + '@types/babel__generator@7.27.0': + resolution: {integrity: sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==} + + '@types/babel__template@7.4.4': + resolution: {integrity: sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==} + + '@types/babel__traverse@7.28.0': + resolution: {integrity: sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==} + + '@types/istanbul-lib-coverage@2.0.6': + resolution: {integrity: sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==} + + '@types/istanbul-lib-report@3.0.3': + resolution: {integrity: sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA==} + + '@types/istanbul-reports@3.0.4': + resolution: {integrity: sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==} + + '@types/jest@30.0.0': + resolution: {integrity: sha512-XTYugzhuwqWjws0CVz8QpM36+T+Dz5mTEBKhNs/esGLnCIlGdRy+Dq78NRjd7ls7r8BC8ZRMOrKlkO1hU0JOwA==} + '@types/js-yaml@4.0.9': resolution: {integrity: sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg==} 
'@types/node@22.17.1': resolution: {integrity: sha512-y3tBaz+rjspDTylNjAX37jEC3TETEFGNJL6uQDxwF9/8GLLIjW1rvVHlynyuUKMnMr1Roq8jOv3vkopBjC4/VA==} + '@types/stack-utils@2.0.3': + resolution: {integrity: sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==} + + '@types/yargs-parser@21.0.3': + resolution: {integrity: sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==} + + '@types/yargs@17.0.33': + resolution: {integrity: sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==} + + '@ungap/structured-clone@1.3.0': + resolution: {integrity: sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==} + + '@unrs/resolver-binding-android-arm-eabi@1.11.1': + resolution: {integrity: sha512-ppLRUgHVaGRWUx0R0Ut06Mjo9gBaBkg3v/8AxusGLhsIotbBLuRk51rAzqLC8gq6NyyAojEXglNjzf6R948DNw==} + cpu: [arm] + os: [android] + + '@unrs/resolver-binding-android-arm64@1.11.1': + resolution: {integrity: sha512-lCxkVtb4wp1v+EoN+HjIG9cIIzPkX5OtM03pQYkG+U5O/wL53LC4QbIeazgiKqluGeVEeBlZahHalCaBvU1a2g==} + cpu: [arm64] + os: [android] + + '@unrs/resolver-binding-darwin-arm64@1.11.1': + resolution: {integrity: sha512-gPVA1UjRu1Y/IsB/dQEsp2V1pm44Of6+LWvbLc9SDk1c2KhhDRDBUkQCYVWe6f26uJb3fOK8saWMgtX8IrMk3g==} + cpu: [arm64] + os: [darwin] + + '@unrs/resolver-binding-darwin-x64@1.11.1': + resolution: {integrity: sha512-cFzP7rWKd3lZaCsDze07QX1SC24lO8mPty9vdP+YVa3MGdVgPmFc59317b2ioXtgCMKGiCLxJ4HQs62oz6GfRQ==} + cpu: [x64] + os: [darwin] + + '@unrs/resolver-binding-freebsd-x64@1.11.1': + resolution: {integrity: sha512-fqtGgak3zX4DCB6PFpsH5+Kmt/8CIi4Bry4rb1ho6Av2QHTREM+47y282Uqiu3ZRF5IQioJQ5qWRV6jduA+iGw==} + cpu: [x64] + os: [freebsd] + + '@unrs/resolver-binding-linux-arm-gnueabihf@1.11.1': + resolution: {integrity: sha512-u92mvlcYtp9MRKmP+ZvMmtPN34+/3lMHlyMj7wXJDeXxuM0Vgzz0+PPJNsro1m3IZPYChIkn944wW8TYgGKFHw==} + cpu: [arm] + os: [linux] + + 
'@unrs/resolver-binding-linux-arm-musleabihf@1.11.1': + resolution: {integrity: sha512-cINaoY2z7LVCrfHkIcmvj7osTOtm6VVT16b5oQdS4beibX2SYBwgYLmqhBjA1t51CarSaBuX5YNsWLjsqfW5Cw==} + cpu: [arm] + os: [linux] + + '@unrs/resolver-binding-linux-arm64-gnu@1.11.1': + resolution: {integrity: sha512-34gw7PjDGB9JgePJEmhEqBhWvCiiWCuXsL9hYphDF7crW7UgI05gyBAi6MF58uGcMOiOqSJ2ybEeCvHcq0BCmQ==} + cpu: [arm64] + os: [linux] + + '@unrs/resolver-binding-linux-arm64-musl@1.11.1': + resolution: {integrity: sha512-RyMIx6Uf53hhOtJDIamSbTskA99sPHS96wxVE/bJtePJJtpdKGXO1wY90oRdXuYOGOTuqjT8ACccMc4K6QmT3w==} + cpu: [arm64] + os: [linux] + + '@unrs/resolver-binding-linux-ppc64-gnu@1.11.1': + resolution: {integrity: sha512-D8Vae74A4/a+mZH0FbOkFJL9DSK2R6TFPC9M+jCWYia/q2einCubX10pecpDiTmkJVUH+y8K3BZClycD8nCShA==} + cpu: [ppc64] + os: [linux] + + '@unrs/resolver-binding-linux-riscv64-gnu@1.11.1': + resolution: {integrity: sha512-frxL4OrzOWVVsOc96+V3aqTIQl1O2TjgExV4EKgRY09AJ9leZpEg8Ak9phadbuX0BA4k8U5qtvMSQQGGmaJqcQ==} + cpu: [riscv64] + os: [linux] + + '@unrs/resolver-binding-linux-riscv64-musl@1.11.1': + resolution: {integrity: sha512-mJ5vuDaIZ+l/acv01sHoXfpnyrNKOk/3aDoEdLO/Xtn9HuZlDD6jKxHlkN8ZhWyLJsRBxfv9GYM2utQ1SChKew==} + cpu: [riscv64] + os: [linux] + + '@unrs/resolver-binding-linux-s390x-gnu@1.11.1': + resolution: {integrity: sha512-kELo8ebBVtb9sA7rMe1Cph4QHreByhaZ2QEADd9NzIQsYNQpt9UkM9iqr2lhGr5afh885d/cB5QeTXSbZHTYPg==} + cpu: [s390x] + os: [linux] + + '@unrs/resolver-binding-linux-x64-gnu@1.11.1': + resolution: {integrity: sha512-C3ZAHugKgovV5YvAMsxhq0gtXuwESUKc5MhEtjBpLoHPLYM+iuwSj3lflFwK3DPm68660rZ7G8BMcwSro7hD5w==} + cpu: [x64] + os: [linux] + + '@unrs/resolver-binding-linux-x64-musl@1.11.1': + resolution: {integrity: sha512-rV0YSoyhK2nZ4vEswT/QwqzqQXw5I6CjoaYMOX0TqBlWhojUf8P94mvI7nuJTeaCkkds3QE4+zS8Ko+GdXuZtA==} + cpu: [x64] + os: [linux] + + '@unrs/resolver-binding-wasm32-wasi@1.11.1': + resolution: {integrity: 
sha512-5u4RkfxJm+Ng7IWgkzi3qrFOvLvQYnPBmjmZQ8+szTK/b31fQCnleNl1GgEt7nIsZRIf5PLhPwT0WM+q45x/UQ==} + engines: {node: '>=14.0.0'} + cpu: [wasm32] + + '@unrs/resolver-binding-win32-arm64-msvc@1.11.1': + resolution: {integrity: sha512-nRcz5Il4ln0kMhfL8S3hLkxI85BXs3o8EYoattsJNdsX4YUU89iOkVn7g0VHSRxFuVMdM4Q1jEpIId1Ihim/Uw==} + cpu: [arm64] + os: [win32] + + '@unrs/resolver-binding-win32-ia32-msvc@1.11.1': + resolution: {integrity: sha512-DCEI6t5i1NmAZp6pFonpD5m7i6aFrpofcp4LA2i8IIq60Jyo28hamKBxNrZcyOwVOZkgsRp9O2sXWBWP8MnvIQ==} + cpu: [ia32] + os: [win32] + + '@unrs/resolver-binding-win32-x64-msvc@1.11.1': + resolution: {integrity: sha512-lrW200hZdbfRtztbygyaq/6jP6AKE8qQN2KvPcJ+x7wiD038YtnYtZ82IMNJ69GJibV7bwL3y9FgK+5w/pYt6g==} + cpu: [x64] + os: [win32] + + ansi-escapes@4.3.2: + resolution: {integrity: sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==} + engines: {node: '>=8'} + ansi-regex@5.0.1: resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} engines: {node: '>=8'} + ansi-regex@6.1.0: + resolution: {integrity: sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==} + engines: {node: '>=12'} + ansi-styles@4.3.0: resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} engines: {node: '>=8'} + ansi-styles@5.2.0: + resolution: {integrity: sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==} + engines: {node: '>=10'} + + ansi-styles@6.2.1: + resolution: {integrity: sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==} + engines: {node: '>=12'} + + anymatch@3.1.3: + resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} + engines: {node: '>= 8'} + + argparse@1.0.10: + resolution: {integrity: 
sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==} + argparse@2.0.1: resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} + babel-jest@30.0.5: + resolution: {integrity: sha512-mRijnKimhGDMsizTvBTWotwNpzrkHr+VvZUQBof2AufXKB8NXrL1W69TG20EvOz7aevx6FTJIaBuBkYxS8zolg==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + peerDependencies: + '@babel/core': ^7.11.0 + + babel-plugin-istanbul@7.0.0: + resolution: {integrity: sha512-C5OzENSx/A+gt7t4VH1I2XsflxyPUmXRFPKBxt33xncdOmq7oROVM3bZv9Ysjjkv8OJYDMa+tKuKMvqU/H3xdw==} + engines: {node: '>=12'} + + babel-plugin-jest-hoist@30.0.1: + resolution: {integrity: sha512-zTPME3pI50NsFW8ZBaVIOeAxzEY7XHlmWeXXu9srI+9kNfzCUTy8MFan46xOGZY8NZThMqq+e3qZUKsvXbasnQ==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + babel-preset-current-node-syntax@1.2.0: + resolution: {integrity: sha512-E/VlAEzRrsLEb2+dv8yp3bo4scof3l9nR4lrld+Iy5NyVqgVYUJnDAmunkhPMisRI32Qc4iRiz425d8vM++2fg==} + peerDependencies: + '@babel/core': ^7.0.0 || ^8.0.0-0 + + babel-preset-jest@30.0.1: + resolution: {integrity: sha512-+YHejD5iTWI46cZmcc/YtX4gaKBtdqCHCVfuVinizVpbmyjO3zYmeuyFdfA8duRqQZfgCAMlsfmkVbJ+e2MAJw==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + peerDependencies: + '@babel/core': ^7.11.0 + + balanced-match@1.0.2: + resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + + brace-expansion@1.1.12: + resolution: {integrity: sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==} + + brace-expansion@2.0.2: + resolution: {integrity: sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==} + + braces@3.0.3: + resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} + engines: {node: '>=8'} + + 
browserslist@4.25.2: + resolution: {integrity: sha512-0si2SJK3ooGzIawRu61ZdPCO1IncZwS8IzuX73sPZsXW6EQ/w/DAfPyKI8l1ETTCr2MnvqWitmlCUxgdul45jA==} + engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} + hasBin: true + + bs-logger@0.2.6: + resolution: {integrity: sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog==} + engines: {node: '>= 6'} + + bser@2.1.1: + resolution: {integrity: sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==} + + buffer-from@1.1.2: + resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==} + + callsites@3.1.0: + resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} + engines: {node: '>=6'} + + camelcase@5.3.1: + resolution: {integrity: sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==} + engines: {node: '>=6'} + + camelcase@6.3.0: + resolution: {integrity: sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==} + engines: {node: '>=10'} + + caniuse-lite@1.0.30001735: + resolution: {integrity: sha512-EV/laoX7Wq2J9TQlyIXRxTJqIw4sxfXS4OYgudGxBYRuTv0q7AM6yMEpU/Vo1I94thg9U6EZ2NfZx9GJq83u7w==} + case-anything@2.1.13: resolution: {integrity: sha512-zlOQ80VrQ2Ue+ymH5OuM/DlDq64mEm+B9UTdHULv5osUMD6HalNTblf2b1u/m6QecjsnOkBpqVZ+XPwIVsy7Ng==} engines: {node: '>=12.13'} + chalk@4.1.2: + resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} + engines: {node: '>=10'} + + char-regex@1.0.2: + resolution: {integrity: sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==} + engines: {node: '>=10'} + + ci-info@4.3.0: + resolution: {integrity: sha512-l+2bNRMiQgcfILUi33labAZYIWlH1kWDp+ecNo5iisRKrbm0xcRyCww71/YU0Fkw0mAFpz9bJayXPjey6vkmaQ==} + 
engines: {node: '>=8'} + + cjs-module-lexer@2.1.0: + resolution: {integrity: sha512-UX0OwmYRYQQetfrLEZeewIFFI+wSTofC+pMBLNuH3RUuu/xzG1oz84UCEDOSoQlN3fZ4+AzmV50ZYvGqkMh9yA==} + cliui@8.0.1: resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} engines: {node: '>=12'} + co@4.6.0: + resolution: {integrity: sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==} + engines: {iojs: '>= 1.0.0', node: '>= 0.12.0'} + + collect-v8-coverage@1.0.2: + resolution: {integrity: sha512-lHl4d5/ONEbLlJvaJNtsF/Lz+WvB07u2ycqTYbdrq7UypDXailES4valYb2eWiJFxZlVmpGekfqoxQhzyFdT4Q==} + color-convert@2.0.1: resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} engines: {node: '>=7.0.0'} @@ -110,185 +671,2188 @@ packages: color-name@1.1.4: resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + concat-map@0.0.1: + resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} + + convert-source-map@2.0.0: + resolution: {integrity: sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==} + + cross-spawn@7.0.6: + resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} + engines: {node: '>= 8'} + + debug@4.4.1: + resolution: {integrity: sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + + dedent@1.6.0: + resolution: {integrity: sha512-F1Z+5UCFpmQUzJa11agbyPVMbpgT/qA3/SKyJ1jyBgm7dUcUEa8v9JwDkerSQXfakBwFljIxhOJqGkjUwZ9FSA==} + peerDependencies: + babel-plugin-macros: ^3.1.0 + peerDependenciesMeta: + babel-plugin-macros: + 
optional: true + + deepmerge@4.3.1: + resolution: {integrity: sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==} + engines: {node: '>=0.10.0'} + detect-libc@1.0.3: resolution: {integrity: sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==} engines: {node: '>=0.10'} hasBin: true + detect-newline@3.1.0: + resolution: {integrity: sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==} + engines: {node: '>=8'} + dprint-node@1.0.8: resolution: {integrity: sha512-iVKnUtYfGrYcW1ZAlfR/F59cUVL8QIhWoBJoSjkkdua/dkWIgjZfiLMeTjiB06X0ZLkQ0M2C1VbUj/CxkIf1zg==} + eastasianwidth@0.2.0: + resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} + + electron-to-chromium@1.5.203: + resolution: {integrity: sha512-uz4i0vLhfm6dLZWbz/iH88KNDV+ivj5+2SA+utpgjKaj9Q0iDLuwk6Idhe9BTxciHudyx6IvTvijhkPvFGUQ0g==} + + emittery@0.13.1: + resolution: {integrity: sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ==} + engines: {node: '>=12'} + emoji-regex@8.0.0: resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + emoji-regex@9.2.2: + resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} + + error-ex@1.3.2: + resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==} + escalade@3.2.0: resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==} engines: {node: '>=6'} + escape-string-regexp@2.0.0: + resolution: {integrity: sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==} + engines: {node: '>=8'} + + esprima@4.0.1: + resolution: {integrity: 
sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==} + engines: {node: '>=4'} + hasBin: true + + execa@5.1.1: + resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==} + engines: {node: '>=10'} + + exit-x@0.2.2: + resolution: {integrity: sha512-+I6B/IkJc1o/2tiURyz/ivu/O0nKNEArIUB5O7zBrlDVJr22SCLH3xTeEry428LvFhRzIA1g8izguxJ/gbNcVQ==} + engines: {node: '>= 0.8.0'} + + expect@30.0.5: + resolution: {integrity: sha512-P0te2pt+hHI5qLJkIR+iMvS+lYUZml8rKKsohVHAGY+uClp9XVbdyYNJOIjSRpHVp8s8YqxJCiHUkSYZGr8rtQ==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + fast-json-stable-stringify@2.1.0: + resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==} + + fb-watchman@2.0.2: + resolution: {integrity: sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==} + + fill-range@7.1.1: + resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} + engines: {node: '>=8'} + + find-up@4.1.0: + resolution: {integrity: sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==} + engines: {node: '>=8'} + + foreground-child@3.3.1: + resolution: {integrity: sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==} + engines: {node: '>=14'} + + fs.realpath@1.0.0: + resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} + + fsevents@2.3.3: + resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + + gensync@1.0.0-beta.2: + resolution: {integrity: 
sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} + engines: {node: '>=6.9.0'} + get-caller-file@2.0.5: resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} engines: {node: 6.* || 8.* || >= 10.*} - is-fullwidth-code-point@3.0.0: - resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} - engines: {node: '>=8'} + get-package-type@0.1.0: + resolution: {integrity: sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==} + engines: {node: '>=8.0.0'} - js-yaml@4.1.0: - resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==} + get-stream@6.0.1: + resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} + engines: {node: '>=10'} + + glob@10.4.5: + resolution: {integrity: sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==} hasBin: true - lodash.camelcase@4.3.0: - resolution: {integrity: sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==} + glob@7.2.3: + resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==} + deprecated: Glob versions prior to v9 are no longer supported - long@5.3.2: - resolution: {integrity: sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==} + graceful-fs@4.2.11: + resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} - protobufjs@7.5.3: - resolution: {integrity: sha512-sildjKwVqOI2kmFDiXQ6aEB0fjYTafpEvIBs8tOR8qI4spuL9OPROLVu2qZqi/xgCfsHIwVqlaF8JBjWFHnKbw==} - engines: {node: '>=12.0.0'} + handlebars@4.7.8: + resolution: {integrity: 
sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ==} + engines: {node: '>=0.4.7'} + hasBin: true - require-directory@2.1.1: - resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} - engines: {node: '>=0.10.0'} + has-flag@4.0.0: + resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} + engines: {node: '>=8'} - string-width@4.2.3: - resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} + html-escaper@2.0.2: + resolution: {integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==} + + human-signals@2.1.0: + resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} + engines: {node: '>=10.17.0'} + + import-local@3.2.0: + resolution: {integrity: sha512-2SPlun1JUPWoM6t3F0dw0FkCF/jWY8kttcY4f599GLTSjh2OCuuhdTkJQsEcZzBqbXZGKMK2OqW1oZsjtf/gQA==} engines: {node: '>=8'} + hasBin: true - strip-ansi@6.0.1: - resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} + imurmurhash@0.1.4: + resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==} + engines: {node: '>=0.8.19'} + + inflight@1.0.6: + resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} + deprecated: This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful. 
+ + inherits@2.0.4: + resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + + is-arrayish@0.2.1: + resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==} + + is-fullwidth-code-point@3.0.0: + resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} engines: {node: '>=8'} - ts-poet@6.12.0: - resolution: {integrity: sha512-xo+iRNMWqyvXpFTaOAvLPA5QAWO6TZrSUs5s4Odaya3epqofBu/fMLHEWl8jPmjhA0s9sgj9sNvF1BmaQlmQkA==} + is-generator-fn@2.1.0: + resolution: {integrity: sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==} + engines: {node: '>=6'} - ts-proto-descriptors@2.0.0: - resolution: {integrity: sha512-wHcTH3xIv11jxgkX5OyCSFfw27agpInAd6yh89hKG6zqIXnjW9SYqSER2CVQxdPj4czeOhGagNvZBEbJPy7qkw==} + is-number@7.0.0: + resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} + engines: {node: '>=0.12.0'} - ts-proto@2.7.7: - resolution: {integrity: sha512-/OfN9/Yriji2bbpOysZ/Jzc96isOKz+eBTJEcKaIZ0PR6x1TNgVm4Lz0zfbo+J0jwFO7fJjJyssefBPQ0o1V9A==} - hasBin: true + is-stream@2.0.1: + resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} + engines: {node: '>=8'} - typescript@5.9.2: - resolution: {integrity: sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A==} - engines: {node: '>=14.17'} - hasBin: true + isexe@2.0.0: + resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} - undici-types@6.21.0: - resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==} + istanbul-lib-coverage@3.2.2: + resolution: {integrity: 
sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==} + engines: {node: '>=8'} - wrap-ansi@7.0.0: - resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} + istanbul-lib-instrument@6.0.3: + resolution: {integrity: sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q==} engines: {node: '>=10'} - y18n@5.0.8: - resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} + istanbul-lib-report@3.0.1: + resolution: {integrity: sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==} engines: {node: '>=10'} - yargs-parser@21.1.1: - resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} - engines: {node: '>=12'} + istanbul-lib-source-maps@5.0.6: + resolution: {integrity: sha512-yg2d+Em4KizZC5niWhQaIomgf5WlL4vOOjZ5xGCmF8SnPE/mDWWXgvRExdcpCgh9lLRRa1/fSYp2ymmbJ1pI+A==} + engines: {node: '>=10'} - yargs@17.7.2: - resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==} - engines: {node: '>=12'} + istanbul-reports@3.1.7: + resolution: {integrity: sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==} + engines: {node: '>=8'} -snapshots: + jackspeak@3.4.3: + resolution: {integrity: sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==} - '@bufbuild/protobuf@2.6.3': {} + jest-changed-files@30.0.5: + resolution: {integrity: sha512-bGl2Ntdx0eAwXuGpdLdVYVr5YQHnSZlQ0y9HVDu565lCUAe9sj6JOtBbMmBBikGIegne9piDDIOeiLVoqTkz4A==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} - '@grpc/grpc-js@1.13.4': - dependencies: - '@grpc/proto-loader': 0.7.15 - '@js-sdsl/ordered-map': 4.4.2 + jest-circus@30.0.5: + resolution: 
{integrity: sha512-h/sjXEs4GS+NFFfqBDYT7y5Msfxh04EwWLhQi0F8kuWpe+J/7tICSlswU8qvBqumR3kFgHbfu7vU6qruWWBPug==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} - '@grpc/proto-loader@0.7.15': - dependencies: - lodash.camelcase: 4.3.0 - long: 5.3.2 - protobufjs: 7.5.3 - yargs: 17.7.2 + jest-cli@30.0.5: + resolution: {integrity: sha512-Sa45PGMkBZzF94HMrlX4kUyPOwUpdZasaliKN3mifvDmkhLYqLLg8HQTzn6gq7vJGahFYMQjXgyJWfYImKZzOw==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + hasBin: true + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + + jest-config@30.0.5: + resolution: {integrity: sha512-aIVh+JNOOpzUgzUnPn5FLtyVnqc3TQHVMupYtyeURSb//iLColiMIR8TxCIDKyx9ZgjKnXGucuW68hCxgbrwmA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + peerDependencies: + '@types/node': '*' + esbuild-register: '>=3.4.0' + ts-node: '>=9.0.0' + peerDependenciesMeta: + '@types/node': + optional: true + esbuild-register: + optional: true + ts-node: + optional: true + + jest-diff@30.0.5: + resolution: {integrity: sha512-1UIqE9PoEKaHcIKvq2vbibrCog4Y8G0zmOxgQUVEiTqwR5hJVMCoDsN1vFvI5JvwD37hjueZ1C4l2FyGnfpE0A==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-docblock@30.0.1: + resolution: {integrity: sha512-/vF78qn3DYphAaIc3jy4gA7XSAz167n9Bm/wn/1XhTLW7tTBIzXtCJpb/vcmc73NIIeeohCbdL94JasyXUZsGA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-each@30.0.5: + resolution: {integrity: sha512-dKjRsx1uZ96TVyejD3/aAWcNKy6ajMaN531CwWIsrazIqIoXI9TnnpPlkrEYku/8rkS3dh2rbH+kMOyiEIv0xQ==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-environment-node@30.0.5: + resolution: {integrity: sha512-ppYizXdLMSvciGsRsMEnv/5EFpvOdXBaXRBzFUDPWrsfmog4kYrOGWXarLllz6AXan6ZAA/kYokgDWuos1IKDA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-haste-map@30.0.5: + resolution: {integrity: 
sha512-dkmlWNlsTSR0nH3nRfW5BKbqHefLZv0/6LCccG0xFCTWcJu8TuEwG+5Cm75iBfjVoockmO6J35o5gxtFSn5xeg==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-leak-detector@30.0.5: + resolution: {integrity: sha512-3Uxr5uP8jmHMcsOtYMRB/zf1gXN3yUIc+iPorhNETG54gErFIiUhLvyY/OggYpSMOEYqsmRxmuU4ZOoX5jpRFg==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-matcher-utils@30.0.5: + resolution: {integrity: sha512-uQgGWt7GOrRLP1P7IwNWwK1WAQbq+m//ZY0yXygyfWp0rJlksMSLQAA4wYQC3b6wl3zfnchyTx+k3HZ5aPtCbQ==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-message-util@30.0.5: + resolution: {integrity: sha512-NAiDOhsK3V7RU0Aa/HnrQo+E4JlbarbmI3q6Pi4KcxicdtjV82gcIUrejOtczChtVQR4kddu1E1EJlW6EN9IyA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-mock@30.0.5: + resolution: {integrity: sha512-Od7TyasAAQX/6S+QCbN6vZoWOMwlTtzzGuxJku1GhGanAjz9y+QsQkpScDmETvdc9aSXyJ/Op4rhpMYBWW91wQ==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-pnp-resolver@1.2.3: + resolution: {integrity: sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w==} + engines: {node: '>=6'} + peerDependencies: + jest-resolve: '*' + peerDependenciesMeta: + jest-resolve: + optional: true + + jest-regex-util@30.0.1: + resolution: {integrity: sha512-jHEQgBXAgc+Gh4g0p3bCevgRCVRkB4VB70zhoAE48gxeSr1hfUOsM/C2WoJgVL7Eyg//hudYENbm3Ne+/dRVVA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-resolve-dependencies@30.0.5: + resolution: {integrity: sha512-/xMvBR4MpwkrHW4ikZIWRttBBRZgWK4d6xt3xW1iRDSKt4tXzYkMkyPfBnSCgv96cpkrctfXs6gexeqMYqdEpw==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-resolve@30.0.5: + resolution: {integrity: sha512-d+DjBQ1tIhdz91B79mywH5yYu76bZuE96sSbxj8MkjWVx5WNdt1deEFRONVL4UkKLSrAbMkdhb24XN691yDRHg==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-runner@30.0.5: + resolution: {integrity: 
sha512-JcCOucZmgp+YuGgLAXHNy7ualBx4wYSgJVWrYMRBnb79j9PD0Jxh0EHvR5Cx/r0Ce+ZBC4hCdz2AzFFLl9hCiw==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-runtime@30.0.5: + resolution: {integrity: sha512-7oySNDkqpe4xpX5PPiJTe5vEa+Ak/NnNz2bGYZrA1ftG3RL3EFlHaUkA1Cjx+R8IhK0Vg43RML5mJedGTPNz3A==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-snapshot@30.0.5: + resolution: {integrity: sha512-T00dWU/Ek3LqTp4+DcW6PraVxjk28WY5Ua/s+3zUKSERZSNyxTqhDXCWKG5p2HAJ+crVQ3WJ2P9YVHpj1tkW+g==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-util@30.0.5: + resolution: {integrity: sha512-pvyPWssDZR0FlfMxCBoc0tvM8iUEskaRFALUtGQYzVEAqisAztmy+R8LnU14KT4XA0H/a5HMVTXat1jLne010g==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-validate@30.0.5: + resolution: {integrity: sha512-ouTm6VFHaS2boyl+k4u+Qip4TSH7Uld5tyD8psQ8abGgt2uYYB8VwVfAHWHjHc0NWmGGbwO5h0sCPOGHHevefw==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-watcher@30.0.5: + resolution: {integrity: sha512-z9slj/0vOwBDBjN3L4z4ZYaA+pG56d6p3kTUhFRYGvXbXMWhXmb/FIxREZCD06DYUwDKKnj2T80+Pb71CQ0KEg==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest-worker@30.0.5: + resolution: {integrity: sha512-ojRXsWzEP16NdUuBw/4H/zkZdHOa7MMYCk4E430l+8fELeLg/mqmMlRhjL7UNZvQrDmnovWZV4DxX03fZF48fQ==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + jest@30.0.5: + resolution: {integrity: sha512-y2mfcJywuTUkvLm2Lp1/pFX8kTgMO5yyQGq/Sk/n2mN7XWYp4JsCZ/QXW34M8YScgk8bPZlREH04f6blPnoHnQ==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + hasBin: true + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true - '@js-sdsl/ordered-map@4.4.2': {} + js-tokens@4.0.0: + resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} - '@protobufjs/aspromise@1.1.2': {} + js-yaml@3.14.1: + 
resolution: {integrity: sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==} + hasBin: true - '@protobufjs/base64@1.1.2': {} + js-yaml@4.1.0: + resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==} + hasBin: true - '@protobufjs/codegen@2.0.4': {} + jsesc@3.1.0: + resolution: {integrity: sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==} + engines: {node: '>=6'} + hasBin: true - '@protobufjs/eventemitter@1.1.0': {} + json-parse-even-better-errors@2.3.1: + resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} - '@protobufjs/fetch@1.1.0': - dependencies: - '@protobufjs/aspromise': 1.1.2 - '@protobufjs/inquire': 1.1.0 + json5@2.2.3: + resolution: {integrity: sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==} + engines: {node: '>=6'} + hasBin: true - '@protobufjs/float@1.0.2': {} + leven@3.1.0: + resolution: {integrity: sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==} + engines: {node: '>=6'} - '@protobufjs/inquire@1.1.0': {} + lines-and-columns@1.2.4: + resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} - '@protobufjs/path@1.1.2': {} + locate-path@5.0.0: + resolution: {integrity: sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==} + engines: {node: '>=8'} - '@protobufjs/pool@1.1.0': {} + lodash.camelcase@4.3.0: + resolution: {integrity: sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==} - '@protobufjs/utf8@1.1.0': {} + lodash.memoize@4.1.2: + resolution: {integrity: sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==} - '@types/js-yaml@4.0.9': {} + 
long@5.3.2: + resolution: {integrity: sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==} - '@types/node@22.17.1': - dependencies: - undici-types: 6.21.0 + lru-cache@10.4.3: + resolution: {integrity: sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==} - ansi-regex@5.0.1: {} + lru-cache@5.1.1: + resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==} - ansi-styles@4.3.0: - dependencies: - color-convert: 2.0.1 + make-dir@4.0.0: + resolution: {integrity: sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==} + engines: {node: '>=10'} - argparse@2.0.1: {} + make-error@1.3.6: + resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==} - case-anything@2.1.13: {} + makeerror@1.0.12: + resolution: {integrity: sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==} - cliui@8.0.1: - dependencies: - string-width: 4.2.3 - strip-ansi: 6.0.1 - wrap-ansi: 7.0.0 + merge-stream@2.0.0: + resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} - color-convert@2.0.1: - dependencies: - color-name: 1.1.4 + micromatch@4.0.8: + resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==} + engines: {node: '>=8.6'} - color-name@1.1.4: {} + mimic-fn@2.1.0: + resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} + engines: {node: '>=6'} - detect-libc@1.0.3: {} + minimatch@3.1.2: + resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} - dprint-node@1.0.8: - dependencies: - detect-libc: 1.0.3 + minimatch@9.0.5: + resolution: {integrity: 
sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==} + engines: {node: '>=16 || 14 >=14.17'} - emoji-regex@8.0.0: {} + minimist@1.2.8: + resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} - escalade@3.2.0: {} + minipass@7.1.2: + resolution: {integrity: sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==} + engines: {node: '>=16 || 14 >=14.17'} - get-caller-file@2.0.5: {} + ms@2.1.3: + resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} - is-fullwidth-code-point@3.0.0: {} + napi-postinstall@0.3.3: + resolution: {integrity: sha512-uTp172LLXSxuSYHv/kou+f6KW3SMppU9ivthaVTXian9sOt3XM/zHYHpRZiLgQoxeWfYUnslNWQHF1+G71xcow==} + engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} + hasBin: true - js-yaml@4.1.0: - dependencies: - argparse: 2.0.1 + natural-compare@1.4.0: + resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} - lodash.camelcase@4.3.0: {} + neo-async@2.6.2: + resolution: {integrity: sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==} - long@5.3.2: {} + node-int64@0.4.0: + resolution: {integrity: sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==} - protobufjs@7.5.3: - dependencies: - '@protobufjs/aspromise': 1.1.2 - '@protobufjs/base64': 1.1.2 - '@protobufjs/codegen': 2.0.4 + node-releases@2.0.19: + resolution: {integrity: sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==} + + normalize-path@3.0.0: + resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} + engines: {node: '>=0.10.0'} + + npm-run-path@4.0.1: + resolution: {integrity: 
sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} + engines: {node: '>=8'} + + once@1.4.0: + resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} + + onetime@5.1.2: + resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} + engines: {node: '>=6'} + + p-limit@2.3.0: + resolution: {integrity: sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==} + engines: {node: '>=6'} + + p-limit@3.1.0: + resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} + engines: {node: '>=10'} + + p-locate@4.1.0: + resolution: {integrity: sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==} + engines: {node: '>=8'} + + p-try@2.2.0: + resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==} + engines: {node: '>=6'} + + package-json-from-dist@1.0.1: + resolution: {integrity: sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==} + + parse-json@5.2.0: + resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==} + engines: {node: '>=8'} + + path-exists@4.0.0: + resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} + engines: {node: '>=8'} + + path-is-absolute@1.0.1: + resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==} + engines: {node: '>=0.10.0'} + + path-key@3.1.1: + resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} + engines: {node: '>=8'} + + path-scurry@1.11.1: + resolution: {integrity: 
sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==} + engines: {node: '>=16 || 14 >=14.18'} + + picocolors@1.1.1: + resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} + + picomatch@2.3.1: + resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} + engines: {node: '>=8.6'} + + picomatch@4.0.3: + resolution: {integrity: sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==} + engines: {node: '>=12'} + + pirates@4.0.7: + resolution: {integrity: sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==} + engines: {node: '>= 6'} + + pkg-dir@4.2.0: + resolution: {integrity: sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==} + engines: {node: '>=8'} + + pretty-format@30.0.5: + resolution: {integrity: sha512-D1tKtYvByrBkFLe2wHJl2bwMJIiT8rW+XA+TiataH79/FszLQMrpGEvzUVkzPau7OCO0Qnrhpe87PqtOAIB8Yw==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + + protobufjs@7.5.3: + resolution: {integrity: sha512-sildjKwVqOI2kmFDiXQ6aEB0fjYTafpEvIBs8tOR8qI4spuL9OPROLVu2qZqi/xgCfsHIwVqlaF8JBjWFHnKbw==} + engines: {node: '>=12.0.0'} + + pure-rand@7.0.1: + resolution: {integrity: sha512-oTUZM/NAZS8p7ANR3SHh30kXB+zK2r2BPcEn/awJIbOvq82WoMN4p62AWWp3Hhw50G0xMsw1mhIBLqHw64EcNQ==} + + react-is@18.3.1: + resolution: {integrity: sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==} + + require-directory@2.1.1: + resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} + engines: {node: '>=0.10.0'} + + resolve-cwd@3.0.0: + resolution: {integrity: sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==} + engines: {node: '>=8'} + + 
resolve-from@5.0.0: + resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} + engines: {node: '>=8'} + + semver@6.3.1: + resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==} + hasBin: true + + semver@7.7.2: + resolution: {integrity: sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==} + engines: {node: '>=10'} + hasBin: true + + shebang-command@2.0.0: + resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} + engines: {node: '>=8'} + + shebang-regex@3.0.0: + resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} + engines: {node: '>=8'} + + signal-exit@3.0.7: + resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} + + signal-exit@4.1.0: + resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} + engines: {node: '>=14'} + + slash@3.0.0: + resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} + engines: {node: '>=8'} + + source-map-support@0.5.13: + resolution: {integrity: sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==} + + source-map@0.6.1: + resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} + engines: {node: '>=0.10.0'} + + sprintf-js@1.0.3: + resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} + + stack-utils@2.0.6: + resolution: {integrity: sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==} + engines: {node: '>=10'} + + 
string-length@4.0.2: + resolution: {integrity: sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==} + engines: {node: '>=10'} + + string-width@4.2.3: + resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} + engines: {node: '>=8'} + + string-width@5.1.2: + resolution: {integrity: sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==} + engines: {node: '>=12'} + + strip-ansi@6.0.1: + resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} + engines: {node: '>=8'} + + strip-ansi@7.1.0: + resolution: {integrity: sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==} + engines: {node: '>=12'} + + strip-bom@4.0.0: + resolution: {integrity: sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==} + engines: {node: '>=8'} + + strip-final-newline@2.0.0: + resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} + engines: {node: '>=6'} + + strip-json-comments@3.1.1: + resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} + engines: {node: '>=8'} + + supports-color@7.2.0: + resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} + engines: {node: '>=8'} + + supports-color@8.1.1: + resolution: {integrity: sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==} + engines: {node: '>=10'} + + synckit@0.11.11: + resolution: {integrity: sha512-MeQTA1r0litLUf0Rp/iisCaL8761lKAZHaimlbGK4j0HysC4PLfqygQj9srcs0m2RdtDYnF8UuYyKpbjHYp7Jw==} + engines: {node: ^14.18.0 || >=16.0.0} + + test-exclude@6.0.0: + resolution: {integrity: 
sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==} + engines: {node: '>=8'} + + tmpl@1.0.5: + resolution: {integrity: sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==} + + to-regex-range@5.0.1: + resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} + engines: {node: '>=8.0'} + + ts-jest@29.4.1: + resolution: {integrity: sha512-SaeUtjfpg9Uqu8IbeDKtdaS0g8lS6FT6OzM3ezrDfErPJPHNDo/Ey+VFGP1bQIDfagYDLyRpd7O15XpG1Es2Uw==} + engines: {node: ^14.15.0 || ^16.10.0 || ^18.0.0 || >=20.0.0} + hasBin: true + peerDependencies: + '@babel/core': '>=7.0.0-beta.0 <8' + '@jest/transform': ^29.0.0 || ^30.0.0 + '@jest/types': ^29.0.0 || ^30.0.0 + babel-jest: ^29.0.0 || ^30.0.0 + esbuild: '*' + jest: ^29.0.0 || ^30.0.0 + jest-util: ^29.0.0 || ^30.0.0 + typescript: '>=4.3 <6' + peerDependenciesMeta: + '@babel/core': + optional: true + '@jest/transform': + optional: true + '@jest/types': + optional: true + babel-jest: + optional: true + esbuild: + optional: true + jest-util: + optional: true + + ts-poet@6.12.0: + resolution: {integrity: sha512-xo+iRNMWqyvXpFTaOAvLPA5QAWO6TZrSUs5s4Odaya3epqofBu/fMLHEWl8jPmjhA0s9sgj9sNvF1BmaQlmQkA==} + + ts-proto-descriptors@2.0.0: + resolution: {integrity: sha512-wHcTH3xIv11jxgkX5OyCSFfw27agpInAd6yh89hKG6zqIXnjW9SYqSER2CVQxdPj4czeOhGagNvZBEbJPy7qkw==} + + ts-proto@2.7.7: + resolution: {integrity: sha512-/OfN9/Yriji2bbpOysZ/Jzc96isOKz+eBTJEcKaIZ0PR6x1TNgVm4Lz0zfbo+J0jwFO7fJjJyssefBPQ0o1V9A==} + hasBin: true + + tslib@2.8.1: + resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==} + + type-detect@4.0.8: + resolution: {integrity: sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==} + engines: {node: '>=4'} + + type-fest@0.21.3: + resolution: {integrity: 
sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==} + engines: {node: '>=10'} + + type-fest@4.41.0: + resolution: {integrity: sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==} + engines: {node: '>=16'} + + typescript@5.9.2: + resolution: {integrity: sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A==} + engines: {node: '>=14.17'} + hasBin: true + + uglify-js@3.19.3: + resolution: {integrity: sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ==} + engines: {node: '>=0.8.0'} + hasBin: true + + undici-types@6.21.0: + resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==} + + unrs-resolver@1.11.1: + resolution: {integrity: sha512-bSjt9pjaEBnNiGgc9rUiHGKv5l4/TGzDmYw3RhnkJGtLhbnnA/5qJj7x3dNDCRx/PJxu774LlH8lCOlB4hEfKg==} + + update-browserslist-db@1.1.3: + resolution: {integrity: sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==} + hasBin: true + peerDependencies: + browserslist: '>= 4.21.0' + + v8-to-istanbul@9.3.0: + resolution: {integrity: sha512-kiGUalWN+rgBJ/1OHZsBtU4rXZOfj/7rKQxULKlIzwzQSvMJUUNgPwJEEh7gU6xEVxC0ahoOBvN2YI8GH6FNgA==} + engines: {node: '>=10.12.0'} + + walker@1.0.8: + resolution: {integrity: sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==} + + which@2.0.2: + resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} + engines: {node: '>= 8'} + hasBin: true + + wordwrap@1.0.0: + resolution: {integrity: sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==} + + wrap-ansi@7.0.0: + resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} + engines: {node: 
'>=10'} + + wrap-ansi@8.1.0: + resolution: {integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==} + engines: {node: '>=12'} + + wrappy@1.0.2: + resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} + + write-file-atomic@5.0.1: + resolution: {integrity: sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + y18n@5.0.8: + resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} + engines: {node: '>=10'} + + yallist@3.1.1: + resolution: {integrity: sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==} + + yargs-parser@21.1.1: + resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} + engines: {node: '>=12'} + + yargs@17.7.2: + resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==} + engines: {node: '>=12'} + + yocto-queue@0.1.0: + resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} + engines: {node: '>=10'} + +snapshots: + + '@ampproject/remapping@2.3.0': + dependencies: + '@jridgewell/gen-mapping': 0.3.13 + '@jridgewell/trace-mapping': 0.3.30 + + '@babel/code-frame@7.27.1': + dependencies: + '@babel/helper-validator-identifier': 7.27.1 + js-tokens: 4.0.0 + picocolors: 1.1.1 + + '@babel/compat-data@7.28.0': {} + + '@babel/core@7.28.3': + dependencies: + '@ampproject/remapping': 2.3.0 + '@babel/code-frame': 7.27.1 + '@babel/generator': 7.28.3 + '@babel/helper-compilation-targets': 7.27.2 + '@babel/helper-module-transforms': 7.28.3(@babel/core@7.28.3) + '@babel/helpers': 7.28.3 + '@babel/parser': 7.28.3 + '@babel/template': 7.27.2 + '@babel/traverse': 
7.28.3 + '@babel/types': 7.28.2 + convert-source-map: 2.0.0 + debug: 4.4.1 + gensync: 1.0.0-beta.2 + json5: 2.2.3 + semver: 6.3.1 + transitivePeerDependencies: + - supports-color + + '@babel/generator@7.28.3': + dependencies: + '@babel/parser': 7.28.3 + '@babel/types': 7.28.2 + '@jridgewell/gen-mapping': 0.3.13 + '@jridgewell/trace-mapping': 0.3.30 + jsesc: 3.1.0 + + '@babel/helper-compilation-targets@7.27.2': + dependencies: + '@babel/compat-data': 7.28.0 + '@babel/helper-validator-option': 7.27.1 + browserslist: 4.25.2 + lru-cache: 5.1.1 + semver: 6.3.1 + + '@babel/helper-globals@7.28.0': {} + + '@babel/helper-module-imports@7.27.1': + dependencies: + '@babel/traverse': 7.28.3 + '@babel/types': 7.28.2 + transitivePeerDependencies: + - supports-color + + '@babel/helper-module-transforms@7.28.3(@babel/core@7.28.3)': + dependencies: + '@babel/core': 7.28.3 + '@babel/helper-module-imports': 7.27.1 + '@babel/helper-validator-identifier': 7.27.1 + '@babel/traverse': 7.28.3 + transitivePeerDependencies: + - supports-color + + '@babel/helper-plugin-utils@7.27.1': {} + + '@babel/helper-string-parser@7.27.1': {} + + '@babel/helper-validator-identifier@7.27.1': {} + + '@babel/helper-validator-option@7.27.1': {} + + '@babel/helpers@7.28.3': + dependencies: + '@babel/template': 7.27.2 + '@babel/types': 7.28.2 + + '@babel/parser@7.28.3': + dependencies: + '@babel/types': 7.28.2 + + '@babel/plugin-syntax-async-generators@7.8.4(@babel/core@7.28.3)': + dependencies: + '@babel/core': 7.28.3 + '@babel/helper-plugin-utils': 7.27.1 + + '@babel/plugin-syntax-bigint@7.8.3(@babel/core@7.28.3)': + dependencies: + '@babel/core': 7.28.3 + '@babel/helper-plugin-utils': 7.27.1 + + '@babel/plugin-syntax-class-properties@7.12.13(@babel/core@7.28.3)': + dependencies: + '@babel/core': 7.28.3 + '@babel/helper-plugin-utils': 7.27.1 + + '@babel/plugin-syntax-class-static-block@7.14.5(@babel/core@7.28.3)': + dependencies: + '@babel/core': 7.28.3 + '@babel/helper-plugin-utils': 7.27.1 + + 
'@babel/plugin-syntax-import-attributes@7.27.1(@babel/core@7.28.3)': + dependencies: + '@babel/core': 7.28.3 + '@babel/helper-plugin-utils': 7.27.1 + + '@babel/plugin-syntax-import-meta@7.10.4(@babel/core@7.28.3)': + dependencies: + '@babel/core': 7.28.3 + '@babel/helper-plugin-utils': 7.27.1 + + '@babel/plugin-syntax-json-strings@7.8.3(@babel/core@7.28.3)': + dependencies: + '@babel/core': 7.28.3 + '@babel/helper-plugin-utils': 7.27.1 + + '@babel/plugin-syntax-jsx@7.27.1(@babel/core@7.28.3)': + dependencies: + '@babel/core': 7.28.3 + '@babel/helper-plugin-utils': 7.27.1 + + '@babel/plugin-syntax-logical-assignment-operators@7.10.4(@babel/core@7.28.3)': + dependencies: + '@babel/core': 7.28.3 + '@babel/helper-plugin-utils': 7.27.1 + + '@babel/plugin-syntax-nullish-coalescing-operator@7.8.3(@babel/core@7.28.3)': + dependencies: + '@babel/core': 7.28.3 + '@babel/helper-plugin-utils': 7.27.1 + + '@babel/plugin-syntax-numeric-separator@7.10.4(@babel/core@7.28.3)': + dependencies: + '@babel/core': 7.28.3 + '@babel/helper-plugin-utils': 7.27.1 + + '@babel/plugin-syntax-object-rest-spread@7.8.3(@babel/core@7.28.3)': + dependencies: + '@babel/core': 7.28.3 + '@babel/helper-plugin-utils': 7.27.1 + + '@babel/plugin-syntax-optional-catch-binding@7.8.3(@babel/core@7.28.3)': + dependencies: + '@babel/core': 7.28.3 + '@babel/helper-plugin-utils': 7.27.1 + + '@babel/plugin-syntax-optional-chaining@7.8.3(@babel/core@7.28.3)': + dependencies: + '@babel/core': 7.28.3 + '@babel/helper-plugin-utils': 7.27.1 + + '@babel/plugin-syntax-private-property-in-object@7.14.5(@babel/core@7.28.3)': + dependencies: + '@babel/core': 7.28.3 + '@babel/helper-plugin-utils': 7.27.1 + + '@babel/plugin-syntax-top-level-await@7.14.5(@babel/core@7.28.3)': + dependencies: + '@babel/core': 7.28.3 + '@babel/helper-plugin-utils': 7.27.1 + + '@babel/plugin-syntax-typescript@7.27.1(@babel/core@7.28.3)': + dependencies: + '@babel/core': 7.28.3 + '@babel/helper-plugin-utils': 7.27.1 + + '@babel/template@7.27.2': 
+ dependencies: + '@babel/code-frame': 7.27.1 + '@babel/parser': 7.28.3 + '@babel/types': 7.28.2 + + '@babel/traverse@7.28.3': + dependencies: + '@babel/code-frame': 7.27.1 + '@babel/generator': 7.28.3 + '@babel/helper-globals': 7.28.0 + '@babel/parser': 7.28.3 + '@babel/template': 7.27.2 + '@babel/types': 7.28.2 + debug: 4.4.1 + transitivePeerDependencies: + - supports-color + + '@babel/types@7.28.2': + dependencies: + '@babel/helper-string-parser': 7.27.1 + '@babel/helper-validator-identifier': 7.27.1 + + '@bcoe/v8-coverage@0.2.3': {} + + '@bufbuild/protobuf@2.6.3': {} + + '@emnapi/core@1.4.5': + dependencies: + '@emnapi/wasi-threads': 1.0.4 + tslib: 2.8.1 + optional: true + + '@emnapi/runtime@1.4.5': + dependencies: + tslib: 2.8.1 + optional: true + + '@emnapi/wasi-threads@1.0.4': + dependencies: + tslib: 2.8.1 + optional: true + + '@grpc/grpc-js@1.13.4': + dependencies: + '@grpc/proto-loader': 0.7.15 + '@js-sdsl/ordered-map': 4.4.2 + + '@grpc/proto-loader@0.7.15': + dependencies: + lodash.camelcase: 4.3.0 + long: 5.3.2 + protobufjs: 7.5.3 + yargs: 17.7.2 + + '@isaacs/cliui@8.0.2': + dependencies: + string-width: 5.1.2 + string-width-cjs: string-width@4.2.3 + strip-ansi: 7.1.0 + strip-ansi-cjs: strip-ansi@6.0.1 + wrap-ansi: 8.1.0 + wrap-ansi-cjs: wrap-ansi@7.0.0 + + '@istanbuljs/load-nyc-config@1.1.0': + dependencies: + camelcase: 5.3.1 + find-up: 4.1.0 + get-package-type: 0.1.0 + js-yaml: 3.14.1 + resolve-from: 5.0.0 + + '@istanbuljs/schema@0.1.3': {} + + '@jest/console@30.0.5': + dependencies: + '@jest/types': 30.0.5 + '@types/node': 22.17.1 + chalk: 4.1.2 + jest-message-util: 30.0.5 + jest-util: 30.0.5 + slash: 3.0.0 + + '@jest/core@30.0.5': + dependencies: + '@jest/console': 30.0.5 + '@jest/pattern': 30.0.1 + '@jest/reporters': 30.0.5 + '@jest/test-result': 30.0.5 + '@jest/transform': 30.0.5 + '@jest/types': 30.0.5 + '@types/node': 22.17.1 + ansi-escapes: 4.3.2 + chalk: 4.1.2 + ci-info: 4.3.0 + exit-x: 0.2.2 + graceful-fs: 4.2.11 + jest-changed-files: 30.0.5 
+ jest-config: 30.0.5(@types/node@22.17.1) + jest-haste-map: 30.0.5 + jest-message-util: 30.0.5 + jest-regex-util: 30.0.1 + jest-resolve: 30.0.5 + jest-resolve-dependencies: 30.0.5 + jest-runner: 30.0.5 + jest-runtime: 30.0.5 + jest-snapshot: 30.0.5 + jest-util: 30.0.5 + jest-validate: 30.0.5 + jest-watcher: 30.0.5 + micromatch: 4.0.8 + pretty-format: 30.0.5 + slash: 3.0.0 + transitivePeerDependencies: + - babel-plugin-macros + - esbuild-register + - supports-color + - ts-node + + '@jest/diff-sequences@30.0.1': {} + + '@jest/environment@30.0.5': + dependencies: + '@jest/fake-timers': 30.0.5 + '@jest/types': 30.0.5 + '@types/node': 22.17.1 + jest-mock: 30.0.5 + + '@jest/expect-utils@30.0.5': + dependencies: + '@jest/get-type': 30.0.1 + + '@jest/expect@30.0.5': + dependencies: + expect: 30.0.5 + jest-snapshot: 30.0.5 + transitivePeerDependencies: + - supports-color + + '@jest/fake-timers@30.0.5': + dependencies: + '@jest/types': 30.0.5 + '@sinonjs/fake-timers': 13.0.5 + '@types/node': 22.17.1 + jest-message-util: 30.0.5 + jest-mock: 30.0.5 + jest-util: 30.0.5 + + '@jest/get-type@30.0.1': {} + + '@jest/globals@30.0.5': + dependencies: + '@jest/environment': 30.0.5 + '@jest/expect': 30.0.5 + '@jest/types': 30.0.5 + jest-mock: 30.0.5 + transitivePeerDependencies: + - supports-color + + '@jest/pattern@30.0.1': + dependencies: + '@types/node': 22.17.1 + jest-regex-util: 30.0.1 + + '@jest/reporters@30.0.5': + dependencies: + '@bcoe/v8-coverage': 0.2.3 + '@jest/console': 30.0.5 + '@jest/test-result': 30.0.5 + '@jest/transform': 30.0.5 + '@jest/types': 30.0.5 + '@jridgewell/trace-mapping': 0.3.30 + '@types/node': 22.17.1 + chalk: 4.1.2 + collect-v8-coverage: 1.0.2 + exit-x: 0.2.2 + glob: 10.4.5 + graceful-fs: 4.2.11 + istanbul-lib-coverage: 3.2.2 + istanbul-lib-instrument: 6.0.3 + istanbul-lib-report: 3.0.1 + istanbul-lib-source-maps: 5.0.6 + istanbul-reports: 3.1.7 + jest-message-util: 30.0.5 + jest-util: 30.0.5 + jest-worker: 30.0.5 + slash: 3.0.0 + string-length: 4.0.2 + 
v8-to-istanbul: 9.3.0 + transitivePeerDependencies: + - supports-color + + '@jest/schemas@30.0.5': + dependencies: + '@sinclair/typebox': 0.34.39 + + '@jest/snapshot-utils@30.0.5': + dependencies: + '@jest/types': 30.0.5 + chalk: 4.1.2 + graceful-fs: 4.2.11 + natural-compare: 1.4.0 + + '@jest/source-map@30.0.1': + dependencies: + '@jridgewell/trace-mapping': 0.3.30 + callsites: 3.1.0 + graceful-fs: 4.2.11 + + '@jest/test-result@30.0.5': + dependencies: + '@jest/console': 30.0.5 + '@jest/types': 30.0.5 + '@types/istanbul-lib-coverage': 2.0.6 + collect-v8-coverage: 1.0.2 + + '@jest/test-sequencer@30.0.5': + dependencies: + '@jest/test-result': 30.0.5 + graceful-fs: 4.2.11 + jest-haste-map: 30.0.5 + slash: 3.0.0 + + '@jest/transform@30.0.5': + dependencies: + '@babel/core': 7.28.3 + '@jest/types': 30.0.5 + '@jridgewell/trace-mapping': 0.3.30 + babel-plugin-istanbul: 7.0.0 + chalk: 4.1.2 + convert-source-map: 2.0.0 + fast-json-stable-stringify: 2.1.0 + graceful-fs: 4.2.11 + jest-haste-map: 30.0.5 + jest-regex-util: 30.0.1 + jest-util: 30.0.5 + micromatch: 4.0.8 + pirates: 4.0.7 + slash: 3.0.0 + write-file-atomic: 5.0.1 + transitivePeerDependencies: + - supports-color + + '@jest/types@30.0.5': + dependencies: + '@jest/pattern': 30.0.1 + '@jest/schemas': 30.0.5 + '@types/istanbul-lib-coverage': 2.0.6 + '@types/istanbul-reports': 3.0.4 + '@types/node': 22.17.1 + '@types/yargs': 17.0.33 + chalk: 4.1.2 + + '@jridgewell/gen-mapping@0.3.13': + dependencies: + '@jridgewell/sourcemap-codec': 1.5.5 + '@jridgewell/trace-mapping': 0.3.30 + + '@jridgewell/resolve-uri@3.1.2': {} + + '@jridgewell/sourcemap-codec@1.5.5': {} + + '@jridgewell/trace-mapping@0.3.30': + dependencies: + '@jridgewell/resolve-uri': 3.1.2 + '@jridgewell/sourcemap-codec': 1.5.5 + + '@js-sdsl/ordered-map@4.4.2': {} + + '@napi-rs/wasm-runtime@0.2.12': + dependencies: + '@emnapi/core': 1.4.5 + '@emnapi/runtime': 1.4.5 + '@tybys/wasm-util': 0.10.0 + optional: true + + '@pkgjs/parseargs@0.11.0': + optional: true + + 
'@pkgr/core@0.2.9': {} + + '@protobufjs/aspromise@1.1.2': {} + + '@protobufjs/base64@1.1.2': {} + + '@protobufjs/codegen@2.0.4': {} + + '@protobufjs/eventemitter@1.1.0': {} + + '@protobufjs/fetch@1.1.0': + dependencies: + '@protobufjs/aspromise': 1.1.2 + '@protobufjs/inquire': 1.1.0 + + '@protobufjs/float@1.0.2': {} + + '@protobufjs/inquire@1.1.0': {} + + '@protobufjs/path@1.1.2': {} + + '@protobufjs/pool@1.1.0': {} + + '@protobufjs/utf8@1.1.0': {} + + '@sinclair/typebox@0.34.39': {} + + '@sinonjs/commons@3.0.1': + dependencies: + type-detect: 4.0.8 + + '@sinonjs/fake-timers@13.0.5': + dependencies: + '@sinonjs/commons': 3.0.1 + + '@tybys/wasm-util@0.10.0': + dependencies: + tslib: 2.8.1 + optional: true + + '@types/babel__core@7.20.5': + dependencies: + '@babel/parser': 7.28.3 + '@babel/types': 7.28.2 + '@types/babel__generator': 7.27.0 + '@types/babel__template': 7.4.4 + '@types/babel__traverse': 7.28.0 + + '@types/babel__generator@7.27.0': + dependencies: + '@babel/types': 7.28.2 + + '@types/babel__template@7.4.4': + dependencies: + '@babel/parser': 7.28.3 + '@babel/types': 7.28.2 + + '@types/babel__traverse@7.28.0': + dependencies: + '@babel/types': 7.28.2 + + '@types/istanbul-lib-coverage@2.0.6': {} + + '@types/istanbul-lib-report@3.0.3': + dependencies: + '@types/istanbul-lib-coverage': 2.0.6 + + '@types/istanbul-reports@3.0.4': + dependencies: + '@types/istanbul-lib-report': 3.0.3 + + '@types/jest@30.0.0': + dependencies: + expect: 30.0.5 + pretty-format: 30.0.5 + + '@types/js-yaml@4.0.9': {} + + '@types/node@22.17.1': + dependencies: + undici-types: 6.21.0 + + '@types/stack-utils@2.0.3': {} + + '@types/yargs-parser@21.0.3': {} + + '@types/yargs@17.0.33': + dependencies: + '@types/yargs-parser': 21.0.3 + + '@ungap/structured-clone@1.3.0': {} + + '@unrs/resolver-binding-android-arm-eabi@1.11.1': + optional: true + + '@unrs/resolver-binding-android-arm64@1.11.1': + optional: true + + '@unrs/resolver-binding-darwin-arm64@1.11.1': + optional: true + + 
'@unrs/resolver-binding-darwin-x64@1.11.1': + optional: true + + '@unrs/resolver-binding-freebsd-x64@1.11.1': + optional: true + + '@unrs/resolver-binding-linux-arm-gnueabihf@1.11.1': + optional: true + + '@unrs/resolver-binding-linux-arm-musleabihf@1.11.1': + optional: true + + '@unrs/resolver-binding-linux-arm64-gnu@1.11.1': + optional: true + + '@unrs/resolver-binding-linux-arm64-musl@1.11.1': + optional: true + + '@unrs/resolver-binding-linux-ppc64-gnu@1.11.1': + optional: true + + '@unrs/resolver-binding-linux-riscv64-gnu@1.11.1': + optional: true + + '@unrs/resolver-binding-linux-riscv64-musl@1.11.1': + optional: true + + '@unrs/resolver-binding-linux-s390x-gnu@1.11.1': + optional: true + + '@unrs/resolver-binding-linux-x64-gnu@1.11.1': + optional: true + + '@unrs/resolver-binding-linux-x64-musl@1.11.1': + optional: true + + '@unrs/resolver-binding-wasm32-wasi@1.11.1': + dependencies: + '@napi-rs/wasm-runtime': 0.2.12 + optional: true + + '@unrs/resolver-binding-win32-arm64-msvc@1.11.1': + optional: true + + '@unrs/resolver-binding-win32-ia32-msvc@1.11.1': + optional: true + + '@unrs/resolver-binding-win32-x64-msvc@1.11.1': + optional: true + + ansi-escapes@4.3.2: + dependencies: + type-fest: 0.21.3 + + ansi-regex@5.0.1: {} + + ansi-regex@6.1.0: {} + + ansi-styles@4.3.0: + dependencies: + color-convert: 2.0.1 + + ansi-styles@5.2.0: {} + + ansi-styles@6.2.1: {} + + anymatch@3.1.3: + dependencies: + normalize-path: 3.0.0 + picomatch: 2.3.1 + + argparse@1.0.10: + dependencies: + sprintf-js: 1.0.3 + + argparse@2.0.1: {} + + babel-jest@30.0.5(@babel/core@7.28.3): + dependencies: + '@babel/core': 7.28.3 + '@jest/transform': 30.0.5 + '@types/babel__core': 7.20.5 + babel-plugin-istanbul: 7.0.0 + babel-preset-jest: 30.0.1(@babel/core@7.28.3) + chalk: 4.1.2 + graceful-fs: 4.2.11 + slash: 3.0.0 + transitivePeerDependencies: + - supports-color + + babel-plugin-istanbul@7.0.0: + dependencies: + '@babel/helper-plugin-utils': 7.27.1 + '@istanbuljs/load-nyc-config': 1.1.0 + 
'@istanbuljs/schema': 0.1.3 + istanbul-lib-instrument: 6.0.3 + test-exclude: 6.0.0 + transitivePeerDependencies: + - supports-color + + babel-plugin-jest-hoist@30.0.1: + dependencies: + '@babel/template': 7.27.2 + '@babel/types': 7.28.2 + '@types/babel__core': 7.20.5 + + babel-preset-current-node-syntax@1.2.0(@babel/core@7.28.3): + dependencies: + '@babel/core': 7.28.3 + '@babel/plugin-syntax-async-generators': 7.8.4(@babel/core@7.28.3) + '@babel/plugin-syntax-bigint': 7.8.3(@babel/core@7.28.3) + '@babel/plugin-syntax-class-properties': 7.12.13(@babel/core@7.28.3) + '@babel/plugin-syntax-class-static-block': 7.14.5(@babel/core@7.28.3) + '@babel/plugin-syntax-import-attributes': 7.27.1(@babel/core@7.28.3) + '@babel/plugin-syntax-import-meta': 7.10.4(@babel/core@7.28.3) + '@babel/plugin-syntax-json-strings': 7.8.3(@babel/core@7.28.3) + '@babel/plugin-syntax-logical-assignment-operators': 7.10.4(@babel/core@7.28.3) + '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3(@babel/core@7.28.3) + '@babel/plugin-syntax-numeric-separator': 7.10.4(@babel/core@7.28.3) + '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/core@7.28.3) + '@babel/plugin-syntax-optional-catch-binding': 7.8.3(@babel/core@7.28.3) + '@babel/plugin-syntax-optional-chaining': 7.8.3(@babel/core@7.28.3) + '@babel/plugin-syntax-private-property-in-object': 7.14.5(@babel/core@7.28.3) + '@babel/plugin-syntax-top-level-await': 7.14.5(@babel/core@7.28.3) + + babel-preset-jest@30.0.1(@babel/core@7.28.3): + dependencies: + '@babel/core': 7.28.3 + babel-plugin-jest-hoist: 30.0.1 + babel-preset-current-node-syntax: 1.2.0(@babel/core@7.28.3) + + balanced-match@1.0.2: {} + + brace-expansion@1.1.12: + dependencies: + balanced-match: 1.0.2 + concat-map: 0.0.1 + + brace-expansion@2.0.2: + dependencies: + balanced-match: 1.0.2 + + braces@3.0.3: + dependencies: + fill-range: 7.1.1 + + browserslist@4.25.2: + dependencies: + caniuse-lite: 1.0.30001735 + electron-to-chromium: 1.5.203 + node-releases: 2.0.19 + 
update-browserslist-db: 1.1.3(browserslist@4.25.2) + + bs-logger@0.2.6: + dependencies: + fast-json-stable-stringify: 2.1.0 + + bser@2.1.1: + dependencies: + node-int64: 0.4.0 + + buffer-from@1.1.2: {} + + callsites@3.1.0: {} + + camelcase@5.3.1: {} + + camelcase@6.3.0: {} + + caniuse-lite@1.0.30001735: {} + + case-anything@2.1.13: {} + + chalk@4.1.2: + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + + char-regex@1.0.2: {} + + ci-info@4.3.0: {} + + cjs-module-lexer@2.1.0: {} + + cliui@8.0.1: + dependencies: + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrap-ansi: 7.0.0 + + co@4.6.0: {} + + collect-v8-coverage@1.0.2: {} + + color-convert@2.0.1: + dependencies: + color-name: 1.1.4 + + color-name@1.1.4: {} + + concat-map@0.0.1: {} + + convert-source-map@2.0.0: {} + + cross-spawn@7.0.6: + dependencies: + path-key: 3.1.1 + shebang-command: 2.0.0 + which: 2.0.2 + + debug@4.4.1: + dependencies: + ms: 2.1.3 + + dedent@1.6.0: {} + + deepmerge@4.3.1: {} + + detect-libc@1.0.3: {} + + detect-newline@3.1.0: {} + + dprint-node@1.0.8: + dependencies: + detect-libc: 1.0.3 + + eastasianwidth@0.2.0: {} + + electron-to-chromium@1.5.203: {} + + emittery@0.13.1: {} + + emoji-regex@8.0.0: {} + + emoji-regex@9.2.2: {} + + error-ex@1.3.2: + dependencies: + is-arrayish: 0.2.1 + + escalade@3.2.0: {} + + escape-string-regexp@2.0.0: {} + + esprima@4.0.1: {} + + execa@5.1.1: + dependencies: + cross-spawn: 7.0.6 + get-stream: 6.0.1 + human-signals: 2.1.0 + is-stream: 2.0.1 + merge-stream: 2.0.0 + npm-run-path: 4.0.1 + onetime: 5.1.2 + signal-exit: 3.0.7 + strip-final-newline: 2.0.0 + + exit-x@0.2.2: {} + + expect@30.0.5: + dependencies: + '@jest/expect-utils': 30.0.5 + '@jest/get-type': 30.0.1 + jest-matcher-utils: 30.0.5 + jest-message-util: 30.0.5 + jest-mock: 30.0.5 + jest-util: 30.0.5 + + fast-json-stable-stringify@2.1.0: {} + + fb-watchman@2.0.2: + dependencies: + bser: 2.1.1 + + fill-range@7.1.1: + dependencies: + to-regex-range: 5.0.1 + + find-up@4.1.0: + dependencies: + 
locate-path: 5.0.0 + path-exists: 4.0.0 + + foreground-child@3.3.1: + dependencies: + cross-spawn: 7.0.6 + signal-exit: 4.1.0 + + fs.realpath@1.0.0: {} + + fsevents@2.3.3: + optional: true + + gensync@1.0.0-beta.2: {} + + get-caller-file@2.0.5: {} + + get-package-type@0.1.0: {} + + get-stream@6.0.1: {} + + glob@10.4.5: + dependencies: + foreground-child: 3.3.1 + jackspeak: 3.4.3 + minimatch: 9.0.5 + minipass: 7.1.2 + package-json-from-dist: 1.0.1 + path-scurry: 1.11.1 + + glob@7.2.3: + dependencies: + fs.realpath: 1.0.0 + inflight: 1.0.6 + inherits: 2.0.4 + minimatch: 3.1.2 + once: 1.4.0 + path-is-absolute: 1.0.1 + + graceful-fs@4.2.11: {} + + handlebars@4.7.8: + dependencies: + minimist: 1.2.8 + neo-async: 2.6.2 + source-map: 0.6.1 + wordwrap: 1.0.0 + optionalDependencies: + uglify-js: 3.19.3 + + has-flag@4.0.0: {} + + html-escaper@2.0.2: {} + + human-signals@2.1.0: {} + + import-local@3.2.0: + dependencies: + pkg-dir: 4.2.0 + resolve-cwd: 3.0.0 + + imurmurhash@0.1.4: {} + + inflight@1.0.6: + dependencies: + once: 1.4.0 + wrappy: 1.0.2 + + inherits@2.0.4: {} + + is-arrayish@0.2.1: {} + + is-fullwidth-code-point@3.0.0: {} + + is-generator-fn@2.1.0: {} + + is-number@7.0.0: {} + + is-stream@2.0.1: {} + + isexe@2.0.0: {} + + istanbul-lib-coverage@3.2.2: {} + + istanbul-lib-instrument@6.0.3: + dependencies: + '@babel/core': 7.28.3 + '@babel/parser': 7.28.3 + '@istanbuljs/schema': 0.1.3 + istanbul-lib-coverage: 3.2.2 + semver: 7.7.2 + transitivePeerDependencies: + - supports-color + + istanbul-lib-report@3.0.1: + dependencies: + istanbul-lib-coverage: 3.2.2 + make-dir: 4.0.0 + supports-color: 7.2.0 + + istanbul-lib-source-maps@5.0.6: + dependencies: + '@jridgewell/trace-mapping': 0.3.30 + debug: 4.4.1 + istanbul-lib-coverage: 3.2.2 + transitivePeerDependencies: + - supports-color + + istanbul-reports@3.1.7: + dependencies: + html-escaper: 2.0.2 + istanbul-lib-report: 3.0.1 + + jackspeak@3.4.3: + dependencies: + '@isaacs/cliui': 8.0.2 + optionalDependencies: + 
'@pkgjs/parseargs': 0.11.0 + + jest-changed-files@30.0.5: + dependencies: + execa: 5.1.1 + jest-util: 30.0.5 + p-limit: 3.1.0 + + jest-circus@30.0.5: + dependencies: + '@jest/environment': 30.0.5 + '@jest/expect': 30.0.5 + '@jest/test-result': 30.0.5 + '@jest/types': 30.0.5 + '@types/node': 22.17.1 + chalk: 4.1.2 + co: 4.6.0 + dedent: 1.6.0 + is-generator-fn: 2.1.0 + jest-each: 30.0.5 + jest-matcher-utils: 30.0.5 + jest-message-util: 30.0.5 + jest-runtime: 30.0.5 + jest-snapshot: 30.0.5 + jest-util: 30.0.5 + p-limit: 3.1.0 + pretty-format: 30.0.5 + pure-rand: 7.0.1 + slash: 3.0.0 + stack-utils: 2.0.6 + transitivePeerDependencies: + - babel-plugin-macros + - supports-color + + jest-cli@30.0.5(@types/node@22.17.1): + dependencies: + '@jest/core': 30.0.5 + '@jest/test-result': 30.0.5 + '@jest/types': 30.0.5 + chalk: 4.1.2 + exit-x: 0.2.2 + import-local: 3.2.0 + jest-config: 30.0.5(@types/node@22.17.1) + jest-util: 30.0.5 + jest-validate: 30.0.5 + yargs: 17.7.2 + transitivePeerDependencies: + - '@types/node' + - babel-plugin-macros + - esbuild-register + - supports-color + - ts-node + + jest-config@30.0.5(@types/node@22.17.1): + dependencies: + '@babel/core': 7.28.3 + '@jest/get-type': 30.0.1 + '@jest/pattern': 30.0.1 + '@jest/test-sequencer': 30.0.5 + '@jest/types': 30.0.5 + babel-jest: 30.0.5(@babel/core@7.28.3) + chalk: 4.1.2 + ci-info: 4.3.0 + deepmerge: 4.3.1 + glob: 10.4.5 + graceful-fs: 4.2.11 + jest-circus: 30.0.5 + jest-docblock: 30.0.1 + jest-environment-node: 30.0.5 + jest-regex-util: 30.0.1 + jest-resolve: 30.0.5 + jest-runner: 30.0.5 + jest-util: 30.0.5 + jest-validate: 30.0.5 + micromatch: 4.0.8 + parse-json: 5.2.0 + pretty-format: 30.0.5 + slash: 3.0.0 + strip-json-comments: 3.1.1 + optionalDependencies: + '@types/node': 22.17.1 + transitivePeerDependencies: + - babel-plugin-macros + - supports-color + + jest-diff@30.0.5: + dependencies: + '@jest/diff-sequences': 30.0.1 + '@jest/get-type': 30.0.1 + chalk: 4.1.2 + pretty-format: 30.0.5 + + 
jest-docblock@30.0.1: + dependencies: + detect-newline: 3.1.0 + + jest-each@30.0.5: + dependencies: + '@jest/get-type': 30.0.1 + '@jest/types': 30.0.5 + chalk: 4.1.2 + jest-util: 30.0.5 + pretty-format: 30.0.5 + + jest-environment-node@30.0.5: + dependencies: + '@jest/environment': 30.0.5 + '@jest/fake-timers': 30.0.5 + '@jest/types': 30.0.5 + '@types/node': 22.17.1 + jest-mock: 30.0.5 + jest-util: 30.0.5 + jest-validate: 30.0.5 + + jest-haste-map@30.0.5: + dependencies: + '@jest/types': 30.0.5 + '@types/node': 22.17.1 + anymatch: 3.1.3 + fb-watchman: 2.0.2 + graceful-fs: 4.2.11 + jest-regex-util: 30.0.1 + jest-util: 30.0.5 + jest-worker: 30.0.5 + micromatch: 4.0.8 + walker: 1.0.8 + optionalDependencies: + fsevents: 2.3.3 + + jest-leak-detector@30.0.5: + dependencies: + '@jest/get-type': 30.0.1 + pretty-format: 30.0.5 + + jest-matcher-utils@30.0.5: + dependencies: + '@jest/get-type': 30.0.1 + chalk: 4.1.2 + jest-diff: 30.0.5 + pretty-format: 30.0.5 + + jest-message-util@30.0.5: + dependencies: + '@babel/code-frame': 7.27.1 + '@jest/types': 30.0.5 + '@types/stack-utils': 2.0.3 + chalk: 4.1.2 + graceful-fs: 4.2.11 + micromatch: 4.0.8 + pretty-format: 30.0.5 + slash: 3.0.0 + stack-utils: 2.0.6 + + jest-mock@30.0.5: + dependencies: + '@jest/types': 30.0.5 + '@types/node': 22.17.1 + jest-util: 30.0.5 + + jest-pnp-resolver@1.2.3(jest-resolve@30.0.5): + optionalDependencies: + jest-resolve: 30.0.5 + + jest-regex-util@30.0.1: {} + + jest-resolve-dependencies@30.0.5: + dependencies: + jest-regex-util: 30.0.1 + jest-snapshot: 30.0.5 + transitivePeerDependencies: + - supports-color + + jest-resolve@30.0.5: + dependencies: + chalk: 4.1.2 + graceful-fs: 4.2.11 + jest-haste-map: 30.0.5 + jest-pnp-resolver: 1.2.3(jest-resolve@30.0.5) + jest-util: 30.0.5 + jest-validate: 30.0.5 + slash: 3.0.0 + unrs-resolver: 1.11.1 + + jest-runner@30.0.5: + dependencies: + '@jest/console': 30.0.5 + '@jest/environment': 30.0.5 + '@jest/test-result': 30.0.5 + '@jest/transform': 30.0.5 + 
'@jest/types': 30.0.5 + '@types/node': 22.17.1 + chalk: 4.1.2 + emittery: 0.13.1 + exit-x: 0.2.2 + graceful-fs: 4.2.11 + jest-docblock: 30.0.1 + jest-environment-node: 30.0.5 + jest-haste-map: 30.0.5 + jest-leak-detector: 30.0.5 + jest-message-util: 30.0.5 + jest-resolve: 30.0.5 + jest-runtime: 30.0.5 + jest-util: 30.0.5 + jest-watcher: 30.0.5 + jest-worker: 30.0.5 + p-limit: 3.1.0 + source-map-support: 0.5.13 + transitivePeerDependencies: + - supports-color + + jest-runtime@30.0.5: + dependencies: + '@jest/environment': 30.0.5 + '@jest/fake-timers': 30.0.5 + '@jest/globals': 30.0.5 + '@jest/source-map': 30.0.1 + '@jest/test-result': 30.0.5 + '@jest/transform': 30.0.5 + '@jest/types': 30.0.5 + '@types/node': 22.17.1 + chalk: 4.1.2 + cjs-module-lexer: 2.1.0 + collect-v8-coverage: 1.0.2 + glob: 10.4.5 + graceful-fs: 4.2.11 + jest-haste-map: 30.0.5 + jest-message-util: 30.0.5 + jest-mock: 30.0.5 + jest-regex-util: 30.0.1 + jest-resolve: 30.0.5 + jest-snapshot: 30.0.5 + jest-util: 30.0.5 + slash: 3.0.0 + strip-bom: 4.0.0 + transitivePeerDependencies: + - supports-color + + jest-snapshot@30.0.5: + dependencies: + '@babel/core': 7.28.3 + '@babel/generator': 7.28.3 + '@babel/plugin-syntax-jsx': 7.27.1(@babel/core@7.28.3) + '@babel/plugin-syntax-typescript': 7.27.1(@babel/core@7.28.3) + '@babel/types': 7.28.2 + '@jest/expect-utils': 30.0.5 + '@jest/get-type': 30.0.1 + '@jest/snapshot-utils': 30.0.5 + '@jest/transform': 30.0.5 + '@jest/types': 30.0.5 + babel-preset-current-node-syntax: 1.2.0(@babel/core@7.28.3) + chalk: 4.1.2 + expect: 30.0.5 + graceful-fs: 4.2.11 + jest-diff: 30.0.5 + jest-matcher-utils: 30.0.5 + jest-message-util: 30.0.5 + jest-util: 30.0.5 + pretty-format: 30.0.5 + semver: 7.7.2 + synckit: 0.11.11 + transitivePeerDependencies: + - supports-color + + jest-util@30.0.5: + dependencies: + '@jest/types': 30.0.5 + '@types/node': 22.17.1 + chalk: 4.1.2 + ci-info: 4.3.0 + graceful-fs: 4.2.11 + picomatch: 4.0.3 + + jest-validate@30.0.5: + dependencies: + 
'@jest/get-type': 30.0.1 + '@jest/types': 30.0.5 + camelcase: 6.3.0 + chalk: 4.1.2 + leven: 3.1.0 + pretty-format: 30.0.5 + + jest-watcher@30.0.5: + dependencies: + '@jest/test-result': 30.0.5 + '@jest/types': 30.0.5 + '@types/node': 22.17.1 + ansi-escapes: 4.3.2 + chalk: 4.1.2 + emittery: 0.13.1 + jest-util: 30.0.5 + string-length: 4.0.2 + + jest-worker@30.0.5: + dependencies: + '@types/node': 22.17.1 + '@ungap/structured-clone': 1.3.0 + jest-util: 30.0.5 + merge-stream: 2.0.0 + supports-color: 8.1.1 + + jest@30.0.5(@types/node@22.17.1): + dependencies: + '@jest/core': 30.0.5 + '@jest/types': 30.0.5 + import-local: 3.2.0 + jest-cli: 30.0.5(@types/node@22.17.1) + transitivePeerDependencies: + - '@types/node' + - babel-plugin-macros + - esbuild-register + - supports-color + - ts-node + + js-tokens@4.0.0: {} + + js-yaml@3.14.1: + dependencies: + argparse: 1.0.10 + esprima: 4.0.1 + + js-yaml@4.1.0: + dependencies: + argparse: 2.0.1 + + jsesc@3.1.0: {} + + json-parse-even-better-errors@2.3.1: {} + + json5@2.2.3: {} + + leven@3.1.0: {} + + lines-and-columns@1.2.4: {} + + locate-path@5.0.0: + dependencies: + p-locate: 4.1.0 + + lodash.camelcase@4.3.0: {} + + lodash.memoize@4.1.2: {} + + long@5.3.2: {} + + lru-cache@10.4.3: {} + + lru-cache@5.1.1: + dependencies: + yallist: 3.1.1 + + make-dir@4.0.0: + dependencies: + semver: 7.7.2 + + make-error@1.3.6: {} + + makeerror@1.0.12: + dependencies: + tmpl: 1.0.5 + + merge-stream@2.0.0: {} + + micromatch@4.0.8: + dependencies: + braces: 3.0.3 + picomatch: 2.3.1 + + mimic-fn@2.1.0: {} + + minimatch@3.1.2: + dependencies: + brace-expansion: 1.1.12 + + minimatch@9.0.5: + dependencies: + brace-expansion: 2.0.2 + + minimist@1.2.8: {} + + minipass@7.1.2: {} + + ms@2.1.3: {} + + napi-postinstall@0.3.3: {} + + natural-compare@1.4.0: {} + + neo-async@2.6.2: {} + + node-int64@0.4.0: {} + + node-releases@2.0.19: {} + + normalize-path@3.0.0: {} + + npm-run-path@4.0.1: + dependencies: + path-key: 3.1.1 + + once@1.4.0: + dependencies: + 
wrappy: 1.0.2 + + onetime@5.1.2: + dependencies: + mimic-fn: 2.1.0 + + p-limit@2.3.0: + dependencies: + p-try: 2.2.0 + + p-limit@3.1.0: + dependencies: + yocto-queue: 0.1.0 + + p-locate@4.1.0: + dependencies: + p-limit: 2.3.0 + + p-try@2.2.0: {} + + package-json-from-dist@1.0.1: {} + + parse-json@5.2.0: + dependencies: + '@babel/code-frame': 7.27.1 + error-ex: 1.3.2 + json-parse-even-better-errors: 2.3.1 + lines-and-columns: 1.2.4 + + path-exists@4.0.0: {} + + path-is-absolute@1.0.1: {} + + path-key@3.1.1: {} + + path-scurry@1.11.1: + dependencies: + lru-cache: 10.4.3 + minipass: 7.1.2 + + picocolors@1.1.1: {} + + picomatch@2.3.1: {} + + picomatch@4.0.3: {} + + pirates@4.0.7: {} + + pkg-dir@4.2.0: + dependencies: + find-up: 4.1.0 + + pretty-format@30.0.5: + dependencies: + '@jest/schemas': 30.0.5 + ansi-styles: 5.2.0 + react-is: 18.3.1 + + protobufjs@7.5.3: + dependencies: + '@protobufjs/aspromise': 1.1.2 + '@protobufjs/base64': 1.1.2 + '@protobufjs/codegen': 2.0.4 '@protobufjs/eventemitter': 1.1.0 '@protobufjs/fetch': 1.1.0 '@protobufjs/float': 1.0.2 @@ -299,18 +2863,122 @@ snapshots: '@types/node': 22.17.1 long: 5.3.2 + pure-rand@7.0.1: {} + + react-is@18.3.1: {} + require-directory@2.1.1: {} + resolve-cwd@3.0.0: + dependencies: + resolve-from: 5.0.0 + + resolve-from@5.0.0: {} + + semver@6.3.1: {} + + semver@7.7.2: {} + + shebang-command@2.0.0: + dependencies: + shebang-regex: 3.0.0 + + shebang-regex@3.0.0: {} + + signal-exit@3.0.7: {} + + signal-exit@4.1.0: {} + + slash@3.0.0: {} + + source-map-support@0.5.13: + dependencies: + buffer-from: 1.1.2 + source-map: 0.6.1 + + source-map@0.6.1: {} + + sprintf-js@1.0.3: {} + + stack-utils@2.0.6: + dependencies: + escape-string-regexp: 2.0.0 + + string-length@4.0.2: + dependencies: + char-regex: 1.0.2 + strip-ansi: 6.0.1 + string-width@4.2.3: dependencies: emoji-regex: 8.0.0 is-fullwidth-code-point: 3.0.0 strip-ansi: 6.0.1 + string-width@5.1.2: + dependencies: + eastasianwidth: 0.2.0 + emoji-regex: 9.2.2 + strip-ansi: 
7.1.0 + strip-ansi@6.0.1: dependencies: ansi-regex: 5.0.1 + strip-ansi@7.1.0: + dependencies: + ansi-regex: 6.1.0 + + strip-bom@4.0.0: {} + + strip-final-newline@2.0.0: {} + + strip-json-comments@3.1.1: {} + + supports-color@7.2.0: + dependencies: + has-flag: 4.0.0 + + supports-color@8.1.1: + dependencies: + has-flag: 4.0.0 + + synckit@0.11.11: + dependencies: + '@pkgr/core': 0.2.9 + + test-exclude@6.0.0: + dependencies: + '@istanbuljs/schema': 0.1.3 + glob: 7.2.3 + minimatch: 3.1.2 + + tmpl@1.0.5: {} + + to-regex-range@5.0.1: + dependencies: + is-number: 7.0.0 + + ts-jest@29.4.1(@babel/core@7.28.3)(@jest/transform@30.0.5)(@jest/types@30.0.5)(babel-jest@30.0.5(@babel/core@7.28.3))(jest-util@30.0.5)(jest@30.0.5(@types/node@22.17.1))(typescript@5.9.2): + dependencies: + bs-logger: 0.2.6 + fast-json-stable-stringify: 2.1.0 + handlebars: 4.7.8 + jest: 30.0.5(@types/node@22.17.1) + json5: 2.2.3 + lodash.memoize: 4.1.2 + make-error: 1.3.6 + semver: 7.7.2 + type-fest: 4.41.0 + typescript: 5.9.2 + yargs-parser: 21.1.1 + optionalDependencies: + '@babel/core': 7.28.3 + '@jest/transform': 30.0.5 + '@jest/types': 30.0.5 + babel-jest: 30.0.5(@babel/core@7.28.3) + jest-util: 30.0.5 + ts-poet@6.12.0: dependencies: dprint-node: 1.0.8 @@ -326,18 +2994,91 @@ snapshots: ts-poet: 6.12.0 ts-proto-descriptors: 2.0.0 + tslib@2.8.1: + optional: true + + type-detect@4.0.8: {} + + type-fest@0.21.3: {} + + type-fest@4.41.0: {} + typescript@5.9.2: {} + uglify-js@3.19.3: + optional: true + undici-types@6.21.0: {} + unrs-resolver@1.11.1: + dependencies: + napi-postinstall: 0.3.3 + optionalDependencies: + '@unrs/resolver-binding-android-arm-eabi': 1.11.1 + '@unrs/resolver-binding-android-arm64': 1.11.1 + '@unrs/resolver-binding-darwin-arm64': 1.11.1 + '@unrs/resolver-binding-darwin-x64': 1.11.1 + '@unrs/resolver-binding-freebsd-x64': 1.11.1 + '@unrs/resolver-binding-linux-arm-gnueabihf': 1.11.1 + '@unrs/resolver-binding-linux-arm-musleabihf': 1.11.1 + '@unrs/resolver-binding-linux-arm64-gnu': 
1.11.1 + '@unrs/resolver-binding-linux-arm64-musl': 1.11.1 + '@unrs/resolver-binding-linux-ppc64-gnu': 1.11.1 + '@unrs/resolver-binding-linux-riscv64-gnu': 1.11.1 + '@unrs/resolver-binding-linux-riscv64-musl': 1.11.1 + '@unrs/resolver-binding-linux-s390x-gnu': 1.11.1 + '@unrs/resolver-binding-linux-x64-gnu': 1.11.1 + '@unrs/resolver-binding-linux-x64-musl': 1.11.1 + '@unrs/resolver-binding-wasm32-wasi': 1.11.1 + '@unrs/resolver-binding-win32-arm64-msvc': 1.11.1 + '@unrs/resolver-binding-win32-ia32-msvc': 1.11.1 + '@unrs/resolver-binding-win32-x64-msvc': 1.11.1 + + update-browserslist-db@1.1.3(browserslist@4.25.2): + dependencies: + browserslist: 4.25.2 + escalade: 3.2.0 + picocolors: 1.1.1 + + v8-to-istanbul@9.3.0: + dependencies: + '@jridgewell/trace-mapping': 0.3.30 + '@types/istanbul-lib-coverage': 2.0.6 + convert-source-map: 2.0.0 + + walker@1.0.8: + dependencies: + makeerror: 1.0.12 + + which@2.0.2: + dependencies: + isexe: 2.0.0 + + wordwrap@1.0.0: {} + wrap-ansi@7.0.0: dependencies: ansi-styles: 4.3.0 string-width: 4.2.3 strip-ansi: 6.0.1 + wrap-ansi@8.1.0: + dependencies: + ansi-styles: 6.2.1 + string-width: 5.1.2 + strip-ansi: 7.1.0 + + wrappy@1.0.2: {} + + write-file-atomic@5.0.1: + dependencies: + imurmurhash: 0.1.4 + signal-exit: 4.1.0 + y18n@5.0.8: {} + yallist@3.1.1: {} + yargs-parser@21.1.1: {} yargs@17.7.2: @@ -349,3 +3090,5 @@ snapshots: string-width: 4.2.3 y18n: 5.0.8 yargs-parser: 21.1.1 + + yocto-queue@0.1.0: {} diff --git a/typescript-sdk/src/connectivity.ts b/typescript-sdk/src/connectivity.ts index 3c101de..52c044e 100644 --- a/typescript-sdk/src/connectivity.ts +++ b/typescript-sdk/src/connectivity.ts @@ -1,142 +1,284 @@ -import { ChannelCredentials, Metadata, credentials } from "@grpc/grpc-js"; +import { + ChannelCredentials, + credentials, + Metadata, + Client, + ServiceError, +} from "@grpc/grpc-js"; import { FumaroleClient } from "./grpc/fumarole"; -import { FumaroleConfig } from "./config/config"; -const X_TOKEN_HEADER = "x-token"; 
+export const X_TOKEN_HEADER = "x-token"; -class TritonAuthMetadataGenerator { - constructor(private readonly xToken: string) {} - - generateMetadata(): Promise { - const metadata = new Metadata(); - metadata.set(X_TOKEN_HEADER, this.xToken); - return Promise.resolve(metadata); - } +interface FumaroleConfig { + xToken?: string; + xMetadata?: Record; } -interface CallMetadataOptions { - metadata?: Metadata; -} - -class MetadataProvider { - private metadata: Metadata; - - constructor(metadata: Record) { - this.metadata = new Metadata(); - Object.entries(metadata).forEach(([key, value]) => { - this.metadata.set(key, value); - }); - } +// Simple logger implementation +class Logger { + constructor(private prefix: string) {} - getMetadata(): Promise { - return Promise.resolve(this.metadata); + debug(message: string) { + console.debug(`[${this.prefix}] ${message}`); } } export class FumaroleGrpcConnector { - private static readonly logger = console; - - constructor( - private readonly config: FumaroleConfig, - private readonly endpoint: string - ) {} + private readonly logger: Logger; + private readonly config: FumaroleConfig; + private readonly endpoint: string; - async connect( - grpcOptions: Record = {} - ): Promise { - const options = { - "grpc.max_receive_message_length": 111111110, - ...grpcOptions, - }; - - let channelCredentials: ChannelCredentials; - let insecureXToken: string | undefined; + constructor(config: FumaroleConfig, endpoint: string) { + this.logger = new Logger("FumaroleGrpcConnector"); + this.config = config; + this.endpoint = endpoint; + } - // Parse endpoint properly - const endpointURL = new URL(this.endpoint); - let port = endpointURL.port; - if (port === "") { - port = endpointURL.protocol === "https:" ? 
"443" : "80"; + private createMetadata(): Metadata { + const metadata = new Metadata(); + if (this.config.xMetadata) { + Object.entries(this.config.xMetadata).forEach(([key, value]) => { + metadata.set(key, value); + }); } - const address = `${endpointURL.hostname}:${port}`; + return metadata; + } - // Handle credentials based on protocol - if (endpointURL.protocol === "https:") { - channelCredentials = credentials.combineChannelCredentials( - credentials.createSsl(), - credentials.createFromMetadataGenerator((_params, callback) => { - const metadata = new Metadata(); - if (this.config.xToken) { - metadata.add("x-token", this.config.xToken); - } - if (this.config.xMetadata) { - Object.entries(this.config.xMetadata).forEach(([key, value]) => { - metadata.add(key, value); - }); - } - callback(null, metadata); - }) - ); - } else { - channelCredentials = credentials.createInsecure(); - if (this.config.xToken) { - insecureXToken = this.config.xToken; - } - } + async connect( + grpcOptions: { [key: string]: any }[] = [] + ): Promise { + this.logger.debug(`Connecting to endpoint: ${this.endpoint}`); - // Create the client options with simpler settings - const clientOptions = { - ...options, - "grpc.enable_http_proxy": 0, - // Basic keepalive settings - "grpc.keepalive_time_ms": 20000, - "grpc.keepalive_timeout_ms": 10000, + const defaultOptions: { [key: string]: any } = { + "grpc.max_receive_message_length": 111111110, + "grpc.keepalive_time_ms": 10000, + "grpc.keepalive_timeout_ms": 5000, "grpc.http2.min_time_between_pings_ms": 10000, - // Connection settings - "grpc.initial_reconnect_backoff_ms": 100, - "grpc.max_reconnect_backoff_ms": 3000, - "grpc.min_reconnect_backoff_ms": 100, - // Enable retries - "grpc.enable_retries": 1, + "grpc.keepalive_permit_without_calls": 1, + "grpc.initial_reconnect_backoff_ms": 1000, + "grpc.max_reconnect_backoff_ms": 10000, "grpc.service_config": JSON.stringify({ + loadBalancingConfig: [{ round_robin: {} }], methodConfig: [ { - name: 
[{}], // Apply to all methods + name: [{ service: "fumarole.Fumarole" }], retryPolicy: { maxAttempts: 5, - initialBackoff: "0.1s", - maxBackoff: "3s", + initialBackoff: "1s", + maxBackoff: "10s", backoffMultiplier: 2, - retryableStatusCodes: ["UNAVAILABLE", "DEADLINE_EXCEEDED"], + retryableStatusCodes: ["UNAVAILABLE"], }, }, ], }), }; - // Create the client with credentials and options - const client = new FumaroleClient( - address, - channelCredentials, - clientOptions - ); + const channelOptions: { [key: string]: any } = { + ...defaultOptions, + }; + + // Add additional options + grpcOptions.forEach((opt) => { + Object.entries(opt).forEach(([key, value]) => { + this.logger.debug(`Setting channel option: ${key} = ${value}`); + channelOptions[key] = value; + }); + }); + + let channelCredentials: ChannelCredentials; - // Do a simple connection check try { - await new Promise((resolve, reject) => { - const deadline = Date.now() + 5000; // 5 second timeout - client.waitForReady(deadline, (err) => { - if (err) { - reject(err); + const endpointURL = new URL(this.endpoint); + this.logger.debug( + `Parsed URL - protocol: ${endpointURL.protocol}, hostname: ${endpointURL.hostname}, port: ${endpointURL.port}` + ); + + let port = endpointURL.port; + if (port === "") { + switch (endpointURL.protocol) { + case "https:": + port = "443"; + break; + case "http:": + port = "80"; + break; + } + this.logger.debug(`No port specified, using default port: ${port}`); + } + + // Check if we need to use TLS. 
+ if (endpointURL.protocol === "https:") { + this.logger.debug("HTTPS detected, setting up SSL credentials"); + const sslCreds = credentials.createSsl(); + this.logger.debug("SSL credentials created"); + + const callCreds = credentials.createFromMetadataGenerator( + (_params, callback) => { + const metadata = new Metadata(); + if (this.config.xToken !== undefined) { + this.logger.debug("Adding x-token to metadata"); + metadata.add(X_TOKEN_HEADER, this.config.xToken); + } + return callback(null, metadata); + } + ); + this.logger.debug("Call credentials created"); + + channelCredentials = credentials.combineChannelCredentials( + sslCreds, + callCreds + ); + this.logger.debug("Using secure channel with x-token authentication"); + } else { + channelCredentials = credentials.createInsecure(); + this.logger.debug("Using insecure channel without authentication"); + } + + const finalEndpoint = `${endpointURL.hostname}:${port}`; + this.logger.debug(`Creating gRPC client with endpoint: ${finalEndpoint}`); + + const client = new FumaroleClient( + finalEndpoint, + channelCredentials, + channelOptions + ); + + this.logger.debug(`gRPC client created, waiting for ready state...`); + + // Wait for the client to be ready with a longer timeout + await new Promise((resolve, reject) => { + const deadline = new Date().getTime() + 30000; // 30 second timeout + client.waitForReady(deadline, (error) => { + if (error) { + this.logger.debug( + `Client failed to become ready: ${error.message}` + ); + const grpcError = error as ServiceError; + if (grpcError.code !== undefined) + this.logger.debug(`Error code: ${grpcError.code}`); + if (grpcError.details) + this.logger.debug(`Error details: ${grpcError.details}`); + if (grpcError.metadata) + this.logger.debug(`Error metadata: ${grpcError.metadata}`); + reject(error); } else { - resolve(); + this.logger.debug(`Client is ready`); + resolve(undefined); } }); }); + + this.logger.debug( + `gRPC client created successfully for ${finalEndpoint}` + ); 
+ return client; } catch (error) { + this.logger.debug( + `Error during connection setup: ${ + error instanceof Error ? error.message : String(error) + }` + ); throw error; } + } +} + +// Helper function to create a gRPC channel (for backward compatibility) +export function createGrpcChannel( + endpoint: string, + xToken?: string, + compression?: any, + ...grpcOptions: { [key: string]: any }[] +): Client { + console.debug(`Creating gRPC channel for endpoint: ${endpoint}`); + const defaultOptions: { [key: string]: any } = { + "grpc.max_receive_message_length": 111111110, + "grpc.keepalive_time_ms": 10000, + "grpc.keepalive_timeout_ms": 5000, + "grpc.http2.min_time_between_pings_ms": 10000, + "grpc.keepalive_permit_without_calls": 1, + }; + + const channelOptions: { [key: string]: any } = { + ...defaultOptions, + }; + + // Add additional options + grpcOptions.forEach((opt) => { + Object.entries(opt).forEach(([key, value]) => { + console.debug(`Setting channel option: ${key} = ${value}`); + channelOptions[key] = value; + }); + }); + + try { + const endpointURL = new URL(endpoint); + console.debug( + `Parsed URL - protocol: ${endpointURL.protocol}, hostname: ${endpointURL.hostname}, port: ${endpointURL.port}` + ); + + let port = endpointURL.port; + if (port === "") { + switch (endpointURL.protocol) { + case "https:": + port = "443"; + break; + case "http:": + port = "80"; + break; + } + console.debug(`No port specified, using default port: ${port}`); + } + + let channelCredentials: ChannelCredentials; + + // Check if we need to use TLS. 
+ if (endpointURL.protocol === "https:") { + console.debug("HTTPS detected, setting up SSL credentials"); + const sslCreds = credentials.createSsl(); + console.debug("SSL credentials created"); + + const callCreds = credentials.createFromMetadataGenerator( + (_params, callback) => { + const metadata = new Metadata(); + if (xToken !== undefined) { + console.debug("Adding x-token to metadata"); + metadata.add(X_TOKEN_HEADER, xToken); + } + return callback(null, metadata); + } + ); + console.debug("Call credentials created"); + + channelCredentials = credentials.combineChannelCredentials( + sslCreds, + callCreds + ); + console.debug("Combined credentials created for secure channel"); + } else { + channelCredentials = credentials.createInsecure(); + console.debug("Using insecure channel without authentication"); + } + + const finalEndpoint = `${endpointURL.hostname}:${port}`; + console.debug(`Creating gRPC client with endpoint: ${finalEndpoint}`); + + const client = new Client( + finalEndpoint, + channelCredentials, + channelOptions + ); + + console.debug("gRPC client created successfully"); return client; + } catch (error) { + console.debug( + `Error creating gRPC channel: ${ + error instanceof Error ? 
error.message : String(error) + }` + ); + throw error; } } diff --git a/typescript-sdk/src/grpc/fumarole.ts b/typescript-sdk/src/grpc/fumarole.ts index 4c941d5..f487fa3 100644 --- a/typescript-sdk/src/grpc/fumarole.ts +++ b/typescript-sdk/src/grpc/fumarole.ts @@ -36,8 +36,8 @@ import { export const protobufPackage = "fumarole"; export enum InitialOffsetPolicy { - /** LATEST - FROM_SLOT = 1; */ LATEST = 0, + FROM_SLOT = 1, UNRECOGNIZED = -1, } @@ -46,6 +46,9 @@ export function initialOffsetPolicyFromJSON(object: any): InitialOffsetPolicy { case 0: case "LATEST": return InitialOffsetPolicy.LATEST; + case 1: + case "FROM_SLOT": + return InitialOffsetPolicy.FROM_SLOT; case -1: case "UNRECOGNIZED": default: @@ -57,12 +60,24 @@ export function initialOffsetPolicyToJSON(object: InitialOffsetPolicy): string { switch (object) { case InitialOffsetPolicy.LATEST: return "LATEST"; + case InitialOffsetPolicy.FROM_SLOT: + return "FROM_SLOT"; case InitialOffsetPolicy.UNRECOGNIZED: default: return "UNRECOGNIZED"; } } +export interface GetSlotRangeRequest { + blockchainId: Uint8Array; +} + +export interface GetSlotRangeResponse { + blockchainId: Uint8Array; + minSlot: bigint; + maxSlot: bigint; +} + export interface GetChainTipRequest { blockchainId: Uint8Array; } @@ -245,10 +260,166 @@ export interface CreateConsumerGroupResponse { export interface CreateConsumerGroupRequest { consumerGroupName: string; - /** optional uint64 from_slot = 3; */ initialOffsetPolicy: InitialOffsetPolicy; + fromSlot?: bigint | undefined; +} + +function createBaseGetSlotRangeRequest(): GetSlotRangeRequest { + return { blockchainId: new Uint8Array(0) }; +} + +export const GetSlotRangeRequest: MessageFns = { + encode(message: GetSlotRangeRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.blockchainId.length !== 0) { + writer.uint32(10).bytes(message.blockchainId); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): GetSlotRangeRequest { + 
const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetSlotRangeRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.blockchainId = reader.bytes(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): GetSlotRangeRequest { + return { blockchainId: isSet(object.blockchainId) ? bytesFromBase64(object.blockchainId) : new Uint8Array(0) }; + }, + + toJSON(message: GetSlotRangeRequest): unknown { + const obj: any = {}; + if (message.blockchainId.length !== 0) { + obj.blockchainId = base64FromBytes(message.blockchainId); + } + return obj; + }, + + create, I>>(base?: I): GetSlotRangeRequest { + return GetSlotRangeRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): GetSlotRangeRequest { + const message = createBaseGetSlotRangeRequest(); + message.blockchainId = object.blockchainId ?? 
new Uint8Array(0); + return message; + }, +}; + +function createBaseGetSlotRangeResponse(): GetSlotRangeResponse { + return { blockchainId: new Uint8Array(0), minSlot: 0n, maxSlot: 0n }; } +export const GetSlotRangeResponse: MessageFns = { + encode(message: GetSlotRangeResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.blockchainId.length !== 0) { + writer.uint32(10).bytes(message.blockchainId); + } + if (message.minSlot !== 0n) { + if (BigInt.asIntN(64, message.minSlot) !== message.minSlot) { + throw new globalThis.Error("value provided for field message.minSlot of type int64 too large"); + } + writer.uint32(16).int64(message.minSlot); + } + if (message.maxSlot !== 0n) { + if (BigInt.asIntN(64, message.maxSlot) !== message.maxSlot) { + throw new globalThis.Error("value provided for field message.maxSlot of type int64 too large"); + } + writer.uint32(24).int64(message.maxSlot); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): GetSlotRangeResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetSlotRangeResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.blockchainId = reader.bytes(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.minSlot = reader.int64() as bigint; + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.maxSlot = reader.int64() as bigint; + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): GetSlotRangeResponse { + return { + blockchainId: isSet(object.blockchainId) ? bytesFromBase64(object.blockchainId) : new Uint8Array(0), + minSlot: isSet(object.minSlot) ? 
BigInt(object.minSlot) : 0n, + maxSlot: isSet(object.maxSlot) ? BigInt(object.maxSlot) : 0n, + }; + }, + + toJSON(message: GetSlotRangeResponse): unknown { + const obj: any = {}; + if (message.blockchainId.length !== 0) { + obj.blockchainId = base64FromBytes(message.blockchainId); + } + if (message.minSlot !== 0n) { + obj.minSlot = message.minSlot.toString(); + } + if (message.maxSlot !== 0n) { + obj.maxSlot = message.maxSlot.toString(); + } + return obj; + }, + + create, I>>(base?: I): GetSlotRangeResponse { + return GetSlotRangeResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): GetSlotRangeResponse { + const message = createBaseGetSlotRangeResponse(); + message.blockchainId = object.blockchainId ?? new Uint8Array(0); + message.minSlot = object.minSlot ?? 0n; + message.maxSlot = object.maxSlot ?? 0n; + return message; + }, +}; + function createBaseGetChainTipRequest(): GetChainTipRequest { return { blockchainId: new Uint8Array(0) }; } @@ -3199,7 +3370,7 @@ export const CreateConsumerGroupResponse: MessageFns = { @@ -3210,6 +3381,12 @@ export const CreateConsumerGroupRequest: MessageFns if (message.initialOffsetPolicy !== 0) { writer.uint32(16).int32(message.initialOffsetPolicy); } + if (message.fromSlot !== undefined) { + if (BigInt.asUintN(64, message.fromSlot) !== message.fromSlot) { + throw new globalThis.Error("value provided for field message.fromSlot of type uint64 too large"); + } + writer.uint32(24).uint64(message.fromSlot); + } return writer; }, @@ -3236,6 +3413,14 @@ export const CreateConsumerGroupRequest: MessageFns message.initialOffsetPolicy = reader.int32() as any; continue; } + case 3: { + if (tag !== 24) { + break; + } + + message.fromSlot = reader.uint64() as bigint; + continue; + } } if ((tag & 7) === 4 || tag === 0) { break; @@ -3251,6 +3436,7 @@ export const CreateConsumerGroupRequest: MessageFns initialOffsetPolicy: isSet(object.initialOffsetPolicy) ? 
initialOffsetPolicyFromJSON(object.initialOffsetPolicy) : 0, + fromSlot: isSet(object.fromSlot) ? BigInt(object.fromSlot) : undefined, }; }, @@ -3262,6 +3448,9 @@ export const CreateConsumerGroupRequest: MessageFns if (message.initialOffsetPolicy !== 0) { obj.initialOffsetPolicy = initialOffsetPolicyToJSON(message.initialOffsetPolicy); } + if (message.fromSlot !== undefined) { + obj.fromSlot = message.fromSlot.toString(); + } return obj; }, @@ -3272,6 +3461,7 @@ export const CreateConsumerGroupRequest: MessageFns const message = createBaseCreateConsumerGroupRequest(); message.consumerGroupName = object.consumerGroupName ?? ""; message.initialOffsetPolicy = object.initialOffsetPolicy ?? 0; + message.fromSlot = object.fromSlot ?? undefined; return message; }, }; @@ -3368,6 +3558,16 @@ export const FumaroleService = { responseSerialize: (value: VersionResponse): Buffer => Buffer.from(VersionResponse.encode(value).finish()), responseDeserialize: (value: Buffer): VersionResponse => VersionResponse.decode(value), }, + getSlotRange: { + path: "/fumarole.Fumarole/GetSlotRange", + requestStream: false, + responseStream: false, + requestSerialize: (value: GetSlotRangeRequest): Buffer => Buffer.from(GetSlotRangeRequest.encode(value).finish()), + requestDeserialize: (value: Buffer): GetSlotRangeRequest => GetSlotRangeRequest.decode(value), + responseSerialize: (value: GetSlotRangeResponse): Buffer => + Buffer.from(GetSlotRangeResponse.encode(value).finish()), + responseDeserialize: (value: Buffer): GetSlotRangeResponse => GetSlotRangeResponse.decode(value), + }, } as const; export interface FumaroleServer extends UntypedServiceImplementation { @@ -3382,6 +3582,7 @@ export interface FumaroleServer extends UntypedServiceImplementation { /** Represents subscription to the control plane */ subscribe: handleBidiStreamingCall; version: handleUnaryCall; + getSlotRange: handleUnaryCall; } export interface FumaroleClient extends Client { @@ -3489,6 +3690,21 @@ export interface 
FumaroleClient extends Client { options: Partial, callback: (error: ServiceError | null, response: VersionResponse) => void, ): ClientUnaryCall; + getSlotRange( + request: GetSlotRangeRequest, + callback: (error: ServiceError | null, response: GetSlotRangeResponse) => void, + ): ClientUnaryCall; + getSlotRange( + request: GetSlotRangeRequest, + metadata: Metadata, + callback: (error: ServiceError | null, response: GetSlotRangeResponse) => void, + ): ClientUnaryCall; + getSlotRange( + request: GetSlotRangeRequest, + metadata: Metadata, + options: Partial, + callback: (error: ServiceError | null, response: GetSlotRangeResponse) => void, + ): ClientUnaryCall; } export const FumaroleClient = makeGenericClientConstructor(FumaroleService, "fumarole.Fumarole") as unknown as { diff --git a/typescript-sdk/src/index.ts b/typescript-sdk/src/index.ts index d751ef3..ee2c290 100644 --- a/typescript-sdk/src/index.ts +++ b/typescript-sdk/src/index.ts @@ -1,7 +1,9 @@ -import { Metadata, ServiceError } from "@grpc/grpc-js"; +import { Metadata, ServiceError, MetadataValue, status } from "@grpc/grpc-js"; import { FumaroleConfig } from "./config/config"; import { FumaroleClient as GrpcClient } from "./grpc/fumarole"; import { FumaroleGrpcConnector } from "./connectivity"; + +const X_TOKEN_HEADER = "x-token"; import { VersionRequest, VersionResponse, @@ -16,8 +18,13 @@ import { DeleteConsumerGroupResponse, CreateConsumerGroupRequest, CreateConsumerGroupResponse, + InitialOffsetPolicy, } from "./grpc/fumarole"; -import { SubscribeRequest, SubscribeUpdate } from "./grpc/geyser"; +import { + SubscribeRequest, + SubscribeUpdate, + CommitmentLevel, +} from "./grpc/geyser"; import type { DragonsmouthAdapterSession, FumaroleSubscribeConfig, @@ -30,6 +37,7 @@ import { DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP, DEFAULT_GC_INTERVAL, DEFAULT_SLOT_MEMORY_RETENTION, + getDefaultFumaroleSubscribeConfig, } from "./types"; export class FumaroleClient { @@ -37,6 +45,12 @@ export class FumaroleClient { 
private readonly connector: FumaroleGrpcConnector; private readonly stub: GrpcClient; + private static safeStringify(obj: unknown): string { + return JSON.stringify(obj, (_, v) => + typeof v === "bigint" ? v.toString() : v + ); + } + constructor(connector: FumaroleGrpcConnector, stub: GrpcClient) { this.connector = connector; this.stub = stub; @@ -47,11 +61,14 @@ export class FumaroleClient { const connector = new FumaroleGrpcConnector(config, endpoint); FumaroleClient.logger.debug(`Connecting to ${endpoint}`); - FumaroleClient.logger.debug("Connection config:", { - endpoint: config.endpoint, - xToken: config.xToken ? "***" : "none", - maxDecodingMessageSizeBytes: config.maxDecodingMessageSizeBytes, - }); + FumaroleClient.logger.debug( + "Connection config:", + FumaroleClient.safeStringify({ + endpoint: config.endpoint, + xToken: config.xToken ? "***" : "none", + maxDecodingMessageSizeBytes: config.maxDecodingMessageSizeBytes, + }) + ); const client = await connector.connect(); FumaroleClient.logger.debug(`Connected to ${endpoint}, testing stub...`); @@ -61,7 +78,10 @@ export class FumaroleClient { const deadline = new Date().getTime() + 5000; // 5 second timeout client.waitForReady(deadline, (error) => { if (error) { - FumaroleClient.logger.error("Client failed to become ready:", error); + FumaroleClient.logger.error( + "Client failed to become ready:", + FumaroleClient.safeStringify(error) + ); reject(error); } else { FumaroleClient.logger.debug("Client is ready"); @@ -75,7 +95,10 @@ export class FumaroleClient { const methods = client ? 
Object.getOwnPropertyNames(Object.getPrototypeOf(client)) : []; - FumaroleClient.logger.error("Available methods:", methods); + FumaroleClient.logger.error( + "Available methods:", + FumaroleClient.safeStringify(methods) + ); throw new Error("gRPC client or listConsumerGroups method not available"); } @@ -89,129 +112,32 @@ export class FumaroleClient { return new Promise((resolve, reject) => { this.stub.version(request, (error, response) => { if (error) { - FumaroleClient.logger.error("Version request failed:", error); + FumaroleClient.logger.error( + "Version request failed:", + FumaroleClient.safeStringify(error) + ); reject(error); } else { - FumaroleClient.logger.debug("Version response:", response); + FumaroleClient.logger.debug( + "Version response:", + FumaroleClient.safeStringify(response) + ); resolve(response); } }); }); } - async dragonsmouthSubscribe( - consumerGroupName: string, - request: SubscribeRequest - ): Promise { - return this.dragonsmouthSubscribeWithConfig(consumerGroupName, request, {}); - } - - async dragonsmouthSubscribeWithConfig( - consumerGroupName: string, - request: SubscribeRequest, - config: FumaroleSubscribeConfig - ): Promise { - const finalConfig = { - concurrentDownloadLimit: DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP, - commitInterval: DEFAULT_COMMIT_INTERVAL, - maxFailedSlotDownloadAttempt: DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT, - dataChannelCapacity: DEFAULT_DRAGONSMOUTH_CAPACITY, - gcInterval: DEFAULT_GC_INTERVAL, - slotMemoryRetention: DEFAULT_SLOT_MEMORY_RETENTION, - ...config, - }; - - const dragonsmouthOutlet = new AsyncQueue( - finalConfig.dataChannelCapacity - ); - const fumeControlPlaneQ = new AsyncQueue(100); - - const initialJoin = { consumerGroupName } as JoinControlPlane; - const initialJoinCommand = { initialJoin } as ControlCommand; - await fumeControlPlaneQ.put(initialJoinCommand); - - FumaroleClient.logger.debug( - `Sent initial join command: ${JSON.stringify(initialJoinCommand)}` - ); - - const controlPlaneStream 
= this.stub.subscribe(); - const subscribeRequestQueue = new AsyncQueue(100); - const fumeControlPlaneRxQ = new AsyncQueue(100); - - // Start the control plane source task - const controlPlaneSourceTask = (async () => { - try { - for await (const update of controlPlaneStream) { - await fumeControlPlaneRxQ.put(update); - } - } catch (error: any) { - if (error.code !== "CANCELLED") { - throw error; - } - } - })(); - - // Read the initial response - const controlResponse = - (await fumeControlPlaneRxQ.get()) as ControlResponse; - const init = controlResponse.init; - if (!init) { - throw new Error( - `Unexpected initial response: ${JSON.stringify(controlResponse)}` - ); - } - - FumaroleClient.logger.debug( - `Control response: ${JSON.stringify(controlResponse)}` - ); - - const lastCommittedOffsetStr = init.lastCommittedOffsets?.[0]; - if (!lastCommittedOffsetStr) { - throw new Error("No last committed offset"); - } - const lastCommittedOffset = BigInt(lastCommittedOffsetStr); - - // Create the runtime - const dataPlaneClient = await this.connector.connect(); - - // Start the runtime task - const runtimeTask = this.startRuntime( - subscribeRequestQueue, - fumeControlPlaneQ, - fumeControlPlaneRxQ, - dragonsmouthOutlet, - request, - consumerGroupName, - lastCommittedOffset, - finalConfig, - dataPlaneClient - ); - - FumaroleClient.logger.debug(`Fumarole handle created: ${runtimeTask}`); - - return { - sink: subscribeRequestQueue, - source: dragonsmouthOutlet, - fumaroleHandle: runtimeTask, - }; - } - - private async startRuntime( - subscribeRequestQueue: AsyncQueue, - controlPlaneTxQ: AsyncQueue, - controlPlaneRxQ: AsyncQueue, - dragonsmouthOutlet: AsyncQueue, - request: SubscribeRequest, - consumerGroupName: string, - lastCommittedOffset: bigint, - config: Required, - dataPlaneClient: GrpcClient - ): Promise { - // Implementation of runtime task here - // This would be equivalent to AsyncioFumeDragonsmouthRuntime in Python - // For brevity, this is a placeholder 
implementation - return Promise.resolve(); - } + // async dragonsmouthSubscribe( + // consumerGroupName: string, + // request: SubscribeRequest + // ): Promise { + // return this.dragonsmouthSubscribeWithConfig( + // consumerGroupName, + // request, + // getDefaultFumaroleSubscribeConfig() + // ); + // } async listConsumerGroups(): Promise { if (!this.stub) { @@ -274,7 +200,7 @@ export class FumaroleClient { } else { FumaroleClient.logger.debug( "ListConsumerGroups success - Response:", - JSON.stringify(response, null, 2) + FumaroleClient.safeStringify(response) ); resolve(response); } @@ -392,7 +318,9 @@ export class FumaroleClient { const failures = results.filter((result) => !result.success); if (failures.length > 0) { throw new Error( - `Failed to delete some consumer groups: ${JSON.stringify(failures)}` + `Failed to delete some consumer groups: ${FumaroleClient.safeStringify( + failures + )}` ); } } @@ -426,6 +354,10 @@ export class FumaroleClient { export { FumaroleConfig, + InitialOffsetPolicy, + CommitmentLevel, + SubscribeRequest, + SubscribeUpdate, DEFAULT_DRAGONSMOUTH_CAPACITY, DEFAULT_COMMIT_INTERVAL, DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT, diff --git a/typescript-sdk/src/runtime/aio.ts b/typescript-sdk/src/runtime/aio.ts deleted file mode 100644 index b1fb175..0000000 --- a/typescript-sdk/src/runtime/aio.ts +++ /dev/null @@ -1,608 +0,0 @@ -import { ChannelCredentials, ServiceError, status } from "@grpc/grpc-js"; -import { Queue as AsyncQueue } from "./queue"; -import { Interval } from "../utils/aio"; -import { - FumaroleSM, - FumeDownloadRequest, - FumeOffset, - FumeShardIdx, - CommitmentLevel, -} from "./state-machine"; -import { - SubscribeRequest, - SubscribeUpdate, - SubscribeUpdateSlot, - CommitmentLevel as ProtoCommitmentLevel, -} from "../grpc/geyser"; -import { - ControlCommand, - PollBlockchainHistory, - CommitOffset, - ControlResponse, - DownloadBlockShard, - BlockFilters, - FumaroleClient, -} from "../grpc/fumarole"; - -// Constants -export const 
DEFAULT_GC_INTERVAL = 5; -export const DEFAULT_SLOT_MEMORY_RETENTION = 10000; - -// Types and Interfaces -export interface CompletedDownloadBlockTask { - slot: number; - blockUid: Uint8Array; - shardIdx: FumeShardIdx; - totalEventDownloaded: number; -} - -export interface DownloadBlockError { - kind: - | "Disconnected" - | "OutletDisconnected" - | "BlockShardNotFound" - | "FailedDownload" - | "Fatal"; - message: string; -} - -export interface DownloadTaskResult { - kind: "Ok" | "Err"; - completed?: CompletedDownloadBlockTask; - slot?: number; - err?: DownloadBlockError; -} - -export interface AsyncSlotDownloader { - runDownload( - subscribeRequest: SubscribeRequest, - spec: DownloadTaskArgs - ): Promise; -} - -const LOGGER = console; - -export class AsyncioFumeDragonsmouthRuntime { - private readonly sm: FumaroleSM; - private readonly slotDownloader: AsyncSlotDownloader; - private subscribeRequestUpdateQ: AsyncQueue; - private subscribeRequest: SubscribeRequest; - private readonly consumerGroupName: string; - private readonly controlPlaneTx: AsyncQueue; - private readonly controlPlaneRx: AsyncQueue; - private readonly dragonsmouthOutlet: AsyncQueue; - private readonly commitInterval: number; - private readonly gcInterval: number; - private readonly maxConcurrentDownload: number; - private readonly downloadTasks: Map< - Promise, - FumeDownloadRequest - >; - private lastCommit: number; - - constructor( - sm: FumaroleSM, - slotDownloader: AsyncSlotDownloader, - subscribeRequestUpdateQ: AsyncQueue, - subscribeRequest: SubscribeRequest, - consumerGroupName: string, - controlPlaneTxQ: AsyncQueue, - controlPlaneRxQ: AsyncQueue, - dragonsmouthOutlet: AsyncQueue, - commitInterval: number, - gcInterval: number, - maxConcurrentDownload: number = 10 - ) { - this.sm = sm; - this.slotDownloader = slotDownloader; - this.subscribeRequestUpdateQ = subscribeRequestUpdateQ; - this.subscribeRequest = subscribeRequest; - this.consumerGroupName = consumerGroupName; - this.controlPlaneTx 
= controlPlaneTxQ; - this.controlPlaneRx = controlPlaneRxQ; - this.dragonsmouthOutlet = dragonsmouthOutlet; - this.commitInterval = commitInterval; - this.gcInterval = gcInterval; - this.maxConcurrentDownload = maxConcurrentDownload; - this.downloadTasks = new Map(); - this.lastCommit = Date.now(); - } - - private buildPollHistoryCmd(fromOffset?: FumeOffset): ControlCommand { - return { pollHist: { shardId: 0 } } as ControlCommand; - } - - private buildCommitOffsetCmd(offset: FumeOffset): ControlCommand { - return { commitOffset: { offset, shardId: 0 } } as ControlCommand; - } - - private handleControlResponse(controlResponse: ControlResponse): void { - // Get first defined property from controlResponse - const responseField = Object.keys(controlResponse).find( - (key): key is keyof ControlResponse => { - return ( - controlResponse[key as keyof ControlResponse] !== undefined && - key !== "response" - ); - } - ); - - if (!responseField) { - throw new Error("Control response is empty"); - } - - switch (responseField) { - case "pollHist": { - const pollHist = controlResponse.pollHist!; - LOGGER.debug(`Received poll history ${pollHist.events?.length} events`); - // Convert string slots to numbers and map commitment levels - const convertedEvents = (pollHist.events || []).map((event) => ({ - offset: event.offset, - slot: Number(event.slot), - parentSlot: event.parentSlot ? 
Number(event.parentSlot) : undefined, - commitmentLevel: event.commitmentLevel as unknown as CommitmentLevel, - deadError: event.deadError, - blockchainId: event.blockchainId, - blockUid: event.blockUid, - numShards: Number(event.numShards), - })); - this.sm.queueBlockchainEvent(convertedEvents); - break; - } - case "commitOffset": { - const commitOffset = controlResponse.commitOffset!; - LOGGER.debug(`Received commit offset: ${commitOffset}`); - this.sm.updateCommittedOffset(commitOffset.offset); - break; - } - case "pong": - LOGGER.debug("Received pong"); - break; - default: - throw new Error("Unexpected control response"); - } - } - - private async pollHistoryIfNeeded(): Promise { - if (this.sm.needNewBlockchainEvents()) { - const cmd = this.buildPollHistoryCmd(this.sm.committableOffset); - await this.controlPlaneTx.put(cmd); - } - } - - private commitmentLevel(): number { - return this.subscribeRequest.commitment || 0; - } - - private async scheduleDownloadTaskIfAny(): Promise { - while (true) { - LOGGER.debug("Checking for download tasks to schedule"); - if (this.downloadTasks.size >= this.maxConcurrentDownload) { - break; - } - - LOGGER.debug("Popping slot to download"); - const downloadRequest = await this.sm.popSlotToDownload( - this.commitmentLevel() - ); - if (!downloadRequest) { - LOGGER.debug("No download request available"); - break; - } - - LOGGER.debug(`Download request for slot ${downloadRequest.slot} popped`); - if (!downloadRequest.blockchainId) { - throw new Error("Download request must have a blockchain ID"); - } - - const downloadTaskArgs: DownloadTaskArgs = { - downloadRequest, - dragonsmouthOutlet: this.dragonsmouthOutlet, - }; - - const downloadPromise = this.slotDownloader.runDownload( - this.subscribeRequest, - downloadTaskArgs - ); - this.downloadTasks.set(downloadPromise, downloadRequest); - LOGGER.debug(`Scheduling download task for slot ${downloadRequest.slot}`); - } - } - - private handleDownloadResult(downloadResult: 
DownloadTaskResult): void { - if (downloadResult.kind === "Ok") { - const completed = downloadResult.completed!; - LOGGER.debug( - `Download completed for slot ${completed.slot}, shard ${completed.shardIdx}, ${completed.totalEventDownloaded} total events` - ); - this.sm.makeSlotDownloadProgress(completed.slot, completed.shardIdx); - } else { - const slot = downloadResult.slot!; - const err = downloadResult.err!; - throw new Error(`Failed to download slot ${slot}: ${err.message}`); - } - } - - private async forceCommitOffset(): Promise { - LOGGER.debug(`Force committing offset ${this.sm.committableOffset}`); - await this.controlPlaneTx.put( - this.buildCommitOffsetCmd(this.sm.committableOffset) - ); - } - - private async commitOffset(): Promise { - if (this.sm.lastCommittedOffset < this.sm.committableOffset) { - LOGGER.debug(`Committing offset ${this.sm.committableOffset}`); - await this.forceCommitOffset(); - } - this.lastCommit = Date.now(); - } - - private async drainSlotStatus(): Promise { - const commitment = this.subscribeRequest.commitment || 0; - const slotStatusVec: any[] = []; - - while (true) { - const slotStatus = this.sm.popNextSlotStatus(); - if (!slotStatus) break; - slotStatusVec.push(slotStatus); - } - - if (!slotStatusVec.length) return; - - LOGGER.debug(`Draining ${slotStatusVec.length} slot status`); - - for (const slotStatus of slotStatusVec) { - const matchedFilters: string[] = []; - for (const [filterName, filter] of Object.entries( - this.subscribeRequest.slots || {} - )) { - if ( - filter.filterByCommitment && - slotStatus.commitmentLevel === commitment - ) { - matchedFilters.push(filterName); - } else if (!filter.filterByCommitment) { - matchedFilters.push(filterName); - } - } - - if (matchedFilters.length) { - const update: SubscribeUpdate = { - filters: matchedFilters, - createdAt: undefined, - slot: { - slot: slotStatus.slot, - parent: slotStatus.parentSlot, - status: slotStatus.commitmentLevel, - deadError: slotStatus.deadError, - } as 
SubscribeUpdateSlot, - }; - - try { - await this.dragonsmouthOutlet.put(update); - } catch (error: any) { - if (error.message === "Queue full") return; - throw error; - } - } - - this.sm.markEventAsProcessed(slotStatus.sessionSequence); - } - } - - private async handleControlPlaneResp( - result: ControlResponse | Error - ): Promise { - if (result instanceof Error) { - // Create a slot update with the error information - const errorUpdate: SubscribeUpdate = { - filters: [], - createdAt: undefined, - slot: { - slot: "0", - parent: "0", - status: 0, // Using 0 as default status for error case - deadError: result.message, - }, - }; - await this.dragonsmouthOutlet.put(errorUpdate); - LOGGER.error(`Control plane error: ${result.message}`); - return false; - } - this.handleControlResponse(result); - return true; - } - - public handleNewSubscribeRequest(subscribeRequest: SubscribeRequest): void { - this.subscribeRequest = subscribeRequest; - } - - public async run(): Promise { - LOGGER.debug("Fumarole runtime starting..."); - await this.controlPlaneTx.put(this.buildPollHistoryCmd()); - LOGGER.debug("Initial poll history command sent"); - await this.forceCommitOffset(); - LOGGER.debug("Initial commit offset command sent"); - let ticks = 0; - - const taskMap = new Map, string>(); - - // Initial tasks - taskMap.set(this.subscribeRequestUpdateQ.get(), "dragonsmouth_bidi"); - taskMap.set(this.controlPlaneRx.get(), "control_plane_rx"); - taskMap.set(new Interval(this.commitInterval).tick(), "commit_tick"); - - while (taskMap.size > 0) { - ticks++; - LOGGER.debug("Runtime loop tick"); - - if (ticks % this.gcInterval === 0) { - LOGGER.debug("Running garbage collection"); - this.sm.gc(); - ticks = 0; - } - - LOGGER.debug("Polling history if needed"); - await this.pollHistoryIfNeeded(); - - LOGGER.debug("Scheduling download tasks if any"); - await this.scheduleDownloadTaskIfAny(); - - // Convert iterator to array to avoid --downlevelIteration requirement - const downloadTasks = 
Array.from(this.downloadTasks.keys()); - for (const task of downloadTasks) { - taskMap.set(task, "download_task"); - } - - const downloadTaskInFlight = this.downloadTasks.size; - LOGGER.debug( - `Current download tasks in flight: ${downloadTaskInFlight} / ${this.maxConcurrentDownload}` - ); - - const promises = Array.from(taskMap.keys()); - const done = await Promise.race( - promises.map((p) => p.then((result) => ({ promise: p, result }))) - ); - - const taskName = taskMap.get(done.promise); - taskMap.delete(done.promise); - - switch (taskName) { - case "dragonsmouth_bidi": { - LOGGER.debug("Dragonsmouth subscribe request received"); - const result = done.result as SubscribeRequest; - this.handleNewSubscribeRequest(result); - const newTask = this.subscribeRequestUpdateQ.get(); - taskMap.set(newTask, "dragonsmouth_bidi"); - break; - } - case "control_plane_rx": { - LOGGER.debug("Control plane response received"); - if (!(await this.handleControlPlaneResp(done.result))) { - LOGGER.debug("Control plane error"); - return; - } - const newTask = this.controlPlaneRx.get(); - taskMap.set(newTask, "control_plane_rx"); - break; - } - case "download_task": { - LOGGER.debug("Download task result received"); - this.downloadTasks.delete(done.promise); - this.handleDownloadResult(done.result); - break; - } - case "commit_tick": { - LOGGER.debug("Commit tick reached"); - await this.commitOffset(); - const newTask = new Interval(this.commitInterval).tick(); - taskMap.set(newTask, "commit_tick"); - break; - } - default: - throw new Error(`Unexpected task name: ${taskName}`); - } - - await this.drainSlotStatus(); - } - - LOGGER.debug("Fumarole runtime exiting"); - } -} - -export interface DownloadTaskRunnerChannels { - downloadTaskQueueTx: AsyncQueue; - cncTx: AsyncQueue; - downloadResultRx: AsyncQueue; -} - -export interface DownloadTaskRunnerCommand { - kind: string; - subscribeRequest?: SubscribeRequest; -} - -export interface DownloadTaskArgs { - downloadRequest: 
FumeDownloadRequest; - dragonsmouthOutlet: AsyncQueue; -} - -export class GrpcSlotDownloader implements AsyncSlotDownloader { - private client: FumaroleClient; - - constructor(client: FumaroleClient) { - this.client = client; - } - - public async runDownload( - subscribeRequest: SubscribeRequest, - spec: DownloadTaskArgs - ): Promise { - const downloadTask = new GrpcDownloadBlockTaskRun( - spec.downloadRequest, - this.client, - { - accounts: subscribeRequest.accounts, - transactions: subscribeRequest.transactions, - entries: subscribeRequest.entry, - blocksMeta: subscribeRequest.blocksMeta, - } as BlockFilters, - spec.dragonsmouthOutlet - ); - - LOGGER.debug(`Running download task for slot ${spec.downloadRequest.slot}`); - return await downloadTask.run(); - } -} - -export class GrpcDownloadBlockTaskRun { - private downloadRequest: FumeDownloadRequest; - private client: FumaroleClient; - private filters: BlockFilters; - private dragonsmouthOutlet: AsyncQueue; - - constructor( - downloadRequest: FumeDownloadRequest, - client: FumaroleClient, - filters: BlockFilters, - dragonsmouthOutlet: AsyncQueue - ) { - this.downloadRequest = downloadRequest; - this.client = client; - this.filters = filters; - this.dragonsmouthOutlet = dragonsmouthOutlet; - } - - private mapTonicErrorCodeToDownloadBlockError( - error: ServiceError - ): DownloadBlockError { - switch (error.code) { - case status.NOT_FOUND: - return { - kind: "BlockShardNotFound", - message: "Block shard not found", - }; - case status.UNAVAILABLE: - return { - kind: "Disconnected", - message: "Disconnected", - }; - case status.INTERNAL: - case status.ABORTED: - case status.DATA_LOSS: - case status.RESOURCE_EXHAUSTED: - case status.UNKNOWN: - case status.CANCELLED: - case status.DEADLINE_EXCEEDED: - return { - kind: "FailedDownload", - message: "Failed download", - }; - case status.INVALID_ARGUMENT: - throw new Error("Invalid argument"); - default: - return { - kind: "Fatal", - message: `Unknown error: ${error.code}`, 
- }; - } - } - - public async run(): Promise { - const request = { - blockchainId: this.downloadRequest.blockchainId, - blockUid: this.downloadRequest.blockUid, - shardIdx: 0, - blockFilters: this.filters, - } as DownloadBlockShard; - - try { - LOGGER.debug( - `Requesting download for block ${Buffer.from( - this.downloadRequest.blockUid - ).toString("hex")} at slot ${this.downloadRequest.slot}` - ); - - let totalEventDownloaded = 0; - const stream = this.client.downloadBlock(request); - - return new Promise((resolve, reject) => { - stream.on("data", async (data: any) => { - const kind = Object.keys(data).find( - (k) => data[k] !== undefined && k !== "response" - ); - if (!kind) return; - - switch (kind) { - case "update": { - const update = data.update; - if (!update) throw new Error("Update is null"); - totalEventDownloaded++; - try { - await this.dragonsmouthOutlet.put(update); - } catch (error: any) { - if (error.message === "Queue shutdown") { - LOGGER.error("Dragonsmouth outlet is disconnected"); - resolve({ - kind: "Err", - slot: this.downloadRequest.slot, - err: { - kind: "OutletDisconnected", - message: "Outlet disconnected", - }, - }); - } - } - break; - } - case "blockShardDownloadFinish": - LOGGER.debug( - `Download finished for block ${Buffer.from( - this.downloadRequest.blockUid - ).toString("hex")} at slot ${this.downloadRequest.slot}` - ); - resolve({ - kind: "Ok", - completed: { - slot: this.downloadRequest.slot, - blockUid: this.downloadRequest.blockUid, - shardIdx: 0, - totalEventDownloaded, - }, - }); - break; - default: - reject(new Error(`Unexpected response kind: ${kind}`)); - } - }); - - stream.on("error", (error: ServiceError) => { - LOGGER.error(`Download block error: ${error}`); - resolve({ - kind: "Err", - slot: this.downloadRequest.slot, - err: this.mapTonicErrorCodeToDownloadBlockError(error), - }); - }); - - stream.on("end", () => { - resolve({ - kind: "Err", - slot: this.downloadRequest.slot, - err: { - kind: "FailedDownload", - 
message: "Failed download", - }, - }); - }); - }); - } catch (error) { - LOGGER.error(`Download block error: ${error}`); - return { - kind: "Err", - slot: this.downloadRequest.slot, - err: this.mapTonicErrorCodeToDownloadBlockError(error as ServiceError), - }; - } - } -} diff --git a/typescript-sdk/src/runtime/queue.ts b/typescript-sdk/src/runtime/queue.ts deleted file mode 100644 index 1d7cc8f..0000000 --- a/typescript-sdk/src/runtime/queue.ts +++ /dev/null @@ -1,70 +0,0 @@ -export class Queue { - private items: T[] = []; - private maxSize: number; - private closed: boolean = false; - - constructor(maxSize: number = Infinity) { - this.maxSize = maxSize; - } - - async put(item: T): Promise { - if (this.closed) { - throw new Error("Queue shutdown"); - } - - if (this.items.length >= this.maxSize) { - throw new Error("Queue full"); - } - - this.items.push(item); - } - - async get(): Promise { - if (this.closed && this.items.length === 0) { - throw new Error("Queue shutdown"); - } - - // Wait for an item to be available - while (this.items.length === 0) { - await new Promise((resolve) => setTimeout(resolve, 10)); - } - - return this.items.shift()!; - } - - isEmpty(): boolean { - return this.items.length === 0; - } - - isFull(): boolean { - return this.items.length >= this.maxSize; - } - - size(): number { - return this.items.length; - } - - close(): void { - this.closed = true; - } - - [Symbol.asyncIterator](): AsyncIterator { - return { - next: async (): Promise> => { - if (this.closed && this.isEmpty()) { - return { done: true, value: undefined }; - } - - try { - const value = await this.get(); - return { done: false, value }; - } catch (error: any) { - if (error.message === "Queue shutdown") { - return { done: true, value: undefined }; - } - throw error; - } - }, - }; - } -} diff --git a/typescript-sdk/src/types.ts b/typescript-sdk/src/types.ts deleted file mode 100644 index f46d09a..0000000 --- a/typescript-sdk/src/types.ts +++ /dev/null @@ -1,99 +0,0 @@ -import { 
SubscribeRequest, SubscribeUpdate } from "./grpc/geyser"; - -// Constants -export const DEFAULT_DRAGONSMOUTH_CAPACITY = 10000; -export const DEFAULT_COMMIT_INTERVAL = 5.0; // seconds -export const DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT = 3; -export const DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP = 10; -export const DEFAULT_GC_INTERVAL = 60; // seconds -export const DEFAULT_SLOT_MEMORY_RETENTION = 300; // seconds - -export interface FumaroleSubscribeConfig { - // The maximum number of concurrent download tasks per TCP connection. - concurrentDownloadLimit?: number; - - // The interval at which to commit the slot memory. - commitInterval?: number; - - // The maximum number of failed slot download attempts before giving up. - maxFailedSlotDownloadAttempt?: number; - - // The maximum number of slots to download concurrently. - dataChannelCapacity?: number; - - // The interval at which to perform garbage collection on the slot memory. - gcInterval?: number; - - // The retention period for slot memory in seconds. 
- slotMemoryRetention?: number; -} - -export interface DragonsmouthAdapterSession { - // The queue for sending SubscribeRequest update to the dragonsmouth stream - sink: AsyncQueue; - - // The queue for receiving SubscribeUpdate from the dragonsmouth stream - source: AsyncQueue; - - // The handle for the fumarole runtime - fumaroleHandle: Promise; -} - -// Generic async queue interface to mimic Python's asyncio.Queue -export class AsyncQueue { - private queue: T[] = []; - private maxSize: number; - private resolvers: ((value: T) => void)[] = []; - private full_resolvers: (() => void)[] = []; - private closed = false; - - constructor(maxSize = 0) { - this.maxSize = maxSize; - } - - async put(item: T): Promise { - if (this.closed) { - throw new Error("Queue is closed"); - } - - if (this.maxSize > 0 && this.queue.length >= this.maxSize) { - return new Promise((resolve) => { - this.full_resolvers.push(resolve); - }); - } - - this.queue.push(item); - const resolver = this.resolvers.shift(); - if (resolver) { - resolver(this.queue.shift()!); - } - } - - async get(): Promise { - if (this.closed && this.queue.length === 0) { - throw new Error("Queue is closed"); - } - - if (this.queue.length === 0) { - return new Promise((resolve) => { - this.resolvers.push(resolve); - }); - } - - const item = this.queue.shift()!; - const full_resolver = this.full_resolvers.shift(); - if (full_resolver) { - full_resolver(); - } - return item; - } - - close(): void { - this.closed = true; - // Resolve all pending gets with an error - this.resolvers.forEach((resolve) => { - resolve(undefined as any); - }); - this.resolvers = []; - } -} diff --git a/typescript-sdk/src/types/index.ts b/typescript-sdk/src/types/index.ts new file mode 100644 index 0000000..e541033 --- /dev/null +++ b/typescript-sdk/src/types/index.ts @@ -0,0 +1,166 @@ +import { SubscribeRequest, SubscribeUpdate } from "../grpc/geyser"; +import { ControlCommand, ControlResponse } from "../grpc/fumarole"; + +export interface 
FumaroleSubscribeConfig { + concurrentDownloadLimit: number; + commitInterval: number; + maxFailedSlotDownloadAttempt: number; + dataChannelCapacity: number; + gcInterval: number; + slotMemoryRetention: number; +} + +export function getDefaultFumaroleSubscribeConfig(): FumaroleSubscribeConfig { + return { + concurrentDownloadLimit: DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP, + commitInterval: DEFAULT_COMMIT_INTERVAL, + maxFailedSlotDownloadAttempt: DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT, + dataChannelCapacity: DEFAULT_DRAGONSMOUTH_CAPACITY, + gcInterval: DEFAULT_GC_INTERVAL, + slotMemoryRetention: DEFAULT_SLOT_MEMORY_RETENTION + }; +} + +export class AsyncQueue { + private items: T[] = []; + private maxSize: number; + private closed: boolean = false; + + constructor(maxSize: number = Infinity) { + this.maxSize = maxSize; + } + + async put(item: T): Promise { + if (this.closed) { + throw new Error("Queue shutdown"); + } + + if (this.items.length >= this.maxSize) { + throw new Error("Queue full"); + } + + this.items.push(item); + } + + async get(): Promise { + if (this.closed && this.items.length === 0) { + throw new Error("Queue shutdown"); + } + + // Wait for an item to be available + while (this.items.length === 0) { + await new Promise((resolve) => setTimeout(resolve, 10)); + } + + return this.items.shift()!; + } + + isEmpty(): boolean { + return this.items.length === 0; + } + + isFull(): boolean { + return this.items.length >= this.maxSize; + } + + size(): number { + return this.items.length; + } + + close(): void { + this.closed = true; + } + + [Symbol.asyncIterator](): AsyncIterator { + return { + next: async (): Promise> => { + if (this.closed && this.isEmpty()) { + return { done: true, value: undefined }; + } + + try { + const value = await this.get(); + return { done: false, value }; + } catch (error: any) { + if (error.message === "Queue shutdown") { + return { done: true, value: undefined }; + } + throw error; + } + }, + }; + } +} + + +// export class 
AsyncQueue { +// private queue: T[] = []; +// private waitingResolvers: ((value: T) => void)[] = []; +// private closed = false; + +// constructor(private maxSize: number) {} + +// async put(item: T): Promise { +// if (this.closed) { +// throw new Error("Queue is closed"); +// } +// if (this.waitingResolvers.length > 0) { +// const resolve = this.waitingResolvers.shift()!; +// resolve(item); +// } else { +// if (this.queue.length >= this.maxSize) { +// await new Promise((resolve) => { +// this.waitingResolvers.push(() => resolve()); +// }); +// } +// this.queue.push(item); +// } +// } + +// async get(): Promise { +// if (this.queue.length > 0) { +// const item = this.queue.shift()!; +// if (this.waitingResolvers.length > 0) { +// const resolve = this.waitingResolvers.shift()!; +// resolve(undefined as any); +// } +// return item; +// } + +// if (this.closed) { +// throw new Error("Queue shutdown"); +// } + +// return new Promise((resolve) => { +// this.waitingResolvers.push(resolve); +// }); +// } + +// close() { +// this.closed = true; +// // Resolve all waiting getters with error +// while (this.waitingResolvers.length > 0) { +// const resolve = this.waitingResolvers.shift()!; +// resolve(undefined as any); +// } +// } +// } + +export interface DragonsmouthAdapterSession { + /** Queue for sending subscribe requests */ + sink: AsyncQueue; + /** Queue for receiving subscription updates */ + source: AsyncQueue; + /** Handle for tracking the fumarole runtime */ + fumaroleHandle: Promise; + /** Method to close and clean up the session */ + close: () => Promise; +} + +// Constants +export const DEFAULT_DRAGONSMOUTH_CAPACITY = 10000; +export const DEFAULT_COMMIT_INTERVAL = 5.0; // seconds +export const DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT = 3; +export const DEFAULT_CONCURRENT_DOWNLOAD_LIMIT_PER_TCP = 10; +export const DEFAULT_GC_INTERVAL = 60; // seconds +export const DEFAULT_SLOT_MEMORY_RETENTION = 300; // seconds diff --git a/typescript-sdk/src/utils/aio.ts 
b/typescript-sdk/src/utils/aio.ts deleted file mode 100644 index 1bd9f5b..0000000 --- a/typescript-sdk/src/utils/aio.ts +++ /dev/null @@ -1,53 +0,0 @@ -/** - * Asynchronous utilities for TypeScript - */ - -/** - * Create a forever pending promise. This promise is not resolved and will never be resolved. - * This is useful for testing purposes. - * @returns A promise that never resolves - */ -export async function never(): Promise { - return new Promise(() => { - // This promise intentionally never resolves - }); -} - -/** - * A class that represents an interval that can be used to run async operations periodically - */ -export class Interval { - private readonly interval: number; - - /** - * Create an interval that will run every `interval` seconds. - * @param interval The interval in seconds - */ - constructor(interval: number) { - this.interval = interval; - } - - /** - * Wait for the interval duration - * @returns A promise that resolves after the interval duration - */ - async tick(): Promise { - // Convert seconds to milliseconds for setTimeout - return new Promise((resolve) => setTimeout(resolve, this.interval * 1000)); - } -} - -/** - * Type for any function that returns a Promise - */ -export type AsyncFunction = () => Promise; - -/** - * Helper functions and utilities for logging - */ -export const logger = { - debug: (...args: any[]) => console.debug("[DEBUG]", ...args), - info: (...args: any[]) => console.info("[INFO]", ...args), - warn: (...args: any[]) => console.warn("[WARN]", ...args), - error: (...args: any[]) => console.error("[ERROR]", ...args), -}; diff --git a/typescript-sdk/tsconfig.json b/typescript-sdk/tsconfig.json index 489f30c..5c9a4b7 100644 --- a/typescript-sdk/tsconfig.json +++ b/typescript-sdk/tsconfig.json @@ -13,7 +13,10 @@ "resolveJsonModule": true, "isolatedModules": true, "outDir": "./dist/esm", - "rootDir": "./src" + "rootDir": "./src", + "composite": true, + "incremental": true, + "typeRoots": ["./node_modules/@types", 
"./src/types"] }, "include": ["src/**/*"], "exclude": ["node_modules", "dist"] From ce8ac87dd95610ff7d9ea67aa5a24b476466f498 Mon Sep 17 00:00:00 2001 From: Wilfred Almeida <60785452+WilfredAlmeida@users.noreply.github.com> Date: Wed, 20 Aug 2025 07:31:24 +0000 Subject: [PATCH 50/56] refactor: remove examples from sdk src Signed-off-by: GitHub --- typescript-sdk/examples/README.md | 0 typescript-sdk/examples/list-consumer-groups.ts | 0 typescript-sdk/package.json | 3 ++- typescript-sdk/pnpm-lock.yaml | 13 +++++++++++-- 4 files changed, 13 insertions(+), 3 deletions(-) delete mode 100644 typescript-sdk/examples/README.md delete mode 100644 typescript-sdk/examples/list-consumer-groups.ts diff --git a/typescript-sdk/examples/README.md b/typescript-sdk/examples/README.md deleted file mode 100644 index e69de29..0000000 diff --git a/typescript-sdk/examples/list-consumer-groups.ts b/typescript-sdk/examples/list-consumer-groups.ts deleted file mode 100644 index e69de29..0000000 diff --git a/typescript-sdk/package.json b/typescript-sdk/package.json index 2c76d32..d7f8567 100644 --- a/typescript-sdk/package.json +++ b/typescript-sdk/package.json @@ -44,6 +44,7 @@ "@bufbuild/protobuf": "^2.6.3", "@grpc/grpc-js": "^1.13.4", "@types/js-yaml": "^4.0.9", - "js-yaml": "^4.1.0" + "js-yaml": "^4.1.0", + "rxjs": "^7.8.2" } } \ No newline at end of file diff --git a/typescript-sdk/pnpm-lock.yaml b/typescript-sdk/pnpm-lock.yaml index 95e0c97..0fd9dd3 100644 --- a/typescript-sdk/pnpm-lock.yaml +++ b/typescript-sdk/pnpm-lock.yaml @@ -20,6 +20,9 @@ importers: js-yaml: specifier: ^4.1.0 version: 4.1.0 + rxjs: + specifier: ^7.8.2 + version: 7.8.2 devDependencies: '@types/jest': specifier: ^30.0.0 @@ -1220,6 +1223,9 @@ packages: resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} engines: {node: '>=8'} + rxjs@7.8.2: + resolution: {integrity: 
sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==} + semver@6.3.1: resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==} hasBin: true @@ -2875,6 +2881,10 @@ snapshots: resolve-from@5.0.0: {} + rxjs@7.8.2: + dependencies: + tslib: 2.8.1 + semver@6.3.1: {} semver@7.7.2: {} @@ -2994,8 +3004,7 @@ snapshots: ts-poet: 6.12.0 ts-proto-descriptors: 2.0.0 - tslib@2.8.1: - optional: true + tslib@2.8.1: {} type-detect@4.0.8: {} From 64880c69414ee6cf8350ebd1f78f212cbb5a47c7 Mon Sep 17 00:00:00 2001 From: Wilfred Almeida <60785452+WilfredAlmeida@users.noreply.github.com> Date: Thu, 21 Aug 2025 11:21:36 +0000 Subject: [PATCH 51/56] refactor: implement GrpcSlotDownloader with unit tests Signed-off-by: GitHub --- typescript-sdk/jest.config.js | 4 +- typescript-sdk/package.json | 1 + typescript-sdk/pnpm-lock.yaml | 3 + typescript-sdk/src/runtime/async-queue.ts | 70 ++++++ .../src/runtime/grpc-slot-downloader.ts | 238 ++++++++++++++++++ .../src/tests/grpc-slot-downloader.test.ts | 198 +++++++++++++++ typescript-sdk/src/types/index.ts | 73 +----- 7 files changed, 514 insertions(+), 73 deletions(-) create mode 100644 typescript-sdk/src/runtime/async-queue.ts create mode 100644 typescript-sdk/src/runtime/grpc-slot-downloader.ts create mode 100644 typescript-sdk/src/tests/grpc-slot-downloader.test.ts diff --git a/typescript-sdk/jest.config.js b/typescript-sdk/jest.config.js index e5f1d35..b086764 100644 --- a/typescript-sdk/jest.config.js +++ b/typescript-sdk/jest.config.js @@ -1,12 +1,14 @@ /** @type {import('ts-jest').JestConfigWithTsJest} */ module.exports = { - preset: "ts-jest", + preset: "ts-jest/presets/default-esm", testEnvironment: "node", testMatch: ["**/src/tests/**/*.test.ts"], + extensionsToTreatAsEsm: [".ts"], transform: { "^.+\\.tsx?$": [ "ts-jest", { + useESM: true, // <-- important for ESM builds tsconfig: "tsconfig.json", }, ], diff --git 
a/typescript-sdk/package.json b/typescript-sdk/package.json index d7f8567..923e9fa 100644 --- a/typescript-sdk/package.json +++ b/typescript-sdk/package.json @@ -33,6 +33,7 @@ ], "homepage": "https://triton.one", "devDependencies": { + "@jest/globals": "^30.0.5", "@types/jest": "^30.0.0", "@types/node": "^22.17.1", "jest": "^30.0.5", diff --git a/typescript-sdk/pnpm-lock.yaml b/typescript-sdk/pnpm-lock.yaml index 0fd9dd3..07c04a1 100644 --- a/typescript-sdk/pnpm-lock.yaml +++ b/typescript-sdk/pnpm-lock.yaml @@ -24,6 +24,9 @@ importers: specifier: ^7.8.2 version: 7.8.2 devDependencies: + '@jest/globals': + specifier: ^30.0.5 + version: 30.0.5 '@types/jest': specifier: ^30.0.0 version: 30.0.0 diff --git a/typescript-sdk/src/runtime/async-queue.ts b/typescript-sdk/src/runtime/async-queue.ts new file mode 100644 index 0000000..bc59eb0 --- /dev/null +++ b/typescript-sdk/src/runtime/async-queue.ts @@ -0,0 +1,70 @@ +export class AsyncQueue { + private items: T[] = []; + private maxSize: number; + private closed: boolean = false; + + constructor(maxSize: number = Infinity) { + this.maxSize = maxSize; + } + + async put(item: T): Promise { + if (this.closed) { + throw new Error("Queue shutdown"); + } + + if (this.items.length >= this.maxSize) { + throw new Error("Queue full"); + } + + this.items.push(item); + } + + async get(): Promise { + if (this.closed && this.items.length === 0) { + throw new Error("Queue shutdown"); + } + + // Wait for an item to be available + while (this.items.length === 0) { + await new Promise((resolve) => setTimeout(resolve, 10)); + } + + return this.items.shift()!; + } + + isEmpty(): boolean { + return this.items.length === 0; + } + + isFull(): boolean { + return this.items.length >= this.maxSize; + } + + size(): number { + return this.items.length; + } + + close(): void { + this.closed = true; + } + + [Symbol.asyncIterator](): AsyncIterator { + return { + next: async (): Promise> => { + if (this.closed && this.isEmpty()) { + return { done: true, 
value: undefined }; + } + + try { + const value = await this.get(); + return { done: false, value }; + } catch (error: any) { + if (error.message === "Queue shutdown") { + return { done: true, value: undefined }; + } + throw error; + } + }, + }; + } +} diff --git a/typescript-sdk/src/runtime/grpc-slot-downloader.ts b/typescript-sdk/src/runtime/grpc-slot-downloader.ts new file mode 100644 index 0000000..37affcb --- /dev/null +++ b/typescript-sdk/src/runtime/grpc-slot-downloader.ts @@ -0,0 +1,238 @@ +import { ClientReadableStream, ServiceError, status } from "@grpc/grpc-js"; +import { + BlockFilters, + DataResponse, + DownloadBlockShard, + FumaroleClient, +} from "../grpc/fumarole"; +import { SubscribeRequest, SubscribeUpdate } from "../grpc/geyser"; +import { AsyncQueue } from "./async-queue"; +import { FumeDownloadRequest, FumeShardIdx } from "./state-machine"; + +// Constants +const DEFAULT_GC_INTERVAL = 5; + +const DEFAULT_SLOT_MEMORY_RETENTION = 10000; + +export class CompletedDownloadBlockTask { + constructor( + public slot: bigint, + public blockUid: Uint8Array, + public shardIdx: FumeShardIdx, + public totalEventDownloaded: number + ) {} +} + +export type DownloadBlockErrorKind = + | "Disconnected" + | "OutletDisconnected" + | "BlockShardNotFound" + | "FailedDownload" + | "Fatal"; + +export class DownloadBlockError { + constructor(public kind: DownloadBlockErrorKind, public message: string) {} +} + +export type DownloadTaskResultKind = "Ok" | "Err"; + +export class DownloadTaskResult { + constructor( + public kind: DownloadTaskResultKind, + public completed?: CompletedDownloadBlockTask, + public slot?: bigint, + public err?: DownloadBlockError + ) {} +} + +export class DownloadTaskArgs { + constructor( + public downloadRequest: FumeDownloadRequest, + // TODO: figure out a type for this + public dragonsmouthOutlet: AsyncQueue + ) {} +} + +export abstract class AsyncSlotDownloader { + abstract runDownload( + subscribeRequest: SubscribeRequest, + spec: 
DownloadTaskArgs + ): Promise; +} + +export class GrpcDownloadBlockTaskRun { + public downloadRequest: FumeDownloadRequest; + public client: FumaroleClient; + public filters?: BlockFilters | null; + public dragonsmouthOutlet: AsyncQueue; + + constructor( + downloadRequest: FumeDownloadRequest, + client: FumaroleClient, + filters: BlockFilters | null, + dragonsmouthOutlet: AsyncQueue + ) { + this.downloadRequest = downloadRequest; + this.client = client; + this.filters = filters; + this.dragonsmouthOutlet = dragonsmouthOutlet; + } + + public mapTonicErrorCodeToDownloadBlockError( + e: ServiceError + ): DownloadBlockError { + const code = e.code; + + if (code === status.NOT_FOUND) { + return new DownloadBlockError( + "BlockShardNotFound", + "Block shard not found" + ); + } else if (code === status.UNAVAILABLE) { + return new DownloadBlockError("Disconnected", "Disconnected"); + } else if ( + code === status.INTERNAL || + code === status.ABORTED || + code === status.DATA_LOSS || + code === status.RESOURCE_EXHAUSTED || + code === status.UNKNOWN || + code === status.CANCELLED || + code === status.DEADLINE_EXCEEDED + ) { + return new DownloadBlockError("FailedDownload", "Failed download"); + } else if (code === status.INVALID_ARGUMENT) { + throw new Error("Invalid argument"); + } else { + return new DownloadBlockError("Fatal", `Unknown error: ${code}`); + } + } + + public async run(): Promise { + const downloadRequest: DownloadBlockShard = { + blockchainId: this.downloadRequest.blockchainId, + blockUid: this.downloadRequest.blockUid, + shardIdx: 0, + blockFilters: this.filters === null ? 
undefined : this.filters, + }; + + let downloadResponse: ClientReadableStream; + + try { + console.log( + `Requesting download for block ${this.downloadRequest.blockUid.toString()} at slot ${ + this.downloadRequest.slot + }` + ); + + downloadResponse = this.client.downloadBlock(downloadRequest); + } catch (e: any) { + console.log(`Download block error ${e}`); + return { + kind: "Err", + slot: this.downloadRequest.slot, + err: this.mapTonicErrorCodeToDownloadBlockError(e), + }; + } + + let totalEventDownloaded = 0; + + // Wrap the event-driven API in a Promise + return new Promise((resolve, reject) => { + downloadResponse.on("data", async (data: DataResponse) => { + try { + if (data.update) { + // === case: update === + const update = data.update; + totalEventDownloaded += 1; + + try { + await this.dragonsmouthOutlet.put(update); + } catch (err: any) { + // TODO: figure out a type for this + console.error("Dragonsmouth outlet is disconnected"); + resolve({ + kind: "Err", + slot: this.downloadRequest.slot, + err: { + kind: "OutletDisconnected", + message: "Outlet disconnected", + }, + }); + } + } else if (data.blockShardDownloadFinish) { + // === case: block_shard_download_finish === + console.debug( + `Download finished for block ${this.downloadRequest.blockUid.toString()} at slot ${ + this.downloadRequest.slot + }` + ); + resolve({ + kind: "Ok", + completed: { + slot: this.downloadRequest.slot, + blockUid: this.downloadRequest.blockUid, + shardIdx: 0, + totalEventDownloaded, + }, + }); + } else { + // === unexpected === + reject( + new Error(`Unexpected DataResponse: ${JSON.stringify(data)}`) + ); + } + } catch (err) { + reject(err); + } + }); + + // TODO: figure out a type for this + downloadResponse.on("error", (err: any) => { + console.error("stream error", err); + reject(this.mapTonicErrorCodeToDownloadBlockError(err)); + }); + + downloadResponse.on("end", () => { + console.log("stream ended without blockShardDownloadFinish"); + resolve({ + kind: "Err", + slot: 
this.downloadRequest.slot, + err: { + kind: "FailedDownload", + message: "Stream ended unexpectedly", + }, + }); + }); + }); + } +} + +export class GrpcSlotDownloader extends AsyncSlotDownloader { + public fumaroleClient: FumaroleClient; + + constructor(fumaroleClient: FumaroleClient) { + super(); + this.fumaroleClient = fumaroleClient; + } + + public async runDownload( + subscribeRequest: SubscribeRequest, + spec: DownloadTaskArgs + ): Promise { + const downloadTask = new GrpcDownloadBlockTaskRun( + spec.downloadRequest, + this.fumaroleClient, + { + accounts: subscribeRequest.accounts, + transactions: subscribeRequest.transactions, + entries: subscribeRequest.entry, + blocksMeta: subscribeRequest.blocksMeta, + }, + spec.dragonsmouthOutlet + ); + + console.log(`Running download task for slot ${spec.downloadRequest.slot}`); + + return await downloadTask.run(); + } +} diff --git a/typescript-sdk/src/tests/grpc-slot-downloader.test.ts b/typescript-sdk/src/tests/grpc-slot-downloader.test.ts new file mode 100644 index 0000000..fdea884 --- /dev/null +++ b/typescript-sdk/src/tests/grpc-slot-downloader.test.ts @@ -0,0 +1,198 @@ +import { jest } from "@jest/globals"; + + +import { EventEmitter } from "events"; +import { status, ServiceError } from "@grpc/grpc-js"; +import { + GrpcDownloadBlockTaskRun, + DownloadBlockError, + DownloadTaskResult, + GrpcSlotDownloader, + DownloadTaskArgs, +} from "../runtime/grpc-slot-downloader"; +import { FumaroleClient } from "../grpc/fumarole"; +import { AsyncQueue } from "../runtime/async-queue"; +import { CommitmentLevel, SubscribeRequest, SubscribeUpdate } from "../grpc/geyser"; + +// === helpers === +class MockReadableStream extends EventEmitter { + cancel = jest.fn(); +} + +// mock AsyncQueue that can simulate disconnection +class MockAsyncQueue extends AsyncQueue { + public put = jest.fn(async (_: T) => {}); +} + +describe("GrpcDownloadBlockTaskRun", () => { + let mockClient: jest.Mocked; + let mockQueue: MockAsyncQueue; + let 
downloadRequest: any; + + beforeEach(() => { + mockClient = { + downloadBlock: jest.fn(), + } as any; + + mockQueue = new MockAsyncQueue(); + + downloadRequest = { + blockchainId: 0, + blockUid: new Uint8Array([1, 2, 3]), + slot: BigInt(42), + }; + }); + + test("mapTonicErrorCodeToDownloadBlockError returns correct mapping", () => { + const task = new GrpcDownloadBlockTaskRun( + downloadRequest, + mockClient, + null, + mockQueue + ); + + const codes: [number, string][] = [ + [status.NOT_FOUND, "BlockShardNotFound"], + [status.UNAVAILABLE, "Disconnected"], + [status.INTERNAL, "FailedDownload"], + [status.INVALID_ARGUMENT, "throw"], + [12345, "Fatal"], + ]; + + for (const [code, expected] of codes) { + const err = { code } as ServiceError; + if (expected === "throw") { + expect(() => task.mapTonicErrorCodeToDownloadBlockError(err)).toThrow( + "Invalid argument" + ); + } else { + const mapped = task.mapTonicErrorCodeToDownloadBlockError(err); + expect(mapped.kind).toBe(expected); + } + } + }); + + test("run resolves with Ok when blockShardDownloadFinish is received", async () => { + const stream = new MockReadableStream(); + mockClient.downloadBlock.mockReturnValue(stream as any); + + const task = new GrpcDownloadBlockTaskRun( + downloadRequest, + mockClient, + null, + mockQueue + ); + + const promise = task.run(); + + // emit update first + stream.emit("data", { update: { foo: "bar" } }); + // emit finish + stream.emit("data", { blockShardDownloadFinish: true }); + + const result = await promise; + expect(result.kind).toBe("Ok"); + expect(result.completed?.slot).toBe(BigInt(42)); + expect(mockQueue.put).toHaveBeenCalled(); + }); + + test("run resolves with Err when outlet is disconnected", async () => { + const stream = new MockReadableStream(); + mockClient.downloadBlock.mockReturnValue(stream as any); + + // simulate queue.put throwing + mockQueue.put.mockRejectedValueOnce(new Error("disconnected")); + + const task = new GrpcDownloadBlockTaskRun( + downloadRequest, 
+ mockClient, + null, + mockQueue + ); + + const promise = task.run(); + + stream.emit("data", { update: { foo: "bar" } }); + + const result = await promise; + expect(result.kind).toBe("Err"); + expect(result.err?.kind).toBe("OutletDisconnected"); + }); + + test("run rejects when stream emits error", async () => { + const stream = new MockReadableStream(); + mockClient.downloadBlock.mockReturnValue(stream as any); + + const task = new GrpcDownloadBlockTaskRun( + downloadRequest, + mockClient, + null, + mockQueue + ); + + const promise = task.run(); + + const err = { code: status.UNAVAILABLE } as ServiceError; + stream.emit("error", err); + + await expect(promise).rejects.toBeInstanceOf(DownloadBlockError); + }); + + test("run resolves Err when stream ends without finish", async () => { + const stream = new MockReadableStream(); + mockClient.downloadBlock.mockReturnValue(stream as any); + + const task = new GrpcDownloadBlockTaskRun( + downloadRequest, + mockClient, + null, + mockQueue + ); + + const promise = task.run(); + + stream.emit("end"); + + const result = await promise; + expect(result.kind).toBe("Err"); + expect(result.err?.kind).toBe("FailedDownload"); + }); +}); + +describe("GrpcSlotDownloader", () => { + test("runDownload delegates to GrpcDownloadBlockTaskRun", async () => { + const mockClient = { + downloadBlock: jest.fn(() => new MockReadableStream()), + } as any as FumaroleClient; + + const downloader = new GrpcSlotDownloader(mockClient); + + const spec = new DownloadTaskArgs( + { + blockchainId: new Uint8Array([0,0,0]), + blockUid: new Uint8Array([1, 2, 3]), + slot: BigInt(42), + numShards: 1, + commitmentLevel: CommitmentLevel.CONFIRMED + }, + new MockAsyncQueue() + ); + + const subscribeRequest: any = { + accounts: {}, + transactions: {}, + entry: {}, + blocksMeta: {}, + }; + + // stub GrpcDownloadBlockTaskRun.run + const runSpy = jest + .spyOn(GrpcDownloadBlockTaskRun.prototype, "run") + .mockResolvedValue({ kind: "Ok" } as DownloadTaskResult); + 
+ const result = await downloader.runDownload(subscribeRequest, spec); + + expect(runSpy).toHaveBeenCalled(); + expect(result.kind).toBe("Ok"); + }); +}); diff --git a/typescript-sdk/src/types/index.ts b/typescript-sdk/src/types/index.ts index e541033..d95d7f2 100644 --- a/typescript-sdk/src/types/index.ts +++ b/typescript-sdk/src/types/index.ts @@ -1,5 +1,6 @@ import { SubscribeRequest, SubscribeUpdate } from "../grpc/geyser"; import { ControlCommand, ControlResponse } from "../grpc/fumarole"; +import { AsyncQueue } from "../runtime/async-queue"; export interface FumaroleSubscribeConfig { concurrentDownloadLimit: number; @@ -21,78 +22,6 @@ export function getDefaultFumaroleSubscribeConfig(): FumaroleSubscribeConfig { }; } -export class AsyncQueue { - private items: T[] = []; - private maxSize: number; - private closed: boolean = false; - - constructor(maxSize: number = Infinity) { - this.maxSize = maxSize; - } - - async put(item: T): Promise { - if (this.closed) { - throw new Error("Queue shutdown"); - } - - if (this.items.length >= this.maxSize) { - throw new Error("Queue full"); - } - - this.items.push(item); - } - - async get(): Promise { - if (this.closed && this.items.length === 0) { - throw new Error("Queue shutdown"); - } - - // Wait for an item to be available - while (this.items.length === 0) { - await new Promise((resolve) => setTimeout(resolve, 10)); - } - - return this.items.shift()!; - } - - isEmpty(): boolean { - return this.items.length === 0; - } - - isFull(): boolean { - return this.items.length >= this.maxSize; - } - - size(): number { - return this.items.length; - } - - close(): void { - this.closed = true; - } - - [Symbol.asyncIterator](): AsyncIterator { - return { - next: async (): Promise> => { - if (this.closed && this.isEmpty()) { - return { done: true, value: undefined }; - } - - try { - const value = await this.get(); - return { done: false, value }; - } catch (error: any) { - if (error.message === "Queue shutdown") { - return { done: 
true, value: undefined }; - } - throw error; - } - }, - }; - } -} - - // export class AsyncQueue { // private queue: T[] = []; // private waitingResolvers: ((value: T) => void)[] = []; From 6a86ff74d3e6858cab825b1041ec16415a15ddb2 Mon Sep 17 00:00:00 2001 From: Wilfred Almeida <60785452+WilfredAlmeida@users.noreply.github.com> Date: Thu, 21 Aug 2025 18:09:53 +0000 Subject: [PATCH 52/56] refactor: implement runtime, waitFirstCompleted promises Signed-off-by: GitHub --- .../src/runtime/grpc-slot-downloader.ts | 29 +- typescript-sdk/src/runtime/runtime.ts | 391 ++++++++++++++++++ .../src/runtime/wait-first-completed.ts | 57 +++ typescript-sdk/src/utils/interval.ts | 11 + 4 files changed, 485 insertions(+), 3 deletions(-) create mode 100644 typescript-sdk/src/runtime/runtime.ts create mode 100644 typescript-sdk/src/runtime/wait-first-completed.ts create mode 100644 typescript-sdk/src/utils/interval.ts diff --git a/typescript-sdk/src/runtime/grpc-slot-downloader.ts b/typescript-sdk/src/runtime/grpc-slot-downloader.ts index 37affcb..ab2b66f 100644 --- a/typescript-sdk/src/runtime/grpc-slot-downloader.ts +++ b/typescript-sdk/src/runtime/grpc-slot-downloader.ts @@ -49,7 +49,7 @@ export class DownloadTaskArgs { constructor( public downloadRequest: FumeDownloadRequest, // TODO: figure out a type for this - public dragonsmouthOutlet: AsyncQueue + public dragonsmouthOutlet: AsyncQueue ) {} } @@ -64,13 +64,13 @@ export class GrpcDownloadBlockTaskRun { public downloadRequest: FumeDownloadRequest; public client: FumaroleClient; public filters?: BlockFilters | null; - public dragonsmouthOutlet: AsyncQueue; + public dragonsmouthOutlet: AsyncQueue; constructor( downloadRequest: FumeDownloadRequest, client: FumaroleClient, filters: BlockFilters | null, - dragonsmouthOutlet: AsyncQueue + dragonsmouthOutlet: AsyncQueue ) { this.downloadRequest = downloadRequest; this.client = client; @@ -236,3 +236,26 @@ export class GrpcSlotDownloader extends AsyncSlotDownloader { return await 
downloadTask.run(); } } + +export class DownloadTaskRunnerCommand { + kind: string; + subscribeRequest?: SubscribeRequest; + + private constructor(kind: string, subscribeRequest?: SubscribeRequest) { + this.kind = kind; + this.subscribeRequest = subscribeRequest; + } + + static UpdateSubscribeRequest(subscribeRequest: SubscribeRequest): DownloadTaskRunnerCommand { + return new DownloadTaskRunnerCommand("UpdateSubscribeRequest", subscribeRequest); + } +} + +export class DownloadTaskRunnerChannels { + constructor( + public downloadTaskQueueTx: AsyncQueue<{}>, + public cncTx: AsyncQueue<{}>, + public downloadResultRx: AsyncQueue<{}> + ) {} +} + diff --git a/typescript-sdk/src/runtime/runtime.ts b/typescript-sdk/src/runtime/runtime.ts new file mode 100644 index 0000000..bb89391 --- /dev/null +++ b/typescript-sdk/src/runtime/runtime.ts @@ -0,0 +1,391 @@ +import { + ControlCommand, + ControlResponse, +} from "../grpc/fumarole"; +import { + CommitmentLevel, + SlotStatus, + SubscribeRequest, + SubscribeUpdate, +} from "../grpc/geyser"; +import { Interval } from "../utils/interval"; +import { AsyncQueue } from "./async-queue"; +import { + AsyncSlotDownloader, + DownloadTaskArgs, + DownloadTaskResult, +} from "./grpc-slot-downloader"; +import { + FumaroleSM, + FumeDownloadRequest, + FumeOffset, + FumeSlotStatus, +} from "./state-machine"; +import { waitFirstCompleted } from "./wait-first-completed"; + +type TaskName = + | "dragonsmouth_bidi" + | "control_plane_rx" + | "download_task" + | "commit_tick"; + +export class FumeDragonsmouthRuntime { + public stateMachine: FumaroleSM; + public slotDownloader: AsyncSlotDownloader; + public subscribeRequestUpdateQueue: AsyncQueue<{}>; + public subscribeRequest: SubscribeRequest; + public consumerGroupName: string; + public controlPlaneTransmitQueue: AsyncQueue<{}>; + public controlPlaneReceiveQueue: AsyncQueue<{}>; + public dragonsmouthOutlet: AsyncQueue; + public commitInterval: number; // in seconds + public gcInterval: number; + 
public maxConcurrentDownload: number; + public downloadTasks: Map, FumeDownloadRequest>; + public innerRuntimeChannel: AsyncQueue<{}>; + public lastCommit: number; + + constructor( + stateMachine: FumaroleSM, + slotDownloader: AsyncSlotDownloader, + subscribeRequestUpdateQueue: AsyncQueue<{}>, + subscribeRequest: SubscribeRequest, + consumerGroupName: string, + controlPlaneTransmitQueue: AsyncQueue<{}>, + controlPlaneReceiveQueue: AsyncQueue<{}>, + dragonsmouthOutlet: AsyncQueue, + commitInterval: number, + gcInterval: number, + maxConcurrentDownload: number = 10 + ) { + this.stateMachine = stateMachine; + this.slotDownloader = slotDownloader; + this.subscribeRequestUpdateQueue = subscribeRequestUpdateQueue; + this.subscribeRequest = subscribeRequest; + this.consumerGroupName = consumerGroupName; + this.controlPlaneTransmitQueue = controlPlaneTransmitQueue; + this.controlPlaneReceiveQueue = controlPlaneReceiveQueue; + this.dragonsmouthOutlet = dragonsmouthOutlet; + this.commitInterval = commitInterval; + this.gcInterval = gcInterval; + this.maxConcurrentDownload = maxConcurrentDownload; + this.downloadTasks = new Map(); + this.innerRuntimeChannel = new AsyncQueue<{}>(); + this.lastCommit = Date.now() / 1000; // seconds since epoch; to match python syntax + } + + private buildPollHistoryCmd(fromOffset?: FumeOffset): ControlCommand { + // Build a command to poll the blockchain history + return { + pollHist: { + shardId: 0, + limit: undefined, + }, + }; + } + + private buildCommitOffsetCmd(offset: FumeOffset): ControlCommand { + return { + commitOffset: { + offset, + shardId: 0, + }, + }; + } + + private handleControlResponse(controlResponse: ControlResponse): void { + // Determine which oneof field is set + if (controlResponse.pollHist) { + const pollHist = controlResponse.pollHist; + console.debug(`Received poll history ${pollHist.events.length} events`); + this.stateMachine.queueBlockchainEvent(pollHist.events); + } else if (controlResponse.commitOffset) { + const 
commitOffset = controlResponse.commitOffset; + console.debug(`Received commit offset: ${commitOffset}`); + this.stateMachine.updateCommittedOffset(commitOffset.offset); + } else if (controlResponse.pong) { + console.debug("Received pong"); + } else { + throw new Error("Unexpected control response"); + } + } + + public get commitmentLevel(): CommitmentLevel | undefined { + return this.subscribeRequest.commitment; + } + + public async pollHistoryIfNeeded(): Promise { + // Poll the history if the state machine needs new events. + if (this.stateMachine.needNewBlockchainEvents()) { + const cmd = this.buildPollHistoryCmd( + this.stateMachine.committable_offset + ); + await this.controlPlaneTransmitQueue.put(cmd); + } + } + + private scheduleDownloadTaskIfAny(): void { + while (true) { + console.debug("Checking for download tasks to schedule"); + + if (this.downloadTasks.size >= this.maxConcurrentDownload) { + break; + } + + console.debug("Popping slot to download"); + const downloadRequest = this.stateMachine.popSlotToDownload( + this.commitmentLevel + ); + if (!downloadRequest) { + console.debug("No download request available"); + break; + } + + console.debug(`Download request for slot ${downloadRequest.slot} popped`); + if (!downloadRequest.blockchainId) { + throw new Error("Download request must have a blockchain ID"); + } + + const downloadTaskArgs: DownloadTaskArgs = { + downloadRequest, + dragonsmouthOutlet: this.dragonsmouthOutlet, + }; + + // In TS, calling async fn returns a Promise (like create_task) + const downloadTask = this.slotDownloader.runDownload( + this.subscribeRequest, + downloadTaskArgs + ); + + // Track the promise alongside the request + this.downloadTasks.set(downloadTask, downloadRequest); + + console.debug( + `Scheduling download task for slot ${downloadRequest.slot}` + ); + } + } + + private handleDownloadResult(downloadResult: DownloadTaskResult): void { + /** Handles the result of a download task. 
*/ + if (downloadResult.kind === "Ok") { + const completed = downloadResult.completed!; + console.debug( + `Download completed for slot ${completed.slot}, shard ${completed.shardIdx}, ${completed.totalEventDownloaded} total events` + ); + + this.stateMachine.makeSlotDownloadProgress( + completed.slot, + completed.shardIdx + ); + } else { + const slot = downloadResult.slot; + const err = downloadResult.err; + throw new Error(`Failed to download slot ${slot}: ${err!.message}`); + } + } + + private async forceCommitOffset(): Promise { + console.debug( + `Force committing offset ${this.stateMachine.committable_offset}` + ); + + await this.controlPlaneTransmitQueue.put( + this.buildCommitOffsetCmd(this.stateMachine.committable_offset) + ); + } + + private async commitOffset(): Promise { + if ( + this.stateMachine.last_committed_offset < + this.stateMachine.committable_offset + ) { + console.debug( + `Committing offset ${this.stateMachine.committable_offset}` + ); + await this.forceCommitOffset(); + } + this.lastCommit = Date.now() / 1000; // seconds since epoch; to match python syntax + } + + private async drainSlotStatus(): Promise { + const commitment = this.subscribeRequest.commitment; + const slotStatusVec: FumeSlotStatus[] = []; + + let slotStatus: FumeSlotStatus | undefined; + while ((slotStatus = this.stateMachine.popNextSlotStatus())) { + slotStatusVec.push(slotStatus); + } + + if (slotStatusVec.length === 0) { + return; + } + + console.debug(`Draining ${slotStatusVec.length} slot status`); + + for (const slotStatus of slotStatusVec) { + const matchedFilters: string[] = []; + + for (const [filterName, filter] of Object.entries( + this.subscribeRequest.slots + )) { + if ( + filter.filterByCommitment && + slotStatus.commitmentLevel === commitment + ) { + matchedFilters.push(filterName); + } else if (!filter.filterByCommitment) { + matchedFilters.push(filterName); + } + } + + if (matchedFilters.length > 0) { + const update: SubscribeUpdate = { + filters: 
matchedFilters, + createdAt: undefined, + slot: { + slot: slotStatus.slot, + parent: slotStatus.parentSlot, + status: slotStatus.commitmentLevel as number as SlotStatus, + deadError: slotStatus.deadError, + }, + }; + + try { + await this.dragonsmouthOutlet.put(update); + } catch (err) { + // TODO make proper error types + if (err === "Queue full") { + return; + } + throw err; + } + } + + this.stateMachine.markEventAsProcessed(slotStatus.sessionSequence); + } + } + + private async handleControlPlaneResp( + result: ControlResponse | Error + ): Promise { + if (result instanceof Error) { + await this.dragonsmouthOutlet.put(result); + return false; + } + + this.handleControlResponse(result); + return true; + } + + public handleNewSubscribeRequest(subscribeRequest: SubscribeRequest) { + this.subscribeRequest = subscribeRequest; + } + + public async run() { + console.log("Fumarole runtime starting..."); + + await this.controlPlaneTransmitQueue.put( + this.buildPollHistoryCmd(undefined) + ); + console.log("Initial poll history command sent"); + + await this.forceCommitOffset(); + console.log("Initial commit offset command sent"); + + let ticks = 0; + let pending = new Set>(); + let taskMap = new Map, TaskName>(); + + // Initial tasks + const task1 = this.subscribeRequestUpdateQueue.get(); + taskMap.set(task1, "dragonsmouth_bidi"); + pending.add(task1); + + const task2 = this.controlPlaneReceiveQueue.get(); + taskMap.set(task2, "control_plane_rx"); + pending.add(task2); + + const task3 = new Interval(this.commitInterval).tick(); + taskMap.set(task3, "commit_tick"); + pending.add(task3); + + while (pending.size > 0) { + ticks += 1; + console.log("Runtime loop tick"); + + if (ticks % this.gcInterval === 0) { + console.log("Running garbage collection"); + this.stateMachine.gc(); + ticks = 0; + } + + console.log("Polling history if needed"); + await this.pollHistoryIfNeeded(); + + console.log("Scheduling download tasks if any"); + this.scheduleDownloadTaskIfAny(); + for (const 
[t] of this.downloadTasks.entries()) { + pending.add(t); + taskMap.set(t, "download_task"); + } + + const downloadTaskInflight = this.downloadTasks.size; + console.log( + `Current download tasks in flight: ${downloadTaskInflight} / ${this.maxConcurrentDownload}` + ); + + // Wait for at least one task to finish + const { done, pending: newPending } = await waitFirstCompleted(pending); + pending = newPending; + + for (const t of done) { + const result = await t; + const name = taskMap.get(t)!; + taskMap.delete(t); + + switch (name) { + case "dragonsmouth_bidi": + console.log("Dragonsmouth subscribe request received"); + this.handleNewSubscribeRequest(result); + const newTask1 = this.subscribeRequestUpdateQueue.get(); + taskMap.set(newTask1, "dragonsmouth_bidi"); + pending.add(newTask1); + break; + + case "control_plane_rx": + console.log("Control plane response received"); + if (!(await this.handleControlPlaneResp(result))) { + console.log("Control plane error"); + return; + } + const newTask2 = this.controlPlaneReceiveQueue.get(); + taskMap.set(newTask2, "control_plane_rx"); + pending.add(newTask2); + break; + + case "download_task": + console.log("Download task result received"); + this.downloadTasks.delete(t); + this.handleDownloadResult(result); + break; + + case "commit_tick": + console.log("Commit tick reached"); + await this.commitOffset(); + const newTask3 = new Interval(this.commitInterval).tick(); + taskMap.set(newTask3, "commit_tick"); + pending.add(newTask3); + break; + + default: + throw new Error(`Unexpected task name: ${name}`); + } + } + + await this.drainSlotStatus(); + } + + console.log("Fumarole runtime exiting"); + } +} diff --git a/typescript-sdk/src/runtime/wait-first-completed.ts b/typescript-sdk/src/runtime/wait-first-completed.ts new file mode 100644 index 0000000..0712e2b --- /dev/null +++ b/typescript-sdk/src/runtime/wait-first-completed.ts @@ -0,0 +1,57 @@ +//// Custom TypeScript implementation for Python's `asyncio.wait` + +type 
WaitResult = { + done: Set>; + pending: Set>; +}; + +export async function waitFirstCompleted( + promises: Set> +): Promise> { + if (promises.size === 0) { + return { done: new Set(), pending: new Set() }; + } + + // Map original promises to tracking wrappers + const wrapped = new Map< + Promise, + Promise<{ promise: Promise; status: "fulfilled" | "rejected"; value?: T; reason?: unknown }> + >(); + + for (const p of promises) { + wrapped.set( + p, + p.then( + value => ({ promise: p, status: "fulfilled", value }), + reason => ({ promise: p, status: "rejected", reason }) + ) + ); + } + + // Wait for the first one to settle + let first; + try { + first = await Promise.race(wrapped.values()); + } catch { + // This branch should not happen since we handle rejection inside wrapper + throw new Error("Unexpected race rejection"); + } + + // Collect all results, but do not cancel still-pending promises + const results = await Promise.allSettled(wrapped.values()); + + const done = new Set>(); + const stillPending = new Set(promises); + + for (const r of results) { + if (r.status === "fulfilled") { + const { promise } = r.value; + if (promise === first.promise) { + done.add(promise); + stillPending.delete(promise); + } + } + } + + return { done, pending: stillPending }; +} diff --git a/typescript-sdk/src/utils/interval.ts b/typescript-sdk/src/utils/interval.ts new file mode 100644 index 0000000..cec69d8 --- /dev/null +++ b/typescript-sdk/src/utils/interval.ts @@ -0,0 +1,11 @@ +export class Interval { + private ms: number; + + constructor(ms: number) { + this.ms = ms; + } + + async tick(): Promise { + return new Promise((resolve) => setTimeout(resolve, this.ms)); + } +} From a93151d81daccb748528ddc4056bd8a0d43cbf4d Mon Sep 17 00:00:00 2001 From: Wilfred Almeida <60785452+WilfredAlmeida@users.noreply.github.com> Date: Mon, 25 Aug 2025 15:21:13 +0000 Subject: [PATCH 53/56] refactor: rewrite state machine Signed-off-by: GitHub --- typescript-sdk/src/runtime/async-queue.ts | 8 + 
.../src/runtime/grpc-slot-downloader.ts | 2 +- typescript-sdk/src/runtime/runtime.ts | 38 +- typescript-sdk/src/runtime/state-machine.ts | 454 +++++++++--------- .../src/tests/state-machine.test.ts | 22 +- 5 files changed, 275 insertions(+), 249 deletions(-) diff --git a/typescript-sdk/src/runtime/async-queue.ts b/typescript-sdk/src/runtime/async-queue.ts index bc59eb0..b40afc8 100644 --- a/typescript-sdk/src/runtime/async-queue.ts +++ b/typescript-sdk/src/runtime/async-queue.ts @@ -32,6 +32,14 @@ export class AsyncQueue { return this.items.shift()!; } + /** Non-blocking shift. Returns undefined if queue is empty. */ + shift(): T | undefined { + if (this.closed && this.items.length === 0) { + return undefined; + } + return this.items.shift(); + } + isEmpty(): boolean { return this.items.length === 0; } diff --git a/typescript-sdk/src/runtime/grpc-slot-downloader.ts b/typescript-sdk/src/runtime/grpc-slot-downloader.ts index ab2b66f..5bd48b6 100644 --- a/typescript-sdk/src/runtime/grpc-slot-downloader.ts +++ b/typescript-sdk/src/runtime/grpc-slot-downloader.ts @@ -161,7 +161,7 @@ export class GrpcDownloadBlockTaskRun { } } else if (data.blockShardDownloadFinish) { // === case: block_shard_download_finish === - console.debug( + console.log( `Download finished for block ${this.downloadRequest.blockUid.toString()} at slot ${ this.downloadRequest.slot }` diff --git a/typescript-sdk/src/runtime/runtime.ts b/typescript-sdk/src/runtime/runtime.ts index bb89391..cd9fcb3 100644 --- a/typescript-sdk/src/runtime/runtime.ts +++ b/typescript-sdk/src/runtime/runtime.ts @@ -97,14 +97,14 @@ export class FumeDragonsmouthRuntime { // Determine which oneof field is set if (controlResponse.pollHist) { const pollHist = controlResponse.pollHist; - console.debug(`Received poll history ${pollHist.events.length} events`); + console.log(`Received poll history ${pollHist.events.length} events`); this.stateMachine.queueBlockchainEvent(pollHist.events); } else if (controlResponse.commitOffset) { 
const commitOffset = controlResponse.commitOffset; - console.debug(`Received commit offset: ${commitOffset}`); + console.log(`Received commit offset: ${commitOffset}`); this.stateMachine.updateCommittedOffset(commitOffset.offset); } else if (controlResponse.pong) { - console.debug("Received pong"); + console.log("Received pong"); } else { throw new Error("Unexpected control response"); } @@ -118,7 +118,7 @@ export class FumeDragonsmouthRuntime { // Poll the history if the state machine needs new events. if (this.stateMachine.needNewBlockchainEvents()) { const cmd = this.buildPollHistoryCmd( - this.stateMachine.committable_offset + this.stateMachine.committableOffset ); await this.controlPlaneTransmitQueue.put(cmd); } @@ -126,22 +126,22 @@ export class FumeDragonsmouthRuntime { private scheduleDownloadTaskIfAny(): void { while (true) { - console.debug("Checking for download tasks to schedule"); + console.log("Checking for download tasks to schedule"); if (this.downloadTasks.size >= this.maxConcurrentDownload) { break; } - console.debug("Popping slot to download"); + console.log("Popping slot to download"); const downloadRequest = this.stateMachine.popSlotToDownload( this.commitmentLevel ); if (!downloadRequest) { - console.debug("No download request available"); + console.log("No download request available"); break; } - console.debug(`Download request for slot ${downloadRequest.slot} popped`); + console.log(`Download request for slot ${downloadRequest.slot} popped`); if (!downloadRequest.blockchainId) { throw new Error("Download request must have a blockchain ID"); } @@ -160,7 +160,7 @@ export class FumeDragonsmouthRuntime { // Track the promise alongside the request this.downloadTasks.set(downloadTask, downloadRequest); - console.debug( + console.log( `Scheduling download task for slot ${downloadRequest.slot}` ); } @@ -170,7 +170,7 @@ export class FumeDragonsmouthRuntime { /** Handles the result of a download task. 
*/ if (downloadResult.kind === "Ok") { const completed = downloadResult.completed!; - console.debug( + console.log( `Download completed for slot ${completed.slot}, shard ${completed.shardIdx}, ${completed.totalEventDownloaded} total events` ); @@ -186,22 +186,22 @@ export class FumeDragonsmouthRuntime { } private async forceCommitOffset(): Promise { - console.debug( - `Force committing offset ${this.stateMachine.committable_offset}` + console.log( + `Force committing offset ${this.stateMachine.committableOffset}` ); await this.controlPlaneTransmitQueue.put( - this.buildCommitOffsetCmd(this.stateMachine.committable_offset) + this.buildCommitOffsetCmd(this.stateMachine.committableOffset) ); } private async commitOffset(): Promise { if ( - this.stateMachine.last_committed_offset < - this.stateMachine.committable_offset + this.stateMachine.lastCommittedOffset < + this.stateMachine.committableOffset ) { - console.debug( - `Committing offset ${this.stateMachine.committable_offset}` + console.log( + `Committing offset ${this.stateMachine.committableOffset}` ); await this.forceCommitOffset(); } @@ -212,7 +212,7 @@ export class FumeDragonsmouthRuntime { const commitment = this.subscribeRequest.commitment; const slotStatusVec: FumeSlotStatus[] = []; - let slotStatus: FumeSlotStatus | undefined; + let slotStatus: FumeSlotStatus | null; while ((slotStatus = this.stateMachine.popNextSlotStatus())) { slotStatusVec.push(slotStatus); } @@ -221,7 +221,7 @@ export class FumeDragonsmouthRuntime { return; } - console.debug(`Draining ${slotStatusVec.length} slot status`); + console.log(`Draining ${slotStatusVec.length} slot status`); for (const slotStatus of slotStatusVec) { const matchedFilters: string[] = []; diff --git a/typescript-sdk/src/runtime/state-machine.ts b/typescript-sdk/src/runtime/state-machine.ts index 6167b1b..b6e34ee 100644 --- a/typescript-sdk/src/runtime/state-machine.ts +++ b/typescript-sdk/src/runtime/state-machine.ts @@ -1,22 +1,26 @@ +// import { OrderedSet } 
from "@js-sdsl/ordered-set"; +import { OrderedSet } from "@js-sdsl/ordered-set"; import { BlockchainEvent } from "../grpc/fumarole"; import { CommitmentLevel } from "../grpc/geyser"; +import { AsyncQueue } from "./async-queue"; import { BinaryHeap } from "./binary-heap"; +import { Deque } from "@datastructures-js/deque"; -class Queue { - private items: T[] = []; - - push(item: T): void { - this.items.push(item); - } +// class Queue { +// private items: T[] = []; - shift(): T | undefined { - return this.items.shift(); - } +// push(item: T): void { +// this.items.push(item); +// } - get length(): number { - return this.items.length; - } -} +// shift(): T | undefined { +// return this.items.shift(); +// } + +// get length(): number { +// return this.items.length; +// } +// } // Constants export const DEFAULT_SLOT_MEMORY_RETENTION = 10000; @@ -27,18 +31,18 @@ export type FumeBlockchainId = Uint8Array; // Equivalent to [u8; 16] export type FumeBlockUID = Uint8Array; // Equivalent to [u8; 16] export type FumeNumShards = number; // Equivalent to u32 export type FumeShardIdx = number; // Equivalent to u32 -export type FumeOffset = bigint; // Equivalent to i64 as string for large numbers +export type FumeOffset = bigint; // Equivalent to i64 export type FumeSessionSequence = bigint; // Equivalent to u64 export type Slot = bigint; // From solana_sdk::clock::Slot // Data structures export class FumeDownloadRequest { constructor( - public readonly slot: Slot, - public readonly blockchainId: FumeBlockchainId, - public readonly blockUid: FumeBlockUID, - public readonly numShards: FumeNumShards, - public readonly commitmentLevel: CommitmentLevel + public slot: Slot, + public blockchainId: FumeBlockchainId, + public blockUid: FumeBlockUID, + public numShards: FumeNumShards, + public commitmentLevel: CommitmentLevel ) {} } @@ -54,27 +58,29 @@ export class FumeSlotStatus { } export class SlotCommitmentProgression { - private processedCommitmentLevels = new Set(); + 
processedCommitmentLevels = new Set(); - public hasProcessedCommitment(level: CommitmentLevel): boolean { - return this.processedCommitmentLevels.has(level); - } + // public hasProcessedCommitment(level: CommitmentLevel): boolean { + // return this.processedCommitmentLevels.has(level); + // } - public addProcessedCommitment(level: CommitmentLevel): void { - this.processedCommitmentLevels.add(level); - } + // public addProcessedCommitment(level: CommitmentLevel): void { + // this.processedCommitmentLevels.add(level); + // } } export class SlotDownloadProgress { + private numShards: number; private shardRemaining: boolean[]; - constructor(public readonly numShards: FumeNumShards) { + constructor(numShards: number) { + this.numShards = numShards; this.shardRemaining = new Array(numShards).fill(false); } - public doProgress(shardIdx: FumeShardIdx): SlotDownloadState { + doProgress(shardIdx: number): SlotDownloadState { this.shardRemaining[shardIdx % this.numShards] = true; - return this.shardRemaining.every((x) => x) + return this.shardRemaining.every(Boolean) ? SlotDownloadState.Done : SlotDownloadState.Downloading; } @@ -85,298 +91,310 @@ export enum SlotDownloadState { Done = "Done", } -export class FumaroleSM { - private slot_commitment_progression = new Map< - Slot, - SlotCommitmentProgression - >(); - private downloaded_slot = new Set(); - private inflight_slot_shard_download = new Map(); - private blocked_slot_status_update = new Map>(); - private slot_status_update_queue = new Queue(); - private processed_offset = new BinaryHeap<[FumeSessionSequence, FumeOffset]>( - ( - a: [FumeSessionSequence, FumeOffset], - b: [FumeSessionSequence, FumeOffset] - ) => { - // Implementing Reverse ordering as in Rust - if (a[0] === b[0]) return 0; - return a[0] > b[0] ? 
1 : -1; - } - ); - private unprocessed_blockchain_event = new Queue< - [FumeSessionSequence, BlockchainEvent] - >(); - private sequence = 1n; - private sequence_to_offset = new Map(); - public max_slot_detected: Slot = 0n; - private last_processed_fume_sequence = 0n; - public committable_offset: FumeOffset; +export function orderedSetContains(set: OrderedSet, value: T): boolean { + return !set.find(value).equals(set.end()); +} - constructor( - public last_committed_offset: FumeOffset, - private slot_memory_retention: number = DEFAULT_SLOT_MEMORY_RETENTION - ) { - this.committable_offset = last_committed_offset; +export class FumaroleSM { + lastCommittedOffset: FumeOffset; + slotCommitmentProgression: Map; + downloadedSlotSet: OrderedSet; + inflightSlotShardDownload: Map; + blockedSlotStatusUpdate: Map>; + slotStatusUpdateQueue: Deque; + + // processedOffset: [bigint, FumeOffset][]; // min-heap equivalent + processedOffset: BinaryHeap<[FumeSessionSequence, FumeOffset]>; + + committableOffset: FumeOffset; + maxSlotDetected: bigint; + unprocessedBlockchainEvent: Deque<[FumeSessionSequence, BlockchainEvent]>; + sequence: bigint; + lastProcessedFumeSequence: bigint; + sequenceToOffset: Map; + slotMemoryRetention: number; + + constructor(lastCommittedOffset: FumeOffset, slotMemoryRetention: number) { + this.lastCommittedOffset = lastCommittedOffset; + this.slotCommitmentProgression = new Map(); + this.downloadedSlotSet = new OrderedSet(); + this.inflightSlotShardDownload = new Map(); + this.blockedSlotStatusUpdate = new Map(); + this.slotStatusUpdateQueue = new Deque(); + this.processedOffset = new BinaryHeap<[FumeSessionSequence, FumeOffset]>( + (a, b) => (a[0] < b[0] ? -1 : a[0] > b[0] ? 
1 : 0) // min-heap by sequence number + ); + this.committableOffset = lastCommittedOffset; + this.maxSlotDetected = 0n; + this.unprocessedBlockchainEvent = new Deque(); + this.sequence = 1n; + this.lastProcessedFumeSequence = 0n; + this.sequenceToOffset = new Map(); + this.slotMemoryRetention = slotMemoryRetention; } - public updateCommittedOffset(offset: FumeOffset): void { - if (offset < this.last_committed_offset) { - throw new Error( - "offset must be greater than or equal to last committed offset" - ); + updateCommittedOffset(offset: FumeOffset): void { + if (offset >= this.lastCommittedOffset) { + throw new Error("Offset must be >= last committed offset"); } - this.last_committed_offset = offset; + this.lastCommittedOffset = offset; } - private nextSequence(): FumeSessionSequence { + nextSequence(): bigint { const ret = this.sequence; - this.sequence = this.sequence + 1n; + this.sequence += 1n; return ret; } - public gc(): void { - while (this.downloaded_slot.size > this.slot_memory_retention) { - const firstSlot = Array.from(this.downloaded_slot)[0]; - if (!firstSlot) break; + gc(): void { + // Garbage collect old slots to respect memory retention limit. + while (this.downloadedSlotSet.size() > this.slotMemoryRetention) { + // mimic pythons downloaded_slot.popfirst() + const ds = this.downloadedSlotSet.getElementByPos(0); + this.downloadedSlotSet.eraseElementByPos(0); - this.downloaded_slot.delete(firstSlot); - this.slot_commitment_progression.delete(firstSlot); - this.inflight_slot_shard_download.delete(firstSlot); - this.blocked_slot_status_update.delete(firstSlot); + const slot = ds ?? 
null; + if (slot === null) { + break; + } + this.slotCommitmentProgression.delete(slot); + this.inflightSlotShardDownload.delete(slot); + this.blockedSlotStatusUpdate.delete(slot); } } - public queueBlockchainEvent(events: BlockchainEvent[]): void { - for (const event of events) { - if (event.offset < this.last_committed_offset) { + queueBlockchainEvent(events: BlockchainEvent[]): void { + // Queue blockchain events for processing. + for (const blockchainEvent of events) { + if (blockchainEvent.offset < this.lastCommittedOffset) { continue; } - if (event.slot > this.max_slot_detected) { - this.max_slot_detected = event.slot; + if (blockchainEvent.slot > this.maxSlotDetected) { + this.maxSlotDetected = blockchainEvent.slot; } const sequence = this.nextSequence(); - this.sequence_to_offset.set(sequence, event.offset); - - if (this.downloaded_slot.has(event.slot)) { - const fumeStatus = new FumeSlotStatus( - sequence, - event.offset, - event.slot, - event.parentSlot, - event.commitmentLevel as CommitmentLevel, - event.deadError - ); - - if (this.inflight_slot_shard_download.has(event.slot)) { - // This event is blocked by a slot download currently in progress - let queue = this.blocked_slot_status_update.get(event.slot); - if (!queue) { - queue = new Queue(); - this.blocked_slot_status_update.set(event.slot, queue); - } - queue.push(fumeStatus); + this.sequenceToOffset.set(sequence, blockchainEvent.offset); + + if (orderedSetContains(this.downloadedSlotSet, blockchainEvent.slot)) { + const fumeStatus: FumeSlotStatus = { + sessionSequence: sequence, + offset: blockchainEvent.offset, + slot: blockchainEvent.slot, + parentSlot: blockchainEvent.parentSlot, + commitmentLevel: blockchainEvent.commitmentLevel, + deadError: blockchainEvent.deadError, + }; + + if (this.inflightSlotShardDownload.has(blockchainEvent.slot)) { + this.blockedSlotStatusUpdate + .get(blockchainEvent.slot) + ?.pushBack(fumeStatus); } else { - // Fast track this event - 
this.slot_status_update_queue.push(fumeStatus); + this.slotStatusUpdateQueue.pushBack(fumeStatus); } } else { - this.unprocessed_blockchain_event.push([sequence, event]); + this.unprocessedBlockchainEvent.pushBack([sequence, blockchainEvent]); } } } - public makeSlotDownloadProgress( + makeSlotDownloadProgress( slot: Slot, shardIdx: FumeShardIdx ): SlotDownloadState { - const downloadProgress = this.inflight_slot_shard_download.get(slot); + // Update download progress for a given slot. + const downloadProgress = this.inflightSlotShardDownload.get(slot); if (!downloadProgress) { - throw new Error("slot not in download"); + throw new Error("Slot not in download"); } const downloadState = downloadProgress.doProgress(shardIdx); if (downloadState === SlotDownloadState.Done) { - this.inflight_slot_shard_download.delete(slot); - this.downloaded_slot.add(slot); - if (!this.slot_commitment_progression.has(slot)) { - this.slot_commitment_progression.set( + this.inflightSlotShardDownload.delete(slot); + this.downloadedSlotSet.insert(slot); + + if (!this.slotCommitmentProgression.has(slot)) { + this.slotCommitmentProgression.set( slot, new SlotCommitmentProgression() ); } - const blockedSlotStatus = - this.blocked_slot_status_update.get(slot) ?? - new Queue(); - this.blocked_slot_status_update.delete(slot); - while (blockedSlotStatus.length > 0) { - const status = blockedSlotStatus.shift(); - if (status) this.slot_status_update_queue.push(status); + const blockedStatuses = + this.blockedSlotStatusUpdate.get(slot) ?? 
new Deque(); + this.blockedSlotStatusUpdate.delete(slot); + + for (const status of blockedStatuses.toArray()) { + this.slotStatusUpdateQueue.pushBack(status); } } + return downloadState; } - public popNextSlotStatus(): FumeSlotStatus | undefined { - while (this.slot_status_update_queue.length > 0) { - const slotStatus = this.slot_status_update_queue.shift(); - if (!slotStatus) return undefined; + popNextSlotStatus(): FumeSlotStatus | null { + // Pop the next slot status to process. + while (!this.slotStatusUpdateQueue.isEmpty()) { + const slotStatus = this.slotStatusUpdateQueue.popFront(); + if (!slotStatus) { + continue; + } - const commitmentHistory = this.slot_commitment_progression.get( + const commitmentHistory = this.slotCommitmentProgression.get( slotStatus.slot ); - if (commitmentHistory) { - if ( - !commitmentHistory.hasProcessedCommitment(slotStatus.commitmentLevel) - ) { - commitmentHistory.addProcessedCommitment(slotStatus.commitmentLevel); - return slotStatus; - } - // We already processed this commitment level - continue; - } else { - // This should be unreachable as per Rust implementation - throw new Error("slot status should not be available here"); + + if ( + commitmentHistory && + !commitmentHistory.processedCommitmentLevels.has( + slotStatus.commitmentLevel + ) + ) { + commitmentHistory.processedCommitmentLevels.add( + slotStatus.commitmentLevel + ); + return slotStatus; + } else if (!commitmentHistory) { + throw new Error("Slot status should not be available here"); } } - return undefined; + return null; } - private makeSlotCommitmentProgressionExists( + makeSureSlotCommitmentProgressionExists( slot: Slot ): SlotCommitmentProgression { - let progression = this.slot_commitment_progression.get(slot); - if (!progression) { - progression = new SlotCommitmentProgression(); - this.slot_commitment_progression.set(slot, progression); + // Ensure a slot has a commitment progression entry. 
+ if (!this.slotCommitmentProgression.has(slot)) { + this.slotCommitmentProgression.set(slot, new SlotCommitmentProgression()); } - return progression; + return this.slotCommitmentProgression.get(slot)!; } - public popSlotToDownload( - commitment?: CommitmentLevel - ): FumeDownloadRequest | undefined { - while (this.unprocessed_blockchain_event.length > 0) { - const minCommitment = commitment ?? CommitmentLevel.PROCESSED; - const next = this.unprocessed_blockchain_event.shift(); - if (!next) return undefined; - const [sessionSequence, event] = next; - if (!event) return undefined; + popSlotToDownload(commitment?: CommitmentLevel): FumeDownloadRequest | null { + // Pop the next slot to download. + const minCommitment = commitment ?? CommitmentLevel.PROCESSED; + + while (!this.unprocessedBlockchainEvent.isEmpty()) { + const [sessionSequence, blockchainEvent] = + this.unprocessedBlockchainEvent.popFront()!; - const eventCommitmentLevel = event.commitmentLevel as CommitmentLevel; + const eventCommitmentLevel = blockchainEvent.commitmentLevel; if (eventCommitmentLevel !== minCommitment) { - this.slot_status_update_queue.push( + this.slotStatusUpdateQueue.pushBack( new FumeSlotStatus( sessionSequence, - event.offset, - event.slot, - event.parentSlot, + blockchainEvent.offset, + blockchainEvent.slot, + blockchainEvent.parentSlot, eventCommitmentLevel, - event.deadError + blockchainEvent.deadError ) ); - this.makeSlotCommitmentProgressionExists(event.slot); + this.makeSureSlotCommitmentProgressionExists(blockchainEvent.slot); continue; } - if (this.downloaded_slot.has(event.slot)) { - this.makeSlotCommitmentProgressionExists(event.slot); - const progression = this.slot_commitment_progression.get(event.slot); - if (!progression) { - throw new Error("slot status should not be available here"); - } + if (orderedSetContains(this.downloadedSlotSet, blockchainEvent.slot)) { + this.makeSureSlotCommitmentProgressionExists(blockchainEvent.slot); + const progression = 
this.slotCommitmentProgression.get( + blockchainEvent.slot + )!; - if (progression.hasProcessedCommitment(eventCommitmentLevel)) { + if (progression.processedCommitmentLevels.has(eventCommitmentLevel)) { this.markEventAsProcessed(sessionSequence); continue; } - this.slot_status_update_queue.push( + this.slotStatusUpdateQueue.pushBack( new FumeSlotStatus( sessionSequence, - event.offset, - event.slot, - event.parentSlot, + blockchainEvent.offset, + blockchainEvent.slot, + blockchainEvent.parentSlot, eventCommitmentLevel, - event.deadError + blockchainEvent.deadError ) ); } else { - let queue = this.blocked_slot_status_update.get(event.slot); - if (!queue) { - queue = new Queue(); - this.blocked_slot_status_update.set(event.slot, queue); - } - queue.push( - new FumeSlotStatus( - sessionSequence, - event.offset, - event.slot, - event.parentSlot, - eventCommitmentLevel, - event.deadError - ) - ); + const blockchainId = blockchainEvent.blockchainId; + const blockUid = blockchainEvent.blockUid; + + this.blockedSlotStatusUpdate + .get(blockchainEvent.slot)! 
+ .pushBack( + new FumeSlotStatus( + sessionSequence, + blockchainEvent.offset, + blockchainEvent.slot, + blockchainEvent.parentSlot, + eventCommitmentLevel, + blockchainEvent.deadError + ) + ); - if (!this.inflight_slot_shard_download.has(event.slot)) { + if (!this.inflightSlotShardDownload.has(blockchainEvent.slot)) { const downloadRequest = new FumeDownloadRequest( - event.slot, - event.blockchainId, - event.blockUid, - event.numShards, + blockchainEvent.slot, + blockchainId, + blockUid, + blockchainEvent.numShards, eventCommitmentLevel ); - const downloadProgress = new SlotDownloadProgress(event.numShards); - this.inflight_slot_shard_download.set(event.slot, downloadProgress); + const downloadProgress = new SlotDownloadProgress( + blockchainEvent.numShards + ); + this.inflightSlotShardDownload.set( + blockchainEvent.slot, + downloadProgress + ); return downloadRequest; } } } - return undefined; + return null; } - public slotStatusUpdateQueueLen(): number { - return this.slot_status_update_queue.length; - } + markEventAsProcessed(eventSeqNumber: FumeSessionSequence): void { + const fumeOffset = this.sequenceToOffset.get(eventSeqNumber); + this.sequenceToOffset.delete(eventSeqNumber); - public markEventAsProcessed(eventSeqNumber: FumeSessionSequence): void { - const fumeOffset = this.sequence_to_offset.get(eventSeqNumber); - if (!fumeOffset) { - throw new Error("event sequence number not found"); + if (fumeOffset === undefined) { + throw new Error("Event sequence number not found"); } - this.sequence_to_offset.delete(eventSeqNumber); - this.processed_offset.push([eventSeqNumber, fumeOffset]); - while (true) { - const tuple = this.processed_offset.peek(); - if (!tuple) break; + // push into min-heap (compare by sequence number) + this.processedOffset.push([eventSeqNumber, fumeOffset]); + + while (this.processedOffset.length > 0) { + const [seq, offset] = this.processedOffset.peek()!; - const [blockedEventSeqNumber2, fumeOffset2] = tuple; - if 
(blockedEventSeqNumber2 !== this.last_processed_fume_sequence + 1n) { + if (seq !== this.lastProcessedFumeSequence + 1n) { break; } - this.processed_offset.pop(); - this.committable_offset = fumeOffset2; - this.last_processed_fume_sequence = blockedEventSeqNumber2; + this.processedOffset.pop(); + this.committableOffset = offset; + this.lastProcessedFumeSequence = seq; } } - public processedOffsetQueueLen(): number { - return this.processed_offset.length; + slotStatusUpdateQueueLen(): number { + return this.slotStatusUpdateQueue.size(); + } + + processedOffsetQueueLen(): number { + return this.processedOffset.length; } - public needNewBlockchainEvents(): boolean { + needNewBlockchainEvents(): boolean { return ( - this.unprocessed_blockchain_event.length < - MINIMUM_UNPROCESSED_BLOCKCHAIN_EVENT || - (this.slot_status_update_queue.length === 0 && - this.blocked_slot_status_update.size === 0) + this.slotStatusUpdateQueue.size() === 0 && + this.blockedSlotStatusUpdate.size === 0 ); } } diff --git a/typescript-sdk/src/tests/state-machine.test.ts b/typescript-sdk/src/tests/state-machine.test.ts index 6bd882f..2e402ac 100644 --- a/typescript-sdk/src/tests/state-machine.test.ts +++ b/typescript-sdk/src/tests/state-machine.test.ts @@ -37,8 +37,8 @@ describe('FumaroleSM', () => { expect(downloadReq).toBeDefined(); expect(downloadReq?.slot).toBe(1n); - expect(sm.popSlotToDownload()).toBeUndefined(); - expect(sm.popNextSlotStatus()).toBeUndefined(); + expect(sm.popSlotToDownload()).toBeNull(); + expect(sm.popNextSlotStatus()).toBeNull(); const downloadState = sm.makeSlotDownloadProgress(1n, 0); expect(downloadState).toBe(SlotDownloadState.Done); @@ -60,7 +60,7 @@ describe('FumaroleSM', () => { sm.queueBlockchainEvent([event2]); // It should not cause new slot download request - expect(sm.popSlotToDownload()).toBeUndefined(); + expect(sm.popSlotToDownload()).toBeNull(); const status2 = sm.popNextSlotStatus(); expect(status2).toBeDefined(); @@ -70,7 +70,7 @@ 
describe('FumaroleSM', () => { sm.markEventAsProcessed(status2.sessionSequence); } - expect(sm.committable_offset).toBe(event2.offset); + expect(sm.committableOffset).toBe(event2.offset); }); }); @@ -82,13 +82,13 @@ describe('FumaroleSM', () => { sm.queueBlockchainEvent([event]); // Slot status should not be available, since we didn't download it yet - expect(sm.popNextSlotStatus()).toBeUndefined(); + expect(sm.popNextSlotStatus()).toBeNull(); const downloadReq = sm.popSlotToDownload(); expect(downloadReq).toBeDefined(); expect(downloadReq?.slot).toBe(1n); - expect(sm.popSlotToDownload()).toBeUndefined(); + expect(sm.popSlotToDownload()).toBeNull(); sm.makeSlotDownloadProgress(1n, 0); @@ -100,8 +100,8 @@ describe('FumaroleSM', () => { // Putting the same event back should be ignored sm.queueBlockchainEvent([event]); - expect(sm.popSlotToDownload()).toBeUndefined(); - expect(sm.popNextSlotStatus()).toBeUndefined(); + expect(sm.popSlotToDownload()).toBeNull(); + expect(sm.popNextSlotStatus()).toBeNull(); }); }); @@ -113,13 +113,13 @@ describe('FumaroleSM', () => { sm.queueBlockchainEvent([event]); // Slot status should not be available, since we didn't download it yet - expect(sm.popNextSlotStatus()).toBeUndefined(); + expect(sm.popNextSlotStatus()).toBeNull(); // Use finalized commitment level here const downloadReq = sm.popSlotToDownload(CommitmentLevel.FINALIZED); - expect(downloadReq).toBeUndefined(); + expect(downloadReq).toBeNull(); - expect(sm.popSlotToDownload()).toBeUndefined(); + expect(sm.popSlotToDownload()).toBeNull(); // It should not cause the slot status to be available here even if we have a finalized commitment level filtered out before const status = sm.popNextSlotStatus(); From 7fff1001380910103d6dbe838d5c86560851e7eb Mon Sep 17 00:00:00 2001 From: Wilfred Almeida <60785452+WilfredAlmeida@users.noreply.github.com> Date: Mon, 25 Aug 2025 17:11:37 +0000 Subject: [PATCH 54/56] refactor: sdk aalignment with new state machine Signed-off-by: GitHub --- 
typescript-sdk/package.json | 2 + typescript-sdk/pnpm-lock.yaml | 16 +++ typescript-sdk/src/index.ts | 148 ++++++++++++++++++-- typescript-sdk/src/runtime/state-machine.ts | 26 ++-- typescript-sdk/src/types/index.ts | 4 +- 5 files changed, 169 insertions(+), 27 deletions(-) diff --git a/typescript-sdk/package.json b/typescript-sdk/package.json index 923e9fa..1d9fcc8 100644 --- a/typescript-sdk/package.json +++ b/typescript-sdk/package.json @@ -43,7 +43,9 @@ }, "dependencies": { "@bufbuild/protobuf": "^2.6.3", + "@datastructures-js/deque": "^1.0.5", "@grpc/grpc-js": "^1.13.4", + "@js-sdsl/ordered-set": "^4.4.2", "@types/js-yaml": "^4.0.9", "js-yaml": "^4.1.0", "rxjs": "^7.8.2" diff --git a/typescript-sdk/pnpm-lock.yaml b/typescript-sdk/pnpm-lock.yaml index 07c04a1..f97e6c0 100644 --- a/typescript-sdk/pnpm-lock.yaml +++ b/typescript-sdk/pnpm-lock.yaml @@ -11,9 +11,15 @@ importers: '@bufbuild/protobuf': specifier: ^2.6.3 version: 2.6.3 + '@datastructures-js/deque': + specifier: ^1.0.5 + version: 1.0.5 '@grpc/grpc-js': specifier: ^1.13.4 version: 1.13.4 + '@js-sdsl/ordered-set': + specifier: ^4.4.2 + version: 4.4.2 '@types/js-yaml': specifier: ^4.0.9 version: 4.0.9 @@ -220,6 +226,9 @@ packages: '@bufbuild/protobuf@2.6.3': resolution: {integrity: sha512-w/gJKME9mYN7ZoUAmSMAWXk4hkVpxRKvEJCb3dV5g9wwWdxTJJ0ayOJAVcNxtdqaxDyFuC0uz4RSGVacJ030PQ==} + '@datastructures-js/deque@1.0.5': + resolution: {integrity: sha512-ogeIkpcCGnM+LAZYZqXBXn6Nx0a/ikkKDFPoa1MzZWRFyEdfVUNwkHclET/T8jkyNM1dN6NmPDfenNKcKazjjg==} + '@emnapi/core@1.4.5': resolution: {integrity: sha512-XsLw1dEOpkSX/WucdqUhPWP7hDxSvZiY+fsUC14h+FtQ2Ifni4znbBt8punRX+Uj2JG/uDb8nEHVKvrVlvdZ5Q==} @@ -348,6 +357,9 @@ packages: '@js-sdsl/ordered-map@4.4.2': resolution: {integrity: sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==} + '@js-sdsl/ordered-set@4.4.2': + resolution: {integrity: sha512-ieYQ8WlBPKYzEo81H3q0DFbd8WtFRXXABb4+vRCF0AO3WWtJZFxYvRGdipUXGrd6tlSySmqhcPuO3J6SCodCxg==} 
+ '@napi-rs/wasm-runtime@0.2.12': resolution: {integrity: sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==} @@ -1647,6 +1659,8 @@ snapshots: '@bufbuild/protobuf@2.6.3': {} + '@datastructures-js/deque@1.0.5': {} + '@emnapi/core@1.4.5': dependencies: '@emnapi/wasi-threads': 1.0.4 @@ -1889,6 +1903,8 @@ snapshots: '@js-sdsl/ordered-map@4.4.2': {} + '@js-sdsl/ordered-set@4.4.2': {} + '@napi-rs/wasm-runtime@0.2.12': dependencies: '@emnapi/core': 1.4.5 diff --git a/typescript-sdk/src/index.ts b/typescript-sdk/src/index.ts index ee2c290..33f4067 100644 --- a/typescript-sdk/src/index.ts +++ b/typescript-sdk/src/index.ts @@ -1,4 +1,10 @@ -import { Metadata, ServiceError, MetadataValue, status } from "@grpc/grpc-js"; +import { + Metadata, + ServiceError, + MetadataValue, + status, + ClientDuplexStream, +} from "@grpc/grpc-js"; import { FumaroleConfig } from "./config/config"; import { FumaroleClient as GrpcClient } from "./grpc/fumarole"; import { FumaroleGrpcConnector } from "./connectivity"; @@ -30,7 +36,6 @@ import type { FumaroleSubscribeConfig, } from "./types"; import { - AsyncQueue, DEFAULT_DRAGONSMOUTH_CAPACITY, DEFAULT_COMMIT_INTERVAL, DEFAULT_MAX_SLOT_DOWNLOAD_ATTEMPT, @@ -39,6 +44,14 @@ import { DEFAULT_SLOT_MEMORY_RETENTION, getDefaultFumaroleSubscribeConfig, } from "./types"; +import { AsyncQueue } from "./runtime/async-queue"; +import { FumaroleSM } from "./runtime/state-machine"; +import { GrpcSlotDownloader } from "./runtime/grpc-slot-downloader"; +import { FumeDragonsmouthRuntime } from "./runtime/runtime"; + +(BigInt.prototype as any).toJSON = function () { + return this.toString(); +}; export class FumaroleClient { private static readonly logger = console; @@ -128,16 +141,127 @@ export class FumaroleClient { }); } - // async dragonsmouthSubscribe( - // consumerGroupName: string, - // request: SubscribeRequest - // ): Promise { - // return this.dragonsmouthSubscribeWithConfig( - // consumerGroupName, - // 
request, - // getDefaultFumaroleSubscribeConfig() - // ); - // } + async dragonsmouthSubscribe( + consumerGroupName: string, + request: SubscribeRequest, + xToken: string + ): Promise { + return this.dragonsmouthSubscribeWithConfig( + consumerGroupName, + request, + getDefaultFumaroleSubscribeConfig(), + xToken + ); + } + + public async dragonsmouthSubscribeWithConfig( + consumerGroupName: string, + request: SubscribeRequest, + config: FumaroleSubscribeConfig, + xToken: string + ): Promise { + // Queues + const dragonsmouthOutlet = new AsyncQueue( + config.dataChannelCapacity + ); + const fumeControlPlaneQ = new AsyncQueue<{}>(100); // sink queue + const fumeControlPlaneRxQ = new AsyncQueue<{}>(100); // source queue + + // Send initial join command + const initialJoin: JoinControlPlane = { consumerGroupName }; + const initialJoinCommand: ControlCommand = { initialJoin }; + await fumeControlPlaneQ.put(initialJoinCommand); + console.log(`Sent initial join command ONE:`, initialJoinCommand); + + const metadata = new Metadata(); + metadata.add("x-token", xToken); + + console.log("SUBSCRIBE METADATA"); + console.log(metadata.getMap()); + + // Create duplex stream + const fumeControlPlaneStreamRx = this.stub.subscribe( + metadata + ) as ClientDuplexStream; + + const controlPlaneWriter = (async () => { + try { + while (true) { + const update = await fumeControlPlaneQ.get(); + const ok = fumeControlPlaneStreamRx.write(update); + if (!ok) { + await new Promise((res) => + fumeControlPlaneStreamRx.once("drain", res) + ); + } + } + } catch (err) { + console.error("Writer error:", err); + } + })(); + + + + // Task: read from duplex stream into a queue + const controlPlaneReader = (async () => { + try { + for await (const update of fumeControlPlaneStreamRx) { + console.log("UPDATE"); + console.log(JSON.stringify(update)); + + await fumeControlPlaneRxQ.put(update); + } + } catch (err) { + console.log("failed to read from duplex stream into a queue"); + console.log(err); + + // 
stream ended or error occurred + } + })(); + + // Wait for initial response from control plane + const controlResponse: ControlResponse = await fumeControlPlaneRxQ.get(); + const init = (controlResponse as ControlResponse).init; + if (!init) + throw new Error(`Unexpected initial response: ${controlResponse}`); + console.log(`Control response:`, controlResponse); + + const lastCommittedOffset = init.lastCommittedOffsets[0]; + if (lastCommittedOffset == null) + throw new Error("No last committed offset"); + + // Initialize state machine and queues + const sm = new FumaroleSM(lastCommittedOffset, config.slotMemoryRetention); + const subscribeRequestQueue = new AsyncQueue(100); + + // Connect data plane and create slot downloader + const dataPlaneClient = await this.connector.connect(); + const grpcSlotDownloader = new GrpcSlotDownloader(dataPlaneClient); + + // Create Fume runtime + const rt = new FumeDragonsmouthRuntime( + sm, + grpcSlotDownloader, + subscribeRequestQueue, + request, + consumerGroupName, + fumeControlPlaneQ, + fumeControlPlaneRxQ, + dragonsmouthOutlet, + config.commitInterval, + config.gcInterval, + config.concurrentDownloadLimit + ); + + const fumaroleHandle = rt.run(); + console.log(`Fumarole handle created:`, fumaroleHandle); + + return { + sink: subscribeRequestQueue, + source: dragonsmouthOutlet, + fumaroleHandle, + }; + } async listConsumerGroups(): Promise { if (!this.stub) { diff --git a/typescript-sdk/src/runtime/state-machine.ts b/typescript-sdk/src/runtime/state-machine.ts index b6e34ee..4cfb711 100644 --- a/typescript-sdk/src/runtime/state-machine.ts +++ b/typescript-sdk/src/runtime/state-machine.ts @@ -324,18 +324,20 @@ export class FumaroleSM { const blockchainId = blockchainEvent.blockchainId; const blockUid = blockchainEvent.blockUid; - this.blockedSlotStatusUpdate - .get(blockchainEvent.slot)! 
- .pushBack( - new FumeSlotStatus( - sessionSequence, - blockchainEvent.offset, - blockchainEvent.slot, - blockchainEvent.parentSlot, - eventCommitmentLevel, - blockchainEvent.deadError - ) - ); + if (!this.blockedSlotStatusUpdate.get(blockchainEvent.slot)) { + this.blockedSlotStatusUpdate.set(blockchainEvent.slot, new Deque()); + } + // this won't be undefined because if it is then we just created it above + this.blockedSlotStatusUpdate.get(blockchainEvent.slot)!.pushBack( + new FumeSlotStatus( + sessionSequence, + blockchainEvent.offset, + blockchainEvent.slot, + blockchainEvent.parentSlot, + eventCommitmentLevel, + blockchainEvent.deadError + ) + ); if (!this.inflightSlotShardDownload.has(blockchainEvent.slot)) { const downloadRequest = new FumeDownloadRequest( diff --git a/typescript-sdk/src/types/index.ts b/typescript-sdk/src/types/index.ts index d95d7f2..0e5a445 100644 --- a/typescript-sdk/src/types/index.ts +++ b/typescript-sdk/src/types/index.ts @@ -79,11 +79,9 @@ export interface DragonsmouthAdapterSession { /** Queue for sending subscribe requests */ sink: AsyncQueue; /** Queue for receiving subscription updates */ - source: AsyncQueue; + source: AsyncQueue; /** Handle for tracking the fumarole runtime */ fumaroleHandle: Promise; - /** Method to close and clean up the session */ - close: () => Promise; } // Constants From 613a2944be80f94277caec0e787ef16a811912ff Mon Sep 17 00:00:00 2001 From: Wilfred Almeida <60785452+WilfredAlmeida@users.noreply.github.com> Date: Mon, 25 Aug 2025 17:12:03 +0000 Subject: [PATCH 55/56] feat: typescript examples Signed-off-by: GitHub --- examples/typescript/package.json | 1 + examples/typescript/pnpm-lock.yaml | 366 ++++++++++++++++++ examples/typescript/src/index.ts | 161 -------- .../list-consumer-groups-with-group-info.ts | 96 +++++ .../src/subscribe-token-transactions.ts | 160 ++++++++ 5 files changed, 623 insertions(+), 161 deletions(-) create mode 100644 examples/typescript/pnpm-lock.yaml delete mode 100644 
examples/typescript/src/index.ts create mode 100644 examples/typescript/src/list-consumer-groups-with-group-info.ts create mode 100644 examples/typescript/src/subscribe-token-transactions.ts diff --git a/examples/typescript/package.json b/examples/typescript/package.json index 016980b..4512fb6 100644 --- a/examples/typescript/package.json +++ b/examples/typescript/package.json @@ -8,6 +8,7 @@ "homepage": "https://triton.one", "dependencies": { "@triton-one/yellowstone-fumarole": "file:../../typescript-sdk", + "dotenv": "^17.2.1", "yargs": "^17.6.2" }, "scripts": { diff --git a/examples/typescript/pnpm-lock.yaml b/examples/typescript/pnpm-lock.yaml new file mode 100644 index 0000000..04caef4 --- /dev/null +++ b/examples/typescript/pnpm-lock.yaml @@ -0,0 +1,366 @@ +lockfileVersion: '9.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +importers: + + .: + dependencies: + '@triton-one/yellowstone-fumarole': + specifier: file:../../typescript-sdk + version: file:../../typescript-sdk + dotenv: + specifier: ^17.2.1 + version: 17.2.1 + yargs: + specifier: ^17.6.2 + version: 17.7.2 + devDependencies: + '@types/node': + specifier: ^22.13.5 + version: 22.17.1 + '@types/yargs': + specifier: ^17.0.33 + version: 17.0.33 + prettier: + specifier: ^2.8.3 + version: 2.8.8 + typescript: + specifier: ^4.9.5 + version: 4.9.5 + +packages: + + '@bufbuild/protobuf@2.6.3': + resolution: {integrity: sha512-w/gJKME9mYN7ZoUAmSMAWXk4hkVpxRKvEJCb3dV5g9wwWdxTJJ0ayOJAVcNxtdqaxDyFuC0uz4RSGVacJ030PQ==} + + '@datastructures-js/deque@1.0.5': + resolution: {integrity: sha512-ogeIkpcCGnM+LAZYZqXBXn6Nx0a/ikkKDFPoa1MzZWRFyEdfVUNwkHclET/T8jkyNM1dN6NmPDfenNKcKazjjg==} + + '@grpc/grpc-js@1.13.4': + resolution: {integrity: sha512-GsFaMXCkMqkKIvwCQjCrwH+GHbPKBjhwo/8ZuUkWHqbI73Kky9I+pQltrlT0+MWpedCoosda53lgjYfyEPgxBg==} + engines: {node: '>=12.10.0'} + + '@grpc/proto-loader@0.7.15': + resolution: {integrity: 
sha512-tMXdRCfYVixjuFK+Hk0Q1s38gV9zDiDJfWL3h1rv4Qc39oILCu1TRTDt7+fGUI8K4G1Fj125Hx/ru3azECWTyQ==} + engines: {node: '>=6'} + hasBin: true + + '@js-sdsl/ordered-map@4.4.2': + resolution: {integrity: sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==} + + '@js-sdsl/ordered-set@4.4.2': + resolution: {integrity: sha512-ieYQ8WlBPKYzEo81H3q0DFbd8WtFRXXABb4+vRCF0AO3WWtJZFxYvRGdipUXGrd6tlSySmqhcPuO3J6SCodCxg==} + + '@protobufjs/aspromise@1.1.2': + resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==} + + '@protobufjs/base64@1.1.2': + resolution: {integrity: sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==} + + '@protobufjs/codegen@2.0.4': + resolution: {integrity: sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==} + + '@protobufjs/eventemitter@1.1.0': + resolution: {integrity: sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==} + + '@protobufjs/fetch@1.1.0': + resolution: {integrity: sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==} + + '@protobufjs/float@1.0.2': + resolution: {integrity: sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==} + + '@protobufjs/inquire@1.1.0': + resolution: {integrity: sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==} + + '@protobufjs/path@1.1.2': + resolution: {integrity: sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==} + + '@protobufjs/pool@1.1.0': + resolution: {integrity: sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==} + + '@protobufjs/utf8@1.1.0': + resolution: {integrity: 
sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==} + + '@triton-one/yellowstone-fumarole@file:../../typescript-sdk': + resolution: {directory: ../../typescript-sdk, type: directory} + + '@types/js-yaml@4.0.9': + resolution: {integrity: sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg==} + + '@types/node@22.17.1': + resolution: {integrity: sha512-y3tBaz+rjspDTylNjAX37jEC3TETEFGNJL6uQDxwF9/8GLLIjW1rvVHlynyuUKMnMr1Roq8jOv3vkopBjC4/VA==} + + '@types/yargs-parser@21.0.3': + resolution: {integrity: sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==} + + '@types/yargs@17.0.33': + resolution: {integrity: sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==} + + ansi-regex@5.0.1: + resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} + engines: {node: '>=8'} + + ansi-styles@4.3.0: + resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} + + argparse@2.0.1: + resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} + + cliui@8.0.1: + resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} + engines: {node: '>=12'} + + color-convert@2.0.1: + resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} + + color-name@1.1.4: + resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + + dotenv@17.2.1: + resolution: {integrity: sha512-kQhDYKZecqnM0fCnzI5eIv5L4cAe/iRI+HqMbO/hbRdTAeXDG+M9FjipUxNfbARuEg4iHIbhnhs78BCHNbSxEQ==} + engines: {node: '>=12'} + + emoji-regex@8.0.0: 
+ resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + + escalade@3.2.0: + resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==} + engines: {node: '>=6'} + + get-caller-file@2.0.5: + resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} + engines: {node: 6.* || 8.* || >= 10.*} + + is-fullwidth-code-point@3.0.0: + resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} + engines: {node: '>=8'} + + js-yaml@4.1.0: + resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==} + hasBin: true + + lodash.camelcase@4.3.0: + resolution: {integrity: sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==} + + long@5.3.2: + resolution: {integrity: sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==} + + prettier@2.8.8: + resolution: {integrity: sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==} + engines: {node: '>=10.13.0'} + hasBin: true + + protobufjs@7.5.3: + resolution: {integrity: sha512-sildjKwVqOI2kmFDiXQ6aEB0fjYTafpEvIBs8tOR8qI4spuL9OPROLVu2qZqi/xgCfsHIwVqlaF8JBjWFHnKbw==} + engines: {node: '>=12.0.0'} + + require-directory@2.1.1: + resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} + engines: {node: '>=0.10.0'} + + rxjs@7.8.2: + resolution: {integrity: sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==} + + string-width@4.2.3: + resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} + engines: {node: '>=8'} + + strip-ansi@6.0.1: + 
resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} + engines: {node: '>=8'} + + tslib@2.8.1: + resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==} + + typescript@4.9.5: + resolution: {integrity: sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==} + engines: {node: '>=4.2.0'} + hasBin: true + + undici-types@6.21.0: + resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==} + + wrap-ansi@7.0.0: + resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} + engines: {node: '>=10'} + + y18n@5.0.8: + resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} + engines: {node: '>=10'} + + yargs-parser@21.1.1: + resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} + engines: {node: '>=12'} + + yargs@17.7.2: + resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==} + engines: {node: '>=12'} + +snapshots: + + '@bufbuild/protobuf@2.6.3': {} + + '@datastructures-js/deque@1.0.5': {} + + '@grpc/grpc-js@1.13.4': + dependencies: + '@grpc/proto-loader': 0.7.15 + '@js-sdsl/ordered-map': 4.4.2 + + '@grpc/proto-loader@0.7.15': + dependencies: + lodash.camelcase: 4.3.0 + long: 5.3.2 + protobufjs: 7.5.3 + yargs: 17.7.2 + + '@js-sdsl/ordered-map@4.4.2': {} + + '@js-sdsl/ordered-set@4.4.2': {} + + '@protobufjs/aspromise@1.1.2': {} + + '@protobufjs/base64@1.1.2': {} + + '@protobufjs/codegen@2.0.4': {} + + '@protobufjs/eventemitter@1.1.0': {} + + '@protobufjs/fetch@1.1.0': + dependencies: + '@protobufjs/aspromise': 1.1.2 + '@protobufjs/inquire': 1.1.0 + + '@protobufjs/float@1.0.2': {} + + 
'@protobufjs/inquire@1.1.0': {} + + '@protobufjs/path@1.1.2': {} + + '@protobufjs/pool@1.1.0': {} + + '@protobufjs/utf8@1.1.0': {} + + '@triton-one/yellowstone-fumarole@file:../../typescript-sdk': + dependencies: + '@bufbuild/protobuf': 2.6.3 + '@datastructures-js/deque': 1.0.5 + '@grpc/grpc-js': 1.13.4 + '@js-sdsl/ordered-set': 4.4.2 + '@types/js-yaml': 4.0.9 + js-yaml: 4.1.0 + rxjs: 7.8.2 + + '@types/js-yaml@4.0.9': {} + + '@types/node@22.17.1': + dependencies: + undici-types: 6.21.0 + + '@types/yargs-parser@21.0.3': {} + + '@types/yargs@17.0.33': + dependencies: + '@types/yargs-parser': 21.0.3 + + ansi-regex@5.0.1: {} + + ansi-styles@4.3.0: + dependencies: + color-convert: 2.0.1 + + argparse@2.0.1: {} + + cliui@8.0.1: + dependencies: + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrap-ansi: 7.0.0 + + color-convert@2.0.1: + dependencies: + color-name: 1.1.4 + + color-name@1.1.4: {} + + dotenv@17.2.1: {} + + emoji-regex@8.0.0: {} + + escalade@3.2.0: {} + + get-caller-file@2.0.5: {} + + is-fullwidth-code-point@3.0.0: {} + + js-yaml@4.1.0: + dependencies: + argparse: 2.0.1 + + lodash.camelcase@4.3.0: {} + + long@5.3.2: {} + + prettier@2.8.8: {} + + protobufjs@7.5.3: + dependencies: + '@protobufjs/aspromise': 1.1.2 + '@protobufjs/base64': 1.1.2 + '@protobufjs/codegen': 2.0.4 + '@protobufjs/eventemitter': 1.1.0 + '@protobufjs/fetch': 1.1.0 + '@protobufjs/float': 1.0.2 + '@protobufjs/inquire': 1.1.0 + '@protobufjs/path': 1.1.2 + '@protobufjs/pool': 1.1.0 + '@protobufjs/utf8': 1.1.0 + '@types/node': 22.17.1 + long: 5.3.2 + + require-directory@2.1.1: {} + + rxjs@7.8.2: + dependencies: + tslib: 2.8.1 + + string-width@4.2.3: + dependencies: + emoji-regex: 8.0.0 + is-fullwidth-code-point: 3.0.0 + strip-ansi: 6.0.1 + + strip-ansi@6.0.1: + dependencies: + ansi-regex: 5.0.1 + + tslib@2.8.1: {} + + typescript@4.9.5: {} + + undici-types@6.21.0: {} + + wrap-ansi@7.0.0: + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + + y18n@5.0.8: {} + + 
yargs-parser@21.1.1: {} + + yargs@17.7.2: + dependencies: + cliui: 8.0.1 + escalade: 3.2.0 + get-caller-file: 2.0.5 + require-directory: 2.1.1 + string-width: 4.2.3 + y18n: 5.0.8 + yargs-parser: 21.1.1 diff --git a/examples/typescript/src/index.ts b/examples/typescript/src/index.ts deleted file mode 100644 index 47617c0..0000000 --- a/examples/typescript/src/index.ts +++ /dev/null @@ -1,161 +0,0 @@ -/** - * Read the Fumarole documentation abd blog if you haven't already to understand what's happening in the following code - * https://docs.triton.one/project-yellowstone/fumarole - * https://blog.triton.one/introducing-yellowstone-fumarole - * - * Fumarole Example Client - * - * This example demonstrates how to use the Yellowstone Fumarole client to: - * - Connect to a Fumarole endpoint - * - Subscribe to account and transaction updates - * - Manage consumer groups - * - Handle streaming data - * - * Comment/Uncomment blocks of code to perform the operations you want to try out - */ -import Client, { - FumaroleSubscribeRequest, -} from "@triton-one/yellowstone-fumarole"; -import { - EventSubscriptionPolicy, - InitialOffsetPolicy, - FumaroleClient, -} from "@triton-one/yellowstone-fumarole/dist/types/grpc/fumarole"; -import { CommitmentLevel } from "@triton-one/yellowstone-fumarole/dist/types/grpc/geyser"; -import yargs from "yargs"; -async function main() { - const args = await parseCommandLineArguments(); - - const client = new Client(args.endpoint, args.xToken, { - "grpc.max_receive_message_length": 64 * 1024 * 1024, // 64MiB - }); - - const consumerGroupLabel = "hello3"; - - // const consumerGroup = await client.createStaticConsumerGroup({ - // commitmentLevel: CommitmentLevel.CONFIRMED, - // consumerGroupLabel: consumerGroupLabel, - // eventSubscriptionPolicy: EventSubscriptionPolicy.BOTH, - // initialOffsetPolicy: InitialOffsetPolicy.LATEST, - // }); - // console.log( - // `Created Consumer Group with label ${consumerGroupLabel} and id ${consumerGroup.groupId}` 
- // ); - - // const consumerGroupInfo = await client.getConsumerGroupInfo({ - // consumerGroupLabel: consumerGroupLabel, - // }); - // console.log(`Consumer group ${consumerGroupLabel} info: `); - // console.log(consumerGroupInfo); - - // const slotLagInfo = await client.getSlotLagInfo({ - // consumerGroupLabel: consumerGroupLabel, - // }); - // console.log(`Slot Lag Info: ${slotLagInfo}`); - - const subscribeRequest: FumaroleSubscribeRequest = { - accounts: { - tokenKeg: { - account: ["TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA"], - filters: [], - owner: ["TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA"], - nonemptyTxnSignature: false, - }, - }, - consumerGroupLabel: consumerGroupLabel, - transactions: { - // tokenKeg: { - // accountExclude: [], - // accountInclude: ["TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA"], - // accountRequired: ["TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA"], - // }, - }, - }; - - await fumaroleSubscribe(client, subscribeRequest); - - const consumerGroupsList = await client.listConsumerGroups({}); - console.log(`Consumer groups`); - console.log(consumerGroupsList); - - // const oldestSlot = await client.getOldestSlot({commitmentLevel: CommitmentLevel.CONFIRMED}) - // console.log(`Oldest slot ${oldestSlot}`); - - const commitmentLevels = await client.listAvailableCommitmentLevels({}); - console.log(`Commitment Levels`); - console.log(commitmentLevels); - - const deleteConsumerGroupInfo = await client.deleteConsumerGroup({ - consumerGroupLabel: consumerGroupLabel, - }); - console.log(`Delete consumer group result`); - console.log(deleteConsumerGroupInfo); -} - -async function fumaroleSubscribe( - client: Client, - subscribeRequest: FumaroleSubscribeRequest -) { - // Subscribe for events - const stream = await client.subscribe(); - // const stream = await client.subscribe({ compression: "gzip" }); - - // Create `error` / `end` handler - const streamClosed = new Promise((resolve, reject) => { - stream.on("error", (error) => { - reject(error); - 
stream.end(); - }); - stream.on("end", () => { - resolve(); - }); - stream.on("close", () => { - resolve(); - }); - }); - - // Handle updates - stream.on("data", (data) => { - console.log(data); - }); - - // Send subscribe request - await new Promise((resolve, reject) => { - stream.write(subscribeRequest, (err) => { - if (err === null || err === undefined) { - resolve(); - } else { - reject(err); - } - }); - }).catch((reason) => { - console.error(reason); - throw reason; - }); - - await streamClosed; -} - -function parseCommandLineArguments() { - return yargs(process.argv.slice(3)) - .options({ - endpoint: { - alias: "e", - default: "http://localhost:10000", - describe: "fumarole gRPC endpoint", - type: "string", - }, - "x-token": { - describe: "token for auth, can be used only with ssl", - type: "string", - }, - commitment: { - describe: "commitment level", - choices: ["processed", "confirmed", "finalized"], - }, - }) - .demandCommand(1) - .help().argv; -} - -main(); diff --git a/examples/typescript/src/list-consumer-groups-with-group-info.ts b/examples/typescript/src/list-consumer-groups-with-group-info.ts new file mode 100644 index 0000000..9e2941f --- /dev/null +++ b/examples/typescript/src/list-consumer-groups-with-group-info.ts @@ -0,0 +1,96 @@ +import { FumaroleClient, FumaroleConfig } from "@triton-one/yellowstone-fumarole"; +import dotenv from "dotenv"; +dotenv.config() + +const FUMAROLE_ENDPOINT = process.env.FUMAROLE_ENDPOINT!; +const FUMAROLE_X_TOKEN = process.env.FUMAROLE_X_TOKEN!; + + +async function main() { + try { + // Configure the client + const config: FumaroleConfig = { + endpoint: FUMAROLE_ENDPOINT, + xToken: FUMAROLE_X_TOKEN, + maxDecodingMessageSizeBytes: 100 * 1024 * 1024, // 100MB max message size + xMetadata: {}, // Additional metadata if needed + }; + + // Connect to the Fumarole server + console.log("Connecting to Fumarole server..."); + const client = await FumaroleClient.connect(config); + console.log("Connected successfully"); + + // 
List all consumer groups + console.log("\nFetching consumer groups..."); + try { + console.log("Sending listConsumerGroups request to server..."); + process.on("unhandledRejection", (reason, promise) => { + console.error("Unhandled Rejection at:", promise, "reason:", reason); + }); + + const response = await client.listConsumerGroups().catch((error) => { + console.error("Caught error during listConsumerGroups:", error); + if (error.code) console.error("Error code:", error.code); + if (error.details) console.error("Error details:", error.details); + if (error.metadata) console.error("Error metadata:", error.metadata); + if (error.stack) console.error("Error stack:", error.stack); + throw error; + }); + + console.log("\n=== ListConsumerGroups Response ==="); + console.log(JSON.stringify(response, null, 2)); + console.log("=====================================\n"); + + if (!response.consumerGroups || response.consumerGroups.length === 0) { + console.log("No consumer groups found on server"); + } else { + console.log( + `Found ${response.consumerGroups.length} consumer groups. Fetching details...\n` + ); + for (const group of response.consumerGroups) { + console.log(`=== Consumer Group: ${group.consumerGroupName} ===`); + console.log("Basic info:", JSON.stringify(group, null, 2)); + + // Get detailed info for the group + try { + console.log( + `\nFetching detailed info for group: ${group.consumerGroupName}` + ); + const info = await client.getConsumerGroupInfo( + group.consumerGroupName + ); + if (info) { + console.log("\nDetailed Group Info:"); + console.log("Status: Active"); + console.log("Server Response:", JSON.stringify(info, null, 2)); + } else { + console.log("\nGroup Status: Not found or inactive"); + } + console.log("===============================\n"); + } catch (err) { + console.error( + `\nError fetching group info from server: ${ + err instanceof Error ? 
err.message : String(err) + }` + ); + } + } + } + } catch (error) { + console.error( + "Error:", + error instanceof Error ? error.message : String(error) + ); + process.exit(1); + } + } catch (error) { + console.error( + "Error:", + error instanceof Error ? error.message : String(error) + ); + process.exit(1); + } +} + +main().catch(console.error); diff --git a/examples/typescript/src/subscribe-token-transactions.ts b/examples/typescript/src/subscribe-token-transactions.ts new file mode 100644 index 0000000..5760b43 --- /dev/null +++ b/examples/typescript/src/subscribe-token-transactions.ts @@ -0,0 +1,160 @@ +import dotenv from "dotenv"; + +import { + FumaroleClient, + SubscribeRequest, + DragonsmouthAdapterSession, + CommitmentLevel, + InitialOffsetPolicy, + SubscribeUpdate, +} from "@triton-one/yellowstone-fumarole"; + +dotenv.config(); + +// stringify bigint in json +function safeJsonStringify(obj: unknown): string { + return JSON.stringify( + obj, + (_, v) => { + if (typeof v === "bigint") return v.toString(); + if (v instanceof Error) return v.message; + return v; + }, + 2 + ); +} + +const FUMAROLE_ENDPOINT = process.env.FUMAROLE_ENDPOINT!; +const FUMAROLE_X_TOKEN = process.env.FUMAROLE_X_TOKEN!; +const TOKEN_ADDRESS = "Tokenkegqfezyinwajbnbgkpfxcwubvf9ss623vq5da"; + +let isShuttingDown = false; + +async function main() { + let groupName: string | undefined; + let client: FumaroleClient | undefined; + + try { + console.log(`Connecting to Fumarole server at ${FUMAROLE_ENDPOINT}...`); + const config = { + endpoint: FUMAROLE_ENDPOINT, + xToken: FUMAROLE_X_TOKEN, + maxDecodingMessageSizeBytes: 100 * 1024 * 1024, + xMetadata: {}, + }; + + console.log( + "Initializing Fumarole client with configuration:", + safeJsonStringify(Object.assign({}, config, { xToken: "***" })) + ); + + client = await FumaroleClient.connect(config); + + const request: SubscribeRequest = { + commitment: CommitmentLevel.CONFIRMED, + accounts: {}, + transactions: { + tokenFilter: { + 
accountInclude: [TOKEN_ADDRESS], + accountExclude: [], + accountRequired: [TOKEN_ADDRESS], + }, + }, + slots: { + slotFilter: { + filterByCommitment: true, + interslotUpdates: true, + }, + }, + transactionsStatus: {}, + blocks: {}, + blocksMeta: {}, + entry: {}, + ping: { id: Date.now() }, + accountsDataSlice: [], + fromSlot: undefined, + }; + + // delete them all because they pile up and hit limit while developing + await client.deleteAllConsumerGroups(); + + groupName = `token-monitor-${Math.random().toString(36).substring(7)}`; + console.log(`Creating consumer group: ${groupName}`); + + console.log("Creating consumer group with initialOffsetPolicy LATEST"); + try { + await client.createConsumerGroup({ + consumerGroupName: groupName, + initialOffsetPolicy: InitialOffsetPolicy.LATEST, + }); + } catch (err) { + console.error("Failed to create consumer group:", err); + throw err; + } + + const subscribeConfig = { + concurrentDownloadLimit: 200, + commitInterval: 2000, + maxFailedSlotDownloadAttempt: 100, + dataChannelCapacity: 20000, + slotMemoryRetention: 300, + gcInterval: 30000, + }; + + console.log("Subscribe request:", safeJsonStringify(request)); + console.log("Subscribe config:", safeJsonStringify(subscribeConfig)); + + console.log(`Starting subscription for group ${groupName}...`); + + let subscription: DragonsmouthAdapterSession; + + subscription = await client.dragonsmouthSubscribeWithConfig( + groupName, + request, + subscribeConfig, + FUMAROLE_X_TOKEN + ); + + const { sink, source, fumaroleHandle } = subscription; + + await fumaroleHandle; + + fumaroleHandle.catch((e) => { + console.log("caught in fumarole handle"); + console.log(e); + }); + + subscription.sink.put(request); + + // Handle fumarole connection closure in background + fumaroleHandle.then((res) => { + console.error("Fumarole handle closed:", res); + }); + + // Consume async queue + for await (const event of source) { + console.log(JSON.stringify(event, null, 2)); + } + + 
console.error("Source closed"); + } catch (error) { + console.log("CATCH 2"); + console.log(error); + } +} + +async function handleShutdown(signal: string) { + if (isShuttingDown) return; + isShuttingDown = true; + console.log(`\nReceived ${signal}. Cleaning up...`); + await new Promise((resolve) => setTimeout(resolve, 1000)); + process.exit(0); +} + +process.on("SIGINT", () => handleShutdown("SIGINT")); +process.on("SIGTERM", () => handleShutdown("SIGTERM")); +process.on("unhandledRejection", (reason, promise) => { + console.error("Unhandled Rejection at:", promise, "reason:", reason); +}); + +main().catch(console.error); From 4398e0597c4a321fbdd07c74bfe7058adf54e419 Mon Sep 17 00:00:00 2001 From: Wilfred Almeida <60785452+WilfredAlmeida@users.noreply.github.com> Date: Thu, 28 Aug 2025 15:33:10 +0000 Subject: [PATCH 56/56] refactor: wip Signed-off-by: GitHub --- examples/typescript/package.json | 1 + examples/typescript/pnpm-lock.yaml | 16 ++++ .../src/subscribe-token-transactions.ts | 13 +++- typescript-sdk/package.json | 1 + typescript-sdk/pnpm-lock.yaml | 15 ++++ typescript-sdk/src/connectivity.ts | 2 + typescript-sdk/src/grpc/types.ts | 0 typescript-sdk/src/index.ts | 7 +- typescript-sdk/src/runtime/base-downloader.ts | 0 typescript-sdk/src/runtime/fumarole-client.ts | 0 typescript-sdk/src/runtime/grpc-downloader.ts | 0 .../src/runtime/grpc-slot-downloader.ts | 28 +++++-- typescript-sdk/src/runtime/index.ts | 0 typescript-sdk/src/runtime/runtime.ts | 10 ++- typescript-sdk/src/runtime/state-machine.ts | 5 ++ .../src/runtime/wait-first-completed.ts | 76 +++++++------------ typescript-sdk/src/types/core.ts | 0 .../test/fumarole-integration.test.ts | 0 18 files changed, 108 insertions(+), 66 deletions(-) create mode 100644 typescript-sdk/src/grpc/types.ts create mode 100644 typescript-sdk/src/runtime/base-downloader.ts create mode 100644 typescript-sdk/src/runtime/fumarole-client.ts create mode 100644 typescript-sdk/src/runtime/grpc-downloader.ts create mode 
100644 typescript-sdk/src/runtime/index.ts create mode 100644 typescript-sdk/src/types/core.ts create mode 100644 typescript-sdk/test/fumarole-integration.test.ts diff --git a/examples/typescript/package.json b/examples/typescript/package.json index 4512fb6..c485755 100644 --- a/examples/typescript/package.json +++ b/examples/typescript/package.json @@ -8,6 +8,7 @@ "homepage": "https://triton.one", "dependencies": { "@triton-one/yellowstone-fumarole": "file:../../typescript-sdk", + "bs58": "^6.0.0", "dotenv": "^17.2.1", "yargs": "^17.6.2" }, diff --git a/examples/typescript/pnpm-lock.yaml b/examples/typescript/pnpm-lock.yaml index 04caef4..f495453 100644 --- a/examples/typescript/pnpm-lock.yaml +++ b/examples/typescript/pnpm-lock.yaml @@ -11,6 +11,9 @@ importers: '@triton-one/yellowstone-fumarole': specifier: file:../../typescript-sdk version: file:../../typescript-sdk + bs58: + specifier: ^6.0.0 + version: 6.0.0 dotenv: specifier: ^17.2.1 version: 17.2.1 @@ -110,6 +113,12 @@ packages: argparse@2.0.1: resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} + base-x@5.0.1: + resolution: {integrity: sha512-M7uio8Zt++eg3jPj+rHMfCC+IuygQHHCOU+IYsVtik6FWjuYpVt/+MRKcgsAMHh8mMFAwnB+Bs+mTrFiXjMzKg==} + + bs58@6.0.0: + resolution: {integrity: sha512-PD0wEnEYg6ijszw/u8s+iI3H17cTymlrwkKhDhPZq+Sokl3AU4htyBFTjAeNAlCCmg0f53g6ih3jATyCKftTfw==} + cliui@8.0.1: resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} engines: {node: '>=12'} @@ -253,6 +262,7 @@ snapshots: '@grpc/grpc-js': 1.13.4 '@js-sdsl/ordered-set': 4.4.2 '@types/js-yaml': 4.0.9 + bs58: 6.0.0 js-yaml: 4.1.0 rxjs: 7.8.2 @@ -276,6 +286,12 @@ snapshots: argparse@2.0.1: {} + base-x@5.0.1: {} + + bs58@6.0.0: + dependencies: + base-x: 5.0.1 + cliui@8.0.1: dependencies: string-width: 4.2.3 diff --git a/examples/typescript/src/subscribe-token-transactions.ts 
b/examples/typescript/src/subscribe-token-transactions.ts index 5760b43..a297428 100644 --- a/examples/typescript/src/subscribe-token-transactions.ts +++ b/examples/typescript/src/subscribe-token-transactions.ts @@ -104,6 +104,8 @@ async function main() { console.log("Subscribe request:", safeJsonStringify(request)); console.log("Subscribe config:", safeJsonStringify(subscribeConfig)); + // groupName = "helloworld-1" + console.log(`Starting subscription for group ${groupName}...`); let subscription: DragonsmouthAdapterSession; @@ -117,20 +119,25 @@ async function main() { const { sink, source, fumaroleHandle } = subscription; - await fumaroleHandle; + // await fumaroleHandle; fumaroleHandle.catch((e) => { console.log("caught in fumarole handle"); console.log(e); }); - subscription.sink.put(request); - // Handle fumarole connection closure in background fumaroleHandle.then((res) => { console.error("Fumarole handle closed:", res); }); + // while (true) { + // const up = await source.get() + // console.log("THE UPDATE"); + // console.log(up); + // } + + // Consume async queue for await (const event of source) { console.log(JSON.stringify(event, null, 2)); diff --git a/typescript-sdk/package.json b/typescript-sdk/package.json index 1d9fcc8..6685484 100644 --- a/typescript-sdk/package.json +++ b/typescript-sdk/package.json @@ -47,6 +47,7 @@ "@grpc/grpc-js": "^1.13.4", "@js-sdsl/ordered-set": "^4.4.2", "@types/js-yaml": "^4.0.9", + "bs58": "^6.0.0", "js-yaml": "^4.1.0", "rxjs": "^7.8.2" } diff --git a/typescript-sdk/pnpm-lock.yaml b/typescript-sdk/pnpm-lock.yaml index f97e6c0..1810f1d 100644 --- a/typescript-sdk/pnpm-lock.yaml +++ b/typescript-sdk/pnpm-lock.yaml @@ -23,6 +23,9 @@ importers: '@types/js-yaml': specifier: ^4.0.9 version: 4.0.9 + bs58: + specifier: ^6.0.0 + version: 6.0.0 js-yaml: specifier: ^4.1.0 version: 4.1.0 @@ -612,6 +615,9 @@ packages: balanced-match@1.0.2: resolution: {integrity: 
sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + base-x@5.0.1: + resolution: {integrity: sha512-M7uio8Zt++eg3jPj+rHMfCC+IuygQHHCOU+IYsVtik6FWjuYpVt/+MRKcgsAMHh8mMFAwnB+Bs+mTrFiXjMzKg==} + brace-expansion@1.1.12: resolution: {integrity: sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==} @@ -631,6 +637,9 @@ packages: resolution: {integrity: sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog==} engines: {node: '>= 6'} + bs58@6.0.0: + resolution: {integrity: sha512-PD0wEnEYg6ijszw/u8s+iI3H17cTymlrwkKhDhPZq+Sokl3AU4htyBFTjAeNAlCCmg0f53g6ih3jATyCKftTfw==} + bser@2.1.1: resolution: {integrity: sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==} @@ -2149,6 +2158,8 @@ snapshots: balanced-match@1.0.2: {} + base-x@5.0.1: {} + brace-expansion@1.1.12: dependencies: balanced-match: 1.0.2 @@ -2173,6 +2184,10 @@ snapshots: dependencies: fast-json-stable-stringify: 2.1.0 + bs58@6.0.0: + dependencies: + base-x: 5.0.1 + bser@2.1.1: dependencies: node-int64: 0.4.0 diff --git a/typescript-sdk/src/connectivity.ts b/typescript-sdk/src/connectivity.ts index 52c044e..9cf72f3 100644 --- a/typescript-sdk/src/connectivity.ts +++ b/typescript-sdk/src/connectivity.ts @@ -119,6 +119,7 @@ export class FumaroleGrpcConnector { if (this.config.xToken !== undefined) { this.logger.debug("Adding x-token to metadata"); metadata.add(X_TOKEN_HEADER, this.config.xToken); + metadata.add("x-subscription-id", this.config.xToken); } return callback(null, metadata); } @@ -246,6 +247,7 @@ export function createGrpcChannel( if (xToken !== undefined) { console.debug("Adding x-token to metadata"); metadata.add(X_TOKEN_HEADER, xToken); + metadata.add("x-subscription-id", xToken); } return callback(null, metadata); } diff --git a/typescript-sdk/src/grpc/types.ts b/typescript-sdk/src/grpc/types.ts new file mode 100644 index 
0000000..e69de29 diff --git a/typescript-sdk/src/index.ts b/typescript-sdk/src/index.ts index 33f4067..9b4b4ed 100644 --- a/typescript-sdk/src/index.ts +++ b/typescript-sdk/src/index.ts @@ -174,6 +174,7 @@ export class FumaroleClient { console.log(`Sent initial join command ONE:`, initialJoinCommand); const metadata = new Metadata(); + metadata.add("x-subscription-id", xToken); metadata.add("x-token", xToken); console.log("SUBSCRIBE METADATA"); @@ -181,7 +182,7 @@ export class FumaroleClient { // Create duplex stream const fumeControlPlaneStreamRx = this.stub.subscribe( - metadata + metadata, {} ) as ClientDuplexStream; const controlPlaneWriter = (async () => { @@ -206,8 +207,8 @@ export class FumaroleClient { const controlPlaneReader = (async () => { try { for await (const update of fumeControlPlaneStreamRx) { - console.log("UPDATE"); - console.log(JSON.stringify(update)); + // console.log("UPDATE"); + // console.log(JSON.stringify(update)); await fumeControlPlaneRxQ.put(update); } diff --git a/typescript-sdk/src/runtime/base-downloader.ts b/typescript-sdk/src/runtime/base-downloader.ts new file mode 100644 index 0000000..e69de29 diff --git a/typescript-sdk/src/runtime/fumarole-client.ts b/typescript-sdk/src/runtime/fumarole-client.ts new file mode 100644 index 0000000..e69de29 diff --git a/typescript-sdk/src/runtime/grpc-downloader.ts b/typescript-sdk/src/runtime/grpc-downloader.ts new file mode 100644 index 0000000..e69de29 diff --git a/typescript-sdk/src/runtime/grpc-slot-downloader.ts b/typescript-sdk/src/runtime/grpc-slot-downloader.ts index 5bd48b6..08f84a9 100644 --- a/typescript-sdk/src/runtime/grpc-slot-downloader.ts +++ b/typescript-sdk/src/runtime/grpc-slot-downloader.ts @@ -1,4 +1,4 @@ -import { ClientReadableStream, ServiceError, status } from "@grpc/grpc-js"; +import { ClientReadableStream, Metadata, ServiceError, status } from "@grpc/grpc-js"; import { BlockFilters, DataResponse, @@ -8,7 +8,7 @@ import { import { SubscribeRequest, SubscribeUpdate } 
from "../grpc/geyser"; import { AsyncQueue } from "./async-queue"; import { FumeDownloadRequest, FumeShardIdx } from "./state-machine"; - +import bs58 from "bs58"; // Constants const DEFAULT_GC_INTERVAL = 5; @@ -119,12 +119,19 @@ export class GrpcDownloadBlockTaskRun { try { console.log( - `Requesting download for block ${this.downloadRequest.blockUid.toString()} at slot ${ + `Requesting download for block ${bs58.encode(this.downloadRequest.blockUid)} at slot ${ this.downloadRequest.slot - }` + } with filters ${JSON.stringify(downloadRequest.blockFilters)}` ); - downloadResponse = this.client.downloadBlock(downloadRequest); + const metadata = new Metadata(); + metadata.add("x-token", "7b042cd6-ea1e-46af-b46b-653bdce119f6") + metadata.add("x-subscription-id", "091d7d4f-a38d-4a2e-b4f9-91643170ff0e6") + + console.log("DOWNLOAD METADATA"); + console.log(metadata.getMap()); + + downloadResponse = this.client.downloadBlock(downloadRequest, metadata); } catch (e: any) { console.log(`Download block error ${e}`); return { @@ -140,6 +147,9 @@ export class GrpcDownloadBlockTaskRun { return new Promise((resolve, reject) => { downloadResponse.on("data", async (data: DataResponse) => { try { + console.log("DATA DATA\n\n"); + console.log(JSON.stringify(data)) + console.log("\n\n"); if (data.update) { // === case: update === const update = data.update; @@ -160,9 +170,10 @@ export class GrpcDownloadBlockTaskRun { }); } } else if (data.blockShardDownloadFinish) { + // === case: block_shard_download_finish === console.log( - `Download finished for block ${this.downloadRequest.blockUid.toString()} at slot ${ + `Download finished for block ${bs58.encode(this.downloadRequest.blockUid)} at slot ${ this.downloadRequest.slot }` ); @@ -192,8 +203,9 @@ export class GrpcDownloadBlockTaskRun { reject(this.mapTonicErrorCodeToDownloadBlockError(err)); }); - downloadResponse.on("end", () => { - console.log("stream ended without blockShardDownloadFinish"); + downloadResponse.on("end", (e: any) => 
{ + console.log(`stream ended without blockShardDownloadFinish for block ${bs58.encode(this.downloadRequest.blockUid)} with slot ${this.downloadRequest.slot}`); + resolve({ kind: "Err", slot: this.downloadRequest.slot, diff --git a/typescript-sdk/src/runtime/index.ts b/typescript-sdk/src/runtime/index.ts new file mode 100644 index 0000000..e69de29 diff --git a/typescript-sdk/src/runtime/runtime.ts b/typescript-sdk/src/runtime/runtime.ts index cd9fcb3..b6d3cf7 100644 --- a/typescript-sdk/src/runtime/runtime.ts +++ b/typescript-sdk/src/runtime/runtime.ts @@ -101,7 +101,7 @@ export class FumeDragonsmouthRuntime { this.stateMachine.queueBlockchainEvent(pollHist.events); } else if (controlResponse.commitOffset) { const commitOffset = controlResponse.commitOffset; - console.log(`Received commit offset: ${commitOffset}`); + console.log(`Received commit offset: ${JSON.stringify(commitOffset)}`); this.stateMachine.updateCommittedOffset(commitOffset.offset); } else if (controlResponse.pong) { console.log("Received pong"); @@ -336,8 +336,12 @@ export class FumeDragonsmouthRuntime { ); // Wait for at least one task to finish - const { done, pending: newPending } = await waitFirstCompleted(pending); - pending = newPending; + console.log("UP UP"); + + // const { done, pending: newPending } = await Promise.race(pending); + const { done, pending: newPending } = await waitFirstCompleted(Array.from(pending)); + console.log("DOWN DOWN"); + pending = new Set(newPending); for (const t of done) { const result = await t; diff --git a/typescript-sdk/src/runtime/state-machine.ts b/typescript-sdk/src/runtime/state-machine.ts index 4cfb711..7a2f440 100644 --- a/typescript-sdk/src/runtime/state-machine.ts +++ b/typescript-sdk/src/runtime/state-machine.ts @@ -134,6 +134,11 @@ export class FumaroleSM { } updateCommittedOffset(offset: FumeOffset): void { + console.log("OFFSET"); + console.log(offset); + console.log("this.LAST_COMMITTED_OFFSET"); + console.log(this.lastCommittedOffset); + if 
(offset >= this.lastCommittedOffset) { throw new Error("Offset must be >= last committed offset"); } diff --git a/typescript-sdk/src/runtime/wait-first-completed.ts b/typescript-sdk/src/runtime/wait-first-completed.ts index 0712e2b..4b140cf 100644 --- a/typescript-sdk/src/runtime/wait-first-completed.ts +++ b/typescript-sdk/src/runtime/wait-first-completed.ts @@ -1,57 +1,35 @@ -//// Custom TypeScript implementation for Python's `asyncio.wait` - type WaitResult = { - done: Set>; - pending: Set>; + done: Promise[]; + pending: Promise[]; }; -export async function waitFirstCompleted( - promises: Set> -): Promise> { - if (promises.size === 0) { - return { done: new Set(), pending: new Set() }; +export async function waitFirstCompleted(promises: Promise[]): Promise> { + if (promises.length === 0) { + return { done: [], pending: [] }; } - // Map original promises to tracking wrappers - const wrapped = new Map< - Promise, - Promise<{ promise: Promise; status: "fulfilled" | "rejected"; value?: T; reason?: unknown }> - >(); + return new Promise>((resolve) => { + let settled = false; - for (const p of promises) { - wrapped.set( - p, + promises.forEach((p) => { p.then( - value => ({ promise: p, status: "fulfilled", value }), - reason => ({ promise: p, status: "rejected", reason }) - ) - ); - } - - // Wait for the first one to settle - let first; - try { - first = await Promise.race(wrapped.values()); - } catch { - // This branch should not happen since we handle rejection inside wrapper - throw new Error("Unexpected race rejection"); - } - - // Collect all results, but do not cancel still-pending promises - const results = await Promise.allSettled(wrapped.values()); - - const done = new Set>(); - const stillPending = new Set(promises); - - for (const r of results) { - if (r.status === "fulfilled") { - const { promise } = r.value; - if (promise === first.promise) { - done.add(promise); - stillPending.delete(promise); - } - } - } - - return { done, pending: stillPending }; + () 
=> { + if (!settled) { + settled = true; + const done = [p]; + const pending = promises.filter((q) => q !== p); + resolve({ done, pending }); + } + }, + () => { + if (!settled) { + settled = true; + const done = [p]; + const pending = promises.filter((q) => q !== p); + resolve({ done, pending }); + } + } + ); + }); + }); } diff --git a/typescript-sdk/src/types/core.ts b/typescript-sdk/src/types/core.ts new file mode 100644 index 0000000..e69de29 diff --git a/typescript-sdk/test/fumarole-integration.test.ts b/typescript-sdk/test/fumarole-integration.test.ts new file mode 100644 index 0000000..e69de29