From d06391d93ec795e10101c763a4255e8c9de13fdd Mon Sep 17 00:00:00 2001 From: shamardy Date: Wed, 27 Jul 2022 20:12:03 +0200 Subject: [PATCH 01/33] use async_blocking wip --- mm2src/coins/lightning.rs | 94 +++++++---- mm2src/coins/lightning/ln_events.rs | 146 +++++++++--------- .../lightning/ln_filesystem_persister.rs | 6 +- mm2src/coins/lightning/ln_utils.rs | 105 +++++++------ 4 files changed, 200 insertions(+), 151 deletions(-) diff --git a/mm2src/coins/lightning.rs b/mm2src/coins/lightning.rs index d796d791d7..33c7b279eb 100644 --- a/mm2src/coins/lightning.rs +++ b/mm2src/coins/lightning.rs @@ -117,9 +117,14 @@ impl LightningCoin { #[inline] fn my_node_id(&self) -> String { self.channel_manager.get_our_node_id().to_string() } - fn get_balance_msat(&self) -> (u64, u64) { - self.channel_manager - .list_channels() + async fn list_channels(&self) -> Vec { + let selfi = self.clone(); + async_blocking(move || selfi.channel_manager.list_channels()).await + } + + async fn get_balance_msat(&self) -> (u64, u64) { + self.list_channels() + .await .iter() .fold((0, 0), |(spendable, unspendable), chan| { if chan.is_usable { @@ -133,11 +138,8 @@ impl LightningCoin { }) } - fn pay_invoice(&self, invoice: Invoice) -> SendPaymentResult { - self.invoice_payer - .pay_invoice(&invoice) - .map_to_mm(|e| SendPaymentError::PaymentError(format!("{:?}", e)))?; - let payment_hash = PaymentHash((*invoice.payment_hash()).into_inner()); + async fn pay_invoice(&self, invoice: Invoice) -> SendPaymentResult { + let payment_hash = PaymentHash((invoice.payment_hash()).into_inner()); let payment_type = PaymentType::OutboundPayment { destination: *invoice.payee_pub_key().unwrap_or(&invoice.recover_payee_pub_key()), }; @@ -146,13 +148,24 @@ impl LightningCoin { InvoiceDescription::Hash(h) => hex::encode(h.0.into_inner()), }; let payment_secret = Some(*invoice.payment_secret()); + let amt_msat = invoice.amount_milli_satoshis().map(|a| a as i64); + + let selfi = self.clone(); + async_blocking(move || 
{ + selfi + .invoice_payer + .pay_invoice(&invoice) + .map_to_mm(|e| SendPaymentError::PaymentError(format!("{:?}", e))) + }) + .await?; + Ok(DBPaymentInfo { payment_hash, payment_type, description, preimage: None, secret: payment_secret, - amt_msat: invoice.amount_milli_satoshis().map(|a| a as i64), + amt_msat, fee_paid_msat: None, status: HTLCStatus::Pending, created_at: (now_ms() / 1000) as i64, @@ -160,7 +173,7 @@ impl LightningCoin { }) } - fn keysend( + async fn keysend( &self, destination: PublicKey, amount_msat: u64, @@ -173,9 +186,16 @@ impl LightningCoin { )); } let payment_preimage = PaymentPreimage(self.keys_manager.get_secure_random_bytes()); - self.invoice_payer - .pay_pubkey(destination, payment_preimage, amount_msat, final_cltv_expiry_delta) - .map_to_mm(|e| SendPaymentError::PaymentError(format!("{:?}", e)))?; + + let selfi = self.clone(); + async_blocking(move || { + selfi + .invoice_payer + .pay_pubkey(destination, payment_preimage, amount_msat, final_cltv_expiry_delta) + .map_to_mm(|e| SendPaymentError::PaymentError(format!("{:?}", e))) + }) + .await?; + let payment_hash = PaymentHash(Sha256::hash(&payment_preimage.0).into_inner()); let payment_type = PaymentType::OutboundPayment { destination }; @@ -199,12 +219,8 @@ impl LightningCoin { paging: PagingOptionsEnum, limit: usize, ) -> ListChannelsResult { - let mut total_open_channels: Vec = self - .channel_manager - .list_channels() - .into_iter() - .map(From::from) - .collect(); + let mut total_open_channels: Vec = + self.list_channels().await.into_iter().map(From::from).collect(); total_open_channels.sort_by(|a, b| a.rpc_channel_id.cmp(&b.rpc_channel_id)); @@ -425,13 +441,16 @@ impl MarketCoinOps for LightningCoin { } fn my_balance(&self) -> BalanceFut { + let coin = self.clone(); let decimals = self.decimals(); - let (spendable_msat, unspendable_msat) = self.get_balance_msat(); - let my_balance = CoinBalance { - spendable: big_decimal_from_sat_unsigned(spendable_msat, decimals), - unspendable: 
big_decimal_from_sat_unsigned(unspendable_msat, decimals), + let fut = async move { + let (spendable_msat, unspendable_msat) = coin.get_balance_msat().await; + Ok(CoinBalance { + spendable: big_decimal_from_sat_unsigned(spendable_msat, decimals), + unspendable: big_decimal_from_sat_unsigned(unspendable_msat, decimals), + }) }; - Box::new(futures01::future::ok(my_balance)) + Box::new(fut.boxed().compat()) } fn base_coin_balance(&self) -> BalanceFut { @@ -1273,12 +1292,12 @@ pub async fn send_payment(ctx: MmArc, req: SendPaymentReq) -> SendPaymentResult< )); } let payment_info = match req.payment { - Payment::Invoice { invoice } => ln_coin.pay_invoice(invoice.into())?, + Payment::Invoice { invoice } => ln_coin.pay_invoice(invoice.into()).await?, Payment::Keysend { destination, amount_in_msat, expiry, - } => ln_coin.keysend(destination.into(), amount_in_msat, expiry)?, + } => ln_coin.keysend(destination.into(), amount_in_msat, expiry).await?, }; ln_coin.db.add_or_update_payment_in_db(payment_info.clone()).await?; Ok(SendPaymentResponse { @@ -1473,16 +1492,23 @@ pub async fn close_channel(ctx: MmArc, req: CloseChannelReq) -> CloseChannelResu MmCoinEnum::LightningCoin(c) => c, _ => return MmError::err(CloseChannelError::UnsupportedCoin(coin.ticker().to_string())), }; + let channel_id = req.channel_id.0; if req.force_close { - ln_coin - .channel_manager - .force_close_channel(&req.channel_id.0) - .map_to_mm(|e| CloseChannelError::CloseChannelError(format!("{:?}", e)))?; + async_blocking(move || { + ln_coin + .channel_manager + .force_close_channel(&channel_id) + .map_to_mm(|e| CloseChannelError::CloseChannelError(format!("{:?}", e))) + }) + .await?; } else { - ln_coin - .channel_manager - .close_channel(&req.channel_id.0) - .map_to_mm(|e| CloseChannelError::CloseChannelError(format!("{:?}", e)))?; + async_blocking(move || { + ln_coin + .channel_manager + .close_channel(&channel_id) + .map_to_mm(|e| CloseChannelError::CloseChannelError(format!("{:?}", e))) + }) + .await?; 
} Ok(format!("Initiated closing of channel: {:?}", req.channel_id)) diff --git a/mm2src/coins/lightning/ln_events.rs b/mm2src/coins/lightning/ln_events.rs index 59af6a94ee..45e9ed8c9f 100644 --- a/mm2src/coins/lightning/ln_events.rs +++ b/mm2src/coins/lightning/ln_events.rs @@ -9,6 +9,7 @@ use common::executor::{spawn, Timer}; use common::log::{error, info}; use common::{now_ms, spawn_abortable, AbortOnDropHandle}; use core::time::Duration; +use futures::compat::Future01CompatExt; use lightning::chain::chaininterface::{ConfirmationTarget, FeeEstimator}; use lightning::chain::keysinterface::SpendableOutputDescriptor; use lightning::util::events::{Event, EventHandler, PaymentPurpose}; @@ -62,7 +63,7 @@ impl EventHandler for LightningEventHandler { Event::PendingHTLCsForwardable { time_forwardable } => self.handle_pending_htlcs_forwards(*time_forwardable), - Event::SpendableOutputs { outputs } => self.handle_spendable_outputs(outputs), + Event::SpendableOutputs { outputs } => self.handle_spendable_outputs(outputs.clone()), // Todo: an RPC for total amount earned Event::PaymentForwarded { fee_earned_msat, claim_from_onchain_tx } => info!( @@ -430,85 +431,92 @@ impl LightningEventHandler { }); } - fn handle_spendable_outputs(&self, outputs: &[SpendableOutputDescriptor]) { + fn handle_spendable_outputs(&self, outputs: Vec) { info!("Handling SpendableOutputs event!"); - let platform_coin = &self.platform.coin; // Todo: add support for Hardware wallets for funding transactions and spending spendable outputs (channel closing transactions) - let my_address = match platform_coin.as_ref().derivation_method.iguana_or_err() { - Ok(addr) => addr, + let my_address = match self.platform.coin.as_ref().derivation_method.iguana_or_err() { + Ok(addr) => addr.clone(), Err(e) => { error!("{}", e); return; }, }; - let change_destination_script = Builder::build_witness_script(&my_address.hash).to_bytes().take().into(); - let feerate_sat_per_1000_weight = 
self.platform.get_est_sat_per_1000_weight(ConfirmationTarget::Normal); - let output_descriptors = &outputs.iter().collect::>(); - let claiming_tx = match self.keys_manager.spend_spendable_outputs( - output_descriptors, - Vec::new(), - change_destination_script, - feerate_sat_per_1000_weight, - &Secp256k1::new(), - ) { - Ok(tx) => tx, - Err(_) => { - error!("Error spending spendable outputs"); - return; - }, - }; - let claiming_txid = claiming_tx.txid(); - let tx_hex = serialize_hex(&claiming_tx); - if let Err(e) = tokio::task::block_in_place(move || self.platform.coin.send_raw_tx(&tx_hex).wait()) { - // TODO: broadcast transaction through p2p network in this case - error!( - "Broadcasting of the claiming transaction {} failed: {}", - claiming_txid, e - ); - return; - } - - let claiming_tx_inputs_value = outputs.iter().fold(0, |sum, output| match output { - SpendableOutputDescriptor::StaticOutput { output, .. } => sum + output.value, - SpendableOutputDescriptor::DelayedPaymentOutput(descriptor) => sum + descriptor.output.value, - SpendableOutputDescriptor::StaticPaymentOutput(descriptor) => sum + descriptor.output.value, - }); - let claiming_tx_outputs_value = claiming_tx.output.iter().fold(0, |sum, txout| sum + txout.value); - if claiming_tx_inputs_value < claiming_tx_outputs_value { - error!( - "Claiming transaction input value {} can't be less than outputs value {}!", - claiming_tx_inputs_value, claiming_tx_outputs_value - ); - return; - } - let claiming_tx_fee = claiming_tx_inputs_value - claiming_tx_outputs_value; - let claiming_tx_fee_per_channel = (claiming_tx_fee as f64) / (outputs.len() as f64); + let platform = self.platform.clone(); + let db = self.db.clone(); + let keys_manager = self.keys_manager.clone(); - for output in outputs { - let (closing_txid, claimed_balance) = match output { - SpendableOutputDescriptor::StaticOutput { outpoint, output } => { - (outpoint.txid.to_string(), output.value) - }, - 
SpendableOutputDescriptor::DelayedPaymentOutput(descriptor) => { - (descriptor.outpoint.txid.to_string(), descriptor.output.value) - }, - SpendableOutputDescriptor::StaticPaymentOutput(descriptor) => { - (descriptor.outpoint.txid.to_string(), descriptor.output.value) + let abort_handler = spawn_abortable(async move { + let change_destination_script = Builder::build_witness_script(&my_address.hash).to_bytes().take().into(); + let feerate_sat_per_1000_weight = platform.get_est_sat_per_1000_weight(ConfirmationTarget::Normal); + let output_descriptors = outputs.iter().collect::>(); + let claiming_tx = match keys_manager.spend_spendable_outputs( + &output_descriptors, + Vec::new(), + change_destination_script, + feerate_sat_per_1000_weight, + &Secp256k1::new(), + ) { + Ok(tx) => tx, + Err(_) => { + error!("Error spending spendable outputs"); + return; }, }; - let db = self.db.clone(); - - // This doesn't need to be respawned on restart unlike add_closing_tx_to_db since Event::SpendableOutputs will be re-fired on restart - // if the spending_tx is not broadcasted. - let abort_handler = spawn_abortable(add_claiming_tx_to_db_loop( - db, - closing_txid, - claiming_txid.to_string(), - (claimed_balance as f64) - claiming_tx_fee_per_channel, - )); - self.abort_handlers.lock().push(abort_handler); - } + + let claiming_txid = claiming_tx.txid(); + let tx_hex = serialize_hex(&claiming_tx); + + if let Err(e) = platform.coin.send_raw_tx(&tx_hex).compat().await { + // TODO: broadcast transaction through p2p network in this case, we have to check that the transactions is confirmed on-chain after this. + error!( + "Broadcasting of the claiming transaction {} failed: {}", + claiming_txid, e + ); + return; + } + + let claiming_tx_inputs_value = outputs.iter().fold(0, |sum, output| match output { + SpendableOutputDescriptor::StaticOutput { output, .. 
} => sum + output.value, + SpendableOutputDescriptor::DelayedPaymentOutput(descriptor) => sum + descriptor.output.value, + SpendableOutputDescriptor::StaticPaymentOutput(descriptor) => sum + descriptor.output.value, + }); + let claiming_tx_outputs_value = claiming_tx.output.iter().fold(0, |sum, txout| sum + txout.value); + if claiming_tx_inputs_value < claiming_tx_outputs_value { + error!( + "Claiming transaction input value {} can't be less than outputs value {}!", + claiming_tx_inputs_value, claiming_tx_outputs_value + ); + return; + } + let claiming_tx_fee = claiming_tx_inputs_value - claiming_tx_outputs_value; + let claiming_tx_fee_per_channel = (claiming_tx_fee as f64) / (outputs.len() as f64); + + for output in outputs { + let (closing_txid, claimed_balance) = match output { + SpendableOutputDescriptor::StaticOutput { outpoint, output } => { + (outpoint.txid.to_string(), output.value) + }, + SpendableOutputDescriptor::DelayedPaymentOutput(descriptor) => { + (descriptor.outpoint.txid.to_string(), descriptor.output.value) + }, + SpendableOutputDescriptor::StaticPaymentOutput(descriptor) => { + (descriptor.outpoint.txid.to_string(), descriptor.output.value) + }, + }; + + // This doesn't need to be respawned on restart unlike add_closing_tx_to_db since Event::SpendableOutputs will be re-fired on restart + // if the spending_tx is not broadcasted. 
+ add_claiming_tx_to_db_loop( + db.clone(), + closing_txid, + claiming_txid.to_string(), + (claimed_balance as f64) - claiming_tx_fee_per_channel, + ) + .await; + } + }); + self.abort_handlers.lock().push(abort_handler); } fn handle_open_channel_request( diff --git a/mm2src/coins/lightning/ln_filesystem_persister.rs b/mm2src/coins/lightning/ln_filesystem_persister.rs index 7182f51eb3..4a228e1e85 100644 --- a/mm2src/coins/lightning/ln_filesystem_persister.rs +++ b/mm2src/coins/lightning/ln_filesystem_persister.rs @@ -35,7 +35,7 @@ use {std::ffi::OsStr, std::os::windows::ffi::OsStrExt}; pub struct LightningFilesystemPersister { main_path: PathBuf, backup_path: Option, - channels_persister: FilesystemPersister, + channels_persister: Arc, } impl LightningFilesystemPersister { @@ -46,7 +46,7 @@ impl LightningFilesystemPersister { Self { main_path: main_path.clone(), backup_path, - channels_persister: FilesystemPersister::new(main_path.display().to_string()), + channels_persister: Arc::new(FilesystemPersister::new(main_path.display().to_string())), } } @@ -60,7 +60,7 @@ impl LightningFilesystemPersister { /// Get the channels_persister which was initialized when this persister was initialized. 
#[inline] - pub fn channels_persister(&self) -> &FilesystemPersister { &self.channels_persister } + pub fn channels_persister(&self) -> Arc { self.channels_persister.clone() } pub fn monitor_backup_path(&self) -> Option { if let Some(mut backup_path) = self.backup_path() { diff --git a/mm2src/coins/lightning/ln_utils.rs b/mm2src/coins/lightning/ln_utils.rs index 30b3e3c2de..d815587cb3 100644 --- a/mm2src/coins/lightning/ln_utils.rs +++ b/mm2src/coins/lightning/ln_utils.rs @@ -12,7 +12,6 @@ use common::log; use common::log::LogState; use lightning::chain::keysinterface::{InMemorySigner, KeysManager}; use lightning::chain::{chainmonitor, BestBlock, Watch}; -use lightning::ln::channelmanager; use lightning::ln::channelmanager::{ChainParameters, ChannelManagerReadArgs, SimpleArcChannelManager}; use lightning::util::config::UserConfig; use lightning::util::ser::ReadableArgs; @@ -120,13 +119,19 @@ pub async fn init_channel_manager( )); // Read ChannelMonitor state from disk, important for lightning node is restarting and has at least 1 channel - let mut channelmonitors = persister - .channels_persister() - .read_channelmonitors(keys_manager.clone()) - .map_to_mm(|e| EnableLightningError::IOError(e.to_string()))?; + let channels_persister = persister.channels_persister(); + let channels_keys_manager = keys_manager.clone(); + let mut channelmonitors = async_blocking(move || { + channels_persister + .read_channelmonitors(channels_keys_manager) + .map_to_mm(|e| EnableLightningError::IOError(e.to_string())) + }) + .await?; // This is used for Electrum only to prepare for chain synchronization for (_, chan_mon) in channelmonitors.iter() { + // Although there is a mutex lock inside the load_outputs_to_watch fn + // it shouldn't be held by anything yet, so async_blocking is not needed. 
chan_mon.load_outputs_to_watch(&platform); } @@ -143,63 +148,73 @@ pub async fn init_channel_manager( platform.update_best_block_height(best_header.block_height()); let best_block = RpcBestBlock::from(best_header.clone()); let best_block_hash = BlockHash::from_hash(sha256d::Hash::from_inner(best_block.hash.0)); - let (channel_manager_blockhash, channel_manager) = { - if let Ok(mut f) = File::open(persister.manager_path()) { + + // Todo: Simplify this + let channel_manager = if persister.manager_path().exists() { + let chain_monitor_for_args = chain_monitor.clone(); + + let (channel_manager_blockhash, channel_manager, channelmonitors) = async_blocking(move || { + let mut manager_file = match File::open(persister.manager_path()) { + Ok(f) => f, + Err(e) => return Err(e.into()), + }; + let mut channel_monitor_mut_references = Vec::new(); for (_, channel_monitor) in channelmonitors.iter_mut() { channel_monitor_mut_references.push(channel_monitor); } + // Read ChannelManager data from the file let read_args = ChannelManagerReadArgs::new( keys_manager.clone(), fee_estimator.clone(), - chain_monitor.clone(), + chain_monitor_for_args, broadcaster.clone(), logger.clone(), user_config, channel_monitor_mut_references, ); - <(BlockHash, ChannelManager)>::read(&mut f, read_args) - .map_to_mm(|e| EnableLightningError::IOError(e.to_string()))? 
- } else { - // Initialize the ChannelManager to starting a new node without history - let chain_params = ChainParameters { - network: platform.network.clone().into(), - best_block: BestBlock::new(best_block_hash, best_block.height as u32), - }; - let new_channel_manager = channelmanager::ChannelManager::new( - fee_estimator.clone(), - chain_monitor.clone(), - broadcaster.clone(), - logger.clone(), - keys_manager.clone(), - user_config, - chain_params, - ); - (best_block_hash, new_channel_manager) - } - }; - - let channel_manager: Arc = Arc::new(channel_manager); + <(BlockHash, Arc)>::read(&mut manager_file, read_args) + .map(|(h, c)| (h, c, channelmonitors)) + .map_to_mm(|e| EnableLightningError::IOError(e.to_string())) + }) + .await?; - // Sync ChannelMonitors and ChannelManager to chain tip if the node is restarting and has open channels - platform - .process_txs_confirmations(&rpc_client, &db, &chain_monitor, &channel_manager) - .await; - if channel_manager_blockhash != best_block_hash { + // Sync ChannelMonitors and ChannelManager to chain tip if the node is restarting and has open channels platform - .process_txs_unconfirmations(&chain_monitor, &channel_manager) + .process_txs_confirmations(&rpc_client, &db, &chain_monitor, &channel_manager) .await; - update_best_block(&chain_monitor, &channel_manager, best_header).await; - } + if channel_manager_blockhash != best_block_hash { + platform + .process_txs_unconfirmations(&chain_monitor, &channel_manager) + .await; + update_best_block(&chain_monitor, &channel_manager, best_header).await; + } - // Give ChannelMonitors to ChainMonitor - for (_, channel_monitor) in channelmonitors.drain(..) 
{ - let funding_outpoint = channel_monitor.get_funding_txo().0; - chain_monitor - .watch_channel(funding_outpoint, channel_monitor) - .map_to_mm(|e| EnableLightningError::IOError(format!("{:?}", e)))?; - } + // Give ChannelMonitors to ChainMonitor + for (_, channel_monitor) in channelmonitors.into_iter() { + let funding_outpoint = channel_monitor.get_funding_txo().0; + chain_monitor + .watch_channel(funding_outpoint, channel_monitor) + .map_to_mm(|e| EnableLightningError::IOError(format!("{:?}", e)))?; + } + channel_manager + } else { + // Initialize the ChannelManager to starting a new node without history + let chain_params = ChainParameters { + network: platform.network.clone().into(), + best_block: BestBlock::new(best_block_hash, best_block.height as u32), + }; + Arc::new(ChannelManager::new( + fee_estimator.clone(), + chain_monitor.clone(), + broadcaster.clone(), + logger.clone(), + keys_manager.clone(), + user_config, + chain_params, + )) + }; // Update best block whenever there's a new chain tip or a block has been newly disconnected spawn(ln_best_block_update_loop( From b74f4de1d0547dfd7d0eefbbfe2fa0667c62f4a8 Mon Sep 17 00:00:00 2001 From: shamardy Date: Fri, 29 Jul 2022 17:23:31 +0200 Subject: [PATCH 02/33] use async_blocking where it's needed completed --- mm2src/coins/lightning.rs | 36 +++++++----- mm2src/coins/lightning/ln_p2p.rs | 11 +++- mm2src/coins/lightning/ln_platform.rs | 83 +++++++++++++++++---------- mm2src/coins/lightning/ln_utils.rs | 24 +++++--- mm2src/coins/utxo/spv.rs | 1 + 5 files changed, 98 insertions(+), 57 deletions(-) diff --git a/mm2src/coins/lightning.rs b/mm2src/coins/lightning.rs index 33c7b279eb..f449012eed 100644 --- a/mm2src/coins/lightning.rs +++ b/mm2src/coins/lightning.rs @@ -1169,8 +1169,8 @@ pub async fn get_channel_details( _ => return MmError::err(GetChannelDetailsError::UnsupportedCoin(coin.ticker().to_string())), }; let channel_details = match ln_coin - .channel_manager .list_channels() + .await .into_iter() 
.find(|chan| chan.user_channel_id == req.rpc_channel_id) { @@ -1219,14 +1219,17 @@ pub async fn generate_invoice( node_pubkey )); } + let network = ln_coin.platform.network.clone().into(); - let invoice = create_invoice_from_channelmanager( - &ln_coin.channel_manager, - ln_coin.keys_manager, - network, - req.amount_in_msat, - req.description.clone(), - )?; + let channel_manager = ln_coin.channel_manager.clone(); + let keys_manager = ln_coin.keys_manager.clone(); + let amount_in_msat = req.amount_in_msat; + let description = req.description.clone(); + let invoice = async_blocking(move || { + create_invoice_from_channelmanager(&channel_manager, keys_manager, network, amount_in_msat, description) + }) + .await?; + let payment_hash = invoice.payment_hash().into_inner(); let payment_info = DBPaymentInfo { payment_hash: PaymentHash(payment_hash), @@ -1615,14 +1618,17 @@ pub async fn get_claimable_balances( let ignored_channels = if req.include_open_channels_balances { Vec::new() } else { - ln_coin.channel_manager.list_channels() + ln_coin.list_channels().await }; - let claimable_balances = ln_coin - .chain_monitor - .get_claimable_balances(&ignored_channels.iter().collect::>()[..]) - .into_iter() - .map(From::from) - .collect(); + let claimable_balances = async_blocking(move || { + ln_coin + .chain_monitor + .get_claimable_balances(&ignored_channels.iter().collect::>()[..]) + .into_iter() + .map(From::from) + .collect() + }) + .await; Ok(claimable_balances) } diff --git a/mm2src/coins/lightning/ln_p2p.rs b/mm2src/coins/lightning/ln_p2p.rs index cb4ab8c14a..e3f6108d09 100644 --- a/mm2src/coins/lightning/ln_p2p.rs +++ b/mm2src/coins/lightning/ln_p2p.rs @@ -34,7 +34,9 @@ pub async fn connect_to_node( node_addr: SocketAddr, peer_manager: Arc, ) -> ConnectToNodeResult { - if peer_manager.get_peer_node_ids().contains(&pubkey) { + let peer_manager_ref = peer_manager.clone(); + let peer_node_ids = async_blocking(move || peer_manager_ref.get_peer_node_ids()).await; + if 
peer_node_ids.contains(&pubkey) { return Ok(ConnectToNodeRes::AlreadyConnected { pubkey, node_addr }); } @@ -61,7 +63,9 @@ pub async fn connect_to_node( std::task::Poll::Pending => {}, } - if peer_manager.get_peer_node_ids().contains(&pubkey) { + let peer_manager = peer_manager.clone(); + let peer_node_ids = async_blocking(move || peer_manager.get_peer_node_ids()).await; + if peer_node_ids.contains(&pubkey) { break; } @@ -132,7 +136,8 @@ pub async fn ln_node_announcement_loop( continue; }, }; - channel_manager.broadcast_node_announcement(node_color, node_name, addresses); + let channel_manager = channel_manager.clone(); + async_blocking(move || channel_manager.broadcast_node_announcement(node_color, node_name, addresses)).await; Timer::sleep(BROADCAST_NODE_ANNOUNCEMENT_INTERVAL as f64).await; } diff --git a/mm2src/coins/lightning/ln_platform.rs b/mm2src/coins/lightning/ln_platform.rs index 38535624a3..33fa72749d 100644 --- a/mm2src/coins/lightning/ln_platform.rs +++ b/mm2src/coins/lightning/ln_platform.rs @@ -43,8 +43,8 @@ pub async fn get_best_header(best_header_listener: &ElectrumClient) -> EnableLig } pub async fn update_best_block( - chain_monitor: &ChainMonitor, - channel_manager: &ChannelManager, + chain_monitor: Arc, + channel_manager: Arc, best_header: ElectrumBlockHeader, ) { { @@ -81,8 +81,8 @@ pub async fn update_best_block( (block_header, h.height as u32) }, }; - channel_manager.best_block_updated(&new_best_header, new_best_height); - chain_monitor.best_block_updated(&new_best_header, new_best_height); + async_blocking(move || channel_manager.best_block_updated(&new_best_header, new_best_height)).await; + async_blocking(move || chain_monitor.best_block_updated(&new_best_header, new_best_height)).await; } } @@ -100,16 +100,21 @@ pub async fn ln_best_block_update_loop( // in case a transaction confirmation fails due to electrums being down. This way there will be no need to wait for a new // block to confirm such transaction and causing delays. 
platform - .process_txs_confirmations(&best_header_listener, &db, &chain_monitor, &channel_manager) + .process_txs_confirmations( + &best_header_listener, + &db, + Arc::clone(&chain_monitor), + Arc::clone(&channel_manager), + ) .await; let best_header = ok_or_continue_after_sleep!(get_best_header(&best_header_listener).await, TRY_LOOP_INTERVAL); if current_best_block != best_header.clone().into() { platform.update_best_block_height(best_header.block_height()); platform - .process_txs_unconfirmations(&chain_monitor, &channel_manager) + .process_txs_unconfirmations(Arc::clone(&chain_monitor), Arc::clone(&channel_manager)) .await; current_best_block = best_header.clone().into(); - update_best_block(&chain_monitor, &channel_manager, best_header).await; + update_best_block(Arc::clone(&chain_monitor), Arc::clone(&channel_manager), best_header).await; } Timer::sleep(CHECK_FOR_NEW_BEST_BLOCK_INTERVAL).await; } @@ -184,9 +189,9 @@ impl Platform { registered_outputs.push(output); } - async fn process_tx_for_unconfirmation(&self, txid: Txid, monitor: &T) + async fn process_tx_for_unconfirmation(&self, txid: Txid, monitor: Arc) where - T: Confirm, + T: Confirm + Send + Sync + 'static, { let rpc_txid = h256_json_from_txid(txid); match self.rpc_client().get_tx_if_onchain(&rpc_txid).await { @@ -196,7 +201,8 @@ impl Platform { "Transaction {} is not found on chain. The transaction will be re-broadcasted.", txid, ); - monitor.transaction_unconfirmed(&txid); + let monitor = monitor.clone(); + async_blocking(move || monitor.transaction_unconfirmed(&txid)).await; // If a transaction is unconfirmed due to a block reorganization; LDK will rebroadcast it. // In this case, this transaction needs to be added again to the registered transactions // to start watching for it on the chain again. 
@@ -209,17 +215,23 @@ impl Platform { } } - pub async fn process_txs_unconfirmations(&self, chain_monitor: &ChainMonitor, channel_manager: &ChannelManager) { + pub async fn process_txs_unconfirmations( + &self, + chain_monitor: Arc, + channel_manager: Arc, + ) { // Retrieve channel manager transaction IDs to check the chain for un-confirmations let channel_manager_relevant_txids = channel_manager.get_relevant_txids(); for txid in channel_manager_relevant_txids { - self.process_tx_for_unconfirmation(txid, channel_manager).await; + self.process_tx_for_unconfirmation(txid, Arc::clone(&channel_manager)) + .await; } // Retrieve chain monitor transaction IDs to check the chain for un-confirmations let chain_monitor_relevant_txids = chain_monitor.get_relevant_txids(); for txid in chain_monitor_relevant_txids { - self.process_tx_for_unconfirmation(txid, chain_monitor).await; + self.process_tx_for_unconfirmation(txid, Arc::clone(&chain_monitor)) + .await; } } @@ -346,8 +358,8 @@ impl Platform { &self, client: &ElectrumClient, db: &SqliteLightningDB, - chain_monitor: &ChainMonitor, - channel_manager: &ChannelManager, + chain_monitor: Arc, + channel_manager: Arc, ) { let mut transactions_to_confirm = self.get_confirmed_registered_txs(client).await; self.append_spent_registered_output_txs(&mut transactions_to_confirm, client) @@ -366,22 +378,31 @@ impl Platform { { error!("Unable to update the funding tx block height in DB: {}", e); } - channel_manager.transactions_confirmed( - &confirmed_transaction_info.header.clone().into(), - &[( - confirmed_transaction_info.index as usize, - &confirmed_transaction_info.tx.clone().into(), - )], - confirmed_transaction_info.height as u32, - ); - chain_monitor.transactions_confirmed( - &confirmed_transaction_info.header.into(), - &[( - confirmed_transaction_info.index as usize, - &confirmed_transaction_info.tx.into(), - )], - confirmed_transaction_info.height as u32, - ); + let channel_manager = channel_manager.clone(); + let 
confirmed_transaction_info_cloned = confirmed_transaction_info.clone(); + async_blocking(move || { + channel_manager.transactions_confirmed( + &confirmed_transaction_info_cloned.header.clone().into(), + &[( + confirmed_transaction_info_cloned.index as usize, + &confirmed_transaction_info_cloned.tx.clone().into(), + )], + confirmed_transaction_info_cloned.height as u32, + ) + }) + .await; + let chain_monitor = chain_monitor.clone(); + async_blocking(move || { + chain_monitor.transactions_confirmed( + &confirmed_transaction_info.header.into(), + &[( + confirmed_transaction_info.index as usize, + &confirmed_transaction_info.tx.into(), + )], + confirmed_transaction_info.height as u32, + ) + }) + .await; } } diff --git a/mm2src/coins/lightning/ln_utils.rs b/mm2src/coins/lightning/ln_utils.rs index d815587cb3..fb292a6797 100644 --- a/mm2src/coins/lightning/ln_utils.rs +++ b/mm2src/coins/lightning/ln_utils.rs @@ -149,7 +149,6 @@ pub async fn init_channel_manager( let best_block = RpcBestBlock::from(best_header.clone()); let best_block_hash = BlockHash::from_hash(sha256d::Hash::from_inner(best_block.hash.0)); - // Todo: Simplify this let channel_manager = if persister.manager_path().exists() { let chain_monitor_for_args = chain_monitor.clone(); @@ -182,21 +181,30 @@ pub async fn init_channel_manager( // Sync ChannelMonitors and ChannelManager to chain tip if the node is restarting and has open channels platform - .process_txs_confirmations(&rpc_client, &db, &chain_monitor, &channel_manager) + .process_txs_confirmations( + &rpc_client, + &db, + Arc::clone(&chain_monitor), + Arc::clone(&channel_manager), + ) .await; if channel_manager_blockhash != best_block_hash { platform - .process_txs_unconfirmations(&chain_monitor, &channel_manager) + .process_txs_unconfirmations(Arc::clone(&chain_monitor), Arc::clone(&channel_manager)) .await; - update_best_block(&chain_monitor, &channel_manager, best_header).await; + update_best_block(Arc::clone(&chain_monitor), 
Arc::clone(&channel_manager), best_header).await; } // Give ChannelMonitors to ChainMonitor for (_, channel_monitor) in channelmonitors.into_iter() { let funding_outpoint = channel_monitor.get_funding_txo().0; - chain_monitor - .watch_channel(funding_outpoint, channel_monitor) - .map_to_mm(|e| EnableLightningError::IOError(format!("{:?}", e)))?; + let chain_monitor = chain_monitor.clone(); + async_blocking(move || { + chain_monitor + .watch_channel(funding_outpoint, channel_monitor) + .map_to_mm(|e| EnableLightningError::IOError(format!("{:?}", e))) + }) + .await?; } channel_manager } else { @@ -245,7 +253,7 @@ pub async fn get_open_channels_nodes_addresses( persister: LightningPersisterShared, channel_manager: Arc, ) -> EnableLightningResult { - let channels = channel_manager.list_channels(); + let channels = async_blocking(move || channel_manager.list_channels()).await; let mut nodes_addresses = persister.get_nodes_addresses().await?; nodes_addresses.retain(|pubkey, _node_addr| { channels diff --git a/mm2src/coins/utxo/spv.rs b/mm2src/coins/utxo/spv.rs index 6d85d0afbe..caae118106 100644 --- a/mm2src/coins/utxo/spv.rs +++ b/mm2src/coins/utxo/spv.rs @@ -10,6 +10,7 @@ use serialization::serialize_list; use spv_validation::helpers_validation::SPVError; use spv_validation::spv_proof::{SPVProof, TRY_SPV_PROOF_INTERVAL}; +#[derive(Clone)] pub struct ConfirmedTransactionInfo { pub tx: UtxoTx, pub header: BlockHeader, From a51e1df341af45e8c08c49885992cc6beebb2fcf Mon Sep 17 00:00:00 2001 From: shamardy Date: Mon, 1 Aug 2022 22:17:21 +0200 Subject: [PATCH 03/33] get default fee from rpc when starting lightning + update when a new fee is received --- mm2src/coins/lightning.rs | 5 +- mm2src/coins/lightning/ln_conf.rs | 16 ++-- mm2src/coins/lightning/ln_errors.rs | 4 + mm2src/coins/lightning/ln_platform.rs | 87 ++++++++++++++++--- mm2src/coins/lp_coins.rs | 4 +- .../src/lightning_activation.rs | 4 +- .../mm2_main/src/mm2_tests/lightning_tests.rs | 51 +++-------- 7 files 
changed, 102 insertions(+), 69 deletions(-) diff --git a/mm2src/coins/lightning.rs b/mm2src/coins/lightning.rs index f449012eed..06834a66d1 100644 --- a/mm2src/coins/lightning.rs +++ b/mm2src/coins/lightning.rs @@ -45,7 +45,7 @@ use lightning_background_processor::BackgroundProcessor; use lightning_invoice::payment; use lightning_invoice::utils::{create_invoice_from_channelmanager, DefaultRouter}; use lightning_invoice::{Invoice, InvoiceDescription}; -use ln_conf::{ChannelOptions, LightningCoinConf, LightningProtocolConf, PlatformCoinConfirmations}; +use ln_conf::{ChannelOptions, LightningCoinConf, LightningProtocolConf, PlatformCoinConfirmationTargets}; use ln_db::{ClosedChannelsFilter, DBChannelDetails, DBPaymentInfo, DBPaymentsFilter, HTLCStatus, LightningDB, PaymentType}; use ln_errors::{ClaimableBalancesError, ClaimableBalancesResult, CloseChannelError, CloseChannelResult, @@ -641,8 +641,9 @@ pub async fn start_lightning( let platform = Arc::new(Platform::new( platform_coin.clone(), protocol_conf.network.clone(), - protocol_conf.confirmations, + protocol_conf.confirmation_targets, )); + platform.set_default_fees().await?; // Initialize the Logger let logger = ctx.log.0.clone(); diff --git a/mm2src/coins/lightning/ln_conf.rs b/mm2src/coins/lightning/ln_conf.rs index b40234d32d..9c1a1adec0 100644 --- a/mm2src/coins/lightning/ln_conf.rs +++ b/mm2src/coins/lightning/ln_conf.rs @@ -2,23 +2,17 @@ use crate::utxo::BlockchainNetwork; use lightning::util::config::{ChannelConfig, ChannelHandshakeConfig, ChannelHandshakeLimits, UserConfig}; #[derive(Clone, Debug, Deserialize, Serialize)] -pub struct DefaultFeesAndConfirmations { - pub default_fee_per_kb: u64, - pub n_blocks: u32, -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct PlatformCoinConfirmations { - pub background: DefaultFeesAndConfirmations, - pub normal: DefaultFeesAndConfirmations, - pub high_priority: DefaultFeesAndConfirmations, +pub struct PlatformCoinConfirmationTargets { + pub 
background: u32, + pub normal: u32, + pub high_priority: u32, } #[derive(Debug)] pub struct LightningProtocolConf { pub platform_coin_ticker: String, pub network: BlockchainNetwork, - pub confirmations: PlatformCoinConfirmations, + pub confirmation_targets: PlatformCoinConfirmationTargets, } #[derive(Clone, Debug, Deserialize, PartialEq)] diff --git a/mm2src/coins/lightning/ln_errors.rs b/mm2src/coins/lightning/ln_errors.rs index e70f555d8c..fe3bb46237 100644 --- a/mm2src/coins/lightning/ln_errors.rs +++ b/mm2src/coins/lightning/ln_errors.rs @@ -72,6 +72,10 @@ impl From for EnableLightningError { fn from(err: SqlError) -> EnableLightningError { EnableLightningError::DbError(err.to_string()) } } +impl From for EnableLightningError { + fn from(e: UtxoRpcError) -> Self { EnableLightningError::RpcError(e.to_string()) } +} + #[derive(Debug, Deserialize, Display, Serialize, SerializeErrorType)] #[serde(tag = "error_type", content = "error_data")] pub enum ConnectToNodeError { diff --git a/mm2src/coins/lightning/ln_platform.rs b/mm2src/coins/lightning/ln_platform.rs index 33fa72749d..84e6cb53d0 100644 --- a/mm2src/coins/lightning/ln_platform.rs +++ b/mm2src/coins/lightning/ln_platform.rs @@ -1,7 +1,7 @@ use super::*; use crate::lightning::ln_errors::{SaveChannelClosingError, SaveChannelClosingResult}; use crate::utxo::rpc_clients::{BestBlock as RpcBestBlock, BlockHashOrHeight, ElectrumBlockHeader, ElectrumClient, - ElectrumNonce, EstimateFeeMethod, UtxoRpcClientEnum}; + ElectrumNonce, EstimateFeeMethod, UtxoRpcClientEnum, UtxoRpcResult}; use crate::utxo::spv::{ConfirmedTransactionInfo, SimplePaymentVerification}; use crate::utxo::utxo_standard::UtxoStandardCoin; use crate::{MarketCoinOps, MmCoin}; @@ -22,12 +22,20 @@ use rpc::v1::types::{Bytes as BytesJson, H256 as H256Json}; use spv_validation::spv_proof::TRY_SPV_PROOF_INTERVAL; use std::cmp; use std::convert::{TryFrom, TryInto}; -use std::sync::atomic::{AtomicU64, Ordering as AtomicOrdering}; +use 
std::sync::atomic::{AtomicU64, Ordering as AtomicOrdering, Ordering}; const CHECK_FOR_NEW_BEST_BLOCK_INTERVAL: f64 = 60.; const MIN_ALLOWED_FEE_PER_1000_WEIGHT: u32 = 253; const TRY_LOOP_INTERVAL: f64 = 60.; +static DEFAULT_BACKGROUND_FEES_PER_VB: AtomicU64 = AtomicU64::new(1012); +static DEFAULT_NORMAL_FEES_PER_VB: AtomicU64 = AtomicU64::new(8000); +static DEFAULT_HIGH_PRIORITY_FEES_PER_VB: AtomicU64 = AtomicU64::new(20000); + +fn set_default_background_fees(fee: u64) { DEFAULT_BACKGROUND_FEES_PER_VB.store(fee, Ordering::Relaxed); } +fn set_default_normal_fees(fee: u64) { DEFAULT_BACKGROUND_FEES_PER_VB.store(fee, Ordering::Relaxed); } +fn set_default_high_priority_fees(fee: u64) { DEFAULT_BACKGROUND_FEES_PER_VB.store(fee, Ordering::Relaxed); } + #[inline] pub fn h256_json_from_txid(txid: Txid) -> H256Json { H256Json::from(txid.as_hash().into_inner()).reversed() } @@ -139,9 +147,8 @@ pub struct Platform { pub network: BlockchainNetwork, /// The best block height. pub best_block_height: AtomicU64, - /// Default fees to and confirmation targets to be used for FeeEstimator. Default fees are used when the call for - /// estimate_fee_sat fails. - pub default_fees_and_confirmations: PlatformCoinConfirmations, + /// Number of blocks for every Confirmation target. This is used in the FeeEstimator. + pub confirmations_targets: PlatformCoinConfirmationTargets, /// This cache stores the transactions that the LN node has interest in. pub registered_txs: PaMutex>, /// This cache stores the outputs that the LN node has interest in. 
@@ -155,13 +162,13 @@ impl Platform { pub fn new( coin: UtxoStandardCoin, network: BlockchainNetwork, - default_fees_and_confirmations: PlatformCoinConfirmations, + default_fees_and_confirmations: PlatformCoinConfirmationTargets, ) -> Self { Platform { coin, network, best_block_height: AtomicU64::new(0), - default_fees_and_confirmations, + confirmations_targets: default_fees_and_confirmations, registered_txs: PaMutex::new(HashSet::new()), registered_outputs: PaMutex::new(Vec::new()), unsigned_funding_txs: PaMutex::new(HashMap::new()), @@ -171,6 +178,52 @@ impl Platform { #[inline] fn rpc_client(&self) -> &UtxoRpcClientEnum { &self.coin.as_ref().rpc_client } + pub async fn set_default_fees(&self) -> UtxoRpcResult<()> { + let platform_coin = &self.coin; + let conf = &platform_coin.as_ref().conf; + + let default_background_fees = self + .rpc_client() + .estimate_fee_sat( + platform_coin.decimals(), + // Todo: when implementing Native client detect_fee_method should be used for Native and EstimateFeeMethod::Standard for Electrum + &EstimateFeeMethod::Standard, + &conf.estimate_fee_mode, + self.confirmations_targets.background, + ) + .compat() + .await?; + set_default_background_fees(default_background_fees); + + let default_normal_fees = self + .rpc_client() + .estimate_fee_sat( + platform_coin.decimals(), + // Todo: when implementing Native client detect_fee_method should be used for Native and EstimateFeeMethod::Standard for Electrum + &EstimateFeeMethod::Standard, + &conf.estimate_fee_mode, + self.confirmations_targets.normal, + ) + .compat() + .await?; + set_default_normal_fees(default_normal_fees); + + let default_high_priority_fees = self + .rpc_client() + .estimate_fee_sat( + platform_coin.decimals(), + // Todo: when implementing Native client detect_fee_method should be used for Native and EstimateFeeMethod::Standard for Electrum + &EstimateFeeMethod::Standard, + &conf.estimate_fee_mode, + self.confirmations_targets.high_priority, + ) + .compat() + .await?; + 
set_default_high_priority_fees(default_high_priority_fees); + + Ok(()) + } + #[inline] pub fn update_best_block_height(&self, new_height: u64) { self.best_block_height.store(new_height, AtomicOrdering::Relaxed); @@ -444,16 +497,16 @@ impl FeeEstimator for Platform { let platform_coin = &self.coin; let default_fee = match confirmation_target { - ConfirmationTarget::Background => self.default_fees_and_confirmations.background.default_fee_per_kb, - ConfirmationTarget::Normal => self.default_fees_and_confirmations.normal.default_fee_per_kb, - ConfirmationTarget::HighPriority => self.default_fees_and_confirmations.high_priority.default_fee_per_kb, + ConfirmationTarget::Background => DEFAULT_BACKGROUND_FEES_PER_VB.load(Ordering::Relaxed), + ConfirmationTarget::Normal => DEFAULT_NORMAL_FEES_PER_VB.load(Ordering::Relaxed), + ConfirmationTarget::HighPriority => DEFAULT_HIGH_PRIORITY_FEES_PER_VB.load(Ordering::Relaxed), }; let conf = &platform_coin.as_ref().conf; let n_blocks = match confirmation_target { - ConfirmationTarget::Background => self.default_fees_and_confirmations.background.n_blocks, - ConfirmationTarget::Normal => self.default_fees_and_confirmations.normal.n_blocks, - ConfirmationTarget::HighPriority => self.default_fees_and_confirmations.high_priority.n_blocks, + ConfirmationTarget::Background => self.confirmations_targets.background, + ConfirmationTarget::Normal => self.confirmations_targets.normal, + ConfirmationTarget::HighPriority => self.confirmations_targets.high_priority, }; let fee_per_kb = tokio::task::block_in_place(move || { self.rpc_client() @@ -468,6 +521,14 @@ impl FeeEstimator for Platform { .wait() .unwrap_or(default_fee) }); + + // Set default fee to last known fee for the corresponding confirmation target + match confirmation_target { + ConfirmationTarget::Background => DEFAULT_BACKGROUND_FEES_PER_VB.store(fee_per_kb, Ordering::Relaxed), + ConfirmationTarget::Normal => DEFAULT_NORMAL_FEES_PER_VB.store(fee_per_kb, Ordering::Relaxed), + 
ConfirmationTarget::HighPriority => DEFAULT_HIGH_PRIORITY_FEES_PER_VB.store(fee_per_kb, Ordering::Relaxed), + }; + // Must be no smaller than 253 (ie 1 satoshi-per-byte rounded up to ensure later round-downs don’t put us below 1 satoshi-per-byte). // https://docs.rs/lightning/0.0.101/lightning/chain/chaininterface/trait.FeeEstimator.html#tymethod.get_est_sat_per_1000_weight cmp::max((fee_per_kb as f64 / 4.0).ceil() as u32, MIN_ALLOWED_FEE_PER_1000_WEIGHT) diff --git a/mm2src/coins/lp_coins.rs b/mm2src/coins/lp_coins.rs index 0f1e3e4085..6ff7425103 100644 --- a/mm2src/coins/lp_coins.rs +++ b/mm2src/coins/lp_coins.rs @@ -64,7 +64,7 @@ use utxo_signer::with_key_pair::UtxoSignWithKeyPairError; cfg_native! { use crate::lightning::LightningCoin; - use crate::lightning::ln_conf::PlatformCoinConfirmations; + use crate::lightning::ln_conf::PlatformCoinConfirmationTargets; use async_std::fs; use futures::AsyncWriteExt; use std::io; @@ -2067,7 +2067,7 @@ pub enum CoinProtocol { LIGHTNING { platform: String, network: BlockchainNetwork, - confirmations: PlatformCoinConfirmations, + confirmation_targets: PlatformCoinConfirmationTargets, }, #[cfg(not(target_arch = "wasm32"))] SOLANA, diff --git a/mm2src/coins_activation/src/lightning_activation.rs b/mm2src/coins_activation/src/lightning_activation.rs index 5572755d09..eadbb192d3 100644 --- a/mm2src/coins_activation/src/lightning_activation.rs +++ b/mm2src/coins_activation/src/lightning_activation.rs @@ -38,11 +38,11 @@ impl TryFromCoinProtocol for LightningProtocolConf { CoinProtocol::LIGHTNING { platform, network, - confirmations, + confirmation_targets, } => Ok(LightningProtocolConf { platform_coin_ticker: platform, network, - confirmations, + confirmation_targets, }), proto => MmError::err(proto), } diff --git a/mm2src/mm2_main/src/mm2_tests/lightning_tests.rs b/mm2src/mm2_main/src/mm2_tests/lightning_tests.rs index a44a4fe5bf..63392401a7 100644 --- a/mm2src/mm2_main/src/mm2_tests/lightning_tests.rs +++ 
b/mm2src/mm2_main/src/mm2_tests/lightning_tests.rs @@ -47,19 +47,10 @@ fn start_lightning_nodes() -> (MarketMakerIt, MarketMakerIt, String, String) { "protocol_data":{ "platform": "tBTC-TEST-segwit", "network": "testnet", - "confirmations": { - "background": { - "default_fee_per_kb": 1012, - "n_blocks": 12 - }, - "normal": { - "default_fee_per_kb": 8000, - "n_blocks": 6 - }, - "high_priority": { - "default_fee_per_kb": 20000, - "n_blocks": 1 - } + "confirmation_targets": { + "background": 12, + "normal": 6, + "high_priority": 1 } } } @@ -148,19 +139,10 @@ fn test_enable_lightning() { "protocol_data":{ "platform": "tBTC-TEST-segwit", "network": "testnet", - "confirmations": { - "background": { - "default_fee_per_kb": 1012, - "n_blocks": 12 - }, - "normal": { - "default_fee_per_kb": 8000, - "n_blocks": 6 - }, - "high_priority": { - "default_fee_per_kb": 20000, - "n_blocks": 1 - } + "confirmation_targets": { + "background": 12, + "normal": 6, + "high_priority": 1 } } } @@ -346,19 +328,10 @@ fn test_sign_verify_message_lightning() { "protocol_data":{ "platform": "tBTC-TEST-segwit", "network": "testnet", - "confirmations": { - "background": { - "default_fee_per_kb": 1012, - "n_blocks": 12 - }, - "normal": { - "default_fee_per_kb": 8000, - "n_blocks": 6 - }, - "high_priority": { - "default_fee_per_kb": 20000, - "n_blocks": 1 - } + "confirmation_targets": { + "background": 12, + "normal": 6, + "high_priority": 1 } } } From 14d37d706388fe6030aa26a15ecaebcc298bfaf9 Mon Sep 17 00:00:00 2001 From: shamardy Date: Wed, 3 Aug 2022 22:13:01 +0200 Subject: [PATCH 04/33] wip: upgrade rust-lightning to v0.0.108 --- Cargo.lock | 90 +++-- deny.toml | 2 + mm2src/coins/Cargo.toml | 14 +- mm2src/coins/lightning.rs | 71 ++-- mm2src/coins/lightning/ln_conf.rs | 13 + mm2src/coins/lightning/ln_db.rs | 2 +- mm2src/coins/lightning/ln_events.rs | 80 ++--- .../lightning/ln_filesystem_persister.rs | 310 ++++++++++-------- mm2src/coins/lightning/ln_p2p.rs | 10 +- 
mm2src/coins/lightning/ln_serialization.rs | 2 +- mm2src/coins/lightning/ln_sql.rs | 4 +- mm2src/coins/lightning/ln_storage.rs | 24 +- mm2src/coins/lightning/ln_utils.rs | 4 +- mm2src/common/Cargo.toml | 2 +- mm2src/hw_common/src/primitives.rs | 24 ++ mm2src/mm2_bitcoin/chain/Cargo.toml | 2 +- mm2src/mm2_bitcoin/chain/src/transaction.rs | 3 +- 17 files changed, 398 insertions(+), 259 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ca6200024e..aedc27addb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -491,7 +491,7 @@ dependencies = [ "hkd32", "hmac 0.11.0", "ripemd160", - "secp256k1", + "secp256k1 0.20.3", "sha2 0.9.9", "subtle 2.4.0", "zeroize", @@ -499,13 +499,13 @@ dependencies = [ [[package]] name = "bitcoin" -version = "0.27.1" +version = "0.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a41df6ad9642c5c15ae312dd3d074de38fd3eb7cc87ad4ce10f90292a83fe4d" +checksum = "05bba324e6baf655b882df672453dbbc527bc938cadd27750ae510aaccc3a66a" dependencies = [ "bech32", "bitcoin_hashes", - "secp256k1", + "secp256k1 0.22.1", ] [[package]] @@ -1057,6 +1057,7 @@ dependencies = [ "lightning-invoice", "lightning-net-tokio", "lightning-persister", + "lightning-rapid-gossip-sync", "metrics", "mm2_core", "mm2_db", @@ -1080,7 +1081,8 @@ dependencies = [ "rust-ini", "rustls 0.20.4", "script", - "secp256k1", + "secp256k1 0.20.3", + "secp256k1 0.22.1", "ser_error", "ser_error_derive", "serde", @@ -1472,7 +1474,7 @@ dependencies = [ "primitives", "rpc_task", "rustc-hex 2.1.0", - "secp256k1", + "secp256k1 0.20.3", "ser_error", "ser_error_derive", "serde", @@ -2141,7 +2143,7 @@ dependencies = [ "mem", "rand 0.6.5", "rustc-hex 1.0.0", - "secp256k1", + "secp256k1 0.20.3", "serde", "serde_derive", "tiny-keccak 1.4.4", @@ -2921,7 +2923,7 @@ dependencies = [ "js-sys", "mm2_err_handle", "rusb", - "secp256k1", + "secp256k1 0.20.3", "serde", "serde_derive", "wasm-bindgen", @@ -3239,7 +3241,7 @@ dependencies = [ "primitives", "rand 0.6.5", "rustc-hex 
2.1.0", - "secp256k1", + "secp256k1 0.20.3", "serde", "serde_derive", ] @@ -3714,39 +3716,42 @@ dependencies = [ [[package]] name = "lightning" -version = "0.0.106" -source = "git+https://github.com/shamardy/rust-lightning?branch=0.0.106#af4a89c08c22d0110d386df0e288b2f825aaebbc" +version = "0.0.108" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d885bf509066af86ae85354c8959028ad6192c22a2657ef8271e94029d30f9d0" dependencies = [ "bitcoin", - "secp256k1", ] [[package]] name = "lightning-background-processor" -version = "0.0.106" -source = "git+https://github.com/shamardy/rust-lightning?branch=0.0.106#af4a89c08c22d0110d386df0e288b2f825aaebbc" +version = "0.0.108" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ba6fcb3cef50ae1027a89b40f847b771e831fad843673a350586e29b01b618b" dependencies = [ "bitcoin", "lightning", - "lightning-persister", + "lightning-rapid-gossip-sync", ] [[package]] name = "lightning-invoice" -version = "0.14.0" -source = "git+https://github.com/shamardy/rust-lightning?branch=0.0.106#af4a89c08c22d0110d386df0e288b2f825aaebbc" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eaafc1cebaf9ea8d2a57e60aae9fe6095554b8305714f8452cd8a20a3aa5b7ba" dependencies = [ "bech32", "bitcoin_hashes", "lightning", "num-traits", - "secp256k1", + "secp256k1 0.22.1", ] [[package]] name = "lightning-net-tokio" -version = "0.0.106" -source = "git+https://github.com/shamardy/rust-lightning?branch=0.0.106#af4a89c08c22d0110d386df0e288b2f825aaebbc" +version = "0.0.108" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f0170619152c4d6b947d5ed0de427b85691482a293e0cae52d4336a2220a776" dependencies = [ "bitcoin", "lightning", @@ -3755,8 +3760,9 @@ dependencies = [ [[package]] name = "lightning-persister" -version = "0.0.106" -source = "git+https://github.com/shamardy/rust-lightning?branch=0.0.106#af4a89c08c22d0110d386df0e288b2f825aaebbc" 
+version = "0.0.108" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e9f154ee5b60e576973da61379767569e2ad7b61a361d716a7d15f37df7e0bc" dependencies = [ "bitcoin", "libc", @@ -3764,6 +3770,16 @@ dependencies = [ "winapi", ] +[[package]] +name = "lightning-rapid-gossip-sync" +version = "0.0.108" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08b9947e17c7b97bb267bf3fe6bd51493c1b03a104ab81b246ef3f3ac9077ed9" +dependencies = [ + "bitcoin", + "lightning", +] + [[package]] name = "linked-hash-map" version = "0.5.3" @@ -4067,7 +4083,7 @@ dependencies = [ "rand 0.7.3", "regex", "rmp-serde", - "secp256k1", + "secp256k1 0.20.3", "serde", "serde_bytes", "serde_json", @@ -4222,7 +4238,7 @@ dependencies = [ "rpc", "rpc_task", "script", - "secp256k1", + "secp256k1 0.20.3", "ser_error", "ser_error_derive", "serde", @@ -6031,14 +6047,32 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97d03ceae636d0fed5bae6a7f4f664354c5f4fcedf6eef053fef17e49f837d0a" dependencies = [ "rand 0.6.5", - "secp256k1-sys", + "secp256k1-sys 0.4.2", +] + +[[package]] +name = "secp256k1" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26947345339603ae8395f68e2f3d85a6b0a8ddfe6315818e80b8504415099db0" +dependencies = [ + "secp256k1-sys 0.5.2", ] [[package]] name = "secp256k1-sys" -version = "0.4.0" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "957da2573cde917463ece3570eab4a0b3f19de6f1646cde62e6fd3868f566036" +dependencies = [ + "cc", +] + +[[package]] +name = "secp256k1-sys" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67e4b6455ee49f5901c8985b88f98fb0a0e1d90a6661f5a03f4888bd987dad29" +checksum = "152e20a0fd0519390fc43ab404663af8a0b794273d2a91d60ad4a39f13ffe110" dependencies = [ "cc", ] @@ -8796,7 +8830,7 @@ dependencies = [ "rand 0.7.3", "rand_core 0.5.1", 
"ripemd160", - "secp256k1", + "secp256k1 0.20.3", "sha2 0.9.9", "subtle 2.4.0", "zcash_note_encryption", diff --git a/deny.toml b/deny.toml index c27fd4a6de..28c94aab17 100644 --- a/deny.toml +++ b/deny.toml @@ -259,6 +259,8 @@ skip = [ { name = "rustls-pemfile", version = "*" }, { name = "scopeguard", version = "*" }, { name = "sct", version = "*" }, + { name = "secp256k1", version = "*" }, + { name = "secp256k1-sys", version = "*" }, { name = "semver", version = "*" }, { name = "send_wrapper", version = "*" }, { name = "sha2", version = "*" }, diff --git a/mm2src/coins/Cargo.toml b/mm2src/coins/Cargo.toml index 617df41d6f..77bb9863fb 100644 --- a/mm2src/coins/Cargo.toml +++ b/mm2src/coins/Cargo.toml @@ -17,7 +17,7 @@ async-trait = "0.1.52" base64 = "0.10.0" base58 = "0.2.0" bip32 = { version = "0.2.2", default-features = false, features = ["alloc", "secp256k1-ffi"] } -bitcoin = "0.27.1" +bitcoin = "0.28.1" bitcoin_hashes = "0.10.0" bitcrypto = { path = "../mm2_bitcoin/crypto" } bincode = "1.3.3" @@ -49,8 +49,9 @@ jsonrpc-core = "8.0.1" keys = { path = "../mm2_bitcoin/keys" } lazy_static = "1.4" libc = "0.2" -lightning = { git = "https://github.com/shamardy/rust-lightning", branch = "0.0.106" } -lightning-invoice = { git = "https://github.com/shamardy/rust-lightning", branch = "0.0.106" } +lightning = "0.0.108" +lightning-invoice = "0.16.0" +lightning-rapid-gossip-sync = "0.0.108" metrics = "0.12" mm2_core = { path = "../mm2_core" } mm2_err_handle = { path = "../mm2_err_handle" } @@ -70,6 +71,7 @@ rpc = { path = "../mm2_bitcoin/rpc" } rpc_task = { path = "../rpc_task" } script = { path = "../mm2_bitcoin/script" } secp256k1 = { version = "0.20" } +secp256k1v22 = { version = "0.22", package = "secp256k1" } ser_error = { path = "../derives/ser_error" } ser_error_derive = { path = "../derives/ser_error_derive" } serde = "1.0" @@ -98,9 +100,9 @@ web-sys = { version = "0.3.55", features = ["console", "Headers", "Request", "Re [target.'cfg(not(target_arch = 
"wasm32"))'.dependencies] dirs = { version = "1" } -lightning-background-processor = { git = "https://github.com/shamardy/rust-lightning", branch = "0.0.106" } -lightning-persister = { git = "https://github.com/shamardy/rust-lightning", branch = "0.0.106" } -lightning-net-tokio = { git = "https://github.com/shamardy/rust-lightning", branch = "0.0.106" } +lightning-background-processor = "0.0.108" +lightning-persister = "0.0.108" +lightning-net-tokio = "0.0.108" rust-ini = { version = "0.13" } rustls = { version = "0.20", features = ["dangerous_configuration"] } tokio = { version = "1.7" } diff --git a/mm2src/coins/lightning.rs b/mm2src/coins/lightning.rs index 06834a66d1..3c567e2110 100644 --- a/mm2src/coins/lightning.rs +++ b/mm2src/coins/lightning.rs @@ -12,6 +12,7 @@ mod ln_utils; use super::{lp_coinfind_or_err, DerivationMethod, MmCoinEnum}; use crate::lightning::ln_events::init_events_abort_handlers; +use crate::lightning::ln_serialization::PublicKeyForRPC; use crate::lightning::ln_sql::SqliteLightningDB; use crate::utxo::rpc_clients::UtxoRpcClientEnum; use crate::utxo::utxo_common::{big_decimal_from_sat_unsigned, UtxoTxBuilder}; @@ -39,12 +40,13 @@ use lightning::chain::keysinterface::{KeysInterface, KeysManager, Recipient}; use lightning::chain::Access; use lightning::ln::channelmanager::{ChannelDetails, MIN_FINAL_CLTV_EXPIRY}; use lightning::ln::{PaymentHash, PaymentPreimage}; -use lightning::routing::network_graph::{NetGraphMsgHandler, NetworkGraph}; +use lightning::routing::gossip::{NetworkGraph, P2PGossipSync}; use lightning::util::config::UserConfig; -use lightning_background_processor::BackgroundProcessor; +use lightning_background_processor::{BackgroundProcessor, GossipSync}; use lightning_invoice::payment; use lightning_invoice::utils::{create_invoice_from_channelmanager, DefaultRouter}; use lightning_invoice::{Invoice, InvoiceDescription}; +use lightning_rapid_gossip_sync::RapidGossipSync; use ln_conf::{ChannelOptions, LightningCoinConf, 
LightningProtocolConf, PlatformCoinConfirmationTargets}; use ln_db::{ClosedChannelsFilter, DBChannelDetails, DBPaymentInfo, DBPaymentsFilter, HTLCStatus, LightningDB, PaymentType}; @@ -58,7 +60,7 @@ use ln_events::LightningEventHandler; use ln_filesystem_persister::{LightningFilesystemPersister, LightningPersisterShared}; use ln_p2p::{connect_to_node, ConnectToNodeRes, PeerManager}; use ln_platform::{h256_json_from_txid, Platform}; -use ln_serialization::{InvoiceForRPC, NodeAddress, PublicKeyForRPC}; +use ln_serialization::{InvoiceForRPC, NodeAddress}; use ln_storage::{LightningStorage, NodesAddressesMapShared, Scorer}; use ln_utils::{ChainMonitor, ChannelManager}; use mm2_core::mm_ctx::MmArc; @@ -69,6 +71,7 @@ use parking_lot::Mutex as PaMutex; use rpc::v1::types::{Bytes as BytesJson, H256 as H256Json}; use script::{Builder, TransactionInputSigner}; use secp256k1::PublicKey; +use secp256k1v22::PublicKey as LnPublicKey; use serde::{Deserialize, Serialize}; use serde_json::Value as Json; use std::collections::hash_map::Entry; @@ -76,10 +79,12 @@ use std::collections::{HashMap, HashSet}; use std::fmt; use std::net::SocketAddr; use std::str::FromStr; -use std::sync::{Arc, Mutex}; +use std::sync::Arc; -type Router = DefaultRouter, Arc>; -type InvoicePayer = payment::InvoicePayer, Router, Arc>, Arc, E>; +pub const DEFAULT_INVOICE_EXPIRY: u32 = 3600; + +type Router = DefaultRouter>>, Arc>; +type InvoicePayer = payment::InvoicePayer, Router, Arc, Arc, E>; #[derive(Clone)] pub struct LightningCoin { @@ -175,7 +180,7 @@ impl LightningCoin { async fn keysend( &self, - destination: PublicKey, + destination: LnPublicKey, amount_msat: u64, final_cltv_expiry_delta: u32, ) -> SendPaymentResult { @@ -510,6 +515,7 @@ impl MarketCoinOps for LightningCoin { .keys_manager .get_node_secret(Recipient::Node) .map_err(|_| "Unsupported recipient".to_string())? 
+ .display_secret() .to_string()) } @@ -654,10 +660,14 @@ pub async fn start_lightning( // Initialize the KeysManager let keys_manager = ln_utils::init_keys_manager(ctx)?; - // Initialize the NetGraphMsgHandler. This is used for providing routes to send payments over - let network_graph = Arc::new(persister.get_network_graph(protocol_conf.network.into()).await?); + // Initialize the P2PGossipSync. This is used for providing routes to send payments over + let network_graph = Arc::new( + persister + .get_network_graph(protocol_conf.network.into(), logger.clone()) + .await?, + ); - let network_gossip = Arc::new(NetGraphMsgHandler::new( + let gossip_sync = Arc::new(P2PGossipSync::new( network_graph.clone(), None::>, logger.clone(), @@ -682,7 +692,7 @@ pub async fn start_lightning( ctx.clone(), params.listening_port, channel_manager.clone(), - network_gossip.clone(), + gossip_sync.clone(), keys_manager .get_node_secret(Recipient::Node) .map_to_mm(|_| EnableLightningError::UnsupportedMode("'start_lightning'".into(), "local node".into()))?, @@ -702,7 +712,7 @@ pub async fn start_lightning( )); // Initialize routing Scorer - let scorer = Arc::new(Mutex::new(persister.get_scorer(network_graph.clone()).await?)); + let scorer = Arc::new(persister.get_scorer(network_graph.clone(), logger.clone()).await?); spawn(ln_utils::persist_scorer_loop(persister.clone(), scorer.clone())); // Create InvoicePayer @@ -716,12 +726,17 @@ pub async fn start_lightning( let invoice_payer = Arc::new(InvoicePayer::new( channel_manager.clone(), router, - scorer, + scorer.clone(), logger.clone(), event_handler, - payment::RetryAttempts(params.payment_retries.unwrap_or(5)), + payment::Retry::Attempts(params.payment_retries.unwrap_or(5)), )); + let p2p_gossip_sync = + GossipSync::<_, Arc>>, Arc>>, _, _, _>::P2P( + gossip_sync.clone(), + ); + // Start Background Processing. Runs tasks periodically in the background to keep LN node operational. 
// InvoicePayer will act as our event handler as it handles some of the payments related events before // delegating it to LightningEventHandler. @@ -731,9 +746,10 @@ pub async fn start_lightning( invoice_payer.clone(), chain_monitor.clone(), channel_manager.clone(), - Some(network_gossip), + p2p_gossip_sync, peer_manager.clone(), logger, + Some(scorer), )); // If channel_nodes_data file exists, read channels nodes data from disk and reconnect to channel nodes/peers if possible. @@ -987,7 +1003,7 @@ fn apply_open_channel_filter(channel_details: &ChannelDetailsForRPC, filter: &Op let is_to_inbound_capacity_msat = filter.to_inbound_capacity_msat.is_none() || Some(&channel_details.inbound_capacity_msat) <= filter.to_inbound_capacity_msat.as_ref(); - let is_confirmed = filter.confirmed.is_none() || Some(&channel_details.confirmed) == filter.confirmed.as_ref(); + let is_confirmed = filter.confirmed.is_none() || Some(&channel_details.is_ready) == filter.confirmed.as_ref(); let is_usable = filter.is_usable.is_none() || Some(&channel_details.is_usable) == filter.is_usable.as_ref(); @@ -1035,8 +1051,8 @@ pub struct ChannelDetailsForRPC { pub inbound_capacity_msat: u64, // Channel is confirmed onchain, this means that funding_locked messages have been exchanged, // the channel is not currently being shut down, and the required confirmation count has been reached. - pub confirmed: bool, - // Channel is confirmed and funding_locked messages have been exchanged, the peer is connected, + pub is_ready: bool, + // Channel is confirmed and channel_ready messages have been exchanged, the peer is connected, // and the channel is not currently negotiating a shutdown. pub is_usable: bool, // A publicly-announced channel. 
@@ -1056,7 +1072,7 @@ impl From for ChannelDetailsForRPC { balance_msat: details.balance_msat, outbound_capacity_msat: details.outbound_capacity_msat, inbound_capacity_msat: details.inbound_capacity_msat, - confirmed: details.is_funding_locked, + is_ready: details.is_channel_ready, is_usable: details.is_usable, is_public: details.is_public, } @@ -1193,6 +1209,7 @@ pub struct GenerateInvoiceRequest { pub coin: String, pub amount_in_msat: Option, pub description: String, + pub expiry: Option, } #[derive(Serialize)] @@ -1226,8 +1243,16 @@ pub async fn generate_invoice( let keys_manager = ln_coin.keys_manager.clone(); let amount_in_msat = req.amount_in_msat; let description = req.description.clone(); + let expiry = req.expiry.unwrap_or(DEFAULT_INVOICE_EXPIRY); let invoice = async_blocking(move || { - create_invoice_from_channelmanager(&channel_manager, keys_manager, network, amount_in_msat, description) + create_invoice_from_channelmanager( + &channel_manager, + keys_manager, + network, + amount_in_msat, + description, + expiry, + ) }) .await?; @@ -1486,6 +1511,7 @@ pub async fn get_payment_details( pub struct CloseChannelReq { pub coin: String, pub channel_id: H256Json, + pub counterparty_node_id: PublicKeyForRPC, #[serde(default)] pub force_close: bool, } @@ -1497,11 +1523,12 @@ pub async fn close_channel(ctx: MmArc, req: CloseChannelReq) -> CloseChannelResu _ => return MmError::err(CloseChannelError::UnsupportedCoin(coin.ticker().to_string())), }; let channel_id = req.channel_id.0; + let counterparty_node_id: LnPublicKey = req.counterparty_node_id.into(); if req.force_close { async_blocking(move || { ln_coin .channel_manager - .force_close_channel(&channel_id) + .force_close_channel(&channel_id, &counterparty_node_id) .map_to_mm(|e| CloseChannelError::CloseChannelError(format!("{:?}", e))) }) .await?; @@ -1509,7 +1536,7 @@ pub async fn close_channel(ctx: MmArc, req: CloseChannelReq) -> CloseChannelResu async_blocking(move || { ln_coin .channel_manager - 
.close_channel(&channel_id) + .close_channel(&channel_id, &counterparty_node_id) .map_to_mm(|e| CloseChannelError::CloseChannelError(format!("{:?}", e))) }) .await?; diff --git a/mm2src/coins/lightning/ln_conf.rs b/mm2src/coins/lightning/ln_conf.rs index 9c1a1adec0..a7ed4c3eac 100644 --- a/mm2src/coins/lightning/ln_conf.rs +++ b/mm2src/coins/lightning/ln_conf.rs @@ -153,6 +153,8 @@ impl From for ChannelHandshakeConfig { pub struct CounterpartyLimits { /// Minimum allowed satoshis when an inbound channel is funded. pub min_funding_sats: Option, + /// Maximum allowed satoshis when an inbound channel is funded. + pub max_funding_sats: Option, /// The remote node sets a limit on the minimum size of HTLCs we can send to them. This allows /// us to limit the maximum minimum-size they can require. pub max_htlc_minimum_msat: Option, @@ -172,6 +174,9 @@ pub struct CounterpartyLimits { pub force_announced_channel_preference: Option, /// Set to the amount of time we're willing to wait to claim money back to us. pub our_locktime_limit: Option, + /// When set an outbound channel can be used straight away without waiting for any on-chain confirmations. 
+ /// https://docs.rs/lightning/latest/lightning/util/config/struct.ChannelHandshakeLimits.html#structfield.trust_own_funding_0conf + pub allow_outbound_0conf: Option, } impl From for ChannelHandshakeLimits { @@ -182,6 +187,10 @@ impl From for ChannelHandshakeLimits { channel_handshake_limits.min_funding_satoshis = sats; } + if let Some(sats) = limits.max_funding_sats { + channel_handshake_limits.max_funding_satoshis = sats; + } + if let Some(msat) = limits.max_htlc_minimum_msat { channel_handshake_limits.max_htlc_minimum_msat = msat; } @@ -210,6 +219,10 @@ impl From for ChannelHandshakeLimits { channel_handshake_limits.their_to_self_delay = blocks; } + if let Some(is_0conf) = limits.allow_outbound_0conf { + channel_handshake_limits.trust_own_funding_0conf = is_0conf; + } + channel_handshake_limits } } diff --git a/mm2src/coins/lightning/ln_db.rs b/mm2src/coins/lightning/ln_db.rs index b47a72361a..248c164b06 100644 --- a/mm2src/coins/lightning/ln_db.rs +++ b/mm2src/coins/lightning/ln_db.rs @@ -3,7 +3,7 @@ use common::{now_ms, PagingOptionsEnum}; use db_common::sqlite::rusqlite::types::FromSqlError; use derive_more::Display; use lightning::ln::{PaymentHash, PaymentPreimage, PaymentSecret}; -use secp256k1::PublicKey; +use secp256k1v22::PublicKey; use serde::{Deserialize, Serialize}; use std::str::FromStr; diff --git a/mm2src/coins/lightning/ln_events.rs b/mm2src/coins/lightning/ln_events.rs index 45e9ed8c9f..e8182e711b 100644 --- a/mm2src/coins/lightning/ln_events.rs +++ b/mm2src/coins/lightning/ln_events.rs @@ -16,7 +16,7 @@ use lightning::util::events::{Event, EventHandler, PaymentPurpose}; use parking_lot::Mutex as PaMutex; use rand::Rng; use script::{Builder, SignatureVersion}; -use secp256k1::Secp256k1; +use secp256k1v22::Secp256k1; use std::convert::TryFrom; use std::sync::Arc; use utxo_signer::with_key_pair::sign_tx; @@ -39,18 +39,20 @@ impl EventHandler for LightningEventHandler { channel_value_satoshis, output_script, user_channel_id, + counterparty_node_id, 
} => self.handle_funding_generation_ready( *temporary_channel_id, *channel_value_satoshis, output_script, *user_channel_id, + counterparty_node_id, ), Event::PaymentReceived { payment_hash, - amt, + amount_msat, purpose, - } => self.handle_payment_received(*payment_hash, *amt, purpose), + } => self.handle_payment_received(*payment_hash, *amount_msat, purpose), Event::PaymentSent { payment_preimage, @@ -59,6 +61,8 @@ impl EventHandler for LightningEventHandler { .. } => self.handle_payment_sent(*payment_preimage, *payment_hash, *fee_paid_msat), + Event::PaymentClaimed { payment_hash, amount_msat, purpose } => self.handle_payment_claimed(*payment_hash, *amount_msat, purpose), + Event::PaymentFailed { payment_hash, .. } => self.handle_payment_failed(*payment_hash), Event::PendingHTLCsForwardable { time_forwardable } => self.handle_pending_htlcs_forwards(*time_forwardable), @@ -66,9 +70,11 @@ impl EventHandler for LightningEventHandler { Event::SpendableOutputs { outputs } => self.handle_spendable_outputs(outputs.clone()), // Todo: an RPC for total amount earned - Event::PaymentForwarded { fee_earned_msat, claim_from_onchain_tx } => info!( - "Received a fee of {} milli-satoshis for a successfully forwarded payment through our {} lightning node. Was the forwarded HTLC claimed by our counterparty via an on-chain transaction?: {}", + Event::PaymentForwarded { fee_earned_msat, claim_from_onchain_tx, prev_channel_id, next_channel_id} => info!( + "Received a fee of {} milli-satoshis for a successfully forwarded payment from {} to {} through our {} lightning node. 
Was the forwarded HTLC claimed by our counterparty via an on-chain transaction?: {}", fee_earned_msat.unwrap_or_default(), + prev_channel_id.map(hex::encode).unwrap_or_else(|| "unknown".into()), + next_channel_id.map(hex::encode).unwrap_or_else(|| "unknown".into()), self.platform.coin.ticker(), claim_from_onchain_tx, ), @@ -250,10 +256,11 @@ impl LightningEventHandler { channel_value_satoshis: u64, output_script: &Script, user_channel_id: u64, + counterparty_node_id: &LnPublicKey, ) { info!( - "Handling FundingGenerationReady event for internal channel id: {}", - user_channel_id + "Handling FundingGenerationReady event for internal channel id: {} with: {}", + user_channel_id, counterparty_node_id ); let funding_tx = match sign_funding_transaction(user_channel_id, output_script, self.platform.clone()) { Ok(tx) => tx, @@ -268,9 +275,9 @@ impl LightningEventHandler { }; let funding_txid = funding_tx.txid(); // Give the funding transaction back to LDK for opening the channel. - if let Err(e) = self - .channel_manager - .funding_transaction_generated(&temporary_channel_id, funding_tx) + if let Err(e) = + self.channel_manager + .funding_transaction_generated(&temporary_channel_id, counterparty_node_id, funding_tx) { error!("{:?}", e); return; @@ -290,55 +297,50 @@ impl LightningEventHandler { }); } - fn handle_payment_received(&self, payment_hash: PaymentHash, amt: u64, purpose: &PaymentPurpose) { + fn handle_payment_received(&self, payment_hash: PaymentHash, _amount_msat: u64, purpose: &PaymentPurpose) { info!( "Handling PaymentReceived event for payment_hash: {}", hex::encode(payment_hash.0) ); - let (payment_preimage, payment_secret) = match purpose { - PaymentPurpose::InvoicePayment { - payment_preimage, - payment_secret, - } => match payment_preimage { - Some(preimage) => (*preimage, Some(*payment_secret)), + let payment_preimage = match purpose { + PaymentPurpose::InvoicePayment { payment_preimage, .. 
} => match payment_preimage { + Some(preimage) => *preimage, None => return, }, - PaymentPurpose::SpontaneousPayment(preimage) => (*preimage, None), - }; - let status = match self.channel_manager.claim_funds(payment_preimage) { - true => { - info!( - "Received an amount of {} millisatoshis for payment hash {}", - amt, - hex::encode(payment_hash.0) - ); - HTLCStatus::Succeeded - }, - false => HTLCStatus::Failed, + PaymentPurpose::SpontaneousPayment(preimage) => *preimage, }; + self.channel_manager.claim_funds(payment_preimage); + } + + fn handle_payment_claimed(&self, payment_hash: PaymentHash, amount_msat: u64, purpose: &PaymentPurpose) { + info!( + "Received an amount of {} millisatoshis for payment hash {}", + amount_msat, + hex::encode(payment_hash.0) + ); let db = self.db.clone(); - match purpose { - PaymentPurpose::InvoicePayment { .. } => spawn(async move { + match *purpose { + PaymentPurpose::InvoicePayment { payment_preimage, .. } => spawn(async move { if let Ok(Some(mut payment_info)) = db.get_payment_from_db(payment_hash).await.error_log_passthrough() { - payment_info.preimage = Some(payment_preimage); + payment_info.preimage = payment_preimage; payment_info.status = HTLCStatus::Succeeded; - payment_info.amt_msat = Some(amt as i64); + payment_info.amt_msat = Some(amount_msat as i64); payment_info.last_updated = (now_ms() / 1000) as i64; if let Err(e) = db.add_or_update_payment_in_db(payment_info).await { error!("Unable to update payment information in DB: {}", e); } } }), - PaymentPurpose::SpontaneousPayment(_) => { + PaymentPurpose::SpontaneousPayment(payment_preimage) => { let payment_info = DBPaymentInfo { payment_hash, payment_type: PaymentType::InboundPayment, description: "".into(), preimage: Some(payment_preimage), - secret: payment_secret, - amt_msat: Some(amt as i64), + secret: None, + amt_msat: Some(amount_msat as i64), fee_paid_msat: None, - status, + status: HTLCStatus::Succeeded, created_at: (now_ms() / 1000) as i64, last_updated: (now_ms() / 
1000) as i64, }; @@ -522,7 +524,7 @@ impl LightningEventHandler { fn handle_open_channel_request( &self, temporary_channel_id: [u8; 32], - counterparty_node_id: PublicKey, + counterparty_node_id: LnPublicKey, funding_satoshis: u64, push_msat: u64, ) { @@ -538,7 +540,7 @@ impl LightningEventHandler { if let Ok(last_channel_rpc_id) = db.get_last_channel_rpc_id().await.error_log_passthrough() { let user_channel_id = last_channel_rpc_id as u64 + 1; if channel_manager - .accept_inbound_channel(&temporary_channel_id, user_channel_id) + .accept_inbound_channel(&temporary_channel_id, &counterparty_node_id, user_channel_id) .is_ok() { let is_public = match channel_manager diff --git a/mm2src/coins/lightning/ln_filesystem_persister.rs b/mm2src/coins/lightning/ln_filesystem_persister.rs index 4a228e1e85..301ff69393 100644 --- a/mm2src/coins/lightning/ln_filesystem_persister.rs +++ b/mm2src/coins/lightning/ln_filesystem_persister.rs @@ -4,33 +4,29 @@ use crate::lightning::ln_utils::{ChainMonitor, ChannelManager}; use async_trait::async_trait; use bitcoin::blockdata::constants::genesis_block; use bitcoin::Network; -use bitcoin_hashes::hex::ToHex; use common::async_blocking; use common::log::LogState; -use lightning::chain::channelmonitor::{ChannelMonitor, ChannelMonitorUpdate}; -use lightning::chain::keysinterface::{InMemorySigner, KeysManager, Sign}; -use lightning::chain::transaction::OutPoint; -use lightning::chain::{chainmonitor, ChannelMonitorUpdateErr}; -use lightning::routing::network_graph::NetworkGraph; -use lightning::routing::scoring::ProbabilisticScoringParameters; -use lightning::util::ser::{Readable, ReadableArgs, Writeable}; -use lightning_background_processor::Persister; +use lightning::chain::keysinterface::{InMemorySigner, KeysManager}; +use lightning::routing::gossip::NetworkGraph; +use lightning::routing::scoring::{ProbabilisticScorer, ProbabilisticScoringParameters}; +use lightning::util::persist::{KVStorePersister, Persister}; +use 
lightning::util::ser::{ReadableArgs, Writeable}; use lightning_persister::FilesystemPersister; use mm2_io::fs::check_dir_operations; -use secp256k1::PublicKey; +use secp256k1v22::PublicKey; use std::collections::HashMap; use std::fs; -use std::io::{BufReader, BufWriter}; +use std::io::{BufReader, BufWriter, Error}; use std::net::SocketAddr; use std::ops::Deref; -use std::path::{Path, PathBuf}; +use std::path::PathBuf; use std::str::FromStr; use std::sync::{Arc, Mutex}; -#[cfg(target_family = "unix")] use std::os::unix::io::AsRawFd; +// #[cfg(target_family = "unix")] use std::os::unix::io::AsRawFd; -#[cfg(target_family = "windows")] -use {std::ffi::OsStr, std::os::windows::ffi::OsStrExt}; +// #[cfg(target_family = "windows")] +// use {std::ffi::OsStr, std::os::windows::ffi::OsStrExt}; pub struct LightningFilesystemPersister { main_path: PathBuf, @@ -103,6 +99,12 @@ impl LightningFilesystemPersister { } } +impl KVStorePersister for LightningFilesystemPersister { + fn persist(&self, key: &str, object: &W) -> std::io::Result<()> { + self.channels_persister.persist(key, object) + } +} + #[derive(Clone)] pub struct LightningPersisterShared(pub Arc); @@ -111,11 +113,20 @@ impl Deref for LightningPersisterShared { fn deref(&self) -> &LightningFilesystemPersister { self.0.deref() } } -impl Persister, Arc, Arc, Arc, Arc> - for LightningPersisterShared +impl + Persister< + '_, + InMemorySigner, + Arc, + Arc, + Arc, + Arc, + Arc, + Scorer, + > for LightningPersisterShared { - fn persist_manager(&self, channel_manager: &ChannelManager) -> Result<(), std::io::Error> { - FilesystemPersister::persist_manager(self.0.main_path().display().to_string(), channel_manager)?; + fn persist_manager(&self, channel_manager: &ChannelManager) -> Result<(), Error> { + self.persist("manager", channel_manager)?; if let Some(backup_path) = self.0.backup_path() { let file = fs::OpenOptions::new() .create(true) @@ -127,9 +138,8 @@ impl Persister, Arc, Arc Result<(), std::io::Error> { - if 
FilesystemPersister::persist_network_graph(self.0.main_path().display().to_string(), network_graph).is_err() - { + fn persist_graph(&self, network_graph: &NetworkGraph>) -> Result<(), Error> { + if self.persist("network_graph", network_graph).is_err() { // Persistence errors here are non-fatal as we can just fetch the routing graph // again later, but they may indicate a disk error which could be fatal elsewhere. eprintln!("Warning: Failed to persist network graph, check your disk and permissions"); @@ -137,120 +147,122 @@ impl Persister, Arc, Arc { - if $e != 0 { - return Ok(()); - } else { - return Err(std::io::Error::last_os_error()); - } - }; -} -#[cfg(target_family = "windows")] -fn path_to_windows_str>(path: T) -> Vec { - path.as_ref().encode_wide().chain(Some(0)).collect() + fn persist_scorer(&self, _scorer: &Scorer) -> Result<(), Error> { todo!() } } -fn write_monitor_to_file( - mut path: PathBuf, - filename: String, - monitor: &ChannelMonitor, -) -> std::io::Result<()> { - // Do a crazy dance with lots of fsync()s to be overly cautious here... - // We never want to end up in a state where we've lost the old data, or end up using the - // old data on power loss after we've returned. - // The way to atomically write a file on Unix platforms is: - // open(tmpname), write(tmpfile), fsync(tmpfile), close(tmpfile), rename(), fsync(dir) - path.push(filename); - let filename_with_path = path.display().to_string(); - let tmp_filename = format!("{}.tmp", filename_with_path); - - { - let mut f = fs::File::create(&tmp_filename)?; - monitor.write(&mut f)?; - f.sync_all()?; - } - // Fsync the parent directory on Unix. 
- #[cfg(target_family = "unix")] - { - fs::rename(&tmp_filename, &filename_with_path)?; - let path = Path::new(&filename_with_path).parent().ok_or_else(|| { - std::io::Error::new( - std::io::ErrorKind::NotFound, - format!("can't find parent dir for {}", filename_with_path), - ) - })?; - let dir_file = fs::OpenOptions::new().read(true).open(path)?; - unsafe { - libc::fsync(dir_file.as_raw_fd()); - } - } - #[cfg(target_family = "windows")] - { - let src = PathBuf::from(tmp_filename); - let dst = PathBuf::from(filename_with_path.clone()); - if Path::new(&filename_with_path).exists() { - unsafe { - winapi::um::winbase::ReplaceFileW( - path_to_windows_str(dst).as_ptr(), - path_to_windows_str(src).as_ptr(), - std::ptr::null(), - winapi::um::winbase::REPLACEFILE_IGNORE_MERGE_ERRORS, - std::ptr::null_mut() as *mut winapi::ctypes::c_void, - std::ptr::null_mut() as *mut winapi::ctypes::c_void, - ) - }; - } else { - call!(unsafe { - winapi::um::winbase::MoveFileExW( - path_to_windows_str(src).as_ptr(), - path_to_windows_str(dst).as_ptr(), - winapi::um::winbase::MOVEFILE_WRITE_THROUGH | winapi::um::winbase::MOVEFILE_REPLACE_EXISTING, - ) - }); - } - } - Ok(()) -} - -impl chainmonitor::Persist for LightningFilesystemPersister { - fn persist_new_channel( - &self, - funding_txo: OutPoint, - monitor: &ChannelMonitor, - update_id: chainmonitor::MonitorUpdateId, - ) -> Result<(), ChannelMonitorUpdateErr> { - self.channels_persister - .persist_new_channel(funding_txo, monitor, update_id)?; - if let Some(backup_path) = self.monitor_backup_path() { - let filename = format!("{}_{}", funding_txo.txid.to_hex(), funding_txo.index); - write_monitor_to_file(backup_path, filename, monitor) - .map_err(|_| ChannelMonitorUpdateErr::PermanentFailure)?; - } - Ok(()) - } - - fn update_persisted_channel( - &self, - funding_txo: OutPoint, - update: &Option, - monitor: &ChannelMonitor, - update_id: chainmonitor::MonitorUpdateId, - ) -> Result<(), ChannelMonitorUpdateErr> { - self.channels_persister - 
.update_persisted_channel(funding_txo, update, monitor, update_id)?; - if let Some(backup_path) = self.monitor_backup_path() { - let filename = format!("{}_{}", funding_txo.txid.to_hex(), funding_txo.index); - write_monitor_to_file(backup_path, filename, monitor) - .map_err(|_| ChannelMonitorUpdateErr::PermanentFailure)?; - } - Ok(()) - } -} +// #[cfg(target_family = "windows")] +// macro_rules! call { +// ($e: expr) => { +// if $e != 0 { +// return Ok(()); +// } else { +// return Err(std::io::Error::last_os_error()); +// } +// }; +// } + +// #[cfg(target_family = "windows")] +// fn path_to_windows_str>(path: T) -> Vec { +// path.as_ref().encode_wide().chain(Some(0)).collect() +// } + +// fn write_monitor_to_file( +// mut path: PathBuf, +// filename: String, +// monitor: &ChannelMonitor, +// ) -> std::io::Result<()> { +// // Do a crazy dance with lots of fsync()s to be overly cautious here... +// // We never want to end up in a state where we've lost the old data, or end up using the +// // old data on power loss after we've returned. +// // The way to atomically write a file on Unix platforms is: +// // open(tmpname), write(tmpfile), fsync(tmpfile), close(tmpfile), rename(), fsync(dir) +// path.push(filename); +// let filename_with_path = path.display().to_string(); +// let tmp_filename = format!("{}.tmp", filename_with_path); +// +// { +// let mut f = fs::File::create(&tmp_filename)?; +// monitor.write(&mut f)?; +// f.sync_all()?; +// } +// // Fsync the parent directory on Unix. 
+// #[cfg(target_family = "unix")] +// { +// fs::rename(&tmp_filename, &filename_with_path)?; +// let path = Path::new(&filename_with_path).parent().ok_or_else(|| { +// std::io::Error::new( +// std::io::ErrorKind::NotFound, +// format!("can't find parent dir for {}", filename_with_path), +// ) +// })?; +// let dir_file = fs::OpenOptions::new().read(true).open(path)?; +// unsafe { +// libc::fsync(dir_file.as_raw_fd()); +// } +// } +// #[cfg(target_family = "windows")] +// { +// let src = PathBuf::from(tmp_filename); +// let dst = PathBuf::from(filename_with_path.clone()); +// if Path::new(&filename_with_path).exists() { +// unsafe { +// winapi::um::winbase::ReplaceFileW( +// path_to_windows_str(dst).as_ptr(), +// path_to_windows_str(src).as_ptr(), +// std::ptr::null(), +// winapi::um::winbase::REPLACEFILE_IGNORE_MERGE_ERRORS, +// std::ptr::null_mut() as *mut winapi::ctypes::c_void, +// std::ptr::null_mut() as *mut winapi::ctypes::c_void, +// ) +// }; +// } else { +// call!(unsafe { +// winapi::um::winbase::MoveFileExW( +// path_to_windows_str(src).as_ptr(), +// path_to_windows_str(dst).as_ptr(), +// winapi::um::winbase::MOVEFILE_WRITE_THROUGH | winapi::um::winbase::MOVEFILE_REPLACE_EXISTING, +// ) +// }); +// } +// } +// Ok(()) +// } + +// impl chainmonitor::Persist for LightningFilesystemPersister { +// fn persist_new_channel( +// &self, +// funding_txo: OutPoint, +// monitor: &ChannelMonitor, +// update_id: chainmonitor::MonitorUpdateId, +// ) -> Result<(), ChannelMonitorUpdateErr> { +// self.channels_persister +// .persist_new_channel(funding_txo, monitor, update_id)?; +// if let Some(backup_path) = self.monitor_backup_path() { +// let filename = format!("{}_{}", funding_txo.txid.to_hex(), funding_txo.index); +// write_monitor_to_file(backup_path, filename, monitor) +// .map_err(|_| ChannelMonitorUpdateErr::PermanentFailure)?; +// } +// Ok(()) +// } +// +// fn update_persisted_channel( +// &self, +// funding_txo: OutPoint, +// update: &Option, +// monitor: 
&ChannelMonitor, +// update_id: chainmonitor::MonitorUpdateId, +// ) -> Result<(), ChannelMonitorUpdateErr> { +// self.channels_persister +// .update_persisted_channel(funding_txo, update, monitor, update_id)?; +// if let Some(backup_path) = self.monitor_backup_path() { +// let filename = format!("{}_{}", funding_txo.txid.to_hex(), funding_txo.index); +// write_monitor_to_file(backup_path, filename, monitor) +// .map_err(|_| ChannelMonitorUpdateErr::PermanentFailure)?; +// } +// Ok(()) +// } +// } #[async_trait] impl LightningStorage for LightningFilesystemPersister { @@ -353,40 +365,52 @@ impl LightningStorage for LightningFilesystemPersister { .await } - async fn get_network_graph(&self, network: Network) -> Result { + async fn get_network_graph( + &self, + network: Network, + logger: Arc, + ) -> Result>, Self::Error> { let path = self.network_graph_path(); if !path.exists() { - return Ok(NetworkGraph::new(genesis_block(network).header.block_hash())); + return Ok(NetworkGraph::new(genesis_block(network).header.block_hash(), logger)); } async_blocking(move || { let file = fs::File::open(path)?; common::log::info!("Reading the saved lightning network graph from file, this can take some time!"); - NetworkGraph::read(&mut BufReader::new(file)) + NetworkGraph::read(&mut BufReader::new(file), logger) .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string())) }) .await } - async fn get_scorer(&self, network_graph: Arc) -> Result { + async fn get_scorer( + &self, + network_graph: Arc>>, + logger: Arc, + ) -> Result { let path = self.scorer_path(); if !path.exists() { - return Ok(Scorer::new(ProbabilisticScoringParameters::default(), network_graph)); + return Ok(Mutex::new(ProbabilisticScorer::new( + ProbabilisticScoringParameters::default(), + network_graph, + logger, + ))); } async_blocking(move || { let file = fs::File::open(path)?; - Scorer::read( + let scorer = ProbabilisticScorer::read( &mut BufReader::new(file), - 
(ProbabilisticScoringParameters::default(), network_graph), + (ProbabilisticScoringParameters::default(), network_graph, logger), ) - .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string())) + .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string()))?; + Ok(Mutex::new(scorer)) }) .await } - async fn save_scorer(&self, scorer: Arc>) -> Result<(), Self::Error> { + async fn save_scorer(&self, scorer: Arc) -> Result<(), Self::Error> { let path = self.scorer_path(); async_blocking(move || { - let scorer = scorer.lock().unwrap(); let file = fs::OpenOptions::new() .create(true) .write(true) diff --git a/mm2src/coins/lightning/ln_p2p.rs b/mm2src/coins/lightning/ln_p2p.rs index e3f6108d09..bd871bd2dd 100644 --- a/mm2src/coins/lightning/ln_p2p.rs +++ b/mm2src/coins/lightning/ln_p2p.rs @@ -5,18 +5,18 @@ use derive_more::Display; use lightning::chain::Access; use lightning::ln::msgs::NetAddress; use lightning::ln::peer_handler::{IgnoringMessageHandler, MessageHandler, SimpleArcPeerManager}; -use lightning::routing::network_graph::{NetGraphMsgHandler, NetworkGraph}; +use lightning::routing::gossip::{NetworkGraph, P2PGossipSync}; use lightning_net_tokio::SocketDescriptor; use mm2_net::ip_addr::fetch_external_ip; use rand::RngCore; -use secp256k1::SecretKey; +use secp256k1v22::{PublicKey, SecretKey}; use std::net::{IpAddr, Ipv4Addr}; use tokio::net::TcpListener; const TRY_RECONNECTING_TO_NODE_INTERVAL: f64 = 60.; const BROADCAST_NODE_ANNOUNCEMENT_INTERVAL: u64 = 600; -type NetworkGossip = NetGraphMsgHandler, Arc, Arc>; +pub type NetworkGossip = P2PGossipSync>>, Arc, Arc>; pub type PeerManager = SimpleArcPeerManager; @@ -168,7 +168,7 @@ pub async fn init_peer_manager( ctx: MmArc, listening_port: u16, channel_manager: Arc, - network_gossip: Arc, + gossip_sync: Arc, node_secret: SecretKey, logger: Arc, ) -> EnableLightningResult> { @@ -185,7 +185,7 @@ pub async fn init_peer_manager( rand::thread_rng().fill_bytes(&mut ephemeral_bytes); 
let lightning_msg_handler = MessageHandler { chan_handler: channel_manager, - route_handler: network_gossip, + route_handler: gossip_sync, }; // IgnoringMessageHandler is used as custom message types (experimental and application-specific messages) is not needed diff --git a/mm2src/coins/lightning/ln_serialization.rs b/mm2src/coins/lightning/ln_serialization.rs index 82f0a700c7..dce582977f 100644 --- a/mm2src/coins/lightning/ln_serialization.rs +++ b/mm2src/coins/lightning/ln_serialization.rs @@ -1,5 +1,5 @@ use lightning_invoice::Invoice; -use secp256k1::PublicKey; +use secp256k1v22::PublicKey; use serde::{de, Serialize, Serializer}; use std::fmt; use std::net::{SocketAddr, ToSocketAddrs}; diff --git a/mm2src/coins/lightning/ln_sql.rs b/mm2src/coins/lightning/ln_sql.rs index 2cbeb98116..ac21b6351e 100644 --- a/mm2src/coins/lightning/ln_sql.rs +++ b/mm2src/coins/lightning/ln_sql.rs @@ -9,7 +9,7 @@ use db_common::sqlite::{h256_option_slice_from_row, h256_slice_from_row, offset_ sql_text_conversion_err, string_from_row, validate_table_name, SqlNamedParams, SqliteConnShared, CHECK_TABLE_EXISTS_SQL}; use lightning::ln::{PaymentHash, PaymentPreimage, PaymentSecret}; -use secp256k1::PublicKey; +use secp256k1v22::PublicKey; use std::convert::TryInto; use std::str::FromStr; @@ -890,7 +890,7 @@ mod tests { use db_common::sqlite::rusqlite::Connection; use rand::distributions::Alphanumeric; use rand::{Rng, RngCore}; - use secp256k1::{Secp256k1, SecretKey}; + use secp256k1v22::{Secp256k1, SecretKey}; use std::num::NonZeroUsize; use std::sync::{Arc, Mutex}; diff --git a/mm2src/coins/lightning/ln_storage.rs b/mm2src/coins/lightning/ln_storage.rs index bd44fdc0e1..b57b803361 100644 --- a/mm2src/coins/lightning/ln_storage.rs +++ b/mm2src/coins/lightning/ln_storage.rs @@ -1,16 +1,18 @@ use async_trait::async_trait; use bitcoin::Network; -use lightning::routing::network_graph::NetworkGraph; +use common::log::LogState; +use lightning::routing::gossip::NetworkGraph; use 
lightning::routing::scoring::ProbabilisticScorer; use parking_lot::Mutex as PaMutex; -use secp256k1::PublicKey; +use secp256k1v22::PublicKey; use std::collections::HashMap; use std::net::SocketAddr; use std::sync::{Arc, Mutex}; pub type NodesAddressesMap = HashMap; pub type NodesAddressesMapShared = Arc>; -pub type Scorer = ProbabilisticScorer>; + +pub type Scorer = Mutex>>, Arc>>; #[async_trait] pub trait LightningStorage { @@ -21,13 +23,21 @@ pub trait LightningStorage { async fn is_fs_initialized(&self) -> Result; - async fn get_nodes_addresses(&self) -> Result, Self::Error>; + async fn get_nodes_addresses(&self) -> Result; async fn save_nodes_addresses(&self, nodes_addresses: NodesAddressesMapShared) -> Result<(), Self::Error>; - async fn get_network_graph(&self, network: Network) -> Result; + async fn get_network_graph( + &self, + network: Network, + logger: Arc, + ) -> Result>, Self::Error>; - async fn get_scorer(&self, network_graph: Arc) -> Result; + async fn get_scorer( + &self, + network_graph: Arc>>, + logger: Arc, + ) -> Result; - async fn save_scorer(&self, scorer: Arc>) -> Result<(), Self::Error>; + async fn save_scorer(&self, scorer: Arc) -> Result<(), Self::Error>; } diff --git a/mm2src/coins/lightning/ln_utils.rs b/mm2src/coins/lightning/ln_utils.rs index fb292a6797..25b853801b 100644 --- a/mm2src/coins/lightning/ln_utils.rs +++ b/mm2src/coins/lightning/ln_utils.rs @@ -18,7 +18,7 @@ use lightning::util::ser::ReadableArgs; use mm2_core::mm_ctx::MmArc; use std::fs::File; use std::path::PathBuf; -use std::sync::{Arc, Mutex}; +use std::sync::Arc; use std::time::SystemTime; const SCORER_PERSIST_INTERVAL: u64 = 600; @@ -237,7 +237,7 @@ pub async fn init_channel_manager( Ok((chain_monitor, channel_manager)) } -pub async fn persist_scorer_loop(persister: LightningPersisterShared, scorer: Arc>) { +pub async fn persist_scorer_loop(persister: LightningPersisterShared, scorer: Arc) { loop { if let Err(e) = persister.save_scorer(scorer.clone()).await { 
log::warn!( diff --git a/mm2src/common/Cargo.toml b/mm2src/common/Cargo.toml index ddb1e0015c..cbefbd3f41 100644 --- a/mm2src/common/Cargo.toml +++ b/mm2src/common/Cargo.toml @@ -29,7 +29,7 @@ http = "0.2" http-body = "0.1" itertools = "0.10" lazy_static = "1.4" -lightning = { git = "https://github.com/shamardy/rust-lightning", branch = "0.0.106" } +lightning = "0.0.108" log = "0.4.8" parking_lot = { version = "0.12.0", features = ["nightly"] } parking_lot_core = { version = "0.6", features = ["nightly"] } diff --git a/mm2src/hw_common/src/primitives.rs b/mm2src/hw_common/src/primitives.rs index 1c8d782b7a..b502190e96 100644 --- a/mm2src/hw_common/src/primitives.rs +++ b/mm2src/hw_common/src/primitives.rs @@ -5,6 +5,30 @@ pub use bip32::{ChildNumber, DerivationPath, Error as Bip32Error, ExtendedPublic pub type Secp256k1ExtendedPublicKey = ExtendedPublicKey; pub type XPub = String; +// #[derive(Clone, Debug, PartialEq)] +// pub struct PublicKeyInternal(pub secp256k1::PublicKey); +// +// impl bip32::PublicKey for PublicKeyInternal { +// fn from_bytes(bytes: bip32::PublicKeyBytes) -> bip32::Result { +// Ok(PublicKeyInternal(secp256k1::PublicKey::from_slice(&bytes).map_err(|_| bip32::Error::Crypto)?)) +// } +// +// fn to_bytes(&self) -> bip32::PublicKeyBytes { +// self.0.serialize() +// } +// +// fn derive_child(&self, other: bip32::PrivateKeyBytes) -> bip32::Result { +// let engine = secp256k1::Secp256k1::::verification_only(); +// +// let mut child_key = self.0; +// child_key +// .add_exp_assign(&engine, &other) +// .map_err(|_| bip32::Error::Crypto)?; +// +// Ok(PublicKeyInternal(child_key)) +// } +// } + #[derive(Clone, Copy)] pub enum EcdsaCurve { Secp256k1, diff --git a/mm2src/mm2_bitcoin/chain/Cargo.toml b/mm2src/mm2_bitcoin/chain/Cargo.toml index 4263b584cb..90429f7adf 100644 --- a/mm2src/mm2_bitcoin/chain/Cargo.toml +++ b/mm2src/mm2_bitcoin/chain/Cargo.toml @@ -5,7 +5,7 @@ authors = ["debris "] [dependencies] rustc-hex = "2" -bitcoin = "0.27.1" +bitcoin = 
"0.28.1" bitcrypto = { path = "../crypto" } primitives = { path = "../primitives" } serialization = { path = "../serialization" } diff --git a/mm2src/mm2_bitcoin/chain/src/transaction.rs b/mm2src/mm2_bitcoin/chain/src/transaction.rs index e2585275e2..067d4325b0 100644 --- a/mm2src/mm2_bitcoin/chain/src/transaction.rs +++ b/mm2src/mm2_bitcoin/chain/src/transaction.rs @@ -6,6 +6,7 @@ use constants::{LOCKTIME_THRESHOLD, SEQUENCE_FINAL}; use crypto::{dhash256, sha256}; use ext_bitcoin::blockdata::transaction::{OutPoint as ExtOutpoint, Transaction as ExtTransaction, TxIn, TxOut}; use ext_bitcoin::hash_types::Txid; +use ext_bitcoin::Witness; use hash::{CipherText, EncCipherText, OutCipherText, ZkProof, ZkProofSapling, H256, H512, H64}; use hex::FromHex; use ser::{deserialize, serialize, serialize_with_flags, SERIALIZE_TRANSACTION_WITNESS}; @@ -75,7 +76,7 @@ impl From for TxIn { previous_output: txin.previous_output.into(), script_sig: txin.script_sig.take().into(), sequence: txin.sequence, - witness: txin.script_witness.into_iter().map(|s| s.take()).collect(), + witness: Witness::from_vec(txin.script_witness.into_iter().map(|s| s.take()).collect()), } } } From ab5887bf9d186f35573bd7fae5e0bb3035f298a4 Mon Sep 17 00:00:00 2001 From: shamardy Date: Thu, 4 Aug 2022 12:19:47 +0200 Subject: [PATCH 05/33] fix wasm compilation --- mm2src/coins/Cargo.toml | 22 +++++++++---------- mm2src/coins/utxo.rs | 4 ++++ .../utxo_indexedb_block_header_storage.rs | 4 ++-- mm2src/common/Cargo.toml | 2 +- mm2src/mm2_bitcoin/chain/Cargo.toml | 4 +++- mm2src/mm2_bitcoin/chain/src/block_header.rs | 3 +++ mm2src/mm2_bitcoin/chain/src/lib.rs | 1 + mm2src/mm2_bitcoin/chain/src/transaction.rs | 8 ++++++- 8 files changed, 32 insertions(+), 16 deletions(-) diff --git a/mm2src/coins/Cargo.toml b/mm2src/coins/Cargo.toml index 77bb9863fb..8a50918a65 100644 --- a/mm2src/coins/Cargo.toml +++ b/mm2src/coins/Cargo.toml @@ -17,7 +17,6 @@ async-trait = "0.1.52" base64 = "0.10.0" base58 = "0.2.0" bip32 = { 
version = "0.2.2", default-features = false, features = ["alloc", "secp256k1-ffi"] } -bitcoin = "0.28.1" bitcoin_hashes = "0.10.0" bitcrypto = { path = "../mm2_bitcoin/crypto" } bincode = "1.3.3" @@ -49,9 +48,6 @@ jsonrpc-core = "8.0.1" keys = { path = "../mm2_bitcoin/keys" } lazy_static = "1.4" libc = "0.2" -lightning = "0.0.108" -lightning-invoice = "0.16.0" -lightning-rapid-gossip-sync = "0.0.108" metrics = "0.12" mm2_core = { path = "../mm2_core" } mm2_err_handle = { path = "../mm2_err_handle" } @@ -71,7 +67,6 @@ rpc = { path = "../mm2_bitcoin/rpc" } rpc_task = { path = "../rpc_task" } script = { path = "../mm2_bitcoin/script" } secp256k1 = { version = "0.20" } -secp256k1v22 = { version = "0.22", package = "secp256k1" } ser_error = { path = "../derives/ser_error" } ser_error_derive = { path = "../derives/ser_error_derive" } serde = "1.0" @@ -100,20 +95,25 @@ web-sys = { version = "0.3.55", features = ["console", "Headers", "Request", "Re [target.'cfg(not(target_arch = "wasm32"))'.dependencies] dirs = { version = "1" } +bitcoin = "0.28.1" +lightning = "0.0.108" lightning-background-processor = "0.0.108" -lightning-persister = "0.0.108" +lightning-invoice = "0.16.0" lightning-net-tokio = "0.0.108" +lightning-persister = "0.0.108" +lightning-rapid-gossip-sync = "0.0.108" rust-ini = { version = "0.13" } rustls = { version = "0.20", features = ["dangerous_configuration"] } -tokio = { version = "1.7" } -tokio-rustls = { version = "0.23" } -tonic = { version = "0.7", features = ["tls", "tls-webpki-roots", "compression"] } -webpki-roots = { version = "0.22" } +secp256k1v22 = { version = "0.22", package = "secp256k1" } solana-client = { version = "1", default-features = false } solana-sdk = { version = "1", default-features = false } solana-transaction-status = "1" -spl-token = { version = "3" } spl-associated-token-account = "1" +spl-token = { version = "3" } +tokio = { version = "1.7" } +tokio-rustls = { version = "0.23" } +tonic = { version = "0.7", features = 
["tls", "tls-webpki-roots", "compression"] } +webpki-roots = { version = "0.22" } zcash_client_backend = { git = "https://github.com/KomodoPlatform/librustzcash.git" } zcash_client_sqlite = { git = "https://github.com/KomodoPlatform/librustzcash.git" } zcash_primitives = { features = ["transparent-inputs"], git = "https://github.com/KomodoPlatform/librustzcash.git" } diff --git a/mm2src/coins/utxo.rs b/mm2src/coins/utxo.rs index 45815465b0..b746424c6d 100644 --- a/mm2src/coins/utxo.rs +++ b/mm2src/coins/utxo.rs @@ -39,6 +39,7 @@ pub mod utxo_standard; pub mod utxo_withdraw; use async_trait::async_trait; +#[cfg(not(target_arch = "wasm32"))] use bitcoin::network::constants::Network as BitcoinNetwork; pub use bitcrypto::{dhash160, sha256, ChecksumType}; pub use chain::Transaction as UtxoTx; @@ -60,6 +61,7 @@ use futures01::Future; use keys::bytes::Bytes; pub use keys::{Address, AddressFormat as UtxoAddressFormat, AddressHashEnum, KeyPair, Private, Public, Secret, Type as ScriptType}; +#[cfg(not(target_arch = "wasm32"))] use lightning_invoice::Currency as LightningCurrency; use mm2_core::mm_ctx::MmArc; use mm2_err_handle::prelude::*; @@ -417,6 +419,7 @@ pub enum BlockchainNetwork { Regtest, } +#[cfg(not(target_arch = "wasm32"))] impl From for BitcoinNetwork { fn from(network: BlockchainNetwork) -> Self { match network { @@ -427,6 +430,7 @@ impl From for BitcoinNetwork { } } +#[cfg(not(target_arch = "wasm32"))] impl From for LightningCurrency { fn from(network: BlockchainNetwork) -> Self { match network { diff --git a/mm2src/coins/utxo/utxo_indexedb_block_header_storage.rs b/mm2src/coins/utxo/utxo_indexedb_block_header_storage.rs index e0e98658a4..236b5645b5 100644 --- a/mm2src/coins/utxo/utxo_indexedb_block_header_storage.rs +++ b/mm2src/coins/utxo/utxo_indexedb_block_header_storage.rs @@ -46,8 +46,8 @@ impl BlockHeaderStorageOps for IndexedDBBlockHeadersStorage { async fn get_block_height_by_hash( &self, - for_coin: &str, - hash: H256, + _for_coin: &str, + _hash: 
H256, ) -> Result, BlockHeaderStorageError> { Ok(None) } diff --git a/mm2src/common/Cargo.toml b/mm2src/common/Cargo.toml index cbefbd3f41..4c55f4227b 100644 --- a/mm2src/common/Cargo.toml +++ b/mm2src/common/Cargo.toml @@ -29,7 +29,6 @@ http = "0.2" http-body = "0.1" itertools = "0.10" lazy_static = "1.4" -lightning = "0.0.108" log = "0.4.8" parking_lot = { version = "0.12.0", features = ["nightly"] } parking_lot_core = { version = "0.6", features = ["nightly"] } @@ -66,6 +65,7 @@ hyper = { version = "0.14.11", features = ["client", "http2", "server", "tcp"] } # got "invalid certificate: UnknownIssuer" for https://ropsten.infura.io on iOS using default-features hyper-rustls = { version = "0.23", default-features = false, features = ["http1", "http2", "webpki-tokio"] } libc = { version = "0.2" } +lightning = "0.0.108" log4rs = { version = "1.0", default-features = false, features = ["console_appender", "pattern_encoder"] } metrics = { version = "0.12" } metrics-runtime = { version = "0.13", default-features = false, features = ["metrics-observer-prometheus"] } diff --git a/mm2src/mm2_bitcoin/chain/Cargo.toml b/mm2src/mm2_bitcoin/chain/Cargo.toml index 90429f7adf..d029c0f8fb 100644 --- a/mm2src/mm2_bitcoin/chain/Cargo.toml +++ b/mm2src/mm2_bitcoin/chain/Cargo.toml @@ -5,8 +5,10 @@ authors = ["debris "] [dependencies] rustc-hex = "2" -bitcoin = "0.28.1" bitcrypto = { path = "../crypto" } primitives = { path = "../primitives" } serialization = { path = "../serialization" } serialization_derive = { path = "../serialization_derive" } + +[target.'cfg(not(target_arch = "wasm32"))'.dependencies] +bitcoin = "0.28.1" diff --git a/mm2src/mm2_bitcoin/chain/src/block_header.rs b/mm2src/mm2_bitcoin/chain/src/block_header.rs index 90bd7097f6..23f2f6bdbd 100644 --- a/mm2src/mm2_bitcoin/chain/src/block_header.rs +++ b/mm2src/mm2_bitcoin/chain/src/block_header.rs @@ -1,6 +1,8 @@ use compact::Compact; use crypto::dhash256; +#[cfg(not(target_arch = "wasm32"))] use 
ext_bitcoin::blockdata::block::BlockHeader as ExtBlockHeader; +#[cfg(not(target_arch = "wasm32"))] use ext_bitcoin::hash_types::{BlockHash as ExtBlockHash, TxMerkleNode as ExtTxMerkleNode}; use hash::H256; use hex::FromHex; @@ -348,6 +350,7 @@ impl TryFrom for BlockHeader { } } +#[cfg(not(target_arch = "wasm32"))] impl From for ExtBlockHeader { fn from(header: BlockHeader) -> Self { let prev_blockhash = ExtBlockHash::from_hash(header.previous_header_hash.to_sha256d()); diff --git a/mm2src/mm2_bitcoin/chain/src/lib.rs b/mm2src/mm2_bitcoin/chain/src/lib.rs index b3f474fd49..58133355bf 100644 --- a/mm2src/mm2_bitcoin/chain/src/lib.rs +++ b/mm2src/mm2_bitcoin/chain/src/lib.rs @@ -1,3 +1,4 @@ +#[cfg(not(target_arch = "wasm32"))] extern crate bitcoin as ext_bitcoin; extern crate bitcrypto as crypto; extern crate primitives; diff --git a/mm2src/mm2_bitcoin/chain/src/transaction.rs b/mm2src/mm2_bitcoin/chain/src/transaction.rs index 067d4325b0..8d250a583a 100644 --- a/mm2src/mm2_bitcoin/chain/src/transaction.rs +++ b/mm2src/mm2_bitcoin/chain/src/transaction.rs @@ -4,9 +4,11 @@ use bytes::Bytes; use constants::{LOCKTIME_THRESHOLD, SEQUENCE_FINAL}; use crypto::{dhash256, sha256}; +#[cfg(not(target_arch = "wasm32"))] use ext_bitcoin::blockdata::transaction::{OutPoint as ExtOutpoint, Transaction as ExtTransaction, TxIn, TxOut}; +#[cfg(not(target_arch = "wasm32"))] use ext_bitcoin::hash_types::Txid; -use ext_bitcoin::Witness; +#[cfg(not(target_arch = "wasm32"))] use ext_bitcoin::Witness; use hash::{CipherText, EncCipherText, OutCipherText, ZkProof, ZkProofSapling, H256, H512, H64}; use hex::FromHex; use ser::{deserialize, serialize, serialize_with_flags, SERIALIZE_TRANSACTION_WITNESS}; @@ -38,6 +40,7 @@ impl OutPoint { pub fn is_null(&self) -> bool { self.hash.is_zero() && self.index == u32::MAX } } +#[cfg(not(target_arch = "wasm32"))] impl From for ExtOutpoint { fn from(outpoint: OutPoint) -> Self { ExtOutpoint { @@ -70,6 +73,7 @@ impl TransactionInput { pub fn 
has_witness(&self) -> bool { !self.script_witness.is_empty() } } +#[cfg(not(target_arch = "wasm32"))] impl From for TxIn { fn from(txin: TransactionInput) -> Self { TxIn { @@ -96,6 +100,7 @@ impl Default for TransactionOutput { } } +#[cfg(not(target_arch = "wasm32"))] impl From for TxOut { fn from(txout: TransactionOutput) -> Self { TxOut { @@ -227,6 +232,7 @@ impl From<&'static str> for Transaction { fn from(s: &'static str) -> Self { deserialize(&s.from_hex::>().unwrap() as &[u8]).unwrap() } } +#[cfg(not(target_arch = "wasm32"))] impl From for ExtTransaction { fn from(tx: Transaction) -> Self { ExtTransaction { From 0d803949c6aa7f95ab1da85849e42c3857813893 Mon Sep 17 00:00:00 2001 From: shamardy Date: Fri, 5 Aug 2022 00:44:16 +0200 Subject: [PATCH 06/33] wip: upgrade rust-lightning to v0.0.108, update persister code --- mm2src/coins/lightning.rs | 28 +- mm2src/coins/lightning/ln_events.rs | 11 +- .../lightning/ln_filesystem_persister.rs | 364 ++++++++---------- mm2src/coins/lightning/ln_p2p.rs | 4 +- mm2src/coins/lightning/ln_storage.rs | 19 +- mm2src/coins/lightning/ln_utils.rs | 35 +- mm2src/hw_common/src/primitives.rs | 24 -- 7 files changed, 191 insertions(+), 294 deletions(-) diff --git a/mm2src/coins/lightning.rs b/mm2src/coins/lightning.rs index 3c567e2110..ccf0943210 100644 --- a/mm2src/coins/lightning.rs +++ b/mm2src/coins/lightning.rs @@ -14,6 +14,7 @@ use super::{lp_coinfind_or_err, DerivationMethod, MmCoinEnum}; use crate::lightning::ln_events::init_events_abort_handlers; use crate::lightning::ln_serialization::PublicKeyForRPC; use crate::lightning::ln_sql::SqliteLightningDB; +use crate::lightning::ln_storage::NetworkGraph; use crate::utxo::rpc_clients::UtxoRpcClientEnum; use crate::utxo::utxo_common::{big_decimal_from_sat_unsigned, UtxoTxBuilder}; use crate::utxo::{sat_from_big_decimal, BlockchainNetwork, FeePolicy, GetUtxoListOps, UtxoTxGenerationOps}; @@ -40,7 +41,7 @@ use lightning::chain::keysinterface::{KeysInterface, KeysManager, Recipient}; 
use lightning::chain::Access; use lightning::ln::channelmanager::{ChannelDetails, MIN_FINAL_CLTV_EXPIRY}; use lightning::ln::{PaymentHash, PaymentPreimage}; -use lightning::routing::gossip::{NetworkGraph, P2PGossipSync}; +use lightning::routing::gossip; use lightning::util::config::UserConfig; use lightning_background_processor::{BackgroundProcessor, GossipSync}; use lightning_invoice::payment; @@ -57,7 +58,7 @@ use ln_errors::{ClaimableBalancesError, ClaimableBalancesResult, CloseChannelErr ListPaymentsError, ListPaymentsResult, OpenChannelError, OpenChannelResult, SendPaymentError, SendPaymentResult}; use ln_events::LightningEventHandler; -use ln_filesystem_persister::{LightningFilesystemPersister, LightningPersisterShared}; +use ln_filesystem_persister::LightningFilesystemPersister; use ln_p2p::{connect_to_node, ConnectToNodeRes, PeerManager}; use ln_platform::{h256_json_from_txid, Platform}; use ln_serialization::{InvoiceForRPC, NodeAddress}; @@ -70,8 +71,7 @@ use mm2_number::{BigDecimal, MmNumber}; use parking_lot::Mutex as PaMutex; use rpc::v1::types::{Bytes as BytesJson, H256 as H256Json}; use script::{Builder, TransactionInputSigner}; -use secp256k1::PublicKey; -use secp256k1v22::PublicKey as LnPublicKey; +use secp256k1v22::PublicKey; use serde::{Deserialize, Serialize}; use serde_json::Value as Json; use std::collections::hash_map::Entry; @@ -83,7 +83,7 @@ use std::sync::Arc; pub const DEFAULT_INVOICE_EXPIRY: u32 = 3600; -type Router = DefaultRouter>>, Arc>; +type Router = DefaultRouter, Arc>; type InvoicePayer = payment::InvoicePayer, Router, Arc, Arc, E>; #[derive(Clone)] @@ -104,7 +104,7 @@ pub struct LightningCoin { /// The lightning node invoice payer. pub invoice_payer: Arc>>, /// The lightning node persister that takes care of writing/reading data from storage. - pub persister: LightningPersisterShared, + pub persister: Arc, /// The lightning node db struct that takes care of reading/writing data from/to db. 
pub db: SqliteLightningDB, /// The mutex storing the addresses of the nodes that the lightning node has open channels with, @@ -180,7 +180,7 @@ impl LightningCoin { async fn keysend( &self, - destination: LnPublicKey, + destination: PublicKey, amount_msat: u64, final_cltv_expiry_delta: u32, ) -> SendPaymentResult { @@ -667,7 +667,7 @@ pub async fn start_lightning( .await?, ); - let gossip_sync = Arc::new(P2PGossipSync::new( + let gossip_sync = Arc::new(gossip::P2PGossipSync::new( network_graph.clone(), None::>, logger.clone(), @@ -713,7 +713,6 @@ pub async fn start_lightning( // Initialize routing Scorer let scorer = Arc::new(persister.get_scorer(network_graph.clone(), logger.clone()).await?); - spawn(ln_utils::persist_scorer_loop(persister.clone(), scorer.clone())); // Create InvoicePayer // random_seed_bytes are additional random seed to improve privacy by adding a random CLTV expiry offset to each path's final hop. @@ -733,9 +732,7 @@ pub async fn start_lightning( )); let p2p_gossip_sync = - GossipSync::<_, Arc>>, Arc>>, _, _, _>::P2P( - gossip_sync.clone(), - ); + GossipSync::<_, Arc, Arc>>, _, _, _>::P2P(gossip_sync.clone()); // Start Background Processing. Runs tasks periodically in the background to keep LN node operational. 
// InvoicePayer will act as our event handler as it handles some of the payments related events before @@ -965,7 +962,7 @@ pub struct OpenChannelsFilter { pub to_outbound_capacity_msat: Option, pub from_inbound_capacity_msat: Option, pub to_inbound_capacity_msat: Option, - pub confirmed: Option, + pub is_ready: Option, pub is_usable: Option, pub is_public: Option, } @@ -1003,7 +1000,7 @@ fn apply_open_channel_filter(channel_details: &ChannelDetailsForRPC, filter: &Op let is_to_inbound_capacity_msat = filter.to_inbound_capacity_msat.is_none() || Some(&channel_details.inbound_capacity_msat) <= filter.to_inbound_capacity_msat.as_ref(); - let is_confirmed = filter.confirmed.is_none() || Some(&channel_details.is_ready) == filter.confirmed.as_ref(); + let is_confirmed = filter.is_ready.is_none() || Some(&channel_details.is_ready) == filter.is_ready.as_ref(); let is_usable = filter.is_usable.is_none() || Some(&channel_details.is_usable) == filter.is_usable.as_ref(); @@ -1516,6 +1513,7 @@ pub struct CloseChannelReq { pub force_close: bool, } +// Todo: use either counterparty_node_id or channel_id to close channel/s pub async fn close_channel(ctx: MmArc, req: CloseChannelReq) -> CloseChannelResult { let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; let ln_coin = match coin { @@ -1523,7 +1521,7 @@ pub async fn close_channel(ctx: MmArc, req: CloseChannelReq) -> CloseChannelResu _ => return MmError::err(CloseChannelError::UnsupportedCoin(coin.ticker().to_string())), }; let channel_id = req.channel_id.0; - let counterparty_node_id: LnPublicKey = req.counterparty_node_id.into(); + let counterparty_node_id: PublicKey = req.counterparty_node_id.into(); if req.force_close { async_blocking(move || { ln_coin diff --git a/mm2src/coins/lightning/ln_events.rs b/mm2src/coins/lightning/ln_events.rs index e8182e711b..8ea23ee873 100644 --- a/mm2src/coins/lightning/ln_events.rs +++ b/mm2src/coins/lightning/ln_events.rs @@ -256,7 +256,7 @@ impl LightningEventHandler { 
channel_value_satoshis: u64, output_script: &Script, user_channel_id: u64, - counterparty_node_id: &LnPublicKey, + counterparty_node_id: &PublicKey, ) { info!( "Handling FundingGenerationReady event for internal channel id: {} with: {}", @@ -297,10 +297,11 @@ impl LightningEventHandler { }); } - fn handle_payment_received(&self, payment_hash: PaymentHash, _amount_msat: u64, purpose: &PaymentPurpose) { + fn handle_payment_received(&self, payment_hash: PaymentHash, amount_msat: u64, purpose: &PaymentPurpose) { info!( - "Handling PaymentReceived event for payment_hash: {}", - hex::encode(payment_hash.0) + "Handling PaymentReceived event for payment_hash: {} with amount {}", + hex::encode(payment_hash.0), + amount_msat ); let payment_preimage = match purpose { PaymentPurpose::InvoicePayment { payment_preimage, .. } => match payment_preimage { @@ -524,7 +525,7 @@ impl LightningEventHandler { fn handle_open_channel_request( &self, temporary_channel_id: [u8; 32], - counterparty_node_id: LnPublicKey, + counterparty_node_id: PublicKey, funding_satoshis: u64, push_msat: u64, ) { diff --git a/mm2src/coins/lightning/ln_filesystem_persister.rs b/mm2src/coins/lightning/ln_filesystem_persister.rs index 301ff69393..3df80533e6 100644 --- a/mm2src/coins/lightning/ln_filesystem_persister.rs +++ b/mm2src/coins/lightning/ln_filesystem_persister.rs @@ -1,50 +1,41 @@ -use crate::lightning::ln_platform::Platform; -use crate::lightning::ln_storage::{LightningStorage, NodesAddressesMap, NodesAddressesMapShared, Scorer}; -use crate::lightning::ln_utils::{ChainMonitor, ChannelManager}; +use crate::lightning::ln_storage::{LightningStorage, NetworkGraph, NodesAddressesMap, NodesAddressesMapShared, Scorer}; use async_trait::async_trait; use bitcoin::blockdata::constants::genesis_block; -use bitcoin::Network; +use bitcoin::{BlockHash, Network, Txid}; +use bitcoin_hashes::hex::FromHex; use common::async_blocking; use common::log::LogState; -use lightning::chain::keysinterface::{InMemorySigner, 
KeysManager}; -use lightning::routing::gossip::NetworkGraph; +use lightning::chain::channelmonitor::ChannelMonitor; +use lightning::chain::keysinterface::{KeysInterface, Sign}; use lightning::routing::scoring::{ProbabilisticScorer, ProbabilisticScoringParameters}; -use lightning::util::persist::{KVStorePersister, Persister}; +use lightning::util::persist::KVStorePersister; use lightning::util::ser::{ReadableArgs, Writeable}; -use lightning_persister::FilesystemPersister; use mm2_io::fs::check_dir_operations; use secp256k1v22::PublicKey; use std::collections::HashMap; use std::fs; -use std::io::{BufReader, BufWriter, Error}; +use std::io::{BufReader, BufWriter, Cursor}; use std::net::SocketAddr; use std::ops::Deref; -use std::path::PathBuf; +use std::path::{Path, PathBuf}; use std::str::FromStr; use std::sync::{Arc, Mutex}; -// #[cfg(target_family = "unix")] use std::os::unix::io::AsRawFd; +#[cfg(target_family = "unix")] use std::os::unix::io::AsRawFd; -// #[cfg(target_family = "windows")] -// use {std::ffi::OsStr, std::os::windows::ffi::OsStrExt}; +#[cfg(target_family = "windows")] +use {std::ffi::OsStr, std::os::windows::ffi::OsStrExt}; pub struct LightningFilesystemPersister { main_path: PathBuf, backup_path: Option, - channels_persister: Arc, } impl LightningFilesystemPersister { /// Initialize a new LightningPersister and set the path to the individual channels' /// files. #[inline] - pub fn new(main_path: PathBuf, backup_path: Option) -> Self { - Self { - main_path: main_path.clone(), - backup_path, - channels_persister: Arc::new(FilesystemPersister::new(main_path.display().to_string())), - } - } + pub fn new(main_path: PathBuf, backup_path: Option) -> Self { Self { main_path, backup_path } } /// Get the directory which was provided when this persister was initialized. 
#[inline] @@ -54,18 +45,6 @@ impl LightningFilesystemPersister { #[inline] pub fn backup_path(&self) -> Option { self.backup_path.clone() } - /// Get the channels_persister which was initialized when this persister was initialized. - #[inline] - pub fn channels_persister(&self) -> Arc { self.channels_persister.clone() } - - pub fn monitor_backup_path(&self) -> Option { - if let Some(mut backup_path) = self.backup_path() { - backup_path.push("monitors"); - return Some(backup_path); - } - None - } - pub fn nodes_addresses_path(&self) -> PathBuf { let mut path = self.main_path(); path.push("channel_nodes_data"); @@ -97,173 +76,165 @@ impl LightningFilesystemPersister { path.push("manager"); path } + + /// Read `ChannelMonitor`s from disk. + pub fn read_channelmonitors( + &self, + keys_manager: K, + ) -> Result)>, std::io::Error> + where + K::Target: KeysInterface + Sized, + { + let mut path = self.main_path(); + path.push("monitors"); + if !Path::new(&path).exists() { + return Ok(Vec::new()); + } + let mut res = Vec::new(); + for file_option in fs::read_dir(path).unwrap() { + let file = file_option.unwrap(); + let owned_file_name = file.file_name(); + let filename = owned_file_name.to_str(); + if filename.is_none() || !filename.unwrap().is_ascii() || filename.unwrap().len() < 65 { + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + "Invalid ChannelMonitor file name", + )); + } + if filename.unwrap().ends_with(".tmp") { + // If we were in the middle of committing an new update and crashed, it should be + // safe to ignore the update - we should never have returned to the caller and + // irrevocably committed to the new state in any way. 
+ continue; + } + + let txid = Txid::from_hex(filename.unwrap().split_at(64).0); + if txid.is_err() { + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + "Invalid tx ID in filename", + )); + } + + let index = filename.unwrap().split_at(65).1.parse::(); + if index.is_err() { + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + "Invalid tx index in filename", + )); + } + + let contents = fs::read(&file.path())?; + let mut buffer = Cursor::new(&contents); + match <(BlockHash, ChannelMonitor)>::read(&mut buffer, &*keys_manager) { + Ok((blockhash, channel_monitor)) => { + if channel_monitor.get_funding_txo().0.txid != txid.unwrap() + || channel_monitor.get_funding_txo().0.index != index.unwrap() + { + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + "ChannelMonitor was stored in the wrong file", + )); + } + res.push((blockhash, channel_monitor)); + }, + Err(e) => { + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + format!("Failed to deserialize ChannelMonitor: {}", e), + )) + }, + } + } + Ok(res) + } } impl KVStorePersister for LightningFilesystemPersister { fn persist(&self, key: &str, object: &W) -> std::io::Result<()> { - self.channels_persister.persist(key, object) + let mut dest_file = self.main_path(); + dest_file.push(key); + write_to_file(dest_file, object)?; + + if !matches!(key, "network_graph" | "scorer") { + if let Some(mut dest_file) = self.backup_path() { + dest_file.push(key); + write_to_file(dest_file, object)?; + } + } + + Ok(()) } } -#[derive(Clone)] -pub struct LightningPersisterShared(pub Arc); +#[cfg(target_family = "windows")] +macro_rules! 
call { + ($e: expr) => { + if $e != 0 { + return Ok(()); + } else { + return Err(std::io::Error::last_os_error()); + } + }; +} -impl Deref for LightningPersisterShared { - type Target = LightningFilesystemPersister; - fn deref(&self) -> &LightningFilesystemPersister { self.0.deref() } +#[cfg(target_family = "windows")] +fn path_to_windows_str>(path: T) -> Vec { + path.as_ref().encode_wide().chain(Some(0)).collect() } -impl - Persister< - '_, - InMemorySigner, - Arc, - Arc, - Arc, - Arc, - Arc, - Scorer, - > for LightningPersisterShared -{ - fn persist_manager(&self, channel_manager: &ChannelManager) -> Result<(), Error> { - self.persist("manager", channel_manager)?; - if let Some(backup_path) = self.0.backup_path() { - let file = fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(backup_path)?; - channel_manager.write(&mut BufWriter::new(file))?; +fn write_to_file(dest_file: PathBuf, data: &W) -> std::io::Result<()> { + let mut tmp_file = dest_file.clone(); + tmp_file.set_extension("tmp"); + + let parent_directory = dest_file.parent().unwrap(); + // Do a crazy dance with lots of fsync()s to be overly cautious here... + // We never want to end up in a state where we've lost the old data, or end up using the + // old data on power loss after we've returned. + // The way to atomically write a file on Unix platforms is: + // open(tmpname), write(tmpfile), fsync(tmpfile), close(tmpfile), rename(), fsync(dir) + { + // Note that going by rust-lang/rust@d602a6b, on MacOS it is only safe to use + // rust stdlib 1.36 or higher. + let mut buf = BufWriter::new(fs::File::create(&tmp_file)?); + data.write(&mut buf)?; + buf.into_inner()?.sync_all()?; + } + // Fsync the parent directory on Unix. 
+ #[cfg(target_family = "unix")] + { + fs::rename(&tmp_file, &dest_file)?; + let dir_file = fs::OpenOptions::new().read(true).open(parent_directory)?; + unsafe { + libc::fsync(dir_file.as_raw_fd()); } - Ok(()) } - - fn persist_graph(&self, network_graph: &NetworkGraph>) -> Result<(), Error> { - if self.persist("network_graph", network_graph).is_err() { - // Persistence errors here are non-fatal as we can just fetch the routing graph - // again later, but they may indicate a disk error which could be fatal elsewhere. - eprintln!("Warning: Failed to persist network graph, check your disk and permissions"); + #[cfg(target_family = "windows")] + { + if dest_file.exists() { + unsafe { + winapi::um::winbase::ReplaceFileW( + path_to_windows_str(dest_file).as_ptr(), + path_to_windows_str(tmp_file).as_ptr(), + std::ptr::null(), + winapi::um::winbase::REPLACEFILE_IGNORE_MERGE_ERRORS, + std::ptr::null_mut() as *mut winapi::ctypes::c_void, + std::ptr::null_mut() as *mut winapi::ctypes::c_void, + ) + }; + } else { + call!(unsafe { + winapi::um::winbase::MoveFileExW( + path_to_windows_str(tmp_file).as_ptr(), + path_to_windows_str(dest_file).as_ptr(), + winapi::um::winbase::MOVEFILE_WRITE_THROUGH | winapi::um::winbase::MOVEFILE_REPLACE_EXISTING, + ) + }); } - - Ok(()) } - - fn persist_scorer(&self, _scorer: &Scorer) -> Result<(), Error> { todo!() } + Ok(()) } -// #[cfg(target_family = "windows")] -// macro_rules! call { -// ($e: expr) => { -// if $e != 0 { -// return Ok(()); -// } else { -// return Err(std::io::Error::last_os_error()); -// } -// }; -// } - -// #[cfg(target_family = "windows")] -// fn path_to_windows_str>(path: T) -> Vec { -// path.as_ref().encode_wide().chain(Some(0)).collect() -// } - -// fn write_monitor_to_file( -// mut path: PathBuf, -// filename: String, -// monitor: &ChannelMonitor, -// ) -> std::io::Result<()> { -// // Do a crazy dance with lots of fsync()s to be overly cautious here... 
-// // We never want to end up in a state where we've lost the old data, or end up using the -// // old data on power loss after we've returned. -// // The way to atomically write a file on Unix platforms is: -// // open(tmpname), write(tmpfile), fsync(tmpfile), close(tmpfile), rename(), fsync(dir) -// path.push(filename); -// let filename_with_path = path.display().to_string(); -// let tmp_filename = format!("{}.tmp", filename_with_path); -// -// { -// let mut f = fs::File::create(&tmp_filename)?; -// monitor.write(&mut f)?; -// f.sync_all()?; -// } -// // Fsync the parent directory on Unix. -// #[cfg(target_family = "unix")] -// { -// fs::rename(&tmp_filename, &filename_with_path)?; -// let path = Path::new(&filename_with_path).parent().ok_or_else(|| { -// std::io::Error::new( -// std::io::ErrorKind::NotFound, -// format!("can't find parent dir for {}", filename_with_path), -// ) -// })?; -// let dir_file = fs::OpenOptions::new().read(true).open(path)?; -// unsafe { -// libc::fsync(dir_file.as_raw_fd()); -// } -// } -// #[cfg(target_family = "windows")] -// { -// let src = PathBuf::from(tmp_filename); -// let dst = PathBuf::from(filename_with_path.clone()); -// if Path::new(&filename_with_path).exists() { -// unsafe { -// winapi::um::winbase::ReplaceFileW( -// path_to_windows_str(dst).as_ptr(), -// path_to_windows_str(src).as_ptr(), -// std::ptr::null(), -// winapi::um::winbase::REPLACEFILE_IGNORE_MERGE_ERRORS, -// std::ptr::null_mut() as *mut winapi::ctypes::c_void, -// std::ptr::null_mut() as *mut winapi::ctypes::c_void, -// ) -// }; -// } else { -// call!(unsafe { -// winapi::um::winbase::MoveFileExW( -// path_to_windows_str(src).as_ptr(), -// path_to_windows_str(dst).as_ptr(), -// winapi::um::winbase::MOVEFILE_WRITE_THROUGH | winapi::um::winbase::MOVEFILE_REPLACE_EXISTING, -// ) -// }); -// } -// } -// Ok(()) -// } - -// impl chainmonitor::Persist for LightningFilesystemPersister { -// fn persist_new_channel( -// &self, -// funding_txo: OutPoint, -// monitor: 
&ChannelMonitor, -// update_id: chainmonitor::MonitorUpdateId, -// ) -> Result<(), ChannelMonitorUpdateErr> { -// self.channels_persister -// .persist_new_channel(funding_txo, monitor, update_id)?; -// if let Some(backup_path) = self.monitor_backup_path() { -// let filename = format!("{}_{}", funding_txo.txid.to_hex(), funding_txo.index); -// write_monitor_to_file(backup_path, filename, monitor) -// .map_err(|_| ChannelMonitorUpdateErr::PermanentFailure)?; -// } -// Ok(()) -// } -// -// fn update_persisted_channel( -// &self, -// funding_txo: OutPoint, -// update: &Option, -// monitor: &ChannelMonitor, -// update_id: chainmonitor::MonitorUpdateId, -// ) -> Result<(), ChannelMonitorUpdateErr> { -// self.channels_persister -// .update_persisted_channel(funding_txo, update, monitor, update_id)?; -// if let Some(backup_path) = self.monitor_backup_path() { -// let filename = format!("{}_{}", funding_txo.txid.to_hex(), funding_txo.index); -// write_monitor_to_file(backup_path, filename, monitor) -// .map_err(|_| ChannelMonitorUpdateErr::PermanentFailure)?; -// } -// Ok(()) -// } -// } - #[async_trait] impl LightningStorage for LightningFilesystemPersister { type Error = std::io::Error; @@ -365,11 +336,7 @@ impl LightningStorage for LightningFilesystemPersister { .await } - async fn get_network_graph( - &self, - network: Network, - logger: Arc, - ) -> Result>, Self::Error> { + async fn get_network_graph(&self, network: Network, logger: Arc) -> Result { let path = self.network_graph_path(); if !path.exists() { return Ok(NetworkGraph::new(genesis_block(network).header.block_hash(), logger)); @@ -383,11 +350,7 @@ impl LightningStorage for LightningFilesystemPersister { .await } - async fn get_scorer( - &self, - network_graph: Arc>>, - logger: Arc, - ) -> Result { + async fn get_scorer(&self, network_graph: Arc, logger: Arc) -> Result { let path = self.scorer_path(); if !path.exists() { return Ok(Mutex::new(ProbabilisticScorer::new( @@ -407,17 +370,4 @@ impl LightningStorage 
for LightningFilesystemPersister { }) .await } - - async fn save_scorer(&self, scorer: Arc) -> Result<(), Self::Error> { - let path = self.scorer_path(); - async_blocking(move || { - let file = fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(path)?; - scorer.write(&mut BufWriter::new(file)) - }) - .await - } } diff --git a/mm2src/coins/lightning/ln_p2p.rs b/mm2src/coins/lightning/ln_p2p.rs index bd871bd2dd..1acd762289 100644 --- a/mm2src/coins/lightning/ln_p2p.rs +++ b/mm2src/coins/lightning/ln_p2p.rs @@ -5,7 +5,7 @@ use derive_more::Display; use lightning::chain::Access; use lightning::ln::msgs::NetAddress; use lightning::ln::peer_handler::{IgnoringMessageHandler, MessageHandler, SimpleArcPeerManager}; -use lightning::routing::gossip::{NetworkGraph, P2PGossipSync}; +use lightning::routing::gossip; use lightning_net_tokio::SocketDescriptor; use mm2_net::ip_addr::fetch_external_ip; use rand::RngCore; @@ -16,7 +16,7 @@ use tokio::net::TcpListener; const TRY_RECONNECTING_TO_NODE_INTERVAL: f64 = 60.; const BROADCAST_NODE_ANNOUNCEMENT_INTERVAL: u64 = 600; -pub type NetworkGossip = P2PGossipSync>>, Arc, Arc>; +pub type NetworkGossip = gossip::P2PGossipSync, Arc, Arc>; pub type PeerManager = SimpleArcPeerManager; diff --git a/mm2src/coins/lightning/ln_storage.rs b/mm2src/coins/lightning/ln_storage.rs index b57b803361..159ffdac7a 100644 --- a/mm2src/coins/lightning/ln_storage.rs +++ b/mm2src/coins/lightning/ln_storage.rs @@ -1,7 +1,7 @@ use async_trait::async_trait; use bitcoin::Network; use common::log::LogState; -use lightning::routing::gossip::NetworkGraph; +use lightning::routing::gossip; use lightning::routing::scoring::ProbabilisticScorer; use parking_lot::Mutex as PaMutex; use secp256k1v22::PublicKey; @@ -12,7 +12,8 @@ use std::sync::{Arc, Mutex}; pub type NodesAddressesMap = HashMap; pub type NodesAddressesMapShared = Arc>; -pub type Scorer = Mutex>>, Arc>>; +pub type NetworkGraph = gossip::NetworkGraph>; +pub type Scorer = Mutex, 
Arc>>; #[async_trait] pub trait LightningStorage { @@ -27,17 +28,7 @@ pub trait LightningStorage { async fn save_nodes_addresses(&self, nodes_addresses: NodesAddressesMapShared) -> Result<(), Self::Error>; - async fn get_network_graph( - &self, - network: Network, - logger: Arc, - ) -> Result>, Self::Error>; + async fn get_network_graph(&self, network: Network, logger: Arc) -> Result; - async fn get_scorer( - &self, - network_graph: Arc>>, - logger: Arc, - ) -> Result; - - async fn save_scorer(&self, scorer: Arc) -> Result<(), Self::Error>; + async fn get_scorer(&self, network_graph: Arc, logger: Arc) -> Result; } diff --git a/mm2src/coins/lightning/ln_utils.rs b/mm2src/coins/lightning/ln_utils.rs index 25b853801b..262d74b811 100644 --- a/mm2src/coins/lightning/ln_utils.rs +++ b/mm2src/coins/lightning/ln_utils.rs @@ -1,14 +1,12 @@ use super::*; use crate::lightning::ln_db::LightningDB; -use crate::lightning::ln_filesystem_persister::LightningPersisterShared; use crate::lightning::ln_platform::{get_best_header, ln_best_block_update_loop, update_best_block}; use crate::lightning::ln_sql::SqliteLightningDB; -use crate::lightning::ln_storage::{LightningStorage, NodesAddressesMap, Scorer}; +use crate::lightning::ln_storage::{LightningStorage, NodesAddressesMap}; use crate::utxo::rpc_clients::BestBlock as RpcBestBlock; use bitcoin::hash_types::BlockHash; use bitcoin_hashes::{sha256d, Hash}; -use common::executor::{spawn, Timer}; -use common::log; +use common::executor::spawn; use common::log::LogState; use lightning::chain::keysinterface::{InMemorySigner, KeysManager}; use lightning::chain::{chainmonitor, BestBlock, Watch}; @@ -21,15 +19,13 @@ use std::path::PathBuf; use std::sync::Arc; use std::time::SystemTime; -const SCORER_PERSIST_INTERVAL: u64 = 600; - pub type ChainMonitor = chainmonitor::ChainMonitor< InMemorySigner, Arc, Arc, Arc, Arc, - LightningPersisterShared, + Arc, >; pub type ChannelManager = SimpleArcChannelManager; @@ -51,13 +47,10 @@ pub async fn 
init_persister( ctx: &MmArc, ticker: String, backup_path: Option, -) -> EnableLightningResult { +) -> EnableLightningResult> { let ln_data_dir = ln_data_dir(ctx, &ticker); let ln_data_backup_dir = ln_data_backup_dir(ctx, backup_path, &ticker); - let persister = LightningPersisterShared(Arc::new(LightningFilesystemPersister::new( - ln_data_dir, - ln_data_backup_dir, - ))); + let persister = Arc::new(LightningFilesystemPersister::new(ln_data_dir, ln_data_backup_dir)); let is_initialized = persister.is_fs_initialized().await?; if !is_initialized { @@ -97,7 +90,7 @@ pub fn init_keys_manager(ctx: &MmArc) -> EnableLightningResult> pub async fn init_channel_manager( platform: Arc, logger: Arc, - persister: LightningPersisterShared, + persister: Arc, db: SqliteLightningDB, keys_manager: Arc, user_config: UserConfig, @@ -119,7 +112,7 @@ pub async fn init_channel_manager( )); // Read ChannelMonitor state from disk, important for lightning node is restarting and has at least 1 channel - let channels_persister = persister.channels_persister(); + let channels_persister = persister.clone(); let channels_keys_manager = keys_manager.clone(); let mut channelmonitors = async_blocking(move || { channels_persister @@ -237,20 +230,8 @@ pub async fn init_channel_manager( Ok((chain_monitor, channel_manager)) } -pub async fn persist_scorer_loop(persister: LightningPersisterShared, scorer: Arc) { - loop { - if let Err(e) = persister.save_scorer(scorer.clone()).await { - log::warn!( - "Failed to persist scorer error: {}, please check disk space and permissions", - e - ); - } - Timer::sleep(SCORER_PERSIST_INTERVAL as f64).await; - } -} - pub async fn get_open_channels_nodes_addresses( - persister: LightningPersisterShared, + persister: Arc, channel_manager: Arc, ) -> EnableLightningResult { let channels = async_blocking(move || channel_manager.list_channels()).await; diff --git a/mm2src/hw_common/src/primitives.rs b/mm2src/hw_common/src/primitives.rs index b502190e96..1c8d782b7a 100644 --- 
a/mm2src/hw_common/src/primitives.rs +++ b/mm2src/hw_common/src/primitives.rs @@ -5,30 +5,6 @@ pub use bip32::{ChildNumber, DerivationPath, Error as Bip32Error, ExtendedPublic pub type Secp256k1ExtendedPublicKey = ExtendedPublicKey; pub type XPub = String; -// #[derive(Clone, Debug, PartialEq)] -// pub struct PublicKeyInternal(pub secp256k1::PublicKey); -// -// impl bip32::PublicKey for PublicKeyInternal { -// fn from_bytes(bytes: bip32::PublicKeyBytes) -> bip32::Result { -// Ok(PublicKeyInternal(secp256k1::PublicKey::from_slice(&bytes).map_err(|_| bip32::Error::Crypto)?)) -// } -// -// fn to_bytes(&self) -> bip32::PublicKeyBytes { -// self.0.serialize() -// } -// -// fn derive_child(&self, other: bip32::PrivateKeyBytes) -> bip32::Result { -// let engine = secp256k1::Secp256k1::::verification_only(); -// -// let mut child_key = self.0; -// child_key -// .add_exp_assign(&engine, &other) -// .map_err(|_| bip32::Error::Crypto)?; -// -// Ok(PublicKeyInternal(child_key)) -// } -// } - #[derive(Clone, Copy)] pub enum EcdsaCurve { Secp256k1, From a3974eb0686516d7db534e332705195a1d13ad15 Mon Sep 17 00:00:00 2001 From: shamardy Date: Fri, 5 Aug 2022 12:54:41 +0200 Subject: [PATCH 07/33] fix clippy and udep errors --- Cargo.lock | 13 ------------- mm2src/coins/Cargo.toml | 1 - mm2src/coins/lightning/ln_filesystem_persister.rs | 2 +- 3 files changed, 1 insertion(+), 15 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3490b0ca63..db798f5994 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1057,7 +1057,6 @@ dependencies = [ "lightning-background-processor", "lightning-invoice", "lightning-net-tokio", - "lightning-persister", "lightning-rapid-gossip-sync", "metrics", "mm2_core", @@ -3760,18 +3759,6 @@ dependencies = [ "tokio", ] -[[package]] -name = "lightning-persister" -version = "0.0.108" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e9f154ee5b60e576973da61379767569e2ad7b61a361d716a7d15f37df7e0bc" -dependencies = [ - "bitcoin", - "libc", - 
"lightning", - "winapi", -] - [[package]] name = "lightning-rapid-gossip-sync" version = "0.0.108" diff --git a/mm2src/coins/Cargo.toml b/mm2src/coins/Cargo.toml index 8a50918a65..8c232a87c1 100644 --- a/mm2src/coins/Cargo.toml +++ b/mm2src/coins/Cargo.toml @@ -100,7 +100,6 @@ lightning = "0.0.108" lightning-background-processor = "0.0.108" lightning-invoice = "0.16.0" lightning-net-tokio = "0.0.108" -lightning-persister = "0.0.108" lightning-rapid-gossip-sync = "0.0.108" rust-ini = { version = "0.13" } rustls = { version = "0.20", features = ["dangerous_configuration"] } diff --git a/mm2src/coins/lightning/ln_filesystem_persister.rs b/mm2src/coins/lightning/ln_filesystem_persister.rs index 3df80533e6..a9773dce6a 100644 --- a/mm2src/coins/lightning/ln_filesystem_persister.rs +++ b/mm2src/coins/lightning/ln_filesystem_persister.rs @@ -187,7 +187,6 @@ fn write_to_file(dest_file: PathBuf, data: &W) -> std::io::Result< let mut tmp_file = dest_file.clone(); tmp_file.set_extension("tmp"); - let parent_directory = dest_file.parent().unwrap(); // Do a crazy dance with lots of fsync()s to be overly cautious here... // We never want to end up in a state where we've lost the old data, or end up using the // old data on power loss after we've returned. @@ -203,6 +202,7 @@ fn write_to_file(dest_file: PathBuf, data: &W) -> std::io::Result< // Fsync the parent directory on Unix. 
#[cfg(target_family = "unix")] { + let parent_directory = dest_file.parent().unwrap(); fs::rename(&tmp_file, &dest_file)?; let dir_file = fs::OpenOptions::new().read(true).open(parent_directory)?; unsafe { From 6e66860817c2084c4c02d7231c54d6677a0d3e47 Mon Sep 17 00:00:00 2001 From: shamardy Date: Fri, 5 Aug 2022 17:35:11 +0200 Subject: [PATCH 08/33] Implement trusted nodes for inbound 0-conf channels, release htlcs immediately for invoice payments that we don't have a preimage for --- mm2src/coins/lightning.rs | 50 ++++++++++++++++++- mm2src/coins/lightning/ln_errors.rs | 34 +++++++++++++ mm2src/coins/lightning/ln_events.rs | 31 +++++++++--- .../lightning/ln_filesystem_persister.rs | 49 +++++++++++++++++- mm2src/coins/lightning/ln_storage.rs | 7 ++- 5 files changed, 160 insertions(+), 11 deletions(-) diff --git a/mm2src/coins/lightning.rs b/mm2src/coins/lightning.rs index ccf0943210..ab5de388ad 100644 --- a/mm2src/coins/lightning.rs +++ b/mm2src/coins/lightning.rs @@ -11,10 +11,11 @@ mod ln_storage; mod ln_utils; use super::{lp_coinfind_or_err, DerivationMethod, MmCoinEnum}; +use crate::lightning::ln_errors::{TrustedNodeError, TrustedNodeResult}; use crate::lightning::ln_events::init_events_abort_handlers; use crate::lightning::ln_serialization::PublicKeyForRPC; use crate::lightning::ln_sql::SqliteLightningDB; -use crate::lightning::ln_storage::NetworkGraph; +use crate::lightning::ln_storage::{NetworkGraph, TrustedNodesShared}; use crate::utxo::rpc_clients::UtxoRpcClientEnum; use crate::utxo::utxo_common::{big_decimal_from_sat_unsigned, UtxoTxBuilder}; use crate::utxo::{sat_from_big_decimal, BlockchainNetwork, FeePolicy, GetUtxoListOps, UtxoTxGenerationOps}; @@ -110,6 +111,9 @@ pub struct LightningCoin { /// The mutex storing the addresses of the nodes that the lightning node has open channels with, /// these addresses are used for reconnecting. 
pub open_channels_nodes: NodesAddressesMapShared, + /// The mutex storing the public keys of the nodes that our lightning node trusts to allow 0 confirmation + /// inbound channels from. + pub trusted_nodes: TrustedNodesShared, } impl fmt::Debug for LightningCoin { @@ -700,6 +704,8 @@ pub async fn start_lightning( ) .await?; + let trusted_nodes = Arc::new(PaMutex::new(persister.get_trusted_nodes().await?)); + let events_abort_handlers = init_events_abort_handlers(platform.clone(), db.clone()).await?; // Initialize the event handler @@ -708,6 +714,7 @@ pub async fn start_lightning( channel_manager.clone(), keys_manager.clone(), db.clone(), + trusted_nodes.clone(), events_abort_handlers, )); @@ -778,6 +785,7 @@ pub async fn start_lightning( persister, db, open_channels_nodes, + trusted_nodes, }) } @@ -1658,3 +1666,43 @@ pub async fn get_claimable_balances( Ok(claimable_balances) } + +#[derive(Deserialize)] +pub struct AddTrustedNodeReq { + pub coin: String, + pub node_id: PublicKeyForRPC, +} + +pub async fn add_trusted_node(ctx: MmArc, req: AddTrustedNodeReq) -> TrustedNodeResult { + let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; + let ln_coin = match coin { + MmCoinEnum::LightningCoin(c) => c, + _ => return MmError::err(TrustedNodeError::UnsupportedCoin(coin.ticker().to_string())), + }; + + if ln_coin.trusted_nodes.lock().insert(req.node_id.into()) { + ln_coin.persister.save_trusted_nodes(ln_coin.trusted_nodes).await?; + } + + Ok("success".into()) +} + +#[derive(Deserialize)] +pub struct RemoveTrustedNodeReq { + pub coin: String, + pub node_id: PublicKeyForRPC, +} + +pub async fn remove_trusted_node(ctx: MmArc, req: RemoveTrustedNodeReq) -> TrustedNodeResult { + let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; + let ln_coin = match coin { + MmCoinEnum::LightningCoin(c) => c, + _ => return MmError::err(TrustedNodeError::UnsupportedCoin(coin.ticker().to_string())), + }; + + if ln_coin.trusted_nodes.lock().remove(&req.node_id.into()) { + 
ln_coin.persister.save_trusted_nodes(ln_coin.trusted_nodes).await?; + } + + Ok("success".into()) +} diff --git a/mm2src/coins/lightning/ln_errors.rs b/mm2src/coins/lightning/ln_errors.rs index fe3bb46237..c2671b6af5 100644 --- a/mm2src/coins/lightning/ln_errors.rs +++ b/mm2src/coins/lightning/ln_errors.rs @@ -23,6 +23,7 @@ pub type GetPaymentDetailsResult = Result> pub type CloseChannelResult = Result>; pub type ClaimableBalancesResult = Result>; pub type SaveChannelClosingResult = Result>; +pub type TrustedNodeResult = Result>; #[derive(Debug, Deserialize, Display, Serialize, SerializeErrorType)] #[serde(tag = "error_type", content = "error_data")] @@ -518,3 +519,36 @@ impl From for SaveChannelClosingError { impl From for SaveChannelClosingError { fn from(err: TryFromIntError) -> SaveChannelClosingError { SaveChannelClosingError::ConversionError(err) } } + +#[derive(Debug, Deserialize, Display, Serialize, SerializeErrorType)] +#[serde(tag = "error_type", content = "error_data")] +pub enum TrustedNodeError { + #[display(fmt = "Lightning network is not supported for {}", _0)] + UnsupportedCoin(String), + #[display(fmt = "No such coin {}", _0)] + NoSuchCoin(String), + #[display(fmt = "I/O error {}", _0)] + IOError(String), +} + +impl HttpStatusCode for TrustedNodeError { + fn status_code(&self) -> StatusCode { + match self { + TrustedNodeError::UnsupportedCoin(_) => StatusCode::BAD_REQUEST, + TrustedNodeError::NoSuchCoin(_) => StatusCode::PRECONDITION_REQUIRED, + TrustedNodeError::IOError(_) => StatusCode::INTERNAL_SERVER_ERROR, + } + } +} + +impl From for TrustedNodeError { + fn from(e: CoinFindError) -> Self { + match e { + CoinFindError::NoSuchCoin { coin } => TrustedNodeError::NoSuchCoin(coin), + } + } +} + +impl From for TrustedNodeError { + fn from(err: std::io::Error) -> TrustedNodeError { TrustedNodeError::IOError(err.to_string()) } +} diff --git a/mm2src/coins/lightning/ln_events.rs b/mm2src/coins/lightning/ln_events.rs index 8ea23ee873..1de481e079 100644 
--- a/mm2src/coins/lightning/ln_events.rs +++ b/mm2src/coins/lightning/ln_events.rs @@ -28,6 +28,7 @@ pub struct LightningEventHandler { channel_manager: Arc, keys_manager: Arc, db: SqliteLightningDB, + trusted_nodes: TrustedNodesShared, abort_handlers: Arc>>, } @@ -52,7 +53,7 @@ impl EventHandler for LightningEventHandler { payment_hash, amount_msat, purpose, - } => self.handle_payment_received(*payment_hash, *amount_msat, purpose), + } => self.handle_payment_received(payment_hash, *amount_msat, purpose), Event::PaymentSent { payment_preimage, @@ -239,6 +240,7 @@ impl LightningEventHandler { channel_manager: Arc, keys_manager: Arc, db: SqliteLightningDB, + trusted_nodes: TrustedNodesShared, abort_handlers: Arc>>, ) -> Self { LightningEventHandler { @@ -246,6 +248,7 @@ impl LightningEventHandler { channel_manager, keys_manager, db, + trusted_nodes, abort_handlers, } } @@ -297,16 +300,21 @@ impl LightningEventHandler { }); } - fn handle_payment_received(&self, payment_hash: PaymentHash, amount_msat: u64, purpose: &PaymentPurpose) { + fn handle_payment_received(&self, payment_hash: &PaymentHash, received_amount: u64, purpose: &PaymentPurpose) { info!( "Handling PaymentReceived event for payment_hash: {} with amount {}", hex::encode(payment_hash.0), - amount_msat + received_amount ); let payment_preimage = match purpose { PaymentPurpose::InvoicePayment { payment_preimage, .. } => match payment_preimage { Some(preimage) => *preimage, - None => return, + None => { + // Free the htlc immediately if we don't have the preimage required to claim the payment + // to allow for this inbound liquidity to be used for other inbound payments. 
+ self.channel_manager.fail_htlc_backwards(payment_hash); + return; + }, }, PaymentPurpose::SpontaneousPayment(preimage) => *preimage, }; @@ -535,14 +543,23 @@ impl LightningEventHandler { ); let db = self.db.clone(); + let trusted_nodes = self.trusted_nodes.clone(); let channel_manager = self.channel_manager.clone(); let platform = self.platform.clone(); spawn(async move { if let Ok(last_channel_rpc_id) = db.get_last_channel_rpc_id().await.error_log_passthrough() { let user_channel_id = last_channel_rpc_id as u64 + 1; - if channel_manager - .accept_inbound_channel(&temporary_channel_id, &counterparty_node_id, user_channel_id) - .is_ok() + if (trusted_nodes.lock().contains(&counterparty_node_id) + && channel_manager + .accept_inbound_channel_from_trusted_peer_0conf( + &temporary_channel_id, + &counterparty_node_id, + user_channel_id, + ) + .is_ok()) + || channel_manager + .accept_inbound_channel(&temporary_channel_id, &counterparty_node_id, user_channel_id) + .is_ok() { let is_public = match channel_manager .list_channels() diff --git a/mm2src/coins/lightning/ln_filesystem_persister.rs b/mm2src/coins/lightning/ln_filesystem_persister.rs index a9773dce6a..fcbea7914b 100644 --- a/mm2src/coins/lightning/ln_filesystem_persister.rs +++ b/mm2src/coins/lightning/ln_filesystem_persister.rs @@ -1,4 +1,5 @@ -use crate::lightning::ln_storage::{LightningStorage, NetworkGraph, NodesAddressesMap, NodesAddressesMapShared, Scorer}; +use crate::lightning::ln_storage::{LightningStorage, NetworkGraph, NodesAddressesMap, NodesAddressesMapShared, Scorer, + TrustedNodesShared}; use async_trait::async_trait; use bitcoin::blockdata::constants::genesis_block; use bitcoin::{BlockHash, Network, Txid}; @@ -12,7 +13,7 @@ use lightning::util::persist::KVStorePersister; use lightning::util::ser::{ReadableArgs, Writeable}; use mm2_io::fs::check_dir_operations; use secp256k1v22::PublicKey; -use std::collections::HashMap; +use std::collections::{HashMap, HashSet}; use std::fs; use 
std::io::{BufReader, BufWriter, Cursor}; use std::net::SocketAddr; @@ -71,6 +72,12 @@ impl LightningFilesystemPersister { path } + pub fn trusted_nodes_path(&self) -> PathBuf { + let mut path = self.main_path(); + path.push("trusted_nodes"); + path + } + pub fn manager_path(&self) -> PathBuf { let mut path = self.main_path(); path.push("manager"); @@ -370,4 +377,42 @@ impl LightningStorage for LightningFilesystemPersister { }) .await } + + async fn get_trusted_nodes(&self) -> Result, Self::Error> { + let path = self.trusted_nodes_path(); + if !path.exists() { + return Ok(HashSet::new()); + } + async_blocking(move || { + let file = fs::File::open(path)?; + let reader = BufReader::new(file); + let trusted_nodes: HashSet = + serde_json::from_reader(reader).map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?; + trusted_nodes + .iter() + .map(|pubkey_str| { + let pubkey = PublicKey::from_str(pubkey_str) + .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?; + Ok(pubkey) + }) + .collect() + }) + .await + } + + async fn save_trusted_nodes(&self, trusted_nodes: TrustedNodesShared) -> Result<(), Self::Error> { + let path = self.trusted_nodes_path(); + async_blocking(move || { + let trusted_nodes: HashSet = trusted_nodes.lock().iter().map(|pubkey| pubkey.to_string()).collect(); + + let file = fs::OpenOptions::new() + .create(true) + .write(true) + .truncate(true) + .open(path)?; + serde_json::to_writer(file, &trusted_nodes) + .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e)) + }) + .await + } } diff --git a/mm2src/coins/lightning/ln_storage.rs b/mm2src/coins/lightning/ln_storage.rs index 159ffdac7a..610720bc3f 100644 --- a/mm2src/coins/lightning/ln_storage.rs +++ b/mm2src/coins/lightning/ln_storage.rs @@ -5,12 +5,13 @@ use lightning::routing::gossip; use lightning::routing::scoring::ProbabilisticScorer; use parking_lot::Mutex as PaMutex; use secp256k1v22::PublicKey; -use std::collections::HashMap; +use 
std::collections::{HashMap, HashSet}; use std::net::SocketAddr; use std::sync::{Arc, Mutex}; pub type NodesAddressesMap = HashMap; pub type NodesAddressesMapShared = Arc>; +pub type TrustedNodesShared = Arc>>; pub type NetworkGraph = gossip::NetworkGraph>; pub type Scorer = Mutex, Arc>>; @@ -31,4 +32,8 @@ pub trait LightningStorage { async fn get_network_graph(&self, network: Network, logger: Arc) -> Result; async fn get_scorer(&self, network_graph: Arc, logger: Arc) -> Result; + + async fn get_trusted_nodes(&self) -> Result, Self::Error>; + + async fn save_trusted_nodes(&self, trusted_nodes: TrustedNodesShared) -> Result<(), Self::Error>; } From 4ffe1ebd8199998f4f76942c322a1e5c0f03c820 Mon Sep 17 00:00:00 2001 From: shamardy Date: Mon, 8 Aug 2022 19:08:28 +0200 Subject: [PATCH 09/33] Update to V0.0.108 complete, added more features: list_trusted_node RPC, finally can send payments in unit tests (due to 0 confs channels --- mm2src/coins/lightning.rs | 25 ++++ mm2src/coins/lightning/ln_conf.rs | 7 + .../lightning/ln_filesystem_persister.rs | 36 +++-- mm2src/coins/utxo/spv.rs | 1 + .../mm2_main/src/mm2_tests/lightning_tests.rs | 140 ++++++++++++++++-- .../mm2_main/src/rpc/dispatcher/dispatcher.rs | 4 + 6 files changed, 193 insertions(+), 20 deletions(-) diff --git a/mm2src/coins/lightning.rs b/mm2src/coins/lightning.rs index ab5de388ad..7c6a9c15e0 100644 --- a/mm2src/coins/lightning.rs +++ b/mm2src/coins/lightning.rs @@ -735,6 +735,7 @@ pub async fn start_lightning( scorer.clone(), logger.clone(), event_handler, + // Todo: Add option for choosing payment::Retry::Timeout instead of Attempts in LightningParams payment::Retry::Attempts(params.payment_retries.unwrap_or(5)), )); @@ -1706,3 +1707,27 @@ pub async fn remove_trusted_node(ctx: MmArc, req: RemoveTrustedNodeReq) -> Trust Ok("success".into()) } + +#[derive(Deserialize)] +pub struct ListTrustedNodesReq { + pub coin: String, +} + +#[derive(Serialize)] +pub struct ListTrustedNodesResponse { + trusted_nodes: Vec, +} 
+ +pub async fn list_trusted_node(ctx: MmArc, req: ListTrustedNodesReq) -> TrustedNodeResult { + let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; + let ln_coin = match coin { + MmCoinEnum::LightningCoin(c) => c, + _ => return MmError::err(TrustedNodeError::UnsupportedCoin(coin.ticker().to_string())), + }; + + let trusted_nodes = ln_coin.trusted_nodes.lock().clone(); + + Ok(ListTrustedNodesResponse { + trusted_nodes: trusted_nodes.into_iter().map(PublicKeyForRPC).collect(), + }) +} diff --git a/mm2src/coins/lightning/ln_conf.rs b/mm2src/coins/lightning/ln_conf.rs index a7ed4c3eac..685844c297 100644 --- a/mm2src/coins/lightning/ln_conf.rs +++ b/mm2src/coins/lightning/ln_conf.rs @@ -123,6 +123,9 @@ pub struct OurChannelsConfig { /// our real on-chain channel UTXO in each invoice and requiring that our counterparty only /// relay HTLCs to us using the channel's SCID alias. pub negotiate_scid_privacy: Option, + /// Sets the percentage of the channel value we will cap the total value of outstanding inbound + /// HTLCs to. 
+ pub max_inbound_in_flight_htlc_percent: Option, } impl From for ChannelHandshakeConfig { @@ -145,6 +148,10 @@ impl From for ChannelHandshakeConfig { channel_handshake_config.negotiate_scid_privacy = scid_privacy } + if let Some(max_inbound_htlc) = config.max_inbound_in_flight_htlc_percent { + channel_handshake_config.max_inbound_htlc_value_in_flight_percent_of_channel = max_inbound_htlc + } + channel_handshake_config } } diff --git a/mm2src/coins/lightning/ln_filesystem_persister.rs b/mm2src/coins/lightning/ln_filesystem_persister.rs index fcbea7914b..c01c0cf8fa 100644 --- a/mm2src/coins/lightning/ln_filesystem_persister.rs +++ b/mm2src/coins/lightning/ln_filesystem_persister.rs @@ -18,7 +18,7 @@ use std::fs; use std::io::{BufReader, BufWriter, Cursor}; use std::net::SocketAddr; use std::ops::Deref; -use std::path::{Path, PathBuf}; +use std::path::PathBuf; use std::str::FromStr; use std::sync::{Arc, Mutex}; @@ -84,6 +84,20 @@ impl LightningFilesystemPersister { path } + pub fn monitors_path(&self) -> PathBuf { + let mut path = self.main_path(); + path.push("monitors"); + path + } + + pub fn monitors_backup_path(&self) -> Option { + if let Some(mut backup_path) = self.backup_path() { + backup_path.push("monitors"); + return Some(backup_path); + } + None + } + /// Read `ChannelMonitor`s from disk. 
pub fn read_channelmonitors( &self, @@ -92,9 +106,8 @@ impl LightningFilesystemPersister { where K::Target: KeysInterface + Sized, { - let mut path = self.main_path(); - path.push("monitors"); - if !Path::new(&path).exists() { + let path = self.monitors_path(); + if !path.exists() { return Ok(Vec::new()); } let mut res = Vec::new(); @@ -102,6 +115,9 @@ impl LightningFilesystemPersister { let file = file_option.unwrap(); let owned_file_name = file.file_name(); let filename = owned_file_name.to_str(); + if filename.is_some() && filename.unwrap() == "checkval" { + continue; + } if filename.is_none() || !filename.unwrap().is_ascii() || filename.unwrap().len() < 65 { return Err(std::io::Error::new( std::io::ErrorKind::InvalidData, @@ -247,22 +263,24 @@ impl LightningStorage for LightningFilesystemPersister { type Error = std::io::Error; async fn init_fs(&self) -> Result<(), Self::Error> { - let path = self.main_path(); - let backup_path = self.backup_path(); + let path = self.monitors_path(); + let backup_path = self.monitors_backup_path(); async_blocking(move || { fs::create_dir_all(path.clone())?; if let Some(path) = backup_path { fs::create_dir_all(path.clone())?; check_dir_operations(&path)?; + check_dir_operations(path.parent().unwrap())?; } - check_dir_operations(&path) + check_dir_operations(&path)?; + check_dir_operations(path.parent().unwrap()) }) .await } async fn is_fs_initialized(&self) -> Result { - let dir_path = self.main_path(); - let backup_dir_path = self.backup_path(); + let dir_path = self.monitors_path(); + let backup_dir_path = self.monitors_backup_path(); async_blocking(move || { if !dir_path.exists() || backup_dir_path.as_ref().map(|path| !path.exists()).unwrap_or(false) { Ok(false) diff --git a/mm2src/coins/utxo/spv.rs b/mm2src/coins/utxo/spv.rs index caae118106..10fe17b0e0 100644 --- a/mm2src/coins/utxo/spv.rs +++ b/mm2src/coins/utxo/spv.rs @@ -40,6 +40,7 @@ impl SimplePaymentVerification for ElectrumClient { let (merkle_branch, header, height) 
= loop { if now_ms() / 1000 > try_spv_proof_until { + // Todo: find a way to not show this error when height is still 0 error!( "Waited too long until {} for transaction {:?} to validate spv proof", try_spv_proof_until, diff --git a/mm2src/mm2_main/src/mm2_tests/lightning_tests.rs b/mm2src/mm2_main/src/mm2_tests/lightning_tests.rs index 63392401a7..f5e67a4be5 100644 --- a/mm2src/mm2_main/src/mm2_tests/lightning_tests.rs +++ b/mm2src/mm2_main/src/mm2_tests/lightning_tests.rs @@ -7,7 +7,7 @@ const T_BTC_ELECTRUMS: &[&str] = &[ "electrum3.cipig.net:10068", ]; -fn start_lightning_nodes() -> (MarketMakerIt, MarketMakerIt, String, String) { +fn start_lightning_nodes(enable_0_confs: bool) -> (MarketMakerIt, MarketMakerIt, String, String) { let node_1_seed = "become nominee mountain person volume business diet zone govern voice debris hidden"; let node_2_seed = "february coast tortoise grab shadow vast volcano affair ordinary gesture brass oxygen"; @@ -40,7 +40,10 @@ fn start_lightning_nodes() -> (MarketMakerIt, MarketMakerIt, String, String) { "inbound_channels_confirmations": 1 }, "counterparty_channel_config_limits": { - "outbound_channels_confirmations": 1 + "outbound_channels_confirmations": 1, + // If true, this enables sending payments between the 2 nodes straight away without waiting for on-chain confirmations + // if the other node added this node as trusted. It also overrides "outbound_channels_confirmations". 
+ "allow_outbound_0conf": enable_0_confs }, "protocol": { "type": "LIGHTNING", @@ -74,7 +77,8 @@ fn start_lightning_nodes() -> (MarketMakerIt, MarketMakerIt, String, String) { let (_dump_log, _dump_dashboard) = mm_node_1.mm_dump(); log!("bob log path: {}", mm_node_1.log_path.display()); - let _electrum = block_on(enable_electrum(&mm_node_1, "tBTC-TEST-segwit", false, T_BTC_ELECTRUMS)); + let electrum = block_on(enable_electrum(&mm_node_1, "tBTC-TEST-segwit", false, T_BTC_ELECTRUMS)); + log!("Node 1 tBTC address: {}", electrum.address); let enable_lightning_1 = block_on(enable_lightning(&mm_node_1, "tBTC-TEST-lightning")); let node_1_address = enable_lightning_1["result"]["address"].as_str().unwrap().to_string(); @@ -96,7 +100,8 @@ fn start_lightning_nodes() -> (MarketMakerIt, MarketMakerIt, String, String) { let (_dump_log, _dump_dashboard) = mm_node_2.mm_dump(); log!("alice log path: {}", mm_node_2.log_path.display()); - let _electrum = block_on(enable_electrum(&mm_node_2, "tBTC-TEST-segwit", false, T_BTC_ELECTRUMS)); + let electrum = block_on(enable_electrum(&mm_node_2, "tBTC-TEST-segwit", false, T_BTC_ELECTRUMS)); + log!("Node 2 tBTC address: {}", electrum.address); let enable_lightning_2 = block_on(enable_lightning(&mm_node_2, "tBTC-TEST-lightning")); let node_2_address = enable_lightning_2["result"]["address"].as_str().unwrap().to_string(); @@ -184,7 +189,7 @@ fn test_enable_lightning() { #[test] #[cfg(not(target_arch = "wasm32"))] fn test_connect_to_lightning_node() { - let (mm_node_1, mm_node_2, node_1_id, _) = start_lightning_nodes(); + let (mm_node_1, mm_node_2, node_1_id, _) = start_lightning_nodes(false); let node_1_address = format!("{}@{}:9735", node_1_id, mm_node_1.ip.to_string()); let connect = block_on(mm_node_2.rpc(&json! ({ @@ -207,10 +212,11 @@ fn test_connect_to_lightning_node() { } #[test] +// This test is ignored because it requires refilling the tBTC addresses with test coins periodically. 
#[ignore] #[cfg(not(target_arch = "wasm32"))] fn test_open_channel() { - let (mm_node_1, mut mm_node_2, node_1_id, node_2_id) = start_lightning_nodes(); + let (mm_node_1, mut mm_node_2, node_1_id, node_2_id) = start_lightning_nodes(false); let node_1_address = format!("{}@{}:9735", node_1_id, mm_node_1.ip.to_string()); let open_channel = block_on(mm_node_2.rpc(&json! ({ @@ -221,8 +227,8 @@ fn test_open_channel() { "coin": "tBTC-TEST-lightning", "node_address": node_1_address, "amount": { - "type":"Exact", - "value":0.00002, + "type": "Exact", + "value": 0.0002, }, }, }))) @@ -234,7 +240,7 @@ fn test_open_channel() { let list_channels_node_1 = block_on(mm_node_1.rpc(&json! ({ "userpass": mm_node_1.userpass, "mmrpc": "2.0", - "method": "list_channels", + "method": "list_open_channels_by_filter", "params": { "coin": "tBTC-TEST-lightning", }, @@ -263,7 +269,7 @@ fn test_open_channel() { let list_channels_node_2 = block_on(mm_node_2.rpc(&json! ({ "userpass": mm_node_2.userpass, "mmrpc": "2.0", - "method": "list_channels", + "method": "list_open_channels_by_filter", "params": { "coin": "tBTC-TEST-lightning", }, @@ -285,13 +291,125 @@ fn test_open_channel() { ); assert_eq!( list_channels_node_2_res["result"]["open_channels"][0]["balance_msat"], - 2000000 + 20000000 ); block_on(mm_node_1.stop()).unwrap(); block_on(mm_node_2.stop()).unwrap(); } +#[test] +// This test is ignored because it requires refilling the tBTC addresses with test coins periodically. +#[ignore] +#[cfg(not(target_arch = "wasm32"))] +// This also tests 0_confs_channels +fn test_send_payment() { + let (mut mm_node_2, mm_node_1, node_2_id, node_1_id) = start_lightning_nodes(true); + let node_1_address = format!("{}@{}:9735", node_1_id, mm_node_1.ip.to_string()); + + let add_trusted_node = block_on(mm_node_1.rpc(&json! 
({ + "userpass": mm_node_1.userpass, + "mmrpc": "2.0", + "method": "add_trusted_node", + "params": { + "coin": "tBTC-TEST-lightning", + "node_id": node_2_id + }, + }))) + .unwrap(); + assert!(add_trusted_node.0.is_success(), "!open_channel: {}", add_trusted_node.1); + + let open_channel = block_on(mm_node_2.rpc(&json! ({ + "userpass": mm_node_2.userpass, + "mmrpc": "2.0", + "method": "open_channel", + "params": { + "coin": "tBTC-TEST-lightning", + "node_address": node_1_address, + "amount": { + "type": "Exact", + "value": 0.0002, + }, + }, + }))) + .unwrap(); + assert!(open_channel.0.is_success(), "!open_channel: {}", open_channel.1); + + block_on(mm_node_2.wait_for_log(60., |log| log.contains("Received message ChannelReady"))).unwrap(); + + let send_payment = block_on(mm_node_2.rpc(&json! ({ + "userpass": mm_node_2.userpass, + "mmrpc": "2.0", + "method": "send_payment", + "params": { + "coin": "tBTC-TEST-lightning", + "payment": { + "type": "keysend", + "destination": node_1_id, + "amount_in_msat": 1000, + "expiry": 24 + } + }, + }))) + .unwrap(); + assert!(send_payment.0.is_success(), "!send_payment: {}", send_payment.1); + + let send_payment_res: Json = json::from_str(&send_payment.1).unwrap(); + log!("send_payment_res {:?}", send_payment_res); + let payment_hash = send_payment_res["result"]["payment_hash"].as_str().unwrap(); + + block_on(mm_node_2.wait_for_log(60., |log| log.contains("Successfully sent payment"))).unwrap(); + + // Check payment on the sending node side + let get_payment_details = block_on(mm_node_2.rpc(&json! 
({ + "userpass": mm_node_2.userpass, + "mmrpc": "2.0", + "method": "get_payment_details", + "params": { + "coin": "tBTC-TEST-lightning", + "payment_hash": payment_hash + }, + }))) + .unwrap(); + assert!( + get_payment_details.0.is_success(), + "!get_payment_details: {}", + get_payment_details.1 + ); + + let get_payment_details_res: Json = json::from_str(&get_payment_details.1).unwrap(); + let payment = &get_payment_details_res["result"]["payment_details"]; + assert_eq!(payment["status"], "succeeded"); + assert_eq!(payment["amount_in_msat"], 1000); + assert_eq!(payment["payment_type"]["type"], "Outbound Payment"); + + // Check payment on the receiving node side + let get_payment_details = block_on(mm_node_1.rpc(&json! ({ + "userpass": mm_node_1.userpass, + "mmrpc": "2.0", + "method": "get_payment_details", + "params": { + "coin": "tBTC-TEST-lightning", + "payment_hash": payment_hash + }, + }))) + .unwrap(); + assert!( + get_payment_details.0.is_success(), + "!get_payment_details: {}", + get_payment_details.1 + ); + + let get_payment_details_res: Json = json::from_str(&get_payment_details.1).unwrap(); + let payment = &get_payment_details_res["result"]["payment_details"]; + assert_eq!(payment["status"], "succeeded"); + assert_eq!(payment["amount_in_msat"], 1000); + assert_eq!(payment["payment_type"]["type"], "Inbound Payment"); + + block_on(mm_node_1.stop()).unwrap(); + block_on(mm_node_2.stop()).unwrap(); +} + #[test] #[cfg(not(target_arch = "wasm32"))] fn test_sign_verify_message_lightning() { diff --git a/mm2src/mm2_main/src/rpc/dispatcher/dispatcher.rs b/mm2src/mm2_main/src/rpc/dispatcher/dispatcher.rs index f202c4b9f2..97c3f1dfdf 100644 --- a/mm2src/mm2_main/src/rpc/dispatcher/dispatcher.rs +++ b/mm2src/mm2_main/src/rpc/dispatcher/dispatcher.rs @@ -9,6 +9,7 @@ use crate::{mm2::lp_stats::{add_node_to_version_stat, remove_node_from_version_s mm2::rpc::lp_commands::{get_public_key, get_public_key_hash}}; use coins::eth::EthCoin; use 
coins::hd_wallet::get_new_address; +use coins::lightning::{add_trusted_node, list_trusted_node, remove_trusted_node}; use coins::my_tx_history_v2::my_tx_history_v2_rpc; use coins::rpc_command::account_balance::account_balance; use coins::rpc_command::get_current_mtp::get_current_mtp_rpc; @@ -170,6 +171,7 @@ async fn dispatcher_v2(request: MmRpcRequest, ctx: MmArc) -> DispatcherResult handle_mmrpc(ctx, request, withdraw_user_action).await, #[cfg(not(target_arch = "wasm32"))] native_only_methods => match native_only_methods { + "add_trusted_node" => handle_mmrpc(ctx, request, add_trusted_node).await, "close_channel" => handle_mmrpc(ctx, request, close_channel).await, "connect_to_lightning_node" => handle_mmrpc(ctx, request, connect_to_lightning_node).await, "enable_lightning" => handle_mmrpc(ctx, request, enable_l2::).await, @@ -183,7 +185,9 @@ async fn dispatcher_v2(request: MmRpcRequest, ctx: MmArc) -> DispatcherResult handle_mmrpc(ctx, request, list_closed_channels_by_filter).await, "list_open_channels_by_filter" => handle_mmrpc(ctx, request, list_open_channels_by_filter).await, "list_payments_by_filter" => handle_mmrpc(ctx, request, list_payments_by_filter).await, + "list_trusted_node" => handle_mmrpc(ctx, request, list_trusted_node).await, "open_channel" => handle_mmrpc(ctx, request, open_channel).await, + "remove_trusted_node" => handle_mmrpc(ctx, request, remove_trusted_node).await, "send_payment" => handle_mmrpc(ctx, request, send_payment).await, "enable_solana_with_tokens" => { handle_mmrpc(ctx, request, enable_platform_coin_with_tokens::).await From b806a434e6fb2f9adb5a75d0fc827cbc18a49315 Mon Sep 17 00:00:00 2001 From: shamardy Date: Mon, 8 Aug 2022 19:16:59 +0200 Subject: [PATCH 10/33] fix wasm compilation --- mm2src/mm2_main/src/rpc/dispatcher/dispatcher.rs | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/mm2src/mm2_main/src/rpc/dispatcher/dispatcher.rs b/mm2src/mm2_main/src/rpc/dispatcher/dispatcher.rs index 
97c3f1dfdf..b99941eb83 100644 --- a/mm2src/mm2_main/src/rpc/dispatcher/dispatcher.rs +++ b/mm2src/mm2_main/src/rpc/dispatcher/dispatcher.rs @@ -9,7 +9,6 @@ use crate::{mm2::lp_stats::{add_node_to_version_stat, remove_node_from_version_s mm2::rpc::lp_commands::{get_public_key, get_public_key_hash}}; use coins::eth::EthCoin; use coins::hd_wallet::get_new_address; -use coins::lightning::{add_trusted_node, list_trusted_node, remove_trusted_node}; use coins::my_tx_history_v2::my_tx_history_v2_rpc; use coins::rpc_command::account_balance::account_balance; use coins::rpc_command::get_current_mtp::get_current_mtp_rpc; @@ -37,9 +36,9 @@ use serde_json::{self as json, Value as Json}; use std::net::SocketAddr; cfg_native! { - use coins::lightning::{close_channel, connect_to_lightning_node, generate_invoice, get_channel_details, - get_claimable_balances, get_payment_details, list_closed_channels_by_filter, list_open_channels_by_filter, list_payments_by_filter, open_channel, - send_payment, LightningCoin}; + use coins::lightning::{add_trusted_node, close_channel, connect_to_lightning_node, generate_invoice, get_channel_details, + get_claimable_balances, get_payment_details, list_closed_channels_by_filter, list_open_channels_by_filter, + list_payments_by_filter, list_trusted_node, open_channel, remove_trusted_node, send_payment, LightningCoin}; use coins::{SolanaCoin, SplToken}; use coins::z_coin::ZCoin; } From 37efa37c24fab44fe30f8957117ab75bbed2306b Mon Sep 17 00:00:00 2001 From: shamardy Date: Tue, 9 Aug 2022 22:43:13 +0200 Subject: [PATCH 11/33] update rust-lightning to v0.0.110 wip --- Cargo.lock | 20 ++++++++++---------- mm2src/coins/Cargo.toml | 10 +++++----- mm2src/coins/lightning.rs | 16 ++++++++++++---- mm2src/coins/lightning/ln_conf.rs | 28 ++++++++++------------------ mm2src/coins/lightning/ln_events.rs | 15 +++++++++++++++ mm2src/common/Cargo.toml | 2 +- 6 files changed, 53 insertions(+), 38 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 
db798f5994..c86b668a9f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3717,18 +3717,18 @@ dependencies = [ [[package]] name = "lightning" -version = "0.0.108" +version = "0.0.110" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d885bf509066af86ae85354c8959028ad6192c22a2657ef8271e94029d30f9d0" +checksum = "2dce6da860338d5a9ddc3fd42432465310cfab93b342bbd23b41b7c1f7c509d3" dependencies = [ "bitcoin", ] [[package]] name = "lightning-background-processor" -version = "0.0.108" +version = "0.0.110" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ba6fcb3cef50ae1027a89b40f847b771e831fad843673a350586e29b01b618b" +checksum = "e8de9d0de42bb933ffb8d33c6b0a75302f08b35126bfc74398ba01ad0c201f8d" dependencies = [ "bitcoin", "lightning", @@ -3737,9 +3737,9 @@ dependencies = [ [[package]] name = "lightning-invoice" -version = "0.16.0" +version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eaafc1cebaf9ea8d2a57e60aae9fe6095554b8305714f8452cd8a20a3aa5b7ba" +checksum = "32aa02b7fd0bd95e40b6ca8d9d9232b162d5e23b41bd2bc42abe9e9c78d34d72" dependencies = [ "bech32", "bitcoin_hashes", @@ -3750,9 +3750,9 @@ dependencies = [ [[package]] name = "lightning-net-tokio" -version = "0.0.108" +version = "0.0.110" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f0170619152c4d6b947d5ed0de427b85691482a293e0cae52d4336a2220a776" +checksum = "ce57d093fbc643835bc64c0501b52a3531d2511dcb1237d0495d68fea3adc47d" dependencies = [ "bitcoin", "lightning", @@ -3761,9 +3761,9 @@ dependencies = [ [[package]] name = "lightning-rapid-gossip-sync" -version = "0.0.108" +version = "0.0.110" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08b9947e17c7b97bb267bf3fe6bd51493c1b03a104ab81b246ef3f3ac9077ed9" +checksum = "391732631b14f7a1d9dc84dc3f644484d9b73190a31087b3856505cf0525bea0" dependencies = [ "bitcoin", "lightning", diff --git a/mm2src/coins/Cargo.toml 
b/mm2src/coins/Cargo.toml index 8c232a87c1..57336ae7cb 100644 --- a/mm2src/coins/Cargo.toml +++ b/mm2src/coins/Cargo.toml @@ -96,11 +96,11 @@ web-sys = { version = "0.3.55", features = ["console", "Headers", "Request", "Re [target.'cfg(not(target_arch = "wasm32"))'.dependencies] dirs = { version = "1" } bitcoin = "0.28.1" -lightning = "0.0.108" -lightning-background-processor = "0.0.108" -lightning-invoice = "0.16.0" -lightning-net-tokio = "0.0.108" -lightning-rapid-gossip-sync = "0.0.108" +lightning = "0.0.110" +lightning-background-processor = "0.0.110" +lightning-invoice = "0.18.0" +lightning-net-tokio = "0.0.110" +lightning-rapid-gossip-sync = "0.0.110" rust-ini = { version = "0.13" } rustls = { version = "0.20", features = ["dangerous_configuration"] } secp256k1v22 = { version = "0.22", package = "secp256k1" } diff --git a/mm2src/coins/lightning.rs b/mm2src/coins/lightning.rs index 7c6a9c15e0..9c45dda38e 100644 --- a/mm2src/coins/lightning.rs +++ b/mm2src/coins/lightning.rs @@ -843,6 +843,8 @@ pub struct OpenChannelRequest { pub channel_options: Option, pub counterparty_locktime: Option, pub our_htlc_minimum_msat: Option, + pub commit_upfront_shutdown_pubkey: Option, + pub announce_channel: Option, } #[derive(Serialize)] @@ -912,10 +914,16 @@ pub async fn open_channel(ctx: MmArc, req: OpenChannelRequest) -> OpenChannelRes let mut user_config: UserConfig = conf.into(); if let Some(locktime) = req.counterparty_locktime { - user_config.own_channel_config.our_to_self_delay = locktime; + user_config.channel_handshake_config.our_to_self_delay = locktime; } if let Some(min) = req.our_htlc_minimum_msat { - user_config.own_channel_config.our_htlc_minimum_msat = min; + user_config.channel_handshake_config.our_htlc_minimum_msat = min; + } + if let Some(commit) = req.commit_upfront_shutdown_pubkey { + user_config.channel_handshake_config.commit_upfront_shutdown_pubkey = commit; + } + if let Some(announce) = req.announce_channel { + 
user_config.channel_handshake_config.announced_channel = announce; } let rpc_channel_id = ln_coin.db.get_last_channel_rpc_id().await? as u64 + 1; @@ -937,7 +945,7 @@ pub async fn open_channel(ctx: MmArc, req: OpenChannelRequest) -> OpenChannelRes temp_channel_id, node_pubkey, true, - user_config.channel_options.announced_channel, + user_config.channel_handshake_config.announced_channel, ); // Saving node data to reconnect to it on restart @@ -1535,7 +1543,7 @@ pub async fn close_channel(ctx: MmArc, req: CloseChannelReq) -> CloseChannelResu async_blocking(move || { ln_coin .channel_manager - .force_close_channel(&channel_id, &counterparty_node_id) + .force_close_broadcasting_latest_txn(&channel_id, &counterparty_node_id) .map_to_mm(|e| CloseChannelError::CloseChannelError(format!("{:?}", e))) }) .await?; diff --git a/mm2src/coins/lightning/ln_conf.rs b/mm2src/coins/lightning/ln_conf.rs index 685844c297..d07f441744 100644 --- a/mm2src/coins/lightning/ln_conf.rs +++ b/mm2src/coins/lightning/ln_conf.rs @@ -24,9 +24,6 @@ pub struct ChannelOptions { /// excess of proportional_fee_in_millionths_sats. pub base_fee_msat: Option, pub cltv_expiry_delta: Option, - /// Set to announce the channel publicly and notify all nodes that they can route via this - /// channel. - pub announced_channel: Option, /// When set, we commit to an upfront shutdown_pubkey at channel open. 
pub commit_upfront_shutdown_pubkey: Option, /// Limit our total exposure to in-flight HTLCs which are burned to fees as they are too @@ -51,10 +48,6 @@ impl ChannelOptions { self.cltv_expiry_delta = Some(expiry); } - if let Some(announce) = options.announced_channel { - self.announced_channel = Some(announce); - } - if let Some(commit) = options.commit_upfront_shutdown_pubkey { self.commit_upfront_shutdown_pubkey = Some(commit); } @@ -85,14 +78,6 @@ impl From for ChannelConfig { channel_config.cltv_expiry_delta = expiry; } - if let Some(announce) = options.announced_channel { - channel_config.announced_channel = announce; - } - - if let Some(commit) = options.commit_upfront_shutdown_pubkey { - channel_config.commit_upfront_shutdown_pubkey = commit; - } - if let Some(dust) = options.max_dust_htlc_exposure_msat { channel_config.max_dust_htlc_exposure_msat = dust; } @@ -126,6 +111,9 @@ pub struct OurChannelsConfig { /// Sets the percentage of the channel value we will cap the total value of outstanding inbound /// HTLCs to. pub max_inbound_in_flight_htlc_percent: Option, + /// Set to announce the channel publicly and notify all nodes that they can route via this + /// channel. 
+ pub announced_channel: Option, } impl From for ChannelHandshakeConfig { @@ -152,6 +140,10 @@ impl From for ChannelHandshakeConfig { channel_handshake_config.max_inbound_htlc_value_in_flight_percent_of_channel = max_inbound_htlc } + if let Some(announce) = config.announced_channel { + channel_handshake_config.announced_channel = announce; + } + channel_handshake_config } } @@ -251,13 +243,13 @@ impl From for UserConfig { fn from(conf: LightningCoinConf) -> Self { let mut user_config = UserConfig::default(); if let Some(config) = conf.our_channels_config { - user_config.own_channel_config = config.into(); + user_config.channel_handshake_config = config.into(); } if let Some(limits) = conf.counterparty_channel_config_limits { - user_config.peer_channel_config_limits = limits.into(); + user_config.channel_handshake_limits = limits.into(); } if let Some(options) = conf.channel_options { - user_config.channel_options = options.into(); + user_config.channel_config = options.into(); } if let Some(accept_forwards) = conf.accept_forwards_to_priv_channels { user_config.accept_forwards_to_priv_channels = accept_forwards; diff --git a/mm2src/coins/lightning/ln_events.rs b/mm2src/coins/lightning/ln_events.rs index 1de481e079..c04c4e88af 100644 --- a/mm2src/coins/lightning/ln_events.rs +++ b/mm2src/coins/lightning/ln_events.rs @@ -128,6 +128,21 @@ impl EventHandler for LightningEventHandler { push_msat, channel_type: _, } => self.handle_open_channel_request(*temporary_channel_id, *counterparty_node_id, *funding_satoshis, *push_msat), + + // Todo + Event::HTLCHandlingFailed { + prev_channel_id, failed_next_destination + } => error!( + "Failed to handle htlc from {} to {:?}", + hex::encode(prev_channel_id), + failed_next_destination, + ), + + // Todo + Event::ProbeSuccessful { .. } => (), + + // Todo + Event::ProbeFailed { .. 
} => (), } } } diff --git a/mm2src/common/Cargo.toml b/mm2src/common/Cargo.toml index 4c55f4227b..cab82c8a7e 100644 --- a/mm2src/common/Cargo.toml +++ b/mm2src/common/Cargo.toml @@ -65,7 +65,7 @@ hyper = { version = "0.14.11", features = ["client", "http2", "server", "tcp"] } # got "invalid certificate: UnknownIssuer" for https://ropsten.infura.io on iOS using default-features hyper-rustls = { version = "0.23", default-features = false, features = ["http1", "http2", "webpki-tokio"] } libc = { version = "0.2" } -lightning = "0.0.108" +lightning = "0.0.110" log4rs = { version = "1.0", default-features = false, features = ["console_appender", "pattern_encoder"] } metrics = { version = "0.12" } metrics-runtime = { version = "0.13", default-features = false, features = ["metrics-observer-prometheus"] } From c366a333c5d80700cafabce0b80b1e762cd7d1e9 Mon Sep 17 00:00:00 2001 From: shamardy Date: Wed, 10 Aug 2022 14:45:12 +0200 Subject: [PATCH 12/33] update rust-lightning to v0.0.110 wip, remove InvoiceForRPC now that Invoice serialization/deserialization is supported --- Cargo.lock | 1 + mm2src/coins/Cargo.toml | 2 +- mm2src/coins/lightning.rs | 10 ++-- mm2src/coins/lightning/ln_serialization.rs | 58 ---------------------- 4 files changed, 7 insertions(+), 64 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c86b668a9f..51056bb588 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3746,6 +3746,7 @@ dependencies = [ "lightning", "num-traits", "secp256k1 0.22.1", + "serde", ] [[package]] diff --git a/mm2src/coins/Cargo.toml b/mm2src/coins/Cargo.toml index 57336ae7cb..751ea8dfa4 100644 --- a/mm2src/coins/Cargo.toml +++ b/mm2src/coins/Cargo.toml @@ -98,7 +98,7 @@ dirs = { version = "1" } bitcoin = "0.28.1" lightning = "0.0.110" lightning-background-processor = "0.0.110" -lightning-invoice = "0.18.0" +lightning-invoice = { version = "0.18.0", features = ["serde"] } lightning-net-tokio = "0.0.110" lightning-rapid-gossip-sync = "0.0.110" rust-ini = { version = "0.13" } diff --git 
a/mm2src/coins/lightning.rs b/mm2src/coins/lightning.rs index 9c45dda38e..10d389f72f 100644 --- a/mm2src/coins/lightning.rs +++ b/mm2src/coins/lightning.rs @@ -62,7 +62,7 @@ use ln_events::LightningEventHandler; use ln_filesystem_persister::LightningFilesystemPersister; use ln_p2p::{connect_to_node, ConnectToNodeRes, PeerManager}; use ln_platform::{h256_json_from_txid, Platform}; -use ln_serialization::{InvoiceForRPC, NodeAddress}; +use ln_serialization::NodeAddress; use ln_storage::{LightningStorage, NodesAddressesMapShared, Scorer}; use ln_utils::{ChainMonitor, ChannelManager}; use mm2_core::mm_ctx::MmArc; @@ -1229,7 +1229,7 @@ pub struct GenerateInvoiceRequest { #[derive(Serialize)] pub struct GenerateInvoiceResponse { payment_hash: H256Json, - invoice: InvoiceForRPC, + invoice: Invoice, } /// Generates an invoice (request for payment) that can be paid on the lightning network by another node using send_payment. @@ -1286,7 +1286,7 @@ pub async fn generate_invoice( ln_coin.db.add_or_update_payment_in_db(payment_info).await?; Ok(GenerateInvoiceResponse { payment_hash: payment_hash.into(), - invoice: invoice.into(), + invoice, }) } @@ -1294,7 +1294,7 @@ pub async fn generate_invoice( #[serde(tag = "type")] pub enum Payment { #[serde(rename = "invoice")] - Invoice { invoice: InvoiceForRPC }, + Invoice { invoice: Invoice }, #[serde(rename = "keysend")] Keysend { // The recieving node pubkey (node ID) @@ -1335,7 +1335,7 @@ pub async fn send_payment(ctx: MmArc, req: SendPaymentReq) -> SendPaymentResult< )); } let payment_info = match req.payment { - Payment::Invoice { invoice } => ln_coin.pay_invoice(invoice.into()).await?, + Payment::Invoice { invoice } => ln_coin.pay_invoice(invoice).await?, Payment::Keysend { destination, amount_in_msat, diff --git a/mm2src/coins/lightning/ln_serialization.rs b/mm2src/coins/lightning/ln_serialization.rs index dce582977f..01b99d0bd0 100644 --- a/mm2src/coins/lightning/ln_serialization.rs +++ 
b/mm2src/coins/lightning/ln_serialization.rs @@ -1,51 +1,9 @@ -use lightning_invoice::Invoice; use secp256k1v22::PublicKey; use serde::{de, Serialize, Serializer}; use std::fmt; use std::net::{SocketAddr, ToSocketAddrs}; use std::str::FromStr; -#[derive(Clone, Debug, PartialEq)] -pub struct InvoiceForRPC(Invoice); - -impl From for InvoiceForRPC { - fn from(i: Invoice) -> Self { InvoiceForRPC(i) } -} - -impl From for Invoice { - fn from(i: InvoiceForRPC) -> Self { i.0 } -} - -impl Serialize for InvoiceForRPC { - fn serialize(&self, serializer: S) -> Result { - serializer.serialize_str(&self.0.to_string()) - } -} - -impl<'de> de::Deserialize<'de> for InvoiceForRPC { - fn deserialize>(deserializer: D) -> Result { - struct InvoiceForRPCVisitor; - - impl<'de> de::Visitor<'de> for InvoiceForRPCVisitor { - type Value = InvoiceForRPC; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - write!(formatter, "a lightning invoice") - } - - fn visit_str(self, v: &str) -> Result { - let invoice = Invoice::from_str(v).map_err(|e| { - let err = format!("Could not parse lightning invoice from str {}, err {}", v, e); - de::Error::custom(err) - })?; - Ok(InvoiceForRPC(invoice)) - } - } - - deserializer.deserialize_str(InvoiceForRPCVisitor) - } -} - // TODO: support connection to onion addresses #[derive(Debug, PartialEq)] pub struct NodeAddress { @@ -141,22 +99,6 @@ mod tests { use super::*; use serde_json as json; - #[test] - fn test_invoice_for_rpc_serialize() { - let invoice_for_rpc = InvoiceForRPC(str::parse::("lntb20u1p3zqmvrpp52hej7trefx6y633aujj6nltjs8cf7lzyp78tfn5y5wpa3udk5tvqdp8xys9xcmpd3sjqsmgd9czq3njv9c8qatrvd5kumcxqrrsscqp79qy9qsqsp5ccy2qgmptg8dthxsjvw2c43uyvqkg6cqey3jpks4xf0tv7xfrqrq3xfnuffau2h2k8defphv2xsktzn2qj5n2l8d9l9zx64fg6jcmdg9kmpevneyyhfnzrpspqdrky8u7l4c6qdnquh8lnevswwrtcd9ypcq89ga09").unwrap()); - let expected = 
r#""lntb20u1p3zqmvrpp52hej7trefx6y633aujj6nltjs8cf7lzyp78tfn5y5wpa3udk5tvqdp8xys9xcmpd3sjqsmgd9czq3njv9c8qatrvd5kumcxqrrsscqp79qy9qsqsp5ccy2qgmptg8dthxsjvw2c43uyvqkg6cqey3jpks4xf0tv7xfrqrq3xfnuffau2h2k8defphv2xsktzn2qj5n2l8d9l9zx64fg6jcmdg9kmpevneyyhfnzrpspqdrky8u7l4c6qdnquh8lnevswwrtcd9ypcq89ga09""#; - let actual = json::to_string(&invoice_for_rpc).unwrap(); - assert_eq!(expected, actual); - } - - #[test] - fn test_invoice_for_rpc_deserialize() { - let invoice_for_rpc = r#""lntb20u1p3zqmvrpp52hej7trefx6y633aujj6nltjs8cf7lzyp78tfn5y5wpa3udk5tvqdp8xys9xcmpd3sjqsmgd9czq3njv9c8qatrvd5kumcxqrrsscqp79qy9qsqsp5ccy2qgmptg8dthxsjvw2c43uyvqkg6cqey3jpks4xf0tv7xfrqrq3xfnuffau2h2k8defphv2xsktzn2qj5n2l8d9l9zx64fg6jcmdg9kmpevneyyhfnzrpspqdrky8u7l4c6qdnquh8lnevswwrtcd9ypcq89ga09""#; - let expected = InvoiceForRPC(str::parse::("lntb20u1p3zqmvrpp52hej7trefx6y633aujj6nltjs8cf7lzyp78tfn5y5wpa3udk5tvqdp8xys9xcmpd3sjqsmgd9czq3njv9c8qatrvd5kumcxqrrsscqp79qy9qsqsp5ccy2qgmptg8dthxsjvw2c43uyvqkg6cqey3jpks4xf0tv7xfrqrq3xfnuffau2h2k8defphv2xsktzn2qj5n2l8d9l9zx64fg6jcmdg9kmpevneyyhfnzrpspqdrky8u7l4c6qdnquh8lnevswwrtcd9ypcq89ga09").unwrap()); - let actual = json::from_str(invoice_for_rpc).unwrap(); - assert_eq!(expected, actual); - } - #[test] fn test_node_address_serialize() { let node_address = NodeAddress { From 5552dda6f45825e8d25033b9316b7dbb853d8a17 Mon Sep 17 00:00:00 2001 From: shamardy Date: Wed, 10 Aug 2022 17:28:59 +0200 Subject: [PATCH 13/33] update rust-lightning to v0.0.110 wip: refactor some code due to update --- mm2src/coins/lightning.rs | 31 ++++--------- mm2src/coins/lightning/ln_conf.rs | 66 ++++++++++++++++++++------- mm2src/coins/lightning/ln_events.rs | 8 ++-- mm2src/coins/lightning/ln_platform.rs | 6 +-- 4 files changed, 65 insertions(+), 46 deletions(-) diff --git a/mm2src/coins/lightning.rs b/mm2src/coins/lightning.rs index 10d389f72f..13aaf88496 100644 --- a/mm2src/coins/lightning.rs +++ b/mm2src/coins/lightning.rs @@ -11,6 +11,7 @@ mod ln_storage; mod ln_utils; use 
super::{lp_coinfind_or_err, DerivationMethod, MmCoinEnum}; +use crate::lightning::ln_conf::OurChannelsConfigs; use crate::lightning::ln_errors::{TrustedNodeError, TrustedNodeResult}; use crate::lightning::ln_events::init_events_abort_handlers; use crate::lightning::ln_serialization::PublicKeyForRPC; @@ -48,7 +49,6 @@ use lightning_background_processor::{BackgroundProcessor, GossipSync}; use lightning_invoice::payment; use lightning_invoice::utils::{create_invoice_from_channelmanager, DefaultRouter}; use lightning_invoice::{Invoice, InvoiceDescription}; -use lightning_rapid_gossip_sync::RapidGossipSync; use ln_conf::{ChannelOptions, LightningCoinConf, LightningProtocolConf, PlatformCoinConfirmationTargets}; use ln_db::{ClosedChannelsFilter, DBChannelDetails, DBPaymentInfo, DBPaymentsFilter, HTLCStatus, LightningDB, PaymentType}; @@ -739,9 +739,6 @@ pub async fn start_lightning( payment::Retry::Attempts(params.payment_retries.unwrap_or(5)), )); - let p2p_gossip_sync = - GossipSync::<_, Arc, Arc>>, _, _, _>::P2P(gossip_sync.clone()); - // Start Background Processing. Runs tasks periodically in the background to keep LN node operational. // InvoicePayer will act as our event handler as it handles some of the payments related events before // delegating it to LightningEventHandler. 
@@ -751,7 +748,7 @@ pub async fn start_lightning( invoice_payer.clone(), chain_monitor.clone(), channel_manager.clone(), - p2p_gossip_sync, + GossipSync::p2p(gossip_sync), peer_manager.clone(), logger, Some(scorer), @@ -841,10 +838,7 @@ pub struct OpenChannelRequest { #[serde(default)] pub push_msat: u64, pub channel_options: Option, - pub counterparty_locktime: Option, - pub our_htlc_minimum_msat: Option, - pub commit_upfront_shutdown_pubkey: Option, - pub announce_channel: Option, + pub channel_configs: Option, } #[derive(Serialize)] @@ -911,20 +905,13 @@ pub async fn open_channel(ctx: MmArc, req: OpenChannelRequest) -> OpenChannelRes None => conf.channel_options = Some(options), } } - - let mut user_config: UserConfig = conf.into(); - if let Some(locktime) = req.counterparty_locktime { - user_config.channel_handshake_config.our_to_self_delay = locktime; - } - if let Some(min) = req.our_htlc_minimum_msat { - user_config.channel_handshake_config.our_htlc_minimum_msat = min; - } - if let Some(commit) = req.commit_upfront_shutdown_pubkey { - user_config.channel_handshake_config.commit_upfront_shutdown_pubkey = commit; - } - if let Some(announce) = req.announce_channel { - user_config.channel_handshake_config.announced_channel = announce; + if let Some(configs) = req.channel_configs { + match conf.our_channels_configs.as_mut() { + Some(o) => o.update(configs), + None => conf.our_channels_configs = Some(configs), + } } + let user_config: UserConfig = conf.into(); let rpc_channel_id = ln_coin.db.get_last_channel_rpc_id().await? as u64 + 1; diff --git a/mm2src/coins/lightning/ln_conf.rs b/mm2src/coins/lightning/ln_conf.rs index d07f441744..8a5ac5c396 100644 --- a/mm2src/coins/lightning/ln_conf.rs +++ b/mm2src/coins/lightning/ln_conf.rs @@ -24,8 +24,6 @@ pub struct ChannelOptions { /// excess of proportional_fee_in_millionths_sats. pub base_fee_msat: Option, pub cltv_expiry_delta: Option, - /// When set, we commit to an upfront shutdown_pubkey at channel open. 
- pub commit_upfront_shutdown_pubkey: Option, /// Limit our total exposure to in-flight HTLCs which are burned to fees as they are too /// small to claim on-chain. pub max_dust_htlc_exposure_msat: Option, @@ -48,10 +46,6 @@ impl ChannelOptions { self.cltv_expiry_delta = Some(expiry); } - if let Some(commit) = options.commit_upfront_shutdown_pubkey { - self.commit_upfront_shutdown_pubkey = Some(commit); - } - if let Some(dust) = options.max_dust_htlc_exposure_msat { self.max_dust_htlc_exposure_msat = Some(dust); } @@ -91,7 +85,7 @@ impl From for ChannelConfig { } #[derive(Clone, Debug, Deserialize)] -pub struct OurChannelsConfig { +pub struct OurChannelsConfigs { /// Confirmations we will wait for before considering an inbound channel locked in. pub inbound_channels_confirmations: Option, /// The number of blocks we require our counterparty to wait to claim their money on chain @@ -114,10 +108,44 @@ pub struct OurChannelsConfig { /// Set to announce the channel publicly and notify all nodes that they can route via this /// channel. pub announced_channel: Option, + /// When set, we commit to an upfront shutdown_pubkey at channel open. 
+ pub commit_upfront_shutdown_pubkey: Option, +} + +impl OurChannelsConfigs { + pub fn update(&mut self, config: OurChannelsConfigs) { + if let Some(confs) = config.inbound_channels_confirmations { + self.inbound_channels_confirmations = Some(confs); + } + + if let Some(delay) = config.counterparty_locktime { + self.counterparty_locktime = Some(delay); + } + + if let Some(min) = config.our_htlc_minimum_msat { + self.our_htlc_minimum_msat = Some(min); + } + + if let Some(scid_privacy) = config.negotiate_scid_privacy { + self.negotiate_scid_privacy = Some(scid_privacy); + } + + if let Some(max_inbound_htlc) = config.max_inbound_in_flight_htlc_percent { + self.max_inbound_in_flight_htlc_percent = Some(max_inbound_htlc); + } + + if let Some(announce) = config.announced_channel { + self.announced_channel = Some(announce); + } + + if let Some(commit) = config.commit_upfront_shutdown_pubkey { + self.commit_upfront_shutdown_pubkey = Some(commit); + } + } } -impl From for ChannelHandshakeConfig { - fn from(config: OurChannelsConfig) -> Self { +impl From for ChannelHandshakeConfig { + fn from(config: OurChannelsConfigs) -> Self { let mut channel_handshake_config = ChannelHandshakeConfig::default(); if let Some(confs) = config.inbound_channels_confirmations { @@ -133,17 +161,21 @@ impl From for ChannelHandshakeConfig { } if let Some(scid_privacy) = config.negotiate_scid_privacy { - channel_handshake_config.negotiate_scid_privacy = scid_privacy + channel_handshake_config.negotiate_scid_privacy = scid_privacy; } if let Some(max_inbound_htlc) = config.max_inbound_in_flight_htlc_percent { - channel_handshake_config.max_inbound_htlc_value_in_flight_percent_of_channel = max_inbound_htlc + channel_handshake_config.max_inbound_htlc_value_in_flight_percent_of_channel = max_inbound_htlc; } if let Some(announce) = config.announced_channel { channel_handshake_config.announced_channel = announce; } + if let Some(commit) = config.commit_upfront_shutdown_pubkey { + 
channel_handshake_config.commit_upfront_shutdown_pubkey = commit; + } + channel_handshake_config } } @@ -210,6 +242,10 @@ impl From for ChannelHandshakeLimits { channel_handshake_limits.max_minimum_depth = confs; } + if let Some(is_0conf) = limits.allow_outbound_0conf { + channel_handshake_limits.trust_own_funding_0conf = is_0conf; + } + if let Some(pref) = limits.force_announced_channel_preference { channel_handshake_limits.force_announced_channel_preference = pref; } @@ -218,10 +254,6 @@ impl From for ChannelHandshakeLimits { channel_handshake_limits.their_to_self_delay = blocks; } - if let Some(is_0conf) = limits.allow_outbound_0conf { - channel_handshake_limits.trust_own_funding_0conf = is_0conf; - } - channel_handshake_limits } } @@ -234,7 +266,7 @@ pub struct LightningCoinConf { pub accept_inbound_channels: Option, pub accept_forwards_to_priv_channels: Option, pub channel_options: Option, - pub our_channels_config: Option, + pub our_channels_configs: Option, pub counterparty_channel_config_limits: Option, pub sign_message_prefix: Option, } @@ -242,7 +274,7 @@ pub struct LightningCoinConf { impl From for UserConfig { fn from(conf: LightningCoinConf) -> Self { let mut user_config = UserConfig::default(); - if let Some(config) = conf.our_channels_config { + if let Some(config) = conf.our_channels_configs { user_config.channel_handshake_config = config.into(); } if let Some(limits) = conf.counterparty_channel_config_limits { diff --git a/mm2src/coins/lightning/ln_events.rs b/mm2src/coins/lightning/ln_events.rs index c04c4e88af..1b596db516 100644 --- a/mm2src/coins/lightning/ln_events.rs +++ b/mm2src/coins/lightning/ln_events.rs @@ -129,7 +129,8 @@ impl EventHandler for LightningEventHandler { channel_type: _, } => self.handle_open_channel_request(*temporary_channel_id, *counterparty_node_id, *funding_satoshis, *push_msat), - // Todo + // Just log an error for now, but this event can be used along PaymentForwarded for a new RPC that shows stats about how a node + 
// forward payments over it's outbound channels which can be useful for a user that wants to run a forwarding node for some profits. Event::HTLCHandlingFailed { prev_channel_id, failed_next_destination } => error!( @@ -138,10 +139,9 @@ impl EventHandler for LightningEventHandler { failed_next_destination, ), - // Todo + // ProbeSuccessful and ProbeFailed are events in response to a send_probe function call which sends a payment that probes a given route for liquidity. + // send_probe is not used for now but may be used in order matching in the future to check if a swap can happen or not. Event::ProbeSuccessful { .. } => (), - - // Todo Event::ProbeFailed { .. } => (), } } diff --git a/mm2src/coins/lightning/ln_platform.rs b/mm2src/coins/lightning/ln_platform.rs index 84e6cb53d0..a9e69f9473 100644 --- a/mm2src/coins/lightning/ln_platform.rs +++ b/mm2src/coins/lightning/ln_platform.rs @@ -20,12 +20,10 @@ use lightning::chain::{chaininterface::{BroadcasterInterface, ConfirmationTarget Confirm, Filter, WatchedOutput}; use rpc::v1::types::{Bytes as BytesJson, H256 as H256Json}; use spv_validation::spv_proof::TRY_SPV_PROOF_INTERVAL; -use std::cmp; use std::convert::{TryFrom, TryInto}; use std::sync::atomic::{AtomicU64, Ordering as AtomicOrdering, Ordering}; const CHECK_FOR_NEW_BEST_BLOCK_INTERVAL: f64 = 60.; -const MIN_ALLOWED_FEE_PER_1000_WEIGHT: u32 = 253; const TRY_LOOP_INTERVAL: f64 = 60.; static DEFAULT_BACKGROUND_FEES_PER_VB: AtomicU64 = AtomicU64::new(1012); @@ -531,7 +529,9 @@ impl FeeEstimator for Platform { // Must be no smaller than 253 (ie 1 satoshi-per-byte rounded up to ensure later round-downs don’t put us below 1 satoshi-per-byte). 
// https://docs.rs/lightning/0.0.101/lightning/chain/chaininterface/trait.FeeEstimator.html#tymethod.get_est_sat_per_1000_weight - cmp::max((fee_per_kb as f64 / 4.0).ceil() as u32, MIN_ALLOWED_FEE_PER_1000_WEIGHT) + // This has changed in rust-lightning v0.0.110 as LDK currently wraps get_est_sat_per_1000_weight to ensure that the value returned is + // no smaller than 253. https://github.com/lightningdevkit/rust-lightning/pull/1552 + (fee_per_kb as f64 / 4.0).ceil() as u32 } } From 2dfb4993406f20bd854eddf4965eedd39457edfd Mon Sep 17 00:00:00 2001 From: shamardy Date: Wed, 10 Aug 2022 20:35:01 +0200 Subject: [PATCH 14/33] update rust-lightning to v0.0.110 complete: add update_channel RPC --- mm2src/coins/lightning.rs | 35 ++++++++++++++++++- mm2src/coins/lightning/ln_errors.rs | 30 ++++++++++++++++ mm2src/mm2_core/src/mm_ctx.rs | 7 ++-- .../mm2_main/src/rpc/dispatcher/dispatcher.rs | 12 ++++--- 4 files changed, 74 insertions(+), 10 deletions(-) diff --git a/mm2src/coins/lightning.rs b/mm2src/coins/lightning.rs index 13aaf88496..7ac91c7abe 100644 --- a/mm2src/coins/lightning.rs +++ b/mm2src/coins/lightning.rs @@ -12,7 +12,7 @@ mod ln_utils; use super::{lp_coinfind_or_err, DerivationMethod, MmCoinEnum}; use crate::lightning::ln_conf::OurChannelsConfigs; -use crate::lightning::ln_errors::{TrustedNodeError, TrustedNodeResult}; +use crate::lightning::ln_errors::{TrustedNodeError, TrustedNodeResult, UpdateChannelError, UpdateChannelResult}; use crate::lightning::ln_events::init_events_abort_handlers; use crate::lightning::ln_serialization::PublicKeyForRPC; use crate::lightning::ln_sql::SqliteLightningDB; @@ -952,6 +952,39 @@ pub async fn open_channel(ctx: MmArc, req: OpenChannelRequest) -> OpenChannelRes }) } +#[derive(Deserialize)] +pub struct UpdateChannelReq { + pub coin: String, + pub channel_id: H256Json, + pub counterparty_node_id: PublicKeyForRPC, + pub options: ChannelOptions, +} + +/// Updates configuration for an open channel. 
+pub async fn update_channel(ctx: MmArc, req: UpdateChannelReq) -> UpdateChannelResult { + let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; + let ln_coin = match coin { + MmCoinEnum::LightningCoin(c) => c, + _ => return MmError::err(UpdateChannelError::UnsupportedCoin(coin.ticker().to_string())), + }; + + async_blocking(move || { + let mut channel_options = ln_coin.conf.channel_options.unwrap_or_else(|| req.options.clone()); + if channel_options != req.options { + channel_options.update(req.options.clone()); + } + let channel_ids = vec![req.channel_id.0]; + let counterparty_node_id = req.counterparty_node_id.clone(); + ln_coin + .channel_manager + .update_channel_config(&counterparty_node_id.into(), &channel_ids, &channel_options.into()) + .map_to_mm(|e| UpdateChannelError::FailureToUpdateChannel(req.channel_id.to_string(), format!("{:?}", e))) + }) + .await?; + + Ok("success".into()) +} + #[derive(Deserialize)] pub struct OpenChannelsFilter { pub channel_id: Option, diff --git a/mm2src/coins/lightning/ln_errors.rs b/mm2src/coins/lightning/ln_errors.rs index c2671b6af5..f5268c08d0 100644 --- a/mm2src/coins/lightning/ln_errors.rs +++ b/mm2src/coins/lightning/ln_errors.rs @@ -14,6 +14,7 @@ use utxo_signer::with_key_pair::UtxoSignWithKeyPairError; pub type EnableLightningResult = Result>; pub type ConnectToNodeResult = Result>; pub type OpenChannelResult = Result>; +pub type UpdateChannelResult = Result>; pub type ListChannelsResult = Result>; pub type GetChannelDetailsResult = Result>; pub type GenerateInvoiceResult = Result>; @@ -218,6 +219,35 @@ impl From for OpenChannelError { fn from(err: SqlError) -> OpenChannelError { OpenChannelError::DbError(err.to_string()) } } +#[derive(Debug, Deserialize, Display, Serialize, SerializeErrorType)] +#[serde(tag = "error_type", content = "error_data")] +pub enum UpdateChannelError { + #[display(fmt = "Lightning network is not supported for {}", _0)] + UnsupportedCoin(String), + #[display(fmt = "No such coin {}", _0)] + 
NoSuchCoin(String), + #[display(fmt = "Failure to channel {}: {}", _0, _1)] + FailureToUpdateChannel(String, String), +} + +impl HttpStatusCode for UpdateChannelError { + fn status_code(&self) -> StatusCode { + match self { + UpdateChannelError::UnsupportedCoin(_) => StatusCode::BAD_REQUEST, + UpdateChannelError::NoSuchCoin(_) => StatusCode::PRECONDITION_REQUIRED, + UpdateChannelError::FailureToUpdateChannel(_, _) => StatusCode::INTERNAL_SERVER_ERROR, + } + } +} + +impl From for UpdateChannelError { + fn from(e: CoinFindError) -> Self { + match e { + CoinFindError::NoSuchCoin { coin } => UpdateChannelError::NoSuchCoin(coin), + } + } +} + #[derive(Debug, Deserialize, Display, Serialize, SerializeErrorType)] #[serde(tag = "error_type", content = "error_data")] pub enum ListChannelsError { diff --git a/mm2src/mm2_core/src/mm_ctx.rs b/mm2src/mm2_core/src/mm_ctx.rs index e850d4cbf2..4b7002d2c8 100644 --- a/mm2src/mm2_core/src/mm_ctx.rs +++ b/mm2src/mm2_core/src/mm_ctx.rs @@ -7,7 +7,7 @@ use futures::future::AbortHandle; use gstuff::{try_s, Constructible, ERR, ERRL}; use keys::KeyPair; use lazy_static::lazy_static; -use mm2_metrics::{MetricsArc, MetricsOps, MmMetricsError}; +use mm2_metrics::{MetricsArc, MetricsOps}; use primitives::hash::H160; use rand::Rng; use serde::{Deserialize, Serialize}; @@ -18,7 +18,6 @@ use std::any::Any; use std::collections::hash_map::{Entry, HashMap}; use std::collections::HashSet; use std::fmt; -use std::net::AddrParseError; use std::ops::Deref; use std::path::{Path, PathBuf}; use std::sync::{Arc, Mutex}; @@ -29,9 +28,9 @@ cfg_wasm32! { } cfg_native! 
{ - use mm2_metrics::prometheus; + use mm2_metrics::{prometheus, MmMetricsError}; use db_common::sqlite::rusqlite::Connection; - use std::net::{IpAddr, SocketAddr}; + use std::net::{AddrParseError, IpAddr, SocketAddr}; use std::sync::MutexGuard; } diff --git a/mm2src/mm2_main/src/rpc/dispatcher/dispatcher.rs b/mm2src/mm2_main/src/rpc/dispatcher/dispatcher.rs index b99941eb83..d33cb662b9 100644 --- a/mm2src/mm2_main/src/rpc/dispatcher/dispatcher.rs +++ b/mm2src/mm2_main/src/rpc/dispatcher/dispatcher.rs @@ -38,7 +38,8 @@ use std::net::SocketAddr; cfg_native! { use coins::lightning::{add_trusted_node, close_channel, connect_to_lightning_node, generate_invoice, get_channel_details, get_claimable_balances, get_payment_details, list_closed_channels_by_filter, list_open_channels_by_filter, - list_payments_by_filter, list_trusted_node, open_channel, remove_trusted_node, send_payment, LightningCoin}; + list_payments_by_filter, list_trusted_node, open_channel, remove_trusted_node, send_payment, update_channel, + LightningCoin}; use coins::{SolanaCoin, SplToken}; use coins::z_coin::ZCoin; } @@ -174,6 +175,10 @@ async fn dispatcher_v2(request: MmRpcRequest, ctx: MmArc) -> DispatcherResult handle_mmrpc(ctx, request, close_channel).await, "connect_to_lightning_node" => handle_mmrpc(ctx, request, connect_to_lightning_node).await, "enable_lightning" => handle_mmrpc(ctx, request, enable_l2::).await, + "enable_solana_with_tokens" => { + handle_mmrpc(ctx, request, enable_platform_coin_with_tokens::).await + }, + "enable_spl" => handle_mmrpc(ctx, request, enable_token::).await, "generate_invoice" => handle_mmrpc(ctx, request, generate_invoice).await, "get_channel_details" => handle_mmrpc(ctx, request, get_channel_details).await, "get_claimable_balances" => handle_mmrpc(ctx, request, get_claimable_balances).await, @@ -188,10 +193,7 @@ async fn dispatcher_v2(request: MmRpcRequest, ctx: MmArc) -> DispatcherResult handle_mmrpc(ctx, request, open_channel).await, "remove_trusted_node" => 
handle_mmrpc(ctx, request, remove_trusted_node).await, "send_payment" => handle_mmrpc(ctx, request, send_payment).await, - "enable_solana_with_tokens" => { - handle_mmrpc(ctx, request, enable_platform_coin_with_tokens::).await - }, - "enable_spl" => handle_mmrpc(ctx, request, enable_token::).await, + "update_channel" => handle_mmrpc(ctx, request, update_channel).await, "z_coin_tx_history" => handle_mmrpc(ctx, request, coins::my_tx_history_v2::z_coin_tx_history_rpc).await, _ => MmError::err(DispatcherError::NoSuchMethod), }, From 00d2af2e64312b5ff661b0bf52492c98ae166e0d Mon Sep 17 00:00:00 2001 From: shamardy Date: Fri, 12 Aug 2022 22:00:20 +0200 Subject: [PATCH 15/33] move lightning background processor to MmCtx, so it can be dropped when calling stop RPC, to persist the latest states on exit --- Cargo.lock | 1 + mm2src/coins/lightning.rs | 15 ++++++++------- mm2src/coins/lightning/ln_events.rs | 4 +++- mm2src/mm2_core/Cargo.toml | 1 + mm2src/mm2_core/src/mm_ctx.rs | 13 +++++++++++++ .../src/rpc/lp_commands/lp_commands_legacy.rs | 5 +++++ 6 files changed, 31 insertions(+), 8 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 66d551266b..2f0327fb10 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4087,6 +4087,7 @@ dependencies = [ "hex 0.4.2", "keys", "lazy_static", + "lightning-background-processor", "mm2_metrics", "mm2_rpc", "primitives", diff --git a/mm2src/coins/lightning.rs b/mm2src/coins/lightning.rs index 7ac91c7abe..534d7f36bd 100644 --- a/mm2src/coins/lightning.rs +++ b/mm2src/coins/lightning.rs @@ -93,8 +93,6 @@ pub struct LightningCoin { pub conf: LightningCoinConf, /// The lightning node peer manager that takes care of connecting to peers, etc.. pub peer_manager: Arc, - /// The lightning node background processor that takes care of tasks that need to happen periodically. 
- pub background_processor: Arc, /// The lightning node channel manager which keeps track of the number of open channels and sends messages to the appropriate /// channel, also tracks HTLC preimages and forwards onion packets appropriately. pub channel_manager: Arc, @@ -127,8 +125,8 @@ impl LightningCoin { fn my_node_id(&self) -> String { self.channel_manager.get_our_node_id().to_string() } async fn list_channels(&self) -> Vec { - let selfi = self.clone(); - async_blocking(move || selfi.channel_manager.list_channels()).await + let channel_manager = self.channel_manager.clone(); + async_blocking(move || channel_manager.list_channels()).await } async fn get_balance_msat(&self) -> (u64, u64) { @@ -743,7 +741,7 @@ pub async fn start_lightning( // InvoicePayer will act as our event handler as it handles some of the payments related events before // delegating it to LightningEventHandler. // note: background_processor stops automatically when dropped since BackgroundProcessor implements the Drop trait. - let background_processor = Arc::new(BackgroundProcessor::start( + let background_processor = BackgroundProcessor::start( persister.clone(), invoice_payer.clone(), chain_monitor.clone(), @@ -752,7 +750,11 @@ pub async fn start_lightning( peer_manager.clone(), logger, Some(scorer), - )); + ); + ctx.background_processors + .lock() + .unwrap() + .insert(conf.ticker.clone(), background_processor); // If channel_nodes_data file exists, read channels nodes data from disk and reconnect to channel nodes/peers if possible. 
let open_channels_nodes = Arc::new(PaMutex::new( @@ -775,7 +777,6 @@ pub async fn start_lightning( platform, conf, peer_manager, - background_processor, channel_manager, chain_monitor, keys_manager, diff --git a/mm2src/coins/lightning/ln_events.rs b/mm2src/coins/lightning/ln_events.rs index 1b596db516..842f22aa22 100644 --- a/mm2src/coins/lightning/ln_events.rs +++ b/mm2src/coins/lightning/ln_events.rs @@ -94,6 +94,7 @@ impl EventHandler for LightningEventHandler { ), // Handling updating channel penalties after successfully routing a payment along a path is done by the InvoicePayer. + // Todo: Maybe add information to db about why a payment succeeded using this event Event::PaymentPathSuccessful { payment_id, payment_hash, @@ -106,7 +107,8 @@ impl EventHandler for LightningEventHandler { ), // Handling updating channel penalties after a payment fails to route through a channel is done by the InvoicePayer. - // Also abandoning or retrying a payment is handled by the InvoicePayer. + // Also abandoning or retrying a payment is handled by the InvoicePayer. 
+ // Todo: Add information to db about why a payment failed using this event Event::PaymentPathFailed { payment_hash, rejected_by_dest, diff --git a/mm2src/mm2_core/Cargo.toml b/mm2src/mm2_core/Cargo.toml index b260692076..3070b3f7ea 100644 --- a/mm2src/mm2_core/Cargo.toml +++ b/mm2src/mm2_core/Cargo.toml @@ -17,6 +17,7 @@ futures = { version = "0.3", package = "futures", features = ["compat", "async-a hex = "0.4.2" keys = { path = "../mm2_bitcoin/keys" } lazy_static = "1.4" +lightning-background-processor = "0.0.110" mm2_metrics = { path = "../mm2_metrics" } primitives = { path = "../mm2_bitcoin/primitives" } rand = { version = "0.7", features = ["std", "small_rng", "wasm-bindgen"] } diff --git a/mm2src/mm2_core/src/mm_ctx.rs b/mm2src/mm2_core/src/mm_ctx.rs index 4b7002d2c8..19c4c523cf 100644 --- a/mm2src/mm2_core/src/mm_ctx.rs +++ b/mm2src/mm2_core/src/mm_ctx.rs @@ -7,6 +7,8 @@ use futures::future::AbortHandle; use gstuff::{try_s, Constructible, ERR, ERRL}; use keys::KeyPair; use lazy_static::lazy_static; +#[cfg(not(target_arch = "wasm32"))] +use lightning_background_processor::BackgroundProcessor; use mm2_metrics::{MetricsArc, MetricsOps}; use primitives::hash::H160; use rand::Rng; @@ -103,6 +105,11 @@ pub struct MmCtx { pub swaps_ctx: Mutex>>, /// The context belonging to the `lp_stats` mod: `StatsContext` pub stats_ctx: Mutex>>, + /// Lightning background processors, these need to be dropped when stopping mm2 to + /// persist the latest states to the filesystem. This can be moved to LightningCoin + /// Struct in the future if the LightningCoin and other coins are dropped when mm2 stops. + #[cfg(not(target_arch = "wasm32"))] + pub background_processors: Mutex>, /// The RPC sender forwarding requests to writing part of underlying stream. 
#[cfg(target_arch = "wasm32")] pub wasm_rpc: Constructible, @@ -141,6 +148,8 @@ impl MmCtx { coins_needed_for_kick_start: Mutex::new(HashSet::new()), swaps_ctx: Mutex::new(None), stats_ctx: Mutex::new(None), + #[cfg(not(target_arch = "wasm32"))] + background_processors: Mutex::new(HashMap::new()), #[cfg(target_arch = "wasm32")] wasm_rpc: Constructible::default(), #[cfg(not(target_arch = "wasm32"))] @@ -381,6 +390,10 @@ impl MmArc { for handler in self.abort_handlers.lock().unwrap().drain(..) { handler.abort(); } + + #[cfg(not(target_arch = "wasm32"))] + self.background_processors.lock().unwrap().drain(); + let mut stop_listeners = self.stop_listeners.lock().expect("Can't lock stop_listeners"); // NB: It is important that we `drain` the `stop_listeners` rather than simply iterating over them // because otherwise there might be reference counting instances remaining in a listener diff --git a/mm2src/mm2_main/src/rpc/lp_commands/lp_commands_legacy.rs b/mm2src/mm2_main/src/rpc/lp_commands/lp_commands_legacy.rs index ad4b6c527c..158ca32a79 100644 --- a/mm2src/mm2_main/src/rpc/lp_commands/lp_commands_legacy.rs +++ b/mm2src/mm2_main/src/rpc/lp_commands/lp_commands_legacy.rs @@ -71,6 +71,11 @@ pub async fn disable_coin(ctx: MmArc, req: Json) -> Result>, St .map_err(|e| ERRL!("{}", e)); } + // If the coin is a Lightning Coin, we need to drop it's background processor first to + // persist the latest state to the filesystem. 
+ #[cfg(not(target_arch = "wasm32"))] + ctx.background_processors.lock().unwrap().remove(&ticker); + try_s!(disable_coin_impl(&ctx, &ticker).await); let res = json!({ "result": { From eff1370686206df8d435eb03f0100d80e26a3dac Mon Sep 17 00:00:00 2001 From: shamardy Date: Mon, 15 Aug 2022 22:50:19 +0200 Subject: [PATCH 16/33] validating headers WIP --- mm2src/coins/utxo/rpc_clients.rs | 20 +++- .../coins/utxo/utxo_block_header_storage.rs | 2 + mm2src/coins/utxo/utxo_common.rs | 30 +++++- mm2src/mm2_bitcoin/keys/Cargo.toml | 2 +- mm2src/mm2_bitcoin/spv_validation/Cargo.toml | 1 + .../spv_validation/src/helpers_validation.rs | 102 +++++++++++------- mm2src/mm2_bitcoin/spv_validation/src/lib.rs | 1 + .../mm2_bitcoin/spv_validation/src/storage.rs | 2 +- mm2src/mm2_bitcoin/spv_validation/src/work.rs | 28 +++-- 9 files changed, 128 insertions(+), 60 deletions(-) diff --git a/mm2src/coins/utxo/rpc_clients.rs b/mm2src/coins/utxo/rpc_clients.rs index 41a09e6272..a23c604b9e 100644 --- a/mm2src/coins/utxo/rpc_clients.rs +++ b/mm2src/coins/utxo/rpc_clients.rs @@ -1968,7 +1968,25 @@ impl ElectrumClient { let params = &storage.params; let blocks_limit = params.blocks_limit_to_check; let (headers_registry, headers) = self.retrieve_last_headers(blocks_limit, height).compat().await?; - match validate_headers(headers, params.difficulty_check, params.constant_difficulty) { + let previous_header_height = if height < blocks_limit.get() { + 0 + } else { + height - blocks_limit.get() + }; + // Todo: remove unwrap, move this inside validate_headers function, maybe also move ticker inside storage? 
+ let previous_header = storage.get_block_header(ticker, previous_header_height).await?.unwrap(); + match validate_headers( + ticker, + previous_header, + previous_header_height as u32, + headers, + params.difficulty_check, + params.constant_difficulty, + storage, + ¶ms.difficulty_algorithm, + ) + .await + { Ok(_) => { storage.add_block_headers_to_storage(ticker, headers_registry).await?; Ok(header) diff --git a/mm2src/coins/utxo/utxo_block_header_storage.rs b/mm2src/coins/utxo/utxo_block_header_storage.rs index 32f53dd89f..2ef0c98f7a 100644 --- a/mm2src/coins/utxo/utxo_block_header_storage.rs +++ b/mm2src/coins/utxo/utxo_block_header_storage.rs @@ -7,6 +7,7 @@ use chain::BlockHeader; use mm2_core::mm_ctx::MmArc; use primitives::hash::H256; use spv_validation::storage::{BlockHeaderStorageError, BlockHeaderStorageOps}; +use spv_validation::work::DifficultyAlgorithm; use std::collections::HashMap; use std::fmt::{Debug, Formatter}; use std::num::NonZeroU64; @@ -19,6 +20,7 @@ pub struct BlockHeaderVerificationParams { // This should to be equal to or greater than the number of blocks needed before the chain is safe from reorganization (e.g. 
6 blocks for BTC) pub blocks_limit_to_check: NonZeroU64, pub check_every: f64, + pub difficulty_algorithm: Option, } pub struct BlockHeaderStorage { diff --git a/mm2src/coins/utxo/utxo_common.rs b/mm2src/coins/utxo/utxo_common.rs index f99cb247a4..be7d5dcd2f 100644 --- a/mm2src/coins/utxo/utxo_common.rs +++ b/mm2src/coins/utxo/utxo_common.rs @@ -3473,12 +3473,15 @@ pub async fn block_header_utxo_loop(weak: UtxoWeak, constructo Some(storage) => storage, }; let params = storage.params.clone(); - let (check_every, blocks_limit_to_check, difficulty_check, constant_difficulty) = ( + let (check_every, blocks_limit_to_check, difficulty_check, constant_difficulty, difficulty_algorithm) = ( params.check_every, params.blocks_limit_to_check, params.difficulty_check, params.constant_difficulty, + params.difficulty_algorithm, ); + // Todo: what about if electrums are down for a long time, a header will be skipped and all the next validations will fail, we shouldn't get it from + // Todo: get_block_count but from storage when there is a new block (check it's the last one in storage), block_header_utxo_loop logic might completely change let height = ok_or_continue_after_sleep!(coin.as_ref().rpc_client.get_block_count().compat().await, check_every); let (block_registry, block_headers) = ok_or_continue_after_sleep!( @@ -3488,12 +3491,33 @@ pub async fn block_header_utxo_loop(weak: UtxoWeak, constructo .await, check_every ); + let ticker = coin.as_ref().conf.ticker.as_str(); + let previous_header_height = if height < blocks_limit_to_check.get() { + 0 + } else { + height - blocks_limit_to_check.get() + }; + // Todo: remove unwrap, move this inside validate_headers function, maybe also move ticker inside storage? 
+ let previous_header = ok_or_continue_after_sleep!( + storage.get_block_header(ticker, previous_header_height).await, + check_every + ) + .unwrap(); ok_or_continue_after_sleep!( - validate_headers(block_headers, difficulty_check, constant_difficulty), + validate_headers( + ticker, + previous_header, + previous_header_height as u32, + block_headers, + difficulty_check, + constant_difficulty, + storage, + &difficulty_algorithm + ) + .await, check_every ); - let ticker = coin.as_ref().conf.ticker.as_str(); ok_or_continue_after_sleep!( storage.add_block_headers_to_storage(ticker, block_registry).await, check_every diff --git a/mm2src/mm2_bitcoin/keys/Cargo.toml b/mm2src/mm2_bitcoin/keys/Cargo.toml index ad024e60cb..3a5e46f48c 100644 --- a/mm2src/mm2_bitcoin/keys/Cargo.toml +++ b/mm2src/mm2_bitcoin/keys/Cargo.toml @@ -12,6 +12,6 @@ derive_more = "0.99" lazy_static = "1.4" rand = "0.6" primitives = { path = "../primitives" } -secp256k1 = { version = "0.20", features = ["rand"] } +secp256k1 = { version = "0.20", features = ["rand", "recovery"] } serde = { version = "1.0", features = ["derive"] } serde_derive = "1.0" diff --git a/mm2src/mm2_bitcoin/spv_validation/Cargo.toml b/mm2src/mm2_bitcoin/spv_validation/Cargo.toml index 6d3a5e96d6..97cb21d765 100644 --- a/mm2src/mm2_bitcoin/spv_validation/Cargo.toml +++ b/mm2src/mm2_bitcoin/spv_validation/Cargo.toml @@ -12,6 +12,7 @@ keys = {path = "../keys"} primitives = { path = "../primitives" } ripemd160 = "0.9.0" rustc-hex = "2" +serde = "1.0" serialization = { path = "../serialization" } sha2 = "0.9" test_helpers = { path = "../test_helpers" } diff --git a/mm2src/mm2_bitcoin/spv_validation/src/helpers_validation.rs b/mm2src/mm2_bitcoin/spv_validation/src/helpers_validation.rs index 6c3c263960..089fbd19d2 100644 --- a/mm2src/mm2_bitcoin/spv_validation/src/helpers_validation.rs +++ b/mm2src/mm2_bitcoin/spv_validation/src/helpers_validation.rs @@ -1,12 +1,13 @@ -use chain::{BlockHeader, RawBlockHeader, RawHeaderError}; +use 
crate::storage::BlockHeaderStorageOps; +use crate::work::{next_block_bits, DifficultyAlgorithm, NextBlockBitsError}; +use chain::{BlockHeader, RawHeaderError}; use derive_more::Display; use primitives::hash::H256; -use primitives::U256; use ripemd160::Digest; use serialization::parse_compact_int; use sha2::Sha256; -#[derive(Debug, Display, PartialEq, Eq, Clone)] +#[derive(Clone, Debug, Display, Eq, PartialEq)] pub enum SPVError { #[display(fmt = "Overran a checked read on a slice")] ReadOverrun, @@ -14,8 +15,6 @@ pub enum SPVError { BadCompactInt, #[display(fmt = "`extract_hash` could not identify the output type")] MalformattedOutput, - #[display(fmt = "Unable to get target from block header")] - UnableToGetTarget, #[display(fmt = "Unable to get block header from network or storage: {}", _0)] UnableToGetHeader(String), #[display(fmt = "Header not exactly 80 bytes")] @@ -24,6 +23,8 @@ pub enum SPVError { UnexpectedDifficultyChange, #[display(fmt = "Header does not meet its own difficulty target")] InsufficientWork, + #[display(fmt = "Couldn't calculate the required difficulty for the block: {}", _0)] + DifficultyCalculationError(NextBlockBitsError), #[display(fmt = "Header in chain does not correctly reference parent header")] InvalidChain, #[display(fmt = "When validating a `BitcoinHeader`, the `hash` field is not the digest of the raw header")] @@ -58,6 +59,10 @@ impl From for SPVError { } } +impl From for SPVError { + fn from(e: NextBlockBitsError) -> Self { SPVError::DifficultyCalculationError(e) } +} + /// A slice of `H256`s for use in a merkle array #[derive(Debug, Clone, PartialEq, Eq)] struct MerkleArray<'a>(&'a [u8]); @@ -291,16 +296,6 @@ pub(crate) fn merkle_prove( fn validate_header_prev_hash(actual: &H256, to_compare_with: &H256) -> bool { actual == to_compare_with } -fn validate_header_work(digest: H256, target: &U256) -> bool { - let empty = H256::default(); - - if digest == empty { - return false; - } - - U256::from_little_endian(digest.as_slice()) < 
*target -} - /// Checks validity of header chain. /// Compares the hash of each header to the prevHash in the next header. /// @@ -318,35 +313,41 @@ fn validate_header_work(digest: H256, target: &U256) -> bool { /// /// # Notes /// Wrapper inspired by `bitcoin_spv::validatespv::validate_header_chain` -pub fn validate_headers( +// Todo: reduce number of arguments +#[allow(clippy::too_many_arguments)] +pub async fn validate_headers( + coin: &str, + previous_header: BlockHeader, + previous_height: u32, headers: Vec, difficulty_check: bool, constant_difficulty: bool, + storage: &dyn BlockHeaderStorageOps, + algorithm: &Option, ) -> Result<(), SPVError> { - let mut previous_hash = H256::default(); - let mut target = U256::default(); - for (i, header) in headers.into_iter().enumerate() { - let raw_header = RawBlockHeader::from(header.clone()); - if i == 0 { - target = match header.target() { - Ok(target) => target, - Err(_) => return Err(SPVError::UnableToGetTarget), - }; - } - let cur_target = match header.target() { - Ok(target) => target, - Err(_) => return Err(SPVError::UnableToGetTarget), - }; - if (!constant_difficulty && difficulty_check) && cur_target != target { + let mut previous_hash = previous_header.hash(); + let mut prev_bits = previous_header.bits.clone(); + let mut previous_header = previous_header; + for header in headers.into_iter() { + let cur_bits = header.bits.clone(); + if previous_height != 0 && constant_difficulty && difficulty_check && cur_bits != prev_bits { return Err(SPVError::UnexpectedDifficultyChange); } - if i != 0 && !validate_header_prev_hash(&raw_header.parent(), &previous_hash) { + if previous_height != 0 && !validate_header_prev_hash(&header.previous_header_hash, &previous_hash) { return Err(SPVError::InvalidChain); } - if difficulty_check && !validate_header_work(raw_header.digest(), &target) { - return Err(SPVError::InsufficientWork); + if let Some(algo) = algorithm { + if !constant_difficulty + && difficulty_check + && cur_bits + 
!= next_block_bits(coin, header.time, previous_header, previous_height, storage, algo).await? + { + return Err(SPVError::InsufficientWork); + } } - previous_hash = raw_header.digest(); + prev_bits = cur_bits; + previous_header = header; + previous_hash = previous_header.hash(); } Ok(()) } @@ -357,6 +358,8 @@ mod tests { use super::*; use crate::test_utils::{self}; + use crate::work::tests::TestBlockHeadersStorage; + use common::block_on; use std::{println, vec}; use test_helpers::hex::force_deserialize_hex; @@ -540,13 +543,23 @@ mod tests { #[test] fn test_block_headers_no_difficulty_check() { - // morty: 1330480, 1330481, 1330482 + // morty: 1330481, 1330482 let headers: Vec = vec![ - "04000000bb496ba8d09f8f98b15cdaf5798163bdd70676eb1c8b538f53ab4f83da4a27000db352177c6b5ad2499a906cec33b843fb17fc1ec298cd06c7e7ceb7b62e144232d719d14c15e565c05e84ead95a2f101a1b658ee2f36eb7ca65206e27cfca473de614625be6071f09006c286bc5ec73dd27a09bf687700c06fb04d0b9a063c0aa0746c9db170000fd40050053b27dad1f5a858b78f3154039759e985ed57db10ecb772810d7f158c55083a14b9f2ba26ae9fcb82012186e2528f67c45b7b216a69fe26232ad2d179a141b1b10e4d5f108c7b920b49348f6eef2d70b7f02cb01d8d9992f8f2d7b6608806b10ff329846b188de200aa37c73ac03f6c9b79cf5613c71b7969b4abafdbc1165ad955a049269584c83b36f36a3e9becf2fe81f3b1917475eb13ecfed3813ecc32206078d8c1e2797013dfc6f6a55e06f1c06a07959ef94d53ca0fc81d03cb6f614761156ed4ff1a8e5c9f0b96f3c8c3eeb9a0720cf4ed10397330f49b83439c5083eea1d1785a10d86ca2866d0da4ca746c49118b780c55aa6cd5b4c0491cefa258ecf129307d15e001415b203e89c008f4444b236aa556dbf4f6d05e0c57642cfa142df2f8546f1d37a6b2feaf98496892b41caefbe7dc7bcbb2755752df3dbf00ac1fc558896f14541aea4cc78ec5d00bbe5398fac4a658b1ae3399777f15117c0f3de3c63bc5b3edf6543d172cfc66907f9cf8706e97b14281daeb427801dfb0910743873265ae6bae71dbf22353c321f726e68f747965858f488dd507b7e6adee42509e5720373dce5b111b420c906b0f2cb391cfb9d581e2509da3829d6718469f383e07043694db87db0ce1196449a6c9cd941a8bde507e553c0ca534238dcc93633631926102c87cd0f83720ccff60de8b05b103e086a2c2cb7
943f21033a5658235fc52708907e1ea722e726808db0270bf898c51e9dd0745614857783dc11a6dcd7760d4a07ddbd83a2e02b23fa789b79eed22dc411b9b48f71c54f12387065e3ff0638701e0f6a0dd56d0ce395d150b237b60c166352e69b92173b884446d7660f5857458b97c6d4ee54f8a1f60113aff30e54c1f7c572b85dcb7a2419d2f736a9b0a6d99ea549bd74e546251c0b8be7975e9a6d96aa3467b1dc6b024745fdef43b37cf21a657a3247d9adf8c252ef210d9a4e9c7191f698ccc9b10103b8bb811cdcf1a62903786476db8195ffb3cd004c57ad07a7a3c41eee391f66a7697e69409d7a78558720f6a1b9804d72de820b7b6165b8e14a2b1316576022423f22bb82fab16127be7173ddcd43fa7ea5c4474f79321a8c4b792caf12320c3047d026b7d63216a022e83655c2d811d2bd2a559970e9155b979953f9801ce918f690f43f5e3f07f7ce27a6837bf33b2490d9add8549f1e603a750c114bb92740cc3987cb9f948a6229f175a7b577b0b60d885a0a7ef05debe921376a7acdb25eaa8bb72e120e529cd775175012efb454cf41d240a946bf140af20d9a5dbed2e196d91a7ff33c2769f140fa0bb968111e1602221deae8d162e7a471354c2051acb43ec31015aaefa0b08bf1bddbb282e86a1caf45f3b63e4c6427ba9e99aed28ef79711794511511c52daf13b735e02b9833d3467bfd16886606d5555b7cc95ff2fea3b03c82cfe60e8602d9f70a3870f5b755573b955bb300bd3733b5ddf9a61fd3cd281af39520d6dfd8b7e2b165ec91749614a3b5241e2ea12470f91b58cf6163e02dfe79392db70cd17db9497cf59c89ac8377dbd02042f6ed270c8c2bc717623b203b74676890f5f4cd905b25772a25292d76b6f42a094c27eed13793d189e395ed3f28c5731976a7b45184acee45b3cf05a9c62045644dfe39f79cd331e282edae99cea652eb82819415ac2a5c21539cdd636fb835063ace3b6befffaf50bf6866e9b1a2b35037a330faeb18ca1696693dafd26b5f5da8dcd3e50ff09249bdda695f576d25024560b643d873d07293a80fe71998ef6ccd88c0cf9f69326b463c26fe4906faaf454ae68accd7ef3edffefdd2ede23a822a2267332f0791f1c4e6d5ab4661f279f5039b36a4476e56fd5b0461e585ff30a7c661b93f1".into(), 
"04000000001f22e1bc88c53b1554f8fdcf261fdb09f4cae6ef5e5032b788515f4a60d30d67d1b35fda68abc05f5af39e5ade224a5312b8dcd1f3629a7ff33355bb7ca93e32d719d14c15e565c05e84ead95a2f101a1b658ee2f36eb7ca65206e27cfca478be6146220bb071f49000b055b22a7a4bbafd6b52efb90f963d5f80126c27e437005fb47720e0000fd4005004d9875d71c540f558813142e263f597243bdd8d8105ff3d1ffd62ae51ccf22729debe510f97ab0631701dbd34b73e570597dc8825be6bd669e693037fb701040c273b44745f4e850c2d8aeca7ccab6ef7f462206a16d75358f2e8fddf9d0dbc6333ff55b1813a37f0ba240bd2d897fbd6cfdb1989ac8f3ec93b15ae4360edf84088ac9a4ea7d3d71290532bb51675e7310be1210aa33c184d693f6f7c15c5be1e89356ae3d663d0c548fceac0974fe4cb6c6559f50643280df9508460fd04f9cde55521b4c6d61c644c6c7b7473f9e39b412e3776f5e47b6c466aaf1dc76ff2114e716eb6b9614d0c93cdc229ec13b07057a7f7446c1aac51ef0950d4361fa2d20f22f29ff490bf6d6a2a267c45d88d3152d9f5291695f2f4fba65ca9763cb4176506c73b8162611b6004af7ec8d1ea55a225cca2576e4ac84ac333b663693a2f19f7786340ad9d2212d576a0b4e7700bd7d60de88940dce1f01481f9c41350eefd7b496218bcf70c4c8922dfd18d666d37d10cb0f14dd38e1225ec179dcab5501a4434674d6f9ff9f23c4df5f445cc2accf43189fc99ac56693df373a4207b0dc991009fae4796fd7e49cea4dd139ee72264dfd47f4e1ad2420d635c7a1f37950d022ffdcccc7651b645db0ba0ce94c18dcc902279b4601806beefe05016f1f85411e6562b584da0854db2e36f602d8c4974d385aee4a01d1132082c8cd7c71443162f7d7487c73d8a46f830f72a0d352d957bef5afc33c4447ef33b2491e28000d1f4687e95ffc2b9532d28ae4c48f8551bf527dbe18c672204495f2bd546566fd5770189e28c2de0974130a492ccd8737a8c6e971d02a23c4f9f27410348d1f666f93385bdc81bad8e9a9d1dbffdfa2609ebae52740b457ecd67a3bf0db02a14f5bdf3e25b35b2d3d303094e46e0e3daef559d9f0e074e512bcaf9fcc9d035083eec16806af8a93d27b4ad46754a425b6a02b1ac22f682e48f214d66b379d7042aa39f2c5f3448d05ca4b6360e162f31f197225f4ad579d69207c666711fb3f6ca814efcf430899360cced1168cd69ec0e809a89cf2cf2015f9f895a3dadd4ced6d94793e98201b1da6a0a5d90be5d06925e3ad60b9227f84b9c3060a6db6e7857d8731f975d4a993abf10d84590da02b114625109d864de070813179b651d528f66036c30a0700ee84fc5e59757a509745b64e76fa3
396f3c8b01a7724cd434e6d774dad36be8a73ad29f6859352aa15236e7825947396cb98e26b912b19ddc127590e59200c4334d1d96d7585a0e349b920f2e4e59cdedac911214c42c0894f72c8a7423d7aef3ea5ef9a5b650821f46537c65509ad8dcf6558c16c04f9877c737ff81875d9fbe01d23d37e937444cf257b0b57bc1c2a774f2e2bf5f3b0881be0e2282ba97ef6aad797f8fdb4053da4e478575805c7a93076c09847544a8e89f1cb3838df7870bcf61deb2144c6f6349c966b67545703058f9227965b97835b049538fb428431a8461586b022368626d20e9b6bfdd7232a5cc6a0aa214319cb440c45443a2446d1e17713c0e1049f0fd759d1dbff493302140376cfb153330ed455a043189260cb7d2d90333a37d3584f2d907d0a73dccee299ad14141d60d1409cda688464a13b5dab37476641741717d599a60c0ac84d85869ed449f83933ad30e2591157fd1f07b73ecf26f34e91bc00f1ca86ae34ca8231b372cdc2ed18d463ac42f92859d6f0e2c483dbb23d785f1233db2033458af9d7c1e7029ac5cc33ca7d25b2b49fd71b1ae5f5ce969b6e77333bf5fbb5e6645dd0a4d0c6e82eb534ac264ddbe28513e4b82b3578c1a6cbfaa2522aa50985fe2cce43cf3363eaacca0e09c721fd603d43c3a4fdf8dde0c9ff2c054910b16aeef7c4d86b31".into(), "04000000fcead9a1b425124f11aa97e0614120ce87bdddcad655672916f9c4564dc057002bd3df07a4602620282b276359529114ba89b59b16bec235d584c3cf5cc6b2d132d719d14c15e565c05e84ead95a2f101a1b658ee2f36eb7ca65206e27cfca47bfe61462d5b9071f1a001daf299c51afbd74fd75a98ba49a6e40ae8ad92b3afdc1cf215fd6190000fd40050044b5e035b02d138a9704f9513c0865f2733b7c09294ee504c155c283f4895559b6ac39828eac98ad393a642330589e8849040f55ce44f8f2197529d0b0ed57ccdda41f1971e153ec28ac5b4eba968741db374104d65ee234580a83bea1c0cdb67b8bc207057486eb1d90e21ba0cd4f5e9fd834821fafc1517c5d1fceb50ba6f6b102a9b4edac46f2359aec795a4e2458f51114a41289634b3b1cf250e3e38f3689f951278dfa7202a7dfe311cc098fd4a8d02c8f8a74e4a5010b18ee2e60578d5e9f1c094433a73f26e6546e20a574fc261baaa79e9910ab86ed607786a1cc88e7de51ff928d434e26eaef1437f7068c743f26d7c0eea6791e869b101fee8ab41b50af6174c5e6b731a1719f31ee3e6529efef49f31665baedc9382e9665278a84467d479f139fc7a8ef66fef9bd2fd17f7779ee315d458f691a290fa7c2179de8bb91a78458c5290d4aa45b163254006800ba2fce7479511f744fd7de96495c39be93413d8b0b187fe092
537e1a7646a66a125b33333f6ecd10085e23ad168b24ee7be69d01ea021a39401e4bd41d818499e7174dd9b85542076c78cb89eeec1c190301b4709dbc963d47926e31bb0235ba6a7029d49458150f6491ac9c973b8a2c893258f907baf4bcb7c39f12b900ba2b2382cd5dd84314ee504ade835ad9a1cb13a7f5928a483ebc9415429810fd99893f2f8f83970b8b47143d617e6f9853e4d86ff378be664218f1c32531143e209f171590dd48216fec879a6b9cbf04432bf4f1a3734b69b6a9f1a358a259a0f9082cfb6c1f3d9d2d9e4522ad651ccce565f06b30c1c0b27252270c2f6608cf4f3288a7e7d4b174e646de05341f7db62b00b5ccb295f058d34b87201148828e9b3f7e08f60e100f810be27eb7f4c471cda7621106fe78bc69ec2bd27acabd55dc094b8626913b7d24d9b60939754700f32574a733a195f8b0220d56f6797de0bcd7b80d561896b816586593409f76e85a7a1035f821dee32a02fdbc26bc4cca375bed418b9d678ac589249a1a5a5b24447ee9b42e33f817066caf3d4e17d0347f6acf0cbf426d4df49413b3d12350edec2681ab9cfecd0825ccfb2649a57391d3f153050dfb4350d60e5e464229ddd6e49ece95557b8ef48c18cbffbe9fc8d7700f611a4b33a2a254afcec638c485e36daf0364da7d4302e488db7b6c41297571048cfea5452e324abb9f9e1043e625fd0853b7e03063d1c3a43aa1ee62d45d890b5e4d10640e775cff6852b6d1acd4a503b3ece3b319cbcf33ff9fdf17b8f852d748db1e05af80507f5d0e1bc44444b155d7da20f7f0b4d6d83368c3bb9e1321b39472a8677ea1d3aca43b453d35edca37b7536d19c26b764958b3c7c30f3211d7b7bb7f6a6d7fd7bf2dda6e7d7b1e533556863549bbe1394a3828596f25029b7e30495e1235f084e5edd133bc29fce4f1e5e514eb1d1cb19fd8dfbb0d130fbec4e288f23dae86311ffd6f4afbaacc2ffe1cc8811a455ba6f5659f82515b56c6ac84277bff5bef98fefc74e002e4a11866a417a429541f8a62df4108e4730d3045f92984bcf1ab2f7d03f8bb1767e91791530cd8eec412919e1f2e341e66a1588a8f485f7aa005787af946b9cb10f6685420b7e1663f66374fddc5e70720507ee2134f3b02df042fcf6db4a5bdd74cc5010793634816fe447cc68e076b225cc1ca872929ef246ce356dc8d8964ff6d7119d071eccb6dc37f75b932c44cdc30723b8357a2761c6de6ab2713e6f6a782538cb731b07950d3f459760a00cc0af406d6848014746b02653636f479d952b46fdeff976e1d159ba46ae7363d5b0042d3905a0bda12aaa6eaae1a5a0d55d4c1930aa1c004cd610866853a247239366aa20f8968ea9ca3d5d6d7321a5d0f2c".into() ]; - validate_headers(headers, 
false, false).unwrap() + let previous_header: BlockHeader = "04000000bb496ba8d09f8f98b15cdaf5798163bdd70676eb1c8b538f53ab4f83da4a27000db352177c6b5ad2499a906cec33b843fb17fc1ec298cd06c7e7ceb7b62e144232d719d14c15e565c05e84ead95a2f101a1b658ee2f36eb7ca65206e27cfca473de614625be6071f09006c286bc5ec73dd27a09bf687700c06fb04d0b9a063c0aa0746c9db170000fd40050053b27dad1f5a858b78f3154039759e985ed57db10ecb772810d7f158c55083a14b9f2ba26ae9fcb82012186e2528f67c45b7b216a69fe26232ad2d179a141b1b10e4d5f108c7b920b49348f6eef2d70b7f02cb01d8d9992f8f2d7b6608806b10ff329846b188de200aa37c73ac03f6c9b79cf5613c71b7969b4abafdbc1165ad955a049269584c83b36f36a3e9becf2fe81f3b1917475eb13ecfed3813ecc32206078d8c1e2797013dfc6f6a55e06f1c06a07959ef94d53ca0fc81d03cb6f614761156ed4ff1a8e5c9f0b96f3c8c3eeb9a0720cf4ed10397330f49b83439c5083eea1d1785a10d86ca2866d0da4ca746c49118b780c55aa6cd5b4c0491cefa258ecf129307d15e001415b203e89c008f4444b236aa556dbf4f6d05e0c57642cfa142df2f8546f1d37a6b2feaf98496892b41caefbe7dc7bcbb2755752df3dbf00ac1fc558896f14541aea4cc78ec5d00bbe5398fac4a658b1ae3399777f15117c0f3de3c63bc5b3edf6543d172cfc66907f9cf8706e97b14281daeb427801dfb0910743873265ae6bae71dbf22353c321f726e68f747965858f488dd507b7e6adee42509e5720373dce5b111b420c906b0f2cb391cfb9d581e2509da3829d6718469f383e07043694db87db0ce1196449a6c9cd941a8bde507e553c0ca534238dcc93633631926102c87cd0f83720ccff60de8b05b103e086a2c2cb7943f21033a5658235fc52708907e1ea722e726808db0270bf898c51e9dd0745614857783dc11a6dcd7760d4a07ddbd83a2e02b23fa789b79eed22dc411b9b48f71c54f12387065e3ff0638701e0f6a0dd56d0ce395d150b237b60c166352e69b92173b884446d7660f5857458b97c6d4ee54f8a1f60113aff30e54c1f7c572b85dcb7a2419d2f736a9b0a6d99ea549bd74e546251c0b8be7975e9a6d96aa3467b1dc6b024745fdef43b37cf21a657a3247d9adf8c252ef210d9a4e9c7191f698ccc9b10103b8bb811cdcf1a62903786476db8195ffb3cd004c57ad07a7a3c41eee391f66a7697e69409d7a78558720f6a1b9804d72de820b7b6165b8e14a2b1316576022423f22bb82fab16127be7173ddcd43fa7ea5c4474f79321a8c4b792caf12320c3047d026b7d63216a022e83655c2d811d2bd2a559970e9155b9
79953f9801ce918f690f43f5e3f07f7ce27a6837bf33b2490d9add8549f1e603a750c114bb92740cc3987cb9f948a6229f175a7b577b0b60d885a0a7ef05debe921376a7acdb25eaa8bb72e120e529cd775175012efb454cf41d240a946bf140af20d9a5dbed2e196d91a7ff33c2769f140fa0bb968111e1602221deae8d162e7a471354c2051acb43ec31015aaefa0b08bf1bddbb282e86a1caf45f3b63e4c6427ba9e99aed28ef79711794511511c52daf13b735e02b9833d3467bfd16886606d5555b7cc95ff2fea3b03c82cfe60e8602d9f70a3870f5b755573b955bb300bd3733b5ddf9a61fd3cd281af39520d6dfd8b7e2b165ec91749614a3b5241e2ea12470f91b58cf6163e02dfe79392db70cd17db9497cf59c89ac8377dbd02042f6ed270c8c2bc717623b203b74676890f5f4cd905b25772a25292d76b6f42a094c27eed13793d189e395ed3f28c5731976a7b45184acee45b3cf05a9c62045644dfe39f79cd331e282edae99cea652eb82819415ac2a5c21539cdd636fb835063ace3b6befffaf50bf6866e9b1a2b35037a330faeb18ca1696693dafd26b5f5da8dcd3e50ff09249bdda695f576d25024560b643d873d07293a80fe71998ef6ccd88c0cf9f69326b463c26fe4906faaf454ae68accd7ef3edffefdd2ede23a822a2267332f0791f1c4e6d5ab4661f279f5039b36a4476e56fd5b0461e585ff30a7c661b93f1".into(); + block_on(validate_headers( + "MORTY", + previous_header, + 1330480, + headers, + false, + false, + &TestBlockHeadersStorage {}, + &None, + )) + .unwrap() } #[test] @@ -555,6 +568,17 @@ mod tests { let headers: Vec = vec!["00200020eab6fa183da8f9e4c761b31a67a76fa6a7658eb84c760200000000000000000063cd9585d434ec0db25894ec4b1f03735f10e31709c4395ea67c50c8378f134b972f166278100a17bfd87203".into(), "0000402045c698413fbe8b5bf10635658d2a1cec72062798e51200000000000000000000869617420a4c95b1d3d6d012419d2b6c199cff9b68dd9a790892a4da8466fb056033166278100a1743ac4d5b".into(), "0400e02019d733c1fd76a1fa5950de7bee9d80f107276b93a67204000000000000000000a0d1dee718f5f732c041800e9aa2c25e92be3f6de28278545388db8a6ae27df64c37166278100a170a970c19".into()]; - validate_headers(headers, true, true).unwrap() + let previous_header: BlockHeader = 
"00c0d933bd3c3dcf14027754c7c8f7190a79b699188c8b24f49204000000000000000000ce2b1e4dc6cb44fae781df459428382d4b5be52766f67a42725cc0d78e00784d352e166278100a173d95ee89".into(); + block_on(validate_headers( + "BTC", + previous_header, + 724608, + headers, + true, + false, + &TestBlockHeadersStorage {}, + &None, + )) + .unwrap() } } diff --git a/mm2src/mm2_bitcoin/spv_validation/src/lib.rs b/mm2src/mm2_bitcoin/spv_validation/src/lib.rs index d74dc35392..0f5bb7e996 100644 --- a/mm2src/mm2_bitcoin/spv_validation/src/lib.rs +++ b/mm2src/mm2_bitcoin/spv_validation/src/lib.rs @@ -4,6 +4,7 @@ extern crate keys; extern crate primitives; extern crate ripemd160; extern crate rustc_hex as hex; +extern crate serde; extern crate serialization; extern crate sha2; extern crate test_helpers; diff --git a/mm2src/mm2_bitcoin/spv_validation/src/storage.rs b/mm2src/mm2_bitcoin/spv_validation/src/storage.rs index d516535f79..0f0b07f2cb 100644 --- a/mm2src/mm2_bitcoin/spv_validation/src/storage.rs +++ b/mm2src/mm2_bitcoin/spv_validation/src/storage.rs @@ -4,7 +4,7 @@ use derive_more::Display; use primitives::hash::H256; use std::collections::HashMap; -#[derive(Debug, Display)] +#[derive(Clone, Debug, Display, Eq, PartialEq)] pub enum BlockHeaderStorageError { #[display(fmt = "Can't add to the storage for {} - reason: {}", coin, reason)] AddToStorageError { diff --git a/mm2src/mm2_bitcoin/spv_validation/src/work.rs b/mm2src/mm2_bitcoin/spv_validation/src/work.rs index ed5935006d..c3cac78df2 100644 --- a/mm2src/mm2_bitcoin/spv_validation/src/work.rs +++ b/mm2src/mm2_bitcoin/spv_validation/src/work.rs @@ -3,6 +3,7 @@ use chain::{BlockHeader, BlockHeaderBits}; use derive_more::Display; use primitives::compact::Compact; use primitives::U256; +use serde::{Deserialize, Serialize}; use std::cmp; const RETARGETING_FACTOR: u32 = 4; @@ -21,28 +22,25 @@ pub const MAX_BITS_BTC: u32 = 486604799; fn is_retarget_height(height: u32) -> bool { height % RETARGETING_INTERVAL == 0 } -#[derive(Debug, Display)] 
+#[derive(Clone, Debug, Display, Eq, PartialEq)] pub enum NextBlockBitsError { #[display(fmt = "Block headers storage error: {}", _0)] StorageError(BlockHeaderStorageError), #[display(fmt = "Can't find Block header for {} with height {}", height, coin)] - NoSuchBlockHeader { - coin: String, - height: u64, - }, + NoSuchBlockHeader { coin: String, height: u64 }, #[display(fmt = "Can't find a Block header for {} with no max bits", coin)] - NoBlockHeaderWithNoMaxBits { - coin: String, - }, - Internal(String), + NoBlockHeaderWithNoMaxBits { coin: String }, } impl From for NextBlockBitsError { fn from(e: BlockHeaderStorageError) -> Self { NextBlockBitsError::StorageError(e) } } +#[derive(Clone, Debug, Deserialize, Serialize)] pub enum DifficultyAlgorithm { + #[serde(rename = "Bitcoin Mainnet")] BitcoinMainnet, + #[serde(rename = "Bitcoin Testnet")] BitcoinTestnet, } @@ -52,7 +50,7 @@ pub async fn next_block_bits( last_block_header: BlockHeader, last_block_height: u32, storage: &dyn BlockHeaderStorageOps, - algorithm: DifficultyAlgorithm, + algorithm: &DifficultyAlgorithm, ) -> Result { match algorithm { DifficultyAlgorithm::BitcoinMainnet => { @@ -125,7 +123,7 @@ async fn btc_mainnet_next_block_bits( storage: &dyn BlockHeaderStorageOps, ) -> Result { if last_block_height == 0 { - return Err(NextBlockBitsError::Internal("Last block height can't be zero".into())); + return Ok(BlockHeaderBits::Compact(MAX_BITS_BTC.into())); } let height = last_block_height + 1; @@ -145,14 +143,14 @@ async fn btc_testnet_next_block_bits( last_block_height: u32, storage: &dyn BlockHeaderStorageOps, ) -> Result { + let max_bits = BlockHeaderBits::Compact(MAX_BITS_BTC.into()); if last_block_height == 0 { - return Err(NextBlockBitsError::Internal("Last block height can't be zero".into())); + return Ok(max_bits); } let height = last_block_height + 1; let last_block_bits = last_block_header.bits.clone(); let max_time_gap = last_block_header.time + 2 * TARGET_SPACING_SECONDS; - let max_bits = 
BlockHeaderBits::Compact(MAX_BITS_BTC.into()); if is_retarget_height(height) { btc_retarget_bits(coin, height, last_block_header, storage).await @@ -170,7 +168,7 @@ async fn btc_testnet_next_block_bits( } #[cfg(test)] -mod tests { +pub(crate) mod tests { use super::*; use crate::storage::{BlockHeaderStorageError, BlockHeaderStorageOps}; use async_trait::async_trait; @@ -203,7 +201,7 @@ mod tests { .collect() } - struct TestBlockHeadersStorage {} + pub(crate) struct TestBlockHeadersStorage {} #[async_trait] impl BlockHeaderStorageOps for TestBlockHeadersStorage { From 165d6a288a481069aef86c8e263d85b2a75eda8b Mon Sep 17 00:00:00 2001 From: shamardy Date: Wed, 17 Aug 2022 19:33:03 +0200 Subject: [PATCH 17/33] headers validation wip: make block headers storage non-optional --- mm2src/coins/utxo.rs | 34 +-- mm2src/coins/utxo/rpc_clients.rs | 222 +++++++++--------- mm2src/coins/utxo/spv.rs | 5 +- .../coins/utxo/utxo_block_header_storage.rs | 32 +-- .../utxo/utxo_builder/utxo_arc_builder.rs | 30 +-- .../utxo/utxo_builder/utxo_coin_builder.rs | 22 +- .../utxo/utxo_builder/utxo_conf_builder.rs | 7 + mm2src/coins/utxo/utxo_common.rs | 75 ++---- .../utxo_indexedb_block_header_storage.rs | 4 + .../utxo/utxo_sql_block_header_storage.rs | 57 +++++ mm2src/coins/utxo/utxo_tests.rs | 17 +- mm2src/coins/utxo/utxo_wasm_tests.rs | 7 +- mm2src/coins/z_coin.rs | 2 - .../src/for_tests/workTestVectors.json | 10 + .../spv_validation/src/helpers_validation.rs | 96 ++++++-- .../mm2_bitcoin/spv_validation/src/storage.rs | 2 + mm2src/mm2_bitcoin/spv_validation/src/work.rs | 10 + mm2src/mm2_main/src/lp_swap.rs | 1 - 18 files changed, 371 insertions(+), 262 deletions(-) diff --git a/mm2src/coins/utxo.rs b/mm2src/coins/utxo.rs index 082be0b4a4..bfcd3fba9d 100644 --- a/mm2src/coins/utxo.rs +++ b/mm2src/coins/utxo.rs @@ -73,13 +73,13 @@ use rpc::v1::types::{Bytes as BytesJson, Transaction as RpcTransaction, H256 as use script::{Builder, Script, SignatureVersion, TransactionInputSigner}; use 
serde_json::{self as json, Value as Json}; use serialization::{serialize, serialize_with_flags, Error as SerError, SERIALIZE_TRANSACTION_WITNESS}; -use spv_validation::helpers_validation::SPVError; +use spv_validation::helpers_validation::{BlockHeaderVerificationParams, SPVError}; use spv_validation::storage::BlockHeaderStorageError; use std::array::TryFromSliceError; use std::collections::{HashMap, HashSet}; use std::convert::TryInto; use std::hash::Hash; -use std::num::NonZeroU64; +use std::num::{NonZeroU64, TryFromIntError}; use std::ops::Deref; #[cfg(not(target_arch = "wasm32"))] use std::path::{Path, PathBuf}; @@ -94,7 +94,6 @@ use utxo_signer::{TxProvider, TxProviderError, UtxoSignTxError, UtxoSignTxResult use self::rpc_clients::{electrum_script_hash, ElectrumClient, ElectrumRpcRequest, EstimateFeeMethod, EstimateFeeMode, NativeClient, UnspentInfo, UnspentMap, UtxoRpcClientEnum, UtxoRpcError, UtxoRpcFut, UtxoRpcResult}; -use self::utxo_block_header_storage::BlockHeaderVerificationParams; use super::{big_decimal_from_sat_unsigned, BalanceError, BalanceFut, BalanceResult, CoinBalance, CoinsContext, DerivationMethod, FeeApproxStage, FoundSwapTxSpend, HistorySyncState, KmdRewardsDetails, MarketCoinOps, MmCoin, NumConversError, NumConversResult, PrivKeyActivationPolicy, PrivKeyNotAllowed, PrivKeyPolicy, @@ -511,6 +510,9 @@ pub struct UtxoCoinConf { pub trezor_coin: Option, /// Used in condition where the coin will validate spv proof or not pub enable_spv_proof: bool, + /// The parameters that specify how the coin block headers should be verified if spv proof is enabled + // Todo: "if spv proof is enabled"? 
maybe if block headers storage is enabled + pub block_headers_verification_params: Option, } #[derive(Debug)] @@ -585,15 +587,19 @@ impl From for GetTxError { fn from(err: SerError) -> GetTxError { GetTxError::TxDeserialization(err) } } -#[derive(Debug)] +#[derive(Debug, Display)] pub enum GetTxHeightError { HeightNotFound(String), + StorageError(BlockHeaderStorageError), + ConversionError(TryFromIntError), } impl From for SPVError { fn from(e: GetTxHeightError) -> Self { match e { GetTxHeightError::HeightNotFound(e) => SPVError::InvalidHeight(e), + GetTxHeightError::StorageError(e) => SPVError::HeaderStorageError(e), + GetTxHeightError::ConversionError(e) => SPVError::Internal(e.to_string()), } } } @@ -602,6 +608,14 @@ impl From for GetTxHeightError { fn from(e: UtxoRpcError) -> Self { GetTxHeightError::HeightNotFound(e.to_string()) } } +impl From for GetTxHeightError { + fn from(e: BlockHeaderStorageError) -> Self { GetTxHeightError::StorageError(e) } +} + +impl From for GetTxHeightError { + fn from(err: TryFromIntError) -> GetTxHeightError { GetTxHeightError::ConversionError(err) } +} + #[derive(Debug, Display)] pub enum GetBlockHeaderError { #[display(fmt = "Block header storage error: {}", _0)] @@ -1259,12 +1273,7 @@ impl UtxoActivationParams { Some("electrum") => { let servers = json::from_value(req["servers"].clone()).map_to_mm(UtxoFromLegacyReqErr::InvalidElectrumServers)?; - let block_header_params = json::from_value(req["block_header_params"].clone()) - .map_to_mm(UtxoFromLegacyReqErr::InvalidBlockHeaderVerificationParams)?; - UtxoRpcMode::Electrum { - servers, - block_header_params, - } + UtxoRpcMode::Electrum { servers } }, _ => return MmError::err(UtxoFromLegacyReqErr::UnexpectedMethod), }; @@ -1306,10 +1315,7 @@ impl UtxoActivationParams { #[serde(tag = "rpc", content = "rpc_data")] pub enum UtxoRpcMode { Native, - Electrum { - servers: Vec, - block_header_params: Option, - }, + Electrum { servers: Vec }, } #[derive(Debug)] diff --git 
a/mm2src/coins/utxo/rpc_clients.rs b/mm2src/coins/utxo/rpc_clients.rs index a23c604b9e..299221b7e9 100644 --- a/mm2src/coins/utxo/rpc_clients.rs +++ b/mm2src/coins/utxo/rpc_clients.rs @@ -2,7 +2,7 @@ #![cfg_attr(target_arch = "wasm32", allow(dead_code))] use crate::utxo::utxo_block_header_storage::BlockHeaderStorage; -use crate::utxo::{output_script, sat_from_big_decimal, GetBlockHeaderError, GetTxError, GetTxHeightError}; +use crate::utxo::{output_script, sat_from_big_decimal, GetTxError, GetTxHeightError}; use crate::{big_decimal_from_sat_unsigned, NumConversError, RpcTransportEventHandler, RpcTransportEventHandlerShared}; use async_trait::async_trait; use chain::{BlockHeader, BlockHeaderBits, BlockHeaderNonce, OutPoint, Transaction as UtxoTx}; @@ -35,8 +35,8 @@ use serde_json::{self as json, Value as Json}; use serialization::{coin_variant_by_ticker, deserialize, serialize, serialize_with_flags, CoinVariant, CompactInteger, Reader, SERIALIZE_TRANSACTION_WITNESS}; use sha2::{Digest, Sha256}; -use spv_validation::helpers_validation::{validate_headers, SPVError}; -use spv_validation::storage::{BlockHeaderStorageError, BlockHeaderStorageOps}; +use spv_validation::helpers_validation::SPVError; +use spv_validation::storage::BlockHeaderStorageOps; use std::collections::hash_map::Entry; use std::collections::HashMap; use std::convert::TryInto; @@ -1567,7 +1567,8 @@ pub struct ElectrumClientImpl { protocol_version: OrdRange, get_balance_concurrent_map: ConcurrentRequestMap, list_unspent_concurrent_map: ConcurrentRequestMap>, - block_headers_storage: Option, + // Todo: make this not optional and check if spv is enabled to do other stuff (in coins activation add it to task manager maybe) + block_headers_storage: BlockHeaderStorage, } async fn electrum_request_multi( @@ -1710,7 +1711,7 @@ impl ElectrumClientImpl { pub fn protocol_version(&self) -> &OrdRange { &self.protocol_version } /// Get block headers storage. 
- pub fn block_headers_storage(&self) -> &Option { &self.block_headers_storage } + pub fn block_headers_storage(&self) -> &BlockHeaderStorage { &self.block_headers_storage } } #[derive(Clone, Debug)] @@ -1869,22 +1870,14 @@ impl ElectrumClient { rpc_func!(self, "blockchain.block.headers", start_height, count) } - pub fn retrieve_last_headers( - &self, - blocks_limit_to_check: NonZeroU64, - block_height: u64, - ) -> UtxoRpcFut<(HashMap, Vec)> { + // Todo: revise this function as it wasn't written by me, add a comment that this is inclusive + pub fn retrieve_headers(&self, from: u64, to: u64) -> UtxoRpcFut<(HashMap, Vec)> { let coin_name = self.coin_ticker.clone(); - let (from, count) = { - let from = if block_height < blocks_limit_to_check.get() { - 0 - } else { - block_height - blocks_limit_to_check.get() - }; - (from, blocks_limit_to_check) - }; + // Todo: check for to >= from and that neither are zero + let count = to - from + 1; Box::new( - self.blockchain_block_headers(from, count) + // Todo: remove unwrap + self.blockchain_block_headers(from, count.try_into().unwrap()) .map_to_mm_fut(UtxoRpcError::from) .and_then(move |headers| { let (block_registry, block_headers) = { @@ -1919,99 +1912,99 @@ impl ElectrumClient { rpc_func!(self, "blockchain.transaction.get_merkle", txid, height) } - async fn get_tx_height(&self, tx: &UtxoTx) -> Result> { - for output in tx.outputs.clone() { - let script_pubkey_str = hex::encode(electrum_script_hash(&output.script_pubkey)); - if let Ok(history) = self.scripthash_get_history(script_pubkey_str.as_str()).compat().await { - if let Some(item) = history - .into_iter() - .find(|item| item.tx_hash.reversed() == H256Json(*tx.hash()) && item.height > 0) - { - return Ok(item.height as u64); - } - } - } - MmError::err(GetTxHeightError::HeightNotFound( - "Couldn't find height through electrum!".into(), - )) - } - - async fn tx_height_from_storage_or_rpc(&self, tx: &UtxoTx) -> Result> { - if let Some(storage) = &self.block_headers_storage { 
- let ticker = self.coin_name(); - let tx_hash = tx.hash().reversed(); - let blockhash = self.get_verbose_transaction(&tx_hash.into()).compat().await?.blockhash; - if let Ok(Some(height)) = storage.get_block_height_by_hash(ticker, blockhash.into()).await { - if let Ok(height) = height.try_into() { - return Ok(height); - } - } - } - - self.get_tx_height(tx).await - } - - async fn valid_block_header_from_storage(&self, height: u64) -> Result> { - let storage = match &self.block_headers_storage { - Some(storage) => storage, - None => { - return MmError::err(GetBlockHeaderError::StorageError(BlockHeaderStorageError::Internal( - "block_headers_storage is not initialized".to_owned(), - ))) - }, - }; + // Todo: remove comments + // async fn get_tx_height_from_rpc(&self, tx: &UtxoTx) -> Result> { + // for output in tx.outputs.clone() { + // let script_pubkey_str = hex::encode(electrum_script_hash(&output.script_pubkey)); + // if let Ok(history) = self.scripthash_get_history(script_pubkey_str.as_str()).compat().await { + // if let Some(item) = history + // .into_iter() + // .find(|item| item.tx_hash.reversed() == H256Json(*tx.hash()) && item.height > 0) + // { + // return Ok(item.height as u64); + // } + // } + // } + // MmError::err(GetTxHeightError::HeightNotFound( + // "Couldn't find height through electrum!".into(), + // )) + // } + + // get_tx_height_from_rpc is costly since it loops through history after requesting the whole history of the script pubkey + // This method should always be used if the block headers are saved to the DB + async fn get_tx_height_from_storage(&self, tx: &UtxoTx) -> Result> { let ticker = self.coin_name(); - match storage.get_block_header(ticker, height).await? 
{ - None => { - let bytes = self.blockchain_block_header(height).compat().await?; - let header: BlockHeader = deserialize(bytes.0.as_slice())?; - let params = &storage.params; - let blocks_limit = params.blocks_limit_to_check; - let (headers_registry, headers) = self.retrieve_last_headers(blocks_limit, height).compat().await?; - let previous_header_height = if height < blocks_limit.get() { - 0 - } else { - height - blocks_limit.get() - }; - // Todo: remove unwrap, move this inside validate_headers function, maybe also move ticker inside storage? - let previous_header = storage.get_block_header(ticker, previous_header_height).await?.unwrap(); - match validate_headers( - ticker, - previous_header, - previous_header_height as u32, - headers, - params.difficulty_check, - params.constant_difficulty, - storage, - ¶ms.difficulty_algorithm, - ) - .await - { - Ok(_) => { - storage.add_block_headers_to_storage(ticker, headers_registry).await?; - Ok(header) - }, - Err(err) => MmError::err(GetBlockHeaderError::SPVError(err)), - } - }, - Some(header) => Ok(header), - } - } - - async fn block_header_from_storage_or_rpc(&self, height: u64) -> Result> { - match &self.block_headers_storage { - Some(_) => self.valid_block_header_from_storage(height).await, - None => Ok(deserialize( - self.blockchain_block_header(height).compat().await?.as_slice(), - )?), - } - } - - pub async fn get_merkle_and_header( + let tx_hash = tx.hash().reversed(); + let blockhash = self.get_verbose_transaction(&tx_hash.into()).compat().await?.blockhash; + Ok(self + .block_headers_storage() + .get_block_height_by_hash(ticker, blockhash.into()) + .await? + .ok_or_else(|| GetTxHeightError::HeightNotFound("Transaction block header is not found in storage".into()))? + .try_into()?) 
+ } + + // Todo: remove this or find other solution + // async fn valid_block_header_from_storage(&self, height: u64) -> Result> { + // let storage = match self.block_headers_storage() { + // Some(storage) => storage, + // None => { + // return MmError::err(GetBlockHeaderError::StorageError(BlockHeaderStorageError::Internal( + // "block_headers_storage is not initialized".to_owned(), + // ))) + // }, + // }; + // let ticker = self.coin_name(); + // match storage.get_block_header(ticker, height).await? { + // None => { + // let bytes = self.blockchain_block_header(height).compat().await?; + // let header: BlockHeader = deserialize(bytes.0.as_slice())?; + // let params = &storage.params; + // let blocks_limit = params.blocks_limit_to_check; + // let (headers_registry, headers) = self.retrieve_last_headers(blocks_limit, height).compat().await?; + // let previous_header_height = if height < blocks_limit.get() { + // 0 + // } else { + // height - blocks_limit.get() + // }; + // match validate_headers( + // ticker, + // previous_header_height, + // headers, + // params.difficulty_check, + // params.constant_difficulty, + // storage, + // ¶ms.difficulty_algorithm, + // params.genesis_block_header.clone(), + // ) + // .await + // { + // Ok(_) => { + // storage.add_block_headers_to_storage(ticker, headers_registry).await?; + // Ok(header) + // }, + // Err(err) => MmError::err(GetBlockHeaderError::SPVError(err)), + // } + // }, + // Some(header) => Ok(header), + // } + // } + + // Todo: remove this or find other solution + // async fn block_header_from_storage_or_rpc(&self, height: u64) -> Result> { + // match self.block_headers_storage() { + // Some(_) => self.valid_block_header_from_storage(height).await, + // None => Ok(deserialize( + // self.blockchain_block_header(height).compat().await?.as_slice(), + // )?), + // } + // } + + pub async fn get_merkle_and_header_from_rpc( &self, tx: &UtxoTx, ) -> Result<(TxMerkleBranch, BlockHeader, u64), MmError> { - let height = 
self.tx_height_from_storage_or_rpc(tx).await?; + let height = self.get_tx_height_from_storage(tx).await?; let merkle_branch = self .blockchain_transaction_get_merkle(tx.hash().reversed().into(), height) @@ -2019,7 +2012,14 @@ impl ElectrumClient { .await .map_to_mm(|e| SPVError::UnableToGetMerkle(e.to_string()))?; - let header = self.block_header_from_storage_or_rpc(height).await?; + let header: BlockHeader = deserialize( + self.blockchain_block_header(height) + .compat() + .await + .map_err(|e| SPVError::UnableToGetHeader(e.to_string()))? + .as_slice(), + ) + .map_err(|e| SPVError::UnableToGetHeader(e.to_string()))?; Ok((merkle_branch, header, height)) } @@ -2257,7 +2257,7 @@ impl ElectrumClientImpl { pub fn new( coin_ticker: String, event_handlers: Vec, - block_headers_storage: Option, + block_headers_storage: BlockHeaderStorage, ) -> ElectrumClientImpl { let protocol_version = OrdRange::new(1.2, 1.4).unwrap(); ElectrumClientImpl { @@ -2277,7 +2277,7 @@ impl ElectrumClientImpl { coin_ticker: String, event_handlers: Vec, protocol_version: OrdRange, - block_headers_storage: Option, + block_headers_storage: BlockHeaderStorage, ) -> ElectrumClientImpl { ElectrumClientImpl { protocol_version, diff --git a/mm2src/coins/utxo/spv.rs b/mm2src/coins/utxo/spv.rs index 10fe17b0e0..2a0becfc15 100644 --- a/mm2src/coins/utxo/spv.rs +++ b/mm2src/coins/utxo/spv.rs @@ -29,6 +29,7 @@ pub trait SimplePaymentVerification { #[async_trait] impl SimplePaymentVerification for ElectrumClient { + // Todo: this is not working right should get a header from DB to use for validation async fn validate_spv_proof( &self, tx: &UtxoTx, @@ -49,7 +50,8 @@ impl SimplePaymentVerification for ElectrumClient { return MmError::err(SPVError::Timeout); } - match self.get_merkle_and_header(tx).await { + // Todo: should get merkle from RPC and header from storage (also check where we get headers every where and get it from storage or RPC) + match self.get_merkle_and_header_from_rpc(tx).await { Ok(res) => 
break res, Err(e) => { error!( @@ -81,6 +83,7 @@ impl SimplePaymentVerification for ElectrumClient { intermediate_nodes, }; + // Todo: refactor validate function along validate_spv_proof proof.validate().map_err(MmError::new)?; Ok(ConfirmedTransactionInfo { diff --git a/mm2src/coins/utxo/utxo_block_header_storage.rs b/mm2src/coins/utxo/utxo_block_header_storage.rs index 2ef0c98f7a..b23c7358b7 100644 --- a/mm2src/coins/utxo/utxo_block_header_storage.rs +++ b/mm2src/coins/utxo/utxo_block_header_storage.rs @@ -7,57 +7,41 @@ use chain::BlockHeader; use mm2_core::mm_ctx::MmArc; use primitives::hash::H256; use spv_validation::storage::{BlockHeaderStorageError, BlockHeaderStorageOps}; -use spv_validation::work::DifficultyAlgorithm; use std::collections::HashMap; use std::fmt::{Debug, Formatter}; -use std::num::NonZeroU64; - -/// SPV headers verification parameters -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct BlockHeaderVerificationParams { - pub difficulty_check: bool, - pub constant_difficulty: bool, - // This should to be equal to or greater than the number of blocks needed before the chain is safe from reorganization (e.g. 6 blocks for BTC) - pub blocks_limit_to_check: NonZeroU64, - pub check_every: f64, - pub difficulty_algorithm: Option, -} pub struct BlockHeaderStorage { pub inner: Box, - pub params: BlockHeaderVerificationParams, + // Todo: BlockHeaderVerificationParams should be initialized with coin activation when spv is enabled (will be used in lopp only) + // pub params: BlockHeaderVerificationParams, } impl Debug for BlockHeaderStorage { fn fmt(&self, _f: &mut Formatter<'_>) -> std::fmt::Result { Ok(()) } } +// Todo: check if it's better to remove this? 
pub trait InitBlockHeaderStorageOps: Send + Sync + 'static { - fn new_from_ctx( - ctx: MmArc, - params: BlockHeaderVerificationParams, - ) -> Result + fn new_from_ctx(ctx: MmArc) -> Result where Self: Sized; } impl InitBlockHeaderStorageOps for BlockHeaderStorage { #[cfg(not(target_arch = "wasm32"))] - fn new_from_ctx(ctx: MmArc, params: BlockHeaderVerificationParams) -> Result { + fn new_from_ctx(ctx: MmArc) -> Result { let sqlite_connection = ctx.sqlite_connection.ok_or(BlockHeaderStorageError::Internal( "sqlite_connection is not initialized".to_owned(), ))?; Ok(BlockHeaderStorage { inner: Box::new(SqliteBlockHeadersStorage(sqlite_connection.clone())), - params, }) } #[cfg(target_arch = "wasm32")] - fn new_from_ctx(_ctx: MmArc, params: BlockHeaderVerificationParams) -> Result { + fn new_from_ctx(_ctx: MmArc) -> Result { Ok(BlockHeaderStorage { inner: Box::new(IndexedDBBlockHeadersStorage {}), - params, }) } } @@ -94,6 +78,10 @@ impl BlockHeaderStorageOps for BlockHeaderStorage { self.inner.get_block_header_raw(for_coin, height).await } + async fn get_last_block_height(&self, for_coin: &str) -> Result { + self.inner.get_last_block_height(for_coin).await + } + async fn get_last_block_header_with_non_max_bits( &self, for_coin: &str, diff --git a/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs b/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs index 2a90bc036f..627b8c4d14 100644 --- a/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs +++ b/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs @@ -144,6 +144,7 @@ pub trait MergeUtxoArcOps: UtxoCoinBuilderCom } pub trait BlockHeaderUtxoArcOps: UtxoCoinBuilderCommonOps { + // Todo: this should be called only if storing headers is enabled and should be called after syncing the latest header on coin activation fn spawn_block_header_utxo_loop_if_required( &self, weak: UtxoWeak, @@ -154,21 +155,20 @@ pub trait BlockHeaderUtxoArcOps: UtxoCoinBuilderCommonOps { F: Fn(UtxoArc) -> T + Send + Sync + 'static, T: UtxoCommonOps, 
{ - if let UtxoRpcClientEnum::Electrum(electrum) = rpc_client { - if electrum.block_headers_storage().is_some() { - let ticker = self.ticker().to_owned(); - let (fut, abort_handle) = abortable(block_header_utxo_loop(weak, constructor)); - info!("Starting UTXO block header loop for coin {}", ticker); - spawn(async move { - if let Err(e) = fut.await { - info!( - "spawn_block_header_utxo_loop_if_required stopped for {}, reason {}", - ticker, e - ); - } - }); - return Some(abort_handle); - } + // Todo: can this be a normal if condition or add a method for is_native, is_electrum? + if let UtxoRpcClientEnum::Electrum(_) = rpc_client { + let ticker = self.ticker().to_owned(); + let (fut, abort_handle) = abortable(block_header_utxo_loop(weak, constructor)); + info!("Starting UTXO block header loop for coin {}", ticker); + spawn(async move { + if let Err(e) = fut.await { + info!( + "spawn_block_header_utxo_loop_if_required stopped for {}, reason {}", + ticker, e + ); + } + }); + return Some(abort_handle); } None } diff --git a/mm2src/coins/utxo/utxo_builder/utxo_coin_builder.rs b/mm2src/coins/utxo/utxo_builder/utxo_coin_builder.rs index faced1c8f1..f0ec7ce575 100644 --- a/mm2src/coins/utxo/utxo_builder/utxo_coin_builder.rs +++ b/mm2src/coins/utxo/utxo_builder/utxo_coin_builder.rs @@ -3,8 +3,7 @@ use crate::hd_wallet_storage::{HDWalletCoinStorage, HDWalletStorageError}; use crate::utxo::rpc_clients::{ElectrumClient, ElectrumClientImpl, ElectrumRpcRequest, EstimateFeeMethod, UtxoRpcClientEnum}; use crate::utxo::tx_cache::{UtxoVerboseCacheOps, UtxoVerboseCacheShared}; -use crate::utxo::utxo_block_header_storage::{BlockHeaderStorage, BlockHeaderVerificationParams, - InitBlockHeaderStorageOps}; +use crate::utxo::utxo_block_header_storage::{BlockHeaderStorage, InitBlockHeaderStorageOps}; use crate::utxo::utxo_builder::utxo_conf_builder::{UtxoConfBuilder, UtxoConfError, UtxoConfResult}; use crate::utxo::{output_script, utxo_common, ElectrumBuilderArgs, ElectrumProtoVerifier, 
RecentlySpentOutPoints, TxFee, UtxoCoinConf, UtxoCoinFields, UtxoHDAccount, UtxoHDWallet, UtxoRpcMode, DEFAULT_GAP_LIMIT, @@ -394,13 +393,8 @@ pub trait UtxoCoinBuilderCommonOps { Ok(UtxoRpcClientEnum::Native(native)) } }, - UtxoRpcMode::Electrum { - servers, - block_header_params, - } => { - let electrum = self - .electrum_client(ElectrumBuilderArgs::default(), servers, block_header_params) - .await?; + UtxoRpcMode::Electrum { servers } => { + let electrum = self.electrum_client(ElectrumBuilderArgs::default(), servers).await?; Ok(UtxoRpcClientEnum::Electrum(electrum)) }, } @@ -410,7 +404,6 @@ pub trait UtxoCoinBuilderCommonOps { &self, args: ElectrumBuilderArgs, mut servers: Vec, - block_header_params: Option, ) -> UtxoCoinBuildResult { let (on_connect_tx, on_connect_rx) = mpsc::unbounded(); let ticker = self.ticker().to_owned(); @@ -426,13 +419,8 @@ pub trait UtxoCoinBuilderCommonOps { event_handlers.push(ElectrumProtoVerifier { on_connect_tx }.into_shared()); } - let block_headers_storage = match block_header_params { - Some(params) => Some( - BlockHeaderStorage::new_from_ctx(self.ctx().clone(), params) - .map_to_mm(|e| UtxoCoinBuildError::Internal(e.to_string()))?, - ), - None => None, - }; + let block_headers_storage = BlockHeaderStorage::new_from_ctx(self.ctx().clone()) + .map_to_mm(|e| UtxoCoinBuildError::Internal(e.to_string()))?; let mut rng = small_rng(); servers.as_mut_slice().shuffle(&mut rng); diff --git a/mm2src/coins/utxo/utxo_builder/utxo_conf_builder.rs b/mm2src/coins/utxo/utxo_builder/utxo_conf_builder.rs index db1512b38d..f8a823b454 100644 --- a/mm2src/coins/utxo/utxo_builder/utxo_conf_builder.rs +++ b/mm2src/coins/utxo/utxo_builder/utxo_conf_builder.rs @@ -11,6 +11,7 @@ pub use keys::{Address, AddressFormat as UtxoAddressFormat, AddressHashEnum, Key use mm2_err_handle::prelude::*; use script::SignatureVersion; use serde_json::{self as json, Value as Json}; +use spv_validation::helpers_validation::BlockHeaderVerificationParams; use 
std::num::NonZeroU64; use std::sync::atomic::AtomicBool; @@ -98,6 +99,7 @@ impl<'a> UtxoConfBuilder<'a> { let estimate_fee_blocks = self.estimate_fee_blocks(); let trezor_coin = self.trezor_coin(); let enable_spv_proof = self.enable_spv_proof(); + let block_headers_verification_params = self.block_headers_verification_params(); Ok(UtxoCoinConf { ticker: self.ticker.to_owned(), @@ -130,6 +132,7 @@ impl<'a> UtxoConfBuilder<'a> { estimate_fee_blocks, trezor_coin, enable_spv_proof, + block_headers_verification_params, }) } @@ -288,4 +291,8 @@ impl<'a> UtxoConfBuilder<'a> { } fn enable_spv_proof(&self) -> bool { self.conf["enable_spv_proof"].as_bool().unwrap_or(false) } + + fn block_headers_verification_params(&self) -> Option { + json::from_value(self.conf["block_headers_verification_params"].clone()).unwrap_or(None) + } } diff --git a/mm2src/coins/utxo/utxo_common.rs b/mm2src/coins/utxo/utxo_common.rs index be7d5dcd2f..8757117a2e 100644 --- a/mm2src/coins/utxo/utxo_common.rs +++ b/mm2src/coins/utxo/utxo_common.rs @@ -56,6 +56,8 @@ pub const DEFAULT_FEE_VOUT: usize = 0; pub const DEFAULT_SWAP_TX_SPEND_SIZE: u64 = 305; pub const DEFAULT_SWAP_VOUT: usize = 0; const MIN_BTC_TRADING_VOL: &str = "0.00777"; +// Todo: should I keep this or get it from config +const BLOCK_HEADERS_LOOP_INTERVAL: f64 = 60.; macro_rules! 
true_or { ($cond: expr, $etype: expr) => { @@ -3433,6 +3435,7 @@ fn increase_by_percent(num: u64, percent: f64) -> u64 { num + (percent.round() as u64) } +// Todo: This loop needs to be called when getting headers is enabled in conf only after getting all the headers when activating coin pub async fn block_header_utxo_loop(weak: UtxoWeak, constructor: impl Fn(UtxoArc) -> T) { { let coin = match weak.upgrade() { @@ -3442,11 +3445,9 @@ pub async fn block_header_utxo_loop(weak: UtxoWeak, constructo let ticker = coin.as_ref().conf.ticker.as_str(); let storage = match &coin.as_ref().rpc_client { UtxoRpcClientEnum::Native(_) => return, - UtxoRpcClientEnum::Electrum(e) => match e.block_headers_storage() { - None => return, - Some(storage) => storage, - }, + UtxoRpcClientEnum::Electrum(e) => e.block_headers_storage(), }; + // Todo: this needs to be moved to coin activation probably match storage.is_initialized_for(ticker).await { Ok(true) => info!("Block Header Storage already initialized for {}", ticker), Ok(false) => { @@ -3468,62 +3469,38 @@ pub async fn block_header_utxo_loop(weak: UtxoWeak, constructo UtxoRpcClientEnum::Native(_) => break, UtxoRpcClientEnum::Electrum(client) => client, }; - let storage = match client.block_headers_storage() { - None => return, - Some(storage) => storage, - }; - let params = storage.params.clone(); - let (check_every, blocks_limit_to_check, difficulty_check, constant_difficulty, difficulty_algorithm) = ( - params.check_every, - params.blocks_limit_to_check, - params.difficulty_check, - params.constant_difficulty, - params.difficulty_algorithm, + + let ticker = coin.as_ref().conf.ticker.as_str(); + let storage = client.block_headers_storage(); + // Todo: remove unwraps + let last_stored_block_height: u64 = storage.get_last_block_height(ticker).await.unwrap().try_into().unwrap(); + // Todo: what to do about chain reorganization?? 
+ let height = ok_or_continue_after_sleep!( + coin.as_ref().rpc_client.get_block_count().compat().await, + BLOCK_HEADERS_LOOP_INTERVAL ); - // Todo: what about if electrums are down for a long time, a header will be skipped and all the next validations will fail, we shouldn't get it from - // Todo: get_block_count but from storage when there is a new block (check it's the last one in storage), block_header_utxo_loop logic might completely change - let height = - ok_or_continue_after_sleep!(coin.as_ref().rpc_client.get_block_count().compat().await, check_every); let (block_registry, block_headers) = ok_or_continue_after_sleep!( + // Todo: last_stored_block_height + 1 is repeated (add a variable) client - .retrieve_last_headers(blocks_limit_to_check, height) + .retrieve_headers(last_stored_block_height + 1, height) .compat() .await, - check_every - ); - let ticker = coin.as_ref().conf.ticker.as_str(); - let previous_header_height = if height < blocks_limit_to_check.get() { - 0 - } else { - height - blocks_limit_to_check.get() - }; - // Todo: remove unwrap, move this inside validate_headers function, maybe also move ticker inside storage? 
- let previous_header = ok_or_continue_after_sleep!( - storage.get_block_header(ticker, previous_header_height).await, - check_every - ) - .unwrap(); - ok_or_continue_after_sleep!( - validate_headers( - ticker, - previous_header, - previous_header_height as u32, - block_headers, - difficulty_check, - constant_difficulty, - storage, - &difficulty_algorithm - ) - .await, - check_every + BLOCK_HEADERS_LOOP_INTERVAL ); + // Todo: check this again (now if block_headers_verification_params is none in coin config headers will be added without validation) + if let Some(params) = &coin.as_ref().conf.block_headers_verification_params { + ok_or_continue_after_sleep!( + validate_headers(ticker, last_stored_block_height + 1, block_headers, storage, params,).await, + BLOCK_HEADERS_LOOP_INTERVAL + ); + } ok_or_continue_after_sleep!( storage.add_block_headers_to_storage(ticker, block_registry).await, - check_every + BLOCK_HEADERS_LOOP_INTERVAL ); debug!("tick block_header_utxo_loop for {}", coin.as_ref().conf.ticker); - Timer::sleep(check_every).await; + Timer::sleep(BLOCK_HEADERS_LOOP_INTERVAL).await; } } diff --git a/mm2src/coins/utxo/utxo_indexedb_block_header_storage.rs b/mm2src/coins/utxo/utxo_indexedb_block_header_storage.rs index 236b5645b5..6382ff71cd 100644 --- a/mm2src/coins/utxo/utxo_indexedb_block_header_storage.rs +++ b/mm2src/coins/utxo/utxo_indexedb_block_header_storage.rs @@ -37,6 +37,10 @@ impl BlockHeaderStorageOps for IndexedDBBlockHeadersStorage { Ok(None) } + async fn get_last_block_height(&self, _for_coin: &str) -> Result { + Err(BlockHeaderStorageError::Internal("Not implemented".into())) + } + async fn get_last_block_header_with_non_max_bits( &self, _for_coin: &str, diff --git a/mm2src/coins/utxo/utxo_sql_block_header_storage.rs b/mm2src/coins/utxo/utxo_sql_block_header_storage.rs index c73d0c40de..b1630340e3 100644 --- a/mm2src/coins/utxo/utxo_sql_block_header_storage.rs +++ b/mm2src/coins/utxo/utxo_sql_block_header_storage.rs @@ -56,6 +56,16 @@ fn 
get_block_header_by_height(for_coin: &str) -> Result Result { + let table_name = get_table_name_and_validate(for_coin)?; + let sql = format!( + "SELECT block_height FROM {} ORDER BY block_height DESC LIMIT 1;", + table_name + ); + + Ok(sql) +} + fn get_last_block_header_with_non_max_bits_sql(for_coin: &str) -> Result { let table_name = get_table_name_and_validate(for_coin)?; let sql = format!( @@ -211,6 +221,22 @@ impl BlockHeaderStorageOps for SqliteBlockHeadersStorage { }) } + async fn get_last_block_height(&self, for_coin: &str) -> Result { + let sql = get_last_block_height_sql(for_coin)?; + let selfi = self.clone(); + + async_blocking(move || { + let conn = selfi.0.lock().unwrap(); + query_single_row(&conn, &sql, NO_PARAMS, |row| row.get(0)) + }) + .await + .map_err(|e| BlockHeaderStorageError::GetFromStorageError { + coin: for_coin.to_string(), + reason: e.to_string(), + })? + .ok_or_else(|| BlockHeaderStorageError::Internal("Database is empty".into())) + } + async fn get_last_block_header_with_non_max_bits( &self, for_coin: &str, @@ -383,4 +409,35 @@ mod sql_block_headers_storage_tests { assert_ne!(actual_block_header.bits, BlockHeaderBits::Compact(MAX_BITS_BTC.into())); assert_eq!(actual_block_header, expected_block_header); } + + #[test] + fn test_get_last_block_height() { + let for_coin = "get"; + let storage = SqliteBlockHeadersStorage::in_memory(); + let table = block_headers_cache_table(for_coin); + block_on(storage.init(for_coin)).unwrap(); + + let initialized = block_on(storage.is_initialized_for(for_coin)).unwrap(); + assert!(initialized); + + let mut headers = HashMap::with_capacity(2); + + // https://live.blockcypher.com/btc-testnet/block/00000000961a9d117feb57e516e17217207a849bf6cdfce529f31d9a96053530/ + let block_header: BlockHeader = "02000000ea01a61a2d7420a1b23875e40eb5eb4ca18b378902c8e6384514ad0000000000c0c5a1ae80582b3fe319d8543307fa67befc2a734b8eddb84b1780dfdf11fa2b20e71353ffff001d00805fe0".into(); + headers.insert(201595, block_header); + + 
// https://live.blockcypher.com/btc-testnet/block/0000000000ad144538e6c80289378ba14cebb50ee47538b2a120742d1aa601ea/ + let block_header: BlockHeader = "02000000cbed7fd98f1f06e85c47e13ff956533642056be45e7e6b532d4d768f00000000f2680982f333fcc9afa7f9a5e2a84dc54b7fe10605cd187362980b3aa882e9683be21353ab80011c813e1fc0".into(); + headers.insert(201594, block_header); + + // https://live.blockcypher.com/btc-testnet/block/0000000000ad144538e6c80289378ba14cebb50ee47538b2a120742d1aa601ea/ + let block_header: BlockHeader = "020000001f38c8e30b30af912fbd4c3e781506713cfb43e73dff6250348e060000000000afa8f3eede276ccb4c4ee649ad9823fc181632f262848ca330733e7e7e541beb9be51353ffff001d00a63037".into(); + headers.insert(201593, block_header); + + block_on(storage.add_block_headers_to_storage(for_coin, headers)).unwrap(); + assert!(!storage.is_table_empty(&table)); + + let last_block_height = block_on(storage.get_last_block_height(for_coin)).unwrap(); + assert_eq!(last_block_height, 201595); + } } diff --git a/mm2src/coins/utxo/utxo_tests.rs b/mm2src/coins/utxo/utxo_tests.rs index ccbe85377a..14b0267c81 100644 --- a/mm2src/coins/utxo/utxo_tests.rs +++ b/mm2src/coins/utxo/utxo_tests.rs @@ -13,9 +13,11 @@ use crate::utxo::rpc_clients::{BlockHashOrHeight, ElectrumBalance, ElectrumClien use crate::utxo::spv::SimplePaymentVerification; use crate::utxo::tx_cache::dummy_tx_cache::DummyVerboseCache; use crate::utxo::tx_cache::UtxoVerboseCacheOps; +use crate::utxo::utxo_block_header_storage::BlockHeaderStorage; use crate::utxo::utxo_builder::{UtxoArcBuilder, UtxoCoinBuilderCommonOps}; use crate::utxo::utxo_common::UtxoTxBuilder; use crate::utxo::utxo_common_tests; +use crate::utxo::utxo_sql_block_header_storage::SqliteBlockHeadersStorage; use crate::utxo::utxo_standard::{utxo_standard_coin_with_priv_key, UtxoStandardCoin}; #[cfg(not(target_arch = "wasm32"))] use crate::WithdrawFee; use crate::{CoinBalance, PrivKeyBuildPolicy, SearchForSwapTxSpendInput, StakingInfosDetails, SwapOps, @@ -24,6 +26,7 @@ 
use chain::OutPoint; use common::executor::Timer; use common::{block_on, now_ms, OrdRange, PagingOptionsEnum, DEX_FEE_ADDR_RAW_PUBKEY}; use crypto::{privkey::key_pair_from_seed, Bip44Chain, RpcDerivationPath}; +use db_common::sqlite::rusqlite::Connection; use futures::future::join_all; use futures::TryFutureExt; use mm2_core::mm_ctx::MmCtxBuilder; @@ -66,7 +69,7 @@ pub fn electrum_client_for_test(servers: &[&str]) -> ElectrumClient { }; let servers = servers.into_iter().map(|s| json::from_value(s).unwrap()).collect(); - block_on(builder.electrum_client(args, servers, None)).unwrap() + block_on(builder.electrum_client(args, servers)).unwrap() } /// Returned client won't work by default, requires some mocks to be usable @@ -153,6 +156,7 @@ fn utxo_coin_fields_for_test( estimate_fee_blocks: 1, trezor_coin: None, enable_spv_proof: false, + block_headers_verification_params: None, }, decimals: TEST_COIN_DECIMALS, dust_amount: UTXO_DUST_AMOUNT, @@ -465,7 +469,12 @@ fn test_wait_for_payment_spend_timeout_electrum() { MockResult::Return(Box::new(futures01::future::ok(None))) }); - let client = ElectrumClientImpl::new(TEST_COIN_NAME.into(), Default::default(), None); + let block_headers_storage = BlockHeaderStorage { + inner: Box::new(SqliteBlockHeadersStorage(Arc::new(Mutex::new( + Connection::open_in_memory().unwrap(), + )))), + }; + let client = ElectrumClientImpl::new(TEST_COIN_NAME.into(), Default::default(), block_headers_storage); let client = UtxoRpcClientEnum::Electrum(ElectrumClient(Arc::new(client))); let coin = utxo_coin_for_test(client, None, false); let transaction = 
hex::decode("01000000000102fff7f7881a8099afa6940d42d1e7f6362bec38171ea3edf433541db4e4ad969f00000000494830450221008b9d1dc26ba6a9cb62127b02742fa9d754cd3bebf337f7a55d114c8e5cdd30be022040529b194ba3f9281a99f2b1c0a19c0489bc22ede944ccf4ecbab4cc618ef3ed01eeffffffef51e1b804cc89d182d279655c3aa89e815b1b309fe287d9b2b55d57b90ec68a0100000000ffffffff02202cb206000000001976a9148280b37df378db99f66f85c95a783a76ac7a6d5988ac9093510d000000001976a9143bde42dbee7e4dbe6a21b2d50ce2f0167faa815988ac000247304402203609e17b84f6a7d30c80bfa610b5b4542f32a8a0d5447a12fb1366d7f01cc44a0220573a954c4518331561406f90300e8f3358f51928d43c212a8caed02de67eebee0121025476c2e83188368da1ff3e292e7acafcdb3566bb0ad253f62fc70f07aeee635711000000") @@ -1464,12 +1473,12 @@ fn test_network_info_negative_time_offset() { #[test] fn test_unavailable_electrum_proto_version() { - ElectrumClientImpl::new.mock_safe(|coin_ticker, event_handlers, _| { + ElectrumClientImpl::new.mock_safe(|coin_ticker, event_handlers, block_headers_storage| { MockResult::Return(ElectrumClientImpl::with_protocol_version( coin_ticker, event_handlers, OrdRange::new(1.8, 1.9).unwrap(), - None, + block_headers_storage, )) }); diff --git a/mm2src/coins/utxo/utxo_wasm_tests.rs b/mm2src/coins/utxo/utxo_wasm_tests.rs index 20c057c01b..9ba8949500 100644 --- a/mm2src/coins/utxo/utxo_wasm_tests.rs +++ b/mm2src/coins/utxo/utxo_wasm_tests.rs @@ -1,7 +1,9 @@ use super::rpc_clients::{ElectrumClient, ElectrumClientImpl, ElectrumProtocol}; use super::*; use crate::utxo::rpc_clients::UtxoRpcClientOps; +use crate::utxo::utxo_block_header_storage::BlockHeaderStorage; use crate::utxo::utxo_common_tests; +use crate::utxo::utxo_indexedb_block_header_storage::IndexedDBBlockHeadersStorage; use common::executor::Timer; use serialization::deserialize; use wasm_bindgen_test::*; @@ -11,7 +13,10 @@ wasm_bindgen_test_configure!(run_in_browser); const TEST_COIN_NAME: &'static str = "RICK"; pub async fn electrum_client_for_test(servers: &[&str]) -> ElectrumClient { - let client = 
ElectrumClientImpl::new(TEST_COIN_NAME.into(), Default::default(), None); + let block_headers_storage = BlockHeaderStorage { + inner: Box::new(IndexedDBBlockHeadersStorage {}), + }; + let client = ElectrumClientImpl::new(TEST_COIN_NAME.into(), Default::default(), block_headers_storage); for server in servers { client .add_server(&ElectrumRpcRequest { diff --git a/mm2src/coins/z_coin.rs b/mm2src/coins/z_coin.rs index 214b8ef642..c29255f9de 100644 --- a/mm2src/coins/z_coin.rs +++ b/mm2src/coins/z_coin.rs @@ -841,8 +841,6 @@ impl<'a> ZCoinBuilder<'a> { ZcoinRpcMode::Native => UtxoRpcMode::Native, ZcoinRpcMode::Light { electrum_servers, .. } => UtxoRpcMode::Electrum { servers: electrum_servers.clone(), - // TODO: Implement spv validation for zcoin - block_header_params: None, }, }; let utxo_params = UtxoActivationParams { diff --git a/mm2src/mm2_bitcoin/spv_validation/src/for_tests/workTestVectors.json b/mm2src/mm2_bitcoin/spv_validation/src/for_tests/workTestVectors.json index 764b633307..28bf5bec75 100644 --- a/mm2src/mm2_bitcoin/spv_validation/src/for_tests/workTestVectors.json +++ b/mm2src/mm2_bitcoin/spv_validation/src/for_tests/workTestVectors.json @@ -7,6 +7,10 @@ { "height": 604800, "hex": "000000208e244d2c55bc403caa5d6eaf0f922170e413eb1e02fb02000000000000000000e03b4d9df72d8db232a20bb2ff35c433a99f1467f391f75b5f62180d96f06d6aa4c4d65d3eb215179ef91633" + }, + { + "height": 724608, + "hex": "00c0d933bd3c3dcf14027754c7c8f7190a79b699188c8b24f49204000000000000000000ce2b1e4dc6cb44fae781df459428382d4b5be52766f67a42725cc0d78e00784d352e166278100a173d95ee89" } ], "tBTC": [ @@ -26,5 +30,11 @@ "height": 201596, "hex": "02000000303505969a1df329e5fccdf69b847a201772e116e557eb7f119d1a9600000000469267f52f43b8799e72f0726ba2e56432059a8ad02b84d4fff84b9476e95f7716e41353ab80011c168cb471" } + ], + "MORTY": [ + { + "height": 1330480, + "hex": 
"04000000bb496ba8d09f8f98b15cdaf5798163bdd70676eb1c8b538f53ab4f83da4a27000db352177c6b5ad2499a906cec33b843fb17fc1ec298cd06c7e7ceb7b62e144232d719d14c15e565c05e84ead95a2f101a1b658ee2f36eb7ca65206e27cfca473de614625be6071f09006c286bc5ec73dd27a09bf687700c06fb04d0b9a063c0aa0746c9db170000fd40050053b27dad1f5a858b78f3154039759e985ed57db10ecb772810d7f158c55083a14b9f2ba26ae9fcb82012186e2528f67c45b7b216a69fe26232ad2d179a141b1b10e4d5f108c7b920b49348f6eef2d70b7f02cb01d8d9992f8f2d7b6608806b10ff329846b188de200aa37c73ac03f6c9b79cf5613c71b7969b4abafdbc1165ad955a049269584c83b36f36a3e9becf2fe81f3b1917475eb13ecfed3813ecc32206078d8c1e2797013dfc6f6a55e06f1c06a07959ef94d53ca0fc81d03cb6f614761156ed4ff1a8e5c9f0b96f3c8c3eeb9a0720cf4ed10397330f49b83439c5083eea1d1785a10d86ca2866d0da4ca746c49118b780c55aa6cd5b4c0491cefa258ecf129307d15e001415b203e89c008f4444b236aa556dbf4f6d05e0c57642cfa142df2f8546f1d37a6b2feaf98496892b41caefbe7dc7bcbb2755752df3dbf00ac1fc558896f14541aea4cc78ec5d00bbe5398fac4a658b1ae3399777f15117c0f3de3c63bc5b3edf6543d172cfc66907f9cf8706e97b14281daeb427801dfb0910743873265ae6bae71dbf22353c321f726e68f747965858f488dd507b7e6adee42509e5720373dce5b111b420c906b0f2cb391cfb9d581e2509da3829d6718469f383e07043694db87db0ce1196449a6c9cd941a8bde507e553c0ca534238dcc93633631926102c87cd0f83720ccff60de8b05b103e086a2c2cb7943f21033a5658235fc52708907e1ea722e726808db0270bf898c51e9dd0745614857783dc11a6dcd7760d4a07ddbd83a2e02b23fa789b79eed22dc411b9b48f71c54f12387065e3ff0638701e0f6a0dd56d0ce395d150b237b60c166352e69b92173b884446d7660f5857458b97c6d4ee54f8a1f60113aff30e54c1f7c572b85dcb7a2419d2f736a9b0a6d99ea549bd74e546251c0b8be7975e9a6d96aa3467b1dc6b024745fdef43b37cf21a657a3247d9adf8c252ef210d9a4e9c7191f698ccc9b10103b8bb811cdcf1a62903786476db8195ffb3cd004c57ad07a7a3c41eee391f66a7697e69409d7a78558720f6a1b9804d72de820b7b6165b8e14a2b1316576022423f22bb82fab16127be7173ddcd43fa7ea5c4474f79321a8c4b792caf12320c3047d026b7d63216a022e83655c2d811d2bd2a559970e9155b979953f9801ce918f690f43f5e3f07f7ce27a6837bf33b2490d9add8549f1
e603a750c114bb92740cc3987cb9f948a6229f175a7b577b0b60d885a0a7ef05debe921376a7acdb25eaa8bb72e120e529cd775175012efb454cf41d240a946bf140af20d9a5dbed2e196d91a7ff33c2769f140fa0bb968111e1602221deae8d162e7a471354c2051acb43ec31015aaefa0b08bf1bddbb282e86a1caf45f3b63e4c6427ba9e99aed28ef79711794511511c52daf13b735e02b9833d3467bfd16886606d5555b7cc95ff2fea3b03c82cfe60e8602d9f70a3870f5b755573b955bb300bd3733b5ddf9a61fd3cd281af39520d6dfd8b7e2b165ec91749614a3b5241e2ea12470f91b58cf6163e02dfe79392db70cd17db9497cf59c89ac8377dbd02042f6ed270c8c2bc717623b203b74676890f5f4cd905b25772a25292d76b6f42a094c27eed13793d189e395ed3f28c5731976a7b45184acee45b3cf05a9c62045644dfe39f79cd331e282edae99cea652eb82819415ac2a5c21539cdd636fb835063ace3b6befffaf50bf6866e9b1a2b35037a330faeb18ca1696693dafd26b5f5da8dcd3e50ff09249bdda695f576d25024560b643d873d07293a80fe71998ef6ccd88c0cf9f69326b463c26fe4906faaf454ae68accd7ef3edffefdd2ede23a822a2267332f0791f1c4e6d5ab4661f279f5039b36a4476e56fd5b0461e585ff30a7c661b93f1" + } ] } \ No newline at end of file diff --git a/mm2src/mm2_bitcoin/spv_validation/src/helpers_validation.rs b/mm2src/mm2_bitcoin/spv_validation/src/helpers_validation.rs index 089fbd19d2..94a92d5951 100644 --- a/mm2src/mm2_bitcoin/spv_validation/src/helpers_validation.rs +++ b/mm2src/mm2_bitcoin/spv_validation/src/helpers_validation.rs @@ -1,11 +1,13 @@ -use crate::storage::BlockHeaderStorageOps; +use crate::storage::{BlockHeaderStorageError, BlockHeaderStorageOps}; use crate::work::{next_block_bits, DifficultyAlgorithm, NextBlockBitsError}; use chain::{BlockHeader, RawHeaderError}; use derive_more::Display; use primitives::hash::H256; use ripemd160::Digest; +use serde::Deserialize; use serialization::parse_compact_int; use sha2::Sha256; +use std::convert::TryFrom; #[derive(Clone, Debug, Display, Eq, PartialEq)] pub enum SPVError { @@ -49,6 +51,10 @@ pub enum SPVError { InvalidHeight(String), #[display(fmt = "Raises during validation loop")] Timeout, + #[display(fmt = "Block headers storage error: {}", 
_0)] + HeaderStorageError(BlockHeaderStorageError), + #[display(fmt = "Internal error: {}", _0)] + Internal(String), } impl From for SPVError { @@ -63,6 +69,10 @@ impl From for SPVError { fn from(e: NextBlockBitsError) -> Self { SPVError::DifficultyCalculationError(e) } } +impl From for SPVError { + fn from(e: BlockHeaderStorageError) -> Self { SPVError::HeaderStorageError(e) } +} + /// A slice of `H256`s for use in a merkle array #[derive(Debug, Clone, PartialEq, Eq)] struct MerkleArray<'a>(&'a [u8]); @@ -296,6 +306,15 @@ pub(crate) fn merkle_prove( fn validate_header_prev_hash(actual: &H256, to_compare_with: &H256) -> bool { actual == to_compare_with } +/// SPV headers verification parameters +#[derive(Clone, Debug, Deserialize)] +pub struct BlockHeaderVerificationParams { + pub difficulty_check: bool, + pub constant_difficulty: bool, + pub difficulty_algorithm: Option, + pub genesis_block_header: String, +} + /// Checks validity of header chain. /// Compares the hash of each header to the prevHash in the next header. /// @@ -313,34 +332,55 @@ fn validate_header_prev_hash(actual: &H256, to_compare_with: &H256) -> bool { ac /// /// # Notes /// Wrapper inspired by `bitcoin_spv::validatespv::validate_header_chain` -// Todo: reduce number of arguments -#[allow(clippy::too_many_arguments)] pub async fn validate_headers( coin: &str, - previous_header: BlockHeader, - previous_height: u32, + previous_height: u64, headers: Vec, - difficulty_check: bool, - constant_difficulty: bool, storage: &dyn BlockHeaderStorageOps, - algorithm: &Option, + params: &BlockHeaderVerificationParams, ) -> Result<(), SPVError> { + let mut previous_header = if previous_height == 0 { + // Todo: remove unwrap and add validation earlier in coin activation (convert to blockheader struct there) + BlockHeader::try_from(params.genesis_block_header.clone()).unwrap() + } else { + storage + .get_block_header(coin, previous_height) + .await? 
+ .ok_or(BlockHeaderStorageError::GetFromStorageError { + coin: coin.to_string(), + reason: format!("Header with height {} is not found in storage", previous_height), + })? + }; let mut previous_hash = previous_header.hash(); let mut prev_bits = previous_header.bits.clone(); - let mut previous_header = previous_header; for header in headers.into_iter() { + if previous_height == 0 { + // previous_header is genesis header in this case, checking that the first header hash is the same as the genesis header hash is enough + if header.hash() != previous_hash { + return Err(SPVError::InvalidChain); + } + continue; + } let cur_bits = header.bits.clone(); - if previous_height != 0 && constant_difficulty && difficulty_check && cur_bits != prev_bits { + if params.constant_difficulty && params.difficulty_check && cur_bits != prev_bits { return Err(SPVError::UnexpectedDifficultyChange); } - if previous_height != 0 && !validate_header_prev_hash(&header.previous_header_hash, &previous_hash) { + if !validate_header_prev_hash(&header.previous_header_hash, &previous_hash) { return Err(SPVError::InvalidChain); } - if let Some(algo) = algorithm { - if !constant_difficulty - && difficulty_check + if let Some(algorithm) = ¶ms.difficulty_algorithm { + if !params.constant_difficulty + && params.difficulty_check && cur_bits - != next_block_bits(coin, header.time, previous_header, previous_height, storage, algo).await? + != next_block_bits( + coin, + header.time, + previous_header, + previous_height as u32, + storage, + algorithm, + ) + .await? 
{ return Err(SPVError::InsufficientWork); } @@ -548,16 +588,19 @@ mod tests { "04000000001f22e1bc88c53b1554f8fdcf261fdb09f4cae6ef5e5032b788515f4a60d30d67d1b35fda68abc05f5af39e5ade224a5312b8dcd1f3629a7ff33355bb7ca93e32d719d14c15e565c05e84ead95a2f101a1b658ee2f36eb7ca65206e27cfca478be6146220bb071f49000b055b22a7a4bbafd6b52efb90f963d5f80126c27e437005fb47720e0000fd4005004d9875d71c540f558813142e263f597243bdd8d8105ff3d1ffd62ae51ccf22729debe510f97ab0631701dbd34b73e570597dc8825be6bd669e693037fb701040c273b44745f4e850c2d8aeca7ccab6ef7f462206a16d75358f2e8fddf9d0dbc6333ff55b1813a37f0ba240bd2d897fbd6cfdb1989ac8f3ec93b15ae4360edf84088ac9a4ea7d3d71290532bb51675e7310be1210aa33c184d693f6f7c15c5be1e89356ae3d663d0c548fceac0974fe4cb6c6559f50643280df9508460fd04f9cde55521b4c6d61c644c6c7b7473f9e39b412e3776f5e47b6c466aaf1dc76ff2114e716eb6b9614d0c93cdc229ec13b07057a7f7446c1aac51ef0950d4361fa2d20f22f29ff490bf6d6a2a267c45d88d3152d9f5291695f2f4fba65ca9763cb4176506c73b8162611b6004af7ec8d1ea55a225cca2576e4ac84ac333b663693a2f19f7786340ad9d2212d576a0b4e7700bd7d60de88940dce1f01481f9c41350eefd7b496218bcf70c4c8922dfd18d666d37d10cb0f14dd38e1225ec179dcab5501a4434674d6f9ff9f23c4df5f445cc2accf43189fc99ac56693df373a4207b0dc991009fae4796fd7e49cea4dd139ee72264dfd47f4e1ad2420d635c7a1f37950d022ffdcccc7651b645db0ba0ce94c18dcc902279b4601806beefe05016f1f85411e6562b584da0854db2e36f602d8c4974d385aee4a01d1132082c8cd7c71443162f7d7487c73d8a46f830f72a0d352d957bef5afc33c4447ef33b2491e28000d1f4687e95ffc2b9532d28ae4c48f8551bf527dbe18c672204495f2bd546566fd5770189e28c2de0974130a492ccd8737a8c6e971d02a23c4f9f27410348d1f666f93385bdc81bad8e9a9d1dbffdfa2609ebae52740b457ecd67a3bf0db02a14f5bdf3e25b35b2d3d303094e46e0e3daef559d9f0e074e512bcaf9fcc9d035083eec16806af8a93d27b4ad46754a425b6a02b1ac22f682e48f214d66b379d7042aa39f2c5f3448d05ca4b6360e162f31f197225f4ad579d69207c666711fb3f6ca814efcf430899360cced1168cd69ec0e809a89cf2cf2015f9f895a3dadd4ced6d94793e98201b1da6a0a5d90be5d06925e3ad60b9227f84b9c3060a6db6e7857d8731f975d4a993abf10d84590da
02b114625109d864de070813179b651d528f66036c30a0700ee84fc5e59757a509745b64e76fa3396f3c8b01a7724cd434e6d774dad36be8a73ad29f6859352aa15236e7825947396cb98e26b912b19ddc127590e59200c4334d1d96d7585a0e349b920f2e4e59cdedac911214c42c0894f72c8a7423d7aef3ea5ef9a5b650821f46537c65509ad8dcf6558c16c04f9877c737ff81875d9fbe01d23d37e937444cf257b0b57bc1c2a774f2e2bf5f3b0881be0e2282ba97ef6aad797f8fdb4053da4e478575805c7a93076c09847544a8e89f1cb3838df7870bcf61deb2144c6f6349c966b67545703058f9227965b97835b049538fb428431a8461586b022368626d20e9b6bfdd7232a5cc6a0aa214319cb440c45443a2446d1e17713c0e1049f0fd759d1dbff493302140376cfb153330ed455a043189260cb7d2d90333a37d3584f2d907d0a73dccee299ad14141d60d1409cda688464a13b5dab37476641741717d599a60c0ac84d85869ed449f83933ad30e2591157fd1f07b73ecf26f34e91bc00f1ca86ae34ca8231b372cdc2ed18d463ac42f92859d6f0e2c483dbb23d785f1233db2033458af9d7c1e7029ac5cc33ca7d25b2b49fd71b1ae5f5ce969b6e77333bf5fbb5e6645dd0a4d0c6e82eb534ac264ddbe28513e4b82b3578c1a6cbfaa2522aa50985fe2cce43cf3363eaacca0e09c721fd603d43c3a4fdf8dde0c9ff2c054910b16aeef7c4d86b31".into(), 
"04000000fcead9a1b425124f11aa97e0614120ce87bdddcad655672916f9c4564dc057002bd3df07a4602620282b276359529114ba89b59b16bec235d584c3cf5cc6b2d132d719d14c15e565c05e84ead95a2f101a1b658ee2f36eb7ca65206e27cfca47bfe61462d5b9071f1a001daf299c51afbd74fd75a98ba49a6e40ae8ad92b3afdc1cf215fd6190000fd40050044b5e035b02d138a9704f9513c0865f2733b7c09294ee504c155c283f4895559b6ac39828eac98ad393a642330589e8849040f55ce44f8f2197529d0b0ed57ccdda41f1971e153ec28ac5b4eba968741db374104d65ee234580a83bea1c0cdb67b8bc207057486eb1d90e21ba0cd4f5e9fd834821fafc1517c5d1fceb50ba6f6b102a9b4edac46f2359aec795a4e2458f51114a41289634b3b1cf250e3e38f3689f951278dfa7202a7dfe311cc098fd4a8d02c8f8a74e4a5010b18ee2e60578d5e9f1c094433a73f26e6546e20a574fc261baaa79e9910ab86ed607786a1cc88e7de51ff928d434e26eaef1437f7068c743f26d7c0eea6791e869b101fee8ab41b50af6174c5e6b731a1719f31ee3e6529efef49f31665baedc9382e9665278a84467d479f139fc7a8ef66fef9bd2fd17f7779ee315d458f691a290fa7c2179de8bb91a78458c5290d4aa45b163254006800ba2fce7479511f744fd7de96495c39be93413d8b0b187fe092537e1a7646a66a125b33333f6ecd10085e23ad168b24ee7be69d01ea021a39401e4bd41d818499e7174dd9b85542076c78cb89eeec1c190301b4709dbc963d47926e31bb0235ba6a7029d49458150f6491ac9c973b8a2c893258f907baf4bcb7c39f12b900ba2b2382cd5dd84314ee504ade835ad9a1cb13a7f5928a483ebc9415429810fd99893f2f8f83970b8b47143d617e6f9853e4d86ff378be664218f1c32531143e209f171590dd48216fec879a6b9cbf04432bf4f1a3734b69b6a9f1a358a259a0f9082cfb6c1f3d9d2d9e4522ad651ccce565f06b30c1c0b27252270c2f6608cf4f3288a7e7d4b174e646de05341f7db62b00b5ccb295f058d34b87201148828e9b3f7e08f60e100f810be27eb7f4c471cda7621106fe78bc69ec2bd27acabd55dc094b8626913b7d24d9b60939754700f32574a733a195f8b0220d56f6797de0bcd7b80d561896b816586593409f76e85a7a1035f821dee32a02fdbc26bc4cca375bed418b9d678ac589249a1a5a5b24447ee9b42e33f817066caf3d4e17d0347f6acf0cbf426d4df49413b3d12350edec2681ab9cfecd0825ccfb2649a57391d3f153050dfb4350d60e5e464229ddd6e49ece95557b8ef48c18cbffbe9fc8d7700f611a4b33a2a254afcec638c485e36daf0364da7d4302e488db7b6c41297571048cfea5452e3
24abb9f9e1043e625fd0853b7e03063d1c3a43aa1ee62d45d890b5e4d10640e775cff6852b6d1acd4a503b3ece3b319cbcf33ff9fdf17b8f852d748db1e05af80507f5d0e1bc44444b155d7da20f7f0b4d6d83368c3bb9e1321b39472a8677ea1d3aca43b453d35edca37b7536d19c26b764958b3c7c30f3211d7b7bb7f6a6d7fd7bf2dda6e7d7b1e533556863549bbe1394a3828596f25029b7e30495e1235f084e5edd133bc29fce4f1e5e514eb1d1cb19fd8dfbb0d130fbec4e288f23dae86311ffd6f4afbaacc2ffe1cc8811a455ba6f5659f82515b56c6ac84277bff5bef98fefc74e002e4a11866a417a429541f8a62df4108e4730d3045f92984bcf1ab2f7d03f8bb1767e91791530cd8eec412919e1f2e341e66a1588a8f485f7aa005787af946b9cb10f6685420b7e1663f66374fddc5e70720507ee2134f3b02df042fcf6db4a5bdd74cc5010793634816fe447cc68e076b225cc1ca872929ef246ce356dc8d8964ff6d7119d071eccb6dc37f75b932c44cdc30723b8357a2761c6de6ab2713e6f6a782538cb731b07950d3f459760a00cc0af406d6848014746b02653636f479d952b46fdeff976e1d159ba46ae7363d5b0042d3905a0bda12aaa6eaae1a5a0d55d4c1930aa1c004cd610866853a247239366aa20f8968ea9ca3d5d6d7321a5d0f2c".into() ]; - let previous_header: BlockHeader = 
"04000000bb496ba8d09f8f98b15cdaf5798163bdd70676eb1c8b538f53ab4f83da4a27000db352177c6b5ad2499a906cec33b843fb17fc1ec298cd06c7e7ceb7b62e144232d719d14c15e565c05e84ead95a2f101a1b658ee2f36eb7ca65206e27cfca473de614625be6071f09006c286bc5ec73dd27a09bf687700c06fb04d0b9a063c0aa0746c9db170000fd40050053b27dad1f5a858b78f3154039759e985ed57db10ecb772810d7f158c55083a14b9f2ba26ae9fcb82012186e2528f67c45b7b216a69fe26232ad2d179a141b1b10e4d5f108c7b920b49348f6eef2d70b7f02cb01d8d9992f8f2d7b6608806b10ff329846b188de200aa37c73ac03f6c9b79cf5613c71b7969b4abafdbc1165ad955a049269584c83b36f36a3e9becf2fe81f3b1917475eb13ecfed3813ecc32206078d8c1e2797013dfc6f6a55e06f1c06a07959ef94d53ca0fc81d03cb6f614761156ed4ff1a8e5c9f0b96f3c8c3eeb9a0720cf4ed10397330f49b83439c5083eea1d1785a10d86ca2866d0da4ca746c49118b780c55aa6cd5b4c0491cefa258ecf129307d15e001415b203e89c008f4444b236aa556dbf4f6d05e0c57642cfa142df2f8546f1d37a6b2feaf98496892b41caefbe7dc7bcbb2755752df3dbf00ac1fc558896f14541aea4cc78ec5d00bbe5398fac4a658b1ae3399777f15117c0f3de3c63bc5b3edf6543d172cfc66907f9cf8706e97b14281daeb427801dfb0910743873265ae6bae71dbf22353c321f726e68f747965858f488dd507b7e6adee42509e5720373dce5b111b420c906b0f2cb391cfb9d581e2509da3829d6718469f383e07043694db87db0ce1196449a6c9cd941a8bde507e553c0ca534238dcc93633631926102c87cd0f83720ccff60de8b05b103e086a2c2cb7943f21033a5658235fc52708907e1ea722e726808db0270bf898c51e9dd0745614857783dc11a6dcd7760d4a07ddbd83a2e02b23fa789b79eed22dc411b9b48f71c54f12387065e3ff0638701e0f6a0dd56d0ce395d150b237b60c166352e69b92173b884446d7660f5857458b97c6d4ee54f8a1f60113aff30e54c1f7c572b85dcb7a2419d2f736a9b0a6d99ea549bd74e546251c0b8be7975e9a6d96aa3467b1dc6b024745fdef43b37cf21a657a3247d9adf8c252ef210d9a4e9c7191f698ccc9b10103b8bb811cdcf1a62903786476db8195ffb3cd004c57ad07a7a3c41eee391f66a7697e69409d7a78558720f6a1b9804d72de820b7b6165b8e14a2b1316576022423f22bb82fab16127be7173ddcd43fa7ea5c4474f79321a8c4b792caf12320c3047d026b7d63216a022e83655c2d811d2bd2a559970e9155b979953f9801ce918f690f43f5e3f07f7ce27a6837bf33b2490d9add8549f1
e603a750c114bb92740cc3987cb9f948a6229f175a7b577b0b60d885a0a7ef05debe921376a7acdb25eaa8bb72e120e529cd775175012efb454cf41d240a946bf140af20d9a5dbed2e196d91a7ff33c2769f140fa0bb968111e1602221deae8d162e7a471354c2051acb43ec31015aaefa0b08bf1bddbb282e86a1caf45f3b63e4c6427ba9e99aed28ef79711794511511c52daf13b735e02b9833d3467bfd16886606d5555b7cc95ff2fea3b03c82cfe60e8602d9f70a3870f5b755573b955bb300bd3733b5ddf9a61fd3cd281af39520d6dfd8b7e2b165ec91749614a3b5241e2ea12470f91b58cf6163e02dfe79392db70cd17db9497cf59c89ac8377dbd02042f6ed270c8c2bc717623b203b74676890f5f4cd905b25772a25292d76b6f42a094c27eed13793d189e395ed3f28c5731976a7b45184acee45b3cf05a9c62045644dfe39f79cd331e282edae99cea652eb82819415ac2a5c21539cdd636fb835063ace3b6befffaf50bf6866e9b1a2b35037a330faeb18ca1696693dafd26b5f5da8dcd3e50ff09249bdda695f576d25024560b643d873d07293a80fe71998ef6ccd88c0cf9f69326b463c26fe4906faaf454ae68accd7ef3edffefdd2ede23a822a2267332f0791f1c4e6d5ab4661f279f5039b36a4476e56fd5b0461e585ff30a7c661b93f1".into(); + let params = BlockHeaderVerificationParams { + difficulty_check: false, + constant_difficulty: false, + difficulty_algorithm: None, + // Will not be used since previous_height is not 0 + genesis_block_header: "".into(), + }; block_on(validate_headers( "MORTY", - previous_header, 1330480, headers, - false, - false, &TestBlockHeadersStorage {}, - &None, + ¶ms, )) .unwrap() } @@ -568,16 +611,19 @@ mod tests { let headers: Vec = vec!["00200020eab6fa183da8f9e4c761b31a67a76fa6a7658eb84c760200000000000000000063cd9585d434ec0db25894ec4b1f03735f10e31709c4395ea67c50c8378f134b972f166278100a17bfd87203".into(), "0000402045c698413fbe8b5bf10635658d2a1cec72062798e51200000000000000000000869617420a4c95b1d3d6d012419d2b6c199cff9b68dd9a790892a4da8466fb056033166278100a1743ac4d5b".into(), "0400e02019d733c1fd76a1fa5950de7bee9d80f107276b93a67204000000000000000000a0d1dee718f5f732c041800e9aa2c25e92be3f6de28278545388db8a6ae27df64c37166278100a170a970c19".into()]; - let previous_header: BlockHeader = 
"00c0d933bd3c3dcf14027754c7c8f7190a79b699188c8b24f49204000000000000000000ce2b1e4dc6cb44fae781df459428382d4b5be52766f67a42725cc0d78e00784d352e166278100a173d95ee89".into(); + let params = BlockHeaderVerificationParams { + difficulty_check: true, + constant_difficulty: false, + difficulty_algorithm: Some(DifficultyAlgorithm::BitcoinMainnet), + // Will not be used since previous_height is not 0 + genesis_block_header: "010000006fe28c0ab6f1b372c1a6a246ae63f74f931e8365e15a089c68d6190000000000982051fd1e4ba744bbbe680e1fee14677ba1a3c3540bf7b1cdb606e857233e0e61bc6649ffff001d01e36299".into() + }; block_on(validate_headers( "BTC", - previous_header, 724608, headers, - true, - false, &TestBlockHeadersStorage {}, - &None, + ¶ms, )) .unwrap() } diff --git a/mm2src/mm2_bitcoin/spv_validation/src/storage.rs b/mm2src/mm2_bitcoin/spv_validation/src/storage.rs index 0f0b07f2cb..a0aef25ebd 100644 --- a/mm2src/mm2_bitcoin/spv_validation/src/storage.rs +++ b/mm2src/mm2_bitcoin/spv_validation/src/storage.rs @@ -69,6 +69,8 @@ pub trait BlockHeaderStorageOps: Send + Sync + 'static { height: u64, ) -> Result, BlockHeaderStorageError>; + async fn get_last_block_height(&self, for_coin: &str) -> Result; + async fn get_last_block_header_with_non_max_bits( &self, for_coin: &str, diff --git a/mm2src/mm2_bitcoin/spv_validation/src/work.rs b/mm2src/mm2_bitcoin/spv_validation/src/work.rs index c3cac78df2..f2ecd46965 100644 --- a/mm2src/mm2_bitcoin/spv_validation/src/work.rs +++ b/mm2src/mm2_bitcoin/spv_validation/src/work.rs @@ -177,6 +177,7 @@ pub(crate) mod tests { use primitives::hash::H256; use serde::Deserialize; use std::collections::HashMap; + use std::convert::TryInto; const BLOCK_HEADERS_STR: &str = include_str!("./for_tests/workTestVectors.json"); @@ -233,6 +234,15 @@ pub(crate) mod tests { Ok(None) } + async fn get_last_block_height(&self, for_coin: &str) -> Result { + Ok(get_block_headers_for_coin(for_coin) + .into_keys() + .max_by(|a, b| a.cmp(b)) + .unwrap() + .try_into() + .unwrap()) + 
} + async fn get_last_block_header_with_non_max_bits( &self, for_coin: &str, diff --git a/mm2src/mm2_main/src/lp_swap.rs b/mm2src/mm2_main/src/lp_swap.rs index e82fd72d21..0766093e37 100644 --- a/mm2src/mm2_main/src/lp_swap.rs +++ b/mm2src/mm2_main/src/lp_swap.rs @@ -1546,7 +1546,6 @@ mod lp_swap_tests { disable_cert_verification: false, }) .collect(), - block_header_params: None, }, utxo_merge_params: None, tx_history: false, From e85eddf49301ea04632f4773e2db47468edfa09b Mon Sep 17 00:00:00 2001 From: shamardy Date: Thu, 18 Aug 2022 22:34:09 +0200 Subject: [PATCH 18/33] fix validate spv proof to use a validated header from storage + other refactors --- mm2src/coins/utxo.rs | 3 +- mm2src/coins/utxo/bch.rs | 2 +- mm2src/coins/utxo/rpc_clients.rs | 151 +++++++----------- mm2src/coins/utxo/spv.rs | 17 +- .../spv_validation/src/spv_proof.rs | 66 ++++---- 5 files changed, 94 insertions(+), 145 deletions(-) diff --git a/mm2src/coins/utxo.rs b/mm2src/coins/utxo.rs index bfcd3fba9d..0e041843be 100644 --- a/mm2src/coins/utxo.rs +++ b/mm2src/coins/utxo.rs @@ -511,7 +511,8 @@ pub struct UtxoCoinConf { /// Used in condition where the coin will validate spv proof or not pub enable_spv_proof: bool, /// The parameters that specify how the coin block headers should be verified if spv proof is enabled - // Todo: "if spv proof is enabled"? maybe if block headers storage is enabled + // Todo: "if spv proof is enabled"? 
maybe if block headers storage is enabled (enable spv can't be on without having blockheaders in storage) + // Todo: maybe refactor enable_spv_proof, block_headers_verification_params pub block_headers_verification_params: Option, } diff --git a/mm2src/coins/utxo/bch.rs b/mm2src/coins/utxo/bch.rs index 567e1b836a..085f3f4bea 100644 --- a/mm2src/coins/utxo/bch.rs +++ b/mm2src/coins/utxo/bch.rs @@ -622,7 +622,7 @@ impl BchCoin { Ok(slp_tx_details_builder.build()) } - pub async fn get_block_timestamp(&self, height: u64) -> Result> { + pub async fn get_block_timestamp(&self, height: u64) -> Result> { self.as_ref().rpc_client.get_block_timestamp(height).await } } diff --git a/mm2src/coins/utxo/rpc_clients.rs b/mm2src/coins/utxo/rpc_clients.rs index 299221b7e9..4222bfdc7f 100644 --- a/mm2src/coins/utxo/rpc_clients.rs +++ b/mm2src/coins/utxo/rpc_clients.rs @@ -2,7 +2,7 @@ #![cfg_attr(target_arch = "wasm32", allow(dead_code))] use crate::utxo::utxo_block_header_storage::BlockHeaderStorage; -use crate::utxo::{output_script, sat_from_big_decimal, GetTxError, GetTxHeightError}; +use crate::utxo::{output_script, sat_from_big_decimal, GetBlockHeaderError, GetTxError, GetTxHeightError}; use crate::{big_decimal_from_sat_unsigned, NumConversError, RpcTransportEventHandler, RpcTransportEventHandlerShared}; use async_trait::async_trait; use chain::{BlockHeader, BlockHeaderBits, BlockHeaderNonce, OutPoint, Transaction as UtxoTx}; @@ -343,7 +343,7 @@ pub trait UtxoRpcClientOps: fmt::Debug + Send + Sync + 'static { ) -> UtxoRpcFut; /// Returns block time in seconds since epoch (Jan 1 1970 GMT). - async fn get_block_timestamp(&self, height: u64) -> Result>; + async fn get_block_timestamp(&self, height: u64) -> Result>; /// Returns verbose transaction by the given `txid` if it's on-chain or None if it's not.
async fn get_tx_if_onchain(&self, tx_hash: &H256Json) -> Result, MmError> { @@ -901,7 +901,7 @@ impl UtxoRpcClientOps for NativeClient { Box::new(fut.boxed().compat()) } - async fn get_block_timestamp(&self, height: u64) -> Result> { + async fn get_block_timestamp(&self, height: u64) -> Result> { let block = self.get_block_by_height(height).await?; Ok(block.time as u64) } @@ -1870,14 +1870,19 @@ impl ElectrumClient { rpc_func!(self, "blockchain.block.headers", start_height, count) } - // Todo: revise this function as it wasn't written by me, add a comment that this is inclusive pub fn retrieve_headers(&self, from: u64, to: u64) -> UtxoRpcFut<(HashMap, Vec)> { let coin_name = self.coin_ticker.clone(); - // Todo: check for to >= from and that neither are zero - let count = to - from + 1; + if from == 0 || to < from { + return Box::new(futures01::future::err( + UtxoRpcError::Internal("Invalid values for from/to parameters".to_string()).into(), + )); + } + let count: NonZeroU64 = match (to - from + 1).try_into() { + Ok(c) => c, + Err(e) => return Box::new(futures01::future::err(UtxoRpcError::Internal(e.to_string()).into())), + }; Box::new( - // Todo: remove unwrap - self.blockchain_block_headers(from, count.try_into().unwrap()) + self.blockchain_block_headers(from, count) .map_to_mm_fut(UtxoRpcError::from) .and_then(move |headers| { let (block_registry, block_headers) = { @@ -1912,24 +1917,6 @@ impl ElectrumClient { rpc_func!(self, "blockchain.transaction.get_merkle", txid, height) } - // Todo: remove comments - // async fn get_tx_height_from_rpc(&self, tx: &UtxoTx) -> Result> { - // for output in tx.outputs.clone() { - // let script_pubkey_str = hex::encode(electrum_script_hash(&output.script_pubkey)); - // if let Ok(history) = self.scripthash_get_history(script_pubkey_str.as_str()).compat().await { - // if let Some(item) = history - // .into_iter() - // .find(|item| item.tx_hash.reversed() == H256Json(*tx.hash()) && item.height > 0) - // { - // return Ok(item.height 
as u64); - // } - // } - // } - // MmError::err(GetTxHeightError::HeightNotFound( - // "Couldn't find height through electrum!".into(), - // )) - // } - // get_tx_height_from_rpc is costly since it loops through history after requesting the whole history of the script pubkey // This method should always be used if the block headers are saved to the DB async fn get_tx_height_from_storage(&self, tx: &UtxoTx) -> Result> { @@ -1944,63 +1931,45 @@ impl ElectrumClient { .try_into()?) } - // Todo: remove this or find other solution - // async fn valid_block_header_from_storage(&self, height: u64) -> Result> { - // let storage = match self.block_headers_storage() { - // Some(storage) => storage, - // None => { - // return MmError::err(GetBlockHeaderError::StorageError(BlockHeaderStorageError::Internal( - // "block_headers_storage is not initialized".to_owned(), - // ))) - // }, - // }; - // let ticker = self.coin_name(); - // match storage.get_block_header(ticker, height).await? { - // None => { - // let bytes = self.blockchain_block_header(height).compat().await?; - // let header: BlockHeader = deserialize(bytes.0.as_slice())?; - // let params = &storage.params; - // let blocks_limit = params.blocks_limit_to_check; - // let (headers_registry, headers) = self.retrieve_last_headers(blocks_limit, height).compat().await?; - // let previous_header_height = if height < blocks_limit.get() { - // 0 - // } else { - // height - blocks_limit.get() - // }; - // match validate_headers( - // ticker, - // previous_header_height, - // headers, - // params.difficulty_check, - // params.constant_difficulty, - // storage, - // ¶ms.difficulty_algorithm, - // params.genesis_block_header.clone(), - // ) - // .await - // { - // Ok(_) => { - // storage.add_block_headers_to_storage(ticker, headers_registry).await?; - // Ok(header) - // }, - // Err(err) => MmError::err(GetBlockHeaderError::SPVError(err)), - // } - // }, - // Some(header) => Ok(header), - // } - // } - - // Todo: remove this or 
find other solution - // async fn block_header_from_storage_or_rpc(&self, height: u64) -> Result> { - // match self.block_headers_storage() { - // Some(_) => self.valid_block_header_from_storage(height).await, - // None => Ok(deserialize( - // self.blockchain_block_header(height).compat().await?.as_slice(), - // )?), - // } - // } - - pub async fn get_merkle_and_header_from_rpc( + // get_tx_height_from_storage is always preferred to be used instead of this, but if there is no headers in storage (storing headers is not enabled) + // this function can be used instead + // Todo: This can be used in lightning if spv is not enabled for platform coin (connected to trusted server), will remove #[allow(dead_code)] after it's used + #[allow(dead_code)] + async fn get_tx_height_from_rpc(&self, tx: &UtxoTx) -> Result> { + for output in tx.outputs.clone() { + let script_pubkey_str = hex::encode(electrum_script_hash(&output.script_pubkey)); + if let Ok(history) = self.scripthash_get_history(script_pubkey_str.as_str()).compat().await { + if let Some(item) = history + .into_iter() + .find(|item| item.tx_hash.reversed() == H256Json(*tx.hash()) && item.height > 0) + { + return Ok(item.height as u64); + } + } + } + MmError::err(GetTxHeightError::HeightNotFound( + "Couldn't find height through electrum!".into(), + )) + } + + async fn block_header_from_storage(&self, height: u64) -> Result> { + let ticker = self.coin_name(); + self.block_headers_storage() + .get_block_header(ticker, height) + .await? 
+ .ok_or_else(|| GetBlockHeaderError::Internal("Header not in storage!".into()).into()) + } + + async fn block_header_from_storage_or_rpc(&self, height: u64) -> Result> { + match self.block_header_from_storage(height).await { + Ok(h) => Ok(h), + Err(_) => Ok(deserialize( + self.blockchain_block_header(height).compat().await?.as_slice(), + )?), + } + } + + pub async fn get_merkle_and_validated_header( &self, tx: &UtxoTx, ) -> Result<(TxMerkleBranch, BlockHeader, u64), MmError> { @@ -2012,14 +1981,7 @@ impl ElectrumClient { .await .map_to_mm(|e| SPVError::UnableToGetMerkle(e.to_string()))?; - let header: BlockHeader = deserialize( - self.blockchain_block_header(height) - .compat() - .await - .map_err(|e| SPVError::UnableToGetHeader(e.to_string()))? - .as_slice(), - ) - .map_err(|e| SPVError::UnableToGetHeader(e.to_string()))?; + let header = self.block_header_from_storage(height).await?; Ok((merkle_branch, header, height)) } @@ -2244,11 +2206,8 @@ impl UtxoRpcClientOps for ElectrumClient { ) } - async fn get_block_timestamp(&self, height: u64) -> Result> { - let header_bytes = self.blockchain_block_header(height).compat().await?; - let header: BlockHeader = - deserialize(header_bytes.0.as_slice()).map_to_mm(|e| UtxoRpcError::InvalidResponse(format!("{:?}", e)))?; - Ok(header.time as u64) + async fn get_block_timestamp(&self, height: u64) -> Result> { + Ok(self.block_header_from_storage_or_rpc(height).await?.time as u64) } } diff --git a/mm2src/coins/utxo/spv.rs b/mm2src/coins/utxo/spv.rs index 2a0becfc15..9cf1a012a8 100644 --- a/mm2src/coins/utxo/spv.rs +++ b/mm2src/coins/utxo/spv.rs @@ -1,6 +1,6 @@ use crate::utxo::rpc_clients::ElectrumClient; use async_trait::async_trait; -use chain::{BlockHeader, RawBlockHeader, Transaction as UtxoTx}; +use chain::{BlockHeader, Transaction as UtxoTx}; use common::executor::Timer; use common::log::error; use common::now_ms; @@ -29,7 +29,6 @@ pub trait SimplePaymentVerification { #[async_trait] impl SimplePaymentVerification for 
ElectrumClient { - // Todo: this is not working right should get a header from DB to use for validation async fn validate_spv_proof( &self, tx: &UtxoTx, @@ -39,7 +38,7 @@ impl SimplePaymentVerification for ElectrumClient { return MmError::err(SPVError::InvalidVout); } - let (merkle_branch, header, height) = loop { + let (merkle_branch, validated_header, height) = loop { if now_ms() / 1000 > try_spv_proof_until { // Todo: find a way to not show this error when height is still 0 error!( @@ -50,8 +49,8 @@ impl SimplePaymentVerification for ElectrumClient { return MmError::err(SPVError::Timeout); } - // Todo: should get merkle from RPC and header from storage (also check where we get headers every where and get it from storage or RPC) - match self.get_merkle_and_header_from_rpc(tx).await { + // Todo: break up this function to blockchain_transaction_get_merkle, block_header_from_storage + match self.get_merkle_and_validated_header(tx).await { Ok(res) => break res, Err(e) => { error!( @@ -66,7 +65,6 @@ impl SimplePaymentVerification for ElectrumClient { } }; - let raw_header = RawBlockHeader::new(header.raw().take())?; let intermediate_nodes: Vec = merkle_branch .merkle .into_iter() @@ -78,17 +76,14 @@ impl SimplePaymentVerification for ElectrumClient { vin: serialize_list(&tx.inputs).take(), vout: serialize_list(&tx.outputs).take(), index: merkle_branch.pos as u64, - confirming_header: header.clone(), - raw_header, intermediate_nodes, }; - // Todo: refactor validate function along validate_spv_proof - proof.validate().map_err(MmError::new)?; + proof.validate(&validated_header).map_err(MmError::new)?; Ok(ConfirmedTransactionInfo { tx: tx.clone(), - header, + header: validated_header, index: proof.index, height, }) diff --git a/mm2src/mm2_bitcoin/spv_validation/src/spv_proof.rs b/mm2src/mm2_bitcoin/spv_validation/src/spv_proof.rs index 646afc0f16..acaee40dae 100644 --- a/mm2src/mm2_bitcoin/spv_validation/src/spv_proof.rs +++ 
b/mm2src/mm2_bitcoin/spv_validation/src/spv_proof.rs @@ -1,6 +1,5 @@ use crate::helpers_validation::{merkle_prove, validate_vin, validate_vout, SPVError}; use chain::BlockHeader; -use chain::RawBlockHeader; use primitives::hash::H256; pub const TRY_SPV_PROOF_INTERVAL: u64 = 10; @@ -15,15 +14,11 @@ pub struct SPVProof { pub vout: Vec, /// The transaction index in the merkle tree pub index: u64, - /// The confirming UTXO header - pub confirming_header: BlockHeader, - /// The Raw confirming UTXO Header - pub raw_header: RawBlockHeader, /// The intermediate nodes (digests between leaf and root) pub intermediate_nodes: Vec, } -/// Checks validity of an entire SPV Proof +/// Checks validity of an entire SPV Proof against a previously validated UTXO header retrieved from storage /// /// # Arguments /// @@ -36,30 +31,17 @@ pub struct SPVProof { /// # Notes /// Re-write with our own types based on `bitcoin_spv::std_types::SPVProof::validate` impl SPVProof { - pub fn validate_block_header(&self) -> Result<(), SPVError> { - if self.confirming_header.hash() != self.raw_header.digest() { - return Err(SPVError::WrongDigest); - } - if self.confirming_header.merkle_root_hash != self.raw_header.extract_merkle_root() { - return Err(SPVError::WrongMerkleRoot); - } - if self.confirming_header.previous_header_hash != self.raw_header.parent() { - return Err(SPVError::WrongPrevHash); - } - Ok(()) - } - - pub fn validate(&self) -> Result<(), SPVError> { + // Todo: This can be a part of block headers storage trait or a trait implemented for it (or maybe pass blockheader storage to this function) + pub fn validate(&self, validated_header: &BlockHeader) -> Result<(), SPVError> { if !validate_vin(self.vin.as_slice()) { return Err(SPVError::InvalidVin); } if !validate_vout(self.vout.as_slice()) { return Err(SPVError::InvalidVout); } - self.validate_block_header()?; merkle_prove( self.tx_id, - self.confirming_header.merkle_root_hash, + validated_header.merkle_root_hash, 
self.intermediate_nodes.clone(), self.index, ) @@ -69,25 +51,37 @@ impl SPVProof { #[cfg(test)] mod spv_proof_tests { use crate::spv_proof::SPVProof; - use chain::BlockHeader; - use chain::RawBlockHeader; + use chain::{BlockHeader, Transaction}; use hex::FromHex; - use serialization::deserialize; + use primitives::hash::H256; + use serialization::{deserialize, serialize_list}; #[test] - fn test_block_header() { - let header_hex = "040000008e4e7283b71dd1572d220935db0a1654d1042e92378579f8abab67b143f93a02fa026610d2634b72ff729b9ea7850c0d2c25eeaf7a82878ca42a8e9912028863a2d8a734eb73a4dc734072dbfd12406f1e7121bfe0e3d6c10922495c44e5cc1c91185d5ee519011d0400b9caaf41d4b63a6ab55bb4e6925d46fc3adea7be37b713d3a615e7cf0000fd40050001a80fa65b9a46fdb1506a7a4d26f43e7995d69902489b9f6c4599c88f9c169605cc135258953da0d6299ada4ff81a76ad63c943261078d5dd1918f91cea68b65b7fc362e9df49ba57c2ea5c6dba91591c85eb0d59a1905ac66e2295b7a291a1695301489a3cc7310fd45f2b94e3b8d94f3051e9bbaada1e0641fcec6e0d6230e76753aa9574a3f3e28eaa085959beffd3231dbe1aeea3955328f3a973650a38e31632a4ffc7ec007a3345124c0b99114e2444b3ef0ada75adbd077b247bbf3229adcffbe95bc62daac88f96317d5768540b5db636f8c39a8529a736465ed830ab2c1bbddf523587abe14397a6f1835d248092c4b5b691a955572607093177a5911e317739187b41f4aa662aa6bca0401f1a0a77915ebb6947db686cff549c5f4e7b9dd93123b00a1ae8d411cfb13fa7674de21cbee8e9fc74e12aa6753b261eab3d9256c7c32cc9b16219dad73c61014e7d88d74d5e218f12e11bc47557347ff49a9ab4490647418d2a5c2da1df24d16dfb611173608fe4b10a357b0fa7a1918b9f2d7836c84bf05f384e1e678b2fdd47af0d8e66e739fe45209ede151a180aba1188058a0db093e30bc9851980cf6fbfa5adb612d1146905da662c3347d7e7e569a1041641049d951ab867bc0c6a3863c7667d43f596a849434958cee2b63dc8fa11bd0f38aa96df86ed66461993f64736345313053508c4e939506c08a766f5b6ed0950759f3901bbc4db3dc97e05bf20b9dda4ff242083db304a4e487ac2101b823998371542354e5d534b5b6ae6420cc19b11512108b61208f4d9a5a97263d2c060da893544dea6251bcadc682d2238af35f2b1c2f65a73b89a4e194f9e1eef6f0e5948ef8d0d2862f48fd3356126b00c6a2d3770ecd0d1a78fa3497
4b454f270b23d461e357c9356c19496522b59ff9d5b4608c542ff89e558798324021704b2cfe9f6c1a70906c43c7a690f16615f198d29fa647d84ce8461fa570b33e3eada2ed7d77e1f280a0d2e9f03c2e1db535d922b1759a191b417595f3c15d8e8b7f810527ff942e18443a3860e67ccba356809ecedc31c5d8db59c7e039dae4b53d126679e8ffa20cc26e8b9d229c8f6ee434ad053f5f4f5a94e249a13afb995aad82b4d90890187e516e114b168fc7c7e291b9738ea578a7bab0ba31030b14ba90b772b577806ea2d17856b0cb9e74254ba582a9f2638ea7ed2ca23be898c6108ff8f466b443537ed9ec56b8771bfbf0f2f6e1092a28a7fd182f111e1dbdd155ea82c6cb72d5f9e6518cc667b8226b5f5c6646125fc851e97cf125f48949f988ed37c4283072fc03dd1da3e35161e17f44c0e22c76f708bb66405737ef24176e291b4fc2eadab876115dc62d48e053a85f0ad132ef07ad5175b036fe39e1ad14fcdcdc6ac5b3daabe05161a72a50545dd812e0f9af133d061b726f491e904d89ee57811ef58d3bda151f577aed381963a30d91fb98dc49413300d132a7021a5e834e266b4ac982d76e00f43f5336b8e8028a0cacfa11813b01e50f71236a73a4c0d0757c1832b0680ada56c80edf070f438ab2bc587542f926ff8d3644b8b8a56c78576f127dec7aed9cb3e1bc2442f978a9df1dc3056a63e653132d0f419213d3cb86e7b61720de1aa3af4b3757a58156970da27560c6629257158452b9d5e4283dc6fe7df42d2fda3352d5b62ce5a984d912777c3b01837df8968a4d494db1b663e0e68197dbf196f21ea11a77095263dec548e2010460840231329d83978885ee2423e8b327785970e27c6c6d436157fb5b56119b19239edbb730ebae013d82c35df4a6e70818a74d1ef7a2e87c090ff90e32939f58ed24e85b492b5750fd2cd14b9b8517136b76b1cc6ccc6f6f027f65f1967a0eb4f32cd6e5d5315"; + fn test_validate() { + // https://live.blockcypher.com/btc-testnet/block/000000000000004d36632fda8180ff16855d606e5515aab0750d9d4fe55fe7d6/ + let header_hex = "0000602002bf77bbb098f90f149430c314e71ef4e2671ea5e04a2503e0000000000000000406ffb54f2925360aae81bd3199f456928bbe6ae83a877902da9d9ffb08215da0ba3161ffff001a545a850b"; let header_bytes: Vec = header_hex.from_hex().unwrap(); - let header: BlockHeader = deserialize(header_bytes.as_slice()).unwrap(); + let validated_header: BlockHeader = deserialize(header_bytes.as_slice()).unwrap(); + 
//https://live.blockcypher.com/btc-testnet/tx/eefbafa4006e77099db059eebe14687965813283e5754d317431d9984554735d/ + let tx: Transaction = "0200000000010146c398e70cceaf9d8f734e603bc53e4c4c0605ab46cb1b5807a62c90f5aed50d0100000000feffffff023c0fc10c010000001600145033f65b590f2065fe55414213f1d25ab20b6c4f487d1700000000001600144b812d5ef41fc433654d186463d41b458821ff740247304402202438dc18801919baa64eb18f7e925ab6acdedc3751ea58ea164a26723b79fd39022060b46c1d277714c640cdc8512c36c862ffc646e7ff62438ef5cc847a5990bbf801210247b49d9e6b0089a1663668829e573c629c936eb430c043af9634aa57cf97a33cbee81f00".into(); + let intermediate_nodes: Vec = vec![ + "434d6b93388ab077aa12d6257253cc036fd6122e9e88465a86f4fd682fc6e006".into(), + "bd9af28e56cf6731e78ee1503a65d9cc9b15c148daa474e71e085176f48996ac".into(), + "605f6f83423ef3b86623927ef2d9dcb0f8d9e40a8132217c2fa0910b84488ec7".into(), + "10b7ef06ef0756823dbf39dea717be397e7ccb49bbefc5cfc45e6f9d58793baf".into(), + "19183ceae11796a9b1d0893e0561870bbce4d060c9547b1e91ad8b34eb3d5001".into(), + "1b16723739522955422b4286b4d8620d2a704b6997e6bbd809d151b8d8d64611".into(), + "6f8496469b19dd35871684332dfd3fc0205d83d2c58c44ebdae068542bc951f6".into(), + "e0d2733bd7bce4e5690b71bc8f7cedb1edbc49a5ff85c3678ecdec894ea1c023".into(), + ]; + let intermediate_nodes = intermediate_nodes.into_iter().map(|hash| hash.reversed()).collect(); let spv_proof = SPVProof { - tx_id: Default::default(), - vin: vec![], - vout: vec![], - index: 0, - confirming_header: header, - raw_header: RawBlockHeader::new(header_bytes).unwrap(), - intermediate_nodes: vec![], + tx_id: tx.hash(), + vin: serialize_list(&tx.inputs).take(), + vout: serialize_list(&tx.outputs).take(), + index: 1, + intermediate_nodes, }; - spv_proof.validate_block_header().unwrap() + spv_proof.validate(&validated_header).unwrap() } } From e0d6738eb14b0e8decfba1910fb33f25bb722a4f Mon Sep 17 00:00:00 2001 From: shamardy Date: Fri, 19 Aug 2022 00:00:22 +0200 Subject: [PATCH 19/33] get_last_block_height from storage return 0 if 
db is empty --- mm2src/coins/utxo/utxo_block_header_storage.rs | 2 +- mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs | 4 ++-- mm2src/coins/utxo/utxo_common.rs | 12 +++++------- .../coins/utxo/utxo_indexedb_block_header_storage.rs | 2 +- mm2src/coins/utxo/utxo_sql_block_header_storage.rs | 10 ++++++++-- .../spv_validation/src/helpers_validation.rs | 4 ++-- mm2src/mm2_bitcoin/spv_validation/src/storage.rs | 2 +- mm2src/mm2_bitcoin/spv_validation/src/work.rs | 5 +---- 8 files changed, 21 insertions(+), 20 deletions(-) diff --git a/mm2src/coins/utxo/utxo_block_header_storage.rs b/mm2src/coins/utxo/utxo_block_header_storage.rs index b23c7358b7..9dd70eabc6 100644 --- a/mm2src/coins/utxo/utxo_block_header_storage.rs +++ b/mm2src/coins/utxo/utxo_block_header_storage.rs @@ -78,7 +78,7 @@ impl BlockHeaderStorageOps for BlockHeaderStorage { self.inner.get_block_header_raw(for_coin, height).await } - async fn get_last_block_height(&self, for_coin: &str) -> Result { + async fn get_last_block_height(&self, for_coin: &str) -> Result { self.inner.get_last_block_height(for_coin).await } diff --git a/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs b/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs index 627b8c4d14..a9ab706eb4 100644 --- a/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs +++ b/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs @@ -145,6 +145,7 @@ pub trait MergeUtxoArcOps: UtxoCoinBuilderCom pub trait BlockHeaderUtxoArcOps: UtxoCoinBuilderCommonOps { // Todo: this should be called only if storing headers is enabled and should be called after syncing the latest header on coin activation + // Todo: probably this function needs to be refactored fn spawn_block_header_utxo_loop_if_required( &self, weak: UtxoWeak, @@ -155,8 +156,7 @@ pub trait BlockHeaderUtxoArcOps: UtxoCoinBuilderCommonOps { F: Fn(UtxoArc) -> T + Send + Sync + 'static, T: UtxoCommonOps, { - // Todo: can this be a normal if condition or add a method for is_native, is_electrum? 
- if let UtxoRpcClientEnum::Electrum(_) = rpc_client { + if !rpc_client.is_native() { let ticker = self.ticker().to_owned(); let (fut, abort_handle) = abortable(block_header_utxo_loop(weak, constructor)); info!("Starting UTXO block header loop for coin {}", ticker); diff --git a/mm2src/coins/utxo/utxo_common.rs b/mm2src/coins/utxo/utxo_common.rs index 8757117a2e..087c63798b 100644 --- a/mm2src/coins/utxo/utxo_common.rs +++ b/mm2src/coins/utxo/utxo_common.rs @@ -3447,7 +3447,6 @@ pub async fn block_header_utxo_loop(weak: UtxoWeak, constructo UtxoRpcClientEnum::Native(_) => return, UtxoRpcClientEnum::Electrum(e) => e.block_headers_storage(), }; - // Todo: this needs to be moved to coin activation probably match storage.is_initialized_for(ticker).await { Ok(true) => info!("Block Header Storage already initialized for {}", ticker), Ok(false) => { @@ -3472,17 +3471,16 @@ pub async fn block_header_utxo_loop(weak: UtxoWeak, constructo let ticker = coin.as_ref().conf.ticker.as_str(); let storage = client.block_headers_storage(); - // Todo: remove unwraps - let last_stored_block_height: u64 = storage.get_last_block_height(ticker).await.unwrap().try_into().unwrap(); + let from_block_height = + ok_or_continue_after_sleep!(storage.get_last_block_height(ticker).await, BLOCK_HEADERS_LOOP_INTERVAL) + 1; // Todo: what to do about chain reorganization?? 
- let height = ok_or_continue_after_sleep!( + let to_block_height = ok_or_continue_after_sleep!( coin.as_ref().rpc_client.get_block_count().compat().await, BLOCK_HEADERS_LOOP_INTERVAL ); let (block_registry, block_headers) = ok_or_continue_after_sleep!( - // Todo: last_stored_block_height + 1 is repeated (add a variable) client - .retrieve_headers(last_stored_block_height + 1, height) + .retrieve_headers(from_block_height, to_block_height) .compat() .await, BLOCK_HEADERS_LOOP_INTERVAL @@ -3490,7 +3488,7 @@ pub async fn block_header_utxo_loop(weak: UtxoWeak, constructo // Todo: check this again (now if block_headers_verification_params is none in coin config headers will be added without validation) if let Some(params) = &coin.as_ref().conf.block_headers_verification_params { ok_or_continue_after_sleep!( - validate_headers(ticker, last_stored_block_height + 1, block_headers, storage, params,).await, + validate_headers(ticker, from_block_height, block_headers, storage, params,).await, BLOCK_HEADERS_LOOP_INTERVAL ); } diff --git a/mm2src/coins/utxo/utxo_indexedb_block_header_storage.rs b/mm2src/coins/utxo/utxo_indexedb_block_header_storage.rs index 6382ff71cd..050e766ded 100644 --- a/mm2src/coins/utxo/utxo_indexedb_block_header_storage.rs +++ b/mm2src/coins/utxo/utxo_indexedb_block_header_storage.rs @@ -37,7 +37,7 @@ impl BlockHeaderStorageOps for IndexedDBBlockHeadersStorage { Ok(None) } - async fn get_last_block_height(&self, _for_coin: &str) -> Result { + async fn get_last_block_height(&self, _for_coin: &str) -> Result { Err(BlockHeaderStorageError::Internal("Not implemented".into())) } diff --git a/mm2src/coins/utxo/utxo_sql_block_header_storage.rs b/mm2src/coins/utxo/utxo_sql_block_header_storage.rs index b1630340e3..2100ad189e 100644 --- a/mm2src/coins/utxo/utxo_sql_block_header_storage.rs +++ b/mm2src/coins/utxo/utxo_sql_block_header_storage.rs @@ -11,6 +11,7 @@ use spv_validation::storage::{BlockHeaderStorageError, BlockHeaderStorageOps}; use 
spv_validation::work::MAX_BITS_BTC; use std::collections::HashMap; use std::convert::TryInto; +use std::num::TryFromIntError; use std::sync::{Arc, Mutex}; fn block_headers_cache_table(ticker: &str) -> String { ticker.to_owned() + "_block_headers_cache" } @@ -221,7 +222,7 @@ impl BlockHeaderStorageOps for SqliteBlockHeadersStorage { }) } - async fn get_last_block_height(&self, for_coin: &str) -> Result { + async fn get_last_block_height(&self, for_coin: &str) -> Result { let sql = get_last_block_height_sql(for_coin)?; let selfi = self.clone(); @@ -234,7 +235,12 @@ impl BlockHeaderStorageOps for SqliteBlockHeadersStorage { coin: for_coin.to_string(), reason: e.to_string(), })? - .ok_or_else(|| BlockHeaderStorageError::Internal("Database is empty".into())) + .unwrap_or(0i64) + .try_into() + .map_err(|e: TryFromIntError| BlockHeaderStorageError::DecodeError { + coin: for_coin.to_string(), + reason: e.to_string(), + }) // last_block_height is 0 if the database is empty } async fn get_last_block_header_with_non_max_bits( diff --git a/mm2src/mm2_bitcoin/spv_validation/src/helpers_validation.rs b/mm2src/mm2_bitcoin/spv_validation/src/helpers_validation.rs index 94a92d5951..cd16c74a3b 100644 --- a/mm2src/mm2_bitcoin/spv_validation/src/helpers_validation.rs +++ b/mm2src/mm2_bitcoin/spv_validation/src/helpers_validation.rs @@ -340,8 +340,8 @@ pub async fn validate_headers( params: &BlockHeaderVerificationParams, ) -> Result<(), SPVError> { let mut previous_header = if previous_height == 0 { - // Todo: remove unwrap and add validation earlier in coin activation (convert to blockheader struct there) - BlockHeader::try_from(params.genesis_block_header.clone()).unwrap() + // Todo: add validation earlier in coin activation (convert to blockheader struct there) + BlockHeader::try_from(params.genesis_block_header.clone()).map_err(|e| SPVError::Internal(e.to_string()))? 
} else { storage .get_block_header(coin, previous_height) diff --git a/mm2src/mm2_bitcoin/spv_validation/src/storage.rs b/mm2src/mm2_bitcoin/spv_validation/src/storage.rs index a0aef25ebd..8795a1927b 100644 --- a/mm2src/mm2_bitcoin/spv_validation/src/storage.rs +++ b/mm2src/mm2_bitcoin/spv_validation/src/storage.rs @@ -69,7 +69,7 @@ pub trait BlockHeaderStorageOps: Send + Sync + 'static { height: u64, ) -> Result, BlockHeaderStorageError>; - async fn get_last_block_height(&self, for_coin: &str) -> Result; + async fn get_last_block_height(&self, for_coin: &str) -> Result; async fn get_last_block_header_with_non_max_bits( &self, diff --git a/mm2src/mm2_bitcoin/spv_validation/src/work.rs b/mm2src/mm2_bitcoin/spv_validation/src/work.rs index f2ecd46965..0d6fd625ea 100644 --- a/mm2src/mm2_bitcoin/spv_validation/src/work.rs +++ b/mm2src/mm2_bitcoin/spv_validation/src/work.rs @@ -177,7 +177,6 @@ pub(crate) mod tests { use primitives::hash::H256; use serde::Deserialize; use std::collections::HashMap; - use std::convert::TryInto; const BLOCK_HEADERS_STR: &str = include_str!("./for_tests/workTestVectors.json"); @@ -234,12 +233,10 @@ pub(crate) mod tests { Ok(None) } - async fn get_last_block_height(&self, for_coin: &str) -> Result { + async fn get_last_block_height(&self, for_coin: &str) -> Result { Ok(get_block_headers_for_coin(for_coin) .into_keys() .max_by(|a, b| a.cmp(b)) - .unwrap() - .try_into() .unwrap()) } From 858c8fd5da6651aedc0d4b436d7f9ff75d1b2470 Mon Sep 17 00:00:00 2001 From: shamardy Date: Tue, 23 Aug 2022 18:32:01 +0200 Subject: [PATCH 20/33] wip: add block headers synchronization to utxo activation v2 --- mm2src/coins/qrc20.rs | 5 +- mm2src/coins/utxo.rs | 13 +++- mm2src/coins/utxo/bch.rs | 14 +++- mm2src/coins/utxo/qtum.rs | 20 +++-- mm2src/coins/utxo/spv.rs | 1 - .../coins/utxo/utxo_block_header_storage.rs | 15 +--- mm2src/coins/utxo/utxo_builder/mod.rs | 3 +- .../utxo/utxo_builder/utxo_arc_builder.rs | 73 ++++++++++++++---- 
.../utxo/utxo_builder/utxo_coin_builder.rs | 5 +- mm2src/coins/utxo/utxo_common.rs | 74 +++++++++++++------ mm2src/coins/utxo/utxo_standard.rs | 1 + mm2src/coins/z_coin.rs | 4 +- .../init_utxo_standard_activation.rs | 56 +++++++++++++- .../init_utxo_standard_statuses.rs | 5 ++ .../spv_validation/src/spv_proof.rs | 1 - 15 files changed, 220 insertions(+), 70 deletions(-) diff --git a/mm2src/coins/qrc20.rs b/mm2src/coins/qrc20.rs index 4378cf44f4..b00196949c 100644 --- a/mm2src/coins/qrc20.rs +++ b/mm2src/coins/qrc20.rs @@ -7,7 +7,8 @@ use crate::utxo::rpc_clients::{ElectrumClient, NativeClient, UnspentInfo, UtxoRp #[cfg(not(target_arch = "wasm32"))] use crate::utxo::tx_cache::{UtxoVerboseCacheOps, UtxoVerboseCacheShared}; use crate::utxo::utxo_builder::{UtxoCoinBuildError, UtxoCoinBuildResult, UtxoCoinBuilderCommonOps, - UtxoCoinWithIguanaPrivKeyBuilder, UtxoFieldsWithIguanaPrivKeyBuilder}; + UtxoCoinWithIguanaPrivKeyBuilder, UtxoFieldsWithIguanaPrivKeyBuilder, + UtxoSyncStatusLoopHandle}; use crate::utxo::utxo_common::{self, big_decimal_from_sat, check_all_inputs_signed_by_pub, UtxoTxBuilder}; use crate::utxo::{qtum, ActualTxFee, AdditionalTxData, BroadcastTxErr, FeePolicy, GenerateTxError, GetUtxoListOps, HistoryUtxoTx, HistoryUtxoTxMap, MatureUnspentList, RecentlySpentOutPointsGuard, @@ -188,6 +189,8 @@ impl<'a> UtxoCoinBuilderCommonOps for Qrc20CoinBuilder<'a> { fn ticker(&self) -> &str { self.ticker } + fn sync_status_loop_handle(&self) -> Option { None } + async fn decimals(&self, rpc_client: &UtxoRpcClientEnum) -> UtxoCoinBuildResult { if let Some(d) = self.conf()["decimals"].as_u64() { return Ok(d as u8); diff --git a/mm2src/coins/utxo.rs b/mm2src/coins/utxo.rs index 0e041843be..ba826a76f4 100644 --- a/mm2src/coins/utxo.rs +++ b/mm2src/coins/utxo.rs @@ -508,11 +508,11 @@ pub struct UtxoCoinConf { pub estimate_fee_blocks: u32, /// The name of the coin with which Trezor wallet associates this asset. 
pub trezor_coin: Option, - /// Used in condition where the coin will validate spv proof or not + /// Whether to verify swaps and lightning transactions using spv or not. When enabled, block headers will be retrieved, verified according + /// to block_headers_verification_params and stored in the DB. Can be false if the coin's RPC server is trusted. pub enable_spv_proof: bool, - /// The parameters that specify how the coin block headers should be verified if spv proof is enabled - // Todo: "if spv proof is enabled"? maybe if block headers storage is enabled (enable spv can't be on without having blockheaders in storage) - // Todo: maybe refacor enable_spv_proof, block_headers_verification_params + /// The parameters that specify how the coin block headers should be verified. If None and enable_spv_proof is true, + /// headers will be saved in DB without verification, can be none if the coin's RPC server is trusted. pub block_headers_verification_params: Option, } @@ -1319,6 +1319,11 @@ pub enum UtxoRpcMode { Electrum { servers: Vec }, } +impl UtxoRpcMode { + #[inline] + pub fn is_native(&self) -> bool { matches!(*self, UtxoRpcMode::Native) } +} + #[derive(Debug)] pub struct ElectrumBuilderArgs { pub spawn_ping: bool, diff --git a/mm2src/coins/utxo/bch.rs b/mm2src/coins/utxo/bch.rs index 085f3f4bea..890acef673 100644 --- a/mm2src/coins/utxo/bch.rs +++ b/mm2src/coins/utxo/bch.rs @@ -656,9 +656,17 @@ pub async fn bch_coin_from_conf_and_params( let priv_key_policy = PrivKeyBuildPolicy::IguanaPrivKey(priv_key); let coin = try_s!( - UtxoArcBuilder::new(ctx, ticker, conf, ¶ms.utxo_params, priv_key_policy, constructor) - .build() - .await + UtxoArcBuilder::new( + ctx, + ticker, + conf, + ¶ms.utxo_params, + priv_key_policy, + None, + constructor + ) + .build() + .await ); Ok(coin) } diff --git a/mm2src/coins/utxo/qtum.rs b/mm2src/coins/utxo/qtum.rs index dd28557da1..16f6593faf 100644 --- a/mm2src/coins/utxo/qtum.rs +++ b/mm2src/coins/utxo/qtum.rs @@ -14,7 +14,7 @@ use 
crate::rpc_command::init_scan_for_new_addresses::{self, InitScanAddressesRpc use crate::rpc_command::init_withdraw::{InitWithdrawCoin, WithdrawTaskHandle}; use crate::utxo::utxo_builder::{BlockHeaderUtxoArcOps, MergeUtxoArcOps, UtxoCoinBuildError, UtxoCoinBuilder, UtxoCoinBuilderCommonOps, UtxoFieldsWithHardwareWalletBuilder, - UtxoFieldsWithIguanaPrivKeyBuilder}; + UtxoFieldsWithIguanaPrivKeyBuilder, UtxoSyncStatusLoopHandle}; use crate::{eth, CanRefundHtlc, CoinBalance, CoinWithDerivationMethod, DelegationError, DelegationFut, GetWithdrawSenderAddress, NegotiateSwapContractAddrErr, PrivKeyBuildPolicy, SearchForSwapTxSpendInput, SignatureResult, StakingInfosFut, SwapOps, TradePreimageValue, TransactionFut, UnexpectedDerivationMethod, @@ -201,6 +201,8 @@ impl<'a> UtxoCoinBuilderCommonOps for QtumCoinBuilder<'a> { fn ticker(&self) -> &str { self.ticker } + fn sync_status_loop_handle(&self) -> Option { None } + fn check_utxo_maturity(&self) -> bool { self.activation_params().check_utxo_maturity.unwrap_or(true) } } @@ -217,20 +219,22 @@ impl<'a> UtxoCoinBuilder for QtumCoinBuilder<'a> { async fn build(self) -> MmResult { let utxo = self.build_utxo_fields().await?; - let rpc_client = utxo.rpc_client.clone(); + // Todo: Remove this if other comment is removed + // let rpc_client = utxo.rpc_client.clone(); let utxo_arc = UtxoArc::new(utxo); let utxo_weak = utxo_arc.downgrade(); let result_coin = QtumCoin::from(utxo_arc); - if let Some(abort_handler) = self.spawn_merge_utxo_loop_if_required(utxo_weak.clone(), QtumCoin::from) { + if let Some(abort_handler) = self.spawn_merge_utxo_loop_if_required(utxo_weak, QtumCoin::from) { self.ctx.abort_handlers.lock().unwrap().push(abort_handler); } - if let Some(abort_handler) = - self.spawn_block_header_utxo_loop_if_required(utxo_weak, &rpc_client, QtumCoin::from) - { - self.ctx.abort_handlers.lock().unwrap().push(abort_handler); - } + // Todo: Remove this or add it to qtum + // if let Some(abort_handler) = + // 
self.spawn_block_header_utxo_loop_if_required(utxo_weak, &rpc_client, QtumCoin::from, self.sync_status_notifier) + // { + // self.ctx.abort_handlers.lock().unwrap().push(abort_handler); + // } Ok(result_coin) } diff --git a/mm2src/coins/utxo/spv.rs b/mm2src/coins/utxo/spv.rs index 9cf1a012a8..2606b065ec 100644 --- a/mm2src/coins/utxo/spv.rs +++ b/mm2src/coins/utxo/spv.rs @@ -49,7 +49,6 @@ impl SimplePaymentVerification for ElectrumClient { return MmError::err(SPVError::Timeout); } - // Todo: break up this function to blockchain_transaction_get_merkle, block_header_from_storage match self.get_merkle_and_validated_header(tx).await { Ok(res) => break res, Err(e) => { diff --git a/mm2src/coins/utxo/utxo_block_header_storage.rs b/mm2src/coins/utxo/utxo_block_header_storage.rs index 9dd70eabc6..83df2ff946 100644 --- a/mm2src/coins/utxo/utxo_block_header_storage.rs +++ b/mm2src/coins/utxo/utxo_block_header_storage.rs @@ -12,24 +12,15 @@ use std::fmt::{Debug, Formatter}; pub struct BlockHeaderStorage { pub inner: Box, - // Todo: BlockHeaderVerificationParams should be initialized with coin activation when spv is enabled (will be used in lopp only) - // pub params: BlockHeaderVerificationParams, } impl Debug for BlockHeaderStorage { fn fmt(&self, _f: &mut Formatter<'_>) -> std::fmt::Result { Ok(()) } } -// Todo: check if it's better to remove this? 
-pub trait InitBlockHeaderStorageOps: Send + Sync + 'static { - fn new_from_ctx(ctx: MmArc) -> Result - where - Self: Sized; -} - -impl InitBlockHeaderStorageOps for BlockHeaderStorage { +impl BlockHeaderStorage { #[cfg(not(target_arch = "wasm32"))] - fn new_from_ctx(ctx: MmArc) -> Result { + pub(crate) fn new_from_ctx(ctx: MmArc) -> Result { let sqlite_connection = ctx.sqlite_connection.ok_or(BlockHeaderStorageError::Internal( "sqlite_connection is not initialized".to_owned(), ))?; @@ -39,7 +30,7 @@ impl InitBlockHeaderStorageOps for BlockHeaderStorage { } #[cfg(target_arch = "wasm32")] - fn new_from_ctx(_ctx: MmArc) -> Result { + pub(crate) fn new_from_ctx(_ctx: MmArc) -> Result { Ok(BlockHeaderStorage { inner: Box::new(IndexedDBBlockHeadersStorage {}), }) diff --git a/mm2src/coins/utxo/utxo_builder/mod.rs b/mm2src/coins/utxo/utxo_builder/mod.rs index cd48444513..8633c3e5bd 100644 --- a/mm2src/coins/utxo/utxo_builder/mod.rs +++ b/mm2src/coins/utxo/utxo_builder/mod.rs @@ -2,7 +2,8 @@ mod utxo_arc_builder; mod utxo_coin_builder; mod utxo_conf_builder; -pub use utxo_arc_builder::{BlockHeaderUtxoArcOps, MergeUtxoArcOps, UtxoArcBuilder}; +pub use utxo_arc_builder::{BlockHeaderUtxoArcOps, MergeUtxoArcOps, UtxoArcBuilder, UtxoSyncStatus, + UtxoSyncStatusLoopHandle}; pub use utxo_coin_builder::{UtxoCoinBuildError, UtxoCoinBuildResult, UtxoCoinBuilder, UtxoCoinBuilderCommonOps, UtxoCoinWithIguanaPrivKeyBuilder, UtxoFieldsWithHardwareWalletBuilder, UtxoFieldsWithIguanaPrivKeyBuilder}; diff --git a/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs b/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs index a9ab706eb4..6d8645becf 100644 --- a/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs +++ b/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs @@ -6,12 +6,46 @@ use crate::utxo::{GetUtxoListOps, UtxoArc, UtxoCommonOps, UtxoWeak}; use crate::{PrivKeyBuildPolicy, UtxoActivationParams}; use async_trait::async_trait; use common::executor::spawn; -use common::log::info; +use 
common::log::{info, LogOnError}; +use futures::channel::mpsc::Sender as AsyncSender; use futures::future::{abortable, AbortHandle}; use mm2_core::mm_ctx::MmArc; use mm2_err_handle::prelude::*; use serde_json::Value as Json; +pub enum UtxoSyncStatus { + SyncingBlockHeaders { + current_scanned_block: u64, + latest_block: u64, + }, + TemporaryError(String), + PermanentError(String), + Finished { + block_number: u64, + }, +} + +#[derive(Clone)] +pub struct UtxoSyncStatusLoopHandle(AsyncSender); + +impl UtxoSyncStatusLoopHandle { + pub fn new(sync_status_notifier: AsyncSender) -> Self { + UtxoSyncStatusLoopHandle(sync_status_notifier) + } + + pub fn notify_on_temp_error(&mut self, error: String) { + self.0 + .try_send(UtxoSyncStatus::TemporaryError(error)) + .debug_log_with_msg("No one seems interested in UtxoSyncStatus"); + } + + pub fn notify_on_permanent_error(&mut self, error: String) { + self.0 + .try_send(UtxoSyncStatus::PermanentError(error)) + .debug_log_with_msg("No one seems interested in UtxoSyncStatus"); + } +} + pub struct UtxoArcBuilder<'a, F, T> where F: Fn(UtxoArc) -> T + Send + Sync + 'static, @@ -21,6 +55,7 @@ where conf: &'a Json, activation_params: &'a UtxoActivationParams, priv_key_policy: PrivKeyBuildPolicy<'a>, + sync_status_loop_handle: Option, constructor: F, } @@ -34,6 +69,7 @@ where conf: &'a Json, activation_params: &'a UtxoActivationParams, priv_key_policy: PrivKeyBuildPolicy<'a>, + sync_status_loop_handle: Option, constructor: F, ) -> UtxoArcBuilder<'a, F, T> { UtxoArcBuilder { @@ -42,6 +78,7 @@ where conf, activation_params, priv_key_policy, + sync_status_loop_handle, constructor, } } @@ -59,6 +96,8 @@ where fn activation_params(&self) -> &UtxoActivationParams { self.activation_params } fn ticker(&self) -> &str { self.ticker } + + fn sync_status_loop_handle(&self) -> Option { self.sync_status_loop_handle.clone() } } impl<'a, F, T> UtxoFieldsWithIguanaPrivKeyBuilder for UtxoArcBuilder<'a, F, T> where @@ -94,6 +133,7 @@ where 
self.ctx.abort_handlers.lock().unwrap().push(abort_handler); } + // Todo: find a better way for this if let Some(abort_handler) = self.spawn_block_header_utxo_loop_if_required(utxo_weak, &rpc_client, self.constructor.clone()) { @@ -156,20 +196,25 @@ pub trait BlockHeaderUtxoArcOps: UtxoCoinBuilderCommonOps { F: Fn(UtxoArc) -> T + Send + Sync + 'static, T: UtxoCommonOps, { - if !rpc_client.is_native() { - let ticker = self.ticker().to_owned(); - let (fut, abort_handle) = abortable(block_header_utxo_loop(weak, constructor)); - info!("Starting UTXO block header loop for coin {}", ticker); - spawn(async move { - if let Err(e) = fut.await { - info!( - "spawn_block_header_utxo_loop_if_required stopped for {}, reason {}", - ticker, e - ); - } - }); - return Some(abort_handle); + // Todo: add condition for enable_spv_proof (should block headers be saved when enable_spv_proof is true only? what about for getting tx height?) + // Todo: because of sync_status_loop_handle this whole function might be refactored (rpc_client.is_native() should be checked when creating sync_status_loop_handle) + if let Some(sync_status_loop_handle) = self.sync_status_loop_handle() { + if !rpc_client.is_native() { + let ticker = self.ticker().to_owned(); + let (fut, abort_handle) = abortable(block_header_utxo_loop(weak, constructor, sync_status_loop_handle)); + info!("Starting UTXO block header loop for coin {}", ticker); + spawn(async move { + if let Err(e) = fut.await { + info!( + "spawn_block_header_utxo_loop_if_required stopped for {}, reason {}", + ticker, e + ); + } + }); + return Some(abort_handle); + } } + None } } diff --git a/mm2src/coins/utxo/utxo_builder/utxo_coin_builder.rs b/mm2src/coins/utxo/utxo_builder/utxo_coin_builder.rs index f0ec7ce575..c6324435ee 100644 --- a/mm2src/coins/utxo/utxo_builder/utxo_coin_builder.rs +++ b/mm2src/coins/utxo/utxo_builder/utxo_coin_builder.rs @@ -3,7 +3,8 @@ use crate::hd_wallet_storage::{HDWalletCoinStorage, HDWalletStorageError}; use 
crate::utxo::rpc_clients::{ElectrumClient, ElectrumClientImpl, ElectrumRpcRequest, EstimateFeeMethod, UtxoRpcClientEnum}; use crate::utxo::tx_cache::{UtxoVerboseCacheOps, UtxoVerboseCacheShared}; -use crate::utxo::utxo_block_header_storage::{BlockHeaderStorage, InitBlockHeaderStorageOps}; +use crate::utxo::utxo_block_header_storage::BlockHeaderStorage; +use crate::utxo::utxo_builder::utxo_arc_builder::UtxoSyncStatusLoopHandle; use crate::utxo::utxo_builder::utxo_conf_builder::{UtxoConfBuilder, UtxoConfError, UtxoConfResult}; use crate::utxo::{output_script, utxo_common, ElectrumBuilderArgs, ElectrumProtoVerifier, RecentlySpentOutPoints, TxFee, UtxoCoinConf, UtxoCoinFields, UtxoHDAccount, UtxoHDWallet, UtxoRpcMode, DEFAULT_GAP_LIMIT, @@ -285,6 +286,8 @@ pub trait UtxoCoinBuilderCommonOps { fn ticker(&self) -> &str; + fn sync_status_loop_handle(&self) -> Option; + fn address_format(&self) -> UtxoCoinBuildResult { let format_from_req = self.activation_params().address_format.clone(); let format_from_conf = json::from_value::>(self.conf()["address_format"].clone()) diff --git a/mm2src/coins/utxo/utxo_common.rs b/mm2src/coins/utxo/utxo_common.rs index 087c63798b..58acb4e45d 100644 --- a/mm2src/coins/utxo/utxo_common.rs +++ b/mm2src/coins/utxo/utxo_common.rs @@ -50,13 +50,13 @@ use std::sync::atomic::Ordering as AtomicOrdering; use utxo_signer::with_key_pair::p2sh_spend; use utxo_signer::UtxoSignerOps; +use crate::utxo::utxo_builder::UtxoSyncStatusLoopHandle; pub use chain::Transaction as UtxoTx; pub const DEFAULT_FEE_VOUT: usize = 0; pub const DEFAULT_SWAP_TX_SPEND_SIZE: u64 = 305; pub const DEFAULT_SWAP_VOUT: usize = 0; const MIN_BTC_TRADING_VOL: &str = "0.00777"; -// Todo: should I keep this or get it from config const BLOCK_HEADERS_LOOP_INTERVAL: f64 = 60.; macro_rules! 
true_or { @@ -3435,8 +3435,13 @@ fn increase_by_percent(num: u64, percent: f64) -> u64 { num + (percent.round() as u64) } -// Todo: This loop needs to be called when getting headers is enabled in conf only after getting all the headers when activating coin -pub async fn block_header_utxo_loop(weak: UtxoWeak, constructor: impl Fn(UtxoArc) -> T) { +// Todo: This loop needs to be called when getting headers is enabled in conf only after getting all the headers when activating coin +// Todo: add test for enabling utxo with enable_spv_proof to check that all the headers are retrieved right (should be ignored cause it will take a long time) +pub async fn block_header_utxo_loop( + weak: UtxoWeak, + constructor: impl Fn(UtxoArc) -> T, + mut sync_status_loop_handle: UtxoSyncStatusLoopHandle, +) { { let coin = match weak.upgrade() { Some(arc) => constructor(arc), @@ -3462,37 +3467,64 @@ pub async fn block_header_utxo_loop(weak: UtxoWeak, constructo Err(_e) => return, }; } + // Todo: should notify the status not only the errors while let Some(arc) = weak.upgrade() { let coin = constructor(arc); let client = match &coin.as_ref().rpc_client { + // Todo: should I send UtxoSyncStatus::Finished here just in case? UtxoRpcClientEnum::Native(_) => break, UtxoRpcClientEnum::Electrum(client) => client, }; let ticker = coin.as_ref().conf.ticker.as_str(); let storage = client.block_headers_storage(); - let from_block_height = - ok_or_continue_after_sleep!(storage.get_last_block_height(ticker).await, BLOCK_HEADERS_LOOP_INTERVAL) + 1; + let from_block_height = match storage.get_last_block_height(ticker).await { + Ok(h) => h + 1, + Err(e) => { + error!("Error {} on getting the height of the last stored header in DB!", e); + sync_status_loop_handle.notify_on_temp_error(e.to_string()); + Timer::sleep(BLOCK_HEADERS_LOOP_INTERVAL).await; + continue; + }, + }; + // Todo: what to do about chain reorganization?? 
- let to_block_height = ok_or_continue_after_sleep!( - coin.as_ref().rpc_client.get_block_count().compat().await, - BLOCK_HEADERS_LOOP_INTERVAL - ); - let (block_registry, block_headers) = ok_or_continue_after_sleep!( - client - .retrieve_headers(from_block_height, to_block_height) - .compat() - .await, - BLOCK_HEADERS_LOOP_INTERVAL - ); - // Todo: check this again (now if block_headers_verification_params is none in coin config headers will be added without validation) + let to_block_height = match coin.as_ref().rpc_client.get_block_count().compat().await { + Ok(h) => h, + Err(e) => { + error!("Error {} on getting the height of the latest block from rpc!", e); + sync_status_loop_handle.notify_on_temp_error(e.to_string()); + Timer::sleep(BLOCK_HEADERS_LOOP_INTERVAL).await; + continue; + }, + }; + + let (block_registry, block_headers) = match client + .retrieve_headers(from_block_height, to_block_height) + .compat() + .await + { + Ok(res) => res, + Err(e) => { + error!("Error {} on retrieving the latest headers from rpc!", e); + sync_status_loop_handle.notify_on_temp_error(e.to_string()); + Timer::sleep(BLOCK_HEADERS_LOOP_INTERVAL).await; + continue; + }, + }; + + // Todo: an attack can be used to send a fake header to fail validating and can't confirm a tx, should use a different server in such case (watch towers shall help) if let Some(params) = &coin.as_ref().conf.block_headers_verification_params { - ok_or_continue_after_sleep!( - validate_headers(ticker, from_block_height, block_headers, storage, params,).await, - BLOCK_HEADERS_LOOP_INTERVAL - ); + if let Err(e) = validate_headers(ticker, from_block_height, block_headers, storage, params).await { + error!("Error {} on validating the latest headers!", e); + sync_status_loop_handle.notify_on_permanent_error(e.to_string()); + // Todo: should rotate_servers here, if error is not due to rpc (instead of waiting)??? 
(should also check if error is due to RPC or not before sending permanent_error) + Timer::sleep(BLOCK_HEADERS_LOOP_INTERVAL).await; + continue; + } } + // Todo: remove ok_or_continue_after_sleep ok_or_continue_after_sleep!( storage.add_block_headers_to_storage(ticker, block_registry).await, BLOCK_HEADERS_LOOP_INTERVAL diff --git a/mm2src/coins/utxo/utxo_standard.rs b/mm2src/coins/utxo/utxo_standard.rs index d03123fdc3..f48f88ad86 100644 --- a/mm2src/coins/utxo/utxo_standard.rs +++ b/mm2src/coins/utxo/utxo_standard.rs @@ -57,6 +57,7 @@ pub async fn utxo_standard_coin_with_priv_key( conf, activation_params, priv_key_policy, + None, UtxoStandardCoin::from ) .build() diff --git a/mm2src/coins/z_coin.rs b/mm2src/coins/z_coin.rs index c29255f9de..26c3a5b69d 100644 --- a/mm2src/coins/z_coin.rs +++ b/mm2src/coins/z_coin.rs @@ -3,7 +3,7 @@ use crate::rpc_command::init_withdraw::{InitWithdrawCoin, WithdrawInProgressStat use crate::utxo::rpc_clients::{ElectrumRpcRequest, UnspentInfo, UtxoRpcClientEnum, UtxoRpcError, UtxoRpcFut, UtxoRpcResult}; use crate::utxo::utxo_builder::{UtxoCoinBuilderCommonOps, UtxoCoinWithIguanaPrivKeyBuilder, - UtxoFieldsWithIguanaPrivKeyBuilder}; + UtxoFieldsWithIguanaPrivKeyBuilder, UtxoSyncStatusLoopHandle}; use crate::utxo::utxo_common::{addresses_from_script, big_decimal_from_sat, big_decimal_from_sat_unsigned, payment_script}; use crate::utxo::{sat_from_big_decimal, utxo_common, ActualTxFee, AdditionalTxData, Address, BroadcastTxErr, @@ -738,6 +738,8 @@ impl<'a> UtxoCoinBuilderCommonOps for ZCoinBuilder<'a> { fn activation_params(&self) -> &UtxoActivationParams { &self.utxo_params } fn ticker(&self) -> &str { self.ticker } + + fn sync_status_loop_handle(&self) -> Option { None } } #[async_trait] diff --git a/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs b/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs index 26cbd372c2..166e79cec7 100644 --- 
a/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs +++ b/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs @@ -8,11 +8,13 @@ use crate::utxo_activation::init_utxo_standard_statuses::{UtxoStandardAwaitingSt UtxoStandardUserAction}; use crate::utxo_activation::utxo_standard_activation_result::UtxoStandardActivationResult; use async_trait::async_trait; -use coins::utxo::utxo_builder::{UtxoArcBuilder, UtxoCoinBuilder}; +use coins::utxo::utxo_builder::{UtxoArcBuilder, UtxoCoinBuilder, UtxoSyncStatus, UtxoSyncStatusLoopHandle}; use coins::utxo::utxo_standard::UtxoStandardCoin; use coins::utxo::UtxoActivationParams; use coins::CoinProtocol; use crypto::CryptoCtx; +use futures::channel::mpsc::channel; +use futures::StreamExt; use mm2_core::mm_ctx::MmArc; use mm2_err_handle::prelude::*; use serde_json::Value as Json; @@ -48,28 +50,78 @@ impl InitStandaloneCoinActivationOps for UtxoStandardCoin { &activation_ctx.init_utxo_standard_task_manager } + // Todo: in test should check that it will continue syncing after the coin is activated async fn init_standalone_coin( ctx: MmArc, ticker: String, coin_conf: Json, activation_request: &Self::ActivationRequest, _protocol_info: Self::StandaloneProtocol, - _task_handle: &UtxoStandardRpcTaskHandle, + task_handle: &UtxoStandardRpcTaskHandle, ) -> MmResult { let crypto_ctx = CryptoCtx::from_ctx(&ctx)?; let priv_key_policy = priv_key_build_policy(&crypto_ctx, activation_request.priv_key_policy); + // todo: add a function for this instead of coin.as_ref().conf.enable_spv_proof && !coin.as_ref().rpc_client.is_native() + // Todo: should this step be used before this or maybe inside UtxoArcBuilder?? + // Todo: channel can be used to send errors after coin init to rotate_servers etc.. 
+ let (sync_status_loop_handle, maybe_sync_watcher) = + if coin_conf["enable_spv_proof"].as_bool().unwrap_or(false) && !activation_request.mode.is_native() { + let (sync_status_notifier, sync_watcher) = channel(1); + ( + Some(UtxoSyncStatusLoopHandle::new(sync_status_notifier)), + Some(sync_watcher), + ) + } else { + (None, None) + }; + let coin = UtxoArcBuilder::new( &ctx, &ticker, &coin_conf, activation_request, priv_key_policy, + sync_status_loop_handle, UtxoStandardCoin::from, ) .build() .await .mm_err(|e| InitUtxoStandardError::from_build_err(e, ticker.clone()))?; + + if let Some(mut sync_watcher) = maybe_sync_watcher { + loop { + // todo: should this be a timeouterror? + let in_progress_status = + match sync_watcher + .next() + .await + .ok_or(InitUtxoStandardError::CoinCreationError { + ticker: ticker.clone(), + error: "Error waiting for block headers synchronization status!".into(), + })? { + UtxoSyncStatus::SyncingBlockHeaders { + current_scanned_block, + latest_block, + } => UtxoStandardInProgressStatus::SyncingBlockHeaders { + current_scanned_block, + latest_block, + }, + UtxoSyncStatus::TemporaryError(e) => UtxoStandardInProgressStatus::TemporaryError(e), + // Todo: should it be a new error type other than CoinCreationError or maybe internal?? + UtxoSyncStatus::PermanentError(e) => { + return Err(InitUtxoStandardError::CoinCreationError { + ticker: ticker.clone(), + error: e, + } + .into()) + }, + UtxoSyncStatus::Finished { .. 
} => break, + }; + task_handle.update_in_progress_status(in_progress_status)?; + } + } + Ok(coin) } diff --git a/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_statuses.rs b/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_statuses.rs index 92ab03da56..6927966108 100644 --- a/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_statuses.rs +++ b/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_statuses.rs @@ -8,6 +8,11 @@ pub type UtxoStandardUserAction = HwRpcTaskUserAction; #[derive(Clone, Serialize)] pub enum UtxoStandardInProgressStatus { ActivatingCoin, + SyncingBlockHeaders { + current_scanned_block: u64, + latest_block: u64, + }, + TemporaryError(String), RequestingWalletBalance, Finishing, /// This status doesn't require the user to send `UserAction`, diff --git a/mm2src/mm2_bitcoin/spv_validation/src/spv_proof.rs b/mm2src/mm2_bitcoin/spv_validation/src/spv_proof.rs index acaee40dae..396417e6d1 100644 --- a/mm2src/mm2_bitcoin/spv_validation/src/spv_proof.rs +++ b/mm2src/mm2_bitcoin/spv_validation/src/spv_proof.rs @@ -31,7 +31,6 @@ pub struct SPVProof { /// # Notes /// Re-write with our own types based on `bitcoin_spv::std_types::SPVProof::validate` impl SPVProof { - // Todo: This can be a part of block headers storage trait or a trait implemented for it (or maybe pass blockheader storage to this function) pub fn validate(&self, validated_header: &BlockHeader) -> Result<(), SPVError> { if !validate_vin(self.vin.as_slice()) { return Err(SPVError::InvalidVin); From 59d1d9117445b17793fbac59956a24b0b36bd57a Mon Sep 17 00:00:00 2001 From: shamardy Date: Tue, 23 Aug 2022 19:35:15 +0200 Subject: [PATCH 21/33] fix tests --- mm2src/coins/utxo/utxo_tests.rs | 1 + .../src/utxo_activation/init_utxo_standard_activation.rs | 2 -- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/mm2src/coins/utxo/utxo_tests.rs b/mm2src/coins/utxo/utxo_tests.rs index 14b0267c81..45fc142bb4 100644 --- 
a/mm2src/coins/utxo/utxo_tests.rs +++ b/mm2src/coins/utxo/utxo_tests.rs @@ -60,6 +60,7 @@ pub fn electrum_client_for_test(servers: &[&str]) -> ElectrumClient { &Json::Null, ¶ms, priv_key_policy, + None, UtxoStandardCoin::from, ); let args = ElectrumBuilderArgs { diff --git a/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs b/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs index 166e79cec7..f56a5e40e5 100644 --- a/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs +++ b/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs @@ -91,7 +91,6 @@ impl InitStandaloneCoinActivationOps for UtxoStandardCoin { if let Some(mut sync_watcher) = maybe_sync_watcher { loop { - // todo: should this be a timeouterror? let in_progress_status = match sync_watcher .next() @@ -108,7 +107,6 @@ impl InitStandaloneCoinActivationOps for UtxoStandardCoin { latest_block, }, UtxoSyncStatus::TemporaryError(e) => UtxoStandardInProgressStatus::TemporaryError(e), - // Todo: should it be a new error type other than CoinCreationError or maybe internal?? 
UtxoSyncStatus::PermanentError(e) => { return Err(InitUtxoStandardError::CoinCreationError { ticker: ticker.clone(), From 2a95fa338647d414e324b7d9d4a3587639ddf852 Mon Sep 17 00:00:00 2001 From: shamardy Date: Fri, 26 Aug 2022 00:44:53 +0200 Subject: [PATCH 22/33] wip: add block headers synchronization to utxo activation v2 + test that block headers validation work for the complete BTC chain --- mm2src/coins/utxo/rpc_clients.rs | 18 +- .../coins/utxo/utxo_block_header_storage.rs | 71 ++++---- .../utxo/utxo_builder/utxo_arc_builder.rs | 21 ++- .../utxo/utxo_builder/utxo_coin_builder.rs | 12 +- mm2src/coins/utxo/utxo_common.rs | 67 +++---- .../utxo_indexedb_block_header_storage.rs | 22 +-- .../utxo/utxo_sql_block_header_storage.rs | 171 +++++++++--------- mm2src/coins/utxo/utxo_tests.rs | 22 ++- .../init_utxo_standard_activation.rs | 8 +- .../init_utxo_standard_statuses.rs | 4 +- mm2src/mm2_bitcoin/chain/src/block_header.rs | 49 ++++- .../mm2_bitcoin/serialization/src/reader.rs | 15 +- .../spv_validation/src/helpers_validation.rs | 17 +- .../mm2_bitcoin/spv_validation/src/storage.rs | 30 +-- mm2src/mm2_bitcoin/spv_validation/src/work.rs | 61 +++---- mm2src/mm2_main/src/mm2_tests.rs | 98 +++++++++- mm2src/mm2_main/src/mm2_tests/electrums.rs | 19 ++ mm2src/mm2_main/src/mm2_tests/structs.rs | 15 ++ mm2src/mm2_test_helpers/src/for_tests.rs | 90 +++++++++ 19 files changed, 525 insertions(+), 285 deletions(-) diff --git a/mm2src/coins/utxo/rpc_clients.rs b/mm2src/coins/utxo/rpc_clients.rs index 4222bfdc7f..f823fd9eb1 100644 --- a/mm2src/coins/utxo/rpc_clients.rs +++ b/mm2src/coins/utxo/rpc_clients.rs @@ -1870,7 +1870,11 @@ impl ElectrumClient { rpc_func!(self, "blockchain.block.headers", start_height, count) } - pub fn retrieve_headers(&self, from: u64, to: u64) -> UtxoRpcFut<(HashMap, Vec)> { + pub fn retrieve_headers( + &self, + from: u64, + to: u64, + ) -> UtxoRpcFut<(HashMap, Vec, u64)> { let coin_name = self.coin_ticker.clone(); if from == 0 || to < from { return 
Box::new(futures01::future::err( @@ -1885,7 +1889,7 @@ impl ElectrumClient { self.blockchain_block_headers(from, count) .map_to_mm_fut(UtxoRpcError::from) .and_then(move |headers| { - let (block_registry, block_headers) = { + let (block_registry, block_headers, last_height) = { if headers.count == 0 { return MmError::err(UtxoRpcError::Internal("No headers available".to_string())); } @@ -1905,9 +1909,9 @@ impl ElectrumClient { block_registry.insert(starting_height, block_header.clone()); starting_height += 1; } - (block_registry, block_headers) + (block_registry, block_headers, starting_height - 1) }; - Ok((block_registry, block_headers)) + Ok((block_registry, block_headers, last_height)) }), ) } @@ -1920,12 +1924,11 @@ impl ElectrumClient { // get_tx_height_from_rpc is costly since it loops through history after requesting the whole history of the script pubkey // This method should always be used if the block headers are saved to the DB async fn get_tx_height_from_storage(&self, tx: &UtxoTx) -> Result> { - let ticker = self.coin_name(); let tx_hash = tx.hash().reversed(); let blockhash = self.get_verbose_transaction(&tx_hash.into()).compat().await?.blockhash; Ok(self .block_headers_storage() - .get_block_height_by_hash(ticker, blockhash.into()) + .get_block_height_by_hash(blockhash.into()) .await? .ok_or_else(|| GetTxHeightError::HeightNotFound("Transaction block header is not found in storage".into()))? .try_into()?) @@ -1953,9 +1956,8 @@ impl ElectrumClient { } async fn block_header_from_storage(&self, height: u64) -> Result> { - let ticker = self.coin_name(); self.block_headers_storage() - .get_block_header(ticker, height) + .get_block_header(height) .await? 
.ok_or_else(|| GetBlockHeaderError::Internal("Header not in storage!".into()).into()) } diff --git a/mm2src/coins/utxo/utxo_block_header_storage.rs b/mm2src/coins/utxo/utxo_block_header_storage.rs index 83df2ff946..56065b460a 100644 --- a/mm2src/coins/utxo/utxo_block_header_storage.rs +++ b/mm2src/coins/utxo/utxo_block_header_storage.rs @@ -19,72 +19,75 @@ impl Debug for BlockHeaderStorage { } impl BlockHeaderStorage { - #[cfg(not(target_arch = "wasm32"))] - pub(crate) fn new_from_ctx(ctx: MmArc) -> Result { + #[cfg(all(not(test), not(target_arch = "wasm32")))] + pub(crate) fn new_from_ctx(ctx: MmArc, ticker: String) -> Result { let sqlite_connection = ctx.sqlite_connection.ok_or(BlockHeaderStorageError::Internal( "sqlite_connection is not initialized".to_owned(), ))?; Ok(BlockHeaderStorage { - inner: Box::new(SqliteBlockHeadersStorage(sqlite_connection.clone())), + inner: Box::new(SqliteBlockHeadersStorage { + ticker, + conn: sqlite_connection.clone(), + }), }) } #[cfg(target_arch = "wasm32")] - pub(crate) fn new_from_ctx(_ctx: MmArc) -> Result { + pub(crate) fn new_from_ctx(_ctx: MmArc, _ticker: String) -> Result { Ok(BlockHeaderStorage { inner: Box::new(IndexedDBBlockHeadersStorage {}), }) } + + #[cfg(all(test, not(target_arch = "wasm32")))] + pub(crate) fn new_from_ctx(ctx: MmArc, ticker: String) -> Result { + use db_common::sqlite::rusqlite::Connection; + use std::sync::{Arc, Mutex}; + + let sqlite_connection = Arc::new(Mutex::new(Connection::open_in_memory().unwrap())); + let sqlite_connection = ctx.sqlite_connection.clone_or(sqlite_connection); + + Ok(BlockHeaderStorage { + inner: Box::new(SqliteBlockHeadersStorage { + ticker, + conn: sqlite_connection, + }), + }) + } } #[async_trait] impl BlockHeaderStorageOps for BlockHeaderStorage { - async fn init(&self, for_coin: &str) -> Result<(), BlockHeaderStorageError> { self.inner.init(for_coin).await } + async fn init(&self) -> Result<(), BlockHeaderStorageError> { self.inner.init().await } - async fn 
is_initialized_for(&self, for_coin: &str) -> Result { - self.inner.is_initialized_for(for_coin).await + async fn is_initialized_for(&self) -> Result { + self.inner.is_initialized_for().await } async fn add_block_headers_to_storage( &self, - for_coin: &str, headers: HashMap, ) -> Result<(), BlockHeaderStorageError> { - self.inner.add_block_headers_to_storage(for_coin, headers).await + self.inner.add_block_headers_to_storage(headers).await } - async fn get_block_header( - &self, - for_coin: &str, - height: u64, - ) -> Result, BlockHeaderStorageError> { - self.inner.get_block_header(for_coin, height).await + async fn get_block_header(&self, height: u64) -> Result, BlockHeaderStorageError> { + self.inner.get_block_header(height).await } - async fn get_block_header_raw( - &self, - for_coin: &str, - height: u64, - ) -> Result, BlockHeaderStorageError> { - self.inner.get_block_header_raw(for_coin, height).await + async fn get_block_header_raw(&self, height: u64) -> Result, BlockHeaderStorageError> { + self.inner.get_block_header_raw(height).await } - async fn get_last_block_height(&self, for_coin: &str) -> Result { - self.inner.get_last_block_height(for_coin).await + async fn get_last_block_height(&self) -> Result { + self.inner.get_last_block_height().await } - async fn get_last_block_header_with_non_max_bits( - &self, - for_coin: &str, - ) -> Result, BlockHeaderStorageError> { - self.inner.get_last_block_header_with_non_max_bits(for_coin).await + async fn get_last_block_header_with_non_max_bits(&self) -> Result, BlockHeaderStorageError> { + self.inner.get_last_block_header_with_non_max_bits().await } - async fn get_block_height_by_hash( - &self, - for_coin: &str, - hash: H256, - ) -> Result, BlockHeaderStorageError> { - self.inner.get_block_height_by_hash(for_coin, hash).await + async fn get_block_height_by_hash(&self, hash: H256) -> Result, BlockHeaderStorageError> { + self.inner.get_block_height_by_hash(hash).await } } diff --git 
a/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs b/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs index 6d8645becf..3bbb769447 100644 --- a/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs +++ b/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs @@ -14,15 +14,10 @@ use mm2_err_handle::prelude::*; use serde_json::Value as Json; pub enum UtxoSyncStatus { - SyncingBlockHeaders { - current_scanned_block: u64, - latest_block: u64, - }, + SyncingBlockHeaders { from: u64, to: u64 }, TemporaryError(String), PermanentError(String), - Finished { - block_number: u64, - }, + Finished { block_number: u64 }, } #[derive(Clone)] @@ -33,6 +28,12 @@ impl UtxoSyncStatusLoopHandle { UtxoSyncStatusLoopHandle(sync_status_notifier) } + pub fn notify_blocks_headers_sync_status(&mut self, from: u64, to: u64) { + self.0 + .try_send(UtxoSyncStatus::SyncingBlockHeaders { from, to }) + .debug_log_with_msg("No one seems interested in UtxoSyncStatus"); + } + pub fn notify_on_temp_error(&mut self, error: String) { self.0 .try_send(UtxoSyncStatus::TemporaryError(error)) @@ -44,6 +45,12 @@ impl UtxoSyncStatusLoopHandle { .try_send(UtxoSyncStatus::PermanentError(error)) .debug_log_with_msg("No one seems interested in UtxoSyncStatus"); } + + pub fn notify_sync_finished(&mut self, block_number: u64) { + self.0 + .try_send(UtxoSyncStatus::Finished { block_number }) + .debug_log_with_msg("No one seems interested in UtxoSyncStatus"); + } } pub struct UtxoArcBuilder<'a, F, T> diff --git a/mm2src/coins/utxo/utxo_builder/utxo_coin_builder.rs b/mm2src/coins/utxo/utxo_builder/utxo_coin_builder.rs index c6324435ee..2562e05bfc 100644 --- a/mm2src/coins/utxo/utxo_builder/utxo_coin_builder.rs +++ b/mm2src/coins/utxo/utxo_builder/utxo_coin_builder.rs @@ -30,6 +30,7 @@ use mm2_err_handle::prelude::*; use primitives::hash::H256; use rand::seq::SliceRandom; use serde_json::{self as json, Value as Json}; +use spv_validation::storage::{BlockHeaderStorageError, BlockHeaderStorageOps}; use std::sync::{Arc, 
Mutex, Weak}; cfg_native! { @@ -77,6 +78,7 @@ pub enum UtxoCoinBuildError { fmt = "Coin doesn't support Trezor hardware wallet. Please consider adding the 'trezor_coin' field to the coins config" )] CoinDoesntSupportTrezor, + BlockHeaderStorageError(BlockHeaderStorageError), #[display(fmt = "Internal error: {}", _0)] Internal(String), } @@ -98,6 +100,10 @@ impl From for UtxoCoinBuildError { fn from(e: HDWalletStorageError) -> Self { UtxoCoinBuildError::HDWalletStorageError(e) } } +impl From for UtxoCoinBuildError { + fn from(e: BlockHeaderStorageError) -> Self { UtxoCoinBuildError::BlockHeaderStorageError(e) } +} + #[async_trait] pub trait UtxoCoinBuilder: UtxoFieldsWithIguanaPrivKeyBuilder + UtxoFieldsWithHardwareWalletBuilder { type ResultCoin; @@ -422,8 +428,12 @@ pub trait UtxoCoinBuilderCommonOps { event_handlers.push(ElectrumProtoVerifier { on_connect_tx }.into_shared()); } - let block_headers_storage = BlockHeaderStorage::new_from_ctx(self.ctx().clone()) + let storage_ticker = self.ticker().replace('-', "_"); + let block_headers_storage = BlockHeaderStorage::new_from_ctx(self.ctx().clone(), storage_ticker) .map_to_mm(|e| UtxoCoinBuildError::Internal(e.to_string()))?; + if !block_headers_storage.is_initialized_for().await? 
{ + block_headers_storage.init().await?; + } let mut rng = small_rng(); servers.as_mut_slice().shuffle(&mut rng); diff --git a/mm2src/coins/utxo/utxo_common.rs b/mm2src/coins/utxo/utxo_common.rs index 58acb4e45d..7dfcaefc2b 100644 --- a/mm2src/coins/utxo/utxo_common.rs +++ b/mm2src/coins/utxo/utxo_common.rs @@ -21,7 +21,7 @@ use chain::constants::SEQUENCE_FINAL; use chain::{OutPoint, TransactionOutput}; use common::executor::Timer; use common::jsonrpc_client::JsonRpcErrorType; -use common::log::{debug, error, info, warn}; +use common::log::{error, info, warn}; use common::{now_ms, one_hundred, ten_f64}; use crypto::{Bip32DerPathOps, Bip44Chain, Bip44DerPathError, Bip44DerivationPath, RpcDerivationPath}; use futures::compat::Future01CompatExt; @@ -3442,31 +3442,6 @@ pub async fn block_header_utxo_loop( constructor: impl Fn(UtxoArc) -> T, mut sync_status_loop_handle: UtxoSyncStatusLoopHandle, ) { - { - let coin = match weak.upgrade() { - Some(arc) => constructor(arc), - None => return, - }; - let ticker = coin.as_ref().conf.ticker.as_str(); - let storage = match &coin.as_ref().rpc_client { - UtxoRpcClientEnum::Native(_) => return, - UtxoRpcClientEnum::Electrum(e) => e.block_headers_storage(), - }; - match storage.is_initialized_for(ticker).await { - Ok(true) => info!("Block Header Storage already initialized for {}", ticker), - Ok(false) => { - if let Err(e) = storage.init(ticker).await { - error!( - "Couldn't initiate storage - aborting the block_header_utxo_loop: {:?}", - e - ); - return; - } - info!("Block Header Storage successfully initialized for {}", ticker); - }, - Err(_e) => return, - }; - } // Todo: should notify the status not only the errors while let Some(arc) = weak.upgrade() { let coin = constructor(arc); @@ -3476,14 +3451,13 @@ pub async fn block_header_utxo_loop( UtxoRpcClientEnum::Electrum(client) => client, }; - let ticker = coin.as_ref().conf.ticker.as_str(); let storage = client.block_headers_storage(); - let from_block_height = match 
storage.get_last_block_height(ticker).await { - Ok(h) => h + 1, + let from_block_height = match storage.get_last_block_height().await { + Ok(h) => h, Err(e) => { error!("Error {} on getting the height of the last stored header in DB!", e); sync_status_loop_handle.notify_on_temp_error(e.to_string()); - Timer::sleep(BLOCK_HEADERS_LOOP_INTERVAL).await; + Timer::sleep(10.).await; continue; }, }; @@ -3494,13 +3468,20 @@ pub async fn block_header_utxo_loop( Err(e) => { error!("Error {} on getting the height of the latest block from rpc!", e); sync_status_loop_handle.notify_on_temp_error(e.to_string()); - Timer::sleep(BLOCK_HEADERS_LOOP_INTERVAL).await; + Timer::sleep(10.).await; continue; }, }; - let (block_registry, block_headers) = match client - .retrieve_headers(from_block_height, to_block_height) + if from_block_height == to_block_height { + Timer::sleep(BLOCK_HEADERS_LOOP_INTERVAL).await; + continue; + } + + sync_status_loop_handle.notify_blocks_headers_sync_status(from_block_height + 1, to_block_height); + + let (block_registry, block_headers, last_retrieved_height) = match client + .retrieve_headers(from_block_height + 1, to_block_height) .compat() .await { @@ -3508,29 +3489,35 @@ pub async fn block_header_utxo_loop( Err(e) => { error!("Error {} on retrieving the latest headers from rpc!", e); sync_status_loop_handle.notify_on_temp_error(e.to_string()); - Timer::sleep(BLOCK_HEADERS_LOOP_INTERVAL).await; + Timer::sleep(10.).await; continue; }, }; + let ticker = coin.as_ref().conf.ticker.as_str(); // Todo: an attack can be used to send a fake header to fail validating and can't confirm a tx, should use a different server in such case (watch towers shall help) if let Some(params) = &coin.as_ref().conf.block_headers_verification_params { if let Err(e) = validate_headers(ticker, from_block_height, block_headers, storage, params).await { error!("Error {} on validating the latest headers!", e); sync_status_loop_handle.notify_on_permanent_error(e.to_string()); // Todo: 
should rotate_servers here, if error is not due to rpc (instead of waiting)??? (should also check if error is due to RPC or not before sending permanent_error) - Timer::sleep(BLOCK_HEADERS_LOOP_INTERVAL).await; - continue; + // Todo: when using rotate_servers add sleep and continue again + // Timer::sleep(10.).await; + // continue; + break; } } - // Todo: remove ok_or_continue_after_sleep ok_or_continue_after_sleep!( - storage.add_block_headers_to_storage(ticker, block_registry).await, + storage.add_block_headers_to_storage(block_registry).await, BLOCK_HEADERS_LOOP_INTERVAL ); - debug!("tick block_header_utxo_loop for {}", coin.as_ref().conf.ticker); - Timer::sleep(BLOCK_HEADERS_LOOP_INTERVAL).await; + + // blockchain.block.headers returns a maximum of 2016 headers (tested for btc) so the loop needs to continue until we have all headers up to the current one. + if last_retrieved_height == to_block_height { + sync_status_loop_handle.notify_sync_finished(to_block_height); + Timer::sleep(BLOCK_HEADERS_LOOP_INTERVAL).await; + } } } diff --git a/mm2src/coins/utxo/utxo_indexedb_block_header_storage.rs b/mm2src/coins/utxo/utxo_indexedb_block_header_storage.rs index 050e766ded..f1ec11c7be 100644 --- a/mm2src/coins/utxo/utxo_indexedb_block_header_storage.rs +++ b/mm2src/coins/utxo/utxo_indexedb_block_header_storage.rs @@ -15,7 +15,6 @@ impl BlockHeaderStorageOps for IndexedDBBlockHeadersStorage { async fn add_block_headers_to_storage( &self, - _for_coin: &str, _headers: HashMap, ) -> Result<(), BlockHeaderStorageError> { Ok(()) @@ -29,30 +28,15 @@ impl BlockHeaderStorageOps for IndexedDBBlockHeadersStorage { Ok(None) } - async fn get_block_header_raw( - &self, - _for_coin: &str, - _height: u64, - ) -> Result, BlockHeaderStorageError> { - Ok(None) - } + async fn get_block_header_raw(&self, _height: u64) -> Result, BlockHeaderStorageError> { Ok(None) } async fn get_last_block_height(&self, _for_coin: &str) -> Result { Err(BlockHeaderStorageError::Internal("Not 
implemented".into())) } - async fn get_last_block_header_with_non_max_bits( - &self, - _for_coin: &str, - ) -> Result, BlockHeaderStorageError> { + async fn get_last_block_header_with_non_max_bits(&self) -> Result, BlockHeaderStorageError> { Ok(None) } - async fn get_block_height_by_hash( - &self, - _for_coin: &str, - _hash: H256, - ) -> Result, BlockHeaderStorageError> { - Ok(None) - } + async fn get_block_height_by_hash(&self, _hash: H256) -> Result, BlockHeaderStorageError> { Ok(None) } } diff --git a/mm2src/coins/utxo/utxo_sql_block_header_storage.rs b/mm2src/coins/utxo/utxo_sql_block_header_storage.rs index 2100ad189e..ca88cc135e 100644 --- a/mm2src/coins/utxo/utxo_sql_block_header_storage.rs +++ b/mm2src/coins/utxo/utxo_sql_block_header_storage.rs @@ -7,6 +7,7 @@ use db_common::{sqlite::rusqlite::Error as SqlError, sqlite::validate_table_name, sqlite::CHECK_TABLE_EXISTS_SQL}; use primitives::hash::H256; +use serialization::{coin_variant_by_ticker, Reader}; use spv_validation::storage::{BlockHeaderStorageError, BlockHeaderStorageOps}; use spv_validation::work::MAX_BITS_BTC; use std::collections::HashMap; @@ -85,7 +86,10 @@ fn get_block_height_by_hash(for_coin: &str) -> Result>); +pub struct SqliteBlockHeadersStorage { + pub ticker: String, + pub conn: Arc>, +} fn query_single_row( conn: &Connection, @@ -106,12 +110,12 @@ where #[async_trait] impl BlockHeaderStorageOps for SqliteBlockHeadersStorage { - async fn init(&self, for_coin: &str) -> Result<(), BlockHeaderStorageError> { + async fn init(&self) -> Result<(), BlockHeaderStorageError> { + let coin = self.ticker.clone(); let selfi = self.clone(); - let sql_cache = create_block_header_cache_table_sql(for_coin)?; - let coin = for_coin.to_owned(); + let sql_cache = create_block_header_cache_table_sql(&coin)?; async_blocking(move || { - let conn = selfi.0.lock().unwrap(); + let conn = selfi.conn.lock().unwrap(); conn.execute(&sql_cache, NO_PARAMS).map(|_| ()).map_err(|e| { 
BlockHeaderStorageError::InitializationError { coin, @@ -123,11 +127,12 @@ impl BlockHeaderStorageOps for SqliteBlockHeadersStorage { .await } - async fn is_initialized_for(&self, for_coin: &str) -> Result { - let block_headers_cache_table = get_table_name_and_validate(for_coin)?; + async fn is_initialized_for(&self) -> Result { + let coin = self.ticker.clone(); + let block_headers_cache_table = get_table_name_and_validate(&coin)?; let selfi = self.clone(); async_blocking(move || { - let conn = selfi.0.lock().unwrap(); + let conn = selfi.conn.lock().unwrap(); let cache_initialized = query_single_row( &conn, CHECK_TABLE_EXISTS_SQL, @@ -141,17 +146,16 @@ impl BlockHeaderStorageOps for SqliteBlockHeadersStorage { async fn add_block_headers_to_storage( &self, - for_coin: &str, headers: HashMap, ) -> Result<(), BlockHeaderStorageError> { - let for_coin = for_coin.to_owned(); + let coin = self.ticker.clone(); let selfi = self.clone(); async_blocking(move || { - let mut conn = selfi.0.lock().unwrap(); + let mut conn = selfi.conn.lock().unwrap(); let sql_transaction = conn .transaction() .map_err(|e| BlockHeaderStorageError::AddToStorageError { - coin: for_coin.to_string(), + coin: coin.clone(), reason: e.to_string(), })?; @@ -167,16 +171,16 @@ impl BlockHeaderStorageOps for SqliteBlockHeadersStorage { &hash as &dyn ToSql, ]; sql_transaction - .execute(&insert_block_header_in_cache_sql(&for_coin)?, block_cache_params) + .execute(&insert_block_header_in_cache_sql(&coin.clone())?, block_cache_params) .map_err(|e| BlockHeaderStorageError::AddToStorageError { - coin: for_coin.to_string(), + coin: coin.clone(), reason: e.to_string(), })?; } sql_transaction .commit() .map_err(|e| BlockHeaderStorageError::AddToStorageError { - coin: for_coin.to_string(), + coin: coin.clone(), reason: e.to_string(), })?; Ok(()) @@ -184,17 +188,20 @@ impl BlockHeaderStorageOps for SqliteBlockHeadersStorage { .await } - async fn get_block_header( - &self, - for_coin: &str, - height: u64, - ) -> 
Result, BlockHeaderStorageError> { - if let Some(header_raw) = self.get_block_header_raw(for_coin, height).await? { + async fn get_block_header(&self, height: u64) -> Result, BlockHeaderStorageError> { + let coin = self.ticker.clone(); + if let Some(header_raw) = self.get_block_header_raw(height).await? { + let serialized = &hex::decode(header_raw).map_err(|e| BlockHeaderStorageError::DecodeError { + coin: coin.clone(), + reason: e.to_string(), + })?; + let coin_variant = coin_variant_by_ticker(&coin); + let mut reader = Reader::new_with_coin_variant(serialized, coin_variant); let header: BlockHeader = - header_raw - .try_into() + reader + .read() .map_err(|e: serialization::Error| BlockHeaderStorageError::DecodeError { - coin: for_coin.to_string(), + coin, reason: e.to_string(), })?; return Ok(Some(header)); @@ -202,61 +209,57 @@ impl BlockHeaderStorageOps for SqliteBlockHeadersStorage { Ok(None) } - async fn get_block_header_raw( - &self, - for_coin: &str, - height: u64, - ) -> Result, BlockHeaderStorageError> { + async fn get_block_header_raw(&self, height: u64) -> Result, BlockHeaderStorageError> { + let coin = self.ticker.clone(); let params = [height as i64]; - let sql = get_block_header_by_height(for_coin)?; + let sql = get_block_header_by_height(&coin)?; let selfi = self.clone(); async_blocking(move || { - let conn = selfi.0.lock().unwrap(); + let conn = selfi.conn.lock().unwrap(); query_single_row(&conn, &sql, params, string_from_row) }) .await .map_err(|e| BlockHeaderStorageError::GetFromStorageError { - coin: for_coin.to_string(), + coin, reason: e.to_string(), }) } - async fn get_last_block_height(&self, for_coin: &str) -> Result { - let sql = get_last_block_height_sql(for_coin)?; + async fn get_last_block_height(&self) -> Result { + let coin = self.ticker.clone(); + let sql = get_last_block_height_sql(&coin)?; let selfi = self.clone(); async_blocking(move || { - let conn = selfi.0.lock().unwrap(); + let conn = selfi.conn.lock().unwrap(); 
query_single_row(&conn, &sql, NO_PARAMS, |row| row.get(0)) }) .await .map_err(|e| BlockHeaderStorageError::GetFromStorageError { - coin: for_coin.to_string(), + coin: coin.clone(), reason: e.to_string(), })? .unwrap_or(0i64) .try_into() .map_err(|e: TryFromIntError| BlockHeaderStorageError::DecodeError { - coin: for_coin.to_string(), + coin, reason: e.to_string(), }) // last_block_height is 0 if the database is empty } - async fn get_last_block_header_with_non_max_bits( - &self, - for_coin: &str, - ) -> Result, BlockHeaderStorageError> { - let sql = get_last_block_header_with_non_max_bits_sql(for_coin)?; + async fn get_last_block_header_with_non_max_bits(&self) -> Result, BlockHeaderStorageError> { + let coin = self.ticker.clone(); + let sql = get_last_block_header_with_non_max_bits_sql(&coin)?; let selfi = self.clone(); let maybe_header_raw = async_blocking(move || { - let conn = selfi.0.lock().unwrap(); + let conn = selfi.conn.lock().unwrap(); query_single_row(&conn, &sql, NO_PARAMS, string_from_row) }) .await .map_err(|e| BlockHeaderStorageError::GetFromStorageError { - coin: for_coin.to_string(), + coin: coin.clone(), reason: e.to_string(), })?; @@ -265,7 +268,7 @@ impl BlockHeaderStorageOps for SqliteBlockHeadersStorage { header_raw .try_into() .map_err(|e: serialization::Error| BlockHeaderStorageError::DecodeError { - coin: for_coin.to_string(), + coin, reason: e.to_string(), })?; return Ok(Some(header)); @@ -273,22 +276,19 @@ impl BlockHeaderStorageOps for SqliteBlockHeadersStorage { Ok(None) } - async fn get_block_height_by_hash( - &self, - for_coin: &str, - hash: H256, - ) -> Result, BlockHeaderStorageError> { + async fn get_block_height_by_hash(&self, hash: H256) -> Result, BlockHeaderStorageError> { + let coin = self.ticker.clone(); let params = [hash.to_string()]; - let sql = get_block_height_by_hash(for_coin)?; + let sql = get_block_height_by_hash(&coin)?; let selfi = self.clone(); async_blocking(move || { - let conn = selfi.0.lock().unwrap(); + let 
conn = selfi.conn.lock().unwrap(); query_single_row(&conn, &sql, params, |row| row.get(0)) }) .await .map_err(|e| BlockHeaderStorageError::GetFromStorageError { - coin: for_coin.to_string(), + coin, reason: e.to_string(), }) } @@ -296,14 +296,17 @@ impl BlockHeaderStorageOps for SqliteBlockHeadersStorage { #[cfg(test)] impl SqliteBlockHeadersStorage { - pub fn in_memory() -> Self { - SqliteBlockHeadersStorage(Arc::new(Mutex::new(Connection::open_in_memory().unwrap()))) + pub fn in_memory(ticker: String) -> Self { + SqliteBlockHeadersStorage { + ticker, + conn: Arc::new(Mutex::new(Connection::open_in_memory().unwrap())), + } } fn is_table_empty(&self, table_name: &str) -> bool { validate_table_name(table_name).unwrap(); let sql = "SELECT COUNT(block_height) FROM ".to_owned() + table_name + ";"; - let conn = self.0.lock().unwrap(); + let conn = self.conn.lock().unwrap(); let rows_count: u32 = conn.query_row(&sql, NO_PARAMS, |row| row.get(0)).unwrap(); rows_count == 0 } @@ -319,75 +322,71 @@ mod sql_block_headers_storage_tests { #[test] fn test_init_collection() { let for_coin = "init_collection"; - let storage = SqliteBlockHeadersStorage::in_memory(); - let initialized = block_on(storage.is_initialized_for(for_coin)).unwrap(); + let storage = SqliteBlockHeadersStorage::in_memory(for_coin.into()); + let initialized = block_on(storage.is_initialized_for()).unwrap(); assert!(!initialized); - block_on(storage.init(for_coin)).unwrap(); + block_on(storage.init()).unwrap(); // repetitive init must not fail - block_on(storage.init(for_coin)).unwrap(); + block_on(storage.init()).unwrap(); - let initialized = block_on(storage.is_initialized_for(for_coin)).unwrap(); + let initialized = block_on(storage.is_initialized_for()).unwrap(); assert!(initialized); } #[test] fn test_add_block_headers() { let for_coin = "insert"; - let storage = SqliteBlockHeadersStorage::in_memory(); + let storage = SqliteBlockHeadersStorage::in_memory(for_coin.into()); let table = 
block_headers_cache_table(for_coin); - block_on(storage.init(for_coin)).unwrap(); + block_on(storage.init()).unwrap(); - let initialized = block_on(storage.is_initialized_for(for_coin)).unwrap(); + let initialized = block_on(storage.is_initialized_for()).unwrap(); assert!(initialized); let mut headers = HashMap::with_capacity(1); let block_header: BlockHeader = "0000002076d41d3e4b0bfd4c0d3b30aa69fdff3ed35d85829efd04000000000000000000b386498b583390959d9bac72346986e3015e83ac0b54bc7747a11a494ac35c94bb3ce65a53fb45177f7e311c".into(); headers.insert(520481, block_header); - block_on(storage.add_block_headers_to_storage(for_coin, headers)).unwrap(); + block_on(storage.add_block_headers_to_storage(headers)).unwrap(); assert!(!storage.is_table_empty(&table)); } #[test] fn test_get_block_header() { let for_coin = "get"; - let storage = SqliteBlockHeadersStorage::in_memory(); + let storage = SqliteBlockHeadersStorage::in_memory(for_coin.into()); let table = block_headers_cache_table(for_coin); - block_on(storage.init(for_coin)).unwrap(); + block_on(storage.init()).unwrap(); - let initialized = block_on(storage.is_initialized_for(for_coin)).unwrap(); + let initialized = block_on(storage.is_initialized_for()).unwrap(); assert!(initialized); let mut headers = HashMap::with_capacity(1); let block_header: BlockHeader = "0000002076d41d3e4b0bfd4c0d3b30aa69fdff3ed35d85829efd04000000000000000000b386498b583390959d9bac72346986e3015e83ac0b54bc7747a11a494ac35c94bb3ce65a53fb45177f7e311c".into(); headers.insert(520481, block_header); - block_on(storage.add_block_headers_to_storage(for_coin, headers)).unwrap(); + block_on(storage.add_block_headers_to_storage(headers)).unwrap(); assert!(!storage.is_table_empty(&table)); - let hex = block_on(storage.get_block_header_raw(for_coin, 520481)) - .unwrap() - .unwrap(); + let hex = block_on(storage.get_block_header_raw(520481)).unwrap().unwrap(); assert_eq!(hex, 
"0000002076d41d3e4b0bfd4c0d3b30aa69fdff3ed35d85829efd04000000000000000000b386498b583390959d9bac72346986e3015e83ac0b54bc7747a11a494ac35c94bb3ce65a53fb45177f7e311c".to_string()); - let block_header = block_on(storage.get_block_header(for_coin, 520481)).unwrap().unwrap(); + let block_header = block_on(storage.get_block_header(520481)).unwrap().unwrap(); let block_hash: H256 = "0000000000000000002e31d0714a5ab23100945ff87ba2d856cd566a3c9344ec".into(); assert_eq!(block_header.hash(), block_hash.reversed()); - let height = block_on(storage.get_block_height_by_hash(for_coin, block_hash)) - .unwrap() - .unwrap(); + let height = block_on(storage.get_block_height_by_hash(block_hash)).unwrap().unwrap(); assert_eq!(height, 520481); } #[test] fn test_get_last_block_header_with_non_max_bits() { let for_coin = "get"; - let storage = SqliteBlockHeadersStorage::in_memory(); + let storage = SqliteBlockHeadersStorage::in_memory(for_coin.into()); let table = block_headers_cache_table(for_coin); - block_on(storage.init(for_coin)).unwrap(); + block_on(storage.init()).unwrap(); - let initialized = block_on(storage.is_initialized_for(for_coin)).unwrap(); + let initialized = block_on(storage.is_initialized_for()).unwrap(); assert!(initialized); let mut headers = HashMap::with_capacity(2); @@ -406,10 +405,10 @@ mod sql_block_headers_storage_tests { let block_header: BlockHeader = "020000001f38c8e30b30af912fbd4c3e781506713cfb43e73dff6250348e060000000000afa8f3eede276ccb4c4ee649ad9823fc181632f262848ca330733e7e7e541beb9be51353ffff001d00a63037".into(); headers.insert(201593, block_header); - block_on(storage.add_block_headers_to_storage(for_coin, headers)).unwrap(); + block_on(storage.add_block_headers_to_storage(headers)).unwrap(); assert!(!storage.is_table_empty(&table)); - let actual_block_header = block_on(storage.get_last_block_header_with_non_max_bits(for_coin)) + let actual_block_header = block_on(storage.get_last_block_header_with_non_max_bits()) .unwrap() .unwrap(); 
assert_ne!(actual_block_header.bits, BlockHeaderBits::Compact(MAX_BITS_BTC.into())); @@ -419,11 +418,11 @@ mod sql_block_headers_storage_tests { #[test] fn test_get_last_block_height() { let for_coin = "get"; - let storage = SqliteBlockHeadersStorage::in_memory(); + let storage = SqliteBlockHeadersStorage::in_memory(for_coin.into()); let table = block_headers_cache_table(for_coin); - block_on(storage.init(for_coin)).unwrap(); + block_on(storage.init()).unwrap(); - let initialized = block_on(storage.is_initialized_for(for_coin)).unwrap(); + let initialized = block_on(storage.is_initialized_for()).unwrap(); assert!(initialized); let mut headers = HashMap::with_capacity(2); @@ -440,10 +439,10 @@ mod sql_block_headers_storage_tests { let block_header: BlockHeader = "020000001f38c8e30b30af912fbd4c3e781506713cfb43e73dff6250348e060000000000afa8f3eede276ccb4c4ee649ad9823fc181632f262848ca330733e7e7e541beb9be51353ffff001d00a63037".into(); headers.insert(201593, block_header); - block_on(storage.add_block_headers_to_storage(for_coin, headers)).unwrap(); + block_on(storage.add_block_headers_to_storage(headers)).unwrap(); assert!(!storage.is_table_empty(&table)); - let last_block_height = block_on(storage.get_last_block_height(for_coin)).unwrap(); + let last_block_height = block_on(storage.get_last_block_height()).unwrap(); assert_eq!(last_block_height, 201595); } } diff --git a/mm2src/coins/utxo/utxo_tests.rs b/mm2src/coins/utxo/utxo_tests.rs index 45fc142bb4..eaebe4f4e0 100644 --- a/mm2src/coins/utxo/utxo_tests.rs +++ b/mm2src/coins/utxo/utxo_tests.rs @@ -22,7 +22,7 @@ use crate::utxo::utxo_standard::{utxo_standard_coin_with_priv_key, UtxoStandardC #[cfg(not(target_arch = "wasm32"))] use crate::WithdrawFee; use crate::{CoinBalance, PrivKeyBuildPolicy, SearchForSwapTxSpendInput, StakingInfosDetails, SwapOps, TradePreimageValue, TxFeeDetails}; -use chain::OutPoint; +use chain::{BlockHeader, OutPoint}; use common::executor::Timer; use common::{block_on, now_ms, OrdRange, 
PagingOptionsEnum, DEX_FEE_ADDR_RAW_PUBKEY}; use crypto::{privkey::key_pair_from_seed, Bip44Chain, RpcDerivationPath}; @@ -35,6 +35,7 @@ use mm2_test_helpers::for_tests::RICK_ELECTRUM_ADDRS; use mocktopus::mocking::*; use rpc::v1::types::H256 as H256Json; use serialization::{deserialize, CoinVariant}; +use spv_validation::storage::BlockHeaderStorageOps; use std::convert::TryFrom; use std::iter; use std::mem::discriminant; @@ -471,9 +472,10 @@ fn test_wait_for_payment_spend_timeout_electrum() { }); let block_headers_storage = BlockHeaderStorage { - inner: Box::new(SqliteBlockHeadersStorage(Arc::new(Mutex::new( - Connection::open_in_memory().unwrap(), - )))), + inner: Box::new(SqliteBlockHeadersStorage { + ticker: TEST_COIN_NAME.into(), + conn: Arc::new(Mutex::new(Connection::open_in_memory().unwrap())), + }), }; let client = ElectrumClientImpl::new(TEST_COIN_NAME.into(), Default::default(), block_headers_storage); let client = UtxoRpcClientEnum::Electrum(ElectrumClient(Arc::new(client))); @@ -967,6 +969,18 @@ fn test_spv_proof() { let tx_str = "0400008085202f8902bf17bf7d1daace52e08f732a6b8771743ca4b1cb765a187e72fd091a0aabfd52000000006a47304402203eaaa3c4da101240f80f9c5e9de716a22b1ec6d66080de6a0cca32011cd77223022040d9082b6242d6acf9a1a8e658779e1c655d708379862f235e8ba7b8ca4e69c6012102031d4256c4bc9f99ac88bf3dba21773132281f65f9bf23a59928bce08961e2f3ffffffffff023ca13c0e9e085dd13f481f193e8a3e8fd609020936e98b5587342d994f4d020000006b483045022100c0ba56adb8de923975052312467347d83238bd8d480ce66e8b709a7997373994022048507bcac921fdb2302fa5224ce86e41b7efc1a2e20ae63aa738dfa99b7be826012102031d4256c4bc9f99ac88bf3dba21773132281f65f9bf23a59928bce08961e2f3ffffffff0300e1f5050000000017a9141ee6d4c38a3c078eab87ad1a5e4b00f21259b10d870000000000000000166a1400000000000000000000000000000000000000001b94d736000000001976a91405aab5342166f8594baf17a7d9bef5d56744332788ac2d08e35e000000000000000000000000000000"; let tx: UtxoTx = tx_str.into(); + let header: BlockHeader = deserialize( + 
block_on(client.blockchain_block_header(452248).compat()) + .unwrap() + .as_slice(), + ) + .unwrap(); + + let mut headers = HashMap::new(); + headers.insert(452248, header); + let storage = client.block_headers_storage(); + block_on(storage.add_block_headers_to_storage(headers)).unwrap(); + let res = block_on(client.validate_spv_proof(&tx, now_ms() / 1000 + 30)); res.unwrap(); } diff --git a/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs b/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs index f56a5e40e5..321360fd1d 100644 --- a/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs +++ b/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs @@ -99,12 +99,8 @@ impl InitStandaloneCoinActivationOps for UtxoStandardCoin { ticker: ticker.clone(), error: "Error waiting for block headers synchronization status!".into(), })? { - UtxoSyncStatus::SyncingBlockHeaders { - current_scanned_block, - latest_block, - } => UtxoStandardInProgressStatus::SyncingBlockHeaders { - current_scanned_block, - latest_block, + UtxoSyncStatus::SyncingBlockHeaders { from, to } => { + UtxoStandardInProgressStatus::SyncingBlockHeaders { from, to } }, UtxoSyncStatus::TemporaryError(e) => UtxoStandardInProgressStatus::TemporaryError(e), UtxoSyncStatus::PermanentError(e) => { diff --git a/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_statuses.rs b/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_statuses.rs index 6927966108..d52c13fad6 100644 --- a/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_statuses.rs +++ b/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_statuses.rs @@ -9,8 +9,8 @@ pub type UtxoStandardUserAction = HwRpcTaskUserAction; pub enum UtxoStandardInProgressStatus { ActivatingCoin, SyncingBlockHeaders { - current_scanned_block: u64, - latest_block: u64, + from: u64, + to: u64, }, TemporaryError(String), 
RequestingWalletBalance, diff --git a/mm2src/mm2_bitcoin/chain/src/block_header.rs b/mm2src/mm2_bitcoin/chain/src/block_header.rs index 1fa72a9438..86f9f35c58 100644 --- a/mm2src/mm2_bitcoin/chain/src/block_header.rs +++ b/mm2src/mm2_bitcoin/chain/src/block_header.rs @@ -166,7 +166,8 @@ impl Serializable for BlockHeader { }; s.append(&self.time); s.append(&self.bits); - if !self.is_prog_pow() && self.version != KAWPOW_VERSION { + // If a BTC header uses KAWPOW_VERSION, the nonce can't be zero + if !self.is_prog_pow() && (self.version != KAWPOW_VERSION || self.nonce != BlockHeaderNonce::U32(0)) { s.append(&self.nonce); } if let Some(sol) = &self.solution { @@ -234,21 +235,31 @@ impl Deserializable for BlockHeader { None }; - let hash_final_sapling_root = if version == 4 { Some(reader.read()?) } else { None }; + let hash_final_sapling_root = if version == 4 && !reader.coin_variant().is_btc() { + Some(reader.read()?) + } else { + None + }; let time = reader.read()?; - let bits = if version == 4 { + let bits = if version == 4 && !reader.coin_variant().is_btc() { BlockHeaderBits::U32(reader.read()?) } else { BlockHeaderBits::Compact(reader.read()?) }; - let nonce = if version == 4 { + let nonce = if version == 4 && !reader.coin_variant().is_btc() { BlockHeaderNonce::H256(reader.read()?) - } else if version == KAWPOW_VERSION || version == MTP_POW_VERSION && time >= PROG_POW_SWITCH_TIME { + } else if (version == KAWPOW_VERSION && !reader.coin_variant().is_btc()) + || version == MTP_POW_VERSION && time >= PROG_POW_SWITCH_TIME + { BlockHeaderNonce::U32(0) } else { BlockHeaderNonce::U32(reader.read()?) }; - let solution = if version == 4 { Some(reader.read_list()?) } else { None }; + let solution = if version == 4 && !reader.coin_variant().is_btc() { + Some(reader.read_list()?) 
+ } else { + None + }; // https://en.bitcoin.it/wiki/Merged_mining_specification#Merged_mining_coinbase let aux_pow = if matches!( @@ -296,7 +307,7 @@ impl Deserializable for BlockHeader { }; // https://github.com/RavenProject/Ravencoin/blob/61c790447a5afe150d9892705ac421d595a2df60/src/primitives/block.h#L67 - let (n_height, n_nonce_u64, mix_hash) = if version == KAWPOW_VERSION { + let (n_height, n_nonce_u64, mix_hash) = if version == KAWPOW_VERSION && !reader.coin_variant().is_btc() { (Some(reader.read()?), Some(reader.read()?), Some(reader.read()?)) } else { (None, None, None) @@ -2467,6 +2478,30 @@ mod tests { assert_eq!(serialized.take(), headers_bytes); } + #[test] + fn test_btc_v4_block_headers_serde_11() { + // https://live.blockcypher.com/btc/block/0000000000000000097336f8439779072501753e2f48b8798c66188139f2d9cf/ + let header = "04000000462a79dfa51b541648ee55df74cdc14b9ea7feb932e912060000000000000000374c1707a72691be50070bc5029d586e9200d672c6c3dfd29d267bf6b2b01b9e0ace395654a91118923bd9d5"; + let header_bytes = &header.from_hex::>().unwrap() as &[u8]; + let mut reader = Reader::new_with_coin_variant(header_bytes, CoinVariant::BTC); + let header = reader.read::().unwrap(); + assert_eq!(header.version, 4); + let serialized = serialize(&header); + assert_eq!(serialized.take(), header_bytes); + } + + #[test] + fn test_btc_kow_pow_version_block_headers_serde_11() { + // https://live.blockcypher.com/btc/block/000000000000000006e35d6675fb0fec767a5f3b346261a5160f6e2a8d258070/ + let header = "00000030af7e7389ca428b05d8902fcdc148e70974524d39cb56bc0100000000000000007ce0cd0c9c648d1b585d29b9ab23ebc987619d43925b3c768d7cb4bc097cfb821441c05614a107187aef1ee1"; + let header_bytes = &header.from_hex::>().unwrap() as &[u8]; + let mut reader = Reader::new_with_coin_variant(header_bytes, CoinVariant::BTC); + let header = reader.read::().unwrap(); + assert_eq!(header.version, KAWPOW_VERSION); + let serialized = serialize(&header); + assert_eq!(serialized.take(), header_bytes); + } 
+ #[test] fn test_from_blockheader_to_ext_blockheader() { // https://live.blockcypher.com/btc/block/00000000000000000020cf2bdc6563fb25c424af588d5fb7223461e72715e4a9/ diff --git a/mm2src/mm2_bitcoin/serialization/src/reader.rs b/mm2src/mm2_bitcoin/serialization/src/reader.rs index 6b9045431d..f437747c3c 100644 --- a/mm2src/mm2_bitcoin/serialization/src/reader.rs +++ b/mm2src/mm2_bitcoin/serialization/src/reader.rs @@ -54,18 +54,27 @@ pub enum CoinVariant { LBC, Standard, Qtum, + // Todo: https://github.com/KomodoPlatform/atomicDEX-API/issues/1345 + BTC, } impl CoinVariant { pub fn is_qtum(&self) -> bool { matches!(self, CoinVariant::Qtum) } pub fn is_lbc(&self) -> bool { matches!(self, CoinVariant::LBC) } + + pub fn is_btc(&self) -> bool { matches!(self, CoinVariant::BTC) } } pub fn coin_variant_by_ticker(ticker: &str) -> CoinVariant { - match ticker { - "LBC" => CoinVariant::LBC, - _ => CoinVariant::Standard, + if ticker == "BTC" || ticker.contains("BTC-") || ticker.contains("BTC_") { + // "BTC", "BTC-segwit", "tBTC", "tBTC-segwit", etc.. + CoinVariant::BTC + } else if ticker == "LBC" || ticker.contains("LBC-") || ticker.contains("LBC_") { + // "LBC", "LBC-segwit", etc.. 
+ CoinVariant::LBC + } else { + CoinVariant::Standard } } diff --git a/mm2src/mm2_bitcoin/spv_validation/src/helpers_validation.rs b/mm2src/mm2_bitcoin/spv_validation/src/helpers_validation.rs index cd16c74a3b..3c86ed89ad 100644 --- a/mm2src/mm2_bitcoin/spv_validation/src/helpers_validation.rs +++ b/mm2src/mm2_bitcoin/spv_validation/src/helpers_validation.rs @@ -27,8 +27,8 @@ pub enum SPVError { InsufficientWork, #[display(fmt = "Couldn't calculate the required difficulty for the block: {}", _0)] DifficultyCalculationError(NextBlockBitsError), - #[display(fmt = "Header in chain does not correctly reference parent header")] - InvalidChain, + #[display(fmt = "Header {} in chain does not correctly reference parent header", _0)] + InvalidChain(u64), #[display(fmt = "When validating a `BitcoinHeader`, the `hash` field is not the digest of the raw header")] WrongDigest, #[display( @@ -339,12 +339,13 @@ pub async fn validate_headers( storage: &dyn BlockHeaderStorageOps, params: &BlockHeaderVerificationParams, ) -> Result<(), SPVError> { + let mut previous_height = previous_height; let mut previous_header = if previous_height == 0 { // Todo: add validation earlier in coin activation (convert to blockheader struct there) BlockHeader::try_from(params.genesis_block_header.clone()).map_err(|e| SPVError::Internal(e.to_string()))? } else { storage - .get_block_header(coin, previous_height) + .get_block_header(previous_height) .await? 
.ok_or(BlockHeaderStorageError::GetFromStorageError { coin: coin.to_string(), @@ -357,8 +358,9 @@ pub async fn validate_headers( if previous_height == 0 { // previous_header is genesis header in this case, checking that the first header hash is the same as the genesis header hash is enough if header.hash() != previous_hash { - return Err(SPVError::InvalidChain); + return Err(SPVError::InvalidChain(previous_height + 1)); } + previous_height += 1; continue; } let cur_bits = header.bits.clone(); @@ -366,7 +368,7 @@ pub async fn validate_headers( return Err(SPVError::UnexpectedDifficultyChange); } if !validate_header_prev_hash(&header.previous_header_hash, &previous_hash) { - return Err(SPVError::InvalidChain); + return Err(SPVError::InvalidChain(previous_height + 1)); } if let Some(algorithm) = ¶ms.difficulty_algorithm { if !params.constant_difficulty @@ -388,6 +390,7 @@ pub async fn validate_headers( prev_bits = cur_bits; previous_header = header; previous_hash = previous_header.hash(); + previous_height += 1; } Ok(()) } @@ -599,7 +602,7 @@ mod tests { "MORTY", 1330480, headers, - &TestBlockHeadersStorage {}, + &TestBlockHeadersStorage { ticker: "MORTY".into() }, ¶ms, )) .unwrap() @@ -622,7 +625,7 @@ mod tests { "BTC", 724608, headers, - &TestBlockHeadersStorage {}, + &TestBlockHeadersStorage { ticker: "BTC".into() }, ¶ms, )) .unwrap() diff --git a/mm2src/mm2_bitcoin/spv_validation/src/storage.rs b/mm2src/mm2_bitcoin/spv_validation/src/storage.rs index 8795a1927b..7c007e01f3 100644 --- a/mm2src/mm2_bitcoin/spv_validation/src/storage.rs +++ b/mm2src/mm2_bitcoin/spv_validation/src/storage.rs @@ -42,43 +42,27 @@ pub enum BlockHeaderStorageError { #[async_trait] pub trait BlockHeaderStorageOps: Send + Sync + 'static { /// Initializes collection/tables in storage for a specified coin - async fn init(&self, for_coin: &str) -> Result<(), BlockHeaderStorageError>; + async fn init(&self) -> Result<(), BlockHeaderStorageError>; - async fn is_initialized_for(&self, for_coin: 
&str) -> Result; + async fn is_initialized_for(&self) -> Result; // Adds multiple block headers to the selected coin's header storage // Should store it as `COIN_HEIGHT=hex_string` // use this function for headers that comes from `blockchain_block_headers` async fn add_block_headers_to_storage( &self, - for_coin: &str, headers: HashMap, ) -> Result<(), BlockHeaderStorageError>; /// Gets the block header by height from the selected coin's storage as BlockHeader - async fn get_block_header( - &self, - for_coin: &str, - height: u64, - ) -> Result, BlockHeaderStorageError>; + async fn get_block_header(&self, height: u64) -> Result, BlockHeaderStorageError>; /// Gets the block header by height from the selected coin's storage as hex - async fn get_block_header_raw( - &self, - for_coin: &str, - height: u64, - ) -> Result, BlockHeaderStorageError>; + async fn get_block_header_raw(&self, height: u64) -> Result, BlockHeaderStorageError>; - async fn get_last_block_height(&self, for_coin: &str) -> Result; + async fn get_last_block_height(&self) -> Result; - async fn get_last_block_header_with_non_max_bits( - &self, - for_coin: &str, - ) -> Result, BlockHeaderStorageError>; + async fn get_last_block_header_with_non_max_bits(&self) -> Result, BlockHeaderStorageError>; - async fn get_block_height_by_hash( - &self, - for_coin: &str, - hash: H256, - ) -> Result, BlockHeaderStorageError>; + async fn get_block_height_by_hash(&self, hash: H256) -> Result, BlockHeaderStorageError>; } diff --git a/mm2src/mm2_bitcoin/spv_validation/src/work.rs b/mm2src/mm2_bitcoin/spv_validation/src/work.rs index 0d6fd625ea..8d31aedda0 100644 --- a/mm2src/mm2_bitcoin/spv_validation/src/work.rs +++ b/mm2src/mm2_bitcoin/spv_validation/src/work.rs @@ -26,7 +26,7 @@ fn is_retarget_height(height: u32) -> bool { height % RETARGETING_INTERVAL == 0 pub enum NextBlockBitsError { #[display(fmt = "Block headers storage error: {}", _0)] StorageError(BlockHeaderStorageError), - #[display(fmt = "Can't find Block 
header for {} with height {}", height, coin)] + #[display(fmt = "Can't find Block header for {} with height {}", coin, height)] NoSuchBlockHeader { coin: String, height: u64 }, #[display(fmt = "Can't find a Block header for {} with no max bits", coin)] NoBlockHeaderWithNoMaxBits { coin: String }, @@ -85,10 +85,16 @@ async fn btc_retarget_bits( last_block_header: BlockHeader, storage: &dyn BlockHeaderStorageOps, ) -> Result { + let max_bits_compact: Compact = MAX_BITS_BTC.into(); + let retarget_ref = (height - RETARGETING_INTERVAL).into(); + if retarget_ref == 0 { + return Ok(BlockHeaderBits::Compact(max_bits_compact)); + } + let retarget_header = storage - .get_block_header(coin, retarget_ref) + .get_block_header(retarget_ref) .await? .ok_or(NextBlockBitsError::NoSuchBlockHeader { coin: coin.into(), @@ -106,9 +112,7 @@ async fn btc_retarget_bits( let target_timespan_seconds: U256 = TARGET_TIMESPAN_SECONDS.into(); let retarget = retarget / target_timespan_seconds; - let max_bits_compact: Compact = MAX_BITS_BTC.into(); let max_bits: U256 = max_bits_compact.into(); - if retarget > max_bits { Ok(BlockHeaderBits::Compact(max_bits_compact)) } else { @@ -159,11 +163,12 @@ async fn btc_testnet_next_block_bits( } else if last_block_bits != max_bits { Ok(last_block_bits.clone()) } else { - let last_block_header_with_non_max_bits = storage - .get_last_block_header_with_non_max_bits(coin) + let last_non_max_bits = storage + .get_last_block_header_with_non_max_bits() .await? 
- .ok_or(NextBlockBitsError::NoBlockHeaderWithNoMaxBits { coin: coin.into() })?; - Ok(last_block_header_with_non_max_bits.bits) + .map(|header| header.bits) + .unwrap_or(max_bits); + Ok(last_non_max_bits) } } @@ -201,40 +206,33 @@ pub(crate) mod tests { .collect() } - pub(crate) struct TestBlockHeadersStorage {} + pub struct TestBlockHeadersStorage { + pub(crate) ticker: String, + } #[async_trait] impl BlockHeaderStorageOps for TestBlockHeadersStorage { - async fn init(&self, _for_coin: &str) -> Result<(), BlockHeaderStorageError> { Ok(()) } + async fn init(&self) -> Result<(), BlockHeaderStorageError> { Ok(()) } - async fn is_initialized_for(&self, _for_coin: &str) -> Result { Ok(true) } + async fn is_initialized_for(&self) -> Result { Ok(true) } async fn add_block_headers_to_storage( &self, - _for_coin: &str, _headers: HashMap, ) -> Result<(), BlockHeaderStorageError> { Ok(()) } - async fn get_block_header( - &self, - for_coin: &str, - height: u64, - ) -> Result, BlockHeaderStorageError> { - Ok(get_block_headers_for_coin(for_coin).get(&height).cloned()) + async fn get_block_header(&self, height: u64) -> Result, BlockHeaderStorageError> { + Ok(get_block_headers_for_coin(&self.ticker).get(&height).cloned()) } - async fn get_block_header_raw( - &self, - _for_coin: &str, - _height: u64, - ) -> Result, BlockHeaderStorageError> { + async fn get_block_header_raw(&self, _height: u64) -> Result, BlockHeaderStorageError> { Ok(None) } - async fn get_last_block_height(&self, for_coin: &str) -> Result { - Ok(get_block_headers_for_coin(for_coin) + async fn get_last_block_height(&self) -> Result { + Ok(get_block_headers_for_coin(&self.ticker) .into_keys() .max_by(|a, b| a.cmp(b)) .unwrap()) @@ -242,26 +240,21 @@ pub(crate) mod tests { async fn get_last_block_header_with_non_max_bits( &self, - for_coin: &str, ) -> Result, BlockHeaderStorageError> { - let mut headers = get_block_headers_for_coin(for_coin); + let mut headers = get_block_headers_for_coin(&self.ticker); 
headers.retain(|_, h| h.bits != BlockHeaderBits::Compact(MAX_BITS_BTC.into())); let header = headers.into_iter().max_by(|a, b| a.0.cmp(&b.0)); Ok(header.map(|(_, h)| h)) } - async fn get_block_height_by_hash( - &self, - _for_coin: &str, - _hash: H256, - ) -> Result, BlockHeaderStorageError> { + async fn get_block_height_by_hash(&self, _hash: H256) -> Result, BlockHeaderStorageError> { Ok(None) } } #[test] fn test_btc_mainnet_next_block_bits() { - let storage = TestBlockHeadersStorage {}; + let storage = TestBlockHeadersStorage { ticker: "BTC".into() }; let last_header: BlockHeader = "000000201d758432ecd495a2177b44d3fe6c22af183461a0b9ea0d0000000000000000008283a1dfa795d9b68bd8c18601e443368265072cbf8c76bfe58de46edd303798035de95d3eb2151756fdb0e8".into(); @@ -287,7 +280,7 @@ pub(crate) mod tests { #[test] fn test_btc_testnet_next_block_bits() { - let storage = TestBlockHeadersStorage {}; + let storage = TestBlockHeadersStorage { ticker: "tBTC".into() }; // https://live.blockcypher.com/btc-testnet/block/000000000057db3806384e2ec1b02b2c86bd928206ff8dff98f54d616b7fa5f2/ let current_header: BlockHeader = "02000000303505969a1df329e5fccdf69b847a201772e116e557eb7f119d1a9600000000469267f52f43b8799e72f0726ba2e56432059a8ad02b84d4fff84b9476e95f7716e41353ab80011c168cb471".into(); diff --git a/mm2src/mm2_main/src/mm2_tests.rs b/mm2src/mm2_main/src/mm2_tests.rs index 1d6a78f4b4..b2b307170e 100644 --- a/mm2src/mm2_main/src/mm2_tests.rs +++ b/mm2src/mm2_main/src/mm2_tests.rs @@ -8,10 +8,11 @@ use crypto::privkey::key_pair_from_seed; use http::{HeaderMap, StatusCode}; use mm2_metrics::{MetricType, MetricsJson}; use mm2_number::{BigDecimal, BigRational, Fraction, MmNumber}; -use mm2_test_helpers::for_tests::{check_my_swap_status, check_recent_swaps, check_stats_swap_status, - enable_native as enable_native_impl, enable_qrc20, find_metrics_in_json, - from_env_file, init_z_coin_light, init_z_coin_status, mm_spat, morty_conf, - rick_conf, sign_message, verify_message, 
wait_till_history_has_records, LocalStart, +use mm2_test_helpers::for_tests::{btc_with_spv_conf, check_my_swap_status, check_recent_swaps, + check_stats_swap_status, enable_native as enable_native_impl, enable_qrc20, + find_metrics_in_json, from_env_file, init_utxo_electrum, init_utxo_status, + init_z_coin_light, init_z_coin_status, mm_spat, morty_conf, rick_conf, sign_message, + tbtc_with_spv_conf, verify_message, wait_till_history_has_records, LocalStart, MarketMakerIt, Mm2TestConf, RaiiDump, MAKER_ERROR_EVENTS, MAKER_SUCCESS_EVENTS, MORTY, RICK, TAKER_ERROR_EVENTS, TAKER_SUCCESS_EVENTS}; use serde_json::{self as json, Value as Json}; @@ -201,6 +202,34 @@ async fn enable_z_coin_light( } } +async fn enable_utxo_v2_electrum( + mm: &MarketMakerIt, + coin: &str, + servers: Vec, + timeout: u64, +) -> UtxoStandardActivationResult { + let init = init_utxo_electrum(mm, coin, servers).await; + let init: RpcV2Response = json::from_value(init).unwrap(); + let timeout = now_ms() + (timeout * 1000); + + loop { + if now_ms() > timeout { + panic!("{} initialization timed out", coin); + } + + let status = init_utxo_status(mm, init.result.task_id).await; + let status: RpcV2Response = json::from_value(status).unwrap(); + log!("init_utxo_status: {:?}", status); + if let InitUtxoStatus::Ready(rpc_result) = status.result { + match rpc_result { + MmRpcResult::Ok { result } => break result, + MmRpcResult::Err(e) => panic!("{} initialization error {:?}", coin, e), + } + } + Timer::sleep(1.).await; + } +} + /// Integration test for RPC server. 
/// Check that MM doesn't crash in case of invalid RPC requests #[test] @@ -7524,3 +7553,64 @@ fn test_no_login() { .unwrap(); assert!(version.0.is_success(), "!version: {}", version.1); } + +// This test is ignored because block headers sync and validation can take some time +#[test] +#[ignore] +#[cfg(not(target_arch = "wasm32"))] +fn test_btc_block_header_sync() { + let coins = json!([btc_with_spv_conf()]); + + let mm_bob = MarketMakerIt::start( + json! ({ + "gui": "nogui", + "netid": 9998, + "myipaddr": env::var ("BOB_TRADE_IP") .ok(), + "rpcip": env::var ("BOB_TRADE_IP") .ok(), + "passphrase": "bob passphrase", + "coins": coins, + "rpc_password": "pass", + }), + "pass".into(), + local_start!("bob"), + ) + .unwrap(); + let (_dump_log, _dump_dashboard) = mm_bob.mm_dump(); + log!("log path: {}", mm_bob.log_path.display()); + + let utxo_bob = block_on(enable_utxo_v2_electrum(&mm_bob, "BTC", btc_electrums(), 600)); + log!("enable UTXO bob {:?}", utxo_bob); + + block_on(mm_bob.stop()).unwrap(); +} + +// This test is ignored because block headers sync and validation can take some time +// Todo: this test is failing, need a small fix in calculating btc_testnet_next_block_bits, and to add each block header individually while validating it. +#[test] +#[ignore] +#[cfg(not(target_arch = "wasm32"))] +fn test_tbtc_block_header_sync() { + let coins = json!([tbtc_with_spv_conf()]); + + let mm_bob = MarketMakerIt::start( + json! 
({ + "gui": "nogui", + "netid": 9998, + "myipaddr": env::var ("BOB_TRADE_IP") .ok(), + "rpcip": env::var ("BOB_TRADE_IP") .ok(), + "passphrase": "bob passphrase", + "coins": coins, + "rpc_password": "pass", + }), + "pass".into(), + local_start!("bob"), + ) + .unwrap(); + let (_dump_log, _dump_dashboard) = mm_bob.mm_dump(); + log!("log path: {}", mm_bob.log_path.display()); + + let utxo_bob = block_on(enable_utxo_v2_electrum(&mm_bob, "tBTC-TEST", tbtc_electrums(), 100000)); + log!("enable UTXO bob {:?}", utxo_bob); + + block_on(mm_bob.stop()).unwrap(); +} diff --git a/mm2src/mm2_main/src/mm2_tests/electrums.rs b/mm2src/mm2_main/src/mm2_tests/electrums.rs index 20728e4f23..c2855dbfe4 100644 --- a/mm2src/mm2_main/src/mm2_tests/electrums.rs +++ b/mm2src/mm2_main/src/mm2_tests/electrums.rs @@ -38,6 +38,25 @@ pub fn morty_electrums() -> Vec { ] } +#[allow(dead_code)] +#[cfg(target_arch = "wasm32")] +pub fn btc_electrums() -> Vec { + vec![ + json!({ "url": "electrum1.cipig.net:30000", "protocol": "WSS" }), + json!({ "url": "electrum2.cipig.net:30000", "protocol": "WSS" }), + json!({ "url": "electrum3.cipig.net:30000", "protocol": "WSS" }), + ] +} + +#[cfg(not(target_arch = "wasm32"))] +pub fn btc_electrums() -> Vec { + vec![ + json!({ "url": "electrum1.cipig.net:10000" }), + json!({ "url": "electrum2.cipig.net:10000" }), + json!({ "url": "electrum3.cipig.net:10000" }), + ] +} + #[allow(dead_code)] #[cfg(target_arch = "wasm32")] pub fn tbtc_electrums() -> Vec { diff --git a/mm2src/mm2_main/src/mm2_tests/structs.rs b/mm2src/mm2_main/src/mm2_tests/structs.rs index 30b9ffdc67..c884b8675b 100644 --- a/mm2src/mm2_main/src/mm2_tests/structs.rs +++ b/mm2src/mm2_main/src/mm2_tests/structs.rs @@ -622,6 +622,13 @@ pub struct ZcoinActivationResult { pub wallet_balance: EnableCoinBalance, } +#[derive(Debug, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct UtxoStandardActivationResult { + pub current_block: u64, + pub wallet_balance: EnableCoinBalance, +} + #[derive(Debug, 
Deserialize)] #[serde(deny_unknown_fields)] pub struct InitTaskResult { @@ -643,6 +650,14 @@ pub enum InitZcoinStatus { UserActionRequired(Json), } +#[derive(Debug, Deserialize)] +#[serde(deny_unknown_fields, tag = "status", content = "details")] +pub enum InitUtxoStatus { + Ready(MmRpcResult), + InProgress(Json), + UserActionRequired(Json), +} + #[derive(Debug, Deserialize)] #[serde(deny_unknown_fields, tag = "status", content = "details")] pub enum WithdrawStatus { diff --git a/mm2src/mm2_test_helpers/src/for_tests.rs b/mm2src/mm2_test_helpers/src/for_tests.rs index b68b5d31b5..4f298fb0f7 100644 --- a/mm2src/mm2_test_helpers/src/for_tests.rs +++ b/mm2src/mm2_test_helpers/src/for_tests.rs @@ -262,6 +262,56 @@ pub fn morty_conf() -> Json { }) } +pub fn btc_with_spv_conf() -> Json { + json!({ + "coin": "BTC", + "asset":"BTC", + "pubtype": 0, + "p2shtype": 5, + "wiftype": 128, + "segwit": true, + "bech32_hrp": "bc", + "txfee": 0, + "estimate_fee_mode": "ECONOMICAL", + "required_confirmations": 0, + "enable_spv_proof": true, + "protocol": { + "type": "UTXO" + }, + "block_headers_verification_params": { + "difficulty_check": true, + "constant_difficulty": false, + "difficulty_algorithm": "Bitcoin Mainnet", + "genesis_block_header": "010000006fe28c0ab6f1b372c1a6a246ae63f74f931e8365e15a089c68d6190000000000982051fd1e4ba744bbbe680e1fee14677ba1a3c3540bf7b1cdb606e857233e0e61bc6649ffff001d01e36299" + } + }) +} + +pub fn tbtc_with_spv_conf() -> Json { + json!({ + "coin": "tBTC-TEST", + "asset":"tBTC-TEST", + "pubtype": 0, + "p2shtype": 5, + "wiftype": 128, + "segwit": true, + "bech32_hrp": "tb", + "txfee": 0, + "estimate_fee_mode": "ECONOMICAL", + "required_confirmations": 0, + "enable_spv_proof": true, + "protocol": { + "type": "UTXO" + }, + "block_headers_verification_params": { + "difficulty_check": true, + "constant_difficulty": false, + "difficulty_algorithm": "Bitcoin Testnet", + "genesis_block_header": 
"0100000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000bac8b0fa927c0ac8234287e33c5f74d38d354820e24756ad709d7038fc5f31f020e7494dffff001d03e4b672" + } + }) +} + #[cfg(target_arch = "wasm32")] pub fn mm_ctx_with_custom_db() -> MmArc { MmCtxBuilder::new().with_test_db_namespace().into_mm_arc() } @@ -1611,3 +1661,43 @@ pub async fn send_raw_transaction(mm: &MarketMakerIt, coin: &str, tx: &str) -> J ); json::from_str(&request.1).unwrap() } + +pub async fn init_utxo_electrum(mm: &MarketMakerIt, coin: &str, servers: Vec) -> Json { + let request = mm + .rpc(&json! ({ + "userpass": mm.userpass, + "method": "init_utxo", + "mmrpc": "2.0", + "params": { + "ticker": coin, + "activation_params": { + "mode": { + "rpc": "Electrum", + "rpc_data": { + "servers": servers + } + } + }, + } + })) + .await + .unwrap(); + assert_eq!(request.0, StatusCode::OK, "'init_z_coin' failed: {}", request.1); + json::from_str(&request.1).unwrap() +} + +pub async fn init_utxo_status(mm: &MarketMakerIt, task_id: u64) -> Json { + let request = mm + .rpc(&json! 
({ + "userpass": mm.userpass, + "method": "init_utxo_status", + "mmrpc": "2.0", + "params": { + "task_id": task_id, + } + })) + .await + .unwrap(); + assert_eq!(request.0, StatusCode::OK, "'init_utxo_status' failed: {}", request.1); + json::from_str(&request.1).unwrap() +} From c3aa9510e9b8e584833f573b63f820992aeb4175 Mon Sep 17 00:00:00 2001 From: shamardy Date: Fri, 26 Aug 2022 01:12:34 +0200 Subject: [PATCH 23/33] fix wasm --- .../utxo/utxo_indexedb_block_header_storage.rs | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/mm2src/coins/utxo/utxo_indexedb_block_header_storage.rs b/mm2src/coins/utxo/utxo_indexedb_block_header_storage.rs index f1ec11c7be..ce7706b125 100644 --- a/mm2src/coins/utxo/utxo_indexedb_block_header_storage.rs +++ b/mm2src/coins/utxo/utxo_indexedb_block_header_storage.rs @@ -9,9 +9,9 @@ pub struct IndexedDBBlockHeadersStorage {} #[async_trait] impl BlockHeaderStorageOps for IndexedDBBlockHeadersStorage { - async fn init(&self, _for_coin: &str) -> Result<(), BlockHeaderStorageError> { Ok(()) } + async fn init(&self) -> Result<(), BlockHeaderStorageError> { Ok(()) } - async fn is_initialized_for(&self, _for_coin: &str) -> Result { Ok(true) } + async fn is_initialized_for(&self) -> Result { Ok(true) } async fn add_block_headers_to_storage( &self, @@ -20,17 +20,11 @@ impl BlockHeaderStorageOps for IndexedDBBlockHeadersStorage { Ok(()) } - async fn get_block_header( - &self, - _for_coin: &str, - _height: u64, - ) -> Result, BlockHeaderStorageError> { - Ok(None) - } + async fn get_block_header(&self, _height: u64) -> Result, BlockHeaderStorageError> { Ok(None) } async fn get_block_header_raw(&self, _height: u64) -> Result, BlockHeaderStorageError> { Ok(None) } - async fn get_last_block_height(&self, _for_coin: &str) -> Result { + async fn get_last_block_height(&self) -> Result { Err(BlockHeaderStorageError::Internal("Not implemented".into())) } From c6efe5ac7fe956a9f61ebe950b135f00aaac1eec Mon Sep 17 00:00:00 2001 
From: shamardy Date: Fri, 26 Aug 2022 22:03:03 +0200 Subject: [PATCH 24/33] get confirmed lightning tx from rpc if spv is not enabled --- mm2src/coins/lightning/ln_platform.rs | 32 ++++++++--- mm2src/coins/utxo.rs | 25 +++++++++ mm2src/coins/utxo/rpc_clients.rs | 39 +++++++++++-- mm2src/coins/utxo/spv.rs | 23 +++----- .../utxo/utxo_builder/utxo_arc_builder.rs | 55 +++++++++---------- mm2src/coins/utxo/utxo_common.rs | 8 +-- .../init_utxo_standard_activation.rs | 9 ++- .../init_utxo_standard_statuses.rs | 4 +- .../spv_validation/src/helpers_validation.rs | 1 - .../mm2_main/src/mm2_tests/lightning_tests.rs | 2 +- 10 files changed, 125 insertions(+), 73 deletions(-) diff --git a/mm2src/coins/lightning/ln_platform.rs b/mm2src/coins/lightning/ln_platform.rs index a9e69f9473..e99226b374 100644 --- a/mm2src/coins/lightning/ln_platform.rs +++ b/mm2src/coins/lightning/ln_platform.rs @@ -1,9 +1,11 @@ use super::*; use crate::lightning::ln_errors::{SaveChannelClosingError, SaveChannelClosingResult}; -use crate::utxo::rpc_clients::{BestBlock as RpcBestBlock, BlockHashOrHeight, ElectrumBlockHeader, ElectrumClient, - ElectrumNonce, EstimateFeeMethod, UtxoRpcClientEnum, UtxoRpcResult}; -use crate::utxo::spv::{ConfirmedTransactionInfo, SimplePaymentVerification}; +use crate::utxo::rpc_clients::{BestBlock as RpcBestBlock, BlockHashOrHeight, ConfirmedTransactionInfo, + ElectrumBlockHeader, ElectrumClient, ElectrumNonce, EstimateFeeMethod, + UtxoRpcClientEnum, UtxoRpcResult}; +use crate::utxo::spv::SimplePaymentVerification; use crate::utxo::utxo_standard::UtxoStandardCoin; +use crate::utxo::GetConfirmedTxError; use crate::{MarketCoinOps, MmCoin}; use bitcoin::blockdata::block::BlockHeader; use bitcoin::blockdata::script::Script; @@ -310,11 +312,17 @@ impl Platform { }, }); + let is_spv_enabled = self.coin.as_ref().conf.enable_spv_proof; let confirmed_transactions_futs = on_chain_txs .map(|transaction| async move { - client - .validate_spv_proof(&transaction, (now_ms() / 1000) + 
TRY_SPV_PROOF_INTERVAL) - .await + if is_spv_enabled { + client + .validate_spv_proof(&transaction, (now_ms() / 1000) + TRY_SPV_PROOF_INTERVAL) + .await + .map_err(GetConfirmedTxError::SPVError) + } else { + client.get_confirmed_tx_info_from_rpc(&transaction).await + } }) .collect::>(); join_all(confirmed_transactions_futs) @@ -372,12 +380,18 @@ impl Platform { .any(|info| info.tx.hash() == output.spending_tx.hash()) }); + let is_spv_enabled = self.coin.as_ref().conf.enable_spv_proof; let confirmed_transactions_futs = spent_outputs_info .into_iter() .map(|output| async move { - client - .validate_spv_proof(&output.spending_tx, (now_ms() / 1000) + TRY_SPV_PROOF_INTERVAL) - .await + if is_spv_enabled { + client + .validate_spv_proof(&output.spending_tx, (now_ms() / 1000) + TRY_SPV_PROOF_INTERVAL) + .await + .map_err(GetConfirmedTxError::SPVError) + } else { + client.get_confirmed_tx_info_from_rpc(&output.spending_tx).await + } }) .collect::>(); let mut confirmed_transaction_info = join_all(confirmed_transactions_futs) diff --git a/mm2src/coins/utxo.rs b/mm2src/coins/utxo.rs index 1ab074f2b2..0b9998a533 100644 --- a/mm2src/coins/utxo.rs +++ b/mm2src/coins/utxo.rs @@ -658,6 +658,31 @@ impl From for SPVError { fn from(e: GetBlockHeaderError) -> Self { SPVError::UnableToGetHeader(e.to_string()) } } +#[derive(Debug, Display)] +pub enum GetConfirmedTxError { + HeightNotFound(GetTxHeightError), + UnableToGetHeader(GetBlockHeaderError), + RpcError(JsonRpcError), + SerializationError(serialization::Error), + SPVError(SPVError), +} + +impl From for GetConfirmedTxError { + fn from(err: GetTxHeightError) -> Self { GetConfirmedTxError::HeightNotFound(err) } +} + +impl From for GetConfirmedTxError { + fn from(err: GetBlockHeaderError) -> Self { GetConfirmedTxError::UnableToGetHeader(err) } +} + +impl From for GetConfirmedTxError { + fn from(err: JsonRpcError) -> Self { GetConfirmedTxError::RpcError(err) } +} + +impl From for GetConfirmedTxError { + fn from(err: 
serialization::Error) -> Self { GetConfirmedTxError::SerializationError(err) } +} + impl UtxoCoinFields { pub fn transaction_preimage(&self) -> TransactionInputSigner { let lock_time = if self.conf.ticker == "KMD" { diff --git a/mm2src/coins/utxo/rpc_clients.rs b/mm2src/coins/utxo/rpc_clients.rs index f823fd9eb1..14410ebf0e 100644 --- a/mm2src/coins/utxo/rpc_clients.rs +++ b/mm2src/coins/utxo/rpc_clients.rs @@ -2,7 +2,8 @@ #![cfg_attr(target_arch = "wasm32", allow(dead_code))] use crate::utxo::utxo_block_header_storage::BlockHeaderStorage; -use crate::utxo::{output_script, sat_from_big_decimal, GetBlockHeaderError, GetTxError, GetTxHeightError}; +use crate::utxo::{output_script, sat_from_big_decimal, GetBlockHeaderError, GetConfirmedTxError, GetTxError, + GetTxHeightError}; use crate::{big_decimal_from_sat_unsigned, NumConversError, RpcTransportEventHandler, RpcTransportEventHandlerShared}; use async_trait::async_trait; use chain::{BlockHeader, BlockHeaderBits, BlockHeaderNonce, OutPoint, Transaction as UtxoTx}; @@ -1256,6 +1257,14 @@ pub struct TxMerkleBranch { pub pos: usize, } +#[derive(Clone)] +pub struct ConfirmedTransactionInfo { + pub tx: UtxoTx, + pub header: BlockHeader, + pub index: u64, + pub height: u64, +} + #[derive(Debug, PartialEq)] pub struct BestBlock { pub height: u64, @@ -1567,7 +1576,6 @@ pub struct ElectrumClientImpl { protocol_version: OrdRange, get_balance_concurrent_map: ConcurrentRequestMap, list_unspent_concurrent_map: ConcurrentRequestMap>, - // Todo: make this not optional and check if spv is enabled to do other stuff (in coins activation add it to task manager maybe) block_headers_storage: BlockHeaderStorage, } @@ -1936,9 +1944,7 @@ impl ElectrumClient { // get_tx_height_from_storage is always preferred to be used instead of this, but if there is no headers in storage (storing headers is not enabled) // this function can be used instead - // Todo: This can be used in lightning if spv is not enabled for platform coin (connected to 
trusted server), will remove #[allow(dead_code)] after it's used - #[allow(dead_code)] - async fn get_tx_height_from_rpc(&self, tx: &UtxoTx) -> Result> { + async fn get_tx_height_from_rpc(&self, tx: &UtxoTx) -> Result { for output in tx.outputs.clone() { let script_pubkey_str = hex::encode(electrum_script_hash(&output.script_pubkey)); if let Ok(history) = self.scripthash_get_history(script_pubkey_str.as_str()).compat().await { @@ -1950,7 +1956,7 @@ impl ElectrumClient { } } } - MmError::err(GetTxHeightError::HeightNotFound( + Err(GetTxHeightError::HeightNotFound( "Couldn't find height through electrum!".into(), )) } @@ -1971,6 +1977,27 @@ impl ElectrumClient { } } + pub async fn get_confirmed_tx_info_from_rpc( + &self, + tx: &UtxoTx, + ) -> Result { + let height = self.get_tx_height_from_rpc(tx).await?; + + let merkle_branch = self + .blockchain_transaction_get_merkle(tx.hash().reversed().into(), height) + .compat() + .await?; + + let header = deserialize(self.blockchain_block_header(height).compat().await?.as_slice())?; + + Ok(ConfirmedTransactionInfo { + tx: tx.clone(), + header, + index: merkle_branch.pos as u64, + height, + }) + } + pub async fn get_merkle_and_validated_header( &self, tx: &UtxoTx, diff --git a/mm2src/coins/utxo/spv.rs b/mm2src/coins/utxo/spv.rs index 2606b065ec..6084a405ec 100644 --- a/mm2src/coins/utxo/spv.rs +++ b/mm2src/coins/utxo/spv.rs @@ -1,30 +1,21 @@ -use crate::utxo::rpc_clients::ElectrumClient; +use crate::utxo::rpc_clients::{ConfirmedTransactionInfo, ElectrumClient}; use async_trait::async_trait; -use chain::{BlockHeader, Transaction as UtxoTx}; +use chain::Transaction as UtxoTx; use common::executor::Timer; use common::log::error; use common::now_ms; use keys::hash::H256; -use mm2_err_handle::prelude::*; use serialization::serialize_list; use spv_validation::helpers_validation::SPVError; use spv_validation::spv_proof::{SPVProof, TRY_SPV_PROOF_INTERVAL}; -#[derive(Clone)] -pub struct ConfirmedTransactionInfo { - pub tx: UtxoTx, - pub 
header: BlockHeader, - pub index: u64, - pub height: u64, -} - #[async_trait] pub trait SimplePaymentVerification { async fn validate_spv_proof( &self, tx: &UtxoTx, try_spv_proof_until: u64, - ) -> Result>; + ) -> Result; } #[async_trait] @@ -33,9 +24,9 @@ impl SimplePaymentVerification for ElectrumClient { &self, tx: &UtxoTx, try_spv_proof_until: u64, - ) -> Result> { + ) -> Result { if tx.outputs.is_empty() { - return MmError::err(SPVError::InvalidVout); + return Err(SPVError::InvalidVout); } let (merkle_branch, validated_header, height) = loop { @@ -46,7 +37,7 @@ impl SimplePaymentVerification for ElectrumClient { try_spv_proof_until, tx.hash().reversed(), ); - return MmError::err(SPVError::Timeout); + return Err(SPVError::Timeout); } match self.get_merkle_and_validated_header(tx).await { @@ -78,7 +69,7 @@ impl SimplePaymentVerification for ElectrumClient { intermediate_nodes, }; - proof.validate(&validated_header).map_err(MmError::new)?; + proof.validate(&validated_header)?; Ok(ConfirmedTransactionInfo { tx: tx.clone(), diff --git a/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs b/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs index 3bbb769447..b45faa14d1 100644 --- a/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs +++ b/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs @@ -1,4 +1,3 @@ -use crate::utxo::rpc_clients::UtxoRpcClientEnum; use crate::utxo::utxo_builder::{UtxoCoinBuildError, UtxoCoinBuilder, UtxoCoinBuilderCommonOps, UtxoFieldsWithHardwareWalletBuilder, UtxoFieldsWithIguanaPrivKeyBuilder}; use crate::utxo::utxo_common::{block_header_utxo_loop, merge_utxo_loop}; @@ -14,10 +13,15 @@ use mm2_err_handle::prelude::*; use serde_json::Value as Json; pub enum UtxoSyncStatus { - SyncingBlockHeaders { from: u64, to: u64 }, + SyncingBlockHeaders { + current_scanned_block: u64, + last_block: u64, + }, TemporaryError(String), PermanentError(String), - Finished { block_number: u64 }, + Finished { + block_number: u64, + }, } #[derive(Clone)] @@ -28,9 
+32,12 @@ impl UtxoSyncStatusLoopHandle { UtxoSyncStatusLoopHandle(sync_status_notifier) } - pub fn notify_blocks_headers_sync_status(&mut self, from: u64, to: u64) { + pub fn notify_blocks_headers_sync_status(&mut self, current_scanned_block: u64, last_block: u64) { self.0 - .try_send(UtxoSyncStatus::SyncingBlockHeaders { from, to }) + .try_send(UtxoSyncStatus::SyncingBlockHeaders { + current_scanned_block, + last_block, + }) .debug_log_with_msg("No one seems interested in UtxoSyncStatus"); } @@ -130,7 +137,6 @@ where async fn build(self) -> MmResult { let utxo = self.build_utxo_fields().await?; - let rpc_client = utxo.rpc_client.clone(); let utxo_arc = UtxoArc::new(utxo); let utxo_weak = utxo_arc.downgrade(); let result_coin = (self.constructor)(utxo_arc); @@ -141,8 +147,7 @@ where } // Todo: find a better way for this - if let Some(abort_handler) = - self.spawn_block_header_utxo_loop_if_required(utxo_weak, &rpc_client, self.constructor.clone()) + if let Some(abort_handler) = self.spawn_block_header_utxo_loop_if_required(utxo_weak, self.constructor.clone()) { self.ctx.abort_handlers.lock().unwrap().push(abort_handler); } @@ -193,33 +198,25 @@ pub trait MergeUtxoArcOps: UtxoCoinBuilderCom pub trait BlockHeaderUtxoArcOps: UtxoCoinBuilderCommonOps { // Todo: this should be called only if storing headers is enabled and should be called after syncing the latest header on coin activation // Todo: probably this function needs to be refactored - fn spawn_block_header_utxo_loop_if_required( - &self, - weak: UtxoWeak, - rpc_client: &UtxoRpcClientEnum, - constructor: F, - ) -> Option + fn spawn_block_header_utxo_loop_if_required(&self, weak: UtxoWeak, constructor: F) -> Option where F: Fn(UtxoArc) -> T + Send + Sync + 'static, T: UtxoCommonOps, { // Todo: add condition for enable_spv_proof (should block headers be saved when enable_spv_proof is true only? what about for getting tx height?) 
- // Todo: because of sync_status_loop_handle this whole function might be refactored (rpc_client.is_native() should be checked when creating sync_status_loop_handle) if let Some(sync_status_loop_handle) = self.sync_status_loop_handle() { - if !rpc_client.is_native() { - let ticker = self.ticker().to_owned(); - let (fut, abort_handle) = abortable(block_header_utxo_loop(weak, constructor, sync_status_loop_handle)); - info!("Starting UTXO block header loop for coin {}", ticker); - spawn(async move { - if let Err(e) = fut.await { - info!( - "spawn_block_header_utxo_loop_if_required stopped for {}, reason {}", - ticker, e - ); - } - }); - return Some(abort_handle); - } + let ticker = self.ticker().to_owned(); + let (fut, abort_handle) = abortable(block_header_utxo_loop(weak, constructor, sync_status_loop_handle)); + info!("Starting UTXO block header loop for coin {}", ticker); + spawn(async move { + if let Err(e) = fut.await { + info!( + "spawn_block_header_utxo_loop_if_required stopped for {}, reason {}", + ticker, e + ); + } + }); + return Some(abort_handle); } None diff --git a/mm2src/coins/utxo/utxo_common.rs b/mm2src/coins/utxo/utxo_common.rs index 99f76aca77..9478f98ca8 100644 --- a/mm2src/coins/utxo/utxo_common.rs +++ b/mm2src/coins/utxo/utxo_common.rs @@ -3157,7 +3157,7 @@ pub fn validate_payment( client .validate_spv_proof(&tx, try_spv_proof_until) .await - .map_err(|e| format!("{:?}", e))?; + .map_err(|e| format!("{}", e))?; } } @@ -3445,18 +3445,14 @@ fn increase_by_percent(num: u64, percent: f64) -> u64 { num + (percent.round() as u64) } -// Todo: This loop needs to be called when getting headers is enabled in conf only after getting all the headers when activating coin -// Todo: add test for enabling utxo with enable_spv_proof to check that all the headers are retrieved right (should be ignored cause it will take a long time) pub async fn block_header_utxo_loop( weak: UtxoWeak, constructor: impl Fn(UtxoArc) -> T, mut sync_status_loop_handle: 
UtxoSyncStatusLoopHandle, ) { - // Todo: should notify the status not only the errors while let Some(arc) = weak.upgrade() { let coin = constructor(arc); let client = match &coin.as_ref().rpc_client { - // Todo: should I send UtxoSyncStatus::Finished here just in case? UtxoRpcClientEnum::Native(_) => break, UtxoRpcClientEnum::Electrum(client) => client, }; @@ -3472,7 +3468,6 @@ pub async fn block_header_utxo_loop( }, }; - // Todo: what to do about chain reorganization?? let to_block_height = match coin.as_ref().rpc_client.get_block_count().compat().await { Ok(h) => h, Err(e) => { @@ -3483,6 +3478,7 @@ pub async fn block_header_utxo_loop( }, }; + // Todo: Add code for the case if a chain reorganization happens if from_block_height == to_block_height { Timer::sleep(BLOCK_HEADERS_LOOP_INTERVAL).await; continue; diff --git a/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs b/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs index 321360fd1d..0ff1b3fa90 100644 --- a/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs +++ b/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs @@ -50,7 +50,6 @@ impl InitStandaloneCoinActivationOps for UtxoStandardCoin { &activation_ctx.init_utxo_standard_task_manager } - // Todo: in test should check that it will continue syncing after the coin is activated async fn init_standalone_coin( ctx: MmArc, ticker: String, @@ -99,8 +98,12 @@ impl InitStandaloneCoinActivationOps for UtxoStandardCoin { ticker: ticker.clone(), error: "Error waiting for block headers synchronization status!".into(), })? 
{ - UtxoSyncStatus::SyncingBlockHeaders { from, to } => { - UtxoStandardInProgressStatus::SyncingBlockHeaders { from, to } + UtxoSyncStatus::SyncingBlockHeaders { + current_scanned_block, + last_block, + } => UtxoStandardInProgressStatus::SyncingBlockHeaders { + current_scanned_block, + last_block, }, UtxoSyncStatus::TemporaryError(e) => UtxoStandardInProgressStatus::TemporaryError(e), UtxoSyncStatus::PermanentError(e) => { diff --git a/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_statuses.rs b/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_statuses.rs index d52c13fad6..daf1692b42 100644 --- a/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_statuses.rs +++ b/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_statuses.rs @@ -9,8 +9,8 @@ pub type UtxoStandardUserAction = HwRpcTaskUserAction; pub enum UtxoStandardInProgressStatus { ActivatingCoin, SyncingBlockHeaders { - from: u64, - to: u64, + current_scanned_block: u64, + last_block: u64, }, TemporaryError(String), RequestingWalletBalance, diff --git a/mm2src/mm2_bitcoin/spv_validation/src/helpers_validation.rs b/mm2src/mm2_bitcoin/spv_validation/src/helpers_validation.rs index 3c86ed89ad..0c6976ca50 100644 --- a/mm2src/mm2_bitcoin/spv_validation/src/helpers_validation.rs +++ b/mm2src/mm2_bitcoin/spv_validation/src/helpers_validation.rs @@ -341,7 +341,6 @@ pub async fn validate_headers( ) -> Result<(), SPVError> { let mut previous_height = previous_height; let mut previous_header = if previous_height == 0 { - // Todo: add validation earlier in coin activation (convert to blockheader struct there) BlockHeader::try_from(params.genesis_block_header.clone()).map_err(|e| SPVError::Internal(e.to_string()))? 
} else { storage diff --git a/mm2src/mm2_main/src/mm2_tests/lightning_tests.rs b/mm2src/mm2_main/src/mm2_tests/lightning_tests.rs index f5e67a4be5..19711a08fc 100644 --- a/mm2src/mm2_main/src/mm2_tests/lightning_tests.rs +++ b/mm2src/mm2_main/src/mm2_tests/lightning_tests.rs @@ -36,7 +36,7 @@ fn start_lightning_nodes(enable_0_confs: bool) -> (MarketMakerIt, MarketMakerIt, "coin": "tBTC-TEST-lightning", "mm2": 1, "decimals": 11, - "our_channels_config": { + "our_channels_configs": { "inbound_channels_confirmations": 1 }, "counterparty_channel_config_limits": { From 9283e019cd305f36b9ca2ec6d4bd851617d6ee68 Mon Sep 17 00:00:00 2001 From: shamardy Date: Fri, 26 Aug 2022 23:18:01 +0200 Subject: [PATCH 25/33] fix todo comments --- mm2src/coins/utxo/qtum.rs | 9 --------- mm2src/coins/utxo/spv.rs | 2 +- mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs | 5 +---- mm2src/coins/utxo/utxo_common.rs | 6 +----- .../src/utxo_activation/init_utxo_standard_activation.rs | 3 --- 5 files changed, 3 insertions(+), 22 deletions(-) diff --git a/mm2src/coins/utxo/qtum.rs b/mm2src/coins/utxo/qtum.rs index 825c94c7e7..dc27555d12 100644 --- a/mm2src/coins/utxo/qtum.rs +++ b/mm2src/coins/utxo/qtum.rs @@ -220,8 +220,6 @@ impl<'a> UtxoCoinBuilder for QtumCoinBuilder<'a> { async fn build(self) -> MmResult { let utxo = self.build_utxo_fields().await?; - // Todo: Remove this if other comment is removed - // let rpc_client = utxo.rpc_client.clone(); let utxo_arc = UtxoArc::new(utxo); let utxo_weak = utxo_arc.downgrade(); let result_coin = QtumCoin::from(utxo_arc); @@ -230,13 +228,6 @@ impl<'a> UtxoCoinBuilder for QtumCoinBuilder<'a> { self.ctx.abort_handlers.lock().unwrap().push(abort_handler); } - // Todo: Remove this or add it to qtum - // if let Some(abort_handler) = - // self.spawn_block_header_utxo_loop_if_required(utxo_weak, &rpc_client, QtumCoin::from, self.sync_status_notifier) - // { - // self.ctx.abort_handlers.lock().unwrap().push(abort_handler); - // } - Ok(result_coin) } } diff --git 
a/mm2src/coins/utxo/spv.rs b/mm2src/coins/utxo/spv.rs index 6084a405ec..9401e6b0b7 100644 --- a/mm2src/coins/utxo/spv.rs +++ b/mm2src/coins/utxo/spv.rs @@ -31,7 +31,7 @@ impl SimplePaymentVerification for ElectrumClient { let (merkle_branch, validated_header, height) = loop { if now_ms() / 1000 > try_spv_proof_until { - // Todo: find a way to not show this error when height is still 0 + // Todo: Should not show this error when height is 0 error!( "Waited too long until {} for transaction {:?} to validate spv proof", try_spv_proof_until, diff --git a/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs b/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs index b45faa14d1..e44305490f 100644 --- a/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs +++ b/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs @@ -146,7 +146,7 @@ where self.ctx.abort_handlers.lock().unwrap().push(abort_handler); } - // Todo: find a better way for this + // This only works for v2 utxo activation since sync_status_loop_handle is initialized there only. if let Some(abort_handler) = self.spawn_block_header_utxo_loop_if_required(utxo_weak, self.constructor.clone()) { self.ctx.abort_handlers.lock().unwrap().push(abort_handler); @@ -196,14 +196,11 @@ pub trait MergeUtxoArcOps: UtxoCoinBuilderCom } pub trait BlockHeaderUtxoArcOps: UtxoCoinBuilderCommonOps { - // Todo: this should be called only if storing headers is enabled and should be called after syncing the latest header on coin activation - // Todo: probably this function needs to be refactored fn spawn_block_header_utxo_loop_if_required(&self, weak: UtxoWeak, constructor: F) -> Option where F: Fn(UtxoArc) -> T + Send + Sync + 'static, T: UtxoCommonOps, { - // Todo: add condition for enable_spv_proof (should block headers be saved when enable_spv_proof is true only? what about for getting tx height?) 
if let Some(sync_status_loop_handle) = self.sync_status_loop_handle() { let ticker = self.ticker().to_owned(); let (fut, abort_handle) = abortable(block_header_utxo_loop(weak, constructor, sync_status_loop_handle)); diff --git a/mm2src/coins/utxo/utxo_common.rs b/mm2src/coins/utxo/utxo_common.rs index 9478f98ca8..f78d7c9a49 100644 --- a/mm2src/coins/utxo/utxo_common.rs +++ b/mm2src/coins/utxo/utxo_common.rs @@ -3501,15 +3501,11 @@ pub async fn block_header_utxo_loop( }; let ticker = coin.as_ref().conf.ticker.as_str(); - // Todo: an attack can be used to send a fake header to fail validating and can't confirm a tx, should use a different server in such case (watch towers shall help) if let Some(params) = &coin.as_ref().conf.block_headers_verification_params { if let Err(e) = validate_headers(ticker, from_block_height, block_headers, storage, params).await { error!("Error {} on validating the latest headers!", e); + // Todo: remove this electrum server and use another in this case since the headers from this server are invalid sync_status_loop_handle.notify_on_permanent_error(e.to_string()); - // Todo: should rotate_servers here, if error is not due to rpc (instead of waiting)??? 
(should also check if error is due to RPC or not before sending permanent_error) - // Todo: when using rotate_servers add sleep and continue again - // Timer::sleep(10.).await; - // continue; break; } } diff --git a/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs b/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs index 0ff1b3fa90..faba3d313a 100644 --- a/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs +++ b/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs @@ -61,9 +61,6 @@ impl InitStandaloneCoinActivationOps for UtxoStandardCoin { let crypto_ctx = CryptoCtx::from_ctx(&ctx)?; let priv_key_policy = priv_key_build_policy(&crypto_ctx, activation_request.priv_key_policy); - // todo: add a function for this instead of coin.as_ref().conf.enable_spv_proof && !coin.as_ref().rpc_client.is_native() - // Todo: should this step be used before this or maybe inside UtxoArcBuilder?? - // Todo: channel can be used to send errors after coin init to rotate_servers etc.. 
let (sync_status_loop_handle, maybe_sync_watcher) = if coin_conf["enable_spv_proof"].as_bool().unwrap_or(false) && !activation_request.mode.is_native() { let (sync_status_notifier, sync_watcher) = channel(1); From 6d6ab4e0d3dc40bea6dbbeb3f5c795f7e265f8b3 Mon Sep 17 00:00:00 2001 From: shamardy Date: Sat, 27 Aug 2022 00:20:06 +0200 Subject: [PATCH 26/33] fix typo --- mm2src/coins/lightning.rs | 2 +- mm2src/mm2_main/src/rpc/dispatcher/dispatcher.rs | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/mm2src/coins/lightning.rs b/mm2src/coins/lightning.rs index fad079a494..d73d478c02 100644 --- a/mm2src/coins/lightning.rs +++ b/mm2src/coins/lightning.rs @@ -1751,7 +1751,7 @@ pub struct ListTrustedNodesResponse { trusted_nodes: Vec, } -pub async fn list_trusted_node(ctx: MmArc, req: ListTrustedNodesReq) -> TrustedNodeResult { +pub async fn list_trusted_nodes(ctx: MmArc, req: ListTrustedNodesReq) -> TrustedNodeResult { let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; let ln_coin = match coin { MmCoinEnum::LightningCoin(c) => c, diff --git a/mm2src/mm2_main/src/rpc/dispatcher/dispatcher.rs b/mm2src/mm2_main/src/rpc/dispatcher/dispatcher.rs index f36fe346da..ba48363be2 100644 --- a/mm2src/mm2_main/src/rpc/dispatcher/dispatcher.rs +++ b/mm2src/mm2_main/src/rpc/dispatcher/dispatcher.rs @@ -39,7 +39,7 @@ use std::net::SocketAddr; cfg_native! 
{ use coins::lightning::{add_trusted_node, close_channel, connect_to_lightning_node, generate_invoice, get_channel_details, get_claimable_balances, get_payment_details, list_closed_channels_by_filter, list_open_channels_by_filter, - list_payments_by_filter, list_trusted_node, open_channel, remove_trusted_node, send_payment, update_channel, + list_payments_by_filter, list_trusted_nodes, open_channel, remove_trusted_node, send_payment, update_channel, LightningCoin}; use coins::{SolanaCoin, SplToken}; use coins::z_coin::ZCoin; @@ -193,7 +193,7 @@ async fn dispatcher_v2(request: MmRpcRequest, ctx: MmArc) -> DispatcherResult handle_mmrpc(ctx, request, list_closed_channels_by_filter).await, "list_open_channels_by_filter" => handle_mmrpc(ctx, request, list_open_channels_by_filter).await, "list_payments_by_filter" => handle_mmrpc(ctx, request, list_payments_by_filter).await, - "list_trusted_node" => handle_mmrpc(ctx, request, list_trusted_node).await, + "list_trusted_nodes" => handle_mmrpc(ctx, request, list_trusted_nodes).await, "open_channel" => handle_mmrpc(ctx, request, open_channel).await, "remove_trusted_node" => handle_mmrpc(ctx, request, remove_trusted_node).await, "send_payment" => handle_mmrpc(ctx, request, send_payment).await, From 36fe8e5603778b82e39adc0a230855b366fd34cd Mon Sep 17 00:00:00 2001 From: shamardy Date: Tue, 30 Aug 2022 23:17:25 +0200 Subject: [PATCH 27/33] First review fixes --- mm2src/coins/lightning.rs | 67 +++++++++++++------ mm2src/coins/lightning/ln_conf.rs | 2 +- mm2src/coins/utxo/rpc_clients.rs | 8 +-- .../coins/utxo/utxo_block_header_storage.rs | 9 +-- .../utxo/utxo_sql_block_header_storage.rs | 5 +- mm2src/coins/utxo/utxo_standard.rs | 4 +- mm2src/mm2_bitcoin/serialization/src/lib.rs | 3 +- .../mm2_bitcoin/serialization/src/reader.rs | 30 +++++---- 8 files changed, 76 insertions(+), 52 deletions(-) diff --git a/mm2src/coins/lightning.rs b/mm2src/coins/lightning.rs index d73d478c02..8c461ace0c 100644 --- a/mm2src/coins/lightning.rs +++ 
b/mm2src/coins/lightning.rs @@ -962,11 +962,16 @@ pub struct UpdateChannelReq { pub coin: String, pub channel_id: H256Json, pub counterparty_node_id: PublicKeyForRPC, - pub options: ChannelOptions, + pub channel_options: ChannelOptions, +} + +#[derive(Serialize)] +pub struct UpdateChannelResponse { + channel_options: ChannelOptions, } /// Updates configuration for an open channel. -pub async fn update_channel(ctx: MmArc, req: UpdateChannelReq) -> UpdateChannelResult { +pub async fn update_channel(ctx: MmArc, req: UpdateChannelReq) -> UpdateChannelResult { let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; let ln_coin = match coin { MmCoinEnum::LightningCoin(c) => c, @@ -974,20 +979,28 @@ pub async fn update_channel(ctx: MmArc, req: UpdateChannelReq) -> UpdateChannelR }; async_blocking(move || { - let mut channel_options = ln_coin.conf.channel_options.unwrap_or_else(|| req.options.clone()); - if channel_options != req.options { - channel_options.update(req.options.clone()); + let mut channel_options = ln_coin + .conf + .channel_options + .unwrap_or_else(|| req.channel_options.clone()); + if channel_options != req.channel_options { + channel_options.update(req.channel_options.clone()); } let channel_ids = vec![req.channel_id.0]; let counterparty_node_id = req.counterparty_node_id.clone(); ln_coin .channel_manager - .update_channel_config(&counterparty_node_id.into(), &channel_ids, &channel_options.into()) - .map_to_mm(|e| UpdateChannelError::FailureToUpdateChannel(req.channel_id.to_string(), format!("{:?}", e))) + .update_channel_config( + &counterparty_node_id.into(), + &channel_ids, + &channel_options.clone().into(), + ) + .map_to_mm(|e| { + UpdateChannelError::FailureToUpdateChannel(req.channel_id.to_string(), format!("{:?}", e)) + })?; + Ok(UpdateChannelResponse { channel_options }) }) - .await?; - - Ok("success".into()) + .await } #[derive(Deserialize)] @@ -1707,18 +1720,25 @@ pub struct AddTrustedNodeReq { pub node_id: PublicKeyForRPC, } -pub async fn 
add_trusted_node(ctx: MmArc, req: AddTrustedNodeReq) -> TrustedNodeResult { +#[derive(Serialize)] +pub struct AddTrustedNodeResponse { + pub added_node: PublicKeyForRPC, +} + +pub async fn add_trusted_node(ctx: MmArc, req: AddTrustedNodeReq) -> TrustedNodeResult { let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; let ln_coin = match coin { MmCoinEnum::LightningCoin(c) => c, _ => return MmError::err(TrustedNodeError::UnsupportedCoin(coin.ticker().to_string())), }; - if ln_coin.trusted_nodes.lock().insert(req.node_id.into()) { + if ln_coin.trusted_nodes.lock().insert(req.node_id.clone().into()) { ln_coin.persister.save_trusted_nodes(ln_coin.trusted_nodes).await?; } - Ok("success".into()) + Ok(AddTrustedNodeResponse { + added_node: req.node_id, + }) } #[derive(Deserialize)] @@ -1727,18 +1747,28 @@ pub struct RemoveTrustedNodeReq { pub node_id: PublicKeyForRPC, } -pub async fn remove_trusted_node(ctx: MmArc, req: RemoveTrustedNodeReq) -> TrustedNodeResult { +#[derive(Serialize)] +pub struct RemoveTrustedNodeResponse { + pub removed_node: PublicKeyForRPC, +} + +pub async fn remove_trusted_node( + ctx: MmArc, + req: RemoveTrustedNodeReq, +) -> TrustedNodeResult { let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; let ln_coin = match coin { MmCoinEnum::LightningCoin(c) => c, _ => return MmError::err(TrustedNodeError::UnsupportedCoin(coin.ticker().to_string())), }; - if ln_coin.trusted_nodes.lock().remove(&req.node_id.into()) { + if ln_coin.trusted_nodes.lock().remove(&req.node_id.clone().into()) { ln_coin.persister.save_trusted_nodes(ln_coin.trusted_nodes).await?; } - Ok("success".into()) + Ok(RemoveTrustedNodeResponse { + removed_node: req.node_id, + }) } #[derive(Deserialize)] @@ -1752,10 +1782,9 @@ pub struct ListTrustedNodesResponse { } pub async fn list_trusted_nodes(ctx: MmArc, req: ListTrustedNodesReq) -> TrustedNodeResult { - let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; - let ln_coin = match coin { + let ln_coin = match lp_coinfind_or_err(&ctx, 
&req.coin).await? { MmCoinEnum::LightningCoin(c) => c, - _ => return MmError::err(TrustedNodeError::UnsupportedCoin(coin.ticker().to_string())), + e => return MmError::err(TrustedNodeError::UnsupportedCoin(e.ticker().to_string())), }; let trusted_nodes = ln_coin.trusted_nodes.lock().clone(); diff --git a/mm2src/coins/lightning/ln_conf.rs b/mm2src/coins/lightning/ln_conf.rs index 8a5ac5c396..c782eeeb09 100644 --- a/mm2src/coins/lightning/ln_conf.rs +++ b/mm2src/coins/lightning/ln_conf.rs @@ -15,7 +15,7 @@ pub struct LightningProtocolConf { pub confirmation_targets: PlatformCoinConfirmationTargets, } -#[derive(Clone, Debug, Deserialize, PartialEq)] +#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] pub struct ChannelOptions { /// Amount (in millionths of a satoshi) charged per satoshi for payments forwarded outbound /// over the channel. diff --git a/mm2src/coins/utxo/rpc_clients.rs b/mm2src/coins/utxo/rpc_clients.rs index 14410ebf0e..29e3d6d765 100644 --- a/mm2src/coins/utxo/rpc_clients.rs +++ b/mm2src/coins/utxo/rpc_clients.rs @@ -33,8 +33,8 @@ use mm2_number::{BigDecimal, BigInt, MmNumber}; #[cfg(test)] use mocktopus::macros::*; use rpc::v1::types::{Bytes as BytesJson, Transaction as RpcTransaction, H256 as H256Json}; use serde_json::{self as json, Value as Json}; -use serialization::{coin_variant_by_ticker, deserialize, serialize, serialize_with_flags, CoinVariant, CompactInteger, - Reader, SERIALIZE_TRANSACTION_WITNESS}; +use serialization::{deserialize, serialize, serialize_with_flags, CoinVariant, CompactInteger, Reader, + SERIALIZE_TRANSACTION_WITNESS}; use sha2::{Digest, Sha256}; use spv_validation::helpers_validation::SPVError; use spv_validation::storage::BlockHeaderStorageOps; @@ -1904,8 +1904,8 @@ impl ElectrumClient { let len = CompactInteger::from(headers.count); let mut serialized = serialize(&len).take(); serialized.extend(headers.hex.0.into_iter()); - let coin_variant = coin_variant_by_ticker(&coin_name); - let mut reader = 
Reader::new_with_coin_variant(serialized.as_slice(), coin_variant); + let mut reader = + Reader::new_with_coin_variant(serialized.as_slice(), coin_name.as_str().into()); let maybe_block_headers = reader.read_list::(); let block_headers = match maybe_block_headers { Ok(headers) => headers, diff --git a/mm2src/coins/utxo/utxo_block_header_storage.rs b/mm2src/coins/utxo/utxo_block_header_storage.rs index 56065b460a..5a2d91b405 100644 --- a/mm2src/coins/utxo/utxo_block_header_storage.rs +++ b/mm2src/coins/utxo/utxo_block_header_storage.rs @@ -44,14 +44,11 @@ impl BlockHeaderStorage { use db_common::sqlite::rusqlite::Connection; use std::sync::{Arc, Mutex}; - let sqlite_connection = Arc::new(Mutex::new(Connection::open_in_memory().unwrap())); - let sqlite_connection = ctx.sqlite_connection.clone_or(sqlite_connection); + let conn = Arc::new(Mutex::new(Connection::open_in_memory().unwrap())); + let conn = ctx.sqlite_connection.clone_or(sqlite_connection); Ok(BlockHeaderStorage { - inner: Box::new(SqliteBlockHeadersStorage { - ticker, - conn: sqlite_connection, - }), + inner: Box::new(SqliteBlockHeadersStorage { ticker, conn }), }) } } diff --git a/mm2src/coins/utxo/utxo_sql_block_header_storage.rs b/mm2src/coins/utxo/utxo_sql_block_header_storage.rs index ca88cc135e..0a54eef435 100644 --- a/mm2src/coins/utxo/utxo_sql_block_header_storage.rs +++ b/mm2src/coins/utxo/utxo_sql_block_header_storage.rs @@ -7,7 +7,7 @@ use db_common::{sqlite::rusqlite::Error as SqlError, sqlite::validate_table_name, sqlite::CHECK_TABLE_EXISTS_SQL}; use primitives::hash::H256; -use serialization::{coin_variant_by_ticker, Reader}; +use serialization::Reader; use spv_validation::storage::{BlockHeaderStorageError, BlockHeaderStorageOps}; use spv_validation::work::MAX_BITS_BTC; use std::collections::HashMap; @@ -195,8 +195,7 @@ impl BlockHeaderStorageOps for SqliteBlockHeadersStorage { coin: coin.clone(), reason: e.to_string(), })?; - let coin_variant = coin_variant_by_ticker(&coin); - let mut reader 
= Reader::new_with_coin_variant(serialized, coin_variant); + let mut reader = Reader::new_with_coin_variant(serialized, coin.as_str().into()); let header: BlockHeader = reader .read() diff --git a/mm2src/coins/utxo/utxo_standard.rs b/mm2src/coins/utxo/utxo_standard.rs index 577e536c2d..c67a7ea844 100644 --- a/mm2src/coins/utxo/utxo_standard.rs +++ b/mm2src/coins/utxo/utxo_standard.rs @@ -22,7 +22,6 @@ use crypto::Bip44Chain; use futures::{FutureExt, TryFutureExt}; use mm2_metrics::MetricsArc; use mm2_number::MmNumber; -use serialization::coin_variant_by_ticker; use utxo_signer::UtxoSignerOps; #[derive(Clone)] @@ -164,8 +163,7 @@ impl UtxoCommonOps for UtxoStandardCoin { } async fn get_current_mtp(&self) -> UtxoRpcResult { - let coin_variant = coin_variant_by_ticker(self.ticker()); - utxo_common::get_current_mtp(&self.utxo_arc, coin_variant).await + utxo_common::get_current_mtp(&self.utxo_arc, self.ticker().into()).await } fn is_unspent_mature(&self, output: &RpcTransaction) -> bool { diff --git a/mm2src/mm2_bitcoin/serialization/src/lib.rs b/mm2src/mm2_bitcoin/serialization/src/lib.rs index 22e806516d..389c28f072 100644 --- a/mm2src/mm2_bitcoin/serialization/src/lib.rs +++ b/mm2src/mm2_bitcoin/serialization/src/lib.rs @@ -13,7 +13,6 @@ pub use primitives::{bytes, compact, hash}; pub use compact_integer::{parse_compact_int, CompactInteger}; pub use list::List; -pub use reader::{coin_variant_by_ticker, deserialize, deserialize_iterator, CoinVariant, Deserializable, Error, - ReadIterator, Reader}; +pub use reader::{deserialize, deserialize_iterator, CoinVariant, Deserializable, Error, ReadIterator, Reader}; pub use stream::{serialize, serialize_list, serialize_with_flags, serialized_list_size, serialized_list_size_with_flags, Serializable, Stream, SERIALIZE_TRANSACTION_WITNESS}; diff --git a/mm2src/mm2_bitcoin/serialization/src/reader.rs b/mm2src/mm2_bitcoin/serialization/src/reader.rs index f437747c3c..d4002782c0 100644 --- 
a/mm2src/mm2_bitcoin/serialization/src/reader.rs +++ b/mm2src/mm2_bitcoin/serialization/src/reader.rs @@ -51,30 +51,32 @@ pub trait Deserializable { #[derive(Debug)] pub enum CoinVariant { - LBC, - Standard, - Qtum, // Todo: https://github.com/KomodoPlatform/atomicDEX-API/issues/1345 BTC, + Qtum, + LBC, + Standard, } impl CoinVariant { + pub fn is_btc(&self) -> bool { matches!(self, CoinVariant::BTC) } + pub fn is_qtum(&self) -> bool { matches!(self, CoinVariant::Qtum) } pub fn is_lbc(&self) -> bool { matches!(self, CoinVariant::LBC) } - - pub fn is_btc(&self) -> bool { matches!(self, CoinVariant::BTC) } } -pub fn coin_variant_by_ticker(ticker: &str) -> CoinVariant { - if ticker == "BTC" || ticker.contains("BTC-") || ticker.contains("BTC_") { - // "BTC", "BTC-segwit", "tBTC", "tBTC-segwit", etc.. - CoinVariant::BTC - } else if ticker == "LBC" || ticker.contains("LBC-") || ticker.contains("LBC_") { - // "LBC", "LBC-segwit", etc.. - CoinVariant::LBC - } else { - CoinVariant::Standard +impl From<&str> for CoinVariant { + fn from(ticker: &str) -> Self { + match ticker { + // "BTC", "BTC-segwit", "tBTC", "tBTC-segwit", etc.. + t if t == "BTC" || t.contains("BTC-") || t.contains("BTC_") => CoinVariant::BTC, + // "QTUM", "QTUM-segwit", "tQTUM", "tQTUM-segwit", etc.. + t if t == "QTUM" || t.contains("QTUM-") || t.contains("QTUM_") => CoinVariant::Qtum, + // "LBC", "LBC-segwit", etc.. 
+ t if t == "LBC" || t.contains("LBC-") || t.contains("LBC_") => CoinVariant::LBC, + _ => CoinVariant::Standard, + } } } From a7e95e97891531b2c4c9331f837232d83865fd6a Mon Sep 17 00:00:00 2001 From: shamardy Date: Tue, 30 Aug 2022 23:24:00 +0200 Subject: [PATCH 28/33] fix check tests --- mm2src/coins/utxo/utxo_block_header_storage.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mm2src/coins/utxo/utxo_block_header_storage.rs b/mm2src/coins/utxo/utxo_block_header_storage.rs index 5a2d91b405..b553c049f7 100644 --- a/mm2src/coins/utxo/utxo_block_header_storage.rs +++ b/mm2src/coins/utxo/utxo_block_header_storage.rs @@ -45,7 +45,7 @@ impl BlockHeaderStorage { use std::sync::{Arc, Mutex}; let conn = Arc::new(Mutex::new(Connection::open_in_memory().unwrap())); - let conn = ctx.sqlite_connection.clone_or(sqlite_connection); + let conn = ctx.sqlite_connection.clone_or(conn); Ok(BlockHeaderStorage { inner: Box::new(SqliteBlockHeadersStorage { ticker, conn }), From ded9078e0e68e93205301a2caf0cf9f0e154f705 Mon Sep 17 00:00:00 2001 From: shamardy Date: Thu, 1 Sep 2022 19:28:46 +0200 Subject: [PATCH 29/33] second review fixes wip --- Cargo.lock | 22 +- mm2src/coins/Cargo.toml | 1 - mm2src/coins/lightning.rs | 62 +++--- mm2src/coins/lightning/ln_conf.rs | 4 +- mm2src/coins/lightning/ln_errors.rs | 8 +- mm2src/coins/lightning/ln_events.rs | 37 ++-- .../lightning/ln_filesystem_persister.rs | 188 +++++++++--------- 7 files changed, 160 insertions(+), 162 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b43bb6c0ae..17ccdf892f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1074,7 +1074,6 @@ dependencies = [ "lightning-invoice", "lightning-net-tokio", "lightning-rapid-gossip-sync", - "metrics 0.12.1", "mm2_core", "mm2_db", "mm2_err_handle", @@ -4114,15 +4113,6 @@ dependencies = [ "parity-util-mem", ] -[[package]] -name = "metrics" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"51b70227ece8711a1aa2f99655efd795d0cff297a5b9fe39645a93aacf6ad39d" -dependencies = [ - "metrics-core", -] - [[package]] name = "metrics" version = "0.19.0" @@ -4133,12 +4123,6 @@ dependencies = [ "metrics-macros", ] -[[package]] -name = "metrics-core" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c064b3a1ff41f4bf6c91185c8a0caeccf8a8a27e9d0f92cc54cf3dbec812f48" - [[package]] name = "metrics-exporter-prometheus" version = "0.10.0" @@ -4148,7 +4132,7 @@ dependencies = [ "hyper", "indexmap", "ipnet", - "metrics 0.19.0", + "metrics", "metrics-util", "parking_lot 0.11.1", "quanta", @@ -4180,7 +4164,7 @@ dependencies = [ "crossbeam-utils 0.8.8", "hashbrown 0.11.2", "indexmap", - "metrics 0.19.0", + "metrics", "num_cpus", "ordered-float", "parking_lot 0.11.1", @@ -4462,7 +4446,7 @@ dependencies = [ "hyper", "hyper-rustls 0.23.0", "itertools", - "metrics 0.19.0", + "metrics", "metrics-exporter-prometheus", "metrics-util", "mm2_err_handle", diff --git a/mm2src/coins/Cargo.toml b/mm2src/coins/Cargo.toml index 7cb3aeb5e0..6c67fcac28 100644 --- a/mm2src/coins/Cargo.toml +++ b/mm2src/coins/Cargo.toml @@ -48,7 +48,6 @@ jsonrpc-core = "8.0.1" keys = { path = "../mm2_bitcoin/keys" } lazy_static = "1.4" libc = "0.2" -metrics = "0.12" mm2_core = { path = "../mm2_core" } mm2_err_handle = { path = "../mm2_err_handle" } mm2_io = { path = "../mm2_io" } diff --git a/mm2src/coins/lightning.rs b/mm2src/coins/lightning.rs index 8c461ace0c..02008bac45 100644 --- a/mm2src/coins/lightning.rs +++ b/mm2src/coins/lightning.rs @@ -145,6 +145,13 @@ impl LightningCoin { }) } + async fn get_channel_by_rpc_id(&self, rpc_id: u64) -> Option { + self.list_channels() + .await + .into_iter() + .find(|chan| chan.user_channel_id == rpc_id) + } + async fn pay_invoice(&self, invoice: Invoice) -> SendPaymentResult { let payment_hash = PaymentHash((invoice.payment_hash()).into_inner()); let payment_type = PaymentType::OutboundPayment { @@ -906,13 +913,13 @@ pub 
async fn open_channel(ctx: MmArc, req: OpenChannelRequest) -> OpenChannelRes let mut conf = ln_coin.conf.clone(); if let Some(options) = req.channel_options { match conf.channel_options.as_mut() { - Some(o) => o.update(options), + Some(o) => o.update_according_to(options), None => conf.channel_options = Some(options), } } if let Some(configs) = req.channel_configs { match conf.our_channels_configs.as_mut() { - Some(o) => o.update(configs), + Some(o) => o.update_according_to(configs), None => conf.our_channels_configs = Some(configs), } } @@ -960,8 +967,7 @@ pub async fn open_channel(ctx: MmArc, req: OpenChannelRequest) -> OpenChannelRes #[derive(Deserialize)] pub struct UpdateChannelReq { pub coin: String, - pub channel_id: H256Json, - pub counterparty_node_id: PublicKeyForRPC, + pub rpc_channel_id: u64, pub channel_options: ChannelOptions, } @@ -978,26 +984,25 @@ pub async fn update_channel(ctx: MmArc, req: UpdateChannelReq) -> UpdateChannelR _ => return MmError::err(UpdateChannelError::UnsupportedCoin(coin.ticker().to_string())), }; + let channel_details = ln_coin + .get_channel_by_rpc_id(req.rpc_channel_id) + .await + .ok_or(UpdateChannelError::NoSuchChannel(req.rpc_channel_id))?; + async_blocking(move || { let mut channel_options = ln_coin .conf .channel_options .unwrap_or_else(|| req.channel_options.clone()); if channel_options != req.channel_options { - channel_options.update(req.channel_options.clone()); + channel_options.update_according_to(req.channel_options.clone()); } - let channel_ids = vec![req.channel_id.0]; - let counterparty_node_id = req.counterparty_node_id.clone(); + let channel_ids = vec![channel_details.channel_id]; + let counterparty_node_id = channel_details.counterparty.node_id; ln_coin .channel_manager - .update_channel_config( - &counterparty_node_id.into(), - &channel_ids, - &channel_options.clone().into(), - ) - .map_to_mm(|e| { - UpdateChannelError::FailureToUpdateChannel(req.channel_id.to_string(), format!("{:?}", e)) - })?; + 
.update_channel_config(&counterparty_node_id, &channel_ids, &channel_options.clone().into()) + .map_to_mm(|e| UpdateChannelError::FailureToUpdateChannel(req.rpc_channel_id, format!("{:?}", e)))?; Ok(UpdateChannelResponse { channel_options }) }) .await @@ -1237,12 +1242,8 @@ pub async fn get_channel_details( MmCoinEnum::LightningCoin(c) => c, _ => return MmError::err(GetChannelDetailsError::UnsupportedCoin(coin.ticker().to_string())), }; - let channel_details = match ln_coin - .list_channels() - .await - .into_iter() - .find(|chan| chan.user_channel_id == req.rpc_channel_id) - { + + let channel_details = match ln_coin.get_channel_by_rpc_id(req.rpc_channel_id).await { Some(details) => GetChannelDetailsResponse::Open(details.into()), None => GetChannelDetailsResponse::Closed( ln_coin @@ -1562,21 +1563,25 @@ pub async fn get_payment_details( #[derive(Deserialize)] pub struct CloseChannelReq { pub coin: String, - pub channel_id: H256Json, - pub counterparty_node_id: PublicKeyForRPC, + pub rpc_channel_id: u64, #[serde(default)] pub force_close: bool, } -// Todo: use either counterparty_node_id or channel_id to close channel/s pub async fn close_channel(ctx: MmArc, req: CloseChannelReq) -> CloseChannelResult { let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; let ln_coin = match coin { MmCoinEnum::LightningCoin(c) => c, _ => return MmError::err(CloseChannelError::UnsupportedCoin(coin.ticker().to_string())), }; - let channel_id = req.channel_id.0; - let counterparty_node_id: PublicKey = req.counterparty_node_id.into(); + + let channel_details = ln_coin + .get_channel_by_rpc_id(req.rpc_channel_id) + .await + .ok_or(CloseChannelError::NoSuchChannel(req.rpc_channel_id))?; + let channel_id = channel_details.channel_id; + let counterparty_node_id = channel_details.counterparty.node_id; + if req.force_close { async_blocking(move || { ln_coin @@ -1595,7 +1600,10 @@ pub async fn close_channel(ctx: MmArc, req: CloseChannelReq) -> CloseChannelResu .await?; } - 
Ok(format!("Initiated closing of channel: {:?}", req.channel_id)) + Ok(format!( + "Initiated closing of channel with rpc_channel_id: {}", + req.rpc_channel_id + )) } /// Details about the balance(s) available for spending once the channel appears on chain. diff --git a/mm2src/coins/lightning/ln_conf.rs b/mm2src/coins/lightning/ln_conf.rs index c782eeeb09..4f0a6f00cd 100644 --- a/mm2src/coins/lightning/ln_conf.rs +++ b/mm2src/coins/lightning/ln_conf.rs @@ -33,7 +33,7 @@ pub struct ChannelOptions { } impl ChannelOptions { - pub fn update(&mut self, options: ChannelOptions) { + pub fn update_according_to(&mut self, options: ChannelOptions) { if let Some(fee) = options.proportional_fee_in_millionths_sats { self.proportional_fee_in_millionths_sats = Some(fee); } @@ -113,7 +113,7 @@ pub struct OurChannelsConfigs { } impl OurChannelsConfigs { - pub fn update(&mut self, config: OurChannelsConfigs) { + pub fn update_according_to(&mut self, config: OurChannelsConfigs) { if let Some(confs) = config.inbound_channels_confirmations { self.inbound_channels_confirmations = Some(confs); } diff --git a/mm2src/coins/lightning/ln_errors.rs b/mm2src/coins/lightning/ln_errors.rs index f5268c08d0..d7ac252d0c 100644 --- a/mm2src/coins/lightning/ln_errors.rs +++ b/mm2src/coins/lightning/ln_errors.rs @@ -226,14 +226,17 @@ pub enum UpdateChannelError { UnsupportedCoin(String), #[display(fmt = "No such coin {}", _0)] NoSuchCoin(String), + #[display(fmt = "No such channel with rpc_channel_id {}", _0)] + NoSuchChannel(u64), #[display(fmt = "Failure to channel {}: {}", _0, _1)] - FailureToUpdateChannel(String, String), + FailureToUpdateChannel(u64, String), } impl HttpStatusCode for UpdateChannelError { fn status_code(&self) -> StatusCode { match self { UpdateChannelError::UnsupportedCoin(_) => StatusCode::BAD_REQUEST, + UpdateChannelError::NoSuchChannel(_) => StatusCode::NOT_FOUND, UpdateChannelError::NoSuchCoin(_) => StatusCode::PRECONDITION_REQUIRED, 
UpdateChannelError::FailureToUpdateChannel(_, _) => StatusCode::INTERNAL_SERVER_ERROR, } @@ -476,6 +479,8 @@ pub enum CloseChannelError { UnsupportedCoin(String), #[display(fmt = "No such coin {}", _0)] NoSuchCoin(String), + #[display(fmt = "No such channel with rpc_channel_id {}", _0)] + NoSuchChannel(u64), #[display(fmt = "Closing channel error: {}", _0)] CloseChannelError(String), } @@ -484,6 +489,7 @@ impl HttpStatusCode for CloseChannelError { fn status_code(&self) -> StatusCode { match self { CloseChannelError::UnsupportedCoin(_) => StatusCode::BAD_REQUEST, + CloseChannelError::NoSuchChannel(_) => StatusCode::NOT_FOUND, CloseChannelError::NoSuchCoin(_) => StatusCode::PRECONDITION_REQUIRED, CloseChannelError::CloseChannelError(_) => StatusCode::INTERNAL_SERVER_ERROR, } diff --git a/mm2src/coins/lightning/ln_events.rs b/mm2src/coins/lightning/ln_events.rs index 842f22aa22..114f8a12cb 100644 --- a/mm2src/coins/lightning/ln_events.rs +++ b/mm2src/coins/lightning/ln_events.rs @@ -352,9 +352,9 @@ impl LightningEventHandler { payment_info.status = HTLCStatus::Succeeded; payment_info.amt_msat = Some(amount_msat as i64); payment_info.last_updated = (now_ms() / 1000) as i64; - if let Err(e) = db.add_or_update_payment_in_db(payment_info).await { - error!("Unable to update payment information in DB: {}", e); - } + db.add_or_update_payment_in_db(payment_info) + .await + .error_log_with_msg("Unable to update payment information in DB!"); } }), PaymentPurpose::SpontaneousPayment(payment_preimage) => { @@ -371,9 +371,9 @@ impl LightningEventHandler { last_updated: (now_ms() / 1000) as i64, }; spawn(async move { - if let Err(e) = db.add_or_update_payment_in_db(payment_info).await { - error!("Unable to update payment information in DB: {}", e); - } + db.add_or_update_payment_in_db(payment_info) + .await + .error_log_with_msg("Unable to update payment information in DB!"); }); }, } @@ -397,9 +397,9 @@ impl LightningEventHandler { payment_info.fee_paid_msat = 
fee_paid_msat.map(|f| f as i64); payment_info.last_updated = (now_ms() / 1000) as i64; let amt_msat = payment_info.amt_msat; - if let Err(e) = db.add_or_update_payment_in_db(payment_info).await { - error!("Unable to update payment information in DB: {}", e); - } + db.add_or_update_payment_in_db(payment_info) + .await + .error_log_with_msg("Unable to update payment information in DB!"); info!( "Successfully sent payment of {} millisatoshis with payment hash {}", amt_msat.unwrap_or_default(), @@ -441,9 +441,9 @@ impl LightningEventHandler { if let Ok(Some(mut payment_info)) = db.get_payment_from_db(payment_hash).await.error_log_passthrough() { payment_info.status = HTLCStatus::Failed; payment_info.last_updated = (now_ms() / 1000) as i64; - if let Err(e) = db.add_or_update_payment_in_db(payment_info).await { - error!("Unable to update payment information in DB: {}", e); - } + db.add_or_update_payment_in_db(payment_info) + .await + .error_log_with_msg("Unable to update payment information in DB!"); } }); } @@ -461,6 +461,11 @@ impl LightningEventHandler { fn handle_spendable_outputs(&self, outputs: Vec) { info!("Handling SpendableOutputs event!"); + if outputs.is_empty() { + error!("Received SpendableOutputs event with no outputs!"); + return; + } + // Todo: add support for Hardware wallets for funding transactions and spending spendable outputs (channel closing transactions) let my_address = match self.platform.coin.as_ref().derivation_method.iguana_or_err() { Ok(addr) => addr.clone(), @@ -566,14 +571,18 @@ impl LightningEventHandler { spawn(async move { if let Ok(last_channel_rpc_id) = db.get_last_channel_rpc_id().await.error_log_passthrough() { let user_channel_id = last_channel_rpc_id as u64 + 1; - if (trusted_nodes.lock().contains(&counterparty_node_id) + + let trusted_nodes = trusted_nodes.lock().clone(); + let accepted_inbound_channel_with_0conf = trusted_nodes.contains(&counterparty_node_id) && channel_manager .accept_inbound_channel_from_trusted_peer_0conf( 
&temporary_channel_id, &counterparty_node_id, user_channel_id, ) - .is_ok()) + .is_ok(); + + if accepted_inbound_channel_with_0conf || channel_manager .accept_inbound_channel(&temporary_channel_id, &counterparty_node_id, user_channel_id) .is_ok() diff --git a/mm2src/coins/lightning/ln_filesystem_persister.rs b/mm2src/coins/lightning/ln_filesystem_persister.rs index c01c0cf8fa..c9e38e7659 100644 --- a/mm2src/coins/lightning/ln_filesystem_persister.rs +++ b/mm2src/coins/lightning/ln_filesystem_persister.rs @@ -11,7 +11,7 @@ use lightning::chain::keysinterface::{KeysInterface, Sign}; use lightning::routing::scoring::{ProbabilisticScorer, ProbabilisticScoringParameters}; use lightning::util::persist::KVStorePersister; use lightning::util::ser::{ReadableArgs, Writeable}; -use mm2_io::fs::check_dir_operations; +use mm2_io::fs::{check_dir_operations, read_json, write_json}; use secp256k1v22::PublicKey; use std::collections::{HashMap, HashSet}; use std::fs; @@ -27,6 +27,8 @@ use std::sync::{Arc, Mutex}; #[cfg(target_family = "windows")] use {std::ffi::OsStr, std::os::windows::ffi::OsStrExt}; +const USE_TMP_FILE: bool = true; + pub struct LightningFilesystemPersister { main_path: PathBuf, backup_path: Option, @@ -53,11 +55,10 @@ impl LightningFilesystemPersister { } pub fn nodes_addresses_backup_path(&self) -> Option { - if let Some(mut backup_path) = self.backup_path() { + self.backup_path().map(|mut backup_path| { backup_path.push("channel_nodes_data"); - return Some(backup_path); - } - None + backup_path + }) } pub fn network_graph_path(&self) -> PathBuf { @@ -91,11 +92,10 @@ impl LightningFilesystemPersister { } pub fn monitors_backup_path(&self) -> Option { - if let Some(mut backup_path) = self.backup_path() { + self.backup_path().map(|mut backup_path| { backup_path.push("monitors"); - return Some(backup_path); - } - None + backup_path + }) } /// Read `ChannelMonitor`s from disk. 
@@ -114,45 +114,57 @@ impl LightningFilesystemPersister { for file_option in fs::read_dir(path).unwrap() { let file = file_option.unwrap(); let owned_file_name = file.file_name(); - let filename = owned_file_name.to_str(); - if filename.is_some() && filename.unwrap() == "checkval" { + let filename = match owned_file_name.to_str() { + Some(name) => name, + None => { + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + format!("Invalid ChannelMonitor file name: {:?}", owned_file_name), + )) + }, + }; + if filename == "checkval" { continue; } - if filename.is_none() || !filename.unwrap().is_ascii() || filename.unwrap().len() < 65 { + if !filename.is_ascii() || filename.len() < 65 { return Err(std::io::Error::new( std::io::ErrorKind::InvalidData, - "Invalid ChannelMonitor file name", + format!("Invalid ChannelMonitor file name: {}", filename), )); } - if filename.unwrap().ends_with(".tmp") { + if filename.ends_with(".tmp") { // If we were in the middle of committing an new update and crashed, it should be // safe to ignore the update - we should never have returned to the caller and // irrevocably committed to the new state in any way. 
continue; } - let txid = Txid::from_hex(filename.unwrap().split_at(64).0); - if txid.is_err() { - return Err(std::io::Error::new( - std::io::ErrorKind::InvalidData, - "Invalid tx ID in filename", - )); - } + let txid = match Txid::from_hex(filename.split_at(64).0) { + Ok(tx_id) => tx_id, + Err(e) => { + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + format!("Invalid tx ID in filename error: {}", e), + )) + }, + }; - let index = filename.unwrap().split_at(65).1.parse::(); - if index.is_err() { - return Err(std::io::Error::new( - std::io::ErrorKind::InvalidData, - "Invalid tx index in filename", - )); - } + let index = match filename.split_at(65).1.parse::() { + Ok(i) => i, + Err(e) => { + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + format!("Invalid tx index in filename error: {}", e), + )) + }, + }; let contents = fs::read(&file.path())?; let mut buffer = Cursor::new(&contents); match <(BlockHash, ChannelMonitor)>::read(&mut buffer, &*keys_manager) { Ok((blockhash, channel_monitor)) => { - if channel_monitor.get_funding_txo().0.txid != txid.unwrap() - || channel_monitor.get_funding_txo().0.index != index.unwrap() + if channel_monitor.get_funding_txo().0.txid != txid + || channel_monitor.get_funding_txo().0.index != index { return Err(std::io::Error::new( std::io::ErrorKind::InvalidData, @@ -311,54 +323,43 @@ impl LightningStorage for LightningFilesystemPersister { if !path.exists() { return Ok(HashMap::new()); } - async_blocking(move || { - let file = fs::File::open(path)?; - let reader = BufReader::new(file); - let nodes_addresses: HashMap = - serde_json::from_reader(reader).map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?; - nodes_addresses - .iter() - .map(|(pubkey_str, addr)| { - let pubkey = PublicKey::from_str(pubkey_str) - .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?; - Ok((pubkey, *addr)) - }) - .collect() - }) - .await + + let nodes_addresses: HashMap = 
read_json(&path) + .await + .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string()))? + .ok_or_else(|| std::io::Error::from(std::io::ErrorKind::NotFound))?; + + nodes_addresses + .iter() + .map(|(pubkey_str, addr)| { + let pubkey = PublicKey::from_str(pubkey_str) + .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?; + Ok((pubkey, *addr)) + }) + .collect() } async fn save_nodes_addresses(&self, nodes_addresses: NodesAddressesMapShared) -> Result<(), Self::Error> { let path = self.nodes_addresses_path(); let backup_path = self.nodes_addresses_backup_path(); - async_blocking(move || { - let nodes_addresses: HashMap = nodes_addresses - .lock() - .iter() - .map(|(pubkey, addr)| (pubkey.to_string(), *addr)) - .collect(); - - let file = fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(path)?; - serde_json::to_writer(file, &nodes_addresses) - .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?; - if let Some(path) = backup_path { - let file = fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(path)?; - serde_json::to_writer(file, &nodes_addresses) - .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?; - } + let nodes_addresses: HashMap = nodes_addresses + .lock() + .iter() + .map(|(pubkey, addr)| (pubkey.to_string(), *addr)) + .collect(); - Ok(()) - }) - .await + write_json(&nodes_addresses, &path, USE_TMP_FILE) + .await + .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string()))?; + + if let Some(path) = backup_path { + write_json(&nodes_addresses, &path, USE_TMP_FILE) + .await + .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string()))?; + } + + Ok(()) } async fn get_network_graph(&self, network: Network, logger: Arc) -> Result { @@ -401,36 +402,27 @@ impl LightningStorage for LightningFilesystemPersister { if !path.exists() { return Ok(HashSet::new()); } - async_blocking(move || { - 
let file = fs::File::open(path)?; - let reader = BufReader::new(file); - let trusted_nodes: HashSet = - serde_json::from_reader(reader).map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?; - trusted_nodes - .iter() - .map(|pubkey_str| { - let pubkey = PublicKey::from_str(pubkey_str) - .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?; - Ok(pubkey) - }) - .collect() - }) - .await + + let trusted_nodes: HashSet = read_json(&path) + .await + .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string()))? + .ok_or_else(|| std::io::Error::from(std::io::ErrorKind::NotFound))?; + + trusted_nodes + .iter() + .map(|pubkey_str| { + let pubkey = PublicKey::from_str(pubkey_str) + .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?; + Ok(pubkey) + }) + .collect() } async fn save_trusted_nodes(&self, trusted_nodes: TrustedNodesShared) -> Result<(), Self::Error> { let path = self.trusted_nodes_path(); - async_blocking(move || { - let trusted_nodes: HashSet = trusted_nodes.lock().iter().map(|pubkey| pubkey.to_string()).collect(); - - let file = fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(path)?; - serde_json::to_writer(file, &trusted_nodes) - .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e)) - }) - .await + let trusted_nodes: HashSet = trusted_nodes.lock().iter().map(|pubkey| pubkey.to_string()).collect(); + write_json(&trusted_nodes, &path, USE_TMP_FILE) + .await + .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string())) } } From 64efb724c617ca960f00646132867cad84a2dea0 Mon Sep 17 00:00:00 2001 From: shamardy Date: Fri, 2 Sep 2022 20:41:05 +0200 Subject: [PATCH 30/33] second review fixes wip: fix default fees --- mm2src/coins/lightning.rs | 2 +- mm2src/coins/lightning/ln_platform.rs | 66 +++++++++++++++++---------- 2 files changed, 42 insertions(+), 26 deletions(-) diff --git a/mm2src/coins/lightning.rs 
b/mm2src/coins/lightning.rs index 02008bac45..1441589dac 100644 --- a/mm2src/coins/lightning.rs +++ b/mm2src/coins/lightning.rs @@ -662,7 +662,7 @@ pub async fn start_lightning( protocol_conf.network.clone(), protocol_conf.confirmation_targets, )); - platform.set_default_fees().await?; + platform.set_latest_fees().await?; // Initialize the Logger let logger = ctx.log.0.clone(); diff --git a/mm2src/coins/lightning/ln_platform.rs b/mm2src/coins/lightning/ln_platform.rs index e99226b374..f9dff6db6e 100644 --- a/mm2src/coins/lightning/ln_platform.rs +++ b/mm2src/coins/lightning/ln_platform.rs @@ -28,14 +28,6 @@ use std::sync::atomic::{AtomicU64, Ordering as AtomicOrdering, Ordering}; const CHECK_FOR_NEW_BEST_BLOCK_INTERVAL: f64 = 60.; const TRY_LOOP_INTERVAL: f64 = 60.; -static DEFAULT_BACKGROUND_FEES_PER_VB: AtomicU64 = AtomicU64::new(1012); -static DEFAULT_NORMAL_FEES_PER_VB: AtomicU64 = AtomicU64::new(8000); -static DEFAULT_HIGH_PRIORITY_FEES_PER_VB: AtomicU64 = AtomicU64::new(20000); - -fn set_default_background_fees(fee: u64) { DEFAULT_BACKGROUND_FEES_PER_VB.store(fee, Ordering::Relaxed); } -fn set_default_normal_fees(fee: u64) { DEFAULT_BACKGROUND_FEES_PER_VB.store(fee, Ordering::Relaxed); } -fn set_default_high_priority_fees(fee: u64) { DEFAULT_BACKGROUND_FEES_PER_VB.store(fee, Ordering::Relaxed); } - #[inline] pub fn h256_json_from_txid(txid: Txid) -> H256Json { H256Json::from(txid.as_hash().into_inner()).reversed() } @@ -141,6 +133,23 @@ async fn get_funding_tx_bytes_loop(rpc_client: &UtxoRpcClientEnum, tx_hash: H256 } } +pub struct LatestFees { + background: AtomicU64, + normal: AtomicU64, + high_priority: AtomicU64, +} + +impl LatestFees { + #[inline] + fn set_background_fees(&self, fee: u64) { self.background.store(fee, Ordering::Relaxed); } + + #[inline] + fn set_normal_fees(&self, fee: u64) { self.normal.store(fee, Ordering::Relaxed); } + + #[inline] + fn set_high_priority_fees(&self, fee: u64) { self.high_priority.store(fee, Ordering::Relaxed); } +} + 
pub struct Platform { pub coin: UtxoStandardCoin, /// Main/testnet/signet/regtest Needed for lightning node to know which network to connect to @@ -149,6 +158,8 @@ pub struct Platform { pub best_block_height: AtomicU64, /// Number of blocks for every Confirmation target. This is used in the FeeEstimator. pub confirmations_targets: PlatformCoinConfirmationTargets, + /// Latest fees are used when the call for estimate_fee_sat fails. + pub latest_fees: LatestFees, /// This cache stores the transactions that the LN node has interest in. pub registered_txs: PaMutex>, /// This cache stores the outputs that the LN node has interest in. @@ -162,13 +173,18 @@ impl Platform { pub fn new( coin: UtxoStandardCoin, network: BlockchainNetwork, - default_fees_and_confirmations: PlatformCoinConfirmationTargets, + confirmations_targets: PlatformCoinConfirmationTargets, ) -> Self { Platform { coin, network, best_block_height: AtomicU64::new(0), - confirmations_targets: default_fees_and_confirmations, + confirmations_targets, + latest_fees: LatestFees { + background: AtomicU64::new(0), + normal: AtomicU64::new(0), + high_priority: AtomicU64::new(0), + }, registered_txs: PaMutex::new(HashSet::new()), registered_outputs: PaMutex::new(Vec::new()), unsigned_funding_txs: PaMutex::new(HashMap::new()), @@ -178,11 +194,11 @@ impl Platform { #[inline] fn rpc_client(&self) -> &UtxoRpcClientEnum { &self.coin.as_ref().rpc_client } - pub async fn set_default_fees(&self) -> UtxoRpcResult<()> { + pub async fn set_latest_fees(&self) -> UtxoRpcResult<()> { let platform_coin = &self.coin; let conf = &platform_coin.as_ref().conf; - let default_background_fees = self + let latest_background_fees = self .rpc_client() .estimate_fee_sat( platform_coin.decimals(), @@ -193,9 +209,9 @@ impl Platform { ) .compat() .await?; - set_default_background_fees(default_background_fees); + self.latest_fees.set_background_fees(latest_background_fees); - let default_normal_fees = self + let latest_normal_fees = self 
.rpc_client() .estimate_fee_sat( platform_coin.decimals(), @@ -206,9 +222,9 @@ impl Platform { ) .compat() .await?; - set_default_normal_fees(default_normal_fees); + self.latest_fees.set_normal_fees(latest_normal_fees); - let default_high_priority_fees = self + let latest_high_priority_fees = self .rpc_client() .estimate_fee_sat( platform_coin.decimals(), @@ -219,7 +235,7 @@ impl Platform { ) .compat() .await?; - set_default_high_priority_fees(default_high_priority_fees); + self.latest_fees.set_high_priority_fees(latest_high_priority_fees); Ok(()) } @@ -508,10 +524,10 @@ impl FeeEstimator for Platform { fn get_est_sat_per_1000_weight(&self, confirmation_target: ConfirmationTarget) -> u32 { let platform_coin = &self.coin; - let default_fee = match confirmation_target { - ConfirmationTarget::Background => DEFAULT_BACKGROUND_FEES_PER_VB.load(Ordering::Relaxed), - ConfirmationTarget::Normal => DEFAULT_NORMAL_FEES_PER_VB.load(Ordering::Relaxed), - ConfirmationTarget::HighPriority => DEFAULT_HIGH_PRIORITY_FEES_PER_VB.load(Ordering::Relaxed), + let latest_fees = match confirmation_target { + ConfirmationTarget::Background => self.latest_fees.background.load(Ordering::Relaxed), + ConfirmationTarget::Normal => self.latest_fees.normal.load(Ordering::Relaxed), + ConfirmationTarget::HighPriority => self.latest_fees.high_priority.load(Ordering::Relaxed), }; let conf = &platform_coin.as_ref().conf; @@ -531,14 +547,14 @@ impl FeeEstimator for Platform { n_blocks, ) .wait() - .unwrap_or(default_fee) + .unwrap_or(latest_fees) }); // Set default fee to last known fee for the corresponding confirmation target match confirmation_target { - ConfirmationTarget::Background => DEFAULT_BACKGROUND_FEES_PER_VB.store(fee_per_kb, Ordering::Relaxed), - ConfirmationTarget::Normal => DEFAULT_NORMAL_FEES_PER_VB.store(fee_per_kb, Ordering::Relaxed), - ConfirmationTarget::HighPriority => DEFAULT_HIGH_PRIORITY_FEES_PER_VB.store(fee_per_kb, Ordering::Relaxed), + ConfirmationTarget::Background => 
self.latest_fees.set_background_fees(fee_per_kb), + ConfirmationTarget::Normal => self.latest_fees.set_normal_fees(fee_per_kb), + ConfirmationTarget::HighPriority => self.latest_fees.set_high_priority_fees(fee_per_kb), }; // Must be no smaller than 253 (ie 1 satoshi-per-byte rounded up to ensure later round-downs don’t put us below 1 satoshi-per-byte). From f092279141c52f7021bc3e885d15955377ed76bb Mon Sep 17 00:00:00 2001 From: shamardy Date: Fri, 2 Sep 2022 21:48:06 +0200 Subject: [PATCH 31/33] second review fixes complete --- mm2src/coins/lightning.rs | 77 ++++++++----------- mm2src/coins/lightning/ln_errors.rs | 29 ++++--- .../lightning/ln_filesystem_persister.rs | 3 + mm2src/coins/lightning/ln_utils.rs | 5 +- mm2src/coins/my_tx_history_v2.rs | 2 +- mm2src/coins/rpc_command/get_current_mtp.rs | 2 +- mm2src/coins/utxo/rpc_clients.rs | 1 + 7 files changed, 54 insertions(+), 65 deletions(-) diff --git a/mm2src/coins/lightning.rs b/mm2src/coins/lightning.rs index 1441589dac..21735e84ae 100644 --- a/mm2src/coins/lightning.rs +++ b/mm2src/coins/lightning.rs @@ -237,6 +237,7 @@ impl LightningCoin { self.list_channels().await.into_iter().map(From::from).collect(); total_open_channels.sort_by(|a, b| a.rpc_channel_id.cmp(&b.rpc_channel_id)); + drop_mutability!(total_open_channels); let open_channels_filtered = if let Some(ref f) = filter { total_open_channels @@ -807,10 +808,9 @@ pub struct ConnectToNodeRequest { /// Connect to a certain node on the lightning network. pub async fn connect_to_lightning_node(ctx: MmArc, req: ConnectToNodeRequest) -> ConnectToNodeResult { - let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; - let ln_coin = match coin { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? 
{ MmCoinEnum::LightningCoin(c) => c, - _ => return MmError::err(ConnectToNodeError::UnsupportedCoin(coin.ticker().to_string())), + e => return MmError::err(ConnectToNodeError::UnsupportedCoin(e.ticker().to_string())), }; let node_pubkey = req.node_address.pubkey; @@ -861,10 +861,9 @@ pub struct OpenChannelResponse { /// Opens a channel on the lightning network. pub async fn open_channel(ctx: MmArc, req: OpenChannelRequest) -> OpenChannelResult { - let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; - let ln_coin = match coin { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? { MmCoinEnum::LightningCoin(c) => c, - _ => return MmError::err(OpenChannelError::UnsupportedCoin(coin.ticker().to_string())), + e => return MmError::err(OpenChannelError::UnsupportedCoin(e.ticker().to_string())), }; // Making sure that the node data is correct and that we can connect to it before doing more operations @@ -923,6 +922,7 @@ pub async fn open_channel(ctx: MmArc, req: OpenChannelRequest) -> OpenChannelRes None => conf.our_channels_configs = Some(configs), } } + drop_mutability!(conf); let user_config: UserConfig = conf.into(); let rpc_channel_id = ln_coin.db.get_last_channel_rpc_id().await? as u64 + 1; @@ -978,10 +978,9 @@ pub struct UpdateChannelResponse { /// Updates configuration for an open channel. pub async fn update_channel(ctx: MmArc, req: UpdateChannelReq) -> UpdateChannelResult { - let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; - let ln_coin = match coin { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? 
{ MmCoinEnum::LightningCoin(c) => c, - _ => return MmError::err(UpdateChannelError::UnsupportedCoin(coin.ticker().to_string())), + e => return MmError::err(UpdateChannelError::UnsupportedCoin(e.ticker().to_string())), }; let channel_details = ln_coin @@ -997,11 +996,12 @@ pub async fn update_channel(ctx: MmArc, req: UpdateChannelReq) -> UpdateChannelR if channel_options != req.channel_options { channel_options.update_according_to(req.channel_options.clone()); } - let channel_ids = vec![channel_details.channel_id]; + drop_mutability!(channel_options); + let channel_ids = &[channel_details.channel_id]; let counterparty_node_id = channel_details.counterparty.node_id; ln_coin .channel_manager - .update_channel_config(&counterparty_node_id, &channel_ids, &channel_options.clone().into()) + .update_channel_config(&counterparty_node_id, channel_ids, &channel_options.clone().into()) .map_to_mm(|e| UpdateChannelError::FailureToUpdateChannel(req.rpc_channel_id, format!("{:?}", e)))?; Ok(UpdateChannelResponse { channel_options }) }) @@ -1156,10 +1156,9 @@ pub async fn list_open_channels_by_filter( ctx: MmArc, req: ListOpenChannelsRequest, ) -> ListChannelsResult { - let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; - let ln_coin = match coin { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? { MmCoinEnum::LightningCoin(c) => c, - _ => return MmError::err(ListChannelsError::UnsupportedCoin(coin.ticker().to_string())), + e => return MmError::err(ListChannelsError::UnsupportedCoin(e.ticker().to_string())), }; let result = ln_coin @@ -1200,10 +1199,9 @@ pub async fn list_closed_channels_by_filter( ctx: MmArc, req: ListClosedChannelsRequest, ) -> ListChannelsResult { - let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; - let ln_coin = match coin { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? 
{ MmCoinEnum::LightningCoin(c) => c, - _ => return MmError::err(ListChannelsError::UnsupportedCoin(coin.ticker().to_string())), + e => return MmError::err(ListChannelsError::UnsupportedCoin(e.ticker().to_string())), }; let closed_channels_res = ln_coin .db @@ -1237,10 +1235,9 @@ pub async fn get_channel_details( ctx: MmArc, req: GetChannelDetailsRequest, ) -> GetChannelDetailsResult { - let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; - let ln_coin = match coin { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? { MmCoinEnum::LightningCoin(c) => c, - _ => return MmError::err(GetChannelDetailsError::UnsupportedCoin(coin.ticker().to_string())), + e => return MmError::err(GetChannelDetailsError::UnsupportedCoin(e.ticker().to_string())), }; let channel_details = match ln_coin.get_channel_by_rpc_id(req.rpc_channel_id).await { @@ -1276,10 +1273,9 @@ pub async fn generate_invoice( ctx: MmArc, req: GenerateInvoiceRequest, ) -> GenerateInvoiceResult { - let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; - let ln_coin = match coin { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? { MmCoinEnum::LightningCoin(c) => c, - _ => return MmError::err(GenerateInvoiceError::UnsupportedCoin(coin.ticker().to_string())), + e => return MmError::err(GenerateInvoiceError::UnsupportedCoin(e.ticker().to_string())), }; let open_channels_nodes = ln_coin.open_channels_nodes.lock().clone(); for (node_pubkey, node_addr) in open_channels_nodes { @@ -1359,10 +1355,9 @@ pub struct SendPaymentResponse { } pub async fn send_payment(ctx: MmArc, req: SendPaymentReq) -> SendPaymentResult { - let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; - let ln_coin = match coin { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? 
{ MmCoinEnum::LightningCoin(c) => c, - _ => return MmError::err(SendPaymentError::UnsupportedCoin(coin.ticker().to_string())), + e => return MmError::err(SendPaymentError::UnsupportedCoin(e.ticker().to_string())), }; let open_channels_nodes = ln_coin.open_channels_nodes.lock().clone(); for (node_pubkey, node_addr) in open_channels_nodes { @@ -1506,10 +1501,9 @@ pub struct ListPaymentsResponse { } pub async fn list_payments_by_filter(ctx: MmArc, req: ListPaymentsReq) -> ListPaymentsResult { - let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; - let ln_coin = match coin { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? { MmCoinEnum::LightningCoin(c) => c, - _ => return MmError::err(ListPaymentsError::UnsupportedCoin(coin.ticker().to_string())), + e => return MmError::err(ListPaymentsError::UnsupportedCoin(e.ticker().to_string())), }; let get_payments_res = ln_coin .db @@ -1545,10 +1539,9 @@ pub async fn get_payment_details( ctx: MmArc, req: GetPaymentDetailsRequest, ) -> GetPaymentDetailsResult { - let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; - let ln_coin = match coin { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? { MmCoinEnum::LightningCoin(c) => c, - _ => return MmError::err(GetPaymentDetailsError::UnsupportedCoin(coin.ticker().to_string())), + e => return MmError::err(GetPaymentDetailsError::UnsupportedCoin(e.ticker().to_string())), }; if let Some(payment_info) = ln_coin.db.get_payment_from_db(PaymentHash(req.payment_hash.0)).await? { @@ -1569,10 +1562,9 @@ pub struct CloseChannelReq { } pub async fn close_channel(ctx: MmArc, req: CloseChannelReq) -> CloseChannelResult { - let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; - let ln_coin = match coin { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? 
{ MmCoinEnum::LightningCoin(c) => c, - _ => return MmError::err(CloseChannelError::UnsupportedCoin(coin.ticker().to_string())), + e => return MmError::err(CloseChannelError::UnsupportedCoin(e.ticker().to_string())), }; let channel_details = ln_coin @@ -1699,10 +1691,9 @@ pub async fn get_claimable_balances( ctx: MmArc, req: ClaimableBalancesReq, ) -> ClaimableBalancesResult> { - let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; - let ln_coin = match coin { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? { MmCoinEnum::LightningCoin(c) => c, - _ => return MmError::err(ClaimableBalancesError::UnsupportedCoin(coin.ticker().to_string())), + e => return MmError::err(ClaimableBalancesError::UnsupportedCoin(e.ticker().to_string())), }; let ignored_channels = if req.include_open_channels_balances { Vec::new() @@ -1734,10 +1725,9 @@ pub struct AddTrustedNodeResponse { } pub async fn add_trusted_node(ctx: MmArc, req: AddTrustedNodeReq) -> TrustedNodeResult { - let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; - let ln_coin = match coin { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? { MmCoinEnum::LightningCoin(c) => c, - _ => return MmError::err(TrustedNodeError::UnsupportedCoin(coin.ticker().to_string())), + e => return MmError::err(TrustedNodeError::UnsupportedCoin(e.ticker().to_string())), }; if ln_coin.trusted_nodes.lock().insert(req.node_id.clone().into()) { @@ -1764,10 +1754,9 @@ pub async fn remove_trusted_node( ctx: MmArc, req: RemoveTrustedNodeReq, ) -> TrustedNodeResult { - let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; - let ln_coin = match coin { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? 
{ MmCoinEnum::LightningCoin(c) => c, - _ => return MmError::err(TrustedNodeError::UnsupportedCoin(coin.ticker().to_string())), + e => return MmError::err(TrustedNodeError::UnsupportedCoin(e.ticker().to_string())), }; if ln_coin.trusted_nodes.lock().remove(&req.node_id.clone().into()) { diff --git a/mm2src/coins/lightning/ln_errors.rs b/mm2src/coins/lightning/ln_errors.rs index d7ac252d0c..c76102cddc 100644 --- a/mm2src/coins/lightning/ln_errors.rs +++ b/mm2src/coins/lightning/ln_errors.rs @@ -100,7 +100,7 @@ impl HttpStatusCode for ConnectToNodeError { ConnectToNodeError::ParseError(_) | ConnectToNodeError::IOError(_) | ConnectToNodeError::ConnectionError(_) => StatusCode::INTERNAL_SERVER_ERROR, - ConnectToNodeError::NoSuchCoin(_) => StatusCode::PRECONDITION_REQUIRED, + ConnectToNodeError::NoSuchCoin(_) => StatusCode::NOT_FOUND, } } } @@ -165,8 +165,9 @@ impl HttpStatusCode for OpenChannelError { | OpenChannelError::IOError(_) | OpenChannelError::DbError(_) | OpenChannelError::InvalidPath(_) - | OpenChannelError::ConvertTxErr(_) => StatusCode::INTERNAL_SERVER_ERROR, - OpenChannelError::NoSuchCoin(_) | OpenChannelError::BalanceError(_) => StatusCode::PRECONDITION_REQUIRED, + | OpenChannelError::ConvertTxErr(_) + | OpenChannelError::BalanceError(_) => StatusCode::INTERNAL_SERVER_ERROR, + OpenChannelError::NoSuchCoin(_) => StatusCode::NOT_FOUND, } } } @@ -237,7 +238,7 @@ impl HttpStatusCode for UpdateChannelError { match self { UpdateChannelError::UnsupportedCoin(_) => StatusCode::BAD_REQUEST, UpdateChannelError::NoSuchChannel(_) => StatusCode::NOT_FOUND, - UpdateChannelError::NoSuchCoin(_) => StatusCode::PRECONDITION_REQUIRED, + UpdateChannelError::NoSuchCoin(_) => StatusCode::NOT_FOUND, UpdateChannelError::FailureToUpdateChannel(_, _) => StatusCode::INTERNAL_SERVER_ERROR, } } @@ -266,7 +267,7 @@ impl HttpStatusCode for ListChannelsError { fn status_code(&self) -> StatusCode { match self { ListChannelsError::UnsupportedCoin(_) => StatusCode::BAD_REQUEST, - 
ListChannelsError::NoSuchCoin(_) => StatusCode::PRECONDITION_REQUIRED, + ListChannelsError::NoSuchCoin(_) => StatusCode::NOT_FOUND, ListChannelsError::DbError(_) => StatusCode::INTERNAL_SERVER_ERROR, } } @@ -301,8 +302,7 @@ impl HttpStatusCode for GetChannelDetailsError { fn status_code(&self) -> StatusCode { match self { GetChannelDetailsError::UnsupportedCoin(_) => StatusCode::BAD_REQUEST, - GetChannelDetailsError::NoSuchCoin(_) => StatusCode::PRECONDITION_REQUIRED, - GetChannelDetailsError::NoSuchChannel(_) => StatusCode::NOT_FOUND, + GetChannelDetailsError::NoSuchCoin(_) | GetChannelDetailsError::NoSuchChannel(_) => StatusCode::NOT_FOUND, GetChannelDetailsError::DbError(_) => StatusCode::INTERNAL_SERVER_ERROR, } } @@ -337,7 +337,7 @@ impl HttpStatusCode for GenerateInvoiceError { fn status_code(&self) -> StatusCode { match self { GenerateInvoiceError::UnsupportedCoin(_) => StatusCode::BAD_REQUEST, - GenerateInvoiceError::NoSuchCoin(_) => StatusCode::PRECONDITION_REQUIRED, + GenerateInvoiceError::NoSuchCoin(_) => StatusCode::NOT_FOUND, GenerateInvoiceError::SignOrCreationError(_) | GenerateInvoiceError::DbError(_) => { StatusCode::INTERNAL_SERVER_ERROR }, @@ -382,7 +382,7 @@ impl HttpStatusCode for SendPaymentError { fn status_code(&self) -> StatusCode { match self { SendPaymentError::UnsupportedCoin(_) => StatusCode::BAD_REQUEST, - SendPaymentError::NoSuchCoin(_) => StatusCode::PRECONDITION_REQUIRED, + SendPaymentError::NoSuchCoin(_) => StatusCode::NOT_FOUND, SendPaymentError::PaymentError(_) | SendPaymentError::NoRouteFound(_) | SendPaymentError::CLTVExpiryError(_, _) @@ -418,7 +418,7 @@ impl HttpStatusCode for ListPaymentsError { fn status_code(&self) -> StatusCode { match self { ListPaymentsError::UnsupportedCoin(_) => StatusCode::BAD_REQUEST, - ListPaymentsError::NoSuchCoin(_) => StatusCode::PRECONDITION_REQUIRED, + ListPaymentsError::NoSuchCoin(_) => StatusCode::NOT_FOUND, ListPaymentsError::DbError(_) => StatusCode::INTERNAL_SERVER_ERROR, } } @@ -453,7 
+453,7 @@ impl HttpStatusCode for GetPaymentDetailsError { fn status_code(&self) -> StatusCode { match self { GetPaymentDetailsError::UnsupportedCoin(_) => StatusCode::BAD_REQUEST, - GetPaymentDetailsError::NoSuchCoin(_) => StatusCode::PRECONDITION_REQUIRED, + GetPaymentDetailsError::NoSuchCoin(_) => StatusCode::NOT_FOUND, GetPaymentDetailsError::NoSuchPayment(_) => StatusCode::NOT_FOUND, GetPaymentDetailsError::DbError(_) => StatusCode::INTERNAL_SERVER_ERROR, } @@ -489,8 +489,7 @@ impl HttpStatusCode for CloseChannelError { fn status_code(&self) -> StatusCode { match self { CloseChannelError::UnsupportedCoin(_) => StatusCode::BAD_REQUEST, - CloseChannelError::NoSuchChannel(_) => StatusCode::NOT_FOUND, - CloseChannelError::NoSuchCoin(_) => StatusCode::PRECONDITION_REQUIRED, + CloseChannelError::NoSuchChannel(_) | CloseChannelError::NoSuchCoin(_) => StatusCode::NOT_FOUND, CloseChannelError::CloseChannelError(_) => StatusCode::INTERNAL_SERVER_ERROR, } } @@ -517,7 +516,7 @@ impl HttpStatusCode for ClaimableBalancesError { fn status_code(&self) -> StatusCode { match self { ClaimableBalancesError::UnsupportedCoin(_) => StatusCode::BAD_REQUEST, - ClaimableBalancesError::NoSuchCoin(_) => StatusCode::PRECONDITION_REQUIRED, + ClaimableBalancesError::NoSuchCoin(_) => StatusCode::NOT_FOUND, } } } @@ -571,7 +570,7 @@ impl HttpStatusCode for TrustedNodeError { fn status_code(&self) -> StatusCode { match self { TrustedNodeError::UnsupportedCoin(_) => StatusCode::BAD_REQUEST, - TrustedNodeError::NoSuchCoin(_) => StatusCode::PRECONDITION_REQUIRED, + TrustedNodeError::NoSuchCoin(_) => StatusCode::NOT_FOUND, TrustedNodeError::IOError(_) => StatusCode::INTERNAL_SERVER_ERROR, } } diff --git a/mm2src/coins/lightning/ln_filesystem_persister.rs b/mm2src/coins/lightning/ln_filesystem_persister.rs index c9e38e7659..fa6222b5c3 100644 --- a/mm2src/coins/lightning/ln_filesystem_persister.rs +++ b/mm2src/coins/lightning/ln_filesystem_persister.rs @@ -189,11 +189,13 @@ impl KVStorePersister for 
LightningFilesystemPersister { fn persist(&self, key: &str, object: &W) -> std::io::Result<()> { let mut dest_file = self.main_path(); dest_file.push(key); + drop_mutability!(dest_file); write_to_file(dest_file, object)?; if !matches!(key, "network_graph" | "scorer") { if let Some(mut dest_file) = self.backup_path() { dest_file.push(key); + drop_mutability!(dest_file); write_to_file(dest_file, object)?; } } @@ -221,6 +223,7 @@ fn path_to_windows_str>(path: T) -> Vec(dest_file: PathBuf, data: &W) -> std::io::Result<()> { let mut tmp_file = dest_file.clone(); tmp_file.set_extension("tmp"); + drop_mutability!(tmp_file); // Do a crazy dance with lots of fsync()s to be overly cautious here... // We never want to end up in a state where we've lost the old data, or end up using the diff --git a/mm2src/coins/lightning/ln_utils.rs b/mm2src/coins/lightning/ln_utils.rs index 262d74b811..c08c59035c 100644 --- a/mm2src/coins/lightning/ln_utils.rs +++ b/mm2src/coins/lightning/ln_utils.rs @@ -146,10 +146,7 @@ pub async fn init_channel_manager( let chain_monitor_for_args = chain_monitor.clone(); let (channel_manager_blockhash, channel_manager, channelmonitors) = async_blocking(move || { - let mut manager_file = match File::open(persister.manager_path()) { - Ok(f) => f, - Err(e) => return Err(e.into()), - }; + let mut manager_file = File::open(persister.manager_path())?; let mut channel_monitor_mut_references = Vec::new(); for (_, channel_monitor) in channelmonitors.iter_mut() { diff --git a/mm2src/coins/my_tx_history_v2.rs b/mm2src/coins/my_tx_history_v2.rs index c297264ab2..df49663e29 100644 --- a/mm2src/coins/my_tx_history_v2.rs +++ b/mm2src/coins/my_tx_history_v2.rs @@ -271,7 +271,7 @@ pub enum MyTxHistoryErrorV2 { impl HttpStatusCode for MyTxHistoryErrorV2 { fn status_code(&self) -> StatusCode { match self { - MyTxHistoryErrorV2::CoinIsNotActive(_) => StatusCode::PRECONDITION_REQUIRED, + MyTxHistoryErrorV2::CoinIsNotActive(_) => StatusCode::NOT_FOUND, 
MyTxHistoryErrorV2::StorageIsNotInitialized(_) | MyTxHistoryErrorV2::StorageError(_) | MyTxHistoryErrorV2::RpcError(_) diff --git a/mm2src/coins/rpc_command/get_current_mtp.rs b/mm2src/coins/rpc_command/get_current_mtp.rs index 46fa0f3034..24b4f563b9 100644 --- a/mm2src/coins/rpc_command/get_current_mtp.rs +++ b/mm2src/coins/rpc_command/get_current_mtp.rs @@ -31,7 +31,7 @@ pub enum GetCurrentMtpError { impl HttpStatusCode for GetCurrentMtpError { fn status_code(&self) -> StatusCode { match self { - GetCurrentMtpError::NoSuchCoin(_) => StatusCode::PRECONDITION_REQUIRED, + GetCurrentMtpError::NoSuchCoin(_) => StatusCode::NOT_FOUND, GetCurrentMtpError::NotSupportedCoin(_) => StatusCode::BAD_REQUEST, GetCurrentMtpError::RpcError(_) => StatusCode::INTERNAL_SERVER_ERROR, } diff --git a/mm2src/coins/utxo/rpc_clients.rs b/mm2src/coins/utxo/rpc_clients.rs index 29e3d6d765..6b5fdff067 100644 --- a/mm2src/coins/utxo/rpc_clients.rs +++ b/mm2src/coins/utxo/rpc_clients.rs @@ -1904,6 +1904,7 @@ impl ElectrumClient { let len = CompactInteger::from(headers.count); let mut serialized = serialize(&len).take(); serialized.extend(headers.hex.0.into_iter()); + drop_mutability!(serialized); let mut reader = Reader::new_with_coin_variant(serialized.as_slice(), coin_name.as_str().into()); let maybe_block_headers = reader.read_list::(); From 632401cbfbdf13b03e1c1cc3cf6a15c25122bbc7 Mon Sep 17 00:00:00 2001 From: shamardy Date: Tue, 6 Sep 2022 00:29:38 +0200 Subject: [PATCH 32/33] review fixes wip: invalid_data_err, fix ordering --- .../lightning/ln_filesystem_persister.rs | 97 +++++++------------ mm2src/coins/lightning/ln_platform.rs | 16 +-- mm2src/coins/utxo/utxo_builder/mod.rs | 5 +- .../utxo/utxo_builder/utxo_arc_builder.rs | 54 +---------- .../utxo/utxo_builder/utxo_coin_builder.rs | 52 +++++++++- mm2src/mm2_io/src/fs.rs | 8 ++ 6 files changed, 104 insertions(+), 128 deletions(-) diff --git a/mm2src/coins/lightning/ln_filesystem_persister.rs 
b/mm2src/coins/lightning/ln_filesystem_persister.rs index fa6222b5c3..3c42c55e8b 100644 --- a/mm2src/coins/lightning/ln_filesystem_persister.rs +++ b/mm2src/coins/lightning/ln_filesystem_persister.rs @@ -11,7 +11,7 @@ use lightning::chain::keysinterface::{KeysInterface, Sign}; use lightning::routing::scoring::{ProbabilisticScorer, ProbabilisticScoringParameters}; use lightning::util::persist::KVStorePersister; use lightning::util::ser::{ReadableArgs, Writeable}; -use mm2_io::fs::{check_dir_operations, read_json, write_json}; +use mm2_io::fs::{check_dir_operations, invalid_data_err, read_json, write_json}; use secp256k1v22::PublicKey; use std::collections::{HashMap, HashSet}; use std::fs; @@ -111,26 +111,17 @@ impl LightningFilesystemPersister { return Ok(Vec::new()); } let mut res = Vec::new(); - for file_option in fs::read_dir(path).unwrap() { - let file = file_option.unwrap(); + for file_option in fs::read_dir(path)? { + let file = file_option?; let owned_file_name = file.file_name(); - let filename = match owned_file_name.to_str() { - Some(name) => name, - None => { - return Err(std::io::Error::new( - std::io::ErrorKind::InvalidData, - format!("Invalid ChannelMonitor file name: {:?}", owned_file_name), - )) - }, - }; + let filename = owned_file_name.to_str().ok_or_else(|| { + invalid_data_err("Invalid ChannelMonitor file name", format!("{:?}", owned_file_name)) + })?; if filename == "checkval" { continue; } if !filename.is_ascii() || filename.len() < 65 { - return Err(std::io::Error::new( - std::io::ErrorKind::InvalidData, - format!("Invalid ChannelMonitor file name: {}", filename), - )); + return Err(invalid_data_err("Invalid ChannelMonitor file name", filename)); } if filename.ends_with(".tmp") { // If we were in the middle of committing an new update and crashed, it should be @@ -139,47 +130,28 @@ impl LightningFilesystemPersister { continue; } - let txid = match Txid::from_hex(filename.split_at(64).0) { - Ok(tx_id) => tx_id, - Err(e) => { - return 
Err(std::io::Error::new( - std::io::ErrorKind::InvalidData, - format!("Invalid tx ID in filename error: {}", e), - )) - }, - }; + let txid = Txid::from_hex(filename.split_at(64).0) + .map_err(|e| invalid_data_err("Invalid tx ID in filename error", e))?; - let index = match filename.split_at(65).1.parse::() { - Ok(i) => i, - Err(e) => { - return Err(std::io::Error::new( - std::io::ErrorKind::InvalidData, - format!("Invalid tx index in filename error: {}", e), - )) - }, - }; + let index = filename + .split_at(65) + .1 + .parse::() + .map_err(|e| invalid_data_err("Invalid tx index in filename error", e))?; let contents = fs::read(&file.path())?; let mut buffer = Cursor::new(&contents); - match <(BlockHash, ChannelMonitor)>::read(&mut buffer, &*keys_manager) { - Ok((blockhash, channel_monitor)) => { - if channel_monitor.get_funding_txo().0.txid != txid - || channel_monitor.get_funding_txo().0.index != index - { - return Err(std::io::Error::new( - std::io::ErrorKind::InvalidData, - "ChannelMonitor was stored in the wrong file", - )); - } - res.push((blockhash, channel_monitor)); - }, - Err(e) => { - return Err(std::io::Error::new( - std::io::ErrorKind::InvalidData, - format!("Failed to deserialize ChannelMonitor: {}", e), - )) - }, + let (blockhash, channel_monitor) = <(BlockHash, ChannelMonitor)>::read(&mut buffer, &*keys_manager) + .map_err(|e| invalid_data_err("Failed to deserialize ChannelMonitor", e))?; + + if channel_monitor.get_funding_txo().0.txid != txid || channel_monitor.get_funding_txo().0.index != index { + return Err(invalid_data_err( + "ChannelMonitor was stored in the wrong file", + filename, + )); } + + res.push((blockhash, channel_monitor)); } Ok(res) } @@ -329,14 +301,13 @@ impl LightningStorage for LightningFilesystemPersister { let nodes_addresses: HashMap = read_json(&path) .await - .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string()))? + .map_err(|e| invalid_data_err("Error", e))?
.ok_or_else(|| std::io::Error::from(std::io::ErrorKind::NotFound))?; nodes_addresses .iter() .map(|(pubkey_str, addr)| { - let pubkey = PublicKey::from_str(pubkey_str) - .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?; + let pubkey = PublicKey::from_str(pubkey_str).map_err(|e| invalid_data_err("Error", e))?; Ok((pubkey, *addr)) }) .collect() @@ -354,12 +325,12 @@ impl LightningStorage for LightningFilesystemPersister { write_json(&nodes_addresses, &path, USE_TMP_FILE) .await - .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string()))?; + .map_err(|e| invalid_data_err("Error", e))?; if let Some(path) = backup_path { write_json(&nodes_addresses, &path, USE_TMP_FILE) .await - .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string()))?; + .map_err(|e| invalid_data_err("Error", e))?; } Ok(()) @@ -373,8 +344,7 @@ impl LightningStorage for LightningFilesystemPersister { async_blocking(move || { let file = fs::File::open(path)?; common::log::info!("Reading the saved lightning network graph from file, this can take some time!"); - NetworkGraph::read(&mut BufReader::new(file), logger) - .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string())) + NetworkGraph::read(&mut BufReader::new(file), logger).map_err(|e| invalid_data_err("Error", e)) }) .await } @@ -394,7 +364,7 @@ impl LightningStorage for LightningFilesystemPersister { &mut BufReader::new(file), (ProbabilisticScoringParameters::default(), network_graph, logger), ) - .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string()))?; + .map_err(|e| invalid_data_err("Error", e))?; Ok(Mutex::new(scorer)) }) .await @@ -408,14 +378,13 @@ impl LightningStorage for LightningFilesystemPersister { let trusted_nodes: HashSet = read_json(&path) .await - .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string()))? + .map_err(|e| invalid_data_err("Error", e))? 
.ok_or_else(|| std::io::Error::from(std::io::ErrorKind::NotFound))?; trusted_nodes .iter() .map(|pubkey_str| { - let pubkey = PublicKey::from_str(pubkey_str) - .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?; + let pubkey = PublicKey::from_str(pubkey_str).map_err(|e| invalid_data_err("Error", e))?; Ok(pubkey) }) .collect() @@ -426,6 +395,6 @@ impl LightningStorage for LightningFilesystemPersister { let trusted_nodes: HashSet = trusted_nodes.lock().iter().map(|pubkey| pubkey.to_string()).collect(); write_json(&trusted_nodes, &path, USE_TMP_FILE) .await - .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string())) + .map_err(|e| invalid_data_err("Error", e)) } } diff --git a/mm2src/coins/lightning/ln_platform.rs b/mm2src/coins/lightning/ln_platform.rs index f9dff6db6e..3a271bb65c 100644 --- a/mm2src/coins/lightning/ln_platform.rs +++ b/mm2src/coins/lightning/ln_platform.rs @@ -141,13 +141,13 @@ pub struct LatestFees { impl LatestFees { #[inline] - fn set_background_fees(&self, fee: u64) { self.background.store(fee, Ordering::Relaxed); } + fn set_background_fees(&self, fee: u64) { self.background.store(fee, Ordering::Release); } #[inline] - fn set_normal_fees(&self, fee: u64) { self.normal.store(fee, Ordering::Relaxed); } + fn set_normal_fees(&self, fee: u64) { self.normal.store(fee, Ordering::Release); } #[inline] - fn set_high_priority_fees(&self, fee: u64) { self.high_priority.store(fee, Ordering::Relaxed); } + fn set_high_priority_fees(&self, fee: u64) { self.high_priority.store(fee, Ordering::Release); } } pub struct Platform { @@ -242,11 +242,11 @@ impl Platform { #[inline] pub fn update_best_block_height(&self, new_height: u64) { - self.best_block_height.store(new_height, AtomicOrdering::Relaxed); + self.best_block_height.store(new_height, AtomicOrdering::Release); } #[inline] - pub fn best_block_height(&self) -> u64 { self.best_block_height.load(AtomicOrdering::Relaxed) } + pub fn best_block_height(&self) -> u64 { 
self.best_block_height.load(AtomicOrdering::Acquire) } pub fn add_tx(&self, txid: Txid) { let mut registered_txs = self.registered_txs.lock(); @@ -525,9 +525,9 @@ impl FeeEstimator for Platform { let platform_coin = &self.coin; let latest_fees = match confirmation_target { - ConfirmationTarget::Background => self.latest_fees.background.load(Ordering::Relaxed), - ConfirmationTarget::Normal => self.latest_fees.normal.load(Ordering::Relaxed), - ConfirmationTarget::HighPriority => self.latest_fees.high_priority.load(Ordering::Relaxed), + ConfirmationTarget::Background => self.latest_fees.background.load(Ordering::Acquire), + ConfirmationTarget::Normal => self.latest_fees.normal.load(Ordering::Acquire), + ConfirmationTarget::HighPriority => self.latest_fees.high_priority.load(Ordering::Acquire), }; let conf = &platform_coin.as_ref().conf; diff --git a/mm2src/coins/utxo/utxo_builder/mod.rs b/mm2src/coins/utxo/utxo_builder/mod.rs index 8633c3e5bd..22e82a589c 100644 --- a/mm2src/coins/utxo/utxo_builder/mod.rs +++ b/mm2src/coins/utxo/utxo_builder/mod.rs @@ -2,9 +2,8 @@ mod utxo_arc_builder; mod utxo_coin_builder; mod utxo_conf_builder; -pub use utxo_arc_builder::{BlockHeaderUtxoArcOps, MergeUtxoArcOps, UtxoArcBuilder, UtxoSyncStatus, - UtxoSyncStatusLoopHandle}; +pub use utxo_arc_builder::{BlockHeaderUtxoArcOps, MergeUtxoArcOps, UtxoArcBuilder}; pub use utxo_coin_builder::{UtxoCoinBuildError, UtxoCoinBuildResult, UtxoCoinBuilder, UtxoCoinBuilderCommonOps, UtxoCoinWithIguanaPrivKeyBuilder, UtxoFieldsWithHardwareWalletBuilder, - UtxoFieldsWithIguanaPrivKeyBuilder}; + UtxoFieldsWithIguanaPrivKeyBuilder, UtxoSyncStatus, UtxoSyncStatusLoopHandle}; pub use utxo_conf_builder::{UtxoConfBuilder, UtxoConfError, UtxoConfResult}; diff --git a/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs b/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs index e44305490f..bc0a10947e 100644 --- a/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs +++ 
b/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs @@ -1,65 +1,17 @@ use crate::utxo::utxo_builder::{UtxoCoinBuildError, UtxoCoinBuilder, UtxoCoinBuilderCommonOps, - UtxoFieldsWithHardwareWalletBuilder, UtxoFieldsWithIguanaPrivKeyBuilder}; + UtxoFieldsWithHardwareWalletBuilder, UtxoFieldsWithIguanaPrivKeyBuilder, + UtxoSyncStatusLoopHandle}; use crate::utxo::utxo_common::{block_header_utxo_loop, merge_utxo_loop}; use crate::utxo::{GetUtxoListOps, UtxoArc, UtxoCommonOps, UtxoWeak}; use crate::{PrivKeyBuildPolicy, UtxoActivationParams}; use async_trait::async_trait; use common::executor::spawn; -use common::log::{info, LogOnError}; -use futures::channel::mpsc::Sender as AsyncSender; +use common::log::info; use futures::future::{abortable, AbortHandle}; use mm2_core::mm_ctx::MmArc; use mm2_err_handle::prelude::*; use serde_json::Value as Json; -pub enum UtxoSyncStatus { - SyncingBlockHeaders { - current_scanned_block: u64, - last_block: u64, - }, - TemporaryError(String), - PermanentError(String), - Finished { - block_number: u64, - }, -} - -#[derive(Clone)] -pub struct UtxoSyncStatusLoopHandle(AsyncSender); - -impl UtxoSyncStatusLoopHandle { - pub fn new(sync_status_notifier: AsyncSender) -> Self { - UtxoSyncStatusLoopHandle(sync_status_notifier) - } - - pub fn notify_blocks_headers_sync_status(&mut self, current_scanned_block: u64, last_block: u64) { - self.0 - .try_send(UtxoSyncStatus::SyncingBlockHeaders { - current_scanned_block, - last_block, - }) - .debug_log_with_msg("No one seems interested in UtxoSyncStatus"); - } - - pub fn notify_on_temp_error(&mut self, error: String) { - self.0 - .try_send(UtxoSyncStatus::TemporaryError(error)) - .debug_log_with_msg("No one seems interested in UtxoSyncStatus"); - } - - pub fn notify_on_permanent_error(&mut self, error: String) { - self.0 - .try_send(UtxoSyncStatus::PermanentError(error)) - .debug_log_with_msg("No one seems interested in UtxoSyncStatus"); - } - - pub fn notify_sync_finished(&mut self, block_number: u64) 
{ - self.0 - .try_send(UtxoSyncStatus::Finished { block_number }) - .debug_log_with_msg("No one seems interested in UtxoSyncStatus"); - } -} - pub struct UtxoArcBuilder<'a, F, T> where F: Fn(UtxoArc) -> T + Send + Sync + 'static, diff --git a/mm2src/coins/utxo/utxo_builder/utxo_coin_builder.rs b/mm2src/coins/utxo/utxo_builder/utxo_coin_builder.rs index 2562e05bfc..66b349e6b9 100644 --- a/mm2src/coins/utxo/utxo_builder/utxo_coin_builder.rs +++ b/mm2src/coins/utxo/utxo_builder/utxo_coin_builder.rs @@ -4,7 +4,6 @@ use crate::utxo::rpc_clients::{ElectrumClient, ElectrumClientImpl, ElectrumRpcRe UtxoRpcClientEnum}; use crate::utxo::tx_cache::{UtxoVerboseCacheOps, UtxoVerboseCacheShared}; use crate::utxo::utxo_block_header_storage::BlockHeaderStorage; -use crate::utxo::utxo_builder::utxo_arc_builder::UtxoSyncStatusLoopHandle; use crate::utxo::utxo_builder::utxo_conf_builder::{UtxoConfBuilder, UtxoConfError, UtxoConfResult}; use crate::utxo::{output_script, utxo_common, ElectrumBuilderArgs, ElectrumProtoVerifier, RecentlySpentOutPoints, TxFee, UtxoCoinConf, UtxoCoinFields, UtxoHDAccount, UtxoHDWallet, UtxoRpcMode, DEFAULT_GAP_LIMIT, @@ -14,11 +13,12 @@ use crate::{BlockchainNetwork, CoinTransportMetrics, DerivationMethod, HistorySy use async_trait::async_trait; use chain::TxHashAlgo; use common::executor::{spawn, Timer}; -use common::log::{error, info}; +use common::log::{error, info, LogOnError}; use common::small_rng; use crypto::{Bip32DerPathError, Bip44DerPathError, Bip44PathToCoin, CryptoCtx, CryptoInitError, HwWalletType}; use derive_more::Display; use futures::channel::mpsc; +use futures::channel::mpsc::Sender as AsyncSender; use futures::compat::Future01CompatExt; use futures::lock::Mutex as AsyncMutex; use futures::StreamExt; @@ -104,6 +104,54 @@ impl From for UtxoCoinBuildError { fn from(e: BlockHeaderStorageError) -> Self { UtxoCoinBuildError::BlockHeaderStorageError(e) } } +pub enum UtxoSyncStatus { + SyncingBlockHeaders { + current_scanned_block: u64, + 
last_block: u64, + }, + TemporaryError(String), + PermanentError(String), + Finished { + block_number: u64, + }, +} + +#[derive(Clone)] +pub struct UtxoSyncStatusLoopHandle(AsyncSender); + +impl UtxoSyncStatusLoopHandle { + pub fn new(sync_status_notifier: AsyncSender) -> Self { + UtxoSyncStatusLoopHandle(sync_status_notifier) + } + + pub fn notify_blocks_headers_sync_status(&mut self, current_scanned_block: u64, last_block: u64) { + self.0 + .try_send(UtxoSyncStatus::SyncingBlockHeaders { + current_scanned_block, + last_block, + }) + .debug_log_with_msg("No one seems interested in UtxoSyncStatus"); + } + + pub fn notify_on_temp_error(&mut self, error: String) { + self.0 + .try_send(UtxoSyncStatus::TemporaryError(error)) + .debug_log_with_msg("No one seems interested in UtxoSyncStatus"); + } + + pub fn notify_on_permanent_error(&mut self, error: String) { + self.0 + .try_send(UtxoSyncStatus::PermanentError(error)) + .debug_log_with_msg("No one seems interested in UtxoSyncStatus"); + } + + pub fn notify_sync_finished(&mut self, block_number: u64) { + self.0 + .try_send(UtxoSyncStatus::Finished { block_number }) + .debug_log_with_msg("No one seems interested in UtxoSyncStatus"); + } +} + #[async_trait] pub trait UtxoCoinBuilder: UtxoFieldsWithIguanaPrivKeyBuilder + UtxoFieldsWithHardwareWalletBuilder { type ResultCoin; diff --git a/mm2src/mm2_io/src/fs.rs b/mm2src/mm2_io/src/fs.rs index 76ff7e2864..e844086d89 100644 --- a/mm2src/mm2_io/src/fs.rs +++ b/mm2src/mm2_io/src/fs.rs @@ -25,6 +25,14 @@ pub enum FsJsonError { Deserializing(JsonError), } +#[inline] +pub fn invalid_data_err(msg: &str, err: Error) -> io::Error +where + Error: std::fmt::Display, +{ + io::Error::new(std::io::ErrorKind::InvalidData, format!("{}: {}", msg, err)) +} + pub fn check_dir_operations(dir_path: &Path) -> Result<(), io::Error> { let r: [u8; 32] = random(); let mut check: Vec = Vec::with_capacity(r.len()); From 83d34f3d4fc6d4cec78d7c4883e94667819f1ccb Mon Sep 17 00:00:00 2001 From: shamardy 
Date: Tue, 6 Sep 2022 21:06:58 +0200 Subject: [PATCH 33/33] review fixes: block_headers_status_notifier, block_headers_status_watcher --- mm2src/coins/qrc20.rs | 5 +- mm2src/coins/utxo.rs | 59 ++++- mm2src/coins/utxo/bch.rs | 14 +- mm2src/coins/utxo/qtum.rs | 4 +- mm2src/coins/utxo/utxo_builder/mod.rs | 2 +- .../utxo/utxo_builder/utxo_arc_builder.rs | 203 +++++++++++++++--- .../utxo/utxo_builder/utxo_coin_builder.rs | 86 +++----- mm2src/coins/utxo/utxo_common.rs | 143 +----------- mm2src/coins/utxo/utxo_standard.rs | 1 - mm2src/coins/utxo/utxo_tests.rs | 3 +- mm2src/coins/z_coin.rs | 4 +- .../init_utxo_standard_activation.rs | 20 +- 12 files changed, 271 insertions(+), 273 deletions(-) diff --git a/mm2src/coins/qrc20.rs b/mm2src/coins/qrc20.rs index 1afd5dcc31..d137e75db4 100644 --- a/mm2src/coins/qrc20.rs +++ b/mm2src/coins/qrc20.rs @@ -7,8 +7,7 @@ use crate::utxo::rpc_clients::{ElectrumClient, NativeClient, UnspentInfo, UtxoRp #[cfg(not(target_arch = "wasm32"))] use crate::utxo::tx_cache::{UtxoVerboseCacheOps, UtxoVerboseCacheShared}; use crate::utxo::utxo_builder::{UtxoCoinBuildError, UtxoCoinBuildResult, UtxoCoinBuilderCommonOps, - UtxoCoinWithIguanaPrivKeyBuilder, UtxoFieldsWithIguanaPrivKeyBuilder, - UtxoSyncStatusLoopHandle}; + UtxoCoinWithIguanaPrivKeyBuilder, UtxoFieldsWithIguanaPrivKeyBuilder}; use crate::utxo::utxo_common::{self, big_decimal_from_sat, check_all_inputs_signed_by_pub, UtxoTxBuilder}; use crate::utxo::{qtum, ActualTxFee, AdditionalTxData, BroadcastTxErr, FeePolicy, GenerateTxError, GetUtxoListOps, HistoryUtxoTx, HistoryUtxoTxMap, MatureUnspentList, RecentlySpentOutPointsGuard, @@ -190,8 +189,6 @@ impl<'a> UtxoCoinBuilderCommonOps for Qrc20CoinBuilder<'a> { fn ticker(&self) -> &str { self.ticker } - fn sync_status_loop_handle(&self) -> Option { None } - async fn decimals(&self, rpc_client: &UtxoRpcClientEnum) -> UtxoCoinBuildResult { if let Some(d) = self.conf()["decimals"].as_u64() { return Ok(d as u8); diff --git a/mm2src/coins/utxo.rs 
b/mm2src/coins/utxo.rs index 0b9998a533..ab33cf0940 100644 --- a/mm2src/coins/utxo.rs +++ b/mm2src/coins/utxo.rs @@ -47,13 +47,14 @@ use chain::{OutPoint, TransactionOutput, TxHashAlgo}; #[cfg(not(target_arch = "wasm32"))] use common::first_char_to_upper; use common::jsonrpc_client::JsonRpcError; +use common::log::LogOnError; use common::now_ms; use crypto::trezor::utxo::TrezorUtxoCoin; use crypto::{Bip32DerPathOps, Bip32Error, Bip44Chain, Bip44DerPathError, Bip44PathToAccount, Bip44PathToCoin, ChildNumber, DerivationPath, Secp256k1ExtendedPublicKey}; use derive_more::Display; #[cfg(not(target_arch = "wasm32"))] use dirs::home_dir; -use futures::channel::mpsc; +use futures::channel::mpsc::{Receiver as AsyncReceiver, Sender as AsyncSender, UnboundedSender}; use futures::compat::Future01CompatExt; use futures::lock::{Mutex as AsyncMutex, MutexGuard as AsyncMutexGuard}; use futures01::Future; @@ -440,6 +441,54 @@ impl From for LightningCurrency { } } +pub enum UtxoSyncStatus { + SyncingBlockHeaders { + current_scanned_block: u64, + last_block: u64, + }, + TemporaryError(String), + PermanentError(String), + Finished { + block_number: u64, + }, +} + +#[derive(Clone)] +pub struct UtxoSyncStatusLoopHandle(AsyncSender); + +impl UtxoSyncStatusLoopHandle { + pub fn new(sync_status_notifier: AsyncSender) -> Self { + UtxoSyncStatusLoopHandle(sync_status_notifier) + } + + pub fn notify_blocks_headers_sync_status(&mut self, current_scanned_block: u64, last_block: u64) { + self.0 + .try_send(UtxoSyncStatus::SyncingBlockHeaders { + current_scanned_block, + last_block, + }) + .debug_log_with_msg("No one seems interested in UtxoSyncStatus"); + } + + pub fn notify_on_temp_error(&mut self, error: String) { + self.0 + .try_send(UtxoSyncStatus::TemporaryError(error)) + .debug_log_with_msg("No one seems interested in UtxoSyncStatus"); + } + + pub fn notify_on_permanent_error(&mut self, error: String) { + self.0 + .try_send(UtxoSyncStatus::PermanentError(error)) + .debug_log_with_msg("No 
one seems interested in UtxoSyncStatus"); + } + + pub fn notify_sync_finished(&mut self, block_number: u64) { + self.0 + .try_send(UtxoSyncStatus::Finished { block_number }) + .debug_log_with_msg("No one seems interested in UtxoSyncStatus"); + } +} + #[derive(Debug)] pub struct UtxoCoinConf { pub ticker: String, @@ -545,6 +594,12 @@ pub struct UtxoCoinFields { /// The flag determines whether to use mature unspent outputs *only* to generate transactions. /// https://github.com/KomodoPlatform/atomicDEX-API/issues/1181 pub check_utxo_maturity: bool, + /// The notifier/sender of the block headers synchronization status, + /// initialized only for non-native mode if spv is enabled for the coin. + pub block_headers_status_notifier: Option, + /// The watcher/receiver of the block headers synchronization status, + /// initialized only for non-native mode if spv is enabled for the coin. + pub block_headers_status_watcher: Option>>, } #[derive(Debug, Display)] @@ -1229,7 +1284,7 @@ pub fn coin_daemon_data_dir(name: &str, is_asset_chain: bool) -> PathBuf { /// Electrum protocol version verifier. /// The structure is used to handle the `on_connected` event and notify `electrum_version_loop`. 
struct ElectrumProtoVerifier { - on_connect_tx: mpsc::UnboundedSender, + on_connect_tx: UnboundedSender, } impl ElectrumProtoVerifier { diff --git a/mm2src/coins/utxo/bch.rs b/mm2src/coins/utxo/bch.rs index 06ee5d8a57..5b7de796c3 100644 --- a/mm2src/coins/utxo/bch.rs +++ b/mm2src/coins/utxo/bch.rs @@ -656,17 +656,9 @@ pub async fn bch_coin_from_conf_and_params( let priv_key_policy = PrivKeyBuildPolicy::IguanaPrivKey(priv_key); let coin = try_s!( - UtxoArcBuilder::new( - ctx, - ticker, - conf, - ¶ms.utxo_params, - priv_key_policy, - None, - constructor - ) - .build() - .await + UtxoArcBuilder::new(ctx, ticker, conf, ¶ms.utxo_params, priv_key_policy, constructor) + .build() + .await ); Ok(coin) } diff --git a/mm2src/coins/utxo/qtum.rs b/mm2src/coins/utxo/qtum.rs index dc27555d12..4db3fc408c 100644 --- a/mm2src/coins/utxo/qtum.rs +++ b/mm2src/coins/utxo/qtum.rs @@ -14,7 +14,7 @@ use crate::rpc_command::init_scan_for_new_addresses::{self, InitScanAddressesRpc use crate::rpc_command::init_withdraw::{InitWithdrawCoin, WithdrawTaskHandle}; use crate::utxo::utxo_builder::{BlockHeaderUtxoArcOps, MergeUtxoArcOps, UtxoCoinBuildError, UtxoCoinBuilder, UtxoCoinBuilderCommonOps, UtxoFieldsWithHardwareWalletBuilder, - UtxoFieldsWithIguanaPrivKeyBuilder, UtxoSyncStatusLoopHandle}; + UtxoFieldsWithIguanaPrivKeyBuilder}; use crate::{eth, CanRefundHtlc, CoinBalance, CoinWithDerivationMethod, DelegationError, DelegationFut, GetWithdrawSenderAddress, NegotiateSwapContractAddrErr, PrivKeyBuildPolicy, SearchForSwapTxSpendInput, SignatureResult, StakingInfosFut, SwapOps, TradePreimageValue, TransactionFut, TxMarshalingErr, @@ -202,8 +202,6 @@ impl<'a> UtxoCoinBuilderCommonOps for QtumCoinBuilder<'a> { fn ticker(&self) -> &str { self.ticker } - fn sync_status_loop_handle(&self) -> Option { None } - fn check_utxo_maturity(&self) -> bool { self.activation_params().check_utxo_maturity.unwrap_or(true) } } diff --git a/mm2src/coins/utxo/utxo_builder/mod.rs 
b/mm2src/coins/utxo/utxo_builder/mod.rs index 22e82a589c..cd48444513 100644 --- a/mm2src/coins/utxo/utxo_builder/mod.rs +++ b/mm2src/coins/utxo/utxo_builder/mod.rs @@ -5,5 +5,5 @@ mod utxo_conf_builder; pub use utxo_arc_builder::{BlockHeaderUtxoArcOps, MergeUtxoArcOps, UtxoArcBuilder}; pub use utxo_coin_builder::{UtxoCoinBuildError, UtxoCoinBuildResult, UtxoCoinBuilder, UtxoCoinBuilderCommonOps, UtxoCoinWithIguanaPrivKeyBuilder, UtxoFieldsWithHardwareWalletBuilder, - UtxoFieldsWithIguanaPrivKeyBuilder, UtxoSyncStatus, UtxoSyncStatusLoopHandle}; + UtxoFieldsWithIguanaPrivKeyBuilder}; pub use utxo_conf_builder::{UtxoConfBuilder, UtxoConfError, UtxoConfResult}; diff --git a/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs b/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs index bc0a10947e..fc49c9f36d 100644 --- a/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs +++ b/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs @@ -1,16 +1,23 @@ +use crate::utxo::rpc_clients::UtxoRpcClientEnum; use crate::utxo::utxo_builder::{UtxoCoinBuildError, UtxoCoinBuilder, UtxoCoinBuilderCommonOps, - UtxoFieldsWithHardwareWalletBuilder, UtxoFieldsWithIguanaPrivKeyBuilder, - UtxoSyncStatusLoopHandle}; -use crate::utxo::utxo_common::{block_header_utxo_loop, merge_utxo_loop}; -use crate::utxo::{GetUtxoListOps, UtxoArc, UtxoCommonOps, UtxoWeak}; -use crate::{PrivKeyBuildPolicy, UtxoActivationParams}; + UtxoFieldsWithHardwareWalletBuilder, UtxoFieldsWithIguanaPrivKeyBuilder}; +use crate::utxo::{generate_and_send_tx, FeePolicy, GetUtxoListOps, UtxoArc, UtxoCommonOps, UtxoSyncStatusLoopHandle, + UtxoWeak}; +use crate::{DerivationMethod, PrivKeyBuildPolicy, UtxoActivationParams}; use async_trait::async_trait; -use common::executor::spawn; -use common::log::info; +use chain::TransactionOutput; +use common::executor::{spawn, Timer}; +use common::log::{error, info, warn}; +use futures::compat::Future01CompatExt; use futures::future::{abortable, AbortHandle}; use mm2_core::mm_ctx::MmArc; use 
mm2_err_handle::prelude::*; +use script::Builder; use serde_json::Value as Json; +use spv_validation::helpers_validation::validate_headers; +use spv_validation::storage::BlockHeaderStorageOps; + +const BLOCK_HEADERS_LOOP_INTERVAL: f64 = 60.; pub struct UtxoArcBuilder<'a, F, T> where @@ -21,7 +28,6 @@ where conf: &'a Json, activation_params: &'a UtxoActivationParams, priv_key_policy: PrivKeyBuildPolicy<'a>, - sync_status_loop_handle: Option, constructor: F, } @@ -35,7 +41,6 @@ where conf: &'a Json, activation_params: &'a UtxoActivationParams, priv_key_policy: PrivKeyBuildPolicy<'a>, - sync_status_loop_handle: Option, constructor: F, ) -> UtxoArcBuilder<'a, F, T> { UtxoArcBuilder { @@ -44,7 +49,6 @@ where conf, activation_params, priv_key_policy, - sync_status_loop_handle, constructor, } } @@ -62,8 +66,6 @@ where fn activation_params(&self) -> &UtxoActivationParams { self.activation_params } fn ticker(&self) -> &str { self.ticker } - - fn sync_status_loop_handle(&self) -> Option { self.sync_status_loop_handle.clone() } } impl<'a, F, T> UtxoFieldsWithIguanaPrivKeyBuilder for UtxoArcBuilder<'a, F, T> where @@ -89,6 +91,7 @@ where async fn build(self) -> MmResult { let utxo = self.build_utxo_fields().await?; + let sync_status_loop_handle = utxo.block_headers_status_notifier.clone(); let utxo_arc = UtxoArc::new(utxo); let utxo_weak = utxo_arc.downgrade(); let result_coin = (self.constructor)(utxo_arc); @@ -98,11 +101,12 @@ where self.ctx.abort_handlers.lock().unwrap().push(abort_handler); } - // This only works for v2 utxo activation since sync_status_loop_handle is initialized there only. 
- if let Some(abort_handler) = self.spawn_block_header_utxo_loop_if_required(utxo_weak, self.constructor.clone()) - { + if let Some(sync_status_loop_handle) = sync_status_loop_handle { + let abort_handler = + self.spawn_block_header_utxo_loop(utxo_weak, self.constructor.clone(), sync_status_loop_handle); self.ctx.abort_handlers.lock().unwrap().push(abort_handler); } + Ok(result_coin) } } @@ -121,6 +125,65 @@ where { } +async fn merge_utxo_loop( + weak: UtxoWeak, + merge_at: usize, + check_every: f64, + max_merge_at_once: usize, + constructor: impl Fn(UtxoArc) -> T, +) where + T: UtxoCommonOps + GetUtxoListOps, +{ + loop { + Timer::sleep(check_every).await; + + let coin = match weak.upgrade() { + Some(arc) => constructor(arc), + None => break, + }; + + let my_address = match coin.as_ref().derivation_method { + DerivationMethod::Iguana(ref my_address) => my_address, + DerivationMethod::HDWallet(_) => { + warn!("'merge_utxo_loop' is currently not used for HD wallets"); + return; + }, + }; + + let ticker = &coin.as_ref().conf.ticker; + let (unspents, recently_spent) = match coin.get_unspent_ordered_list(my_address).await { + Ok((unspents, recently_spent)) => (unspents, recently_spent), + Err(e) => { + error!("Error {} on get_unspent_ordered_list of coin {}", e, ticker); + continue; + }, + }; + if unspents.len() >= merge_at { + let unspents: Vec<_> = unspents.into_iter().take(max_merge_at_once).collect(); + info!("Trying to merge {} UTXOs of coin {}", unspents.len(), ticker); + let value = unspents.iter().fold(0, |sum, unspent| sum + unspent.value); + let script_pubkey = Builder::build_p2pkh(&my_address.hash).to_bytes(); + let output = TransactionOutput { value, script_pubkey }; + let merge_tx_fut = generate_and_send_tx( + &coin, + unspents, + None, + FeePolicy::DeductFromOutput(0), + recently_spent, + vec![output], + ); + match merge_tx_fut.await { + Ok(tx) => info!( + "UTXO merge successful for coin {}, tx_hash {:?}", + ticker, + tx.hash().reversed() + ), + Err(e) => 
error!("Error {:?} on UTXO merge attempt for coin {}", e, ticker), + } + } + } +} + pub trait MergeUtxoArcOps: UtxoCoinBuilderCommonOps { fn spawn_merge_utxo_loop_if_required(&self, weak: UtxoWeak, constructor: F) -> Option where @@ -147,27 +210,103 @@ pub trait MergeUtxoArcOps: UtxoCoinBuilderCom } } +async fn block_header_utxo_loop( + weak: UtxoWeak, + constructor: impl Fn(UtxoArc) -> T, + mut sync_status_loop_handle: UtxoSyncStatusLoopHandle, +) { + while let Some(arc) = weak.upgrade() { + let coin = constructor(arc); + let client = match &coin.as_ref().rpc_client { + UtxoRpcClientEnum::Native(_) => break, + UtxoRpcClientEnum::Electrum(client) => client, + }; + + let storage = client.block_headers_storage(); + let from_block_height = match storage.get_last_block_height().await { + Ok(h) => h, + Err(e) => { + error!("Error {} on getting the height of the last stored header in DB!", e); + sync_status_loop_handle.notify_on_temp_error(e.to_string()); + Timer::sleep(10.).await; + continue; + }, + }; + + let to_block_height = match coin.as_ref().rpc_client.get_block_count().compat().await { + Ok(h) => h, + Err(e) => { + error!("Error {} on getting the height of the latest block from rpc!", e); + sync_status_loop_handle.notify_on_temp_error(e.to_string()); + Timer::sleep(10.).await; + continue; + }, + }; + + // Todo: Add code for the case if a chain reorganization happens + if from_block_height == to_block_height { + Timer::sleep(BLOCK_HEADERS_LOOP_INTERVAL).await; + continue; + } + + sync_status_loop_handle.notify_blocks_headers_sync_status(from_block_height + 1, to_block_height); + + let (block_registry, block_headers, last_retrieved_height) = match client + .retrieve_headers(from_block_height + 1, to_block_height) + .compat() + .await + { + Ok(res) => res, + Err(e) => { + error!("Error {} on retrieving the latest headers from rpc!", e); + sync_status_loop_handle.notify_on_temp_error(e.to_string()); + Timer::sleep(10.).await; + continue; + }, + }; + + let ticker = 
coin.as_ref().conf.ticker.as_str(); + if let Some(params) = &coin.as_ref().conf.block_headers_verification_params { + if let Err(e) = validate_headers(ticker, from_block_height, block_headers, storage, params).await { + error!("Error {} on validating the latest headers!", e); + // Todo: remove this electrum server and use another in this case since the headers from this server are invalid + sync_status_loop_handle.notify_on_permanent_error(e.to_string()); + break; + } + } + + ok_or_continue_after_sleep!( + storage.add_block_headers_to_storage(block_registry).await, + BLOCK_HEADERS_LOOP_INTERVAL + ); + + // blockchain.block.headers returns a maximum of 2016 headers (tested for btc) so the loop needs to continue until we have all headers up to the current one. + if last_retrieved_height == to_block_height { + sync_status_loop_handle.notify_sync_finished(to_block_height); + Timer::sleep(BLOCK_HEADERS_LOOP_INTERVAL).await; + } + } +} + pub trait BlockHeaderUtxoArcOps: UtxoCoinBuilderCommonOps { - fn spawn_block_header_utxo_loop_if_required(&self, weak: UtxoWeak, constructor: F) -> Option + fn spawn_block_header_utxo_loop( + &self, + weak: UtxoWeak, + constructor: F, + sync_status_loop_handle: UtxoSyncStatusLoopHandle, + ) -> AbortHandle where F: Fn(UtxoArc) -> T + Send + Sync + 'static, T: UtxoCommonOps, { - if let Some(sync_status_loop_handle) = self.sync_status_loop_handle() { - let ticker = self.ticker().to_owned(); - let (fut, abort_handle) = abortable(block_header_utxo_loop(weak, constructor, sync_status_loop_handle)); - info!("Starting UTXO block header loop for coin {}", ticker); - spawn(async move { - if let Err(e) = fut.await { - info!( - "spawn_block_header_utxo_loop_if_required stopped for {}, reason {}", - ticker, e - ); - } - }); - return Some(abort_handle); - } - - None + let ticker = self.ticker().to_owned(); + let (fut, abort_handle) = abortable(block_header_utxo_loop(weak, constructor, sync_status_loop_handle)); + info!("Starting UTXO block header loop 
for coin {}", ticker); + spawn(async move { + if let Err(e) = fut.await { + info!("spawn_block_header_utxo_loop stopped for {}, reason {}", ticker, e); + } + }); + abort_handle } } diff --git a/mm2src/coins/utxo/utxo_builder/utxo_coin_builder.rs b/mm2src/coins/utxo/utxo_builder/utxo_coin_builder.rs index 66b349e6b9..eb3730cfa7 100644 --- a/mm2src/coins/utxo/utxo_builder/utxo_coin_builder.rs +++ b/mm2src/coins/utxo/utxo_builder/utxo_coin_builder.rs @@ -6,19 +6,18 @@ use crate::utxo::tx_cache::{UtxoVerboseCacheOps, UtxoVerboseCacheShared}; use crate::utxo::utxo_block_header_storage::BlockHeaderStorage; use crate::utxo::utxo_builder::utxo_conf_builder::{UtxoConfBuilder, UtxoConfError, UtxoConfResult}; use crate::utxo::{output_script, utxo_common, ElectrumBuilderArgs, ElectrumProtoVerifier, RecentlySpentOutPoints, - TxFee, UtxoCoinConf, UtxoCoinFields, UtxoHDAccount, UtxoHDWallet, UtxoRpcMode, DEFAULT_GAP_LIMIT, - UTXO_DUST_AMOUNT}; + TxFee, UtxoCoinConf, UtxoCoinFields, UtxoHDAccount, UtxoHDWallet, UtxoRpcMode, UtxoSyncStatus, + UtxoSyncStatusLoopHandle, DEFAULT_GAP_LIMIT, UTXO_DUST_AMOUNT}; use crate::{BlockchainNetwork, CoinTransportMetrics, DerivationMethod, HistorySyncState, PrivKeyBuildPolicy, PrivKeyPolicy, RpcClientType, UtxoActivationParams}; use async_trait::async_trait; use chain::TxHashAlgo; use common::executor::{spawn, Timer}; -use common::log::{error, info, LogOnError}; +use common::log::{error, info}; use common::small_rng; use crypto::{Bip32DerPathError, Bip44DerPathError, Bip44PathToCoin, CryptoCtx, CryptoInitError, HwWalletType}; use derive_more::Display; -use futures::channel::mpsc; -use futures::channel::mpsc::Sender as AsyncSender; +use futures::channel::mpsc::{channel, unbounded, Receiver as AsyncReceiver, UnboundedReceiver}; use futures::compat::Future01CompatExt; use futures::lock::Mutex as AsyncMutex; use futures::StreamExt; @@ -104,54 +103,6 @@ impl From for UtxoCoinBuildError { fn from(e: BlockHeaderStorageError) -> Self { 
UtxoCoinBuildError::BlockHeaderStorageError(e) } } -pub enum UtxoSyncStatus { - SyncingBlockHeaders { - current_scanned_block: u64, - last_block: u64, - }, - TemporaryError(String), - PermanentError(String), - Finished { - block_number: u64, - }, -} - -#[derive(Clone)] -pub struct UtxoSyncStatusLoopHandle(AsyncSender); - -impl UtxoSyncStatusLoopHandle { - pub fn new(sync_status_notifier: AsyncSender) -> Self { - UtxoSyncStatusLoopHandle(sync_status_notifier) - } - - pub fn notify_blocks_headers_sync_status(&mut self, current_scanned_block: u64, last_block: u64) { - self.0 - .try_send(UtxoSyncStatus::SyncingBlockHeaders { - current_scanned_block, - last_block, - }) - .debug_log_with_msg("No one seems interested in UtxoSyncStatus"); - } - - pub fn notify_on_temp_error(&mut self, error: String) { - self.0 - .try_send(UtxoSyncStatus::TemporaryError(error)) - .debug_log_with_msg("No one seems interested in UtxoSyncStatus"); - } - - pub fn notify_on_permanent_error(&mut self, error: String) { - self.0 - .try_send(UtxoSyncStatus::PermanentError(error)) - .debug_log_with_msg("No one seems interested in UtxoSyncStatus"); - } - - pub fn notify_sync_finished(&mut self, block_number: u64) { - self.0 - .try_send(UtxoSyncStatus::Finished { block_number }) - .debug_log_with_msg("No one seems interested in UtxoSyncStatus"); - } -} - #[async_trait] pub trait UtxoCoinBuilder: UtxoFieldsWithIguanaPrivKeyBuilder + UtxoFieldsWithHardwareWalletBuilder { type ResultCoin; @@ -218,6 +169,7 @@ pub trait UtxoFieldsWithIguanaPrivKeyBuilder: UtxoCoinBuilderCommonOps { let tx_hash_algo = self.tx_hash_algo(); let check_utxo_maturity = self.check_utxo_maturity(); let tx_cache = self.tx_cache(); + let (block_headers_status_notifier, block_headers_status_watcher) = self.block_header_status_channel(); let coin = UtxoCoinFields { conf, @@ -232,6 +184,8 @@ pub trait UtxoFieldsWithIguanaPrivKeyBuilder: UtxoCoinBuilderCommonOps { tx_fee, tx_hash_algo, check_utxo_maturity, + 
block_headers_status_notifier, + block_headers_status_watcher, }; Ok(coin) } @@ -279,6 +233,7 @@ pub trait UtxoFieldsWithHardwareWalletBuilder: UtxoCoinBuilderCommonOps { let tx_hash_algo = self.tx_hash_algo(); let check_utxo_maturity = self.check_utxo_maturity(); let tx_cache = self.tx_cache(); + let (block_headers_status_notifier, block_headers_status_watcher) = self.block_header_status_channel(); let coin = UtxoCoinFields { conf, @@ -293,6 +248,8 @@ pub trait UtxoFieldsWithHardwareWalletBuilder: UtxoCoinBuilderCommonOps { tx_fee, tx_hash_algo, check_utxo_maturity, + block_headers_status_notifier, + block_headers_status_watcher, }; Ok(coin) } @@ -340,8 +297,6 @@ pub trait UtxoCoinBuilderCommonOps { fn ticker(&self) -> &str; - fn sync_status_loop_handle(&self) -> Option; - fn address_format(&self) -> UtxoCoinBuildResult { let format_from_req = self.activation_params().address_format.clone(); let format_from_conf = json::from_value::>(self.conf()["address_format"].clone()) @@ -462,7 +417,7 @@ pub trait UtxoCoinBuilderCommonOps { args: ElectrumBuilderArgs, mut servers: Vec, ) -> UtxoCoinBuildResult { - let (on_connect_tx, on_connect_rx) = mpsc::unbounded(); + let (on_connect_tx, on_connect_rx) = unbounded(); let ticker = self.ticker().to_owned(); let ctx = self.ctx(); let mut event_handlers = vec![]; @@ -633,6 +588,23 @@ pub trait UtxoCoinBuilderCommonOps { #[cfg(not(target_arch = "wasm32"))] fn tx_cache_path(&self) -> PathBuf { self.ctx().dbdir().join("TX_CACHE") } + + fn block_header_status_channel( + &self, + ) -> ( + Option, + Option>>, + ) { + if self.conf()["enable_spv_proof"].as_bool().unwrap_or(false) && !self.activation_params().mode.is_native() { + let (sync_status_notifier, sync_watcher) = channel(1); + ( + Some(UtxoSyncStatusLoopHandle::new(sync_status_notifier)), + Some(AsyncMutex::new(sync_watcher)), + ) + } else { + (None, None) + } + } } /// Attempts to parse native daemon conf file and return rpcport, rpcuser and rpcpassword @@ -704,7 +676,7 @@ fn 
spawn_electrum_ping_loop(weak_client: Weak, servers: Vec< /// Weak reference will allow to stop the thread if client is dropped. fn spawn_electrum_version_loop( weak_client: Weak, - mut on_connect_rx: mpsc::UnboundedReceiver, + mut on_connect_rx: UnboundedReceiver, client_name: String, ) { spawn(async move { diff --git a/mm2src/coins/utxo/utxo_common.rs b/mm2src/coins/utxo/utxo_common.rs index f78d7c9a49..f03f11771f 100644 --- a/mm2src/coins/utxo/utxo_common.rs +++ b/mm2src/coins/utxo/utxo_common.rs @@ -21,7 +21,7 @@ use chain::constants::SEQUENCE_FINAL; use chain::{OutPoint, TransactionOutput}; use common::executor::Timer; use common::jsonrpc_client::JsonRpcErrorType; -use common::log::{error, info, warn}; +use common::log::{error, warn}; use common::{now_ms, one_hundred, ten_f64}; use crypto::{Bip32DerPathOps, Bip44Chain, Bip44DerPathError, Bip44DerivationPath, RpcDerivationPath}; use futures::compat::Future01CompatExt; @@ -41,8 +41,6 @@ use secp256k1::{PublicKey, Signature}; use serde_json::{self as json}; use serialization::{deserialize, serialize, serialize_with_flags, CoinVariant, CompactInteger, Serializable, Stream, SERIALIZE_TRANSACTION_WITNESS}; -use spv_validation::helpers_validation::validate_headers; -use spv_validation::storage::BlockHeaderStorageOps; use std::cmp::Ordering; use std::collections::hash_map::{Entry, HashMap}; use std::str::FromStr; @@ -50,14 +48,12 @@ use std::sync::atomic::Ordering as AtomicOrdering; use utxo_signer::with_key_pair::p2sh_spend; use utxo_signer::UtxoSignerOps; -use crate::utxo::utxo_builder::UtxoSyncStatusLoopHandle; pub use chain::Transaction as UtxoTx; pub const DEFAULT_FEE_VOUT: usize = 0; pub const DEFAULT_SWAP_TX_SPEND_SIZE: u64 = 305; pub const DEFAULT_SWAP_VOUT: usize = 0; const MIN_BTC_TRADING_VOL: &str = "0.00777"; -const BLOCK_HEADERS_LOOP_INTERVAL: f64 = 60.; macro_rules! 
true_or { ($cond: expr, $etype: expr) => { @@ -3445,143 +3441,6 @@ fn increase_by_percent(num: u64, percent: f64) -> u64 { num + (percent.round() as u64) } -pub async fn block_header_utxo_loop( - weak: UtxoWeak, - constructor: impl Fn(UtxoArc) -> T, - mut sync_status_loop_handle: UtxoSyncStatusLoopHandle, -) { - while let Some(arc) = weak.upgrade() { - let coin = constructor(arc); - let client = match &coin.as_ref().rpc_client { - UtxoRpcClientEnum::Native(_) => break, - UtxoRpcClientEnum::Electrum(client) => client, - }; - - let storage = client.block_headers_storage(); - let from_block_height = match storage.get_last_block_height().await { - Ok(h) => h, - Err(e) => { - error!("Error {} on getting the height of the last stored header in DB!", e); - sync_status_loop_handle.notify_on_temp_error(e.to_string()); - Timer::sleep(10.).await; - continue; - }, - }; - - let to_block_height = match coin.as_ref().rpc_client.get_block_count().compat().await { - Ok(h) => h, - Err(e) => { - error!("Error {} on getting the height of the latest block from rpc!", e); - sync_status_loop_handle.notify_on_temp_error(e.to_string()); - Timer::sleep(10.).await; - continue; - }, - }; - - // Todo: Add code for the case if a chain reorganization happens - if from_block_height == to_block_height { - Timer::sleep(BLOCK_HEADERS_LOOP_INTERVAL).await; - continue; - } - - sync_status_loop_handle.notify_blocks_headers_sync_status(from_block_height + 1, to_block_height); - - let (block_registry, block_headers, last_retrieved_height) = match client - .retrieve_headers(from_block_height + 1, to_block_height) - .compat() - .await - { - Ok(res) => res, - Err(e) => { - error!("Error {} on retrieving the latest headers from rpc!", e); - sync_status_loop_handle.notify_on_temp_error(e.to_string()); - Timer::sleep(10.).await; - continue; - }, - }; - - let ticker = coin.as_ref().conf.ticker.as_str(); - if let Some(params) = &coin.as_ref().conf.block_headers_verification_params { - if let Err(e) = 
validate_headers(ticker, from_block_height, block_headers, storage, params).await { - error!("Error {} on validating the latest headers!", e); - // Todo: remove this electrum server and use another in this case since the headers from this server are invalid - sync_status_loop_handle.notify_on_permanent_error(e.to_string()); - break; - } - } - - ok_or_continue_after_sleep!( - storage.add_block_headers_to_storage(block_registry).await, - BLOCK_HEADERS_LOOP_INTERVAL - ); - - // blockchain.block.headers returns a maximum of 2016 headers (tested for btc) so the loop needs to continue until we have all headers up to the current one. - if last_retrieved_height == to_block_height { - sync_status_loop_handle.notify_sync_finished(to_block_height); - Timer::sleep(BLOCK_HEADERS_LOOP_INTERVAL).await; - } - } -} - -pub async fn merge_utxo_loop( - weak: UtxoWeak, - merge_at: usize, - check_every: f64, - max_merge_at_once: usize, - constructor: impl Fn(UtxoArc) -> T, -) where - T: UtxoCommonOps + GetUtxoListOps, -{ - loop { - Timer::sleep(check_every).await; - - let coin = match weak.upgrade() { - Some(arc) => constructor(arc), - None => break, - }; - - let my_address = match coin.as_ref().derivation_method { - DerivationMethod::Iguana(ref my_address) => my_address, - DerivationMethod::HDWallet(_) => { - warn!("'merge_utxo_loop' is currently not used for HD wallets"); - return; - }, - }; - - let ticker = &coin.as_ref().conf.ticker; - let (unspents, recently_spent) = match coin.get_unspent_ordered_list(my_address).await { - Ok((unspents, recently_spent)) => (unspents, recently_spent), - Err(e) => { - error!("Error {} on get_unspent_ordered_list of coin {}", e, ticker); - continue; - }, - }; - if unspents.len() >= merge_at { - let unspents: Vec<_> = unspents.into_iter().take(max_merge_at_once).collect(); - info!("Trying to merge {} UTXOs of coin {}", unspents.len(), ticker); - let value = unspents.iter().fold(0, |sum, unspent| sum + unspent.value); - let script_pubkey = 
Builder::build_p2pkh(&my_address.hash).to_bytes(); - let output = TransactionOutput { value, script_pubkey }; - let merge_tx_fut = generate_and_send_tx( - &coin, - unspents, - None, - FeePolicy::DeductFromOutput(0), - recently_spent, - vec![output], - ); - match merge_tx_fut.await { - Ok(tx) => info!( - "UTXO merge successful for coin {}, tx_hash {:?}", - ticker, - tx.hash().reversed() - ), - Err(e) => error!("Error {:?} on UTXO merge attempt for coin {}", e, ticker), - } - } - } -} - pub async fn can_refund_htlc(coin: &T, locktime: u64) -> Result> where T: UtxoCommonOps, diff --git a/mm2src/coins/utxo/utxo_standard.rs b/mm2src/coins/utxo/utxo_standard.rs index c67a7ea844..42c872ebd7 100644 --- a/mm2src/coins/utxo/utxo_standard.rs +++ b/mm2src/coins/utxo/utxo_standard.rs @@ -56,7 +56,6 @@ pub async fn utxo_standard_coin_with_priv_key( conf, activation_params, priv_key_policy, - None, UtxoStandardCoin::from ) .build() diff --git a/mm2src/coins/utxo/utxo_tests.rs b/mm2src/coins/utxo/utxo_tests.rs index 44555a24ca..2db59fb042 100644 --- a/mm2src/coins/utxo/utxo_tests.rs +++ b/mm2src/coins/utxo/utxo_tests.rs @@ -61,7 +61,6 @@ pub fn electrum_client_for_test(servers: &[&str]) -> ElectrumClient { &Json::Null, ¶ms, priv_key_policy, - None, UtxoStandardCoin::from, ); let args = ElectrumBuilderArgs { @@ -171,6 +170,8 @@ fn utxo_coin_fields_for_test( recently_spent_outpoints: AsyncMutex::new(RecentlySpentOutPoints::new(my_script_pubkey)), tx_hash_algo: TxHashAlgo::DSHA256, check_utxo_maturity: false, + block_headers_status_notifier: None, + block_headers_status_watcher: None, } } diff --git a/mm2src/coins/z_coin.rs b/mm2src/coins/z_coin.rs index 34cec4529e..87e9a8621b 100644 --- a/mm2src/coins/z_coin.rs +++ b/mm2src/coins/z_coin.rs @@ -3,7 +3,7 @@ use crate::rpc_command::init_withdraw::{InitWithdrawCoin, WithdrawInProgressStat use crate::utxo::rpc_clients::{ElectrumRpcRequest, UnspentInfo, UtxoRpcClientEnum, UtxoRpcError, UtxoRpcFut, UtxoRpcResult}; use 
crate::utxo::utxo_builder::{UtxoCoinBuilderCommonOps, UtxoCoinWithIguanaPrivKeyBuilder, - UtxoFieldsWithIguanaPrivKeyBuilder, UtxoSyncStatusLoopHandle}; + UtxoFieldsWithIguanaPrivKeyBuilder}; use crate::utxo::utxo_common::{addresses_from_script, big_decimal_from_sat, big_decimal_from_sat_unsigned, payment_script}; use crate::utxo::{sat_from_big_decimal, utxo_common, ActualTxFee, AdditionalTxData, Address, BroadcastTxErr, @@ -728,8 +728,6 @@ impl<'a> UtxoCoinBuilderCommonOps for ZCoinBuilder<'a> { fn activation_params(&self) -> &UtxoActivationParams { &self.utxo_params } fn ticker(&self) -> &str { self.ticker } - - fn sync_status_loop_handle(&self) -> Option { None } } #[async_trait] diff --git a/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs b/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs index faba3d313a..e70e9bd1f1 100644 --- a/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs +++ b/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs @@ -8,12 +8,11 @@ use crate::utxo_activation::init_utxo_standard_statuses::{UtxoStandardAwaitingSt UtxoStandardUserAction}; use crate::utxo_activation::utxo_standard_activation_result::UtxoStandardActivationResult; use async_trait::async_trait; -use coins::utxo::utxo_builder::{UtxoArcBuilder, UtxoCoinBuilder, UtxoSyncStatus, UtxoSyncStatusLoopHandle}; +use coins::utxo::utxo_builder::{UtxoArcBuilder, UtxoCoinBuilder}; use coins::utxo::utxo_standard::UtxoStandardCoin; -use coins::utxo::UtxoActivationParams; +use coins::utxo::{UtxoActivationParams, UtxoSyncStatus}; use coins::CoinProtocol; use crypto::CryptoCtx; -use futures::channel::mpsc::channel; use futures::StreamExt; use mm2_core::mm_ctx::MmArc; use mm2_err_handle::prelude::*; @@ -61,31 +60,20 @@ impl InitStandaloneCoinActivationOps for UtxoStandardCoin { let crypto_ctx = CryptoCtx::from_ctx(&ctx)?; let priv_key_policy = priv_key_build_policy(&crypto_ctx, 
activation_request.priv_key_policy); - let (sync_status_loop_handle, maybe_sync_watcher) = - if coin_conf["enable_spv_proof"].as_bool().unwrap_or(false) && !activation_request.mode.is_native() { - let (sync_status_notifier, sync_watcher) = channel(1); - ( - Some(UtxoSyncStatusLoopHandle::new(sync_status_notifier)), - Some(sync_watcher), - ) - } else { - (None, None) - }; - let coin = UtxoArcBuilder::new( &ctx, &ticker, &coin_conf, activation_request, priv_key_policy, - sync_status_loop_handle, UtxoStandardCoin::from, ) .build() .await .mm_err(|e| InitUtxoStandardError::from_build_err(e, ticker.clone()))?; - if let Some(mut sync_watcher) = maybe_sync_watcher { + if let Some(sync_watcher_mutex) = &coin.as_ref().block_headers_status_watcher { + let mut sync_watcher = sync_watcher_mutex.lock().await; loop { let in_progress_status = match sync_watcher