diff --git a/Cargo.lock b/Cargo.lock index 8f65815fcc..41cb6b3d5c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -497,7 +497,7 @@ dependencies = [ "hkd32", "hmac 0.11.0", "ripemd160", - "secp256k1", + "secp256k1 0.20.3", "sha2 0.9.9", "subtle 2.4.0", "zeroize", @@ -523,13 +523,13 @@ dependencies = [ [[package]] name = "bitcoin" -version = "0.27.1" +version = "0.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a41df6ad9642c5c15ae312dd3d074de38fd3eb7cc87ad4ce10f90292a83fe4d" +checksum = "05bba324e6baf655b882df672453dbbc527bc938cadd27750ae510aaccc3a66a" dependencies = [ "bech32", "bitcoin_hashes", - "secp256k1", + "secp256k1 0.22.1", ] [[package]] @@ -1074,7 +1074,7 @@ dependencies = [ "lightning-background-processor", "lightning-invoice", "lightning-net-tokio", - "lightning-persister", + "lightning-rapid-gossip-sync", "mm2_core", "mm2_db", "mm2_err_handle", @@ -1098,7 +1098,8 @@ dependencies = [ "rust-ini", "rustls 0.20.4", "script", - "secp256k1", + "secp256k1 0.20.3", + "secp256k1 0.22.1", "ser_error", "ser_error_derive", "serde", @@ -1537,7 +1538,7 @@ dependencies = [ "primitives", "rpc_task", "rustc-hex 2.1.0", - "secp256k1", + "secp256k1 0.20.3", "ser_error", "ser_error_derive", "serde", @@ -2222,7 +2223,7 @@ dependencies = [ "mem", "rand 0.6.5", "rustc-hex 1.0.0", - "secp256k1", + "secp256k1 0.20.3", "serde", "serde_derive", "tiny-keccak 1.4.4", @@ -3026,7 +3027,7 @@ dependencies = [ "js-sys", "mm2_err_handle", "rusb", - "secp256k1", + "secp256k1 0.20.3", "serde", "serde_derive", "wasm-bindgen", @@ -3206,9 +3207,9 @@ checksum = "5a9d968042a4902e08810946fc7cd5851eb75e80301342305af755ca06cb82ce" [[package]] name = "indexmap" -version = "1.8.2" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6012d540c5baa3589337a98ce73408de9b5a25ec9fc2c6fd6be8f0d39e0ca5a" +checksum = "bc633605454125dec4b66843673f01c7df2b89479b32e0ed634e43a91cff62a5" dependencies = [ "autocfg 1.0.0", "hashbrown 
0.11.2", @@ -3408,7 +3409,7 @@ dependencies = [ "primitives", "rand 0.6.5", "rustc-hex 2.1.0", - "secp256k1", + "secp256k1 0.20.3", "serde", "serde_derive", ] @@ -3883,39 +3884,43 @@ dependencies = [ [[package]] name = "lightning" -version = "0.0.106" -source = "git+https://github.com/shamardy/rust-lightning?branch=0.0.106#af4a89c08c22d0110d386df0e288b2f825aaebbc" +version = "0.0.110" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dce6da860338d5a9ddc3fd42432465310cfab93b342bbd23b41b7c1f7c509d3" dependencies = [ "bitcoin", - "secp256k1", ] [[package]] name = "lightning-background-processor" -version = "0.0.106" -source = "git+https://github.com/shamardy/rust-lightning?branch=0.0.106#af4a89c08c22d0110d386df0e288b2f825aaebbc" +version = "0.0.110" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8de9d0de42bb933ffb8d33c6b0a75302f08b35126bfc74398ba01ad0c201f8d" dependencies = [ "bitcoin", "lightning", - "lightning-persister", + "lightning-rapid-gossip-sync", ] [[package]] name = "lightning-invoice" -version = "0.14.0" -source = "git+https://github.com/shamardy/rust-lightning?branch=0.0.106#af4a89c08c22d0110d386df0e288b2f825aaebbc" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32aa02b7fd0bd95e40b6ca8d9d9232b162d5e23b41bd2bc42abe9e9c78d34d72" dependencies = [ "bech32", "bitcoin_hashes", "lightning", "num-traits", - "secp256k1", + "secp256k1 0.22.1", + "serde", ] [[package]] name = "lightning-net-tokio" -version = "0.0.106" -source = "git+https://github.com/shamardy/rust-lightning?branch=0.0.106#af4a89c08c22d0110d386df0e288b2f825aaebbc" +version = "0.0.110" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce57d093fbc643835bc64c0501b52a3531d2511dcb1237d0495d68fea3adc47d" dependencies = [ "bitcoin", "lightning", @@ -3923,14 +3928,13 @@ dependencies = [ ] [[package]] -name = "lightning-persister" -version = "0.0.106" -source = 
"git+https://github.com/shamardy/rust-lightning?branch=0.0.106#af4a89c08c22d0110d386df0e288b2f825aaebbc" +name = "lightning-rapid-gossip-sync" +version = "0.0.110" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "391732631b14f7a1d9dc84dc3f644484d9b73190a31087b3856505cf0525bea0" dependencies = [ "bitcoin", - "libc", "lightning", - "winapi", ] [[package]] @@ -4251,7 +4255,7 @@ dependencies = [ "rand 0.7.3", "regex", "rmp-serde", - "secp256k1", + "secp256k1 0.20.3", "serde", "serde_bytes", "serde_json", @@ -4277,6 +4281,7 @@ dependencies = [ "hex 0.4.2", "keys", "lazy_static", + "lightning-background-processor", "mm2_metrics", "mm2_rpc", "primitives", @@ -4431,7 +4436,7 @@ dependencies = [ "rpc", "rpc_task", "script", - "secp256k1", + "secp256k1 0.20.3", "ser_error", "ser_error_derive", "serde", @@ -5012,7 +5017,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87f5ec2493a61ac0506c0f4199f99070cbe83857b0337006a30f3e6719b8ef58" dependencies = [ "lock_api 0.4.6", - "parking_lot_core 0.9.3", + "parking_lot_core 0.9.1", ] [[package]] @@ -5060,15 +5065,15 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.3" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09a279cbf25cb0757810394fbc1e359949b59e348145c643a939a525692e6929" +checksum = "28141e0cc4143da2443301914478dc976a61ffdb3f043058310c70df2fed8954" dependencies = [ "cfg-if 1.0.0", "libc", "redox_syscall 0.2.10", "smallvec 1.6.1", - "windows-sys", + "windows-sys 0.32.0", ] [[package]] @@ -5136,9 +5141,9 @@ checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" [[package]] name = "petgraph" -version = "0.6.2" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6d5014253a1331579ce62aa67443b4a658c5e7dd03d4bc6d302b94474888143" +checksum = "51b305cc4569dd4e8765bab46261f67ef5d4d11a4b6e745100ee5dad8948b46c" dependencies = [ 
"fixedbitset", "indexmap", @@ -5780,9 +5785,9 @@ dependencies = [ [[package]] name = "raw-cpuid" -version = "10.3.0" +version = "10.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "738bc47119e3eeccc7e94c4a506901aea5e7b4944ecd0829cbebf4af04ceda12" +checksum = "2c49596760fce12ca21550ac21dc5a9617b2ea4b6e0aa7d8dab8ff2824fc2bba" dependencies = [ "bitflags", ] @@ -6275,7 +6280,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "88d6731146462ea25d9244b2ed5fd1d716d25c52e4d54aa4fb0f3c4e9854dbe2" dependencies = [ "lazy_static", - "windows-sys", + "windows-sys 0.36.1", ] [[package]] @@ -6350,14 +6355,32 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97d03ceae636d0fed5bae6a7f4f664354c5f4fcedf6eef053fef17e49f837d0a" dependencies = [ "rand 0.6.5", - "secp256k1-sys", + "secp256k1-sys 0.4.2", +] + +[[package]] +name = "secp256k1" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26947345339603ae8395f68e2f3d85a6b0a8ddfe6315818e80b8504415099db0" +dependencies = [ + "secp256k1-sys 0.5.2", ] [[package]] name = "secp256k1-sys" -version = "0.4.0" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "957da2573cde917463ece3570eab4a0b3f19de6f1646cde62e6fd3868f566036" +dependencies = [ + "cc", +] + +[[package]] +name = "secp256k1-sys" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67e4b6455ee49f5901c8985b88f98fb0a0e1d90a6661f5a03f4888bd987dad29" +checksum = "152e20a0fd0519390fc43ab404663af8a0b794273d2a91d60ad4a39f13ffe110" dependencies = [ "cc", ] @@ -9107,43 +9130,86 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +[[package]] +name = "windows-sys" +version = "0.32.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "3df6e476185f92a12c072be4a189a0210dcdcf512a1891d6dff9edb874deadc6" +dependencies = [ + "windows_aarch64_msvc 0.32.0", + "windows_i686_gnu 0.32.0", + "windows_i686_msvc 0.32.0", + "windows_x86_64_gnu 0.32.0", + "windows_x86_64_msvc 0.32.0", +] + [[package]] name = "windows-sys" version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2" dependencies = [ - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_msvc", + "windows_aarch64_msvc 0.36.1", + "windows_i686_gnu 0.36.1", + "windows_i686_msvc 0.36.1", + "windows_x86_64_gnu 0.36.1", + "windows_x86_64_msvc 0.36.1", ] +[[package]] +name = "windows_aarch64_msvc" +version = "0.32.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8e92753b1c443191654ec532f14c199742964a061be25d77d7a96f09db20bf5" + [[package]] name = "windows_aarch64_msvc" version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47" +[[package]] +name = "windows_i686_gnu" +version = "0.32.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a711c68811799e017b6038e0922cb27a5e2f43a2ddb609fe0b6f3eeda9de615" + [[package]] name = "windows_i686_gnu" version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6" +[[package]] +name = "windows_i686_msvc" +version = "0.32.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "146c11bb1a02615db74680b32a68e2d61f553cc24c4eb5b4ca10311740e44172" + [[package]] name = "windows_i686_msvc" version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024" +[[package]] +name = 
"windows_x86_64_gnu" +version = "0.32.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c912b12f7454c6620635bbff3450962753834be2a594819bd5e945af18ec64bc" + [[package]] name = "windows_x86_64_gnu" version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1" +[[package]] +name = "windows_x86_64_msvc" +version = "0.32.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "504a2476202769977a040c6364301a3f65d0cc9e3fb08600b2bda150a0488316" + [[package]] name = "windows_x86_64_msvc" version = "0.36.1" @@ -9303,7 +9369,7 @@ dependencies = [ "rand 0.7.3", "rand_core 0.5.1", "ripemd160", - "secp256k1", + "secp256k1 0.20.3", "sha2 0.9.9", "subtle 2.4.0", "zcash_note_encryption", diff --git a/deny.toml b/deny.toml index 36db856da9..e4db904c61 100644 --- a/deny.toml +++ b/deny.toml @@ -261,6 +261,8 @@ skip = [ { name = "rustls-pemfile", version = "*" }, { name = "scopeguard", version = "*" }, { name = "sct", version = "*" }, + { name = "secp256k1", version = "*" }, + { name = "secp256k1-sys", version = "*" }, { name = "semver", version = "*" }, { name = "send_wrapper", version = "*" }, { name = "sha2", version = "*" }, diff --git a/mm2src/coins/Cargo.toml b/mm2src/coins/Cargo.toml index 1ab063bf1f..e0f59f35b0 100644 --- a/mm2src/coins/Cargo.toml +++ b/mm2src/coins/Cargo.toml @@ -17,7 +17,6 @@ async-trait = "0.1.52" base64 = "0.10.0" base58 = "0.2.0" bip32 = { version = "0.2.2", default-features = false, features = ["alloc", "secp256k1-ffi"] } -bitcoin = "0.27.1" bitcoin_hashes = "0.10.0" bitcrypto = { path = "../mm2_bitcoin/crypto" } bincode = "1.3.3" @@ -50,8 +49,6 @@ jsonrpc-core = "8.0.1" keys = { path = "../mm2_bitcoin/keys" } lazy_static = "1.4" libc = "0.2" -lightning = { git = "https://github.com/shamardy/rust-lightning", branch = "0.0.106" } -lightning-invoice = { git = 
"https://github.com/shamardy/rust-lightning", branch = "0.0.106" } mm2_core = { path = "../mm2_core" } mm2_err_handle = { path = "../mm2_err_handle" } mm2_io = { path = "../mm2_io" } @@ -103,20 +100,24 @@ web-sys = { version = "0.3.55", features = ["console", "Headers", "Request", "Re [target.'cfg(not(target_arch = "wasm32"))'.dependencies] cosmrs = { version = "0.7", features = ["rpc"] } dirs = { version = "1" } -lightning-background-processor = { git = "https://github.com/shamardy/rust-lightning", branch = "0.0.106" } -lightning-persister = { git = "https://github.com/shamardy/rust-lightning", branch = "0.0.106" } -lightning-net-tokio = { git = "https://github.com/shamardy/rust-lightning", branch = "0.0.106" } +bitcoin = "0.28.1" +lightning = "0.0.110" +lightning-background-processor = "0.0.110" +lightning-invoice = { version = "0.18.0", features = ["serde"] } +lightning-net-tokio = "0.0.110" +lightning-rapid-gossip-sync = "0.0.110" rust-ini = { version = "0.13" } rustls = { version = "0.20", features = ["dangerous_configuration"] } -tokio = { version = "1.7" } -tokio-rustls = { version = "0.23" } -tonic = { version = "0.7", features = ["tls", "tls-webpki-roots", "compression"] } -webpki-roots = { version = "0.22" } +secp256k1v22 = { version = "0.22", package = "secp256k1" } solana-client = { version = "1", default-features = false } solana-sdk = { version = "1", default-features = false } solana-transaction-status = "1" -spl-token = { version = "3" } spl-associated-token-account = "1" +spl-token = { version = "3" } +tokio = { version = "1.7" } +tokio-rustls = { version = "0.23" } +tonic = { version = "0.7", features = ["tls", "tls-webpki-roots", "compression"] } +webpki-roots = { version = "0.22" } zcash_client_backend = { git = "https://github.com/KomodoPlatform/librustzcash.git" } zcash_client_sqlite = { git = "https://github.com/KomodoPlatform/librustzcash.git" } zcash_primitives = { features = ["transparent-inputs"], git = 
"https://github.com/KomodoPlatform/librustzcash.git" } diff --git a/mm2src/coins/lightning.rs b/mm2src/coins/lightning.rs index b911ec46aa..21735e84ae 100644 --- a/mm2src/coins/lightning.rs +++ b/mm2src/coins/lightning.rs @@ -11,8 +11,12 @@ mod ln_storage; mod ln_utils; use super::{lp_coinfind_or_err, DerivationMethod, MmCoinEnum}; +use crate::lightning::ln_conf::OurChannelsConfigs; +use crate::lightning::ln_errors::{TrustedNodeError, TrustedNodeResult, UpdateChannelError, UpdateChannelResult}; use crate::lightning::ln_events::init_events_abort_handlers; +use crate::lightning::ln_serialization::PublicKeyForRPC; use crate::lightning::ln_sql::SqliteLightningDB; +use crate::lightning::ln_storage::{NetworkGraph, TrustedNodesShared}; use crate::utxo::rpc_clients::UtxoRpcClientEnum; use crate::utxo::utxo_common::{big_decimal_from_sat_unsigned, UtxoTxBuilder}; use crate::utxo::{sat_from_big_decimal, BlockchainNetwork, FeePolicy, GetUtxoListOps, UtxoTxGenerationOps}; @@ -39,13 +43,13 @@ use lightning::chain::keysinterface::{KeysInterface, KeysManager, Recipient}; use lightning::chain::Access; use lightning::ln::channelmanager::{ChannelDetails, MIN_FINAL_CLTV_EXPIRY}; use lightning::ln::{PaymentHash, PaymentPreimage}; -use lightning::routing::network_graph::{NetGraphMsgHandler, NetworkGraph}; +use lightning::routing::gossip; use lightning::util::config::UserConfig; -use lightning_background_processor::BackgroundProcessor; +use lightning_background_processor::{BackgroundProcessor, GossipSync}; use lightning_invoice::payment; use lightning_invoice::utils::{create_invoice_from_channelmanager, DefaultRouter}; use lightning_invoice::{Invoice, InvoiceDescription}; -use ln_conf::{ChannelOptions, LightningCoinConf, LightningProtocolConf, PlatformCoinConfirmations}; +use ln_conf::{ChannelOptions, LightningCoinConf, LightningProtocolConf, PlatformCoinConfirmationTargets}; use ln_db::{ClosedChannelsFilter, DBChannelDetails, DBPaymentInfo, DBPaymentsFilter, HTLCStatus, LightningDB, 
PaymentType}; use ln_errors::{ClaimableBalancesError, ClaimableBalancesResult, CloseChannelError, CloseChannelResult, @@ -55,10 +59,10 @@ use ln_errors::{ClaimableBalancesError, ClaimableBalancesResult, CloseChannelErr ListPaymentsError, ListPaymentsResult, OpenChannelError, OpenChannelResult, SendPaymentError, SendPaymentResult}; use ln_events::LightningEventHandler; -use ln_filesystem_persister::{LightningFilesystemPersister, LightningPersisterShared}; +use ln_filesystem_persister::LightningFilesystemPersister; use ln_p2p::{connect_to_node, ConnectToNodeRes, PeerManager}; use ln_platform::{h256_json_from_txid, Platform}; -use ln_serialization::{InvoiceForRPC, NodeAddress, PublicKeyForRPC}; +use ln_serialization::NodeAddress; use ln_storage::{LightningStorage, NodesAddressesMapShared, Scorer}; use ln_utils::{ChainMonitor, ChannelManager}; use mm2_core::mm_ctx::MmArc; @@ -68,7 +72,7 @@ use mm2_number::{BigDecimal, MmNumber}; use parking_lot::Mutex as PaMutex; use rpc::v1::types::{Bytes as BytesJson, H256 as H256Json}; use script::{Builder, TransactionInputSigner}; -use secp256k1::PublicKey; +use secp256k1v22::PublicKey; use serde::{Deserialize, Serialize}; use serde_json::Value as Json; use std::collections::hash_map::Entry; @@ -76,10 +80,12 @@ use std::collections::{HashMap, HashSet}; use std::fmt; use std::net::SocketAddr; use std::str::FromStr; -use std::sync::{Arc, Mutex}; +use std::sync::Arc; + +pub const DEFAULT_INVOICE_EXPIRY: u32 = 3600; type Router = DefaultRouter, Arc>; -type InvoicePayer = payment::InvoicePayer, Router, Arc>, Arc, E>; +type InvoicePayer = payment::InvoicePayer, Router, Arc, Arc, E>; #[derive(Clone)] pub struct LightningCoin { @@ -87,8 +93,6 @@ pub struct LightningCoin { pub conf: LightningCoinConf, /// The lightning node peer manager that takes care of connecting to peers, etc.. pub peer_manager: Arc, - /// The lightning node background processor that takes care of tasks that need to happen periodically. 
- pub background_processor: Arc, /// The lightning node channel manager which keeps track of the number of open channels and sends messages to the appropriate /// channel, also tracks HTLC preimages and forwards onion packets appropriately. pub channel_manager: Arc, @@ -99,12 +103,15 @@ pub struct LightningCoin { /// The lightning node invoice payer. pub invoice_payer: Arc>>, /// The lightning node persister that takes care of writing/reading data from storage. - pub persister: LightningPersisterShared, + pub persister: Arc, /// The lightning node db struct that takes care of reading/writing data from/to db. pub db: SqliteLightningDB, /// The mutex storing the addresses of the nodes that the lightning node has open channels with, /// these addresses are used for reconnecting. pub open_channels_nodes: NodesAddressesMapShared, + /// The mutex storing the public keys of the nodes that our lightning node trusts to allow 0 confirmation + /// inbound channels from. + pub trusted_nodes: TrustedNodesShared, } impl fmt::Debug for LightningCoin { @@ -117,9 +124,14 @@ impl LightningCoin { #[inline] fn my_node_id(&self) -> String { self.channel_manager.get_our_node_id().to_string() } - fn get_balance_msat(&self) -> (u64, u64) { - self.channel_manager - .list_channels() + async fn list_channels(&self) -> Vec { + let channel_manager = self.channel_manager.clone(); + async_blocking(move || channel_manager.list_channels()).await + } + + async fn get_balance_msat(&self) -> (u64, u64) { + self.list_channels() + .await .iter() .fold((0, 0), |(spendable, unspendable), chan| { if chan.is_usable { @@ -133,11 +145,15 @@ impl LightningCoin { }) } - fn pay_invoice(&self, invoice: Invoice) -> SendPaymentResult { - self.invoice_payer - .pay_invoice(&invoice) - .map_to_mm(|e| SendPaymentError::PaymentError(format!("{:?}", e)))?; - let payment_hash = PaymentHash((*invoice.payment_hash()).into_inner()); + async fn get_channel_by_rpc_id(&self, rpc_id: u64) -> Option { + self.list_channels() + 
.await + .into_iter() + .find(|chan| chan.user_channel_id == rpc_id) + } + + async fn pay_invoice(&self, invoice: Invoice) -> SendPaymentResult { + let payment_hash = PaymentHash((invoice.payment_hash()).into_inner()); let payment_type = PaymentType::OutboundPayment { destination: *invoice.payee_pub_key().unwrap_or(&invoice.recover_payee_pub_key()), }; @@ -146,13 +162,24 @@ impl LightningCoin { InvoiceDescription::Hash(h) => hex::encode(h.0.into_inner()), }; let payment_secret = Some(*invoice.payment_secret()); + let amt_msat = invoice.amount_milli_satoshis().map(|a| a as i64); + + let selfi = self.clone(); + async_blocking(move || { + selfi + .invoice_payer + .pay_invoice(&invoice) + .map_to_mm(|e| SendPaymentError::PaymentError(format!("{:?}", e))) + }) + .await?; + Ok(DBPaymentInfo { payment_hash, payment_type, description, preimage: None, secret: payment_secret, - amt_msat: invoice.amount_milli_satoshis().map(|a| a as i64), + amt_msat, fee_paid_msat: None, status: HTLCStatus::Pending, created_at: (now_ms() / 1000) as i64, @@ -160,7 +187,7 @@ impl LightningCoin { }) } - fn keysend( + async fn keysend( &self, destination: PublicKey, amount_msat: u64, @@ -173,9 +200,16 @@ impl LightningCoin { )); } let payment_preimage = PaymentPreimage(self.keys_manager.get_secure_random_bytes()); - self.invoice_payer - .pay_pubkey(destination, payment_preimage, amount_msat, final_cltv_expiry_delta) - .map_to_mm(|e| SendPaymentError::PaymentError(format!("{:?}", e)))?; + + let selfi = self.clone(); + async_blocking(move || { + selfi + .invoice_payer + .pay_pubkey(destination, payment_preimage, amount_msat, final_cltv_expiry_delta) + .map_to_mm(|e| SendPaymentError::PaymentError(format!("{:?}", e))) + }) + .await?; + let payment_hash = PaymentHash(Sha256::hash(&payment_preimage.0).into_inner()); let payment_type = PaymentType::OutboundPayment { destination }; @@ -199,14 +233,11 @@ impl LightningCoin { paging: PagingOptionsEnum, limit: usize, ) -> ListChannelsResult { - let mut 
total_open_channels: Vec = self - .channel_manager - .list_channels() - .into_iter() - .map(From::from) - .collect(); + let mut total_open_channels: Vec = + self.list_channels().await.into_iter().map(From::from).collect(); total_open_channels.sort_by(|a, b| a.rpc_channel_id.cmp(&b.rpc_channel_id)); + drop_mutability!(total_open_channels); let open_channels_filtered = if let Some(ref f) = filter { total_open_channels @@ -425,13 +456,16 @@ impl MarketCoinOps for LightningCoin { } fn my_balance(&self) -> BalanceFut { + let coin = self.clone(); let decimals = self.decimals(); - let (spendable_msat, unspendable_msat) = self.get_balance_msat(); - let my_balance = CoinBalance { - spendable: big_decimal_from_sat_unsigned(spendable_msat, decimals), - unspendable: big_decimal_from_sat_unsigned(unspendable_msat, decimals), + let fut = async move { + let (spendable_msat, unspendable_msat) = coin.get_balance_msat().await; + Ok(CoinBalance { + spendable: big_decimal_from_sat_unsigned(spendable_msat, decimals), + unspendable: big_decimal_from_sat_unsigned(unspendable_msat, decimals), + }) }; - Box::new(futures01::future::ok(my_balance)) + Box::new(fut.boxed().compat()) } fn base_coin_balance(&self) -> BalanceFut { @@ -495,6 +529,7 @@ impl MarketCoinOps for LightningCoin { .keys_manager .get_node_secret(Recipient::Node) .map_err(|_| "Unsupported recipient".to_string())? + .display_secret() .to_string()) } @@ -626,8 +661,9 @@ pub async fn start_lightning( let platform = Arc::new(Platform::new( platform_coin.clone(), protocol_conf.network.clone(), - protocol_conf.confirmations, + protocol_conf.confirmation_targets, )); + platform.set_latest_fees().await?; // Initialize the Logger let logger = ctx.log.0.clone(); @@ -638,10 +674,14 @@ pub async fn start_lightning( // Initialize the KeysManager let keys_manager = ln_utils::init_keys_manager(ctx)?; - // Initialize the NetGraphMsgHandler. 
This is used for providing routes to send payments over - let network_graph = Arc::new(persister.get_network_graph(protocol_conf.network.into()).await?); + // Initialize the P2PGossipSync. This is used for providing routes to send payments over + let network_graph = Arc::new( + persister + .get_network_graph(protocol_conf.network.into(), logger.clone()) + .await?, + ); - let network_gossip = Arc::new(NetGraphMsgHandler::new( + let gossip_sync = Arc::new(gossip::P2PGossipSync::new( network_graph.clone(), None::>, logger.clone(), @@ -666,7 +706,7 @@ pub async fn start_lightning( ctx.clone(), params.listening_port, channel_manager.clone(), - network_gossip.clone(), + gossip_sync.clone(), keys_manager .get_node_secret(Recipient::Node) .map_to_mm(|_| EnableLightningError::UnsupportedMode("'start_lightning'".into(), "local node".into()))?, @@ -674,6 +714,8 @@ pub async fn start_lightning( ) .await?; + let trusted_nodes = Arc::new(PaMutex::new(persister.get_trusted_nodes().await?)); + let events_abort_handlers = init_events_abort_handlers(platform.clone(), db.clone()).await?; // Initialize the event handler @@ -682,12 +724,12 @@ pub async fn start_lightning( channel_manager.clone(), keys_manager.clone(), db.clone(), + trusted_nodes.clone(), events_abort_handlers, )); // Initialize routing Scorer - let scorer = Arc::new(Mutex::new(persister.get_scorer(network_graph.clone()).await?)); - spawn(ln_utils::persist_scorer_loop(persister.clone(), scorer.clone())); + let scorer = Arc::new(persister.get_scorer(network_graph.clone(), logger.clone()).await?); // Create InvoicePayer // random_seed_bytes are additional random seed to improve privacy by adding a random CLTV expiry offset to each path's final hop. 
@@ -700,25 +742,31 @@ pub async fn start_lightning( let invoice_payer = Arc::new(InvoicePayer::new( channel_manager.clone(), router, - scorer, + scorer.clone(), logger.clone(), event_handler, - payment::RetryAttempts(params.payment_retries.unwrap_or(5)), + // Todo: Add option for choosing payment::Retry::Timeout instead of Attempts in LightningParams + payment::Retry::Attempts(params.payment_retries.unwrap_or(5)), )); // Start Background Processing. Runs tasks periodically in the background to keep LN node operational. // InvoicePayer will act as our event handler as it handles some of the payments related events before // delegating it to LightningEventHandler. // note: background_processor stops automatically when dropped since BackgroundProcessor implements the Drop trait. - let background_processor = Arc::new(BackgroundProcessor::start( + let background_processor = BackgroundProcessor::start( persister.clone(), invoice_payer.clone(), chain_monitor.clone(), channel_manager.clone(), - Some(network_gossip), + GossipSync::p2p(gossip_sync), peer_manager.clone(), logger, - )); + Some(scorer), + ); + ctx.background_processors + .lock() + .unwrap() + .insert(conf.ticker.clone(), background_processor); // If channel_nodes_data file exists, read channels nodes data from disk and reconnect to channel nodes/peers if possible. let open_channels_nodes = Arc::new(PaMutex::new( @@ -741,7 +789,6 @@ pub async fn start_lightning( platform, conf, peer_manager, - background_processor, channel_manager, chain_monitor, keys_manager, @@ -749,6 +796,7 @@ pub async fn start_lightning( persister, db, open_channels_nodes, + trusted_nodes, }) } @@ -760,10 +808,9 @@ pub struct ConnectToNodeRequest { /// Connect to a certain node on the lightning network. 
pub async fn connect_to_lightning_node(ctx: MmArc, req: ConnectToNodeRequest) -> ConnectToNodeResult { - let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; - let ln_coin = match coin { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? { MmCoinEnum::LightningCoin(c) => c, - _ => return MmError::err(ConnectToNodeError::UnsupportedCoin(coin.ticker().to_string())), + e => return MmError::err(ConnectToNodeError::UnsupportedCoin(e.ticker().to_string())), }; let node_pubkey = req.node_address.pubkey; @@ -803,8 +850,7 @@ pub struct OpenChannelRequest { #[serde(default)] pub push_msat: u64, pub channel_options: Option, - pub counterparty_locktime: Option, - pub our_htlc_minimum_msat: Option, + pub channel_configs: Option, } #[derive(Serialize)] @@ -815,10 +861,9 @@ pub struct OpenChannelResponse { /// Opens a channel on the lightning network. pub async fn open_channel(ctx: MmArc, req: OpenChannelRequest) -> OpenChannelResult { - let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; - let ln_coin = match coin { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? 
{ MmCoinEnum::LightningCoin(c) => c, - _ => return MmError::err(OpenChannelError::UnsupportedCoin(coin.ticker().to_string())), + e => return MmError::err(OpenChannelError::UnsupportedCoin(e.ticker().to_string())), }; // Making sure that the node data is correct and that we can connect to it before doing more operations @@ -867,18 +912,18 @@ pub async fn open_channel(ctx: MmArc, req: OpenChannelRequest) -> OpenChannelRes let mut conf = ln_coin.conf.clone(); if let Some(options) = req.channel_options { match conf.channel_options.as_mut() { - Some(o) => o.update(options), + Some(o) => o.update_according_to(options), None => conf.channel_options = Some(options), } } - - let mut user_config: UserConfig = conf.into(); - if let Some(locktime) = req.counterparty_locktime { - user_config.own_channel_config.our_to_self_delay = locktime; - } - if let Some(min) = req.our_htlc_minimum_msat { - user_config.own_channel_config.our_htlc_minimum_msat = min; + if let Some(configs) = req.channel_configs { + match conf.our_channels_configs.as_mut() { + Some(o) => o.update_according_to(configs), + None => conf.our_channels_configs = Some(configs), + } } + drop_mutability!(conf); + let user_config: UserConfig = conf.into(); let rpc_channel_id = ln_coin.db.get_last_channel_rpc_id().await? as u64 + 1; @@ -899,7 +944,7 @@ pub async fn open_channel(ctx: MmArc, req: OpenChannelRequest) -> OpenChannelRes temp_channel_id, node_pubkey, true, - user_config.channel_options.announced_channel, + user_config.channel_handshake_config.announced_channel, ); // Saving node data to reconnect to it on restart @@ -919,6 +964,50 @@ pub async fn open_channel(ctx: MmArc, req: OpenChannelRequest) -> OpenChannelRes }) } +#[derive(Deserialize)] +pub struct UpdateChannelReq { + pub coin: String, + pub rpc_channel_id: u64, + pub channel_options: ChannelOptions, +} + +#[derive(Serialize)] +pub struct UpdateChannelResponse { + channel_options: ChannelOptions, +} + +/// Updates configuration for an open channel. 
+pub async fn update_channel(ctx: MmArc, req: UpdateChannelReq) -> UpdateChannelResult { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? { + MmCoinEnum::LightningCoin(c) => c, + e => return MmError::err(UpdateChannelError::UnsupportedCoin(e.ticker().to_string())), + }; + + let channel_details = ln_coin + .get_channel_by_rpc_id(req.rpc_channel_id) + .await + .ok_or(UpdateChannelError::NoSuchChannel(req.rpc_channel_id))?; + + async_blocking(move || { + let mut channel_options = ln_coin + .conf + .channel_options + .unwrap_or_else(|| req.channel_options.clone()); + if channel_options != req.channel_options { + channel_options.update_according_to(req.channel_options.clone()); + } + drop_mutability!(channel_options); + let channel_ids = &[channel_details.channel_id]; + let counterparty_node_id = channel_details.counterparty.node_id; + ln_coin + .channel_manager + .update_channel_config(&counterparty_node_id, channel_ids, &channel_options.clone().into()) + .map_to_mm(|e| UpdateChannelError::FailureToUpdateChannel(req.rpc_channel_id, format!("{:?}", e)))?; + Ok(UpdateChannelResponse { channel_options }) + }) + .await +} + #[derive(Deserialize)] pub struct OpenChannelsFilter { pub channel_id: Option, @@ -933,7 +1022,7 @@ pub struct OpenChannelsFilter { pub to_outbound_capacity_msat: Option, pub from_inbound_capacity_msat: Option, pub to_inbound_capacity_msat: Option, - pub confirmed: Option, + pub is_ready: Option, pub is_usable: Option, pub is_public: Option, } @@ -971,7 +1060,7 @@ fn apply_open_channel_filter(channel_details: &ChannelDetailsForRPC, filter: &Op let is_to_inbound_capacity_msat = filter.to_inbound_capacity_msat.is_none() || Some(&channel_details.inbound_capacity_msat) <= filter.to_inbound_capacity_msat.as_ref(); - let is_confirmed = filter.confirmed.is_none() || Some(&channel_details.confirmed) == filter.confirmed.as_ref(); + let is_confirmed = filter.is_ready.is_none() || Some(&channel_details.is_ready) == filter.is_ready.as_ref(); let 
is_usable = filter.is_usable.is_none() || Some(&channel_details.is_usable) == filter.is_usable.as_ref(); @@ -1019,8 +1108,8 @@ pub struct ChannelDetailsForRPC { pub inbound_capacity_msat: u64, // Channel is confirmed onchain, this means that funding_locked messages have been exchanged, // the channel is not currently being shut down, and the required confirmation count has been reached. - pub confirmed: bool, - // Channel is confirmed and funding_locked messages have been exchanged, the peer is connected, + pub is_ready: bool, + // Channel is confirmed and channel_ready messages have been exchanged, the peer is connected, // and the channel is not currently negotiating a shutdown. pub is_usable: bool, // A publicly-announced channel. @@ -1040,7 +1129,7 @@ impl From for ChannelDetailsForRPC { balance_msat: details.balance_msat, outbound_capacity_msat: details.outbound_capacity_msat, inbound_capacity_msat: details.inbound_capacity_msat, - confirmed: details.is_funding_locked, + is_ready: details.is_channel_ready, is_usable: details.is_usable, is_public: details.is_public, } @@ -1067,10 +1156,9 @@ pub async fn list_open_channels_by_filter( ctx: MmArc, req: ListOpenChannelsRequest, ) -> ListChannelsResult { - let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; - let ln_coin = match coin { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? { MmCoinEnum::LightningCoin(c) => c, - _ => return MmError::err(ListChannelsError::UnsupportedCoin(coin.ticker().to_string())), + e => return MmError::err(ListChannelsError::UnsupportedCoin(e.ticker().to_string())), }; let result = ln_coin @@ -1111,10 +1199,9 @@ pub async fn list_closed_channels_by_filter( ctx: MmArc, req: ListClosedChannelsRequest, ) -> ListChannelsResult { - let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; - let ln_coin = match coin { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? 
{ MmCoinEnum::LightningCoin(c) => c, - _ => return MmError::err(ListChannelsError::UnsupportedCoin(coin.ticker().to_string())), + e => return MmError::err(ListChannelsError::UnsupportedCoin(e.ticker().to_string())), }; let closed_channels_res = ln_coin .db @@ -1148,17 +1235,12 @@ pub async fn get_channel_details( ctx: MmArc, req: GetChannelDetailsRequest, ) -> GetChannelDetailsResult { - let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; - let ln_coin = match coin { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? { MmCoinEnum::LightningCoin(c) => c, - _ => return MmError::err(GetChannelDetailsError::UnsupportedCoin(coin.ticker().to_string())), + e => return MmError::err(GetChannelDetailsError::UnsupportedCoin(e.ticker().to_string())), }; - let channel_details = match ln_coin - .channel_manager - .list_channels() - .into_iter() - .find(|chan| chan.user_channel_id == req.rpc_channel_id) - { + + let channel_details = match ln_coin.get_channel_by_rpc_id(req.rpc_channel_id).await { Some(details) => GetChannelDetailsResponse::Open(details.into()), None => GetChannelDetailsResponse::Closed( ln_coin @@ -1177,12 +1259,13 @@ pub struct GenerateInvoiceRequest { pub coin: String, pub amount_in_msat: Option, pub description: String, + pub expiry: Option, } #[derive(Serialize)] pub struct GenerateInvoiceResponse { payment_hash: H256Json, - invoice: InvoiceForRPC, + invoice: Invoice, } /// Generates an invoice (request for payment) that can be paid on the lightning network by another node using send_payment. @@ -1190,10 +1273,9 @@ pub async fn generate_invoice( ctx: MmArc, req: GenerateInvoiceRequest, ) -> GenerateInvoiceResult { - let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; - let ln_coin = match coin { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? 
{ MmCoinEnum::LightningCoin(c) => c, - _ => return MmError::err(GenerateInvoiceError::UnsupportedCoin(coin.ticker().to_string())), + e => return MmError::err(GenerateInvoiceError::UnsupportedCoin(e.ticker().to_string())), }; let open_channels_nodes = ln_coin.open_channels_nodes.lock().clone(); for (node_pubkey, node_addr) in open_channels_nodes { @@ -1204,14 +1286,25 @@ pub async fn generate_invoice( node_pubkey )); } + let network = ln_coin.platform.network.clone().into(); - let invoice = create_invoice_from_channelmanager( - &ln_coin.channel_manager, - ln_coin.keys_manager, - network, - req.amount_in_msat, - req.description.clone(), - )?; + let channel_manager = ln_coin.channel_manager.clone(); + let keys_manager = ln_coin.keys_manager.clone(); + let amount_in_msat = req.amount_in_msat; + let description = req.description.clone(); + let expiry = req.expiry.unwrap_or(DEFAULT_INVOICE_EXPIRY); + let invoice = async_blocking(move || { + create_invoice_from_channelmanager( + &channel_manager, + keys_manager, + network, + amount_in_msat, + description, + expiry, + ) + }) + .await?; + let payment_hash = invoice.payment_hash().into_inner(); let payment_info = DBPaymentInfo { payment_hash: PaymentHash(payment_hash), @@ -1228,7 +1321,7 @@ pub async fn generate_invoice( ln_coin.db.add_or_update_payment_in_db(payment_info).await?; Ok(GenerateInvoiceResponse { payment_hash: payment_hash.into(), - invoice: invoice.into(), + invoice, }) } @@ -1236,7 +1329,7 @@ pub async fn generate_invoice( #[serde(tag = "type")] pub enum Payment { #[serde(rename = "invoice")] - Invoice { invoice: InvoiceForRPC }, + Invoice { invoice: Invoice }, #[serde(rename = "keysend")] Keysend { // The recieving node pubkey (node ID) @@ -1262,10 +1355,9 @@ pub struct SendPaymentResponse { } pub async fn send_payment(ctx: MmArc, req: SendPaymentReq) -> SendPaymentResult { - let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; - let ln_coin = match coin { + let ln_coin = match lp_coinfind_or_err(&ctx, 
&req.coin).await? { MmCoinEnum::LightningCoin(c) => c, - _ => return MmError::err(SendPaymentError::UnsupportedCoin(coin.ticker().to_string())), + e => return MmError::err(SendPaymentError::UnsupportedCoin(e.ticker().to_string())), }; let open_channels_nodes = ln_coin.open_channels_nodes.lock().clone(); for (node_pubkey, node_addr) in open_channels_nodes { @@ -1277,12 +1369,12 @@ pub async fn send_payment(ctx: MmArc, req: SendPaymentReq) -> SendPaymentResult< )); } let payment_info = match req.payment { - Payment::Invoice { invoice } => ln_coin.pay_invoice(invoice.into())?, + Payment::Invoice { invoice } => ln_coin.pay_invoice(invoice).await?, Payment::Keysend { destination, amount_in_msat, expiry, - } => ln_coin.keysend(destination.into(), amount_in_msat, expiry)?, + } => ln_coin.keysend(destination.into(), amount_in_msat, expiry).await?, }; ln_coin.db.add_or_update_payment_in_db(payment_info.clone()).await?; Ok(SendPaymentResponse { @@ -1409,10 +1501,9 @@ pub struct ListPaymentsResponse { } pub async fn list_payments_by_filter(ctx: MmArc, req: ListPaymentsReq) -> ListPaymentsResult { - let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; - let ln_coin = match coin { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? { MmCoinEnum::LightningCoin(c) => c, - _ => return MmError::err(ListPaymentsError::UnsupportedCoin(coin.ticker().to_string())), + e => return MmError::err(ListPaymentsError::UnsupportedCoin(e.ticker().to_string())), }; let get_payments_res = ln_coin .db @@ -1448,10 +1539,9 @@ pub async fn get_payment_details( ctx: MmArc, req: GetPaymentDetailsRequest, ) -> GetPaymentDetailsResult { - let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; - let ln_coin = match coin { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? 
{ MmCoinEnum::LightningCoin(c) => c, - _ => return MmError::err(GetPaymentDetailsError::UnsupportedCoin(coin.ticker().to_string())), + e => return MmError::err(GetPaymentDetailsError::UnsupportedCoin(e.ticker().to_string())), }; if let Some(payment_info) = ln_coin.db.get_payment_from_db(PaymentHash(req.payment_hash.0)).await? { @@ -1466,30 +1556,46 @@ pub async fn get_payment_details( #[derive(Deserialize)] pub struct CloseChannelReq { pub coin: String, - pub channel_id: H256Json, + pub rpc_channel_id: u64, #[serde(default)] pub force_close: bool, } pub async fn close_channel(ctx: MmArc, req: CloseChannelReq) -> CloseChannelResult { - let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; - let ln_coin = match coin { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? { MmCoinEnum::LightningCoin(c) => c, - _ => return MmError::err(CloseChannelError::UnsupportedCoin(coin.ticker().to_string())), + e => return MmError::err(CloseChannelError::UnsupportedCoin(e.ticker().to_string())), }; + + let channel_details = ln_coin + .get_channel_by_rpc_id(req.rpc_channel_id) + .await + .ok_or(CloseChannelError::NoSuchChannel(req.rpc_channel_id))?; + let channel_id = channel_details.channel_id; + let counterparty_node_id = channel_details.counterparty.node_id; + if req.force_close { - ln_coin - .channel_manager - .force_close_channel(&req.channel_id.0) - .map_to_mm(|e| CloseChannelError::CloseChannelError(format!("{:?}", e)))?; + async_blocking(move || { + ln_coin + .channel_manager + .force_close_broadcasting_latest_txn(&channel_id, &counterparty_node_id) + .map_to_mm(|e| CloseChannelError::CloseChannelError(format!("{:?}", e))) + }) + .await?; } else { - ln_coin - .channel_manager - .close_channel(&req.channel_id.0) - .map_to_mm(|e| CloseChannelError::CloseChannelError(format!("{:?}", e)))?; + async_blocking(move || { + ln_coin + .channel_manager + .close_channel(&channel_id, &counterparty_node_id) + .map_to_mm(|e| 
CloseChannelError::CloseChannelError(format!("{:?}", e))) + }) + .await?; } - Ok(format!("Initiated closing of channel: {:?}", req.channel_id)) + Ok(format!( + "Initiated closing of channel with rpc_channel_id: {}", + req.rpc_channel_id + )) } /// Details about the balance(s) available for spending once the channel appears on chain. @@ -1585,22 +1691,102 @@ pub async fn get_claimable_balances( ctx: MmArc, req: ClaimableBalancesReq, ) -> ClaimableBalancesResult> { - let coin = lp_coinfind_or_err(&ctx, &req.coin).await?; - let ln_coin = match coin { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? { MmCoinEnum::LightningCoin(c) => c, - _ => return MmError::err(ClaimableBalancesError::UnsupportedCoin(coin.ticker().to_string())), + e => return MmError::err(ClaimableBalancesError::UnsupportedCoin(e.ticker().to_string())), }; let ignored_channels = if req.include_open_channels_balances { Vec::new() } else { - ln_coin.channel_manager.list_channels() + ln_coin.list_channels().await }; - let claimable_balances = ln_coin - .chain_monitor - .get_claimable_balances(&ignored_channels.iter().collect::>()[..]) - .into_iter() - .map(From::from) - .collect(); + let claimable_balances = async_blocking(move || { + ln_coin + .chain_monitor + .get_claimable_balances(&ignored_channels.iter().collect::>()[..]) + .into_iter() + .map(From::from) + .collect() + }) + .await; Ok(claimable_balances) } + +#[derive(Deserialize)] +pub struct AddTrustedNodeReq { + pub coin: String, + pub node_id: PublicKeyForRPC, +} + +#[derive(Serialize)] +pub struct AddTrustedNodeResponse { + pub added_node: PublicKeyForRPC, +} + +pub async fn add_trusted_node(ctx: MmArc, req: AddTrustedNodeReq) -> TrustedNodeResult { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? 
{ + MmCoinEnum::LightningCoin(c) => c, + e => return MmError::err(TrustedNodeError::UnsupportedCoin(e.ticker().to_string())), + }; + + if ln_coin.trusted_nodes.lock().insert(req.node_id.clone().into()) { + ln_coin.persister.save_trusted_nodes(ln_coin.trusted_nodes).await?; + } + + Ok(AddTrustedNodeResponse { + added_node: req.node_id, + }) +} + +#[derive(Deserialize)] +pub struct RemoveTrustedNodeReq { + pub coin: String, + pub node_id: PublicKeyForRPC, +} + +#[derive(Serialize)] +pub struct RemoveTrustedNodeResponse { + pub removed_node: PublicKeyForRPC, +} + +pub async fn remove_trusted_node( + ctx: MmArc, + req: RemoveTrustedNodeReq, +) -> TrustedNodeResult { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? { + MmCoinEnum::LightningCoin(c) => c, + e => return MmError::err(TrustedNodeError::UnsupportedCoin(e.ticker().to_string())), + }; + + if ln_coin.trusted_nodes.lock().remove(&req.node_id.clone().into()) { + ln_coin.persister.save_trusted_nodes(ln_coin.trusted_nodes).await?; + } + + Ok(RemoveTrustedNodeResponse { + removed_node: req.node_id, + }) +} + +#[derive(Deserialize)] +pub struct ListTrustedNodesReq { + pub coin: String, +} + +#[derive(Serialize)] +pub struct ListTrustedNodesResponse { + trusted_nodes: Vec, +} + +pub async fn list_trusted_nodes(ctx: MmArc, req: ListTrustedNodesReq) -> TrustedNodeResult { + let ln_coin = match lp_coinfind_or_err(&ctx, &req.coin).await? 
{ + MmCoinEnum::LightningCoin(c) => c, + e => return MmError::err(TrustedNodeError::UnsupportedCoin(e.ticker().to_string())), + }; + + let trusted_nodes = ln_coin.trusted_nodes.lock().clone(); + + Ok(ListTrustedNodesResponse { + trusted_nodes: trusted_nodes.into_iter().map(PublicKeyForRPC).collect(), + }) +} diff --git a/mm2src/coins/lightning/ln_conf.rs b/mm2src/coins/lightning/ln_conf.rs index b40234d32d..4f0a6f00cd 100644 --- a/mm2src/coins/lightning/ln_conf.rs +++ b/mm2src/coins/lightning/ln_conf.rs @@ -2,26 +2,20 @@ use crate::utxo::BlockchainNetwork; use lightning::util::config::{ChannelConfig, ChannelHandshakeConfig, ChannelHandshakeLimits, UserConfig}; #[derive(Clone, Debug, Deserialize, Serialize)] -pub struct DefaultFeesAndConfirmations { - pub default_fee_per_kb: u64, - pub n_blocks: u32, -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct PlatformCoinConfirmations { - pub background: DefaultFeesAndConfirmations, - pub normal: DefaultFeesAndConfirmations, - pub high_priority: DefaultFeesAndConfirmations, +pub struct PlatformCoinConfirmationTargets { + pub background: u32, + pub normal: u32, + pub high_priority: u32, } #[derive(Debug)] pub struct LightningProtocolConf { pub platform_coin_ticker: String, pub network: BlockchainNetwork, - pub confirmations: PlatformCoinConfirmations, + pub confirmation_targets: PlatformCoinConfirmationTargets, } -#[derive(Clone, Debug, Deserialize, PartialEq)] +#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] pub struct ChannelOptions { /// Amount (in millionths of a satoshi) charged per satoshi for payments forwarded outbound /// over the channel. @@ -30,11 +24,6 @@ pub struct ChannelOptions { /// excess of proportional_fee_in_millionths_sats. pub base_fee_msat: Option, pub cltv_expiry_delta: Option, - /// Set to announce the channel publicly and notify all nodes that they can route via this - /// channel. 
- pub announced_channel: Option, - /// When set, we commit to an upfront shutdown_pubkey at channel open. - pub commit_upfront_shutdown_pubkey: Option, /// Limit our total exposure to in-flight HTLCs which are burned to fees as they are too /// small to claim on-chain. pub max_dust_htlc_exposure_msat: Option, @@ -44,7 +33,7 @@ pub struct ChannelOptions { } impl ChannelOptions { - pub fn update(&mut self, options: ChannelOptions) { + pub fn update_according_to(&mut self, options: ChannelOptions) { if let Some(fee) = options.proportional_fee_in_millionths_sats { self.proportional_fee_in_millionths_sats = Some(fee); } @@ -57,14 +46,6 @@ impl ChannelOptions { self.cltv_expiry_delta = Some(expiry); } - if let Some(announce) = options.announced_channel { - self.announced_channel = Some(announce); - } - - if let Some(commit) = options.commit_upfront_shutdown_pubkey { - self.commit_upfront_shutdown_pubkey = Some(commit); - } - if let Some(dust) = options.max_dust_htlc_exposure_msat { self.max_dust_htlc_exposure_msat = Some(dust); } @@ -91,14 +72,6 @@ impl From for ChannelConfig { channel_config.cltv_expiry_delta = expiry; } - if let Some(announce) = options.announced_channel { - channel_config.announced_channel = announce; - } - - if let Some(commit) = options.commit_upfront_shutdown_pubkey { - channel_config.commit_upfront_shutdown_pubkey = commit; - } - if let Some(dust) = options.max_dust_htlc_exposure_msat { channel_config.max_dust_htlc_exposure_msat = dust; } @@ -112,7 +85,7 @@ impl From for ChannelConfig { } #[derive(Clone, Debug, Deserialize)] -pub struct OurChannelsConfig { +pub struct OurChannelsConfigs { /// Confirmations we will wait for before considering an inbound channel locked in. 
pub inbound_channels_confirmations: Option, /// The number of blocks we require our counterparty to wait to claim their money on chain @@ -129,10 +102,50 @@ pub struct OurChannelsConfig { /// our real on-chain channel UTXO in each invoice and requiring that our counterparty only /// relay HTLCs to us using the channel's SCID alias. pub negotiate_scid_privacy: Option, + /// Sets the percentage of the channel value we will cap the total value of outstanding inbound + /// HTLCs to. + pub max_inbound_in_flight_htlc_percent: Option, + /// Set to announce the channel publicly and notify all nodes that they can route via this + /// channel. + pub announced_channel: Option, + /// When set, we commit to an upfront shutdown_pubkey at channel open. + pub commit_upfront_shutdown_pubkey: Option, } -impl From for ChannelHandshakeConfig { - fn from(config: OurChannelsConfig) -> Self { +impl OurChannelsConfigs { + pub fn update_according_to(&mut self, config: OurChannelsConfigs) { + if let Some(confs) = config.inbound_channels_confirmations { + self.inbound_channels_confirmations = Some(confs); + } + + if let Some(delay) = config.counterparty_locktime { + self.counterparty_locktime = Some(delay); + } + + if let Some(min) = config.our_htlc_minimum_msat { + self.our_htlc_minimum_msat = Some(min); + } + + if let Some(scid_privacy) = config.negotiate_scid_privacy { + self.negotiate_scid_privacy = Some(scid_privacy); + } + + if let Some(max_inbound_htlc) = config.max_inbound_in_flight_htlc_percent { + self.max_inbound_in_flight_htlc_percent = Some(max_inbound_htlc); + } + + if let Some(announce) = config.announced_channel { + self.announced_channel = Some(announce); + } + + if let Some(commit) = config.commit_upfront_shutdown_pubkey { + self.commit_upfront_shutdown_pubkey = Some(commit); + } + } +} + +impl From for ChannelHandshakeConfig { + fn from(config: OurChannelsConfigs) -> Self { let mut channel_handshake_config = ChannelHandshakeConfig::default(); if let Some(confs) = 
config.inbound_channels_confirmations { @@ -148,7 +161,19 @@ impl From for ChannelHandshakeConfig { } if let Some(scid_privacy) = config.negotiate_scid_privacy { - channel_handshake_config.negotiate_scid_privacy = scid_privacy + channel_handshake_config.negotiate_scid_privacy = scid_privacy; + } + + if let Some(max_inbound_htlc) = config.max_inbound_in_flight_htlc_percent { + channel_handshake_config.max_inbound_htlc_value_in_flight_percent_of_channel = max_inbound_htlc; + } + + if let Some(announce) = config.announced_channel { + channel_handshake_config.announced_channel = announce; + } + + if let Some(commit) = config.commit_upfront_shutdown_pubkey { + channel_handshake_config.commit_upfront_shutdown_pubkey = commit; } channel_handshake_config @@ -159,6 +184,8 @@ impl From for ChannelHandshakeConfig { pub struct CounterpartyLimits { /// Minimum allowed satoshis when an inbound channel is funded. pub min_funding_sats: Option, + /// Maximum allowed satoshis when an inbound channel is funded. + pub max_funding_sats: Option, /// The remote node sets a limit on the minimum size of HTLCs we can send to them. This allows /// us to limit the maximum minimum-size they can require. pub max_htlc_minimum_msat: Option, @@ -178,6 +205,9 @@ pub struct CounterpartyLimits { pub force_announced_channel_preference: Option, /// Set to the amount of time we're willing to wait to claim money back to us. pub our_locktime_limit: Option, + /// When set an outbound channel can be used straight away without waiting for any on-chain confirmations. 
+ /// https://docs.rs/lightning/latest/lightning/util/config/struct.ChannelHandshakeLimits.html#structfield.trust_own_funding_0conf + pub allow_outbound_0conf: Option, } impl From for ChannelHandshakeLimits { @@ -188,6 +218,10 @@ impl From for ChannelHandshakeLimits { channel_handshake_limits.min_funding_satoshis = sats; } + if let Some(sats) = limits.max_funding_sats { + channel_handshake_limits.max_funding_satoshis = sats; + } + if let Some(msat) = limits.max_htlc_minimum_msat { channel_handshake_limits.max_htlc_minimum_msat = msat; } @@ -208,6 +242,10 @@ impl From for ChannelHandshakeLimits { channel_handshake_limits.max_minimum_depth = confs; } + if let Some(is_0conf) = limits.allow_outbound_0conf { + channel_handshake_limits.trust_own_funding_0conf = is_0conf; + } + if let Some(pref) = limits.force_announced_channel_preference { channel_handshake_limits.force_announced_channel_preference = pref; } @@ -228,7 +266,7 @@ pub struct LightningCoinConf { pub accept_inbound_channels: Option, pub accept_forwards_to_priv_channels: Option, pub channel_options: Option, - pub our_channels_config: Option, + pub our_channels_configs: Option, pub counterparty_channel_config_limits: Option, pub sign_message_prefix: Option, } @@ -236,14 +274,14 @@ pub struct LightningCoinConf { impl From for UserConfig { fn from(conf: LightningCoinConf) -> Self { let mut user_config = UserConfig::default(); - if let Some(config) = conf.our_channels_config { - user_config.own_channel_config = config.into(); + if let Some(config) = conf.our_channels_configs { + user_config.channel_handshake_config = config.into(); } if let Some(limits) = conf.counterparty_channel_config_limits { - user_config.peer_channel_config_limits = limits.into(); + user_config.channel_handshake_limits = limits.into(); } if let Some(options) = conf.channel_options { - user_config.channel_options = options.into(); + user_config.channel_config = options.into(); } if let Some(accept_forwards) = 
conf.accept_forwards_to_priv_channels { user_config.accept_forwards_to_priv_channels = accept_forwards; diff --git a/mm2src/coins/lightning/ln_db.rs b/mm2src/coins/lightning/ln_db.rs index b47a72361a..248c164b06 100644 --- a/mm2src/coins/lightning/ln_db.rs +++ b/mm2src/coins/lightning/ln_db.rs @@ -3,7 +3,7 @@ use common::{now_ms, PagingOptionsEnum}; use db_common::sqlite::rusqlite::types::FromSqlError; use derive_more::Display; use lightning::ln::{PaymentHash, PaymentPreimage, PaymentSecret}; -use secp256k1::PublicKey; +use secp256k1v22::PublicKey; use serde::{Deserialize, Serialize}; use std::str::FromStr; diff --git a/mm2src/coins/lightning/ln_errors.rs b/mm2src/coins/lightning/ln_errors.rs index e70f555d8c..c76102cddc 100644 --- a/mm2src/coins/lightning/ln_errors.rs +++ b/mm2src/coins/lightning/ln_errors.rs @@ -14,6 +14,7 @@ use utxo_signer::with_key_pair::UtxoSignWithKeyPairError; pub type EnableLightningResult = Result>; pub type ConnectToNodeResult = Result>; pub type OpenChannelResult = Result>; +pub type UpdateChannelResult = Result>; pub type ListChannelsResult = Result>; pub type GetChannelDetailsResult = Result>; pub type GenerateInvoiceResult = Result>; @@ -23,6 +24,7 @@ pub type GetPaymentDetailsResult = Result> pub type CloseChannelResult = Result>; pub type ClaimableBalancesResult = Result>; pub type SaveChannelClosingResult = Result>; +pub type TrustedNodeResult = Result>; #[derive(Debug, Deserialize, Display, Serialize, SerializeErrorType)] #[serde(tag = "error_type", content = "error_data")] @@ -72,6 +74,10 @@ impl From for EnableLightningError { fn from(err: SqlError) -> EnableLightningError { EnableLightningError::DbError(err.to_string()) } } +impl From for EnableLightningError { + fn from(e: UtxoRpcError) -> Self { EnableLightningError::RpcError(e.to_string()) } +} + #[derive(Debug, Deserialize, Display, Serialize, SerializeErrorType)] #[serde(tag = "error_type", content = "error_data")] pub enum ConnectToNodeError { @@ -94,7 +100,7 @@ impl 
HttpStatusCode for ConnectToNodeError { ConnectToNodeError::ParseError(_) | ConnectToNodeError::IOError(_) | ConnectToNodeError::ConnectionError(_) => StatusCode::INTERNAL_SERVER_ERROR, - ConnectToNodeError::NoSuchCoin(_) => StatusCode::PRECONDITION_REQUIRED, + ConnectToNodeError::NoSuchCoin(_) => StatusCode::NOT_FOUND, } } } @@ -159,8 +165,9 @@ impl HttpStatusCode for OpenChannelError { | OpenChannelError::IOError(_) | OpenChannelError::DbError(_) | OpenChannelError::InvalidPath(_) - | OpenChannelError::ConvertTxErr(_) => StatusCode::INTERNAL_SERVER_ERROR, - OpenChannelError::NoSuchCoin(_) | OpenChannelError::BalanceError(_) => StatusCode::PRECONDITION_REQUIRED, + | OpenChannelError::ConvertTxErr(_) + | OpenChannelError::BalanceError(_) => StatusCode::INTERNAL_SERVER_ERROR, + OpenChannelError::NoSuchCoin(_) => StatusCode::NOT_FOUND, } } } @@ -213,6 +220,38 @@ impl From for OpenChannelError { fn from(err: SqlError) -> OpenChannelError { OpenChannelError::DbError(err.to_string()) } } +#[derive(Debug, Deserialize, Display, Serialize, SerializeErrorType)] +#[serde(tag = "error_type", content = "error_data")] +pub enum UpdateChannelError { + #[display(fmt = "Lightning network is not supported for {}", _0)] + UnsupportedCoin(String), + #[display(fmt = "No such coin {}", _0)] + NoSuchCoin(String), + #[display(fmt = "No such channel with rpc_channel_id {}", _0)] + NoSuchChannel(u64), + #[display(fmt = "Failure to update channel {}: {}", _0, _1)] + FailureToUpdateChannel(u64, String), +} + +impl HttpStatusCode for UpdateChannelError { + fn status_code(&self) -> StatusCode { + match self { + UpdateChannelError::UnsupportedCoin(_) => StatusCode::BAD_REQUEST, + UpdateChannelError::NoSuchChannel(_) => StatusCode::NOT_FOUND, + UpdateChannelError::NoSuchCoin(_) => StatusCode::NOT_FOUND, + UpdateChannelError::FailureToUpdateChannel(_, _) => StatusCode::INTERNAL_SERVER_ERROR, + } + } +} + +impl From for UpdateChannelError { + fn from(e: CoinFindError) -> Self { + match e { + 
CoinFindError::NoSuchCoin { coin } => UpdateChannelError::NoSuchCoin(coin), + } + } +} + #[derive(Debug, Deserialize, Display, Serialize, SerializeErrorType)] #[serde(tag = "error_type", content = "error_data")] pub enum ListChannelsError { @@ -228,7 +267,7 @@ impl HttpStatusCode for ListChannelsError { fn status_code(&self) -> StatusCode { match self { ListChannelsError::UnsupportedCoin(_) => StatusCode::BAD_REQUEST, - ListChannelsError::NoSuchCoin(_) => StatusCode::PRECONDITION_REQUIRED, + ListChannelsError::NoSuchCoin(_) => StatusCode::NOT_FOUND, ListChannelsError::DbError(_) => StatusCode::INTERNAL_SERVER_ERROR, } } @@ -263,8 +302,7 @@ impl HttpStatusCode for GetChannelDetailsError { fn status_code(&self) -> StatusCode { match self { GetChannelDetailsError::UnsupportedCoin(_) => StatusCode::BAD_REQUEST, - GetChannelDetailsError::NoSuchCoin(_) => StatusCode::PRECONDITION_REQUIRED, - GetChannelDetailsError::NoSuchChannel(_) => StatusCode::NOT_FOUND, + GetChannelDetailsError::NoSuchCoin(_) | GetChannelDetailsError::NoSuchChannel(_) => StatusCode::NOT_FOUND, GetChannelDetailsError::DbError(_) => StatusCode::INTERNAL_SERVER_ERROR, } } @@ -299,7 +337,7 @@ impl HttpStatusCode for GenerateInvoiceError { fn status_code(&self) -> StatusCode { match self { GenerateInvoiceError::UnsupportedCoin(_) => StatusCode::BAD_REQUEST, - GenerateInvoiceError::NoSuchCoin(_) => StatusCode::PRECONDITION_REQUIRED, + GenerateInvoiceError::NoSuchCoin(_) => StatusCode::NOT_FOUND, GenerateInvoiceError::SignOrCreationError(_) | GenerateInvoiceError::DbError(_) => { StatusCode::INTERNAL_SERVER_ERROR }, @@ -344,7 +382,7 @@ impl HttpStatusCode for SendPaymentError { fn status_code(&self) -> StatusCode { match self { SendPaymentError::UnsupportedCoin(_) => StatusCode::BAD_REQUEST, - SendPaymentError::NoSuchCoin(_) => StatusCode::PRECONDITION_REQUIRED, + SendPaymentError::NoSuchCoin(_) => StatusCode::NOT_FOUND, SendPaymentError::PaymentError(_) | SendPaymentError::NoRouteFound(_) | 
SendPaymentError::CLTVExpiryError(_, _) @@ -380,7 +418,7 @@ impl HttpStatusCode for ListPaymentsError { fn status_code(&self) -> StatusCode { match self { ListPaymentsError::UnsupportedCoin(_) => StatusCode::BAD_REQUEST, - ListPaymentsError::NoSuchCoin(_) => StatusCode::PRECONDITION_REQUIRED, + ListPaymentsError::NoSuchCoin(_) => StatusCode::NOT_FOUND, ListPaymentsError::DbError(_) => StatusCode::INTERNAL_SERVER_ERROR, } } @@ -415,7 +453,7 @@ impl HttpStatusCode for GetPaymentDetailsError { fn status_code(&self) -> StatusCode { match self { GetPaymentDetailsError::UnsupportedCoin(_) => StatusCode::BAD_REQUEST, - GetPaymentDetailsError::NoSuchCoin(_) => StatusCode::PRECONDITION_REQUIRED, + GetPaymentDetailsError::NoSuchCoin(_) => StatusCode::NOT_FOUND, GetPaymentDetailsError::NoSuchPayment(_) => StatusCode::NOT_FOUND, GetPaymentDetailsError::DbError(_) => StatusCode::INTERNAL_SERVER_ERROR, } @@ -441,6 +479,8 @@ pub enum CloseChannelError { UnsupportedCoin(String), #[display(fmt = "No such coin {}", _0)] NoSuchCoin(String), + #[display(fmt = "No such channel with rpc_channel_id {}", _0)] + NoSuchChannel(u64), #[display(fmt = "Closing channel error: {}", _0)] CloseChannelError(String), } @@ -449,7 +489,7 @@ impl HttpStatusCode for CloseChannelError { fn status_code(&self) -> StatusCode { match self { CloseChannelError::UnsupportedCoin(_) => StatusCode::BAD_REQUEST, - CloseChannelError::NoSuchCoin(_) => StatusCode::PRECONDITION_REQUIRED, + CloseChannelError::NoSuchChannel(_) | CloseChannelError::NoSuchCoin(_) => StatusCode::NOT_FOUND, CloseChannelError::CloseChannelError(_) => StatusCode::INTERNAL_SERVER_ERROR, } } @@ -476,7 +516,7 @@ impl HttpStatusCode for ClaimableBalancesError { fn status_code(&self) -> StatusCode { match self { ClaimableBalancesError::UnsupportedCoin(_) => StatusCode::BAD_REQUEST, - ClaimableBalancesError::NoSuchCoin(_) => StatusCode::PRECONDITION_REQUIRED, + ClaimableBalancesError::NoSuchCoin(_) => StatusCode::NOT_FOUND, } } } @@ -514,3 +554,36 
@@ impl From for SaveChannelClosingError { impl From for SaveChannelClosingError { fn from(err: TryFromIntError) -> SaveChannelClosingError { SaveChannelClosingError::ConversionError(err) } } + +#[derive(Debug, Deserialize, Display, Serialize, SerializeErrorType)] +#[serde(tag = "error_type", content = "error_data")] +pub enum TrustedNodeError { + #[display(fmt = "Lightning network is not supported for {}", _0)] + UnsupportedCoin(String), + #[display(fmt = "No such coin {}", _0)] + NoSuchCoin(String), + #[display(fmt = "I/O error {}", _0)] + IOError(String), +} + +impl HttpStatusCode for TrustedNodeError { + fn status_code(&self) -> StatusCode { + match self { + TrustedNodeError::UnsupportedCoin(_) => StatusCode::BAD_REQUEST, + TrustedNodeError::NoSuchCoin(_) => StatusCode::NOT_FOUND, + TrustedNodeError::IOError(_) => StatusCode::INTERNAL_SERVER_ERROR, + } + } +} + +impl From for TrustedNodeError { + fn from(e: CoinFindError) -> Self { + match e { + CoinFindError::NoSuchCoin { coin } => TrustedNodeError::NoSuchCoin(coin), + } + } +} + +impl From for TrustedNodeError { + fn from(err: std::io::Error) -> TrustedNodeError { TrustedNodeError::IOError(err.to_string()) } +} diff --git a/mm2src/coins/lightning/ln_events.rs b/mm2src/coins/lightning/ln_events.rs index 59af6a94ee..114f8a12cb 100644 --- a/mm2src/coins/lightning/ln_events.rs +++ b/mm2src/coins/lightning/ln_events.rs @@ -9,13 +9,14 @@ use common::executor::{spawn, Timer}; use common::log::{error, info}; use common::{now_ms, spawn_abortable, AbortOnDropHandle}; use core::time::Duration; +use futures::compat::Future01CompatExt; use lightning::chain::chaininterface::{ConfirmationTarget, FeeEstimator}; use lightning::chain::keysinterface::SpendableOutputDescriptor; use lightning::util::events::{Event, EventHandler, PaymentPurpose}; use parking_lot::Mutex as PaMutex; use rand::Rng; use script::{Builder, SignatureVersion}; -use secp256k1::Secp256k1; +use secp256k1v22::Secp256k1; use std::convert::TryFrom; use 
std::sync::Arc; use utxo_signer::with_key_pair::sign_tx; @@ -27,6 +28,7 @@ pub struct LightningEventHandler { channel_manager: Arc, keys_manager: Arc, db: SqliteLightningDB, + trusted_nodes: TrustedNodesShared, abort_handlers: Arc>>, } @@ -38,18 +40,20 @@ impl EventHandler for LightningEventHandler { channel_value_satoshis, output_script, user_channel_id, + counterparty_node_id, } => self.handle_funding_generation_ready( *temporary_channel_id, *channel_value_satoshis, output_script, *user_channel_id, + counterparty_node_id, ), Event::PaymentReceived { payment_hash, - amt, + amount_msat, purpose, - } => self.handle_payment_received(*payment_hash, *amt, purpose), + } => self.handle_payment_received(payment_hash, *amount_msat, purpose), Event::PaymentSent { payment_preimage, @@ -58,16 +62,20 @@ impl EventHandler for LightningEventHandler { .. } => self.handle_payment_sent(*payment_preimage, *payment_hash, *fee_paid_msat), + Event::PaymentClaimed { payment_hash, amount_msat, purpose } => self.handle_payment_claimed(*payment_hash, *amount_msat, purpose), + Event::PaymentFailed { payment_hash, .. } => self.handle_payment_failed(*payment_hash), Event::PendingHTLCsForwardable { time_forwardable } => self.handle_pending_htlcs_forwards(*time_forwardable), - Event::SpendableOutputs { outputs } => self.handle_spendable_outputs(outputs), + Event::SpendableOutputs { outputs } => self.handle_spendable_outputs(outputs.clone()), // Todo: an RPC for total amount earned - Event::PaymentForwarded { fee_earned_msat, claim_from_onchain_tx } => info!( - "Received a fee of {} milli-satoshis for a successfully forwarded payment through our {} lightning node. Was the forwarded HTLC claimed by our counterparty via an on-chain transaction?: {}", + Event::PaymentForwarded { fee_earned_msat, claim_from_onchain_tx, prev_channel_id, next_channel_id} => info!( + "Received a fee of {} milli-satoshis for a successfully forwarded payment from {} to {} through our {} lightning node. 
Was the forwarded HTLC claimed by our counterparty via an on-chain transaction?: {}", fee_earned_msat.unwrap_or_default(), + prev_channel_id.map(hex::encode).unwrap_or_else(|| "unknown".into()), + next_channel_id.map(hex::encode).unwrap_or_else(|| "unknown".into()), self.platform.coin.ticker(), claim_from_onchain_tx, ), @@ -86,6 +94,7 @@ impl EventHandler for LightningEventHandler { ), // Handling updating channel penalties after successfully routing a payment along a path is done by the InvoicePayer. + // Todo: Maybe add information to db about why a payment succeeded using this event Event::PaymentPathSuccessful { payment_id, payment_hash, @@ -98,7 +107,8 @@ impl EventHandler for LightningEventHandler { ), // Handling updating channel penalties after a payment fails to route through a channel is done by the InvoicePayer. - // Also abandoning or retrying a payment is handled by the InvoicePayer. + // Also abandoning or retrying a payment is handled by the InvoicePayer. + // Todo: Add information to db about why a payment failed using this event Event::PaymentPathFailed { payment_hash, rejected_by_dest, @@ -120,6 +130,21 @@ impl EventHandler for LightningEventHandler { push_msat, channel_type: _, } => self.handle_open_channel_request(*temporary_channel_id, *counterparty_node_id, *funding_satoshis, *push_msat), + + // Just log an error for now, but this event can be used along PaymentForwarded for a new RPC that shows stats about how a node + // forward payments over it's outbound channels which can be useful for a user that wants to run a forwarding node for some profits. + Event::HTLCHandlingFailed { + prev_channel_id, failed_next_destination + } => error!( + "Failed to handle htlc from {} to {:?}", + hex::encode(prev_channel_id), + failed_next_destination, + ), + + // ProbeSuccessful and ProbeFailed are events in response to a send_probe function call which sends a payment that probes a given route for liquidity. 
+ // send_probe is not used for now but may be used in order matching in the future to check if a swap can happen or not. + Event::ProbeSuccessful { .. } => (), + Event::ProbeFailed { .. } => (), } } } @@ -232,6 +257,7 @@ impl LightningEventHandler { channel_manager: Arc, keys_manager: Arc, db: SqliteLightningDB, + trusted_nodes: TrustedNodesShared, abort_handlers: Arc>>, ) -> Self { LightningEventHandler { @@ -239,6 +265,7 @@ impl LightningEventHandler { channel_manager, keys_manager, db, + trusted_nodes, abort_handlers, } } @@ -249,10 +276,11 @@ impl LightningEventHandler { channel_value_satoshis: u64, output_script: &Script, user_channel_id: u64, + counterparty_node_id: &PublicKey, ) { info!( - "Handling FundingGenerationReady event for internal channel id: {}", - user_channel_id + "Handling FundingGenerationReady event for internal channel id: {} with: {}", + user_channel_id, counterparty_node_id ); let funding_tx = match sign_funding_transaction(user_channel_id, output_script, self.platform.clone()) { Ok(tx) => tx, @@ -267,9 +295,9 @@ impl LightningEventHandler { }; let funding_txid = funding_tx.txid(); // Give the funding transaction back to LDK for opening the channel. 
- if let Err(e) = self - .channel_manager - .funding_transaction_generated(&temporary_channel_id, funding_tx) + if let Err(e) = + self.channel_manager + .funding_transaction_generated(&temporary_channel_id, counterparty_node_id, funding_tx) { error!("{:?}", e); return; @@ -289,62 +317,63 @@ impl LightningEventHandler { }); } - fn handle_payment_received(&self, payment_hash: PaymentHash, amt: u64, purpose: &PaymentPurpose) { + fn handle_payment_received(&self, payment_hash: &PaymentHash, received_amount: u64, purpose: &PaymentPurpose) { info!( - "Handling PaymentReceived event for payment_hash: {}", - hex::encode(payment_hash.0) + "Handling PaymentReceived event for payment_hash: {} with amount {}", + hex::encode(payment_hash.0), + received_amount ); - let (payment_preimage, payment_secret) = match purpose { - PaymentPurpose::InvoicePayment { - payment_preimage, - payment_secret, - } => match payment_preimage { - Some(preimage) => (*preimage, Some(*payment_secret)), - None => return, - }, - PaymentPurpose::SpontaneousPayment(preimage) => (*preimage, None), - }; - let status = match self.channel_manager.claim_funds(payment_preimage) { - true => { - info!( - "Received an amount of {} millisatoshis for payment hash {}", - amt, - hex::encode(payment_hash.0) - ); - HTLCStatus::Succeeded + let payment_preimage = match purpose { + PaymentPurpose::InvoicePayment { payment_preimage, .. } => match payment_preimage { + Some(preimage) => *preimage, + None => { + // Free the htlc immediately if we don't have the preimage required to claim the payment + // to allow for this inbound liquidity to be used for other inbound payments. 
+ self.channel_manager.fail_htlc_backwards(payment_hash); + return; + }, }, - false => HTLCStatus::Failed, + PaymentPurpose::SpontaneousPayment(preimage) => *preimage, }; + self.channel_manager.claim_funds(payment_preimage); + } + + fn handle_payment_claimed(&self, payment_hash: PaymentHash, amount_msat: u64, purpose: &PaymentPurpose) { + info!( + "Received an amount of {} millisatoshis for payment hash {}", + amount_msat, + hex::encode(payment_hash.0) + ); let db = self.db.clone(); - match purpose { - PaymentPurpose::InvoicePayment { .. } => spawn(async move { + match *purpose { + PaymentPurpose::InvoicePayment { payment_preimage, .. } => spawn(async move { if let Ok(Some(mut payment_info)) = db.get_payment_from_db(payment_hash).await.error_log_passthrough() { - payment_info.preimage = Some(payment_preimage); + payment_info.preimage = payment_preimage; payment_info.status = HTLCStatus::Succeeded; - payment_info.amt_msat = Some(amt as i64); + payment_info.amt_msat = Some(amount_msat as i64); payment_info.last_updated = (now_ms() / 1000) as i64; - if let Err(e) = db.add_or_update_payment_in_db(payment_info).await { - error!("Unable to update payment information in DB: {}", e); - } + db.add_or_update_payment_in_db(payment_info) + .await + .error_log_with_msg("Unable to update payment information in DB!"); } }), - PaymentPurpose::SpontaneousPayment(_) => { + PaymentPurpose::SpontaneousPayment(payment_preimage) => { let payment_info = DBPaymentInfo { payment_hash, payment_type: PaymentType::InboundPayment, description: "".into(), preimage: Some(payment_preimage), - secret: payment_secret, - amt_msat: Some(amt as i64), + secret: None, + amt_msat: Some(amount_msat as i64), fee_paid_msat: None, - status, + status: HTLCStatus::Succeeded, created_at: (now_ms() / 1000) as i64, last_updated: (now_ms() / 1000) as i64, }; spawn(async move { - if let Err(e) = db.add_or_update_payment_in_db(payment_info).await { - error!("Unable to update payment information in DB: {}", e); - } + 
db.add_or_update_payment_in_db(payment_info) + .await + .error_log_with_msg("Unable to update payment information in DB!"); }); }, } @@ -368,9 +397,9 @@ impl LightningEventHandler { payment_info.fee_paid_msat = fee_paid_msat.map(|f| f as i64); payment_info.last_updated = (now_ms() / 1000) as i64; let amt_msat = payment_info.amt_msat; - if let Err(e) = db.add_or_update_payment_in_db(payment_info).await { - error!("Unable to update payment information in DB: {}", e); - } + db.add_or_update_payment_in_db(payment_info) + .await + .error_log_with_msg("Unable to update payment information in DB!"); info!( "Successfully sent payment of {} millisatoshis with payment hash {}", amt_msat.unwrap_or_default(), @@ -412,9 +441,9 @@ impl LightningEventHandler { if let Ok(Some(mut payment_info)) = db.get_payment_from_db(payment_hash).await.error_log_passthrough() { payment_info.status = HTLCStatus::Failed; payment_info.last_updated = (now_ms() / 1000) as i64; - if let Err(e) = db.add_or_update_payment_in_db(payment_info).await { - error!("Unable to update payment information in DB: {}", e); - } + db.add_or_update_payment_in_db(payment_info) + .await + .error_log_with_msg("Unable to update payment information in DB!"); } }); } @@ -430,85 +459,97 @@ impl LightningEventHandler { }); } - fn handle_spendable_outputs(&self, outputs: &[SpendableOutputDescriptor]) { + fn handle_spendable_outputs(&self, outputs: Vec) { info!("Handling SpendableOutputs event!"); - let platform_coin = &self.platform.coin; + if outputs.is_empty() { + error!("Received SpendableOutputs event with no outputs!"); + return; + } + // Todo: add support for Hardware wallets for funding transactions and spending spendable outputs (channel closing transactions) - let my_address = match platform_coin.as_ref().derivation_method.iguana_or_err() { - Ok(addr) => addr, + let my_address = match self.platform.coin.as_ref().derivation_method.iguana_or_err() { + Ok(addr) => addr.clone(), Err(e) => { error!("{}", e); return; }, }; 
- let change_destination_script = Builder::build_witness_script(&my_address.hash).to_bytes().take().into(); - let feerate_sat_per_1000_weight = self.platform.get_est_sat_per_1000_weight(ConfirmationTarget::Normal); - let output_descriptors = &outputs.iter().collect::>(); - let claiming_tx = match self.keys_manager.spend_spendable_outputs( - output_descriptors, - Vec::new(), - change_destination_script, - feerate_sat_per_1000_weight, - &Secp256k1::new(), - ) { - Ok(tx) => tx, - Err(_) => { - error!("Error spending spendable outputs"); - return; - }, - }; - let claiming_txid = claiming_tx.txid(); - let tx_hex = serialize_hex(&claiming_tx); - if let Err(e) = tokio::task::block_in_place(move || self.platform.coin.send_raw_tx(&tx_hex).wait()) { - // TODO: broadcast transaction through p2p network in this case - error!( - "Broadcasting of the claiming transaction {} failed: {}", - claiming_txid, e - ); - return; - } - - let claiming_tx_inputs_value = outputs.iter().fold(0, |sum, output| match output { - SpendableOutputDescriptor::StaticOutput { output, .. 
} => sum + output.value, - SpendableOutputDescriptor::DelayedPaymentOutput(descriptor) => sum + descriptor.output.value, - SpendableOutputDescriptor::StaticPaymentOutput(descriptor) => sum + descriptor.output.value, - }); - let claiming_tx_outputs_value = claiming_tx.output.iter().fold(0, |sum, txout| sum + txout.value); - if claiming_tx_inputs_value < claiming_tx_outputs_value { - error!( - "Claiming transaction input value {} can't be less than outputs value {}!", - claiming_tx_inputs_value, claiming_tx_outputs_value - ); - return; - } - let claiming_tx_fee = claiming_tx_inputs_value - claiming_tx_outputs_value; - let claiming_tx_fee_per_channel = (claiming_tx_fee as f64) / (outputs.len() as f64); + let platform = self.platform.clone(); + let db = self.db.clone(); + let keys_manager = self.keys_manager.clone(); - for output in outputs { - let (closing_txid, claimed_balance) = match output { - SpendableOutputDescriptor::StaticOutput { outpoint, output } => { - (outpoint.txid.to_string(), output.value) - }, - SpendableOutputDescriptor::DelayedPaymentOutput(descriptor) => { - (descriptor.outpoint.txid.to_string(), descriptor.output.value) - }, - SpendableOutputDescriptor::StaticPaymentOutput(descriptor) => { - (descriptor.outpoint.txid.to_string(), descriptor.output.value) + let abort_handler = spawn_abortable(async move { + let change_destination_script = Builder::build_witness_script(&my_address.hash).to_bytes().take().into(); + let feerate_sat_per_1000_weight = platform.get_est_sat_per_1000_weight(ConfirmationTarget::Normal); + let output_descriptors = outputs.iter().collect::>(); + let claiming_tx = match keys_manager.spend_spendable_outputs( + &output_descriptors, + Vec::new(), + change_destination_script, + feerate_sat_per_1000_weight, + &Secp256k1::new(), + ) { + Ok(tx) => tx, + Err(_) => { + error!("Error spending spendable outputs"); + return; }, }; - let db = self.db.clone(); - - // This doesn't need to be respawned on restart unlike add_closing_tx_to_db 
since Event::SpendableOutputs will be re-fired on restart - // if the spending_tx is not broadcasted. - let abort_handler = spawn_abortable(add_claiming_tx_to_db_loop( - db, - closing_txid, - claiming_txid.to_string(), - (claimed_balance as f64) - claiming_tx_fee_per_channel, - )); - self.abort_handlers.lock().push(abort_handler); - } + + let claiming_txid = claiming_tx.txid(); + let tx_hex = serialize_hex(&claiming_tx); + + if let Err(e) = platform.coin.send_raw_tx(&tx_hex).compat().await { + // TODO: broadcast transaction through p2p network in this case, we have to check that the transactions is confirmed on-chain after this. + error!( + "Broadcasting of the claiming transaction {} failed: {}", + claiming_txid, e + ); + return; + } + + let claiming_tx_inputs_value = outputs.iter().fold(0, |sum, output| match output { + SpendableOutputDescriptor::StaticOutput { output, .. } => sum + output.value, + SpendableOutputDescriptor::DelayedPaymentOutput(descriptor) => sum + descriptor.output.value, + SpendableOutputDescriptor::StaticPaymentOutput(descriptor) => sum + descriptor.output.value, + }); + let claiming_tx_outputs_value = claiming_tx.output.iter().fold(0, |sum, txout| sum + txout.value); + if claiming_tx_inputs_value < claiming_tx_outputs_value { + error!( + "Claiming transaction input value {} can't be less than outputs value {}!", + claiming_tx_inputs_value, claiming_tx_outputs_value + ); + return; + } + let claiming_tx_fee = claiming_tx_inputs_value - claiming_tx_outputs_value; + let claiming_tx_fee_per_channel = (claiming_tx_fee as f64) / (outputs.len() as f64); + + for output in outputs { + let (closing_txid, claimed_balance) = match output { + SpendableOutputDescriptor::StaticOutput { outpoint, output } => { + (outpoint.txid.to_string(), output.value) + }, + SpendableOutputDescriptor::DelayedPaymentOutput(descriptor) => { + (descriptor.outpoint.txid.to_string(), descriptor.output.value) + }, + SpendableOutputDescriptor::StaticPaymentOutput(descriptor) => { 
+ (descriptor.outpoint.txid.to_string(), descriptor.output.value) + }, + }; + + // This doesn't need to be respawned on restart unlike add_closing_tx_to_db since Event::SpendableOutputs will be re-fired on restart + // if the spending_tx is not broadcasted. + add_claiming_tx_to_db_loop( + db.clone(), + closing_txid, + claiming_txid.to_string(), + (claimed_balance as f64) - claiming_tx_fee_per_channel, + ) + .await; + } + }); + self.abort_handlers.lock().push(abort_handler); } fn handle_open_channel_request( @@ -524,14 +565,27 @@ impl LightningEventHandler { ); let db = self.db.clone(); + let trusted_nodes = self.trusted_nodes.clone(); let channel_manager = self.channel_manager.clone(); let platform = self.platform.clone(); spawn(async move { if let Ok(last_channel_rpc_id) = db.get_last_channel_rpc_id().await.error_log_passthrough() { let user_channel_id = last_channel_rpc_id as u64 + 1; - if channel_manager - .accept_inbound_channel(&temporary_channel_id, user_channel_id) - .is_ok() + + let trusted_nodes = trusted_nodes.lock().clone(); + let accepted_inbound_channel_with_0conf = trusted_nodes.contains(&counterparty_node_id) + && channel_manager + .accept_inbound_channel_from_trusted_peer_0conf( + &temporary_channel_id, + &counterparty_node_id, + user_channel_id, + ) + .is_ok(); + + if accepted_inbound_channel_with_0conf + || channel_manager + .accept_inbound_channel(&temporary_channel_id, &counterparty_node_id, user_channel_id) + .is_ok() { let is_public = match channel_manager .list_channels() diff --git a/mm2src/coins/lightning/ln_filesystem_persister.rs b/mm2src/coins/lightning/ln_filesystem_persister.rs index 7182f51eb3..3c42c55e8b 100644 --- a/mm2src/coins/lightning/ln_filesystem_persister.rs +++ b/mm2src/coins/lightning/ln_filesystem_persister.rs @@ -1,29 +1,24 @@ -use crate::lightning::ln_platform::Platform; -use crate::lightning::ln_storage::{LightningStorage, NodesAddressesMap, NodesAddressesMapShared, Scorer}; -use 
crate::lightning::ln_utils::{ChainMonitor, ChannelManager}; +use crate::lightning::ln_storage::{LightningStorage, NetworkGraph, NodesAddressesMap, NodesAddressesMapShared, Scorer, + TrustedNodesShared}; use async_trait::async_trait; use bitcoin::blockdata::constants::genesis_block; -use bitcoin::Network; -use bitcoin_hashes::hex::ToHex; +use bitcoin::{BlockHash, Network, Txid}; +use bitcoin_hashes::hex::FromHex; use common::async_blocking; use common::log::LogState; -use lightning::chain::channelmonitor::{ChannelMonitor, ChannelMonitorUpdate}; -use lightning::chain::keysinterface::{InMemorySigner, KeysManager, Sign}; -use lightning::chain::transaction::OutPoint; -use lightning::chain::{chainmonitor, ChannelMonitorUpdateErr}; -use lightning::routing::network_graph::NetworkGraph; -use lightning::routing::scoring::ProbabilisticScoringParameters; -use lightning::util::ser::{Readable, ReadableArgs, Writeable}; -use lightning_background_processor::Persister; -use lightning_persister::FilesystemPersister; -use mm2_io::fs::check_dir_operations; -use secp256k1::PublicKey; -use std::collections::HashMap; +use lightning::chain::channelmonitor::ChannelMonitor; +use lightning::chain::keysinterface::{KeysInterface, Sign}; +use lightning::routing::scoring::{ProbabilisticScorer, ProbabilisticScoringParameters}; +use lightning::util::persist::KVStorePersister; +use lightning::util::ser::{ReadableArgs, Writeable}; +use mm2_io::fs::{check_dir_operations, invalid_data_err, read_json, write_json}; +use secp256k1v22::PublicKey; +use std::collections::{HashMap, HashSet}; use std::fs; -use std::io::{BufReader, BufWriter}; +use std::io::{BufReader, BufWriter, Cursor}; use std::net::SocketAddr; use std::ops::Deref; -use std::path::{Path, PathBuf}; +use std::path::PathBuf; use std::str::FromStr; use std::sync::{Arc, Mutex}; @@ -32,23 +27,18 @@ use std::sync::{Arc, Mutex}; #[cfg(target_family = "windows")] use {std::ffi::OsStr, std::os::windows::ffi::OsStrExt}; +const USE_TMP_FILE: bool = 
true; + pub struct LightningFilesystemPersister { main_path: PathBuf, backup_path: Option, - channels_persister: FilesystemPersister, } impl LightningFilesystemPersister { /// Initialize a new LightningPersister and set the path to the individual channels' /// files. #[inline] - pub fn new(main_path: PathBuf, backup_path: Option) -> Self { - Self { - main_path: main_path.clone(), - backup_path, - channels_persister: FilesystemPersister::new(main_path.display().to_string()), - } - } + pub fn new(main_path: PathBuf, backup_path: Option) -> Self { Self { main_path, backup_path } } /// Get the directory which was provided when this persister was initialized. #[inline] @@ -58,18 +48,6 @@ impl LightningFilesystemPersister { #[inline] pub fn backup_path(&self) -> Option { self.backup_path.clone() } - /// Get the channels_persister which was initialized when this persister was initialized. - #[inline] - pub fn channels_persister(&self) -> &FilesystemPersister { &self.channels_persister } - - pub fn monitor_backup_path(&self) -> Option { - if let Some(mut backup_path) = self.backup_path() { - backup_path.push("monitors"); - return Some(backup_path); - } - None - } - pub fn nodes_addresses_path(&self) -> PathBuf { let mut path = self.main_path(); path.push("channel_nodes_data"); @@ -77,11 +55,10 @@ impl LightningFilesystemPersister { } pub fn nodes_addresses_backup_path(&self) -> Option { - if let Some(mut backup_path) = self.backup_path() { + self.backup_path().map(|mut backup_path| { backup_path.push("channel_nodes_data"); - return Some(backup_path); - } - None + backup_path + }) } pub fn network_graph_path(&self) -> PathBuf { @@ -96,43 +73,103 @@ impl LightningFilesystemPersister { path } + pub fn trusted_nodes_path(&self) -> PathBuf { + let mut path = self.main_path(); + path.push("trusted_nodes"); + path + } + pub fn manager_path(&self) -> PathBuf { let mut path = self.main_path(); path.push("manager"); path } -} -#[derive(Clone)] -pub struct 
LightningPersisterShared(pub Arc); + pub fn monitors_path(&self) -> PathBuf { + let mut path = self.main_path(); + path.push("monitors"); + path + } -impl Deref for LightningPersisterShared { - type Target = LightningFilesystemPersister; - fn deref(&self) -> &LightningFilesystemPersister { self.0.deref() } -} + pub fn monitors_backup_path(&self) -> Option { + self.backup_path().map(|mut backup_path| { + backup_path.push("monitors"); + backup_path + }) + } -impl Persister, Arc, Arc, Arc, Arc> - for LightningPersisterShared -{ - fn persist_manager(&self, channel_manager: &ChannelManager) -> Result<(), std::io::Error> { - FilesystemPersister::persist_manager(self.0.main_path().display().to_string(), channel_manager)?; - if let Some(backup_path) = self.0.backup_path() { - let file = fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(backup_path)?; - channel_manager.write(&mut BufWriter::new(file))?; + /// Read `ChannelMonitor`s from disk. + pub fn read_channelmonitors( + &self, + keys_manager: K, + ) -> Result)>, std::io::Error> + where + K::Target: KeysInterface + Sized, + { + let path = self.monitors_path(); + if !path.exists() { + return Ok(Vec::new()); } - Ok(()) + let mut res = Vec::new(); + for file_option in fs::read_dir(path)? { + let file = file_option?; + let owned_file_name = file.file_name(); + let filename = owned_file_name.to_str().ok_or_else(|| { + invalid_data_err("Invalid ChannelMonitor file name", format!("{:?}", owned_file_name)) + })?; + if filename == "checkval" { + continue; + } + if !filename.is_ascii() || filename.len() < 65 { + return Err(invalid_data_err("Invalid ChannelMonitor file name", filename)); + } + if filename.ends_with(".tmp") { + // If we were in the middle of committing an new update and crashed, it should be + // safe to ignore the update - we should never have returned to the caller and + // irrevocably committed to the new state in any way. 
+ continue; + } + + let txid = Txid::from_hex(filename.split_at(64).0) + .map_err(|e| invalid_data_err("Invalid tx ID in filename error", e))?; + + let index = filename + .split_at(65) + .1 + .parse::() + .map_err(|e| invalid_data_err("Invalid tx index in filename error", e))?; + + let contents = fs::read(&file.path())?; + let mut buffer = Cursor::new(&contents); + let (blockhash, channel_monitor) = <(BlockHash, ChannelMonitor)>::read(&mut buffer, &*keys_manager) + .map_err(|e| invalid_data_err("Failed to deserialize ChannelMonito", e))?; + + if channel_monitor.get_funding_txo().0.txid != txid || channel_monitor.get_funding_txo().0.index != index { + return Err(invalid_data_err( + "ChannelMonitor was stored in the wrong file", + filename, + )); + } + + res.push((blockhash, channel_monitor)); + } + Ok(res) } +} - fn persist_graph(&self, network_graph: &NetworkGraph) -> Result<(), std::io::Error> { - if FilesystemPersister::persist_network_graph(self.0.main_path().display().to_string(), network_graph).is_err() - { - // Persistence errors here are non-fatal as we can just fetch the routing graph - // again later, but they may indicate a disk error which could be fatal elsewhere. 
- eprintln!("Warning: Failed to persist network graph, check your disk and permissions"); +impl KVStorePersister for LightningFilesystemPersister { + fn persist(&self, key: &str, object: &W) -> std::io::Result<()> { + let mut dest_file = self.main_path(); + dest_file.push(key); + drop_mutability!(dest_file); + write_to_file(dest_file, object)?; + + if !matches!(key, "network_graph" | "scorer") { + if let Some(mut dest_file) = self.backup_path() { + dest_file.push(key); + drop_mutability!(dest_file); + write_to_file(dest_file, object)?; + } } Ok(()) @@ -155,49 +192,40 @@ fn path_to_windows_str>(path: T) -> Vec( - mut path: PathBuf, - filename: String, - monitor: &ChannelMonitor, -) -> std::io::Result<()> { +fn write_to_file(dest_file: PathBuf, data: &W) -> std::io::Result<()> { + let mut tmp_file = dest_file.clone(); + tmp_file.set_extension("tmp"); + drop_mutability!(tmp_file); + // Do a crazy dance with lots of fsync()s to be overly cautious here... // We never want to end up in a state where we've lost the old data, or end up using the // old data on power loss after we've returned. // The way to atomically write a file on Unix platforms is: // open(tmpname), write(tmpfile), fsync(tmpfile), close(tmpfile), rename(), fsync(dir) - path.push(filename); - let filename_with_path = path.display().to_string(); - let tmp_filename = format!("{}.tmp", filename_with_path); - { - let mut f = fs::File::create(&tmp_filename)?; - monitor.write(&mut f)?; - f.sync_all()?; + // Note that going by rust-lang/rust@d602a6b, on MacOS it is only safe to use + // rust stdlib 1.36 or higher. + let mut buf = BufWriter::new(fs::File::create(&tmp_file)?); + data.write(&mut buf)?; + buf.into_inner()?.sync_all()?; } // Fsync the parent directory on Unix. 
#[cfg(target_family = "unix")] { - fs::rename(&tmp_filename, &filename_with_path)?; - let path = Path::new(&filename_with_path).parent().ok_or_else(|| { - std::io::Error::new( - std::io::ErrorKind::NotFound, - format!("can't find parent dir for {}", filename_with_path), - ) - })?; - let dir_file = fs::OpenOptions::new().read(true).open(path)?; + let parent_directory = dest_file.parent().unwrap(); + fs::rename(&tmp_file, &dest_file)?; + let dir_file = fs::OpenOptions::new().read(true).open(parent_directory)?; unsafe { libc::fsync(dir_file.as_raw_fd()); } } #[cfg(target_family = "windows")] { - let src = PathBuf::from(tmp_filename); - let dst = PathBuf::from(filename_with_path.clone()); - if Path::new(&filename_with_path).exists() { + if dest_file.exists() { unsafe { winapi::um::winbase::ReplaceFileW( - path_to_windows_str(dst).as_ptr(), - path_to_windows_str(src).as_ptr(), + path_to_windows_str(dest_file).as_ptr(), + path_to_windows_str(tmp_file).as_ptr(), std::ptr::null(), winapi::um::winbase::REPLACEFILE_IGNORE_MERGE_ERRORS, std::ptr::null_mut() as *mut winapi::ctypes::c_void, @@ -207,8 +235,8 @@ fn write_monitor_to_file( } else { call!(unsafe { winapi::um::winbase::MoveFileExW( - path_to_windows_str(src).as_ptr(), - path_to_windows_str(dst).as_ptr(), + path_to_windows_str(tmp_file).as_ptr(), + path_to_windows_str(dest_file).as_ptr(), winapi::um::winbase::MOVEFILE_WRITE_THROUGH | winapi::um::winbase::MOVEFILE_REPLACE_EXISTING, ) }); @@ -217,62 +245,29 @@ fn write_monitor_to_file( Ok(()) } -impl chainmonitor::Persist for LightningFilesystemPersister { - fn persist_new_channel( - &self, - funding_txo: OutPoint, - monitor: &ChannelMonitor, - update_id: chainmonitor::MonitorUpdateId, - ) -> Result<(), ChannelMonitorUpdateErr> { - self.channels_persister - .persist_new_channel(funding_txo, monitor, update_id)?; - if let Some(backup_path) = self.monitor_backup_path() { - let filename = format!("{}_{}", funding_txo.txid.to_hex(), funding_txo.index); - 
write_monitor_to_file(backup_path, filename, monitor) - .map_err(|_| ChannelMonitorUpdateErr::PermanentFailure)?; - } - Ok(()) - } - - fn update_persisted_channel( - &self, - funding_txo: OutPoint, - update: &Option, - monitor: &ChannelMonitor, - update_id: chainmonitor::MonitorUpdateId, - ) -> Result<(), ChannelMonitorUpdateErr> { - self.channels_persister - .update_persisted_channel(funding_txo, update, monitor, update_id)?; - if let Some(backup_path) = self.monitor_backup_path() { - let filename = format!("{}_{}", funding_txo.txid.to_hex(), funding_txo.index); - write_monitor_to_file(backup_path, filename, monitor) - .map_err(|_| ChannelMonitorUpdateErr::PermanentFailure)?; - } - Ok(()) - } -} - #[async_trait] impl LightningStorage for LightningFilesystemPersister { type Error = std::io::Error; async fn init_fs(&self) -> Result<(), Self::Error> { - let path = self.main_path(); - let backup_path = self.backup_path(); + let path = self.monitors_path(); + let backup_path = self.monitors_backup_path(); async_blocking(move || { fs::create_dir_all(path.clone())?; if let Some(path) = backup_path { fs::create_dir_all(path.clone())?; check_dir_operations(&path)?; + check_dir_operations(path.parent().unwrap())?; } - check_dir_operations(&path) + check_dir_operations(&path)?; + check_dir_operations(path.parent().unwrap()) }) .await } async fn is_fs_initialized(&self) -> Result { - let dir_path = self.main_path(); - let backup_dir_path = self.backup_path(); + let dir_path = self.monitors_path(); + let backup_dir_path = self.monitors_backup_path(); async_blocking(move || { if !dir_path.exists() || backup_dir_path.as_ref().map(|path| !path.exists()).unwrap_or(false) { Ok(false) @@ -303,97 +298,103 @@ impl LightningStorage for LightningFilesystemPersister { if !path.exists() { return Ok(HashMap::new()); } - async_blocking(move || { - let file = fs::File::open(path)?; - let reader = BufReader::new(file); - let nodes_addresses: HashMap = - 
serde_json::from_reader(reader).map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?; - nodes_addresses - .iter() - .map(|(pubkey_str, addr)| { - let pubkey = PublicKey::from_str(pubkey_str) - .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?; - Ok((pubkey, *addr)) - }) - .collect() - }) - .await + + let nodes_addresses: HashMap = read_json(&path) + .await + .map_err(|e| invalid_data_err("Error", e))? + .ok_or_else(|| std::io::Error::from(std::io::ErrorKind::NotFound))?; + + nodes_addresses + .iter() + .map(|(pubkey_str, addr)| { + let pubkey = PublicKey::from_str(pubkey_str).map_err(|e| invalid_data_err("Error", e))?; + Ok((pubkey, *addr)) + }) + .collect() } async fn save_nodes_addresses(&self, nodes_addresses: NodesAddressesMapShared) -> Result<(), Self::Error> { let path = self.nodes_addresses_path(); let backup_path = self.nodes_addresses_backup_path(); - async_blocking(move || { - let nodes_addresses: HashMap = nodes_addresses - .lock() - .iter() - .map(|(pubkey, addr)| (pubkey.to_string(), *addr)) - .collect(); - - let file = fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(path)?; - serde_json::to_writer(file, &nodes_addresses) - .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?; - if let Some(path) = backup_path { - let file = fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(path)?; - serde_json::to_writer(file, &nodes_addresses) - .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?; - } + let nodes_addresses: HashMap = nodes_addresses + .lock() + .iter() + .map(|(pubkey, addr)| (pubkey.to_string(), *addr)) + .collect(); - Ok(()) - }) - .await + write_json(&nodes_addresses, &path, USE_TMP_FILE) + .await + .map_err(|e| invalid_data_err("Error", e))?; + + if let Some(path) = backup_path { + write_json(&nodes_addresses, &path, USE_TMP_FILE) + .await + .map_err(|e| invalid_data_err("Error", e))?; + } + + Ok(()) } - async 
fn get_network_graph(&self, network: Network) -> Result { + async fn get_network_graph(&self, network: Network, logger: Arc) -> Result { let path = self.network_graph_path(); if !path.exists() { - return Ok(NetworkGraph::new(genesis_block(network).header.block_hash())); + return Ok(NetworkGraph::new(genesis_block(network).header.block_hash(), logger)); } async_blocking(move || { let file = fs::File::open(path)?; common::log::info!("Reading the saved lightning network graph from file, this can take some time!"); - NetworkGraph::read(&mut BufReader::new(file)) - .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string())) + NetworkGraph::read(&mut BufReader::new(file), logger).map_err(|e| invalid_data_err("Error", e)) }) .await } - async fn get_scorer(&self, network_graph: Arc) -> Result { + async fn get_scorer(&self, network_graph: Arc, logger: Arc) -> Result { let path = self.scorer_path(); if !path.exists() { - return Ok(Scorer::new(ProbabilisticScoringParameters::default(), network_graph)); + return Ok(Mutex::new(ProbabilisticScorer::new( + ProbabilisticScoringParameters::default(), + network_graph, + logger, + ))); } async_blocking(move || { let file = fs::File::open(path)?; - Scorer::read( + let scorer = ProbabilisticScorer::read( &mut BufReader::new(file), - (ProbabilisticScoringParameters::default(), network_graph), + (ProbabilisticScoringParameters::default(), network_graph, logger), ) - .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string())) + .map_err(|e| invalid_data_err("Error", e))?; + Ok(Mutex::new(scorer)) }) .await } - async fn save_scorer(&self, scorer: Arc>) -> Result<(), Self::Error> { - let path = self.scorer_path(); - async_blocking(move || { - let scorer = scorer.lock().unwrap(); - let file = fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(path)?; - scorer.write(&mut BufWriter::new(file)) - }) - .await + async fn get_trusted_nodes(&self) -> Result, Self::Error> { + 
let path = self.trusted_nodes_path(); + if !path.exists() { + return Ok(HashSet::new()); + } + + let trusted_nodes: HashSet = read_json(&path) + .await + .map_err(|e| invalid_data_err("Error", e))? + .ok_or_else(|| std::io::Error::from(std::io::ErrorKind::NotFound))?; + + trusted_nodes + .iter() + .map(|pubkey_str| { + let pubkey = PublicKey::from_str(pubkey_str).map_err(|e| invalid_data_err("Error", e))?; + Ok(pubkey) + }) + .collect() + } + + async fn save_trusted_nodes(&self, trusted_nodes: TrustedNodesShared) -> Result<(), Self::Error> { + let path = self.trusted_nodes_path(); + let trusted_nodes: HashSet = trusted_nodes.lock().iter().map(|pubkey| pubkey.to_string()).collect(); + write_json(&trusted_nodes, &path, USE_TMP_FILE) + .await + .map_err(|e| invalid_data_err("Error", e)) } } diff --git a/mm2src/coins/lightning/ln_p2p.rs b/mm2src/coins/lightning/ln_p2p.rs index cb4ab8c14a..1acd762289 100644 --- a/mm2src/coins/lightning/ln_p2p.rs +++ b/mm2src/coins/lightning/ln_p2p.rs @@ -5,18 +5,18 @@ use derive_more::Display; use lightning::chain::Access; use lightning::ln::msgs::NetAddress; use lightning::ln::peer_handler::{IgnoringMessageHandler, MessageHandler, SimpleArcPeerManager}; -use lightning::routing::network_graph::{NetGraphMsgHandler, NetworkGraph}; +use lightning::routing::gossip; use lightning_net_tokio::SocketDescriptor; use mm2_net::ip_addr::fetch_external_ip; use rand::RngCore; -use secp256k1::SecretKey; +use secp256k1v22::{PublicKey, SecretKey}; use std::net::{IpAddr, Ipv4Addr}; use tokio::net::TcpListener; const TRY_RECONNECTING_TO_NODE_INTERVAL: f64 = 60.; const BROADCAST_NODE_ANNOUNCEMENT_INTERVAL: u64 = 600; -type NetworkGossip = NetGraphMsgHandler, Arc, Arc>; +pub type NetworkGossip = gossip::P2PGossipSync, Arc, Arc>; pub type PeerManager = SimpleArcPeerManager; @@ -34,7 +34,9 @@ pub async fn connect_to_node( node_addr: SocketAddr, peer_manager: Arc, ) -> ConnectToNodeResult { - if peer_manager.get_peer_node_ids().contains(&pubkey) { + let 
peer_manager_ref = peer_manager.clone(); + let peer_node_ids = async_blocking(move || peer_manager_ref.get_peer_node_ids()).await; + if peer_node_ids.contains(&pubkey) { return Ok(ConnectToNodeRes::AlreadyConnected { pubkey, node_addr }); } @@ -61,7 +63,9 @@ pub async fn connect_to_node( std::task::Poll::Pending => {}, } - if peer_manager.get_peer_node_ids().contains(&pubkey) { + let peer_manager = peer_manager.clone(); + let peer_node_ids = async_blocking(move || peer_manager.get_peer_node_ids()).await; + if peer_node_ids.contains(&pubkey) { break; } @@ -132,7 +136,8 @@ pub async fn ln_node_announcement_loop( continue; }, }; - channel_manager.broadcast_node_announcement(node_color, node_name, addresses); + let channel_manager = channel_manager.clone(); + async_blocking(move || channel_manager.broadcast_node_announcement(node_color, node_name, addresses)).await; Timer::sleep(BROADCAST_NODE_ANNOUNCEMENT_INTERVAL as f64).await; } @@ -163,7 +168,7 @@ pub async fn init_peer_manager( ctx: MmArc, listening_port: u16, channel_manager: Arc, - network_gossip: Arc, + gossip_sync: Arc, node_secret: SecretKey, logger: Arc, ) -> EnableLightningResult> { @@ -180,7 +185,7 @@ pub async fn init_peer_manager( rand::thread_rng().fill_bytes(&mut ephemeral_bytes); let lightning_msg_handler = MessageHandler { chan_handler: channel_manager, - route_handler: network_gossip, + route_handler: gossip_sync, }; // IgnoringMessageHandler is used as custom message types (experimental and application-specific messages) is not needed diff --git a/mm2src/coins/lightning/ln_platform.rs b/mm2src/coins/lightning/ln_platform.rs index 38535624a3..3a271bb65c 100644 --- a/mm2src/coins/lightning/ln_platform.rs +++ b/mm2src/coins/lightning/ln_platform.rs @@ -1,9 +1,11 @@ use super::*; use crate::lightning::ln_errors::{SaveChannelClosingError, SaveChannelClosingResult}; -use crate::utxo::rpc_clients::{BestBlock as RpcBestBlock, BlockHashOrHeight, ElectrumBlockHeader, ElectrumClient, - ElectrumNonce, 
EstimateFeeMethod, UtxoRpcClientEnum}; -use crate::utxo::spv::{ConfirmedTransactionInfo, SimplePaymentVerification}; +use crate::utxo::rpc_clients::{BestBlock as RpcBestBlock, BlockHashOrHeight, ConfirmedTransactionInfo, + ElectrumBlockHeader, ElectrumClient, ElectrumNonce, EstimateFeeMethod, + UtxoRpcClientEnum, UtxoRpcResult}; +use crate::utxo::spv::SimplePaymentVerification; use crate::utxo::utxo_standard::UtxoStandardCoin; +use crate::utxo::GetConfirmedTxError; use crate::{MarketCoinOps, MmCoin}; use bitcoin::blockdata::block::BlockHeader; use bitcoin::blockdata::script::Script; @@ -20,12 +22,10 @@ use lightning::chain::{chaininterface::{BroadcasterInterface, ConfirmationTarget Confirm, Filter, WatchedOutput}; use rpc::v1::types::{Bytes as BytesJson, H256 as H256Json}; use spv_validation::spv_proof::TRY_SPV_PROOF_INTERVAL; -use std::cmp; use std::convert::{TryFrom, TryInto}; -use std::sync::atomic::{AtomicU64, Ordering as AtomicOrdering}; +use std::sync::atomic::{AtomicU64, Ordering as AtomicOrdering, Ordering}; const CHECK_FOR_NEW_BEST_BLOCK_INTERVAL: f64 = 60.; -const MIN_ALLOWED_FEE_PER_1000_WEIGHT: u32 = 253; const TRY_LOOP_INTERVAL: f64 = 60.; #[inline] @@ -43,8 +43,8 @@ pub async fn get_best_header(best_header_listener: &ElectrumClient) -> EnableLig } pub async fn update_best_block( - chain_monitor: &ChainMonitor, - channel_manager: &ChannelManager, + chain_monitor: Arc, + channel_manager: Arc, best_header: ElectrumBlockHeader, ) { { @@ -81,8 +81,8 @@ pub async fn update_best_block( (block_header, h.height as u32) }, }; - channel_manager.best_block_updated(&new_best_header, new_best_height); - chain_monitor.best_block_updated(&new_best_header, new_best_height); + async_blocking(move || channel_manager.best_block_updated(&new_best_header, new_best_height)).await; + async_blocking(move || chain_monitor.best_block_updated(&new_best_header, new_best_height)).await; } } @@ -100,16 +100,21 @@ pub async fn ln_best_block_update_loop( // in case a transaction 
confirmation fails due to electrums being down. This way there will be no need to wait for a new // block to confirm such transaction and causing delays. platform - .process_txs_confirmations(&best_header_listener, &db, &chain_monitor, &channel_manager) + .process_txs_confirmations( + &best_header_listener, + &db, + Arc::clone(&chain_monitor), + Arc::clone(&channel_manager), + ) .await; let best_header = ok_or_continue_after_sleep!(get_best_header(&best_header_listener).await, TRY_LOOP_INTERVAL); if current_best_block != best_header.clone().into() { platform.update_best_block_height(best_header.block_height()); platform - .process_txs_unconfirmations(&chain_monitor, &channel_manager) + .process_txs_unconfirmations(Arc::clone(&chain_monitor), Arc::clone(&channel_manager)) .await; current_best_block = best_header.clone().into(); - update_best_block(&chain_monitor, &channel_manager, best_header).await; + update_best_block(Arc::clone(&chain_monitor), Arc::clone(&channel_manager), best_header).await; } Timer::sleep(CHECK_FOR_NEW_BEST_BLOCK_INTERVAL).await; } @@ -128,15 +133,33 @@ async fn get_funding_tx_bytes_loop(rpc_client: &UtxoRpcClientEnum, tx_hash: H256 } } +pub struct LatestFees { + background: AtomicU64, + normal: AtomicU64, + high_priority: AtomicU64, +} + +impl LatestFees { + #[inline] + fn set_background_fees(&self, fee: u64) { self.background.store(fee, Ordering::Release); } + + #[inline] + fn set_normal_fees(&self, fee: u64) { self.normal.store(fee, Ordering::Release); } + + #[inline] + fn set_high_priority_fees(&self, fee: u64) { self.high_priority.store(fee, Ordering::Release); } +} + pub struct Platform { pub coin: UtxoStandardCoin, /// Main/testnet/signet/regtest Needed for lightning node to know which network to connect to pub network: BlockchainNetwork, /// The best block height. pub best_block_height: AtomicU64, - /// Default fees to and confirmation targets to be used for FeeEstimator. 
Default fees are used when the call for - /// estimate_fee_sat fails. - pub default_fees_and_confirmations: PlatformCoinConfirmations, + /// Number of blocks for every Confirmation target. This is used in the FeeEstimator. + pub confirmations_targets: PlatformCoinConfirmationTargets, + /// Latest fees are used when the call for estimate_fee_sat fails. + pub latest_fees: LatestFees, /// This cache stores the transactions that the LN node has interest in. pub registered_txs: PaMutex>, /// This cache stores the outputs that the LN node has interest in. @@ -150,13 +173,18 @@ impl Platform { pub fn new( coin: UtxoStandardCoin, network: BlockchainNetwork, - default_fees_and_confirmations: PlatformCoinConfirmations, + confirmations_targets: PlatformCoinConfirmationTargets, ) -> Self { Platform { coin, network, best_block_height: AtomicU64::new(0), - default_fees_and_confirmations, + confirmations_targets, + latest_fees: LatestFees { + background: AtomicU64::new(0), + normal: AtomicU64::new(0), + high_priority: AtomicU64::new(0), + }, registered_txs: PaMutex::new(HashSet::new()), registered_outputs: PaMutex::new(Vec::new()), unsigned_funding_txs: PaMutex::new(HashMap::new()), @@ -166,13 +194,59 @@ impl Platform { #[inline] fn rpc_client(&self) -> &UtxoRpcClientEnum { &self.coin.as_ref().rpc_client } + pub async fn set_latest_fees(&self) -> UtxoRpcResult<()> { + let platform_coin = &self.coin; + let conf = &platform_coin.as_ref().conf; + + let latest_background_fees = self + .rpc_client() + .estimate_fee_sat( + platform_coin.decimals(), + // Todo: when implementing Native client detect_fee_method should be used for Native and EstimateFeeMethod::Standard for Electrum + &EstimateFeeMethod::Standard, + &conf.estimate_fee_mode, + self.confirmations_targets.background, + ) + .compat() + .await?; + self.latest_fees.set_background_fees(latest_background_fees); + + let latest_normal_fees = self + .rpc_client() + .estimate_fee_sat( + platform_coin.decimals(), + // Todo: when 
implementing Native client detect_fee_method should be used for Native and EstimateFeeMethod::Standard for Electrum + &EstimateFeeMethod::Standard, + &conf.estimate_fee_mode, + self.confirmations_targets.normal, + ) + .compat() + .await?; + self.latest_fees.set_normal_fees(latest_normal_fees); + + let latest_high_priority_fees = self + .rpc_client() + .estimate_fee_sat( + platform_coin.decimals(), + // Todo: when implementing Native client detect_fee_method should be used for Native and EstimateFeeMethod::Standard for Electrum + &EstimateFeeMethod::Standard, + &conf.estimate_fee_mode, + self.confirmations_targets.high_priority, + ) + .compat() + .await?; + self.latest_fees.set_high_priority_fees(latest_high_priority_fees); + + Ok(()) + } + #[inline] pub fn update_best_block_height(&self, new_height: u64) { - self.best_block_height.store(new_height, AtomicOrdering::Relaxed); + self.best_block_height.store(new_height, AtomicOrdering::Release); } #[inline] - pub fn best_block_height(&self) -> u64 { self.best_block_height.load(AtomicOrdering::Relaxed) } + pub fn best_block_height(&self) -> u64 { self.best_block_height.load(AtomicOrdering::Acquire) } pub fn add_tx(&self, txid: Txid) { let mut registered_txs = self.registered_txs.lock(); @@ -184,9 +258,9 @@ impl Platform { registered_outputs.push(output); } - async fn process_tx_for_unconfirmation(&self, txid: Txid, monitor: &T) + async fn process_tx_for_unconfirmation(&self, txid: Txid, monitor: Arc) where - T: Confirm, + T: Confirm + Send + Sync + 'static, { let rpc_txid = h256_json_from_txid(txid); match self.rpc_client().get_tx_if_onchain(&rpc_txid).await { @@ -196,7 +270,8 @@ impl Platform { "Transaction {} is not found on chain. The transaction will be re-broadcasted.", txid, ); - monitor.transaction_unconfirmed(&txid); + let monitor = monitor.clone(); + async_blocking(move || monitor.transaction_unconfirmed(&txid)).await; // If a transaction is unconfirmed due to a block reorganization; LDK will rebroadcast it. 
// In this case, this transaction needs to be added again to the registered transactions // to start watching for it on the chain again. @@ -209,17 +284,23 @@ impl Platform { } } - pub async fn process_txs_unconfirmations(&self, chain_monitor: &ChainMonitor, channel_manager: &ChannelManager) { + pub async fn process_txs_unconfirmations( + &self, + chain_monitor: Arc, + channel_manager: Arc, + ) { // Retrieve channel manager transaction IDs to check the chain for un-confirmations let channel_manager_relevant_txids = channel_manager.get_relevant_txids(); for txid in channel_manager_relevant_txids { - self.process_tx_for_unconfirmation(txid, channel_manager).await; + self.process_tx_for_unconfirmation(txid, Arc::clone(&channel_manager)) + .await; } // Retrieve chain monitor transaction IDs to check the chain for un-confirmations let chain_monitor_relevant_txids = chain_monitor.get_relevant_txids(); for txid in chain_monitor_relevant_txids { - self.process_tx_for_unconfirmation(txid, chain_monitor).await; + self.process_tx_for_unconfirmation(txid, Arc::clone(&chain_monitor)) + .await; } } @@ -247,11 +328,17 @@ impl Platform { }, }); + let is_spv_enabled = self.coin.as_ref().conf.enable_spv_proof; let confirmed_transactions_futs = on_chain_txs .map(|transaction| async move { - client - .validate_spv_proof(&transaction, (now_ms() / 1000) + TRY_SPV_PROOF_INTERVAL) - .await + if is_spv_enabled { + client + .validate_spv_proof(&transaction, (now_ms() / 1000) + TRY_SPV_PROOF_INTERVAL) + .await + .map_err(GetConfirmedTxError::SPVError) + } else { + client.get_confirmed_tx_info_from_rpc(&transaction).await + } }) .collect::>(); join_all(confirmed_transactions_futs) @@ -309,12 +396,18 @@ impl Platform { .any(|info| info.tx.hash() == output.spending_tx.hash()) }); + let is_spv_enabled = self.coin.as_ref().conf.enable_spv_proof; let confirmed_transactions_futs = spent_outputs_info .into_iter() .map(|output| async move { - client - .validate_spv_proof(&output.spending_tx, 
(now_ms() / 1000) + TRY_SPV_PROOF_INTERVAL) - .await + if is_spv_enabled { + client + .validate_spv_proof(&output.spending_tx, (now_ms() / 1000) + TRY_SPV_PROOF_INTERVAL) + .await + .map_err(GetConfirmedTxError::SPVError) + } else { + client.get_confirmed_tx_info_from_rpc(&output.spending_tx).await + } }) .collect::>(); let mut confirmed_transaction_info = join_all(confirmed_transactions_futs) @@ -346,8 +439,8 @@ impl Platform { &self, client: &ElectrumClient, db: &SqliteLightningDB, - chain_monitor: &ChainMonitor, - channel_manager: &ChannelManager, + chain_monitor: Arc, + channel_manager: Arc, ) { let mut transactions_to_confirm = self.get_confirmed_registered_txs(client).await; self.append_spent_registered_output_txs(&mut transactions_to_confirm, client) @@ -366,22 +459,31 @@ impl Platform { { error!("Unable to update the funding tx block height in DB: {}", e); } - channel_manager.transactions_confirmed( - &confirmed_transaction_info.header.clone().into(), - &[( - confirmed_transaction_info.index as usize, - &confirmed_transaction_info.tx.clone().into(), - )], - confirmed_transaction_info.height as u32, - ); - chain_monitor.transactions_confirmed( - &confirmed_transaction_info.header.into(), - &[( - confirmed_transaction_info.index as usize, - &confirmed_transaction_info.tx.into(), - )], - confirmed_transaction_info.height as u32, - ); + let channel_manager = channel_manager.clone(); + let confirmed_transaction_info_cloned = confirmed_transaction_info.clone(); + async_blocking(move || { + channel_manager.transactions_confirmed( + &confirmed_transaction_info_cloned.header.clone().into(), + &[( + confirmed_transaction_info_cloned.index as usize, + &confirmed_transaction_info_cloned.tx.clone().into(), + )], + confirmed_transaction_info_cloned.height as u32, + ) + }) + .await; + let chain_monitor = chain_monitor.clone(); + async_blocking(move || { + chain_monitor.transactions_confirmed( + &confirmed_transaction_info.header.into(), + &[( + 
confirmed_transaction_info.index as usize, + &confirmed_transaction_info.tx.into(), + )], + confirmed_transaction_info.height as u32, + ) + }) + .await; } } @@ -422,17 +524,17 @@ impl FeeEstimator for Platform { fn get_est_sat_per_1000_weight(&self, confirmation_target: ConfirmationTarget) -> u32 { let platform_coin = &self.coin; - let default_fee = match confirmation_target { - ConfirmationTarget::Background => self.default_fees_and_confirmations.background.default_fee_per_kb, - ConfirmationTarget::Normal => self.default_fees_and_confirmations.normal.default_fee_per_kb, - ConfirmationTarget::HighPriority => self.default_fees_and_confirmations.high_priority.default_fee_per_kb, + let latest_fees = match confirmation_target { + ConfirmationTarget::Background => self.latest_fees.background.load(Ordering::Acquire), + ConfirmationTarget::Normal => self.latest_fees.normal.load(Ordering::Acquire), + ConfirmationTarget::HighPriority => self.latest_fees.high_priority.load(Ordering::Acquire), }; let conf = &platform_coin.as_ref().conf; let n_blocks = match confirmation_target { - ConfirmationTarget::Background => self.default_fees_and_confirmations.background.n_blocks, - ConfirmationTarget::Normal => self.default_fees_and_confirmations.normal.n_blocks, - ConfirmationTarget::HighPriority => self.default_fees_and_confirmations.high_priority.n_blocks, + ConfirmationTarget::Background => self.confirmations_targets.background, + ConfirmationTarget::Normal => self.confirmations_targets.normal, + ConfirmationTarget::HighPriority => self.confirmations_targets.high_priority, }; let fee_per_kb = tokio::task::block_in_place(move || { self.rpc_client() @@ -445,11 +547,21 @@ impl FeeEstimator for Platform { n_blocks, ) .wait() - .unwrap_or(default_fee) + .unwrap_or(latest_fees) }); + + // Set default fee to last known fee for the corresponding confirmation target + match confirmation_target { + ConfirmationTarget::Background => self.latest_fees.set_background_fees(fee_per_kb), + 
ConfirmationTarget::Normal => self.latest_fees.set_normal_fees(fee_per_kb), + ConfirmationTarget::HighPriority => self.latest_fees.set_high_priority_fees(fee_per_kb), + }; + // Must be no smaller than 253 (ie 1 satoshi-per-byte rounded up to ensure later round-downs don’t put us below 1 satoshi-per-byte). // https://docs.rs/lightning/0.0.101/lightning/chain/chaininterface/trait.FeeEstimator.html#tymethod.get_est_sat_per_1000_weight - cmp::max((fee_per_kb as f64 / 4.0).ceil() as u32, MIN_ALLOWED_FEE_PER_1000_WEIGHT) + // This has changed in rust-lightning v0.0.110 as LDK currently wraps get_est_sat_per_1000_weight to ensure that the value returned is + // no smaller than 253. https://github.com/lightningdevkit/rust-lightning/pull/1552 + (fee_per_kb as f64 / 4.0).ceil() as u32 } } diff --git a/mm2src/coins/lightning/ln_serialization.rs b/mm2src/coins/lightning/ln_serialization.rs index 82f0a700c7..01b99d0bd0 100644 --- a/mm2src/coins/lightning/ln_serialization.rs +++ b/mm2src/coins/lightning/ln_serialization.rs @@ -1,51 +1,9 @@ -use lightning_invoice::Invoice; -use secp256k1::PublicKey; +use secp256k1v22::PublicKey; use serde::{de, Serialize, Serializer}; use std::fmt; use std::net::{SocketAddr, ToSocketAddrs}; use std::str::FromStr; -#[derive(Clone, Debug, PartialEq)] -pub struct InvoiceForRPC(Invoice); - -impl From for InvoiceForRPC { - fn from(i: Invoice) -> Self { InvoiceForRPC(i) } -} - -impl From for Invoice { - fn from(i: InvoiceForRPC) -> Self { i.0 } -} - -impl Serialize for InvoiceForRPC { - fn serialize(&self, serializer: S) -> Result { - serializer.serialize_str(&self.0.to_string()) - } -} - -impl<'de> de::Deserialize<'de> for InvoiceForRPC { - fn deserialize>(deserializer: D) -> Result { - struct InvoiceForRPCVisitor; - - impl<'de> de::Visitor<'de> for InvoiceForRPCVisitor { - type Value = InvoiceForRPC; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - write!(formatter, "a lightning invoice") - } - - fn visit_str(self, v: &str) 
-> Result { - let invoice = Invoice::from_str(v).map_err(|e| { - let err = format!("Could not parse lightning invoice from str {}, err {}", v, e); - de::Error::custom(err) - })?; - Ok(InvoiceForRPC(invoice)) - } - } - - deserializer.deserialize_str(InvoiceForRPCVisitor) - } -} - // TODO: support connection to onion addresses #[derive(Debug, PartialEq)] pub struct NodeAddress { @@ -141,22 +99,6 @@ mod tests { use super::*; use serde_json as json; - #[test] - fn test_invoice_for_rpc_serialize() { - let invoice_for_rpc = InvoiceForRPC(str::parse::("lntb20u1p3zqmvrpp52hej7trefx6y633aujj6nltjs8cf7lzyp78tfn5y5wpa3udk5tvqdp8xys9xcmpd3sjqsmgd9czq3njv9c8qatrvd5kumcxqrrsscqp79qy9qsqsp5ccy2qgmptg8dthxsjvw2c43uyvqkg6cqey3jpks4xf0tv7xfrqrq3xfnuffau2h2k8defphv2xsktzn2qj5n2l8d9l9zx64fg6jcmdg9kmpevneyyhfnzrpspqdrky8u7l4c6qdnquh8lnevswwrtcd9ypcq89ga09").unwrap()); - let expected = r#""lntb20u1p3zqmvrpp52hej7trefx6y633aujj6nltjs8cf7lzyp78tfn5y5wpa3udk5tvqdp8xys9xcmpd3sjqsmgd9czq3njv9c8qatrvd5kumcxqrrsscqp79qy9qsqsp5ccy2qgmptg8dthxsjvw2c43uyvqkg6cqey3jpks4xf0tv7xfrqrq3xfnuffau2h2k8defphv2xsktzn2qj5n2l8d9l9zx64fg6jcmdg9kmpevneyyhfnzrpspqdrky8u7l4c6qdnquh8lnevswwrtcd9ypcq89ga09""#; - let actual = json::to_string(&invoice_for_rpc).unwrap(); - assert_eq!(expected, actual); - } - - #[test] - fn test_invoice_for_rpc_deserialize() { - let invoice_for_rpc = r#""lntb20u1p3zqmvrpp52hej7trefx6y633aujj6nltjs8cf7lzyp78tfn5y5wpa3udk5tvqdp8xys9xcmpd3sjqsmgd9czq3njv9c8qatrvd5kumcxqrrsscqp79qy9qsqsp5ccy2qgmptg8dthxsjvw2c43uyvqkg6cqey3jpks4xf0tv7xfrqrq3xfnuffau2h2k8defphv2xsktzn2qj5n2l8d9l9zx64fg6jcmdg9kmpevneyyhfnzrpspqdrky8u7l4c6qdnquh8lnevswwrtcd9ypcq89ga09""#; - let expected = 
InvoiceForRPC(str::parse::("lntb20u1p3zqmvrpp52hej7trefx6y633aujj6nltjs8cf7lzyp78tfn5y5wpa3udk5tvqdp8xys9xcmpd3sjqsmgd9czq3njv9c8qatrvd5kumcxqrrsscqp79qy9qsqsp5ccy2qgmptg8dthxsjvw2c43uyvqkg6cqey3jpks4xf0tv7xfrqrq3xfnuffau2h2k8defphv2xsktzn2qj5n2l8d9l9zx64fg6jcmdg9kmpevneyyhfnzrpspqdrky8u7l4c6qdnquh8lnevswwrtcd9ypcq89ga09").unwrap()); - let actual = json::from_str(invoice_for_rpc).unwrap(); - assert_eq!(expected, actual); - } - #[test] fn test_node_address_serialize() { let node_address = NodeAddress { diff --git a/mm2src/coins/lightning/ln_sql.rs b/mm2src/coins/lightning/ln_sql.rs index 2cbeb98116..ac21b6351e 100644 --- a/mm2src/coins/lightning/ln_sql.rs +++ b/mm2src/coins/lightning/ln_sql.rs @@ -9,7 +9,7 @@ use db_common::sqlite::{h256_option_slice_from_row, h256_slice_from_row, offset_ sql_text_conversion_err, string_from_row, validate_table_name, SqlNamedParams, SqliteConnShared, CHECK_TABLE_EXISTS_SQL}; use lightning::ln::{PaymentHash, PaymentPreimage, PaymentSecret}; -use secp256k1::PublicKey; +use secp256k1v22::PublicKey; use std::convert::TryInto; use std::str::FromStr; @@ -890,7 +890,7 @@ mod tests { use db_common::sqlite::rusqlite::Connection; use rand::distributions::Alphanumeric; use rand::{Rng, RngCore}; - use secp256k1::{Secp256k1, SecretKey}; + use secp256k1v22::{Secp256k1, SecretKey}; use std::num::NonZeroUsize; use std::sync::{Arc, Mutex}; diff --git a/mm2src/coins/lightning/ln_storage.rs b/mm2src/coins/lightning/ln_storage.rs index bd44fdc0e1..610720bc3f 100644 --- a/mm2src/coins/lightning/ln_storage.rs +++ b/mm2src/coins/lightning/ln_storage.rs @@ -1,16 +1,20 @@ use async_trait::async_trait; use bitcoin::Network; -use lightning::routing::network_graph::NetworkGraph; +use common::log::LogState; +use lightning::routing::gossip; use lightning::routing::scoring::ProbabilisticScorer; use parking_lot::Mutex as PaMutex; -use secp256k1::PublicKey; -use std::collections::HashMap; +use secp256k1v22::PublicKey; +use std::collections::{HashMap, HashSet}; use 
std::net::SocketAddr; use std::sync::{Arc, Mutex}; pub type NodesAddressesMap = HashMap; pub type NodesAddressesMapShared = Arc>; -pub type Scorer = ProbabilisticScorer>; +pub type TrustedNodesShared = Arc>>; + +pub type NetworkGraph = gossip::NetworkGraph>; +pub type Scorer = Mutex, Arc>>; #[async_trait] pub trait LightningStorage { @@ -21,13 +25,15 @@ pub trait LightningStorage { async fn is_fs_initialized(&self) -> Result; - async fn get_nodes_addresses(&self) -> Result, Self::Error>; + async fn get_nodes_addresses(&self) -> Result; async fn save_nodes_addresses(&self, nodes_addresses: NodesAddressesMapShared) -> Result<(), Self::Error>; - async fn get_network_graph(&self, network: Network) -> Result; + async fn get_network_graph(&self, network: Network, logger: Arc) -> Result; + + async fn get_scorer(&self, network_graph: Arc, logger: Arc) -> Result; - async fn get_scorer(&self, network_graph: Arc) -> Result; + async fn get_trusted_nodes(&self) -> Result, Self::Error>; - async fn save_scorer(&self, scorer: Arc>) -> Result<(), Self::Error>; + async fn save_trusted_nodes(&self, trusted_nodes: TrustedNodesShared) -> Result<(), Self::Error>; } diff --git a/mm2src/coins/lightning/ln_utils.rs b/mm2src/coins/lightning/ln_utils.rs index 30b3e3c2de..c08c59035c 100644 --- a/mm2src/coins/lightning/ln_utils.rs +++ b/mm2src/coins/lightning/ln_utils.rs @@ -1,36 +1,31 @@ use super::*; use crate::lightning::ln_db::LightningDB; -use crate::lightning::ln_filesystem_persister::LightningPersisterShared; use crate::lightning::ln_platform::{get_best_header, ln_best_block_update_loop, update_best_block}; use crate::lightning::ln_sql::SqliteLightningDB; -use crate::lightning::ln_storage::{LightningStorage, NodesAddressesMap, Scorer}; +use crate::lightning::ln_storage::{LightningStorage, NodesAddressesMap}; use crate::utxo::rpc_clients::BestBlock as RpcBestBlock; use bitcoin::hash_types::BlockHash; use bitcoin_hashes::{sha256d, Hash}; -use common::executor::{spawn, Timer}; -use 
common::log; +use common::executor::spawn; use common::log::LogState; use lightning::chain::keysinterface::{InMemorySigner, KeysManager}; use lightning::chain::{chainmonitor, BestBlock, Watch}; -use lightning::ln::channelmanager; use lightning::ln::channelmanager::{ChainParameters, ChannelManagerReadArgs, SimpleArcChannelManager}; use lightning::util::config::UserConfig; use lightning::util::ser::ReadableArgs; use mm2_core::mm_ctx::MmArc; use std::fs::File; use std::path::PathBuf; -use std::sync::{Arc, Mutex}; +use std::sync::Arc; use std::time::SystemTime; -const SCORER_PERSIST_INTERVAL: u64 = 600; - pub type ChainMonitor = chainmonitor::ChainMonitor< InMemorySigner, Arc, Arc, Arc, Arc, - LightningPersisterShared, + Arc, >; pub type ChannelManager = SimpleArcChannelManager; @@ -52,13 +47,10 @@ pub async fn init_persister( ctx: &MmArc, ticker: String, backup_path: Option, -) -> EnableLightningResult { +) -> EnableLightningResult> { let ln_data_dir = ln_data_dir(ctx, &ticker); let ln_data_backup_dir = ln_data_backup_dir(ctx, backup_path, &ticker); - let persister = LightningPersisterShared(Arc::new(LightningFilesystemPersister::new( - ln_data_dir, - ln_data_backup_dir, - ))); + let persister = Arc::new(LightningFilesystemPersister::new(ln_data_dir, ln_data_backup_dir)); let is_initialized = persister.is_fs_initialized().await?; if !is_initialized { @@ -98,7 +90,7 @@ pub fn init_keys_manager(ctx: &MmArc) -> EnableLightningResult> pub async fn init_channel_manager( platform: Arc, logger: Arc, - persister: LightningPersisterShared, + persister: Arc, db: SqliteLightningDB, keys_manager: Arc, user_config: UserConfig, @@ -120,13 +112,19 @@ pub async fn init_channel_manager( )); // Read ChannelMonitor state from disk, important for lightning node is restarting and has at least 1 channel - let mut channelmonitors = persister - .channels_persister() - .read_channelmonitors(keys_manager.clone()) - .map_to_mm(|e| EnableLightningError::IOError(e.to_string()))?; + let 
channels_persister = persister.clone(); + let channels_keys_manager = keys_manager.clone(); + let mut channelmonitors = async_blocking(move || { + channels_persister + .read_channelmonitors(channels_keys_manager) + .map_to_mm(|e| EnableLightningError::IOError(e.to_string())) + }) + .await?; // This is used for Electrum only to prepare for chain synchronization for (_, chan_mon) in channelmonitors.iter() { + // Although there is a mutex lock inside the load_outputs_to_watch fn + // it shouldn't be held by anything yet, so async_blocking is not needed. chan_mon.load_outputs_to_watch(&platform); } @@ -143,63 +141,78 @@ pub async fn init_channel_manager( platform.update_best_block_height(best_header.block_height()); let best_block = RpcBestBlock::from(best_header.clone()); let best_block_hash = BlockHash::from_hash(sha256d::Hash::from_inner(best_block.hash.0)); - let (channel_manager_blockhash, channel_manager) = { - if let Ok(mut f) = File::open(persister.manager_path()) { + + let channel_manager = if persister.manager_path().exists() { + let chain_monitor_for_args = chain_monitor.clone(); + + let (channel_manager_blockhash, channel_manager, channelmonitors) = async_blocking(move || { + let mut manager_file = File::open(persister.manager_path())?; + let mut channel_monitor_mut_references = Vec::new(); for (_, channel_monitor) in channelmonitors.iter_mut() { channel_monitor_mut_references.push(channel_monitor); } + // Read ChannelManager data from the file let read_args = ChannelManagerReadArgs::new( keys_manager.clone(), fee_estimator.clone(), - chain_monitor.clone(), + chain_monitor_for_args, broadcaster.clone(), logger.clone(), user_config, channel_monitor_mut_references, ); - <(BlockHash, ChannelManager)>::read(&mut f, read_args) - .map_to_mm(|e| EnableLightningError::IOError(e.to_string()))? 
- } else { - // Initialize the ChannelManager to starting a new node without history - let chain_params = ChainParameters { - network: platform.network.clone().into(), - best_block: BestBlock::new(best_block_hash, best_block.height as u32), - }; - let new_channel_manager = channelmanager::ChannelManager::new( - fee_estimator.clone(), - chain_monitor.clone(), - broadcaster.clone(), - logger.clone(), - keys_manager.clone(), - user_config, - chain_params, - ); - (best_block_hash, new_channel_manager) - } - }; - - let channel_manager: Arc = Arc::new(channel_manager); + <(BlockHash, Arc)>::read(&mut manager_file, read_args) + .map(|(h, c)| (h, c, channelmonitors)) + .map_to_mm(|e| EnableLightningError::IOError(e.to_string())) + }) + .await?; - // Sync ChannelMonitors and ChannelManager to chain tip if the node is restarting and has open channels - platform - .process_txs_confirmations(&rpc_client, &db, &chain_monitor, &channel_manager) - .await; - if channel_manager_blockhash != best_block_hash { + // Sync ChannelMonitors and ChannelManager to chain tip if the node is restarting and has open channels platform - .process_txs_unconfirmations(&chain_monitor, &channel_manager) + .process_txs_confirmations( + &rpc_client, + &db, + Arc::clone(&chain_monitor), + Arc::clone(&channel_manager), + ) .await; - update_best_block(&chain_monitor, &channel_manager, best_header).await; - } + if channel_manager_blockhash != best_block_hash { + platform + .process_txs_unconfirmations(Arc::clone(&chain_monitor), Arc::clone(&channel_manager)) + .await; + update_best_block(Arc::clone(&chain_monitor), Arc::clone(&channel_manager), best_header).await; + } - // Give ChannelMonitors to ChainMonitor - for (_, channel_monitor) in channelmonitors.drain(..) 
{ - let funding_outpoint = channel_monitor.get_funding_txo().0; - chain_monitor - .watch_channel(funding_outpoint, channel_monitor) - .map_to_mm(|e| EnableLightningError::IOError(format!("{:?}", e)))?; - } + // Give ChannelMonitors to ChainMonitor + for (_, channel_monitor) in channelmonitors.into_iter() { + let funding_outpoint = channel_monitor.get_funding_txo().0; + let chain_monitor = chain_monitor.clone(); + async_blocking(move || { + chain_monitor + .watch_channel(funding_outpoint, channel_monitor) + .map_to_mm(|e| EnableLightningError::IOError(format!("{:?}", e))) + }) + .await?; + } + channel_manager + } else { + // Initialize the ChannelManager to starting a new node without history + let chain_params = ChainParameters { + network: platform.network.clone().into(), + best_block: BestBlock::new(best_block_hash, best_block.height as u32), + }; + Arc::new(ChannelManager::new( + fee_estimator.clone(), + chain_monitor.clone(), + broadcaster.clone(), + logger.clone(), + keys_manager.clone(), + user_config, + chain_params, + )) + }; // Update best block whenever there's a new chain tip or a block has been newly disconnected spawn(ln_best_block_update_loop( @@ -214,23 +227,11 @@ pub async fn init_channel_manager( Ok((chain_monitor, channel_manager)) } -pub async fn persist_scorer_loop(persister: LightningPersisterShared, scorer: Arc>) { - loop { - if let Err(e) = persister.save_scorer(scorer.clone()).await { - log::warn!( - "Failed to persist scorer error: {}, please check disk space and permissions", - e - ); - } - Timer::sleep(SCORER_PERSIST_INTERVAL as f64).await; - } -} - pub async fn get_open_channels_nodes_addresses( - persister: LightningPersisterShared, + persister: Arc, channel_manager: Arc, ) -> EnableLightningResult { - let channels = channel_manager.list_channels(); + let channels = async_blocking(move || channel_manager.list_channels()).await; let mut nodes_addresses = persister.get_nodes_addresses().await?; nodes_addresses.retain(|pubkey, _node_addr| 
{ channels diff --git a/mm2src/coins/lp_coins.rs b/mm2src/coins/lp_coins.rs index 3df20da85d..1f5fe774cb 100644 --- a/mm2src/coins/lp_coins.rs +++ b/mm2src/coins/lp_coins.rs @@ -65,7 +65,7 @@ use utxo_signer::with_key_pair::UtxoSignWithKeyPairError; cfg_native! { use crate::lightning::LightningCoin; - use crate::lightning::ln_conf::PlatformCoinConfirmations; + use crate::lightning::ln_conf::PlatformCoinConfirmationTargets; use async_std::fs; use futures::AsyncWriteExt; use std::io; @@ -2095,7 +2095,7 @@ pub enum CoinProtocol { LIGHTNING { platform: String, network: BlockchainNetwork, - confirmations: PlatformCoinConfirmations, + confirmation_targets: PlatformCoinConfirmationTargets, }, #[cfg(not(target_arch = "wasm32"))] SOLANA, diff --git a/mm2src/coins/my_tx_history_v2.rs b/mm2src/coins/my_tx_history_v2.rs index c297264ab2..df49663e29 100644 --- a/mm2src/coins/my_tx_history_v2.rs +++ b/mm2src/coins/my_tx_history_v2.rs @@ -271,7 +271,7 @@ pub enum MyTxHistoryErrorV2 { impl HttpStatusCode for MyTxHistoryErrorV2 { fn status_code(&self) -> StatusCode { match self { - MyTxHistoryErrorV2::CoinIsNotActive(_) => StatusCode::PRECONDITION_REQUIRED, + MyTxHistoryErrorV2::CoinIsNotActive(_) => StatusCode::NOT_FOUND, MyTxHistoryErrorV2::StorageIsNotInitialized(_) | MyTxHistoryErrorV2::StorageError(_) | MyTxHistoryErrorV2::RpcError(_) diff --git a/mm2src/coins/rpc_command/get_current_mtp.rs b/mm2src/coins/rpc_command/get_current_mtp.rs index 46fa0f3034..24b4f563b9 100644 --- a/mm2src/coins/rpc_command/get_current_mtp.rs +++ b/mm2src/coins/rpc_command/get_current_mtp.rs @@ -31,7 +31,7 @@ pub enum GetCurrentMtpError { impl HttpStatusCode for GetCurrentMtpError { fn status_code(&self) -> StatusCode { match self { - GetCurrentMtpError::NoSuchCoin(_) => StatusCode::PRECONDITION_REQUIRED, + GetCurrentMtpError::NoSuchCoin(_) => StatusCode::NOT_FOUND, GetCurrentMtpError::NotSupportedCoin(_) => StatusCode::BAD_REQUEST, GetCurrentMtpError::RpcError(_) => 
StatusCode::INTERNAL_SERVER_ERROR, } diff --git a/mm2src/coins/utxo.rs b/mm2src/coins/utxo.rs index 4f0886fc2b..ab33cf0940 100644 --- a/mm2src/coins/utxo.rs +++ b/mm2src/coins/utxo.rs @@ -39,6 +39,7 @@ pub mod utxo_standard; pub mod utxo_withdraw; use async_trait::async_trait; +#[cfg(not(target_arch = "wasm32"))] use bitcoin::network::constants::Network as BitcoinNetwork; pub use bitcrypto::{dhash160, sha256, ChecksumType}; pub use chain::Transaction as UtxoTx; @@ -46,19 +47,21 @@ use chain::{OutPoint, TransactionOutput, TxHashAlgo}; #[cfg(not(target_arch = "wasm32"))] use common::first_char_to_upper; use common::jsonrpc_client::JsonRpcError; +use common::log::LogOnError; use common::now_ms; use crypto::trezor::utxo::TrezorUtxoCoin; use crypto::{Bip32DerPathOps, Bip32Error, Bip44Chain, Bip44DerPathError, Bip44PathToAccount, Bip44PathToCoin, ChildNumber, DerivationPath, Secp256k1ExtendedPublicKey}; use derive_more::Display; #[cfg(not(target_arch = "wasm32"))] use dirs::home_dir; -use futures::channel::mpsc; +use futures::channel::mpsc::{Receiver as AsyncReceiver, Sender as AsyncSender, UnboundedSender}; use futures::compat::Future01CompatExt; use futures::lock::{Mutex as AsyncMutex, MutexGuard as AsyncMutexGuard}; use futures01::Future; use keys::bytes::Bytes; pub use keys::{Address, AddressFormat as UtxoAddressFormat, AddressHashEnum, KeyPair, Private, Public, Secret, Type as ScriptType}; +#[cfg(not(target_arch = "wasm32"))] use lightning_invoice::Currency as LightningCurrency; use mm2_core::mm_ctx::MmArc; use mm2_err_handle::prelude::*; @@ -71,13 +74,13 @@ use rpc::v1::types::{Bytes as BytesJson, Transaction as RpcTransaction, H256 as use script::{Builder, Script, SignatureVersion, TransactionInputSigner}; use serde_json::{self as json, Value as Json}; use serialization::{serialize, serialize_with_flags, Error as SerError, SERIALIZE_TRANSACTION_WITNESS}; -use spv_validation::helpers_validation::SPVError; +use 
spv_validation::helpers_validation::{BlockHeaderVerificationParams, SPVError}; use spv_validation::storage::BlockHeaderStorageError; use std::array::TryFromSliceError; use std::collections::{HashMap, HashSet}; use std::convert::TryInto; use std::hash::Hash; -use std::num::NonZeroU64; +use std::num::{NonZeroU64, TryFromIntError}; use std::ops::Deref; #[cfg(not(target_arch = "wasm32"))] use std::path::{Path, PathBuf}; @@ -92,7 +95,6 @@ use utxo_signer::{TxProvider, TxProviderError, UtxoSignTxError, UtxoSignTxResult use self::rpc_clients::{electrum_script_hash, ElectrumClient, ElectrumRpcRequest, EstimateFeeMethod, EstimateFeeMode, NativeClient, UnspentInfo, UnspentMap, UtxoRpcClientEnum, UtxoRpcError, UtxoRpcFut, UtxoRpcResult}; -use self::utxo_block_header_storage::BlockHeaderVerificationParams; use super::{big_decimal_from_sat_unsigned, BalanceError, BalanceFut, BalanceResult, CoinBalance, CoinsContext, DerivationMethod, FeeApproxStage, FoundSwapTxSpend, HistorySyncState, KmdRewardsDetails, MarketCoinOps, MmCoin, NumConversError, NumConversResult, PrivKeyActivationPolicy, PrivKeyNotAllowed, PrivKeyPolicy, @@ -417,6 +419,7 @@ pub enum BlockchainNetwork { Regtest, } +#[cfg(not(target_arch = "wasm32"))] impl From for BitcoinNetwork { fn from(network: BlockchainNetwork) -> Self { match network { @@ -427,6 +430,7 @@ impl From for BitcoinNetwork { } } +#[cfg(not(target_arch = "wasm32"))] impl From for LightningCurrency { fn from(network: BlockchainNetwork) -> Self { match network { @@ -437,6 +441,54 @@ impl From for LightningCurrency { } } +pub enum UtxoSyncStatus { + SyncingBlockHeaders { + current_scanned_block: u64, + last_block: u64, + }, + TemporaryError(String), + PermanentError(String), + Finished { + block_number: u64, + }, +} + +#[derive(Clone)] +pub struct UtxoSyncStatusLoopHandle(AsyncSender); + +impl UtxoSyncStatusLoopHandle { + pub fn new(sync_status_notifier: AsyncSender) -> Self { + UtxoSyncStatusLoopHandle(sync_status_notifier) + } + + pub fn 
notify_blocks_headers_sync_status(&mut self, current_scanned_block: u64, last_block: u64) { + self.0 + .try_send(UtxoSyncStatus::SyncingBlockHeaders { + current_scanned_block, + last_block, + }) + .debug_log_with_msg("No one seems interested in UtxoSyncStatus"); + } + + pub fn notify_on_temp_error(&mut self, error: String) { + self.0 + .try_send(UtxoSyncStatus::TemporaryError(error)) + .debug_log_with_msg("No one seems interested in UtxoSyncStatus"); + } + + pub fn notify_on_permanent_error(&mut self, error: String) { + self.0 + .try_send(UtxoSyncStatus::PermanentError(error)) + .debug_log_with_msg("No one seems interested in UtxoSyncStatus"); + } + + pub fn notify_sync_finished(&mut self, block_number: u64) { + self.0 + .try_send(UtxoSyncStatus::Finished { block_number }) + .debug_log_with_msg("No one seems interested in UtxoSyncStatus"); + } +} + #[derive(Debug)] pub struct UtxoCoinConf { pub ticker: String, @@ -505,8 +557,12 @@ pub struct UtxoCoinConf { pub estimate_fee_blocks: u32, /// The name of the coin with which Trezor wallet associates this asset. pub trezor_coin: Option, - /// Used in condition where the coin will validate spv proof or not + /// Whether to verify swaps and lightning transactions using spv or not. When enabled, block headers will be retrieved, verified according + /// to block_headers_verification_params and stored in the DB. Can be false if the coin's RPC server is trusted. pub enable_spv_proof: bool, + /// The parameters that specify how the coin block headers should be verified. If None and enable_spv_proof is true, + /// headers will be saved in DB without verification, can be none if the coin's RPC server is trusted. + pub block_headers_verification_params: Option, } pub struct UtxoCoinFields { @@ -538,6 +594,12 @@ pub struct UtxoCoinFields { /// The flag determines whether to use mature unspent outputs *only* to generate transactions. 
/// https://github.com/KomodoPlatform/atomicDEX-API/issues/1181 pub check_utxo_maturity: bool, + /// The notifier/sender of the block headers synchronization status, + /// initialized only for non-native mode if spv is enabled for the coin. + pub block_headers_status_notifier: Option, + /// The watcher/receiver of the block headers synchronization status, + /// initialized only for non-native mode if spv is enabled for the coin. + pub block_headers_status_watcher: Option>>, } #[derive(Debug, Display)] @@ -580,15 +642,19 @@ impl From for GetTxError { fn from(err: SerError) -> GetTxError { GetTxError::TxDeserialization(err) } } -#[derive(Debug)] +#[derive(Debug, Display)] pub enum GetTxHeightError { HeightNotFound(String), + StorageError(BlockHeaderStorageError), + ConversionError(TryFromIntError), } impl From for SPVError { fn from(e: GetTxHeightError) -> Self { match e { GetTxHeightError::HeightNotFound(e) => SPVError::InvalidHeight(e), + GetTxHeightError::StorageError(e) => SPVError::HeaderStorageError(e), + GetTxHeightError::ConversionError(e) => SPVError::Internal(e.to_string()), } } } @@ -597,6 +663,14 @@ impl From for GetTxHeightError { fn from(e: UtxoRpcError) -> Self { GetTxHeightError::HeightNotFound(e.to_string()) } } +impl From for GetTxHeightError { + fn from(e: BlockHeaderStorageError) -> Self { GetTxHeightError::StorageError(e) } +} + +impl From for GetTxHeightError { + fn from(err: TryFromIntError) -> GetTxHeightError { GetTxHeightError::ConversionError(err) } +} + #[derive(Debug, Display)] pub enum GetBlockHeaderError { #[display(fmt = "Block header storage error: {}", _0)] @@ -639,6 +713,31 @@ impl From for SPVError { fn from(e: GetBlockHeaderError) -> Self { SPVError::UnableToGetHeader(e.to_string()) } } +#[derive(Debug, Display)] +pub enum GetConfirmedTxError { + HeightNotFound(GetTxHeightError), + UnableToGetHeader(GetBlockHeaderError), + RpcError(JsonRpcError), + SerializationError(serialization::Error), + SPVError(SPVError), +} + +impl From for 
GetConfirmedTxError { + fn from(err: GetTxHeightError) -> Self { GetConfirmedTxError::HeightNotFound(err) } +} + +impl From for GetConfirmedTxError { + fn from(err: GetBlockHeaderError) -> Self { GetConfirmedTxError::UnableToGetHeader(err) } +} + +impl From for GetConfirmedTxError { + fn from(err: JsonRpcError) -> Self { GetConfirmedTxError::RpcError(err) } +} + +impl From for GetConfirmedTxError { + fn from(err: serialization::Error) -> Self { GetConfirmedTxError::SerializationError(err) } +} + impl UtxoCoinFields { pub fn transaction_preimage(&self) -> TransactionInputSigner { let lock_time = if self.conf.ticker == "KMD" { @@ -1185,7 +1284,7 @@ pub fn coin_daemon_data_dir(name: &str, is_asset_chain: bool) -> PathBuf { /// Electrum protocol version verifier. /// The structure is used to handle the `on_connected` event and notify `electrum_version_loop`. struct ElectrumProtoVerifier { - on_connect_tx: mpsc::UnboundedSender, + on_connect_tx: UnboundedSender, } impl ElectrumProtoVerifier { @@ -1254,12 +1353,7 @@ impl UtxoActivationParams { Some("electrum") => { let servers = json::from_value(req["servers"].clone()).map_to_mm(UtxoFromLegacyReqErr::InvalidElectrumServers)?; - let block_header_params = json::from_value(req["block_header_params"].clone()) - .map_to_mm(UtxoFromLegacyReqErr::InvalidBlockHeaderVerificationParams)?; - UtxoRpcMode::Electrum { - servers, - block_header_params, - } + UtxoRpcMode::Electrum { servers } }, _ => return MmError::err(UtxoFromLegacyReqErr::UnexpectedMethod), }; @@ -1301,10 +1395,12 @@ impl UtxoActivationParams { #[serde(tag = "rpc", content = "rpc_data")] pub enum UtxoRpcMode { Native, - Electrum { - servers: Vec, - block_header_params: Option, - }, + Electrum { servers: Vec }, +} + +impl UtxoRpcMode { + #[inline] + pub fn is_native(&self) -> bool { matches!(*self, UtxoRpcMode::Native) } } #[derive(Debug)] diff --git a/mm2src/coins/utxo/bch.rs b/mm2src/coins/utxo/bch.rs index a0125db8ef..5b7de796c3 100644 --- 
a/mm2src/coins/utxo/bch.rs +++ b/mm2src/coins/utxo/bch.rs @@ -622,7 +622,7 @@ impl BchCoin { Ok(slp_tx_details_builder.build()) } - pub async fn get_block_timestamp(&self, height: u64) -> Result> { + pub async fn get_block_timestamp(&self, height: u64) -> Result> { self.as_ref().rpc_client.get_block_timestamp(height).await } } diff --git a/mm2src/coins/utxo/qtum.rs b/mm2src/coins/utxo/qtum.rs index 01c0091e17..4db3fc408c 100644 --- a/mm2src/coins/utxo/qtum.rs +++ b/mm2src/coins/utxo/qtum.rs @@ -218,18 +218,11 @@ impl<'a> UtxoCoinBuilder for QtumCoinBuilder<'a> { async fn build(self) -> MmResult { let utxo = self.build_utxo_fields().await?; - let rpc_client = utxo.rpc_client.clone(); let utxo_arc = UtxoArc::new(utxo); let utxo_weak = utxo_arc.downgrade(); let result_coin = QtumCoin::from(utxo_arc); - if let Some(abort_handler) = self.spawn_merge_utxo_loop_if_required(utxo_weak.clone(), QtumCoin::from) { - self.ctx.abort_handlers.lock().unwrap().push(abort_handler); - } - - if let Some(abort_handler) = - self.spawn_block_header_utxo_loop_if_required(utxo_weak, &rpc_client, QtumCoin::from) - { + if let Some(abort_handler) = self.spawn_merge_utxo_loop_if_required(utxo_weak, QtumCoin::from) { self.ctx.abort_handlers.lock().unwrap().push(abort_handler); } diff --git a/mm2src/coins/utxo/rpc_clients.rs b/mm2src/coins/utxo/rpc_clients.rs index 66a23f36a3..627d96e8c0 100644 --- a/mm2src/coins/utxo/rpc_clients.rs +++ b/mm2src/coins/utxo/rpc_clients.rs @@ -2,7 +2,8 @@ #![cfg_attr(target_arch = "wasm32", allow(dead_code))] use crate::utxo::utxo_block_header_storage::BlockHeaderStorage; -use crate::utxo::{output_script, sat_from_big_decimal, GetBlockHeaderError, GetTxError, GetTxHeightError}; +use crate::utxo::{output_script, sat_from_big_decimal, GetBlockHeaderError, GetConfirmedTxError, GetTxError, + GetTxHeightError}; use crate::{big_decimal_from_sat_unsigned, NumConversError, RpcTransportEventHandler, RpcTransportEventHandlerShared}; use async_trait::async_trait; use 
chain::{BlockHeader, BlockHeaderBits, BlockHeaderNonce, OutPoint, Transaction as UtxoTx}; @@ -32,11 +33,11 @@ use mm2_number::{BigDecimal, BigInt, MmNumber}; #[cfg(test)] use mocktopus::macros::*; use rpc::v1::types::{Bytes as BytesJson, Transaction as RpcTransaction, H256 as H256Json}; use serde_json::{self as json, Value as Json}; -use serialization::{coin_variant_by_ticker, deserialize, serialize, serialize_with_flags, CoinVariant, CompactInteger, - Reader, SERIALIZE_TRANSACTION_WITNESS}; +use serialization::{deserialize, serialize, serialize_with_flags, CoinVariant, CompactInteger, Reader, + SERIALIZE_TRANSACTION_WITNESS}; use sha2::{Digest, Sha256}; -use spv_validation::helpers_validation::{validate_headers, SPVError}; -use spv_validation::storage::{BlockHeaderStorageError, BlockHeaderStorageOps}; +use spv_validation::helpers_validation::SPVError; +use spv_validation::storage::BlockHeaderStorageOps; use std::collections::hash_map::Entry; use std::collections::HashMap; use std::convert::TryInto; @@ -343,7 +344,7 @@ pub trait UtxoRpcClientOps: fmt::Debug + Send + Sync + 'static { ) -> UtxoRpcFut; /// Returns block time in seconds since epoch (Jan 1 1970 GMT). - async fn get_block_timestamp(&self, height: u64) -> Result>; + async fn get_block_timestamp(&self, height: u64) -> Result>; /// Returns verbose transaction by the given `txid` if it's on-chain or None if it's not. 
async fn get_tx_if_onchain(&self, tx_hash: &H256Json) -> Result, MmError> { @@ -901,7 +902,7 @@ impl UtxoRpcClientOps for NativeClient { Box::new(fut.boxed().compat()) } - async fn get_block_timestamp(&self, height: u64) -> Result> { + async fn get_block_timestamp(&self, height: u64) -> Result> { let block = self.get_block_by_height(height).await?; Ok(block.time as u64) } @@ -1256,6 +1257,14 @@ pub struct TxMerkleBranch { pub pos: usize, } +#[derive(Clone)] +pub struct ConfirmedTransactionInfo { + pub tx: UtxoTx, + pub header: BlockHeader, + pub index: u64, + pub height: u64, +} + #[derive(Debug, PartialEq)] pub struct BestBlock { pub height: u64, @@ -1567,7 +1576,7 @@ pub struct ElectrumClientImpl { protocol_version: OrdRange, get_balance_concurrent_map: ConcurrentRequestMap, list_unspent_concurrent_map: ConcurrentRequestMap>, - block_headers_storage: Option, + block_headers_storage: BlockHeaderStorage, } async fn electrum_request_multi( @@ -1710,7 +1719,7 @@ impl ElectrumClientImpl { pub fn protocol_version(&self) -> &OrdRange { &self.protocol_version } /// Get block headers storage. 
- pub fn block_headers_storage(&self) -> &Option { &self.block_headers_storage } + pub fn block_headers_storage(&self) -> &BlockHeaderStorage { &self.block_headers_storage } } #[derive(Clone, Debug)] @@ -1869,33 +1878,35 @@ impl ElectrumClient { rpc_func!(self, "blockchain.block.headers", start_height, count) } - pub fn retrieve_last_headers( + pub fn retrieve_headers( &self, - blocks_limit_to_check: NonZeroU64, - block_height: u64, - ) -> UtxoRpcFut<(HashMap, Vec)> { + from: u64, + to: u64, + ) -> UtxoRpcFut<(HashMap, Vec, u64)> { let coin_name = self.coin_ticker.clone(); - let (from, count) = { - let from = if block_height < blocks_limit_to_check.get() { - 0 - } else { - block_height - blocks_limit_to_check.get() - }; - (from, blocks_limit_to_check) + if from == 0 || to < from { + return Box::new(futures01::future::err( + UtxoRpcError::Internal("Invalid values for from/to parameters".to_string()).into(), + )); + } + let count: NonZeroU64 = match (to - from + 1).try_into() { + Ok(c) => c, + Err(e) => return Box::new(futures01::future::err(UtxoRpcError::Internal(e.to_string()).into())), }; Box::new( self.blockchain_block_headers(from, count) .map_to_mm_fut(UtxoRpcError::from) .and_then(move |headers| { - let (block_registry, block_headers) = { + let (block_registry, block_headers, last_height) = { if headers.count == 0 { return MmError::err(UtxoRpcError::Internal("No headers available".to_string())); } let len = CompactInteger::from(headers.count); let mut serialized = serialize(&len).take(); serialized.extend(headers.hex.0.into_iter()); - let coin_variant = coin_variant_by_ticker(&coin_name); - let mut reader = Reader::new_with_coin_variant(serialized.as_slice(), coin_variant); + drop_mutability!(serialized); + let mut reader = + Reader::new_with_coin_variant(serialized.as_slice(), coin_name.as_str().into()); let maybe_block_headers = reader.read_list::(); let block_headers = match maybe_block_headers { Ok(headers) => headers, @@ -1907,9 +1918,9 @@ impl 
ElectrumClient { block_registry.insert(starting_height, block_header.clone()); starting_height += 1; } - (block_registry, block_headers) + (block_registry, block_headers, starting_height - 1) }; - Ok((block_registry, block_headers)) + Ok((block_registry, block_headers, last_height)) }), ) } @@ -1919,7 +1930,22 @@ impl ElectrumClient { rpc_func!(self, "blockchain.transaction.get_merkle", txid, height) } - async fn get_tx_height(&self, tx: &UtxoTx) -> Result> { + // get_tx_height_from_rpc is costly since it loops through history after requesting the whole history of the script pubkey + // This method should always be used if the block headers are saved to the DB + async fn get_tx_height_from_storage(&self, tx: &UtxoTx) -> Result> { + let tx_hash = tx.hash().reversed(); + let blockhash = self.get_verbose_transaction(&tx_hash.into()).compat().await?.blockhash; + Ok(self + .block_headers_storage() + .get_block_height_by_hash(blockhash.into()) + .await? + .ok_or_else(|| GetTxHeightError::HeightNotFound("Transaction block header is not found in storage".into()))? + .try_into()?) 
+ } + + // get_tx_height_from_storage is always preferred to be used instead of this, but if there is no headers in storage (storing headers is not enabled) + // this function can be used instead + async fn get_tx_height_from_rpc(&self, tx: &UtxoTx) -> Result { for output in tx.outputs.clone() { let script_pubkey_str = hex::encode(electrum_script_hash(&output.script_pubkey)); if let Ok(history) = self.scripthash_get_history(script_pubkey_str.as_str()).compat().await { @@ -1931,69 +1957,53 @@ impl ElectrumClient { } } } - MmError::err(GetTxHeightError::HeightNotFound( + Err(GetTxHeightError::HeightNotFound( "Couldn't find height through electrum!".into(), )) } - async fn tx_height_from_storage_or_rpc(&self, tx: &UtxoTx) -> Result> { - if let Some(storage) = &self.block_headers_storage { - let ticker = self.coin_name(); - let tx_hash = tx.hash().reversed(); - let blockhash = self.get_verbose_transaction(&tx_hash.into()).compat().await?.blockhash; - if let Ok(Some(height)) = storage.get_block_height_by_hash(ticker, blockhash.into()).await { - if let Ok(height) = height.try_into() { - return Ok(height); - } - } - } - - self.get_tx_height(tx).await - } - - async fn valid_block_header_from_storage(&self, height: u64) -> Result> { - let storage = match &self.block_headers_storage { - Some(storage) => storage, - None => { - return MmError::err(GetBlockHeaderError::StorageError(BlockHeaderStorageError::Internal( - "block_headers_storage is not initialized".to_owned(), - ))) - }, - }; - let ticker = self.coin_name(); - match storage.get_block_header(ticker, height).await? 
{ - None => { - let bytes = self.blockchain_block_header(height).compat().await?; - let header: BlockHeader = deserialize(bytes.0.as_slice())?; - let params = &storage.params; - let blocks_limit = params.blocks_limit_to_check; - let (headers_registry, headers) = self.retrieve_last_headers(blocks_limit, height).compat().await?; - match validate_headers(headers, params.difficulty_check, params.constant_difficulty) { - Ok(_) => { - storage.add_block_headers_to_storage(ticker, headers_registry).await?; - Ok(header) - }, - Err(err) => MmError::err(GetBlockHeaderError::SPVError(err)), - } - }, - Some(header) => Ok(header), - } + async fn block_header_from_storage(&self, height: u64) -> Result> { + self.block_headers_storage() + .get_block_header(height) + .await? + .ok_or_else(|| GetBlockHeaderError::Internal("Header not in storage!".into()).into()) } async fn block_header_from_storage_or_rpc(&self, height: u64) -> Result> { - match &self.block_headers_storage { - Some(_) => self.valid_block_header_from_storage(height).await, - None => Ok(deserialize( + match self.block_header_from_storage(height).await { + Ok(h) => Ok(h), + Err(_) => Ok(deserialize( self.blockchain_block_header(height).compat().await?.as_slice(), )?), } } - pub async fn get_merkle_and_header( + pub async fn get_confirmed_tx_info_from_rpc( + &self, + tx: &UtxoTx, + ) -> Result { + let height = self.get_tx_height_from_rpc(tx).await?; + + let merkle_branch = self + .blockchain_transaction_get_merkle(tx.hash().reversed().into(), height) + .compat() + .await?; + + let header = deserialize(self.blockchain_block_header(height).compat().await?.as_slice())?; + + Ok(ConfirmedTransactionInfo { + tx: tx.clone(), + header, + index: merkle_branch.pos as u64, + height, + }) + } + + pub async fn get_merkle_and_validated_header( &self, tx: &UtxoTx, ) -> Result<(TxMerkleBranch, BlockHeader, u64), MmError> { - let height = self.tx_height_from_storage_or_rpc(tx).await?; + let height = 
self.get_tx_height_from_storage(tx).await?; let merkle_branch = self .blockchain_transaction_get_merkle(tx.hash().reversed().into(), height) @@ -2001,7 +2011,7 @@ impl ElectrumClient { .await .map_to_mm(|e| SPVError::UnableToGetMerkle(e.to_string()))?; - let header = self.block_header_from_storage_or_rpc(height).await?; + let header = self.block_header_from_storage(height).await?; Ok((merkle_branch, header, height)) } @@ -2226,11 +2236,8 @@ impl UtxoRpcClientOps for ElectrumClient { ) } - async fn get_block_timestamp(&self, height: u64) -> Result> { - let header_bytes = self.blockchain_block_header(height).compat().await?; - let header: BlockHeader = - deserialize(header_bytes.0.as_slice()).map_to_mm(|e| UtxoRpcError::InvalidResponse(format!("{:?}", e)))?; - Ok(header.time as u64) + async fn get_block_timestamp(&self, height: u64) -> Result> { + Ok(self.block_header_from_storage_or_rpc(height).await?.time as u64) } } @@ -2239,7 +2246,7 @@ impl ElectrumClientImpl { pub fn new( coin_ticker: String, event_handlers: Vec, - block_headers_storage: Option, + block_headers_storage: BlockHeaderStorage, ) -> ElectrumClientImpl { let protocol_version = OrdRange::new(1.2, 1.4).unwrap(); ElectrumClientImpl { @@ -2259,7 +2266,7 @@ impl ElectrumClientImpl { coin_ticker: String, event_handlers: Vec, protocol_version: OrdRange, - block_headers_storage: Option, + block_headers_storage: BlockHeaderStorage, ) -> ElectrumClientImpl { ElectrumClientImpl { protocol_version, diff --git a/mm2src/coins/utxo/spv.rs b/mm2src/coins/utxo/spv.rs index 6d85d0afbe..9401e6b0b7 100644 --- a/mm2src/coins/utxo/spv.rs +++ b/mm2src/coins/utxo/spv.rs @@ -1,29 +1,21 @@ -use crate::utxo::rpc_clients::ElectrumClient; +use crate::utxo::rpc_clients::{ConfirmedTransactionInfo, ElectrumClient}; use async_trait::async_trait; -use chain::{BlockHeader, RawBlockHeader, Transaction as UtxoTx}; +use chain::Transaction as UtxoTx; use common::executor::Timer; use common::log::error; use common::now_ms; use 
keys::hash::H256; -use mm2_err_handle::prelude::*; use serialization::serialize_list; use spv_validation::helpers_validation::SPVError; use spv_validation::spv_proof::{SPVProof, TRY_SPV_PROOF_INTERVAL}; -pub struct ConfirmedTransactionInfo { - pub tx: UtxoTx, - pub header: BlockHeader, - pub index: u64, - pub height: u64, -} - #[async_trait] pub trait SimplePaymentVerification { async fn validate_spv_proof( &self, tx: &UtxoTx, try_spv_proof_until: u64, - ) -> Result>; + ) -> Result; } #[async_trait] @@ -32,22 +24,23 @@ impl SimplePaymentVerification for ElectrumClient { &self, tx: &UtxoTx, try_spv_proof_until: u64, - ) -> Result> { + ) -> Result { if tx.outputs.is_empty() { - return MmError::err(SPVError::InvalidVout); + return Err(SPVError::InvalidVout); } - let (merkle_branch, header, height) = loop { + let (merkle_branch, validated_header, height) = loop { if now_ms() / 1000 > try_spv_proof_until { + // Todo: Should not show this error when height is 0 error!( "Waited too long until {} for transaction {:?} to validate spv proof", try_spv_proof_until, tx.hash().reversed(), ); - return MmError::err(SPVError::Timeout); + return Err(SPVError::Timeout); } - match self.get_merkle_and_header(tx).await { + match self.get_merkle_and_validated_header(tx).await { Ok(res) => break res, Err(e) => { error!( @@ -62,7 +55,6 @@ impl SimplePaymentVerification for ElectrumClient { } }; - let raw_header = RawBlockHeader::new(header.raw().take())?; let intermediate_nodes: Vec = merkle_branch .merkle .into_iter() @@ -74,16 +66,14 @@ impl SimplePaymentVerification for ElectrumClient { vin: serialize_list(&tx.inputs).take(), vout: serialize_list(&tx.outputs).take(), index: merkle_branch.pos as u64, - confirming_header: header.clone(), - raw_header, intermediate_nodes, }; - proof.validate().map_err(MmError::new)?; + proof.validate(&validated_header)?; Ok(ConfirmedTransactionInfo { tx: tx.clone(), - header, + header: validated_header, index: proof.index, height, }) diff --git 
a/mm2src/coins/utxo/utxo_block_header_storage.rs b/mm2src/coins/utxo/utxo_block_header_storage.rs index 32f53dd89f..b553c049f7 100644 --- a/mm2src/coins/utxo/utxo_block_header_storage.rs +++ b/mm2src/coins/utxo/utxo_block_header_storage.rs @@ -9,101 +9,82 @@ use primitives::hash::H256; use spv_validation::storage::{BlockHeaderStorageError, BlockHeaderStorageOps}; use std::collections::HashMap; use std::fmt::{Debug, Formatter}; -use std::num::NonZeroU64; - -/// SPV headers verification parameters -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct BlockHeaderVerificationParams { - pub difficulty_check: bool, - pub constant_difficulty: bool, - // This should to be equal to or greater than the number of blocks needed before the chain is safe from reorganization (e.g. 6 blocks for BTC) - pub blocks_limit_to_check: NonZeroU64, - pub check_every: f64, -} pub struct BlockHeaderStorage { pub inner: Box, - pub params: BlockHeaderVerificationParams, } impl Debug for BlockHeaderStorage { fn fmt(&self, _f: &mut Formatter<'_>) -> std::fmt::Result { Ok(()) } } -pub trait InitBlockHeaderStorageOps: Send + Sync + 'static { - fn new_from_ctx( - ctx: MmArc, - params: BlockHeaderVerificationParams, - ) -> Result - where - Self: Sized; -} - -impl InitBlockHeaderStorageOps for BlockHeaderStorage { - #[cfg(not(target_arch = "wasm32"))] - fn new_from_ctx(ctx: MmArc, params: BlockHeaderVerificationParams) -> Result { +impl BlockHeaderStorage { + #[cfg(all(not(test), not(target_arch = "wasm32")))] + pub(crate) fn new_from_ctx(ctx: MmArc, ticker: String) -> Result { let sqlite_connection = ctx.sqlite_connection.ok_or(BlockHeaderStorageError::Internal( "sqlite_connection is not initialized".to_owned(), ))?; Ok(BlockHeaderStorage { - inner: Box::new(SqliteBlockHeadersStorage(sqlite_connection.clone())), - params, + inner: Box::new(SqliteBlockHeadersStorage { + ticker, + conn: sqlite_connection.clone(), + }), }) } #[cfg(target_arch = "wasm32")] - fn new_from_ctx(_ctx: MmArc, params: 
BlockHeaderVerificationParams) -> Result { + pub(crate) fn new_from_ctx(_ctx: MmArc, _ticker: String) -> Result { Ok(BlockHeaderStorage { inner: Box::new(IndexedDBBlockHeadersStorage {}), - params, + }) + } + + #[cfg(all(test, not(target_arch = "wasm32")))] + pub(crate) fn new_from_ctx(ctx: MmArc, ticker: String) -> Result { + use db_common::sqlite::rusqlite::Connection; + use std::sync::{Arc, Mutex}; + + let conn = Arc::new(Mutex::new(Connection::open_in_memory().unwrap())); + let conn = ctx.sqlite_connection.clone_or(conn); + + Ok(BlockHeaderStorage { + inner: Box::new(SqliteBlockHeadersStorage { ticker, conn }), }) } } #[async_trait] impl BlockHeaderStorageOps for BlockHeaderStorage { - async fn init(&self, for_coin: &str) -> Result<(), BlockHeaderStorageError> { self.inner.init(for_coin).await } + async fn init(&self) -> Result<(), BlockHeaderStorageError> { self.inner.init().await } - async fn is_initialized_for(&self, for_coin: &str) -> Result { - self.inner.is_initialized_for(for_coin).await + async fn is_initialized_for(&self) -> Result { + self.inner.is_initialized_for().await } async fn add_block_headers_to_storage( &self, - for_coin: &str, headers: HashMap, ) -> Result<(), BlockHeaderStorageError> { - self.inner.add_block_headers_to_storage(for_coin, headers).await + self.inner.add_block_headers_to_storage(headers).await } - async fn get_block_header( - &self, - for_coin: &str, - height: u64, - ) -> Result, BlockHeaderStorageError> { - self.inner.get_block_header(for_coin, height).await + async fn get_block_header(&self, height: u64) -> Result, BlockHeaderStorageError> { + self.inner.get_block_header(height).await } - async fn get_block_header_raw( - &self, - for_coin: &str, - height: u64, - ) -> Result, BlockHeaderStorageError> { - self.inner.get_block_header_raw(for_coin, height).await + async fn get_block_header_raw(&self, height: u64) -> Result, BlockHeaderStorageError> { + self.inner.get_block_header_raw(height).await } - async fn 
get_last_block_header_with_non_max_bits( - &self, - for_coin: &str, - ) -> Result, BlockHeaderStorageError> { - self.inner.get_last_block_header_with_non_max_bits(for_coin).await + async fn get_last_block_height(&self) -> Result { + self.inner.get_last_block_height().await } - async fn get_block_height_by_hash( - &self, - for_coin: &str, - hash: H256, - ) -> Result, BlockHeaderStorageError> { - self.inner.get_block_height_by_hash(for_coin, hash).await + async fn get_last_block_header_with_non_max_bits(&self) -> Result, BlockHeaderStorageError> { + self.inner.get_last_block_header_with_non_max_bits().await + } + + async fn get_block_height_by_hash(&self, hash: H256) -> Result, BlockHeaderStorageError> { + self.inner.get_block_height_by_hash(hash).await } } diff --git a/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs b/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs index 2a90bc036f..fc49c9f36d 100644 --- a/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs +++ b/mm2src/coins/utxo/utxo_builder/utxo_arc_builder.rs @@ -1,16 +1,23 @@ use crate::utxo::rpc_clients::UtxoRpcClientEnum; use crate::utxo::utxo_builder::{UtxoCoinBuildError, UtxoCoinBuilder, UtxoCoinBuilderCommonOps, UtxoFieldsWithHardwareWalletBuilder, UtxoFieldsWithIguanaPrivKeyBuilder}; -use crate::utxo::utxo_common::{block_header_utxo_loop, merge_utxo_loop}; -use crate::utxo::{GetUtxoListOps, UtxoArc, UtxoCommonOps, UtxoWeak}; -use crate::{PrivKeyBuildPolicy, UtxoActivationParams}; +use crate::utxo::{generate_and_send_tx, FeePolicy, GetUtxoListOps, UtxoArc, UtxoCommonOps, UtxoSyncStatusLoopHandle, + UtxoWeak}; +use crate::{DerivationMethod, PrivKeyBuildPolicy, UtxoActivationParams}; use async_trait::async_trait; -use common::executor::spawn; -use common::log::info; +use chain::TransactionOutput; +use common::executor::{spawn, Timer}; +use common::log::{error, info, warn}; +use futures::compat::Future01CompatExt; use futures::future::{abortable, AbortHandle}; use mm2_core::mm_ctx::MmArc; use 
mm2_err_handle::prelude::*; +use script::Builder; use serde_json::Value as Json; +use spv_validation::helpers_validation::validate_headers; +use spv_validation::storage::BlockHeaderStorageOps; + +const BLOCK_HEADERS_LOOP_INTERVAL: f64 = 60.; pub struct UtxoArcBuilder<'a, F, T> where @@ -84,7 +91,7 @@ where async fn build(self) -> MmResult { let utxo = self.build_utxo_fields().await?; - let rpc_client = utxo.rpc_client.clone(); + let sync_status_loop_handle = utxo.block_headers_status_notifier.clone(); let utxo_arc = UtxoArc::new(utxo); let utxo_weak = utxo_arc.downgrade(); let result_coin = (self.constructor)(utxo_arc); @@ -94,11 +101,12 @@ where self.ctx.abort_handlers.lock().unwrap().push(abort_handler); } - if let Some(abort_handler) = - self.spawn_block_header_utxo_loop_if_required(utxo_weak, &rpc_client, self.constructor.clone()) - { + if let Some(sync_status_loop_handle) = sync_status_loop_handle { + let abort_handler = + self.spawn_block_header_utxo_loop(utxo_weak, self.constructor.clone(), sync_status_loop_handle); self.ctx.abort_handlers.lock().unwrap().push(abort_handler); } + Ok(result_coin) } } @@ -117,6 +125,65 @@ where { } +async fn merge_utxo_loop( + weak: UtxoWeak, + merge_at: usize, + check_every: f64, + max_merge_at_once: usize, + constructor: impl Fn(UtxoArc) -> T, +) where + T: UtxoCommonOps + GetUtxoListOps, +{ + loop { + Timer::sleep(check_every).await; + + let coin = match weak.upgrade() { + Some(arc) => constructor(arc), + None => break, + }; + + let my_address = match coin.as_ref().derivation_method { + DerivationMethod::Iguana(ref my_address) => my_address, + DerivationMethod::HDWallet(_) => { + warn!("'merge_utxo_loop' is currently not used for HD wallets"); + return; + }, + }; + + let ticker = &coin.as_ref().conf.ticker; + let (unspents, recently_spent) = match coin.get_unspent_ordered_list(my_address).await { + Ok((unspents, recently_spent)) => (unspents, recently_spent), + Err(e) => { + error!("Error {} on get_unspent_ordered_list of 
coin {}", e, ticker); + continue; + }, + }; + if unspents.len() >= merge_at { + let unspents: Vec<_> = unspents.into_iter().take(max_merge_at_once).collect(); + info!("Trying to merge {} UTXOs of coin {}", unspents.len(), ticker); + let value = unspents.iter().fold(0, |sum, unspent| sum + unspent.value); + let script_pubkey = Builder::build_p2pkh(&my_address.hash).to_bytes(); + let output = TransactionOutput { value, script_pubkey }; + let merge_tx_fut = generate_and_send_tx( + &coin, + unspents, + None, + FeePolicy::DeductFromOutput(0), + recently_spent, + vec![output], + ); + match merge_tx_fut.await { + Ok(tx) => info!( + "UTXO merge successful for coin {}, tx_hash {:?}", + ticker, + tx.hash().reversed() + ), + Err(e) => error!("Error {:?} on UTXO merge attempt for coin {}", e, ticker), + } + } + } +} + pub trait MergeUtxoArcOps: UtxoCoinBuilderCommonOps { fn spawn_merge_utxo_loop_if_required(&self, weak: UtxoWeak, constructor: F) -> Option where @@ -143,33 +210,103 @@ pub trait MergeUtxoArcOps: UtxoCoinBuilderCom } } +async fn block_header_utxo_loop( + weak: UtxoWeak, + constructor: impl Fn(UtxoArc) -> T, + mut sync_status_loop_handle: UtxoSyncStatusLoopHandle, +) { + while let Some(arc) = weak.upgrade() { + let coin = constructor(arc); + let client = match &coin.as_ref().rpc_client { + UtxoRpcClientEnum::Native(_) => break, + UtxoRpcClientEnum::Electrum(client) => client, + }; + + let storage = client.block_headers_storage(); + let from_block_height = match storage.get_last_block_height().await { + Ok(h) => h, + Err(e) => { + error!("Error {} on getting the height of the last stored header in DB!", e); + sync_status_loop_handle.notify_on_temp_error(e.to_string()); + Timer::sleep(10.).await; + continue; + }, + }; + + let to_block_height = match coin.as_ref().rpc_client.get_block_count().compat().await { + Ok(h) => h, + Err(e) => { + error!("Error {} on getting the height of the latest block from rpc!", e); + 
sync_status_loop_handle.notify_on_temp_error(e.to_string()); + Timer::sleep(10.).await; + continue; + }, + }; + + // Todo: Add code for the case if a chain reorganization happens + if from_block_height == to_block_height { + Timer::sleep(BLOCK_HEADERS_LOOP_INTERVAL).await; + continue; + } + + sync_status_loop_handle.notify_blocks_headers_sync_status(from_block_height + 1, to_block_height); + + let (block_registry, block_headers, last_retrieved_height) = match client + .retrieve_headers(from_block_height + 1, to_block_height) + .compat() + .await + { + Ok(res) => res, + Err(e) => { + error!("Error {} on retrieving the latest headers from rpc!", e); + sync_status_loop_handle.notify_on_temp_error(e.to_string()); + Timer::sleep(10.).await; + continue; + }, + }; + + let ticker = coin.as_ref().conf.ticker.as_str(); + if let Some(params) = &coin.as_ref().conf.block_headers_verification_params { + if let Err(e) = validate_headers(ticker, from_block_height, block_headers, storage, params).await { + error!("Error {} on validating the latest headers!", e); + // Todo: remove this electrum server and use another in this case since the headers from this server are invalid + sync_status_loop_handle.notify_on_permanent_error(e.to_string()); + break; + } + } + + ok_or_continue_after_sleep!( + storage.add_block_headers_to_storage(block_registry).await, + BLOCK_HEADERS_LOOP_INTERVAL + ); + + // blockchain.block.headers returns a maximum of 2016 headers (tested for btc) so the loop needs to continue until we have all headers up to the current one. 
+ if last_retrieved_height == to_block_height { + sync_status_loop_handle.notify_sync_finished(to_block_height); + Timer::sleep(BLOCK_HEADERS_LOOP_INTERVAL).await; + } + } +} + pub trait BlockHeaderUtxoArcOps: UtxoCoinBuilderCommonOps { - fn spawn_block_header_utxo_loop_if_required( + fn spawn_block_header_utxo_loop( &self, weak: UtxoWeak, - rpc_client: &UtxoRpcClientEnum, constructor: F, - ) -> Option + sync_status_loop_handle: UtxoSyncStatusLoopHandle, + ) -> AbortHandle where F: Fn(UtxoArc) -> T + Send + Sync + 'static, T: UtxoCommonOps, { - if let UtxoRpcClientEnum::Electrum(electrum) = rpc_client { - if electrum.block_headers_storage().is_some() { - let ticker = self.ticker().to_owned(); - let (fut, abort_handle) = abortable(block_header_utxo_loop(weak, constructor)); - info!("Starting UTXO block header loop for coin {}", ticker); - spawn(async move { - if let Err(e) = fut.await { - info!( - "spawn_block_header_utxo_loop_if_required stopped for {}, reason {}", - ticker, e - ); - } - }); - return Some(abort_handle); + let ticker = self.ticker().to_owned(); + let (fut, abort_handle) = abortable(block_header_utxo_loop(weak, constructor, sync_status_loop_handle)); + info!("Starting UTXO block header loop for coin {}", ticker); + spawn(async move { + if let Err(e) = fut.await { + info!("spawn_block_header_utxo_loop stopped for {}, reason {}", ticker, e); } - } - None + }); + abort_handle } } diff --git a/mm2src/coins/utxo/utxo_builder/utxo_coin_builder.rs b/mm2src/coins/utxo/utxo_builder/utxo_coin_builder.rs index faced1c8f1..eb3730cfa7 100644 --- a/mm2src/coins/utxo/utxo_builder/utxo_coin_builder.rs +++ b/mm2src/coins/utxo/utxo_builder/utxo_coin_builder.rs @@ -3,12 +3,11 @@ use crate::hd_wallet_storage::{HDWalletCoinStorage, HDWalletStorageError}; use crate::utxo::rpc_clients::{ElectrumClient, ElectrumClientImpl, ElectrumRpcRequest, EstimateFeeMethod, UtxoRpcClientEnum}; use crate::utxo::tx_cache::{UtxoVerboseCacheOps, UtxoVerboseCacheShared}; -use 
crate::utxo::utxo_block_header_storage::{BlockHeaderStorage, BlockHeaderVerificationParams, - InitBlockHeaderStorageOps}; +use crate::utxo::utxo_block_header_storage::BlockHeaderStorage; use crate::utxo::utxo_builder::utxo_conf_builder::{UtxoConfBuilder, UtxoConfError, UtxoConfResult}; use crate::utxo::{output_script, utxo_common, ElectrumBuilderArgs, ElectrumProtoVerifier, RecentlySpentOutPoints, - TxFee, UtxoCoinConf, UtxoCoinFields, UtxoHDAccount, UtxoHDWallet, UtxoRpcMode, DEFAULT_GAP_LIMIT, - UTXO_DUST_AMOUNT}; + TxFee, UtxoCoinConf, UtxoCoinFields, UtxoHDAccount, UtxoHDWallet, UtxoRpcMode, UtxoSyncStatus, + UtxoSyncStatusLoopHandle, DEFAULT_GAP_LIMIT, UTXO_DUST_AMOUNT}; use crate::{BlockchainNetwork, CoinTransportMetrics, DerivationMethod, HistorySyncState, PrivKeyBuildPolicy, PrivKeyPolicy, RpcClientType, UtxoActivationParams}; use async_trait::async_trait; @@ -18,7 +17,7 @@ use common::log::{error, info}; use common::small_rng; use crypto::{Bip32DerPathError, Bip44DerPathError, Bip44PathToCoin, CryptoCtx, CryptoInitError, HwWalletType}; use derive_more::Display; -use futures::channel::mpsc; +use futures::channel::mpsc::{channel, unbounded, Receiver as AsyncReceiver, UnboundedReceiver}; use futures::compat::Future01CompatExt; use futures::lock::Mutex as AsyncMutex; use futures::StreamExt; @@ -30,6 +29,7 @@ use mm2_err_handle::prelude::*; use primitives::hash::H256; use rand::seq::SliceRandom; use serde_json::{self as json, Value as Json}; +use spv_validation::storage::{BlockHeaderStorageError, BlockHeaderStorageOps}; use std::sync::{Arc, Mutex, Weak}; cfg_native! { @@ -77,6 +77,7 @@ pub enum UtxoCoinBuildError { fmt = "Coin doesn't support Trezor hardware wallet. 
Please consider adding the 'trezor_coin' field to the coins config" )] CoinDoesntSupportTrezor, + BlockHeaderStorageError(BlockHeaderStorageError), #[display(fmt = "Internal error: {}", _0)] Internal(String), } @@ -98,6 +99,10 @@ impl From for UtxoCoinBuildError { fn from(e: HDWalletStorageError) -> Self { UtxoCoinBuildError::HDWalletStorageError(e) } } +impl From for UtxoCoinBuildError { + fn from(e: BlockHeaderStorageError) -> Self { UtxoCoinBuildError::BlockHeaderStorageError(e) } +} + #[async_trait] pub trait UtxoCoinBuilder: UtxoFieldsWithIguanaPrivKeyBuilder + UtxoFieldsWithHardwareWalletBuilder { type ResultCoin; @@ -164,6 +169,7 @@ pub trait UtxoFieldsWithIguanaPrivKeyBuilder: UtxoCoinBuilderCommonOps { let tx_hash_algo = self.tx_hash_algo(); let check_utxo_maturity = self.check_utxo_maturity(); let tx_cache = self.tx_cache(); + let (block_headers_status_notifier, block_headers_status_watcher) = self.block_header_status_channel(); let coin = UtxoCoinFields { conf, @@ -178,6 +184,8 @@ pub trait UtxoFieldsWithIguanaPrivKeyBuilder: UtxoCoinBuilderCommonOps { tx_fee, tx_hash_algo, check_utxo_maturity, + block_headers_status_notifier, + block_headers_status_watcher, }; Ok(coin) } @@ -225,6 +233,7 @@ pub trait UtxoFieldsWithHardwareWalletBuilder: UtxoCoinBuilderCommonOps { let tx_hash_algo = self.tx_hash_algo(); let check_utxo_maturity = self.check_utxo_maturity(); let tx_cache = self.tx_cache(); + let (block_headers_status_notifier, block_headers_status_watcher) = self.block_header_status_channel(); let coin = UtxoCoinFields { conf, @@ -239,6 +248,8 @@ pub trait UtxoFieldsWithHardwareWalletBuilder: UtxoCoinBuilderCommonOps { tx_fee, tx_hash_algo, check_utxo_maturity, + block_headers_status_notifier, + block_headers_status_watcher, }; Ok(coin) } @@ -394,13 +405,8 @@ pub trait UtxoCoinBuilderCommonOps { Ok(UtxoRpcClientEnum::Native(native)) } }, - UtxoRpcMode::Electrum { - servers, - block_header_params, - } => { - let electrum = self - 
.electrum_client(ElectrumBuilderArgs::default(), servers, block_header_params) - .await?; + UtxoRpcMode::Electrum { servers } => { + let electrum = self.electrum_client(ElectrumBuilderArgs::default(), servers).await?; Ok(UtxoRpcClientEnum::Electrum(electrum)) }, } @@ -410,9 +416,8 @@ pub trait UtxoCoinBuilderCommonOps { &self, args: ElectrumBuilderArgs, mut servers: Vec, - block_header_params: Option, ) -> UtxoCoinBuildResult { - let (on_connect_tx, on_connect_rx) = mpsc::unbounded(); + let (on_connect_tx, on_connect_rx) = unbounded(); let ticker = self.ticker().to_owned(); let ctx = self.ctx(); let mut event_handlers = vec![]; @@ -426,13 +431,12 @@ pub trait UtxoCoinBuilderCommonOps { event_handlers.push(ElectrumProtoVerifier { on_connect_tx }.into_shared()); } - let block_headers_storage = match block_header_params { - Some(params) => Some( - BlockHeaderStorage::new_from_ctx(self.ctx().clone(), params) - .map_to_mm(|e| UtxoCoinBuildError::Internal(e.to_string()))?, - ), - None => None, - }; + let storage_ticker = self.ticker().replace('-', "_"); + let block_headers_storage = BlockHeaderStorage::new_from_ctx(self.ctx().clone(), storage_ticker) + .map_to_mm(|e| UtxoCoinBuildError::Internal(e.to_string()))?; + if !block_headers_storage.is_initialized_for().await? 
{ + block_headers_storage.init().await?; + } let mut rng = small_rng(); servers.as_mut_slice().shuffle(&mut rng); @@ -584,6 +588,23 @@ pub trait UtxoCoinBuilderCommonOps { #[cfg(not(target_arch = "wasm32"))] fn tx_cache_path(&self) -> PathBuf { self.ctx().dbdir().join("TX_CACHE") } + + fn block_header_status_channel( + &self, + ) -> ( + Option, + Option>>, + ) { + if self.conf()["enable_spv_proof"].as_bool().unwrap_or(false) && !self.activation_params().mode.is_native() { + let (sync_status_notifier, sync_watcher) = channel(1); + ( + Some(UtxoSyncStatusLoopHandle::new(sync_status_notifier)), + Some(AsyncMutex::new(sync_watcher)), + ) + } else { + (None, None) + } + } } /// Attempts to parse native daemon conf file and return rpcport, rpcuser and rpcpassword @@ -655,7 +676,7 @@ fn spawn_electrum_ping_loop(weak_client: Weak, servers: Vec< /// Weak reference will allow to stop the thread if client is dropped. fn spawn_electrum_version_loop( weak_client: Weak, - mut on_connect_rx: mpsc::UnboundedReceiver, + mut on_connect_rx: UnboundedReceiver, client_name: String, ) { spawn(async move { diff --git a/mm2src/coins/utxo/utxo_builder/utxo_conf_builder.rs b/mm2src/coins/utxo/utxo_builder/utxo_conf_builder.rs index db1512b38d..f8a823b454 100644 --- a/mm2src/coins/utxo/utxo_builder/utxo_conf_builder.rs +++ b/mm2src/coins/utxo/utxo_builder/utxo_conf_builder.rs @@ -11,6 +11,7 @@ pub use keys::{Address, AddressFormat as UtxoAddressFormat, AddressHashEnum, Key use mm2_err_handle::prelude::*; use script::SignatureVersion; use serde_json::{self as json, Value as Json}; +use spv_validation::helpers_validation::BlockHeaderVerificationParams; use std::num::NonZeroU64; use std::sync::atomic::AtomicBool; @@ -98,6 +99,7 @@ impl<'a> UtxoConfBuilder<'a> { let estimate_fee_blocks = self.estimate_fee_blocks(); let trezor_coin = self.trezor_coin(); let enable_spv_proof = self.enable_spv_proof(); + let block_headers_verification_params = self.block_headers_verification_params(); 
Ok(UtxoCoinConf { ticker: self.ticker.to_owned(), @@ -130,6 +132,7 @@ impl<'a> UtxoConfBuilder<'a> { estimate_fee_blocks, trezor_coin, enable_spv_proof, + block_headers_verification_params, }) } @@ -288,4 +291,8 @@ impl<'a> UtxoConfBuilder<'a> { } fn enable_spv_proof(&self) -> bool { self.conf["enable_spv_proof"].as_bool().unwrap_or(false) } + + fn block_headers_verification_params(&self) -> Option { + json::from_value(self.conf["block_headers_verification_params"].clone()).unwrap_or(None) + } } diff --git a/mm2src/coins/utxo/utxo_common.rs b/mm2src/coins/utxo/utxo_common.rs index dd0a071fda..f03f11771f 100644 --- a/mm2src/coins/utxo/utxo_common.rs +++ b/mm2src/coins/utxo/utxo_common.rs @@ -21,7 +21,7 @@ use chain::constants::SEQUENCE_FINAL; use chain::{OutPoint, TransactionOutput}; use common::executor::Timer; use common::jsonrpc_client::JsonRpcErrorType; -use common::log::{debug, error, info, warn}; +use common::log::{error, warn}; use common::{now_ms, one_hundred, ten_f64}; use crypto::{Bip32DerPathOps, Bip44Chain, Bip44DerPathError, Bip44DerivationPath, RpcDerivationPath}; use futures::compat::Future01CompatExt; @@ -41,8 +41,6 @@ use secp256k1::{PublicKey, Signature}; use serde_json::{self as json}; use serialization::{deserialize, serialize, serialize_with_flags, CoinVariant, CompactInteger, Serializable, Stream, SERIALIZE_TRANSACTION_WITNESS}; -use spv_validation::helpers_validation::validate_headers; -use spv_validation::storage::BlockHeaderStorageOps; use std::cmp::Ordering; use std::collections::hash_map::{Entry, HashMap}; use std::str::FromStr; @@ -3155,7 +3153,7 @@ pub fn validate_payment( client .validate_spv_proof(&tx, try_spv_proof_until) .await - .map_err(|e| format!("{:?}", e))?; + .map_err(|e| format!("{}", e))?; } } @@ -3443,135 +3441,6 @@ fn increase_by_percent(num: u64, percent: f64) -> u64 { num + (percent.round() as u64) } -pub async fn block_header_utxo_loop(weak: UtxoWeak, constructor: impl Fn(UtxoArc) -> T) { - { - let coin = match 
weak.upgrade() { - Some(arc) => constructor(arc), - None => return, - }; - let ticker = coin.as_ref().conf.ticker.as_str(); - let storage = match &coin.as_ref().rpc_client { - UtxoRpcClientEnum::Native(_) => return, - UtxoRpcClientEnum::Electrum(e) => match e.block_headers_storage() { - None => return, - Some(storage) => storage, - }, - }; - match storage.is_initialized_for(ticker).await { - Ok(true) => info!("Block Header Storage already initialized for {}", ticker), - Ok(false) => { - if let Err(e) = storage.init(ticker).await { - error!( - "Couldn't initiate storage - aborting the block_header_utxo_loop: {:?}", - e - ); - return; - } - info!("Block Header Storage successfully initialized for {}", ticker); - }, - Err(_e) => return, - }; - } - while let Some(arc) = weak.upgrade() { - let coin = constructor(arc); - let client = match &coin.as_ref().rpc_client { - UtxoRpcClientEnum::Native(_) => break, - UtxoRpcClientEnum::Electrum(client) => client, - }; - let storage = match client.block_headers_storage() { - None => return, - Some(storage) => storage, - }; - let params = storage.params.clone(); - let (check_every, blocks_limit_to_check, difficulty_check, constant_difficulty) = ( - params.check_every, - params.blocks_limit_to_check, - params.difficulty_check, - params.constant_difficulty, - ); - let height = - ok_or_continue_after_sleep!(coin.as_ref().rpc_client.get_block_count().compat().await, check_every); - let (block_registry, block_headers) = ok_or_continue_after_sleep!( - client - .retrieve_last_headers(blocks_limit_to_check, height) - .compat() - .await, - check_every - ); - ok_or_continue_after_sleep!( - validate_headers(block_headers, difficulty_check, constant_difficulty), - check_every - ); - - let ticker = coin.as_ref().conf.ticker.as_str(); - ok_or_continue_after_sleep!( - storage.add_block_headers_to_storage(ticker, block_registry).await, - check_every - ); - debug!("tick block_header_utxo_loop for {}", coin.as_ref().conf.ticker); - 
Timer::sleep(check_every).await; - } -} - -pub async fn merge_utxo_loop( - weak: UtxoWeak, - merge_at: usize, - check_every: f64, - max_merge_at_once: usize, - constructor: impl Fn(UtxoArc) -> T, -) where - T: UtxoCommonOps + GetUtxoListOps, -{ - loop { - Timer::sleep(check_every).await; - - let coin = match weak.upgrade() { - Some(arc) => constructor(arc), - None => break, - }; - - let my_address = match coin.as_ref().derivation_method { - DerivationMethod::Iguana(ref my_address) => my_address, - DerivationMethod::HDWallet(_) => { - warn!("'merge_utxo_loop' is currently not used for HD wallets"); - return; - }, - }; - - let ticker = &coin.as_ref().conf.ticker; - let (unspents, recently_spent) = match coin.get_unspent_ordered_list(my_address).await { - Ok((unspents, recently_spent)) => (unspents, recently_spent), - Err(e) => { - error!("Error {} on get_unspent_ordered_list of coin {}", e, ticker); - continue; - }, - }; - if unspents.len() >= merge_at { - let unspents: Vec<_> = unspents.into_iter().take(max_merge_at_once).collect(); - info!("Trying to merge {} UTXOs of coin {}", unspents.len(), ticker); - let value = unspents.iter().fold(0, |sum, unspent| sum + unspent.value); - let script_pubkey = Builder::build_p2pkh(&my_address.hash).to_bytes(); - let output = TransactionOutput { value, script_pubkey }; - let merge_tx_fut = generate_and_send_tx( - &coin, - unspents, - None, - FeePolicy::DeductFromOutput(0), - recently_spent, - vec![output], - ); - match merge_tx_fut.await { - Ok(tx) => info!( - "UTXO merge successful for coin {}, tx_hash {:?}", - ticker, - tx.hash().reversed() - ), - Err(e) => error!("Error {:?} on UTXO merge attempt for coin {}", e, ticker), - } - } - } -} - pub async fn can_refund_htlc(coin: &T, locktime: u64) -> Result> where T: UtxoCommonOps, diff --git a/mm2src/coins/utxo/utxo_indexedb_block_header_storage.rs b/mm2src/coins/utxo/utxo_indexedb_block_header_storage.rs index e0e98658a4..ce7706b125 100644 --- 
a/mm2src/coins/utxo/utxo_indexedb_block_header_storage.rs +++ b/mm2src/coins/utxo/utxo_indexedb_block_header_storage.rs @@ -9,46 +9,28 @@ pub struct IndexedDBBlockHeadersStorage {} #[async_trait] impl BlockHeaderStorageOps for IndexedDBBlockHeadersStorage { - async fn init(&self, _for_coin: &str) -> Result<(), BlockHeaderStorageError> { Ok(()) } + async fn init(&self) -> Result<(), BlockHeaderStorageError> { Ok(()) } - async fn is_initialized_for(&self, _for_coin: &str) -> Result { Ok(true) } + async fn is_initialized_for(&self) -> Result { Ok(true) } async fn add_block_headers_to_storage( &self, - _for_coin: &str, _headers: HashMap, ) -> Result<(), BlockHeaderStorageError> { Ok(()) } - async fn get_block_header( - &self, - _for_coin: &str, - _height: u64, - ) -> Result, BlockHeaderStorageError> { - Ok(None) - } + async fn get_block_header(&self, _height: u64) -> Result, BlockHeaderStorageError> { Ok(None) } - async fn get_block_header_raw( - &self, - _for_coin: &str, - _height: u64, - ) -> Result, BlockHeaderStorageError> { - Ok(None) - } + async fn get_block_header_raw(&self, _height: u64) -> Result, BlockHeaderStorageError> { Ok(None) } - async fn get_last_block_header_with_non_max_bits( - &self, - _for_coin: &str, - ) -> Result, BlockHeaderStorageError> { - Ok(None) + async fn get_last_block_height(&self) -> Result { + Err(BlockHeaderStorageError::Internal("Not implemented".into())) } - async fn get_block_height_by_hash( - &self, - for_coin: &str, - hash: H256, - ) -> Result, BlockHeaderStorageError> { + async fn get_last_block_header_with_non_max_bits(&self) -> Result, BlockHeaderStorageError> { Ok(None) } + + async fn get_block_height_by_hash(&self, _hash: H256) -> Result, BlockHeaderStorageError> { Ok(None) } } diff --git a/mm2src/coins/utxo/utxo_sql_block_header_storage.rs b/mm2src/coins/utxo/utxo_sql_block_header_storage.rs index c73d0c40de..0a54eef435 100644 --- a/mm2src/coins/utxo/utxo_sql_block_header_storage.rs +++ 
b/mm2src/coins/utxo/utxo_sql_block_header_storage.rs @@ -7,10 +7,12 @@ use db_common::{sqlite::rusqlite::Error as SqlError, sqlite::validate_table_name, sqlite::CHECK_TABLE_EXISTS_SQL}; use primitives::hash::H256; +use serialization::Reader; use spv_validation::storage::{BlockHeaderStorageError, BlockHeaderStorageOps}; use spv_validation::work::MAX_BITS_BTC; use std::collections::HashMap; use std::convert::TryInto; +use std::num::TryFromIntError; use std::sync::{Arc, Mutex}; fn block_headers_cache_table(ticker: &str) -> String { ticker.to_owned() + "_block_headers_cache" } @@ -56,6 +58,16 @@ fn get_block_header_by_height(for_coin: &str) -> Result Result { + let table_name = get_table_name_and_validate(for_coin)?; + let sql = format!( + "SELECT block_height FROM {} ORDER BY block_height DESC LIMIT 1;", + table_name + ); + + Ok(sql) +} + fn get_last_block_header_with_non_max_bits_sql(for_coin: &str) -> Result { let table_name = get_table_name_and_validate(for_coin)?; let sql = format!( @@ -74,7 +86,10 @@ fn get_block_height_by_hash(for_coin: &str) -> Result>); +pub struct SqliteBlockHeadersStorage { + pub ticker: String, + pub conn: Arc>, +} fn query_single_row( conn: &Connection, @@ -95,12 +110,12 @@ where #[async_trait] impl BlockHeaderStorageOps for SqliteBlockHeadersStorage { - async fn init(&self, for_coin: &str) -> Result<(), BlockHeaderStorageError> { + async fn init(&self) -> Result<(), BlockHeaderStorageError> { + let coin = self.ticker.clone(); let selfi = self.clone(); - let sql_cache = create_block_header_cache_table_sql(for_coin)?; - let coin = for_coin.to_owned(); + let sql_cache = create_block_header_cache_table_sql(&coin)?; async_blocking(move || { - let conn = selfi.0.lock().unwrap(); + let conn = selfi.conn.lock().unwrap(); conn.execute(&sql_cache, NO_PARAMS).map(|_| ()).map_err(|e| { BlockHeaderStorageError::InitializationError { coin, @@ -112,11 +127,12 @@ impl BlockHeaderStorageOps for SqliteBlockHeadersStorage { .await } - async fn 
is_initialized_for(&self, for_coin: &str) -> Result { - let block_headers_cache_table = get_table_name_and_validate(for_coin)?; + async fn is_initialized_for(&self) -> Result { + let coin = self.ticker.clone(); + let block_headers_cache_table = get_table_name_and_validate(&coin)?; let selfi = self.clone(); async_blocking(move || { - let conn = selfi.0.lock().unwrap(); + let conn = selfi.conn.lock().unwrap(); let cache_initialized = query_single_row( &conn, CHECK_TABLE_EXISTS_SQL, @@ -130,17 +146,16 @@ impl BlockHeaderStorageOps for SqliteBlockHeadersStorage { async fn add_block_headers_to_storage( &self, - for_coin: &str, headers: HashMap, ) -> Result<(), BlockHeaderStorageError> { - let for_coin = for_coin.to_owned(); + let coin = self.ticker.clone(); let selfi = self.clone(); async_blocking(move || { - let mut conn = selfi.0.lock().unwrap(); + let mut conn = selfi.conn.lock().unwrap(); let sql_transaction = conn .transaction() .map_err(|e| BlockHeaderStorageError::AddToStorageError { - coin: for_coin.to_string(), + coin: coin.clone(), reason: e.to_string(), })?; @@ -156,16 +171,16 @@ impl BlockHeaderStorageOps for SqliteBlockHeadersStorage { &hash as &dyn ToSql, ]; sql_transaction - .execute(&insert_block_header_in_cache_sql(&for_coin)?, block_cache_params) + .execute(&insert_block_header_in_cache_sql(&coin.clone())?, block_cache_params) .map_err(|e| BlockHeaderStorageError::AddToStorageError { - coin: for_coin.to_string(), + coin: coin.clone(), reason: e.to_string(), })?; } sql_transaction .commit() .map_err(|e| BlockHeaderStorageError::AddToStorageError { - coin: for_coin.to_string(), + coin: coin.clone(), reason: e.to_string(), })?; Ok(()) @@ -173,17 +188,19 @@ impl BlockHeaderStorageOps for SqliteBlockHeadersStorage { .await } - async fn get_block_header( - &self, - for_coin: &str, - height: u64, - ) -> Result, BlockHeaderStorageError> { - if let Some(header_raw) = self.get_block_header_raw(for_coin, height).await? 
{ + async fn get_block_header(&self, height: u64) -> Result, BlockHeaderStorageError> { + let coin = self.ticker.clone(); + if let Some(header_raw) = self.get_block_header_raw(height).await? { + let serialized = &hex::decode(header_raw).map_err(|e| BlockHeaderStorageError::DecodeError { + coin: coin.clone(), + reason: e.to_string(), + })?; + let mut reader = Reader::new_with_coin_variant(serialized, coin.as_str().into()); let header: BlockHeader = - header_raw - .try_into() + reader + .read() .map_err(|e: serialization::Error| BlockHeaderStorageError::DecodeError { - coin: for_coin.to_string(), + coin, reason: e.to_string(), })?; return Ok(Some(header)); @@ -191,40 +208,57 @@ impl BlockHeaderStorageOps for SqliteBlockHeadersStorage { Ok(None) } - async fn get_block_header_raw( - &self, - for_coin: &str, - height: u64, - ) -> Result, BlockHeaderStorageError> { + async fn get_block_header_raw(&self, height: u64) -> Result, BlockHeaderStorageError> { + let coin = self.ticker.clone(); let params = [height as i64]; - let sql = get_block_header_by_height(for_coin)?; + let sql = get_block_header_by_height(&coin)?; let selfi = self.clone(); async_blocking(move || { - let conn = selfi.0.lock().unwrap(); + let conn = selfi.conn.lock().unwrap(); query_single_row(&conn, &sql, params, string_from_row) }) .await .map_err(|e| BlockHeaderStorageError::GetFromStorageError { - coin: for_coin.to_string(), + coin, reason: e.to_string(), }) } - async fn get_last_block_header_with_non_max_bits( - &self, - for_coin: &str, - ) -> Result, BlockHeaderStorageError> { - let sql = get_last_block_header_with_non_max_bits_sql(for_coin)?; + async fn get_last_block_height(&self) -> Result { + let coin = self.ticker.clone(); + let sql = get_last_block_height_sql(&coin)?; + let selfi = self.clone(); + + async_blocking(move || { + let conn = selfi.conn.lock().unwrap(); + query_single_row(&conn, &sql, NO_PARAMS, |row| row.get(0)) + }) + .await + .map_err(|e| 
BlockHeaderStorageError::GetFromStorageError { + coin: coin.clone(), + reason: e.to_string(), + })? + .unwrap_or(0i64) + .try_into() + .map_err(|e: TryFromIntError| BlockHeaderStorageError::DecodeError { + coin, + reason: e.to_string(), + }) // last_block_height is 0 if the database is empty + } + + async fn get_last_block_header_with_non_max_bits(&self) -> Result, BlockHeaderStorageError> { + let coin = self.ticker.clone(); + let sql = get_last_block_header_with_non_max_bits_sql(&coin)?; let selfi = self.clone(); let maybe_header_raw = async_blocking(move || { - let conn = selfi.0.lock().unwrap(); + let conn = selfi.conn.lock().unwrap(); query_single_row(&conn, &sql, NO_PARAMS, string_from_row) }) .await .map_err(|e| BlockHeaderStorageError::GetFromStorageError { - coin: for_coin.to_string(), + coin: coin.clone(), reason: e.to_string(), })?; @@ -233,7 +267,7 @@ impl BlockHeaderStorageOps for SqliteBlockHeadersStorage { header_raw .try_into() .map_err(|e: serialization::Error| BlockHeaderStorageError::DecodeError { - coin: for_coin.to_string(), + coin, reason: e.to_string(), })?; return Ok(Some(header)); @@ -241,22 +275,19 @@ impl BlockHeaderStorageOps for SqliteBlockHeadersStorage { Ok(None) } - async fn get_block_height_by_hash( - &self, - for_coin: &str, - hash: H256, - ) -> Result, BlockHeaderStorageError> { + async fn get_block_height_by_hash(&self, hash: H256) -> Result, BlockHeaderStorageError> { + let coin = self.ticker.clone(); let params = [hash.to_string()]; - let sql = get_block_height_by_hash(for_coin)?; + let sql = get_block_height_by_hash(&coin)?; let selfi = self.clone(); async_blocking(move || { - let conn = selfi.0.lock().unwrap(); + let conn = selfi.conn.lock().unwrap(); query_single_row(&conn, &sql, params, |row| row.get(0)) }) .await .map_err(|e| BlockHeaderStorageError::GetFromStorageError { - coin: for_coin.to_string(), + coin, reason: e.to_string(), }) } @@ -264,14 +295,17 @@ impl BlockHeaderStorageOps for SqliteBlockHeadersStorage { 
#[cfg(test)] impl SqliteBlockHeadersStorage { - pub fn in_memory() -> Self { - SqliteBlockHeadersStorage(Arc::new(Mutex::new(Connection::open_in_memory().unwrap()))) + pub fn in_memory(ticker: String) -> Self { + SqliteBlockHeadersStorage { + ticker, + conn: Arc::new(Mutex::new(Connection::open_in_memory().unwrap())), + } } fn is_table_empty(&self, table_name: &str) -> bool { validate_table_name(table_name).unwrap(); let sql = "SELECT COUNT(block_height) FROM ".to_owned() + table_name + ";"; - let conn = self.0.lock().unwrap(); + let conn = self.conn.lock().unwrap(); let rows_count: u32 = conn.query_row(&sql, NO_PARAMS, |row| row.get(0)).unwrap(); rows_count == 0 } @@ -287,75 +321,71 @@ mod sql_block_headers_storage_tests { #[test] fn test_init_collection() { let for_coin = "init_collection"; - let storage = SqliteBlockHeadersStorage::in_memory(); - let initialized = block_on(storage.is_initialized_for(for_coin)).unwrap(); + let storage = SqliteBlockHeadersStorage::in_memory(for_coin.into()); + let initialized = block_on(storage.is_initialized_for()).unwrap(); assert!(!initialized); - block_on(storage.init(for_coin)).unwrap(); + block_on(storage.init()).unwrap(); // repetitive init must not fail - block_on(storage.init(for_coin)).unwrap(); + block_on(storage.init()).unwrap(); - let initialized = block_on(storage.is_initialized_for(for_coin)).unwrap(); + let initialized = block_on(storage.is_initialized_for()).unwrap(); assert!(initialized); } #[test] fn test_add_block_headers() { let for_coin = "insert"; - let storage = SqliteBlockHeadersStorage::in_memory(); + let storage = SqliteBlockHeadersStorage::in_memory(for_coin.into()); let table = block_headers_cache_table(for_coin); - block_on(storage.init(for_coin)).unwrap(); + block_on(storage.init()).unwrap(); - let initialized = block_on(storage.is_initialized_for(for_coin)).unwrap(); + let initialized = block_on(storage.is_initialized_for()).unwrap(); assert!(initialized); let mut headers = 
HashMap::with_capacity(1); let block_header: BlockHeader = "0000002076d41d3e4b0bfd4c0d3b30aa69fdff3ed35d85829efd04000000000000000000b386498b583390959d9bac72346986e3015e83ac0b54bc7747a11a494ac35c94bb3ce65a53fb45177f7e311c".into(); headers.insert(520481, block_header); - block_on(storage.add_block_headers_to_storage(for_coin, headers)).unwrap(); + block_on(storage.add_block_headers_to_storage(headers)).unwrap(); assert!(!storage.is_table_empty(&table)); } #[test] fn test_get_block_header() { let for_coin = "get"; - let storage = SqliteBlockHeadersStorage::in_memory(); + let storage = SqliteBlockHeadersStorage::in_memory(for_coin.into()); let table = block_headers_cache_table(for_coin); - block_on(storage.init(for_coin)).unwrap(); + block_on(storage.init()).unwrap(); - let initialized = block_on(storage.is_initialized_for(for_coin)).unwrap(); + let initialized = block_on(storage.is_initialized_for()).unwrap(); assert!(initialized); let mut headers = HashMap::with_capacity(1); let block_header: BlockHeader = "0000002076d41d3e4b0bfd4c0d3b30aa69fdff3ed35d85829efd04000000000000000000b386498b583390959d9bac72346986e3015e83ac0b54bc7747a11a494ac35c94bb3ce65a53fb45177f7e311c".into(); headers.insert(520481, block_header); - block_on(storage.add_block_headers_to_storage(for_coin, headers)).unwrap(); + block_on(storage.add_block_headers_to_storage(headers)).unwrap(); assert!(!storage.is_table_empty(&table)); - let hex = block_on(storage.get_block_header_raw(for_coin, 520481)) - .unwrap() - .unwrap(); + let hex = block_on(storage.get_block_header_raw(520481)).unwrap().unwrap(); assert_eq!(hex, "0000002076d41d3e4b0bfd4c0d3b30aa69fdff3ed35d85829efd04000000000000000000b386498b583390959d9bac72346986e3015e83ac0b54bc7747a11a494ac35c94bb3ce65a53fb45177f7e311c".to_string()); - let block_header = block_on(storage.get_block_header(for_coin, 520481)).unwrap().unwrap(); + let block_header = block_on(storage.get_block_header(520481)).unwrap().unwrap(); let block_hash: H256 = 
"0000000000000000002e31d0714a5ab23100945ff87ba2d856cd566a3c9344ec".into(); assert_eq!(block_header.hash(), block_hash.reversed()); - let height = block_on(storage.get_block_height_by_hash(for_coin, block_hash)) - .unwrap() - .unwrap(); + let height = block_on(storage.get_block_height_by_hash(block_hash)).unwrap().unwrap(); assert_eq!(height, 520481); } #[test] fn test_get_last_block_header_with_non_max_bits() { let for_coin = "get"; - let storage = SqliteBlockHeadersStorage::in_memory(); + let storage = SqliteBlockHeadersStorage::in_memory(for_coin.into()); let table = block_headers_cache_table(for_coin); - block_on(storage.init(for_coin)).unwrap(); + block_on(storage.init()).unwrap(); - let initialized = block_on(storage.is_initialized_for(for_coin)).unwrap(); + let initialized = block_on(storage.is_initialized_for()).unwrap(); assert!(initialized); let mut headers = HashMap::with_capacity(2); @@ -374,13 +404,44 @@ mod sql_block_headers_storage_tests { let block_header: BlockHeader = "020000001f38c8e30b30af912fbd4c3e781506713cfb43e73dff6250348e060000000000afa8f3eede276ccb4c4ee649ad9823fc181632f262848ca330733e7e7e541beb9be51353ffff001d00a63037".into(); headers.insert(201593, block_header); - block_on(storage.add_block_headers_to_storage(for_coin, headers)).unwrap(); + block_on(storage.add_block_headers_to_storage(headers)).unwrap(); assert!(!storage.is_table_empty(&table)); - let actual_block_header = block_on(storage.get_last_block_header_with_non_max_bits(for_coin)) + let actual_block_header = block_on(storage.get_last_block_header_with_non_max_bits()) .unwrap() .unwrap(); assert_ne!(actual_block_header.bits, BlockHeaderBits::Compact(MAX_BITS_BTC.into())); assert_eq!(actual_block_header, expected_block_header); } + + #[test] + fn test_get_last_block_height() { + let for_coin = "get"; + let storage = SqliteBlockHeadersStorage::in_memory(for_coin.into()); + let table = block_headers_cache_table(for_coin); + block_on(storage.init()).unwrap(); + + let initialized = 
block_on(storage.is_initialized_for()).unwrap(); + assert!(initialized); + + let mut headers = HashMap::with_capacity(2); + + // https://live.blockcypher.com/btc-testnet/block/00000000961a9d117feb57e516e17217207a849bf6cdfce529f31d9a96053530/ + let block_header: BlockHeader = "02000000ea01a61a2d7420a1b23875e40eb5eb4ca18b378902c8e6384514ad0000000000c0c5a1ae80582b3fe319d8543307fa67befc2a734b8eddb84b1780dfdf11fa2b20e71353ffff001d00805fe0".into(); + headers.insert(201595, block_header); + + // https://live.blockcypher.com/btc-testnet/block/0000000000ad144538e6c80289378ba14cebb50ee47538b2a120742d1aa601ea/ + let block_header: BlockHeader = "02000000cbed7fd98f1f06e85c47e13ff956533642056be45e7e6b532d4d768f00000000f2680982f333fcc9afa7f9a5e2a84dc54b7fe10605cd187362980b3aa882e9683be21353ab80011c813e1fc0".into(); + headers.insert(201594, block_header); + + // https://live.blockcypher.com/btc-testnet/block/0000000000ad144538e6c80289378ba14cebb50ee47538b2a120742d1aa601ea/ + let block_header: BlockHeader = "020000001f38c8e30b30af912fbd4c3e781506713cfb43e73dff6250348e060000000000afa8f3eede276ccb4c4ee649ad9823fc181632f262848ca330733e7e7e541beb9be51353ffff001d00a63037".into(); + headers.insert(201593, block_header); + + block_on(storage.add_block_headers_to_storage(headers)).unwrap(); + assert!(!storage.is_table_empty(&table)); + + let last_block_height = block_on(storage.get_last_block_height()).unwrap(); + assert_eq!(last_block_height, 201595); + } } diff --git a/mm2src/coins/utxo/utxo_standard.rs b/mm2src/coins/utxo/utxo_standard.rs index 87b61ef2b0..42c872ebd7 100644 --- a/mm2src/coins/utxo/utxo_standard.rs +++ b/mm2src/coins/utxo/utxo_standard.rs @@ -22,7 +22,6 @@ use crypto::Bip44Chain; use futures::{FutureExt, TryFutureExt}; use mm2_metrics::MetricsArc; use mm2_number::MmNumber; -use serialization::coin_variant_by_ticker; use utxo_signer::UtxoSignerOps; #[derive(Clone)] @@ -163,8 +162,7 @@ impl UtxoCommonOps for UtxoStandardCoin { } async fn get_current_mtp(&self) -> 
UtxoRpcResult { - let coin_variant = coin_variant_by_ticker(self.ticker()); - utxo_common::get_current_mtp(&self.utxo_arc, coin_variant).await + utxo_common::get_current_mtp(&self.utxo_arc, self.ticker().into()).await } fn is_unspent_mature(&self, output: &RpcTransaction) -> bool { diff --git a/mm2src/coins/utxo/utxo_tests.rs b/mm2src/coins/utxo/utxo_tests.rs index 8705a11fa6..2db59fb042 100644 --- a/mm2src/coins/utxo/utxo_tests.rs +++ b/mm2src/coins/utxo/utxo_tests.rs @@ -13,17 +13,20 @@ use crate::utxo::rpc_clients::{BlockHashOrHeight, ElectrumBalance, ElectrumClien use crate::utxo::spv::SimplePaymentVerification; use crate::utxo::tx_cache::dummy_tx_cache::DummyVerboseCache; use crate::utxo::tx_cache::UtxoVerboseCacheOps; +use crate::utxo::utxo_block_header_storage::BlockHeaderStorage; use crate::utxo::utxo_builder::{UtxoArcBuilder, UtxoCoinBuilderCommonOps}; use crate::utxo::utxo_common::UtxoTxBuilder; use crate::utxo::utxo_common_tests; +use crate::utxo::utxo_sql_block_header_storage::SqliteBlockHeadersStorage; use crate::utxo::utxo_standard::{utxo_standard_coin_with_priv_key, UtxoStandardCoin}; #[cfg(not(target_arch = "wasm32"))] use crate::WithdrawFee; use crate::{CoinBalance, PrivKeyBuildPolicy, SearchForSwapTxSpendInput, StakingInfosDetails, SwapOps, TradePreimageValue, TxFeeDetails, TxMarshalingErr}; -use chain::OutPoint; +use chain::{BlockHeader, OutPoint}; use common::executor::Timer; use common::{block_on, now_ms, OrdRange, PagingOptionsEnum, DEX_FEE_ADDR_RAW_PUBKEY}; use crypto::{privkey::key_pair_from_seed, Bip44Chain, RpcDerivationPath}; +use db_common::sqlite::rusqlite::Connection; use futures::future::join_all; use futures::TryFutureExt; use mm2_core::mm_ctx::MmCtxBuilder; @@ -32,6 +35,7 @@ use mm2_test_helpers::for_tests::RICK_ELECTRUM_ADDRS; use mocktopus::mocking::*; use rpc::v1::types::H256 as H256Json; use serialization::{deserialize, CoinVariant}; +use spv_validation::storage::BlockHeaderStorageOps; use std::convert::TryFrom; use std::iter; 
use std::mem::discriminant; @@ -66,7 +70,7 @@ pub fn electrum_client_for_test(servers: &[&str]) -> ElectrumClient { }; let servers = servers.into_iter().map(|s| json::from_value(s).unwrap()).collect(); - block_on(builder.electrum_client(args, servers, None)).unwrap() + block_on(builder.electrum_client(args, servers)).unwrap() } /// Returned client won't work by default, requires some mocks to be usable @@ -153,6 +157,7 @@ fn utxo_coin_fields_for_test( estimate_fee_blocks: 1, trezor_coin: None, enable_spv_proof: false, + block_headers_verification_params: None, }, decimals: TEST_COIN_DECIMALS, dust_amount: UTXO_DUST_AMOUNT, @@ -165,6 +170,8 @@ fn utxo_coin_fields_for_test( recently_spent_outpoints: AsyncMutex::new(RecentlySpentOutPoints::new(my_script_pubkey)), tx_hash_algo: TxHashAlgo::DSHA256, check_utxo_maturity: false, + block_headers_status_notifier: None, + block_headers_status_watcher: None, } } @@ -465,7 +472,13 @@ fn test_wait_for_payment_spend_timeout_electrum() { MockResult::Return(Box::new(futures01::future::ok(None))) }); - let client = ElectrumClientImpl::new(TEST_COIN_NAME.into(), Default::default(), None); + let block_headers_storage = BlockHeaderStorage { + inner: Box::new(SqliteBlockHeadersStorage { + ticker: TEST_COIN_NAME.into(), + conn: Arc::new(Mutex::new(Connection::open_in_memory().unwrap())), + }), + }; + let client = ElectrumClientImpl::new(TEST_COIN_NAME.into(), Default::default(), block_headers_storage); let client = UtxoRpcClientEnum::Electrum(ElectrumClient(Arc::new(client))); let coin = utxo_coin_for_test(client, None, false); let transaction = 
hex::decode("01000000000102fff7f7881a8099afa6940d42d1e7f6362bec38171ea3edf433541db4e4ad969f00000000494830450221008b9d1dc26ba6a9cb62127b02742fa9d754cd3bebf337f7a55d114c8e5cdd30be022040529b194ba3f9281a99f2b1c0a19c0489bc22ede944ccf4ecbab4cc618ef3ed01eeffffffef51e1b804cc89d182d279655c3aa89e815b1b309fe287d9b2b55d57b90ec68a0100000000ffffffff02202cb206000000001976a9148280b37df378db99f66f85c95a783a76ac7a6d5988ac9093510d000000001976a9143bde42dbee7e4dbe6a21b2d50ce2f0167faa815988ac000247304402203609e17b84f6a7d30c80bfa610b5b4542f32a8a0d5447a12fb1366d7f01cc44a0220573a954c4518331561406f90300e8f3358f51928d43c212a8caed02de67eebee0121025476c2e83188368da1ff3e292e7acafcdb3566bb0ad253f62fc70f07aeee635711000000") @@ -957,6 +970,18 @@ fn test_spv_proof() { let tx_str = "0400008085202f8902bf17bf7d1daace52e08f732a6b8771743ca4b1cb765a187e72fd091a0aabfd52000000006a47304402203eaaa3c4da101240f80f9c5e9de716a22b1ec6d66080de6a0cca32011cd77223022040d9082b6242d6acf9a1a8e658779e1c655d708379862f235e8ba7b8ca4e69c6012102031d4256c4bc9f99ac88bf3dba21773132281f65f9bf23a59928bce08961e2f3ffffffffff023ca13c0e9e085dd13f481f193e8a3e8fd609020936e98b5587342d994f4d020000006b483045022100c0ba56adb8de923975052312467347d83238bd8d480ce66e8b709a7997373994022048507bcac921fdb2302fa5224ce86e41b7efc1a2e20ae63aa738dfa99b7be826012102031d4256c4bc9f99ac88bf3dba21773132281f65f9bf23a59928bce08961e2f3ffffffff0300e1f5050000000017a9141ee6d4c38a3c078eab87ad1a5e4b00f21259b10d870000000000000000166a1400000000000000000000000000000000000000001b94d736000000001976a91405aab5342166f8594baf17a7d9bef5d56744332788ac2d08e35e000000000000000000000000000000"; let tx: UtxoTx = tx_str.into(); + let header: BlockHeader = deserialize( + block_on(client.blockchain_block_header(452248).compat()) + .unwrap() + .as_slice(), + ) + .unwrap(); + + let mut headers = HashMap::new(); + headers.insert(452248, header); + let storage = client.block_headers_storage(); + block_on(storage.add_block_headers_to_storage(headers)).unwrap(); + let res = 
block_on(client.validate_spv_proof(&tx, now_ms() / 1000 + 30)); res.unwrap(); } @@ -1464,12 +1489,12 @@ fn test_network_info_negative_time_offset() { #[test] fn test_unavailable_electrum_proto_version() { - ElectrumClientImpl::new.mock_safe(|coin_ticker, event_handlers, _| { + ElectrumClientImpl::new.mock_safe(|coin_ticker, event_handlers, block_headers_storage| { MockResult::Return(ElectrumClientImpl::with_protocol_version( coin_ticker, event_handlers, OrdRange::new(1.8, 1.9).unwrap(), - None, + block_headers_storage, )) }); diff --git a/mm2src/coins/utxo/utxo_wasm_tests.rs b/mm2src/coins/utxo/utxo_wasm_tests.rs index 20c057c01b..9ba8949500 100644 --- a/mm2src/coins/utxo/utxo_wasm_tests.rs +++ b/mm2src/coins/utxo/utxo_wasm_tests.rs @@ -1,7 +1,9 @@ use super::rpc_clients::{ElectrumClient, ElectrumClientImpl, ElectrumProtocol}; use super::*; use crate::utxo::rpc_clients::UtxoRpcClientOps; +use crate::utxo::utxo_block_header_storage::BlockHeaderStorage; use crate::utxo::utxo_common_tests; +use crate::utxo::utxo_indexedb_block_header_storage::IndexedDBBlockHeadersStorage; use common::executor::Timer; use serialization::deserialize; use wasm_bindgen_test::*; @@ -11,7 +13,10 @@ wasm_bindgen_test_configure!(run_in_browser); const TEST_COIN_NAME: &'static str = "RICK"; pub async fn electrum_client_for_test(servers: &[&str]) -> ElectrumClient { - let client = ElectrumClientImpl::new(TEST_COIN_NAME.into(), Default::default(), None); + let block_headers_storage = BlockHeaderStorage { + inner: Box::new(IndexedDBBlockHeadersStorage {}), + }; + let client = ElectrumClientImpl::new(TEST_COIN_NAME.into(), Default::default(), block_headers_storage); for server in servers { client .add_server(&ElectrumRpcRequest { diff --git a/mm2src/coins/z_coin.rs b/mm2src/coins/z_coin.rs index 853e179c25..17f3fd617b 100644 --- a/mm2src/coins/z_coin.rs +++ b/mm2src/coins/z_coin.rs @@ -841,8 +841,6 @@ impl<'a> ZCoinBuilder<'a> { ZcoinRpcMode::Native => UtxoRpcMode::Native, ZcoinRpcMode::Light { 
electrum_servers, .. } => UtxoRpcMode::Electrum { servers: electrum_servers.clone(), - // TODO: Implement spv validation for zcoin - block_header_params: None, }, }; let utxo_params = UtxoActivationParams { diff --git a/mm2src/coins_activation/src/lightning_activation.rs b/mm2src/coins_activation/src/lightning_activation.rs index 5572755d09..eadbb192d3 100644 --- a/mm2src/coins_activation/src/lightning_activation.rs +++ b/mm2src/coins_activation/src/lightning_activation.rs @@ -38,11 +38,11 @@ impl TryFromCoinProtocol for LightningProtocolConf { CoinProtocol::LIGHTNING { platform, network, - confirmations, + confirmation_targets, } => Ok(LightningProtocolConf { platform_coin_ticker: platform, network, - confirmations, + confirmation_targets, }), proto => MmError::err(proto), } diff --git a/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs b/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs index 26cbd372c2..e70e9bd1f1 100644 --- a/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs +++ b/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_activation.rs @@ -10,9 +10,10 @@ use crate::utxo_activation::utxo_standard_activation_result::UtxoStandardActivat use async_trait::async_trait; use coins::utxo::utxo_builder::{UtxoArcBuilder, UtxoCoinBuilder}; use coins::utxo::utxo_standard::UtxoStandardCoin; -use coins::utxo::UtxoActivationParams; +use coins::utxo::{UtxoActivationParams, UtxoSyncStatus}; use coins::CoinProtocol; use crypto::CryptoCtx; +use futures::StreamExt; use mm2_core::mm_ctx::MmArc; use mm2_err_handle::prelude::*; use serde_json::Value as Json; @@ -54,7 +55,7 @@ impl InitStandaloneCoinActivationOps for UtxoStandardCoin { coin_conf: Json, activation_request: &Self::ActivationRequest, _protocol_info: Self::StandaloneProtocol, - _task_handle: &UtxoStandardRpcTaskHandle, + task_handle: &UtxoStandardRpcTaskHandle, ) -> MmResult { let crypto_ctx = CryptoCtx::from_ctx(&ctx)?; 
let priv_key_policy = priv_key_build_policy(&crypto_ctx, activation_request.priv_key_policy); @@ -70,6 +71,39 @@ impl InitStandaloneCoinActivationOps for UtxoStandardCoin { .build() .await .mm_err(|e| InitUtxoStandardError::from_build_err(e, ticker.clone()))?; + + if let Some(sync_watcher_mutex) = &coin.as_ref().block_headers_status_watcher { + let mut sync_watcher = sync_watcher_mutex.lock().await; + loop { + let in_progress_status = + match sync_watcher + .next() + .await + .ok_or(InitUtxoStandardError::CoinCreationError { + ticker: ticker.clone(), + error: "Error waiting for block headers synchronization status!".into(), + })? { + UtxoSyncStatus::SyncingBlockHeaders { + current_scanned_block, + last_block, + } => UtxoStandardInProgressStatus::SyncingBlockHeaders { + current_scanned_block, + last_block, + }, + UtxoSyncStatus::TemporaryError(e) => UtxoStandardInProgressStatus::TemporaryError(e), + UtxoSyncStatus::PermanentError(e) => { + return Err(InitUtxoStandardError::CoinCreationError { + ticker: ticker.clone(), + error: e, + } + .into()) + }, + UtxoSyncStatus::Finished { .. 
} => break, + }; + task_handle.update_in_progress_status(in_progress_status)?; + } + } + Ok(coin) } diff --git a/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_statuses.rs b/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_statuses.rs index 92ab03da56..daf1692b42 100644 --- a/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_statuses.rs +++ b/mm2src/coins_activation/src/utxo_activation/init_utxo_standard_statuses.rs @@ -8,6 +8,11 @@ pub type UtxoStandardUserAction = HwRpcTaskUserAction; #[derive(Clone, Serialize)] pub enum UtxoStandardInProgressStatus { ActivatingCoin, + SyncingBlockHeaders { + current_scanned_block: u64, + last_block: u64, + }, + TemporaryError(String), RequestingWalletBalance, Finishing, /// This status doesn't require the user to send `UserAction`, diff --git a/mm2src/common/Cargo.toml b/mm2src/common/Cargo.toml index 0ed80ea559..6697353f02 100644 --- a/mm2src/common/Cargo.toml +++ b/mm2src/common/Cargo.toml @@ -28,7 +28,6 @@ http = "0.2" http-body = "0.1" itertools = "0.10" lazy_static = "1.4" -lightning = { git = "https://github.com/shamardy/rust-lightning", branch = "0.0.106" } log = "0.4.8" parking_lot = { version = "0.12.0", features = ["nightly"] } parking_lot_core = { version = "0.6", features = ["nightly"] } @@ -64,6 +63,7 @@ hyper = { version = "0.14.11", features = ["client", "http2", "server", "tcp"] } # got "invalid certificate: UnknownIssuer" for https://ropsten.infura.io on iOS using default-features hyper-rustls = { version = "0.23", default-features = false, features = ["http1", "http2", "webpki-tokio"] } libc = { version = "0.2" } +lightning = "0.0.110" log4rs = { version = "1.0", default-features = false, features = ["console_appender", "pattern_encoder"] } tokio = { version = "1.7", features = ["io-util", "rt-multi-thread", "net"] } diff --git a/mm2src/mm2_bitcoin/chain/Cargo.toml b/mm2src/mm2_bitcoin/chain/Cargo.toml index 4263b584cb..d029c0f8fb 100644 --- 
a/mm2src/mm2_bitcoin/chain/Cargo.toml +++ b/mm2src/mm2_bitcoin/chain/Cargo.toml @@ -5,8 +5,10 @@ authors = ["debris "] [dependencies] rustc-hex = "2" -bitcoin = "0.27.1" bitcrypto = { path = "../crypto" } primitives = { path = "../primitives" } serialization = { path = "../serialization" } serialization_derive = { path = "../serialization_derive" } + +[target.'cfg(not(target_arch = "wasm32"))'.dependencies] +bitcoin = "0.28.1" diff --git a/mm2src/mm2_bitcoin/chain/src/block_header.rs b/mm2src/mm2_bitcoin/chain/src/block_header.rs index 5ca9238f8d..86f9f35c58 100644 --- a/mm2src/mm2_bitcoin/chain/src/block_header.rs +++ b/mm2src/mm2_bitcoin/chain/src/block_header.rs @@ -1,6 +1,8 @@ use compact::Compact; use crypto::dhash256; +#[cfg(not(target_arch = "wasm32"))] use ext_bitcoin::blockdata::block::BlockHeader as ExtBlockHeader; +#[cfg(not(target_arch = "wasm32"))] use ext_bitcoin::hash_types::{BlockHash as ExtBlockHash, TxMerkleNode as ExtTxMerkleNode}; use hash::H256; use hex::FromHex; @@ -164,7 +166,8 @@ impl Serializable for BlockHeader { }; s.append(&self.time); s.append(&self.bits); - if !self.is_prog_pow() && self.version != KAWPOW_VERSION { + // If a BTC header uses KAWPOW_VERSION, the nonce can't be zero + if !self.is_prog_pow() && (self.version != KAWPOW_VERSION || self.nonce != BlockHeaderNonce::U32(0)) { s.append(&self.nonce); } if let Some(sol) = &self.solution { @@ -232,21 +235,31 @@ impl Deserializable for BlockHeader { None }; - let hash_final_sapling_root = if version == 4 { Some(reader.read()?) } else { None }; + let hash_final_sapling_root = if version == 4 && !reader.coin_variant().is_btc() { + Some(reader.read()?) + } else { + None + }; let time = reader.read()?; - let bits = if version == 4 { + let bits = if version == 4 && !reader.coin_variant().is_btc() { BlockHeaderBits::U32(reader.read()?) } else { BlockHeaderBits::Compact(reader.read()?) 
}; - let nonce = if version == 4 { + let nonce = if version == 4 && !reader.coin_variant().is_btc() { BlockHeaderNonce::H256(reader.read()?) - } else if version == KAWPOW_VERSION || version == MTP_POW_VERSION && time >= PROG_POW_SWITCH_TIME { + } else if (version == KAWPOW_VERSION && !reader.coin_variant().is_btc()) + || version == MTP_POW_VERSION && time >= PROG_POW_SWITCH_TIME + { BlockHeaderNonce::U32(0) } else { BlockHeaderNonce::U32(reader.read()?) }; - let solution = if version == 4 { Some(reader.read_list()?) } else { None }; + let solution = if version == 4 && !reader.coin_variant().is_btc() { + Some(reader.read_list()?) + } else { + None + }; // https://en.bitcoin.it/wiki/Merged_mining_specification#Merged_mining_coinbase let aux_pow = if matches!( @@ -294,7 +307,7 @@ impl Deserializable for BlockHeader { }; // https://github.com/RavenProject/Ravencoin/blob/61c790447a5afe150d9892705ac421d595a2df60/src/primitives/block.h#L67 - let (n_height, n_nonce_u64, mix_hash) = if version == KAWPOW_VERSION { + let (n_height, n_nonce_u64, mix_hash) = if version == KAWPOW_VERSION && !reader.coin_variant().is_btc() { (Some(reader.read()?), Some(reader.read()?), Some(reader.read()?)) } else { (None, None, None) @@ -352,6 +365,7 @@ impl TryFrom for BlockHeader { } } +#[cfg(not(target_arch = "wasm32"))] impl From for ExtBlockHeader { fn from(header: BlockHeader) -> Self { let prev_blockhash = ExtBlockHash::from_hash(header.previous_header_hash.to_sha256d()); @@ -2464,6 +2478,30 @@ mod tests { assert_eq!(serialized.take(), headers_bytes); } + #[test] + fn test_btc_v4_block_headers_serde_11() { + // https://live.blockcypher.com/btc/block/0000000000000000097336f8439779072501753e2f48b8798c66188139f2d9cf/ + let header = "04000000462a79dfa51b541648ee55df74cdc14b9ea7feb932e912060000000000000000374c1707a72691be50070bc5029d586e9200d672c6c3dfd29d267bf6b2b01b9e0ace395654a91118923bd9d5"; + let header_bytes = &header.from_hex::>().unwrap() as &[u8]; + let mut reader = 
Reader::new_with_coin_variant(header_bytes, CoinVariant::BTC); + let header = reader.read::().unwrap(); + assert_eq!(header.version, 4); + let serialized = serialize(&header); + assert_eq!(serialized.take(), header_bytes); + } + + #[test] + fn test_btc_kow_pow_version_block_headers_serde_11() { + // https://live.blockcypher.com/btc/block/000000000000000006e35d6675fb0fec767a5f3b346261a5160f6e2a8d258070/ + let header = "00000030af7e7389ca428b05d8902fcdc148e70974524d39cb56bc0100000000000000007ce0cd0c9c648d1b585d29b9ab23ebc987619d43925b3c768d7cb4bc097cfb821441c05614a107187aef1ee1"; + let header_bytes = &header.from_hex::>().unwrap() as &[u8]; + let mut reader = Reader::new_with_coin_variant(header_bytes, CoinVariant::BTC); + let header = reader.read::().unwrap(); + assert_eq!(header.version, KAWPOW_VERSION); + let serialized = serialize(&header); + assert_eq!(serialized.take(), header_bytes); + } + #[test] fn test_from_blockheader_to_ext_blockheader() { // https://live.blockcypher.com/btc/block/00000000000000000020cf2bdc6563fb25c424af588d5fb7223461e72715e4a9/ diff --git a/mm2src/mm2_bitcoin/chain/src/lib.rs b/mm2src/mm2_bitcoin/chain/src/lib.rs index b3f474fd49..58133355bf 100644 --- a/mm2src/mm2_bitcoin/chain/src/lib.rs +++ b/mm2src/mm2_bitcoin/chain/src/lib.rs @@ -1,3 +1,4 @@ +#[cfg(not(target_arch = "wasm32"))] extern crate bitcoin as ext_bitcoin; extern crate bitcrypto as crypto; extern crate primitives; diff --git a/mm2src/mm2_bitcoin/chain/src/transaction.rs b/mm2src/mm2_bitcoin/chain/src/transaction.rs index e2585275e2..8d250a583a 100644 --- a/mm2src/mm2_bitcoin/chain/src/transaction.rs +++ b/mm2src/mm2_bitcoin/chain/src/transaction.rs @@ -4,8 +4,11 @@ use bytes::Bytes; use constants::{LOCKTIME_THRESHOLD, SEQUENCE_FINAL}; use crypto::{dhash256, sha256}; +#[cfg(not(target_arch = "wasm32"))] use ext_bitcoin::blockdata::transaction::{OutPoint as ExtOutpoint, Transaction as ExtTransaction, TxIn, TxOut}; +#[cfg(not(target_arch = "wasm32"))] use 
ext_bitcoin::hash_types::Txid; +#[cfg(not(target_arch = "wasm32"))] use ext_bitcoin::Witness; use hash::{CipherText, EncCipherText, OutCipherText, ZkProof, ZkProofSapling, H256, H512, H64}; use hex::FromHex; use ser::{deserialize, serialize, serialize_with_flags, SERIALIZE_TRANSACTION_WITNESS}; @@ -37,6 +40,7 @@ impl OutPoint { pub fn is_null(&self) -> bool { self.hash.is_zero() && self.index == u32::MAX } } +#[cfg(not(target_arch = "wasm32"))] impl From for ExtOutpoint { fn from(outpoint: OutPoint) -> Self { ExtOutpoint { @@ -69,13 +73,14 @@ impl TransactionInput { pub fn has_witness(&self) -> bool { !self.script_witness.is_empty() } } +#[cfg(not(target_arch = "wasm32"))] impl From for TxIn { fn from(txin: TransactionInput) -> Self { TxIn { previous_output: txin.previous_output.into(), script_sig: txin.script_sig.take().into(), sequence: txin.sequence, - witness: txin.script_witness.into_iter().map(|s| s.take()).collect(), + witness: Witness::from_vec(txin.script_witness.into_iter().map(|s| s.take()).collect()), } } } @@ -95,6 +100,7 @@ impl Default for TransactionOutput { } } +#[cfg(not(target_arch = "wasm32"))] impl From for TxOut { fn from(txout: TransactionOutput) -> Self { TxOut { @@ -226,6 +232,7 @@ impl From<&'static str> for Transaction { fn from(s: &'static str) -> Self { deserialize(&s.from_hex::>().unwrap() as &[u8]).unwrap() } } +#[cfg(not(target_arch = "wasm32"))] impl From for ExtTransaction { fn from(tx: Transaction) -> Self { ExtTransaction { diff --git a/mm2src/mm2_bitcoin/keys/Cargo.toml b/mm2src/mm2_bitcoin/keys/Cargo.toml index ad024e60cb..3a5e46f48c 100644 --- a/mm2src/mm2_bitcoin/keys/Cargo.toml +++ b/mm2src/mm2_bitcoin/keys/Cargo.toml @@ -12,6 +12,6 @@ derive_more = "0.99" lazy_static = "1.4" rand = "0.6" primitives = { path = "../primitives" } -secp256k1 = { version = "0.20", features = ["rand"] } +secp256k1 = { version = "0.20", features = ["rand", "recovery"] } serde = { version = "1.0", features = ["derive"] } serde_derive = "1.0" diff 
--git a/mm2src/mm2_bitcoin/serialization/src/lib.rs b/mm2src/mm2_bitcoin/serialization/src/lib.rs index 22e806516d..389c28f072 100644 --- a/mm2src/mm2_bitcoin/serialization/src/lib.rs +++ b/mm2src/mm2_bitcoin/serialization/src/lib.rs @@ -13,7 +13,6 @@ pub use primitives::{bytes, compact, hash}; pub use compact_integer::{parse_compact_int, CompactInteger}; pub use list::List; -pub use reader::{coin_variant_by_ticker, deserialize, deserialize_iterator, CoinVariant, Deserializable, Error, - ReadIterator, Reader}; +pub use reader::{deserialize, deserialize_iterator, CoinVariant, Deserializable, Error, ReadIterator, Reader}; pub use stream::{serialize, serialize_list, serialize_with_flags, serialized_list_size, serialized_list_size_with_flags, Serializable, Stream, SERIALIZE_TRANSACTION_WITNESS}; diff --git a/mm2src/mm2_bitcoin/serialization/src/reader.rs b/mm2src/mm2_bitcoin/serialization/src/reader.rs index 6b9045431d..d4002782c0 100644 --- a/mm2src/mm2_bitcoin/serialization/src/reader.rs +++ b/mm2src/mm2_bitcoin/serialization/src/reader.rs @@ -51,21 +51,32 @@ pub trait Deserializable { #[derive(Debug)] pub enum CoinVariant { + // Todo: https://github.com/KomodoPlatform/atomicDEX-API/issues/1345 + BTC, + Qtum, LBC, Standard, - Qtum, } impl CoinVariant { + pub fn is_btc(&self) -> bool { matches!(self, CoinVariant::BTC) } + pub fn is_qtum(&self) -> bool { matches!(self, CoinVariant::Qtum) } pub fn is_lbc(&self) -> bool { matches!(self, CoinVariant::LBC) } } -pub fn coin_variant_by_ticker(ticker: &str) -> CoinVariant { - match ticker { - "LBC" => CoinVariant::LBC, - _ => CoinVariant::Standard, +impl From<&str> for CoinVariant { + fn from(ticker: &str) -> Self { + match ticker { + // "BTC", "BTC-segwit", "tBTC", "tBTC-segwit", etc.. + t if t == "BTC" || t.contains("BTC-") || t.contains("BTC_") => CoinVariant::BTC, + // "QTUM", "QTUM-segwit", "tQTUM", "tQTUM-segwit", etc.. 
+ t if t == "QTUM" || t.contains("QTUM-") || t.contains("QTUM_") => CoinVariant::Qtum, + // "LBC", "LBC-segwit", etc.. + t if t == "LBC" || t.contains("LBC-") || t.contains("LBC_") => CoinVariant::LBC, + _ => CoinVariant::Standard, + } } } diff --git a/mm2src/mm2_bitcoin/spv_validation/Cargo.toml b/mm2src/mm2_bitcoin/spv_validation/Cargo.toml index 6d3a5e96d6..97cb21d765 100644 --- a/mm2src/mm2_bitcoin/spv_validation/Cargo.toml +++ b/mm2src/mm2_bitcoin/spv_validation/Cargo.toml @@ -12,6 +12,7 @@ keys = {path = "../keys"} primitives = { path = "../primitives" } ripemd160 = "0.9.0" rustc-hex = "2" +serde = "1.0" serialization = { path = "../serialization" } sha2 = "0.9" test_helpers = { path = "../test_helpers" } diff --git a/mm2src/mm2_bitcoin/spv_validation/src/for_tests/workTestVectors.json b/mm2src/mm2_bitcoin/spv_validation/src/for_tests/workTestVectors.json index 764b633307..28bf5bec75 100644 --- a/mm2src/mm2_bitcoin/spv_validation/src/for_tests/workTestVectors.json +++ b/mm2src/mm2_bitcoin/spv_validation/src/for_tests/workTestVectors.json @@ -7,6 +7,10 @@ { "height": 604800, "hex": "000000208e244d2c55bc403caa5d6eaf0f922170e413eb1e02fb02000000000000000000e03b4d9df72d8db232a20bb2ff35c433a99f1467f391f75b5f62180d96f06d6aa4c4d65d3eb215179ef91633" + }, + { + "height": 724608, + "hex": "00c0d933bd3c3dcf14027754c7c8f7190a79b699188c8b24f49204000000000000000000ce2b1e4dc6cb44fae781df459428382d4b5be52766f67a42725cc0d78e00784d352e166278100a173d95ee89" } ], "tBTC": [ @@ -26,5 +30,11 @@ "height": 201596, "hex": "02000000303505969a1df329e5fccdf69b847a201772e116e557eb7f119d1a9600000000469267f52f43b8799e72f0726ba2e56432059a8ad02b84d4fff84b9476e95f7716e41353ab80011c168cb471" } + ], + "MORTY": [ + { + "height": 1330480, + "hex": 
"04000000bb496ba8d09f8f98b15cdaf5798163bdd70676eb1c8b538f53ab4f83da4a27000db352177c6b5ad2499a906cec33b843fb17fc1ec298cd06c7e7ceb7b62e144232d719d14c15e565c05e84ead95a2f101a1b658ee2f36eb7ca65206e27cfca473de614625be6071f09006c286bc5ec73dd27a09bf687700c06fb04d0b9a063c0aa0746c9db170000fd40050053b27dad1f5a858b78f3154039759e985ed57db10ecb772810d7f158c55083a14b9f2ba26ae9fcb82012186e2528f67c45b7b216a69fe26232ad2d179a141b1b10e4d5f108c7b920b49348f6eef2d70b7f02cb01d8d9992f8f2d7b6608806b10ff329846b188de200aa37c73ac03f6c9b79cf5613c71b7969b4abafdbc1165ad955a049269584c83b36f36a3e9becf2fe81f3b1917475eb13ecfed3813ecc32206078d8c1e2797013dfc6f6a55e06f1c06a07959ef94d53ca0fc81d03cb6f614761156ed4ff1a8e5c9f0b96f3c8c3eeb9a0720cf4ed10397330f49b83439c5083eea1d1785a10d86ca2866d0da4ca746c49118b780c55aa6cd5b4c0491cefa258ecf129307d15e001415b203e89c008f4444b236aa556dbf4f6d05e0c57642cfa142df2f8546f1d37a6b2feaf98496892b41caefbe7dc7bcbb2755752df3dbf00ac1fc558896f14541aea4cc78ec5d00bbe5398fac4a658b1ae3399777f15117c0f3de3c63bc5b3edf6543d172cfc66907f9cf8706e97b14281daeb427801dfb0910743873265ae6bae71dbf22353c321f726e68f747965858f488dd507b7e6adee42509e5720373dce5b111b420c906b0f2cb391cfb9d581e2509da3829d6718469f383e07043694db87db0ce1196449a6c9cd941a8bde507e553c0ca534238dcc93633631926102c87cd0f83720ccff60de8b05b103e086a2c2cb7943f21033a5658235fc52708907e1ea722e726808db0270bf898c51e9dd0745614857783dc11a6dcd7760d4a07ddbd83a2e02b23fa789b79eed22dc411b9b48f71c54f12387065e3ff0638701e0f6a0dd56d0ce395d150b237b60c166352e69b92173b884446d7660f5857458b97c6d4ee54f8a1f60113aff30e54c1f7c572b85dcb7a2419d2f736a9b0a6d99ea549bd74e546251c0b8be7975e9a6d96aa3467b1dc6b024745fdef43b37cf21a657a3247d9adf8c252ef210d9a4e9c7191f698ccc9b10103b8bb811cdcf1a62903786476db8195ffb3cd004c57ad07a7a3c41eee391f66a7697e69409d7a78558720f6a1b9804d72de820b7b6165b8e14a2b1316576022423f22bb82fab16127be7173ddcd43fa7ea5c4474f79321a8c4b792caf12320c3047d026b7d63216a022e83655c2d811d2bd2a559970e9155b979953f9801ce918f690f43f5e3f07f7ce27a6837bf33b2490d9add8549f1
e603a750c114bb92740cc3987cb9f948a6229f175a7b577b0b60d885a0a7ef05debe921376a7acdb25eaa8bb72e120e529cd775175012efb454cf41d240a946bf140af20d9a5dbed2e196d91a7ff33c2769f140fa0bb968111e1602221deae8d162e7a471354c2051acb43ec31015aaefa0b08bf1bddbb282e86a1caf45f3b63e4c6427ba9e99aed28ef79711794511511c52daf13b735e02b9833d3467bfd16886606d5555b7cc95ff2fea3b03c82cfe60e8602d9f70a3870f5b755573b955bb300bd3733b5ddf9a61fd3cd281af39520d6dfd8b7e2b165ec91749614a3b5241e2ea12470f91b58cf6163e02dfe79392db70cd17db9497cf59c89ac8377dbd02042f6ed270c8c2bc717623b203b74676890f5f4cd905b25772a25292d76b6f42a094c27eed13793d189e395ed3f28c5731976a7b45184acee45b3cf05a9c62045644dfe39f79cd331e282edae99cea652eb82819415ac2a5c21539cdd636fb835063ace3b6befffaf50bf6866e9b1a2b35037a330faeb18ca1696693dafd26b5f5da8dcd3e50ff09249bdda695f576d25024560b643d873d07293a80fe71998ef6ccd88c0cf9f69326b463c26fe4906faaf454ae68accd7ef3edffefdd2ede23a822a2267332f0791f1c4e6d5ab4661f279f5039b36a4476e56fd5b0461e585ff30a7c661b93f1" + } ] } \ No newline at end of file diff --git a/mm2src/mm2_bitcoin/spv_validation/src/helpers_validation.rs b/mm2src/mm2_bitcoin/spv_validation/src/helpers_validation.rs index 6c3c263960..0c6976ca50 100644 --- a/mm2src/mm2_bitcoin/spv_validation/src/helpers_validation.rs +++ b/mm2src/mm2_bitcoin/spv_validation/src/helpers_validation.rs @@ -1,12 +1,15 @@ -use chain::{BlockHeader, RawBlockHeader, RawHeaderError}; +use crate::storage::{BlockHeaderStorageError, BlockHeaderStorageOps}; +use crate::work::{next_block_bits, DifficultyAlgorithm, NextBlockBitsError}; +use chain::{BlockHeader, RawHeaderError}; use derive_more::Display; use primitives::hash::H256; -use primitives::U256; use ripemd160::Digest; +use serde::Deserialize; use serialization::parse_compact_int; use sha2::Sha256; +use std::convert::TryFrom; -#[derive(Debug, Display, PartialEq, Eq, Clone)] +#[derive(Clone, Debug, Display, Eq, PartialEq)] pub enum SPVError { #[display(fmt = "Overran a checked read on a slice")] ReadOverrun, @@ -14,8 +17,6 @@ 
pub enum SPVError { BadCompactInt, #[display(fmt = "`extract_hash` could not identify the output type")] MalformattedOutput, - #[display(fmt = "Unable to get target from block header")] - UnableToGetTarget, #[display(fmt = "Unable to get block header from network or storage: {}", _0)] UnableToGetHeader(String), #[display(fmt = "Header not exactly 80 bytes")] @@ -24,8 +25,10 @@ pub enum SPVError { UnexpectedDifficultyChange, #[display(fmt = "Header does not meet its own difficulty target")] InsufficientWork, - #[display(fmt = "Header in chain does not correctly reference parent header")] - InvalidChain, + #[display(fmt = "Couldn't calculate the required difficulty for the block: {}", _0)] + DifficultyCalculationError(NextBlockBitsError), + #[display(fmt = "Header {} in chain does not correctly reference parent header", _0)] + InvalidChain(u64), #[display(fmt = "When validating a `BitcoinHeader`, the `hash` field is not the digest of the raw header")] WrongDigest, #[display( @@ -48,6 +51,10 @@ pub enum SPVError { InvalidHeight(String), #[display(fmt = "Raises during validation loop")] Timeout, + #[display(fmt = "Block headers storage error: {}", _0)] + HeaderStorageError(BlockHeaderStorageError), + #[display(fmt = "Internal error: {}", _0)] + Internal(String), } impl From for SPVError { @@ -58,6 +65,14 @@ impl From for SPVError { } } +impl From for SPVError { + fn from(e: NextBlockBitsError) -> Self { SPVError::DifficultyCalculationError(e) } +} + +impl From for SPVError { + fn from(e: BlockHeaderStorageError) -> Self { SPVError::HeaderStorageError(e) } +} + /// A slice of `H256`s for use in a merkle array #[derive(Debug, Clone, PartialEq, Eq)] struct MerkleArray<'a>(&'a [u8]); @@ -291,14 +306,13 @@ pub(crate) fn merkle_prove( fn validate_header_prev_hash(actual: &H256, to_compare_with: &H256) -> bool { actual == to_compare_with } -fn validate_header_work(digest: H256, target: &U256) -> bool { - let empty = H256::default(); - - if digest == empty { - return false; - 
} - - U256::from_little_endian(digest.as_slice()) < *target +/// SPV headers verification parameters +#[derive(Clone, Debug, Deserialize)] +pub struct BlockHeaderVerificationParams { + pub difficulty_check: bool, + pub constant_difficulty: bool, + pub difficulty_algorithm: Option, + pub genesis_block_header: String, } /// Checks validity of header chain. @@ -318,35 +332,64 @@ fn validate_header_work(digest: H256, target: &U256) -> bool { /// /// # Notes /// Wrapper inspired by `bitcoin_spv::validatespv::validate_header_chain` -pub fn validate_headers( +pub async fn validate_headers( + coin: &str, + previous_height: u64, headers: Vec, - difficulty_check: bool, - constant_difficulty: bool, + storage: &dyn BlockHeaderStorageOps, + params: &BlockHeaderVerificationParams, ) -> Result<(), SPVError> { - let mut previous_hash = H256::default(); - let mut target = U256::default(); - for (i, header) in headers.into_iter().enumerate() { - let raw_header = RawBlockHeader::from(header.clone()); - if i == 0 { - target = match header.target() { - Ok(target) => target, - Err(_) => return Err(SPVError::UnableToGetTarget), - }; + let mut previous_height = previous_height; + let mut previous_header = if previous_height == 0 { + BlockHeader::try_from(params.genesis_block_header.clone()).map_err(|e| SPVError::Internal(e.to_string()))? + } else { + storage + .get_block_header(previous_height) + .await? + .ok_or(BlockHeaderStorageError::GetFromStorageError { + coin: coin.to_string(), + reason: format!("Header with height {} is not found in storage", previous_height), + })? 
+ }; + let mut previous_hash = previous_header.hash(); + let mut prev_bits = previous_header.bits.clone(); + for header in headers.into_iter() { + if previous_height == 0 { + // previous_header is genesis header in this case, checking that the first header hash is the same as the genesis header hash is enough + if header.hash() != previous_hash { + return Err(SPVError::InvalidChain(previous_height + 1)); + } + previous_height += 1; + continue; } - let cur_target = match header.target() { - Ok(target) => target, - Err(_) => return Err(SPVError::UnableToGetTarget), - }; - if (!constant_difficulty && difficulty_check) && cur_target != target { + let cur_bits = header.bits.clone(); + if params.constant_difficulty && params.difficulty_check && cur_bits != prev_bits { return Err(SPVError::UnexpectedDifficultyChange); } - if i != 0 && !validate_header_prev_hash(&raw_header.parent(), &previous_hash) { - return Err(SPVError::InvalidChain); + if !validate_header_prev_hash(&header.previous_header_hash, &previous_hash) { + return Err(SPVError::InvalidChain(previous_height + 1)); } - if difficulty_check && !validate_header_work(raw_header.digest(), &target) { - return Err(SPVError::InsufficientWork); + if let Some(algorithm) = ¶ms.difficulty_algorithm { + if !params.constant_difficulty + && params.difficulty_check + && cur_bits + != next_block_bits( + coin, + header.time, + previous_header, + previous_height as u32, + storage, + algorithm, + ) + .await? 
+ { + return Err(SPVError::InsufficientWork); + } } - previous_hash = raw_header.digest(); + prev_bits = cur_bits; + previous_header = header; + previous_hash = previous_header.hash(); + previous_height += 1; } Ok(()) } @@ -357,6 +400,8 @@ mod tests { use super::*; use crate::test_utils::{self}; + use crate::work::tests::TestBlockHeadersStorage; + use common::block_on; use std::{println, vec}; use test_helpers::hex::force_deserialize_hex; @@ -540,13 +585,26 @@ mod tests { #[test] fn test_block_headers_no_difficulty_check() { - // morty: 1330480, 1330481, 1330482 + // morty: 1330481, 1330482 let headers: Vec = vec![ - "04000000bb496ba8d09f8f98b15cdaf5798163bdd70676eb1c8b538f53ab4f83da4a27000db352177c6b5ad2499a906cec33b843fb17fc1ec298cd06c7e7ceb7b62e144232d719d14c15e565c05e84ead95a2f101a1b658ee2f36eb7ca65206e27cfca473de614625be6071f09006c286bc5ec73dd27a09bf687700c06fb04d0b9a063c0aa0746c9db170000fd40050053b27dad1f5a858b78f3154039759e985ed57db10ecb772810d7f158c55083a14b9f2ba26ae9fcb82012186e2528f67c45b7b216a69fe26232ad2d179a141b1b10e4d5f108c7b920b49348f6eef2d70b7f02cb01d8d9992f8f2d7b6608806b10ff329846b188de200aa37c73ac03f6c9b79cf5613c71b7969b4abafdbc1165ad955a049269584c83b36f36a3e9becf2fe81f3b1917475eb13ecfed3813ecc32206078d8c1e2797013dfc6f6a55e06f1c06a07959ef94d53ca0fc81d03cb6f614761156ed4ff1a8e5c9f0b96f3c8c3eeb9a0720cf4ed10397330f49b83439c5083eea1d1785a10d86ca2866d0da4ca746c49118b780c55aa6cd5b4c0491cefa258ecf129307d15e001415b203e89c008f4444b236aa556dbf4f6d05e0c57642cfa142df2f8546f1d37a6b2feaf98496892b41caefbe7dc7bcbb2755752df3dbf00ac1fc558896f14541aea4cc78ec5d00bbe5398fac4a658b1ae3399777f15117c0f3de3c63bc5b3edf6543d172cfc66907f9cf8706e97b14281daeb427801dfb0910743873265ae6bae71dbf22353c321f726e68f747965858f488dd507b7e6adee42509e5720373dce5b111b420c906b0f2cb391cfb9d581e2509da3829d6718469f383e07043694db87db0ce1196449a6c9cd941a8bde507e553c0ca534238dcc93633631926102c87cd0f83720ccff60de8b05b103e086a2c2cb7943f21033a5658235fc52708907e1ea722e726808db0270bf898c51e9dd07456148577
83dc11a6dcd7760d4a07ddbd83a2e02b23fa789b79eed22dc411b9b48f71c54f12387065e3ff0638701e0f6a0dd56d0ce395d150b237b60c166352e69b92173b884446d7660f5857458b97c6d4ee54f8a1f60113aff30e54c1f7c572b85dcb7a2419d2f736a9b0a6d99ea549bd74e546251c0b8be7975e9a6d96aa3467b1dc6b024745fdef43b37cf21a657a3247d9adf8c252ef210d9a4e9c7191f698ccc9b10103b8bb811cdcf1a62903786476db8195ffb3cd004c57ad07a7a3c41eee391f66a7697e69409d7a78558720f6a1b9804d72de820b7b6165b8e14a2b1316576022423f22bb82fab16127be7173ddcd43fa7ea5c4474f79321a8c4b792caf12320c3047d026b7d63216a022e83655c2d811d2bd2a559970e9155b979953f9801ce918f690f43f5e3f07f7ce27a6837bf33b2490d9add8549f1e603a750c114bb92740cc3987cb9f948a6229f175a7b577b0b60d885a0a7ef05debe921376a7acdb25eaa8bb72e120e529cd775175012efb454cf41d240a946bf140af20d9a5dbed2e196d91a7ff33c2769f140fa0bb968111e1602221deae8d162e7a471354c2051acb43ec31015aaefa0b08bf1bddbb282e86a1caf45f3b63e4c6427ba9e99aed28ef79711794511511c52daf13b735e02b9833d3467bfd16886606d5555b7cc95ff2fea3b03c82cfe60e8602d9f70a3870f5b755573b955bb300bd3733b5ddf9a61fd3cd281af39520d6dfd8b7e2b165ec91749614a3b5241e2ea12470f91b58cf6163e02dfe79392db70cd17db9497cf59c89ac8377dbd02042f6ed270c8c2bc717623b203b74676890f5f4cd905b25772a25292d76b6f42a094c27eed13793d189e395ed3f28c5731976a7b45184acee45b3cf05a9c62045644dfe39f79cd331e282edae99cea652eb82819415ac2a5c21539cdd636fb835063ace3b6befffaf50bf6866e9b1a2b35037a330faeb18ca1696693dafd26b5f5da8dcd3e50ff09249bdda695f576d25024560b643d873d07293a80fe71998ef6ccd88c0cf9f69326b463c26fe4906faaf454ae68accd7ef3edffefdd2ede23a822a2267332f0791f1c4e6d5ab4661f279f5039b36a4476e56fd5b0461e585ff30a7c661b93f1".into(), 
"04000000001f22e1bc88c53b1554f8fdcf261fdb09f4cae6ef5e5032b788515f4a60d30d67d1b35fda68abc05f5af39e5ade224a5312b8dcd1f3629a7ff33355bb7ca93e32d719d14c15e565c05e84ead95a2f101a1b658ee2f36eb7ca65206e27cfca478be6146220bb071f49000b055b22a7a4bbafd6b52efb90f963d5f80126c27e437005fb47720e0000fd4005004d9875d71c540f558813142e263f597243bdd8d8105ff3d1ffd62ae51ccf22729debe510f97ab0631701dbd34b73e570597dc8825be6bd669e693037fb701040c273b44745f4e850c2d8aeca7ccab6ef7f462206a16d75358f2e8fddf9d0dbc6333ff55b1813a37f0ba240bd2d897fbd6cfdb1989ac8f3ec93b15ae4360edf84088ac9a4ea7d3d71290532bb51675e7310be1210aa33c184d693f6f7c15c5be1e89356ae3d663d0c548fceac0974fe4cb6c6559f50643280df9508460fd04f9cde55521b4c6d61c644c6c7b7473f9e39b412e3776f5e47b6c466aaf1dc76ff2114e716eb6b9614d0c93cdc229ec13b07057a7f7446c1aac51ef0950d4361fa2d20f22f29ff490bf6d6a2a267c45d88d3152d9f5291695f2f4fba65ca9763cb4176506c73b8162611b6004af7ec8d1ea55a225cca2576e4ac84ac333b663693a2f19f7786340ad9d2212d576a0b4e7700bd7d60de88940dce1f01481f9c41350eefd7b496218bcf70c4c8922dfd18d666d37d10cb0f14dd38e1225ec179dcab5501a4434674d6f9ff9f23c4df5f445cc2accf43189fc99ac56693df373a4207b0dc991009fae4796fd7e49cea4dd139ee72264dfd47f4e1ad2420d635c7a1f37950d022ffdcccc7651b645db0ba0ce94c18dcc902279b4601806beefe05016f1f85411e6562b584da0854db2e36f602d8c4974d385aee4a01d1132082c8cd7c71443162f7d7487c73d8a46f830f72a0d352d957bef5afc33c4447ef33b2491e28000d1f4687e95ffc2b9532d28ae4c48f8551bf527dbe18c672204495f2bd546566fd5770189e28c2de0974130a492ccd8737a8c6e971d02a23c4f9f27410348d1f666f93385bdc81bad8e9a9d1dbffdfa2609ebae52740b457ecd67a3bf0db02a14f5bdf3e25b35b2d3d303094e46e0e3daef559d9f0e074e512bcaf9fcc9d035083eec16806af8a93d27b4ad46754a425b6a02b1ac22f682e48f214d66b379d7042aa39f2c5f3448d05ca4b6360e162f31f197225f4ad579d69207c666711fb3f6ca814efcf430899360cced1168cd69ec0e809a89cf2cf2015f9f895a3dadd4ced6d94793e98201b1da6a0a5d90be5d06925e3ad60b9227f84b9c3060a6db6e7857d8731f975d4a993abf10d84590da02b114625109d864de070813179b651d528f66036c30a0700ee84fc5e59757a509745b64e76fa3
396f3c8b01a7724cd434e6d774dad36be8a73ad29f6859352aa15236e7825947396cb98e26b912b19ddc127590e59200c4334d1d96d7585a0e349b920f2e4e59cdedac911214c42c0894f72c8a7423d7aef3ea5ef9a5b650821f46537c65509ad8dcf6558c16c04f9877c737ff81875d9fbe01d23d37e937444cf257b0b57bc1c2a774f2e2bf5f3b0881be0e2282ba97ef6aad797f8fdb4053da4e478575805c7a93076c09847544a8e89f1cb3838df7870bcf61deb2144c6f6349c966b67545703058f9227965b97835b049538fb428431a8461586b022368626d20e9b6bfdd7232a5cc6a0aa214319cb440c45443a2446d1e17713c0e1049f0fd759d1dbff493302140376cfb153330ed455a043189260cb7d2d90333a37d3584f2d907d0a73dccee299ad14141d60d1409cda688464a13b5dab37476641741717d599a60c0ac84d85869ed449f83933ad30e2591157fd1f07b73ecf26f34e91bc00f1ca86ae34ca8231b372cdc2ed18d463ac42f92859d6f0e2c483dbb23d785f1233db2033458af9d7c1e7029ac5cc33ca7d25b2b49fd71b1ae5f5ce969b6e77333bf5fbb5e6645dd0a4d0c6e82eb534ac264ddbe28513e4b82b3578c1a6cbfaa2522aa50985fe2cce43cf3363eaacca0e09c721fd603d43c3a4fdf8dde0c9ff2c054910b16aeef7c4d86b31".into(), "04000000fcead9a1b425124f11aa97e0614120ce87bdddcad655672916f9c4564dc057002bd3df07a4602620282b276359529114ba89b59b16bec235d584c3cf5cc6b2d132d719d14c15e565c05e84ead95a2f101a1b658ee2f36eb7ca65206e27cfca47bfe61462d5b9071f1a001daf299c51afbd74fd75a98ba49a6e40ae8ad92b3afdc1cf215fd6190000fd40050044b5e035b02d138a9704f9513c0865f2733b7c09294ee504c155c283f4895559b6ac39828eac98ad393a642330589e8849040f55ce44f8f2197529d0b0ed57ccdda41f1971e153ec28ac5b4eba968741db374104d65ee234580a83bea1c0cdb67b8bc207057486eb1d90e21ba0cd4f5e9fd834821fafc1517c5d1fceb50ba6f6b102a9b4edac46f2359aec795a4e2458f51114a41289634b3b1cf250e3e38f3689f951278dfa7202a7dfe311cc098fd4a8d02c8f8a74e4a5010b18ee2e60578d5e9f1c094433a73f26e6546e20a574fc261baaa79e9910ab86ed607786a1cc88e7de51ff928d434e26eaef1437f7068c743f26d7c0eea6791e869b101fee8ab41b50af6174c5e6b731a1719f31ee3e6529efef49f31665baedc9382e9665278a84467d479f139fc7a8ef66fef9bd2fd17f7779ee315d458f691a290fa7c2179de8bb91a78458c5290d4aa45b163254006800ba2fce7479511f744fd7de96495c39be93413d8b0b187fe092
537e1a7646a66a125b33333f6ecd10085e23ad168b24ee7be69d01ea021a39401e4bd41d818499e7174dd9b85542076c78cb89eeec1c190301b4709dbc963d47926e31bb0235ba6a7029d49458150f6491ac9c973b8a2c893258f907baf4bcb7c39f12b900ba2b2382cd5dd84314ee504ade835ad9a1cb13a7f5928a483ebc9415429810fd99893f2f8f83970b8b47143d617e6f9853e4d86ff378be664218f1c32531143e209f171590dd48216fec879a6b9cbf04432bf4f1a3734b69b6a9f1a358a259a0f9082cfb6c1f3d9d2d9e4522ad651ccce565f06b30c1c0b27252270c2f6608cf4f3288a7e7d4b174e646de05341f7db62b00b5ccb295f058d34b87201148828e9b3f7e08f60e100f810be27eb7f4c471cda7621106fe78bc69ec2bd27acabd55dc094b8626913b7d24d9b60939754700f32574a733a195f8b0220d56f6797de0bcd7b80d561896b816586593409f76e85a7a1035f821dee32a02fdbc26bc4cca375bed418b9d678ac589249a1a5a5b24447ee9b42e33f817066caf3d4e17d0347f6acf0cbf426d4df49413b3d12350edec2681ab9cfecd0825ccfb2649a57391d3f153050dfb4350d60e5e464229ddd6e49ece95557b8ef48c18cbffbe9fc8d7700f611a4b33a2a254afcec638c485e36daf0364da7d4302e488db7b6c41297571048cfea5452e324abb9f9e1043e625fd0853b7e03063d1c3a43aa1ee62d45d890b5e4d10640e775cff6852b6d1acd4a503b3ece3b319cbcf33ff9fdf17b8f852d748db1e05af80507f5d0e1bc44444b155d7da20f7f0b4d6d83368c3bb9e1321b39472a8677ea1d3aca43b453d35edca37b7536d19c26b764958b3c7c30f3211d7b7bb7f6a6d7fd7bf2dda6e7d7b1e533556863549bbe1394a3828596f25029b7e30495e1235f084e5edd133bc29fce4f1e5e514eb1d1cb19fd8dfbb0d130fbec4e288f23dae86311ffd6f4afbaacc2ffe1cc8811a455ba6f5659f82515b56c6ac84277bff5bef98fefc74e002e4a11866a417a429541f8a62df4108e4730d3045f92984bcf1ab2f7d03f8bb1767e91791530cd8eec412919e1f2e341e66a1588a8f485f7aa005787af946b9cb10f6685420b7e1663f66374fddc5e70720507ee2134f3b02df042fcf6db4a5bdd74cc5010793634816fe447cc68e076b225cc1ca872929ef246ce356dc8d8964ff6d7119d071eccb6dc37f75b932c44cdc30723b8357a2761c6de6ab2713e6f6a782538cb731b07950d3f459760a00cc0af406d6848014746b02653636f479d952b46fdeff976e1d159ba46ae7363d5b0042d3905a0bda12aaa6eaae1a5a0d55d4c1930aa1c004cd610866853a247239366aa20f8968ea9ca3d5d6d7321a5d0f2c".into() ]; - validate_headers(headers, 
false, false).unwrap() + let params = BlockHeaderVerificationParams { + difficulty_check: false, + constant_difficulty: false, + difficulty_algorithm: None, + // Will not be used since previous_height is not 0 + genesis_block_header: "".into(), + }; + block_on(validate_headers( + "MORTY", + 1330480, + headers, + &TestBlockHeadersStorage { ticker: "MORTY".into() }, + ¶ms, + )) + .unwrap() } #[test] @@ -555,6 +613,20 @@ mod tests { let headers: Vec = vec!["00200020eab6fa183da8f9e4c761b31a67a76fa6a7658eb84c760200000000000000000063cd9585d434ec0db25894ec4b1f03735f10e31709c4395ea67c50c8378f134b972f166278100a17bfd87203".into(), "0000402045c698413fbe8b5bf10635658d2a1cec72062798e51200000000000000000000869617420a4c95b1d3d6d012419d2b6c199cff9b68dd9a790892a4da8466fb056033166278100a1743ac4d5b".into(), "0400e02019d733c1fd76a1fa5950de7bee9d80f107276b93a67204000000000000000000a0d1dee718f5f732c041800e9aa2c25e92be3f6de28278545388db8a6ae27df64c37166278100a170a970c19".into()]; - validate_headers(headers, true, true).unwrap() + let params = BlockHeaderVerificationParams { + difficulty_check: true, + constant_difficulty: false, + difficulty_algorithm: Some(DifficultyAlgorithm::BitcoinMainnet), + // Will not be used since previous_height is not 0 + genesis_block_header: "010000006fe28c0ab6f1b372c1a6a246ae63f74f931e8365e15a089c68d6190000000000982051fd1e4ba744bbbe680e1fee14677ba1a3c3540bf7b1cdb606e857233e0e61bc6649ffff001d01e36299".into() + }; + block_on(validate_headers( + "BTC", + 724608, + headers, + &TestBlockHeadersStorage { ticker: "BTC".into() }, + ¶ms, + )) + .unwrap() } } diff --git a/mm2src/mm2_bitcoin/spv_validation/src/lib.rs b/mm2src/mm2_bitcoin/spv_validation/src/lib.rs index d74dc35392..0f5bb7e996 100644 --- a/mm2src/mm2_bitcoin/spv_validation/src/lib.rs +++ b/mm2src/mm2_bitcoin/spv_validation/src/lib.rs @@ -4,6 +4,7 @@ extern crate keys; extern crate primitives; extern crate ripemd160; extern crate rustc_hex as hex; +extern crate serde; extern crate serialization; extern 
crate sha2; extern crate test_helpers; diff --git a/mm2src/mm2_bitcoin/spv_validation/src/spv_proof.rs b/mm2src/mm2_bitcoin/spv_validation/src/spv_proof.rs index 646afc0f16..396417e6d1 100644 --- a/mm2src/mm2_bitcoin/spv_validation/src/spv_proof.rs +++ b/mm2src/mm2_bitcoin/spv_validation/src/spv_proof.rs @@ -1,6 +1,5 @@ use crate::helpers_validation::{merkle_prove, validate_vin, validate_vout, SPVError}; use chain::BlockHeader; -use chain::RawBlockHeader; use primitives::hash::H256; pub const TRY_SPV_PROOF_INTERVAL: u64 = 10; @@ -15,15 +14,11 @@ pub struct SPVProof { pub vout: Vec, /// The transaction index in the merkle tree pub index: u64, - /// The confirming UTXO header - pub confirming_header: BlockHeader, - /// The Raw confirming UTXO Header - pub raw_header: RawBlockHeader, /// The intermediate nodes (digests between leaf and root) pub intermediate_nodes: Vec, } -/// Checks validity of an entire SPV Proof +/// Checks validity of an entire SPV Proof against a previously validated UTXO header retrieved from storage /// /// # Arguments /// @@ -36,30 +31,16 @@ pub struct SPVProof { /// # Notes /// Re-write with our own types based on `bitcoin_spv::std_types::SPVProof::validate` impl SPVProof { - pub fn validate_block_header(&self) -> Result<(), SPVError> { - if self.confirming_header.hash() != self.raw_header.digest() { - return Err(SPVError::WrongDigest); - } - if self.confirming_header.merkle_root_hash != self.raw_header.extract_merkle_root() { - return Err(SPVError::WrongMerkleRoot); - } - if self.confirming_header.previous_header_hash != self.raw_header.parent() { - return Err(SPVError::WrongPrevHash); - } - Ok(()) - } - - pub fn validate(&self) -> Result<(), SPVError> { + pub fn validate(&self, validated_header: &BlockHeader) -> Result<(), SPVError> { if !validate_vin(self.vin.as_slice()) { return Err(SPVError::InvalidVin); } if !validate_vout(self.vout.as_slice()) { return Err(SPVError::InvalidVout); } - self.validate_block_header()?; merkle_prove( 
self.tx_id, - self.confirming_header.merkle_root_hash, + validated_header.merkle_root_hash, self.intermediate_nodes.clone(), self.index, ) @@ -69,25 +50,37 @@ impl SPVProof { #[cfg(test)] mod spv_proof_tests { use crate::spv_proof::SPVProof; - use chain::BlockHeader; - use chain::RawBlockHeader; + use chain::{BlockHeader, Transaction}; use hex::FromHex; - use serialization::deserialize; + use primitives::hash::H256; + use serialization::{deserialize, serialize_list}; #[test] - fn test_block_header() { - let header_hex = "040000008e4e7283b71dd1572d220935db0a1654d1042e92378579f8abab67b143f93a02fa026610d2634b72ff729b9ea7850c0d2c25eeaf7a82878ca42a8e9912028863a2d8a734eb73a4dc734072dbfd12406f1e7121bfe0e3d6c10922495c44e5cc1c91185d5ee519011d0400b9caaf41d4b63a6ab55bb4e6925d46fc3adea7be37b713d3a615e7cf0000fd40050001a80fa65b9a46fdb1506a7a4d26f43e7995d69902489b9f6c4599c88f9c169605cc135258953da0d6299ada4ff81a76ad63c943261078d5dd1918f91cea68b65b7fc362e9df49ba57c2ea5c6dba91591c85eb0d59a1905ac66e2295b7a291a1695301489a3cc7310fd45f2b94e3b8d94f3051e9bbaada1e0641fcec6e0d6230e76753aa9574a3f3e28eaa085959beffd3231dbe1aeea3955328f3a973650a38e31632a4ffc7ec007a3345124c0b99114e2444b3ef0ada75adbd077b247bbf3229adcffbe95bc62daac88f96317d5768540b5db636f8c39a8529a736465ed830ab2c1bbddf523587abe14397a6f1835d248092c4b5b691a955572607093177a5911e317739187b41f4aa662aa6bca0401f1a0a77915ebb6947db686cff549c5f4e7b9dd93123b00a1ae8d411cfb13fa7674de21cbee8e9fc74e12aa6753b261eab3d9256c7c32cc9b16219dad73c61014e7d88d74d5e218f12e11bc47557347ff49a9ab4490647418d2a5c2da1df24d16dfb611173608fe4b10a357b0fa7a1918b9f2d7836c84bf05f384e1e678b2fdd47af0d8e66e739fe45209ede151a180aba1188058a0db093e30bc9851980cf6fbfa5adb612d1146905da662c3347d7e7e569a1041641049d951ab867bc0c6a3863c7667d43f596a849434958cee2b63dc8fa11bd0f38aa96df86ed66461993f64736345313053508c4e939506c08a766f5b6ed0950759f3901bbc4db3dc97e05bf20b9dda4ff242083db304a4e487ac2101b823998371542354e5d534b5b6ae6420cc19b11512108b61208f4d9a5a97263d2c060da893544dea6251bcadc682d2
238af35f2b1c2f65a73b89a4e194f9e1eef6f0e5948ef8d0d2862f48fd3356126b00c6a2d3770ecd0d1a78fa34974b454f270b23d461e357c9356c19496522b59ff9d5b4608c542ff89e558798324021704b2cfe9f6c1a70906c43c7a690f16615f198d29fa647d84ce8461fa570b33e3eada2ed7d77e1f280a0d2e9f03c2e1db535d922b1759a191b417595f3c15d8e8b7f810527ff942e18443a3860e67ccba356809ecedc31c5d8db59c7e039dae4b53d126679e8ffa20cc26e8b9d229c8f6ee434ad053f5f4f5a94e249a13afb995aad82b4d90890187e516e114b168fc7c7e291b9738ea578a7bab0ba31030b14ba90b772b577806ea2d17856b0cb9e74254ba582a9f2638ea7ed2ca23be898c6108ff8f466b443537ed9ec56b8771bfbf0f2f6e1092a28a7fd182f111e1dbdd155ea82c6cb72d5f9e6518cc667b8226b5f5c6646125fc851e97cf125f48949f988ed37c4283072fc03dd1da3e35161e17f44c0e22c76f708bb66405737ef24176e291b4fc2eadab876115dc62d48e053a85f0ad132ef07ad5175b036fe39e1ad14fcdcdc6ac5b3daabe05161a72a50545dd812e0f9af133d061b726f491e904d89ee57811ef58d3bda151f577aed381963a30d91fb98dc49413300d132a7021a5e834e266b4ac982d76e00f43f5336b8e8028a0cacfa11813b01e50f71236a73a4c0d0757c1832b0680ada56c80edf070f438ab2bc587542f926ff8d3644b8b8a56c78576f127dec7aed9cb3e1bc2442f978a9df1dc3056a63e653132d0f419213d3cb86e7b61720de1aa3af4b3757a58156970da27560c6629257158452b9d5e4283dc6fe7df42d2fda3352d5b62ce5a984d912777c3b01837df8968a4d494db1b663e0e68197dbf196f21ea11a77095263dec548e2010460840231329d83978885ee2423e8b327785970e27c6c6d436157fb5b56119b19239edbb730ebae013d82c35df4a6e70818a74d1ef7a2e87c090ff90e32939f58ed24e85b492b5750fd2cd14b9b8517136b76b1cc6ccc6f6f027f65f1967a0eb4f32cd6e5d5315"; + fn test_validate() { + // https://live.blockcypher.com/btc-testnet/block/000000000000004d36632fda8180ff16855d606e5515aab0750d9d4fe55fe7d6/ + let header_hex = "0000602002bf77bbb098f90f149430c314e71ef4e2671ea5e04a2503e0000000000000000406ffb54f2925360aae81bd3199f456928bbe6ae83a877902da9d9ffb08215da0ba3161ffff001a545a850b"; let header_bytes: Vec = header_hex.from_hex().unwrap(); - let header: BlockHeader = deserialize(header_bytes.as_slice()).unwrap(); + let validated_header: BlockHeader = 
deserialize(header_bytes.as_slice()).unwrap(); + //https://live.blockcypher.com/btc-testnet/tx/eefbafa4006e77099db059eebe14687965813283e5754d317431d9984554735d/ + let tx: Transaction = "0200000000010146c398e70cceaf9d8f734e603bc53e4c4c0605ab46cb1b5807a62c90f5aed50d0100000000feffffff023c0fc10c010000001600145033f65b590f2065fe55414213f1d25ab20b6c4f487d1700000000001600144b812d5ef41fc433654d186463d41b458821ff740247304402202438dc18801919baa64eb18f7e925ab6acdedc3751ea58ea164a26723b79fd39022060b46c1d277714c640cdc8512c36c862ffc646e7ff62438ef5cc847a5990bbf801210247b49d9e6b0089a1663668829e573c629c936eb430c043af9634aa57cf97a33cbee81f00".into(); + let intermediate_nodes: Vec = vec![ + "434d6b93388ab077aa12d6257253cc036fd6122e9e88465a86f4fd682fc6e006".into(), + "bd9af28e56cf6731e78ee1503a65d9cc9b15c148daa474e71e085176f48996ac".into(), + "605f6f83423ef3b86623927ef2d9dcb0f8d9e40a8132217c2fa0910b84488ec7".into(), + "10b7ef06ef0756823dbf39dea717be397e7ccb49bbefc5cfc45e6f9d58793baf".into(), + "19183ceae11796a9b1d0893e0561870bbce4d060c9547b1e91ad8b34eb3d5001".into(), + "1b16723739522955422b4286b4d8620d2a704b6997e6bbd809d151b8d8d64611".into(), + "6f8496469b19dd35871684332dfd3fc0205d83d2c58c44ebdae068542bc951f6".into(), + "e0d2733bd7bce4e5690b71bc8f7cedb1edbc49a5ff85c3678ecdec894ea1c023".into(), + ]; + let intermediate_nodes = intermediate_nodes.into_iter().map(|hash| hash.reversed()).collect(); let spv_proof = SPVProof { - tx_id: Default::default(), - vin: vec![], - vout: vec![], - index: 0, - confirming_header: header, - raw_header: RawBlockHeader::new(header_bytes).unwrap(), - intermediate_nodes: vec![], + tx_id: tx.hash(), + vin: serialize_list(&tx.inputs).take(), + vout: serialize_list(&tx.outputs).take(), + index: 1, + intermediate_nodes, }; - spv_proof.validate_block_header().unwrap() + spv_proof.validate(&validated_header).unwrap() } } diff --git a/mm2src/mm2_bitcoin/spv_validation/src/storage.rs b/mm2src/mm2_bitcoin/spv_validation/src/storage.rs index d516535f79..7c007e01f3 
100644 --- a/mm2src/mm2_bitcoin/spv_validation/src/storage.rs +++ b/mm2src/mm2_bitcoin/spv_validation/src/storage.rs @@ -4,7 +4,7 @@ use derive_more::Display; use primitives::hash::H256; use std::collections::HashMap; -#[derive(Debug, Display)] +#[derive(Clone, Debug, Display, Eq, PartialEq)] pub enum BlockHeaderStorageError { #[display(fmt = "Can't add to the storage for {} - reason: {}", coin, reason)] AddToStorageError { @@ -42,41 +42,27 @@ pub enum BlockHeaderStorageError { #[async_trait] pub trait BlockHeaderStorageOps: Send + Sync + 'static { /// Initializes collection/tables in storage for a specified coin - async fn init(&self, for_coin: &str) -> Result<(), BlockHeaderStorageError>; + async fn init(&self) -> Result<(), BlockHeaderStorageError>; - async fn is_initialized_for(&self, for_coin: &str) -> Result; + async fn is_initialized_for(&self) -> Result; // Adds multiple block headers to the selected coin's header storage // Should store it as `COIN_HEIGHT=hex_string` // use this function for headers that comes from `blockchain_block_headers` async fn add_block_headers_to_storage( &self, - for_coin: &str, headers: HashMap, ) -> Result<(), BlockHeaderStorageError>; /// Gets the block header by height from the selected coin's storage as BlockHeader - async fn get_block_header( - &self, - for_coin: &str, - height: u64, - ) -> Result, BlockHeaderStorageError>; + async fn get_block_header(&self, height: u64) -> Result, BlockHeaderStorageError>; /// Gets the block header by height from the selected coin's storage as hex - async fn get_block_header_raw( - &self, - for_coin: &str, - height: u64, - ) -> Result, BlockHeaderStorageError>; + async fn get_block_header_raw(&self, height: u64) -> Result, BlockHeaderStorageError>; - async fn get_last_block_header_with_non_max_bits( - &self, - for_coin: &str, - ) -> Result, BlockHeaderStorageError>; + async fn get_last_block_height(&self) -> Result; - async fn get_block_height_by_hash( - &self, - for_coin: &str, - hash: 
H256, - ) -> Result, BlockHeaderStorageError>; + async fn get_last_block_header_with_non_max_bits(&self) -> Result, BlockHeaderStorageError>; + + async fn get_block_height_by_hash(&self, hash: H256) -> Result, BlockHeaderStorageError>; } diff --git a/mm2src/mm2_bitcoin/spv_validation/src/work.rs b/mm2src/mm2_bitcoin/spv_validation/src/work.rs index ed5935006d..8d31aedda0 100644 --- a/mm2src/mm2_bitcoin/spv_validation/src/work.rs +++ b/mm2src/mm2_bitcoin/spv_validation/src/work.rs @@ -3,6 +3,7 @@ use chain::{BlockHeader, BlockHeaderBits}; use derive_more::Display; use primitives::compact::Compact; use primitives::U256; +use serde::{Deserialize, Serialize}; use std::cmp; const RETARGETING_FACTOR: u32 = 4; @@ -21,28 +22,25 @@ pub const MAX_BITS_BTC: u32 = 486604799; fn is_retarget_height(height: u32) -> bool { height % RETARGETING_INTERVAL == 0 } -#[derive(Debug, Display)] +#[derive(Clone, Debug, Display, Eq, PartialEq)] pub enum NextBlockBitsError { #[display(fmt = "Block headers storage error: {}", _0)] StorageError(BlockHeaderStorageError), - #[display(fmt = "Can't find Block header for {} with height {}", height, coin)] - NoSuchBlockHeader { - coin: String, - height: u64, - }, + #[display(fmt = "Can't find Block header for {} with height {}", coin, height)] + NoSuchBlockHeader { coin: String, height: u64 }, #[display(fmt = "Can't find a Block header for {} with no max bits", coin)] - NoBlockHeaderWithNoMaxBits { - coin: String, - }, - Internal(String), + NoBlockHeaderWithNoMaxBits { coin: String }, } impl From for NextBlockBitsError { fn from(e: BlockHeaderStorageError) -> Self { NextBlockBitsError::StorageError(e) } } +#[derive(Clone, Debug, Deserialize, Serialize)] pub enum DifficultyAlgorithm { + #[serde(rename = "Bitcoin Mainnet")] BitcoinMainnet, + #[serde(rename = "Bitcoin Testnet")] BitcoinTestnet, } @@ -52,7 +50,7 @@ pub async fn next_block_bits( last_block_header: BlockHeader, last_block_height: u32, storage: &dyn BlockHeaderStorageOps, - algorithm: 
DifficultyAlgorithm, + algorithm: &DifficultyAlgorithm, ) -> Result { match algorithm { DifficultyAlgorithm::BitcoinMainnet => { @@ -87,10 +85,16 @@ async fn btc_retarget_bits( last_block_header: BlockHeader, storage: &dyn BlockHeaderStorageOps, ) -> Result { + let max_bits_compact: Compact = MAX_BITS_BTC.into(); + let retarget_ref = (height - RETARGETING_INTERVAL).into(); + if retarget_ref == 0 { + return Ok(BlockHeaderBits::Compact(max_bits_compact)); + } + let retarget_header = storage - .get_block_header(coin, retarget_ref) + .get_block_header(retarget_ref) .await? .ok_or(NextBlockBitsError::NoSuchBlockHeader { coin: coin.into(), @@ -108,9 +112,7 @@ async fn btc_retarget_bits( let target_timespan_seconds: U256 = TARGET_TIMESPAN_SECONDS.into(); let retarget = retarget / target_timespan_seconds; - let max_bits_compact: Compact = MAX_BITS_BTC.into(); let max_bits: U256 = max_bits_compact.into(); - if retarget > max_bits { Ok(BlockHeaderBits::Compact(max_bits_compact)) } else { @@ -125,7 +127,7 @@ async fn btc_mainnet_next_block_bits( storage: &dyn BlockHeaderStorageOps, ) -> Result { if last_block_height == 0 { - return Err(NextBlockBitsError::Internal("Last block height can't be zero".into())); + return Ok(BlockHeaderBits::Compact(MAX_BITS_BTC.into())); } let height = last_block_height + 1; @@ -145,14 +147,14 @@ async fn btc_testnet_next_block_bits( last_block_height: u32, storage: &dyn BlockHeaderStorageOps, ) -> Result { + let max_bits = BlockHeaderBits::Compact(MAX_BITS_BTC.into()); if last_block_height == 0 { - return Err(NextBlockBitsError::Internal("Last block height can't be zero".into())); + return Ok(max_bits); } let height = last_block_height + 1; let last_block_bits = last_block_header.bits.clone(); let max_time_gap = last_block_header.time + 2 * TARGET_SPACING_SECONDS; - let max_bits = BlockHeaderBits::Compact(MAX_BITS_BTC.into()); if is_retarget_height(height) { btc_retarget_bits(coin, height, last_block_header, storage).await @@ -161,16 +163,17 @@ 
async fn btc_testnet_next_block_bits( } else if last_block_bits != max_bits { Ok(last_block_bits.clone()) } else { - let last_block_header_with_non_max_bits = storage - .get_last_block_header_with_non_max_bits(coin) + let last_non_max_bits = storage + .get_last_block_header_with_non_max_bits() .await? - .ok_or(NextBlockBitsError::NoBlockHeaderWithNoMaxBits { coin: coin.into() })?; - Ok(last_block_header_with_non_max_bits.bits) + .map(|header| header.bits) + .unwrap_or(max_bits); + Ok(last_non_max_bits) } } #[cfg(test)] -mod tests { +pub(crate) mod tests { use super::*; use crate::storage::{BlockHeaderStorageError, BlockHeaderStorageOps}; use async_trait::async_trait; @@ -203,60 +206,55 @@ mod tests { .collect() } - struct TestBlockHeadersStorage {} + pub struct TestBlockHeadersStorage { + pub(crate) ticker: String, + } #[async_trait] impl BlockHeaderStorageOps for TestBlockHeadersStorage { - async fn init(&self, _for_coin: &str) -> Result<(), BlockHeaderStorageError> { Ok(()) } + async fn init(&self) -> Result<(), BlockHeaderStorageError> { Ok(()) } - async fn is_initialized_for(&self, _for_coin: &str) -> Result { Ok(true) } + async fn is_initialized_for(&self) -> Result { Ok(true) } async fn add_block_headers_to_storage( &self, - _for_coin: &str, _headers: HashMap, ) -> Result<(), BlockHeaderStorageError> { Ok(()) } - async fn get_block_header( - &self, - for_coin: &str, - height: u64, - ) -> Result, BlockHeaderStorageError> { - Ok(get_block_headers_for_coin(for_coin).get(&height).cloned()) + async fn get_block_header(&self, height: u64) -> Result, BlockHeaderStorageError> { + Ok(get_block_headers_for_coin(&self.ticker).get(&height).cloned()) } - async fn get_block_header_raw( - &self, - _for_coin: &str, - _height: u64, - ) -> Result, BlockHeaderStorageError> { + async fn get_block_header_raw(&self, _height: u64) -> Result, BlockHeaderStorageError> { Ok(None) } + async fn get_last_block_height(&self) -> Result { + Ok(get_block_headers_for_coin(&self.ticker) + 
.into_keys() + .max_by(|a, b| a.cmp(b)) + .unwrap()) + } + async fn get_last_block_header_with_non_max_bits( &self, - for_coin: &str, ) -> Result, BlockHeaderStorageError> { - let mut headers = get_block_headers_for_coin(for_coin); + let mut headers = get_block_headers_for_coin(&self.ticker); headers.retain(|_, h| h.bits != BlockHeaderBits::Compact(MAX_BITS_BTC.into())); let header = headers.into_iter().max_by(|a, b| a.0.cmp(&b.0)); Ok(header.map(|(_, h)| h)) } - async fn get_block_height_by_hash( - &self, - _for_coin: &str, - _hash: H256, - ) -> Result, BlockHeaderStorageError> { + async fn get_block_height_by_hash(&self, _hash: H256) -> Result, BlockHeaderStorageError> { Ok(None) } } #[test] fn test_btc_mainnet_next_block_bits() { - let storage = TestBlockHeadersStorage {}; + let storage = TestBlockHeadersStorage { ticker: "BTC".into() }; let last_header: BlockHeader = "000000201d758432ecd495a2177b44d3fe6c22af183461a0b9ea0d0000000000000000008283a1dfa795d9b68bd8c18601e443368265072cbf8c76bfe58de46edd303798035de95d3eb2151756fdb0e8".into(); @@ -282,7 +280,7 @@ mod tests { #[test] fn test_btc_testnet_next_block_bits() { - let storage = TestBlockHeadersStorage {}; + let storage = TestBlockHeadersStorage { ticker: "tBTC".into() }; // https://live.blockcypher.com/btc-testnet/block/000000000057db3806384e2ec1b02b2c86bd928206ff8dff98f54d616b7fa5f2/ let current_header: BlockHeader = "02000000303505969a1df329e5fccdf69b847a201772e116e557eb7f119d1a9600000000469267f52f43b8799e72f0726ba2e56432059a8ad02b84d4fff84b9476e95f7716e41353ab80011c168cb471".into(); diff --git a/mm2src/mm2_core/Cargo.toml b/mm2src/mm2_core/Cargo.toml index b260692076..3070b3f7ea 100644 --- a/mm2src/mm2_core/Cargo.toml +++ b/mm2src/mm2_core/Cargo.toml @@ -17,6 +17,7 @@ futures = { version = "0.3", package = "futures", features = ["compat", "async-a hex = "0.4.2" keys = { path = "../mm2_bitcoin/keys" } lazy_static = "1.4" +lightning-background-processor = "0.0.110" mm2_metrics = { path = "../mm2_metrics" } 
primitives = { path = "../mm2_bitcoin/primitives" } rand = { version = "0.7", features = ["std", "small_rng", "wasm-bindgen"] } diff --git a/mm2src/mm2_core/src/mm_ctx.rs b/mm2src/mm2_core/src/mm_ctx.rs index 80fdf40452..d0265924d9 100644 --- a/mm2src/mm2_core/src/mm_ctx.rs +++ b/mm2src/mm2_core/src/mm_ctx.rs @@ -7,7 +7,9 @@ use futures::future::AbortHandle; use gstuff::{try_s, Constructible, ERR, ERRL}; use keys::KeyPair; use lazy_static::lazy_static; -use mm2_metrics::{MetricsArc, MetricsOps, MmMetricsError}; +#[cfg(not(target_arch = "wasm32"))] +use lightning_background_processor::BackgroundProcessor; +use mm2_metrics::{MetricsArc, MetricsOps}; use primitives::hash::H160; use rand::Rng; use serde::{Deserialize, Serialize}; @@ -18,7 +20,6 @@ use std::any::Any; use std::collections::hash_map::{Entry, HashMap}; use std::collections::HashSet; use std::fmt; -use std::net::AddrParseError; use std::ops::Deref; use std::path::{Path, PathBuf}; use std::sync::{Arc, Mutex}; @@ -29,9 +30,9 @@ cfg_wasm32! { } cfg_native! { - use mm2_metrics::prometheus; + use mm2_metrics::{prometheus, MmMetricsError}; use db_common::sqlite::rusqlite::Connection; - use std::net::{IpAddr, SocketAddr}; + use std::net::{AddrParseError, IpAddr, SocketAddr}; use std::sync::MutexGuard; } @@ -105,6 +106,11 @@ pub struct MmCtx { pub swaps_ctx: Mutex>>, /// The context belonging to the `lp_stats` mod: `StatsContext` pub stats_ctx: Mutex>>, + /// Lightning background processors, these need to be dropped when stopping mm2 to + /// persist the latest states to the filesystem. This can be moved to LightningCoin + /// Struct in the future if the LightningCoin and other coins are dropped when mm2 stops. + #[cfg(not(target_arch = "wasm32"))] + pub background_processors: Mutex>, /// The RPC sender forwarding requests to writing part of underlying stream. 
#[cfg(target_arch = "wasm32")] pub wasm_rpc: Constructible, @@ -144,6 +150,8 @@ impl MmCtx { coins_needed_for_kick_start: Mutex::new(HashSet::new()), swaps_ctx: Mutex::new(None), stats_ctx: Mutex::new(None), + #[cfg(not(target_arch = "wasm32"))] + background_processors: Mutex::new(HashMap::new()), #[cfg(target_arch = "wasm32")] wasm_rpc: Constructible::default(), #[cfg(not(target_arch = "wasm32"))] @@ -384,6 +392,10 @@ impl MmArc { for handler in self.abort_handlers.lock().unwrap().drain(..) { handler.abort(); } + + #[cfg(not(target_arch = "wasm32"))] + self.background_processors.lock().unwrap().drain(); + let mut stop_listeners = self.stop_listeners.lock().expect("Can't lock stop_listeners"); // NB: It is important that we `drain` the `stop_listeners` rather than simply iterating over them // because otherwise there might be reference counting instances remaining in a listener diff --git a/mm2src/mm2_io/src/fs.rs b/mm2src/mm2_io/src/fs.rs index 76ff7e2864..e844086d89 100644 --- a/mm2src/mm2_io/src/fs.rs +++ b/mm2src/mm2_io/src/fs.rs @@ -25,6 +25,14 @@ pub enum FsJsonError { Deserializing(JsonError), } +#[inline] +pub fn invalid_data_err(msg: &str, err: Error) -> io::Error +where + Error: std::fmt::Display, +{ + io::Error::new(std::io::ErrorKind::InvalidData, format!("{}: {}", msg, err)) +} + pub fn check_dir_operations(dir_path: &Path) -> Result<(), io::Error> { let r: [u8; 32] = random(); let mut check: Vec = Vec::with_capacity(r.len()); diff --git a/mm2src/mm2_main/src/lp_swap.rs b/mm2src/mm2_main/src/lp_swap.rs index e82fd72d21..0766093e37 100644 --- a/mm2src/mm2_main/src/lp_swap.rs +++ b/mm2src/mm2_main/src/lp_swap.rs @@ -1546,7 +1546,6 @@ mod lp_swap_tests { disable_cert_verification: false, }) .collect(), - block_header_params: None, }, utxo_merge_params: None, tx_history: false, diff --git a/mm2src/mm2_main/src/mm2_tests.rs b/mm2src/mm2_main/src/mm2_tests.rs index 15cce2c01d..c632796ee0 100644 --- a/mm2src/mm2_main/src/mm2_tests.rs +++ 
b/mm2src/mm2_main/src/mm2_tests.rs @@ -8,10 +8,11 @@ use crypto::privkey::key_pair_from_seed; use http::{HeaderMap, StatusCode}; use mm2_metrics::{MetricType, MetricsJson}; use mm2_number::{BigDecimal, BigRational, Fraction, MmNumber}; -use mm2_test_helpers::for_tests::{check_my_swap_status, check_recent_swaps, check_stats_swap_status, - enable_native as enable_native_impl, enable_qrc20, find_metrics_in_json, - from_env_file, init_z_coin_light, init_z_coin_status, mm_spat, morty_conf, - rick_conf, sign_message, verify_message, wait_till_history_has_records, LocalStart, +use mm2_test_helpers::for_tests::{btc_with_spv_conf, check_my_swap_status, check_recent_swaps, + check_stats_swap_status, enable_native as enable_native_impl, enable_qrc20, + find_metrics_in_json, from_env_file, init_utxo_electrum, init_utxo_status, + init_z_coin_light, init_z_coin_status, mm_spat, morty_conf, rick_conf, sign_message, + tbtc_with_spv_conf, verify_message, wait_till_history_has_records, LocalStart, MarketMakerIt, Mm2TestConf, RaiiDump, MAKER_ERROR_EVENTS, MAKER_SUCCESS_EVENTS, MORTY, RICK, TAKER_ERROR_EVENTS, TAKER_SUCCESS_EVENTS}; use serde_json::{self as json, Value as Json}; @@ -205,6 +206,34 @@ async fn enable_z_coin_light( } } +async fn enable_utxo_v2_electrum( + mm: &MarketMakerIt, + coin: &str, + servers: Vec, + timeout: u64, +) -> UtxoStandardActivationResult { + let init = init_utxo_electrum(mm, coin, servers).await; + let init: RpcV2Response = json::from_value(init).unwrap(); + let timeout = now_ms() + (timeout * 1000); + + loop { + if now_ms() > timeout { + panic!("{} initialization timed out", coin); + } + + let status = init_utxo_status(mm, init.result.task_id).await; + let status: RpcV2Response = json::from_value(status).unwrap(); + log!("init_utxo_status: {:?}", status); + if let InitUtxoStatus::Ready(rpc_result) = status.result { + match rpc_result { + MmRpcResult::Ok { result } => break result, + MmRpcResult::Err(e) => panic!("{} initialization error {:?}", coin, e), 
+ } + } + Timer::sleep(1.).await; + } +} + /// Integration test for RPC server. /// Check that MM doesn't crash in case of invalid RPC requests #[test] @@ -7799,3 +7828,64 @@ fn test_gui_storage_coins_functionality() { }; assert_eq!(actual.result, expected); } + +// This test is ignored because block headers sync and validation can take some time +#[test] +#[ignore] +#[cfg(not(target_arch = "wasm32"))] +fn test_btc_block_header_sync() { + let coins = json!([btc_with_spv_conf()]); + + let mm_bob = MarketMakerIt::start( + json! ({ + "gui": "nogui", + "netid": 9998, + "myipaddr": env::var ("BOB_TRADE_IP") .ok(), + "rpcip": env::var ("BOB_TRADE_IP") .ok(), + "passphrase": "bob passphrase", + "coins": coins, + "rpc_password": "pass", + }), + "pass".into(), + local_start!("bob"), + ) + .unwrap(); + let (_dump_log, _dump_dashboard) = mm_bob.mm_dump(); + log!("log path: {}", mm_bob.log_path.display()); + + let utxo_bob = block_on(enable_utxo_v2_electrum(&mm_bob, "BTC", btc_electrums(), 600)); + log!("enable UTXO bob {:?}", utxo_bob); + + block_on(mm_bob.stop()).unwrap(); +} + +// This test is ignored because block headers sync and validation can take some time +// Todo: this test is failing, need a small fix in calculating btc_testnet_next_block_bits, and to add each block header individually while validating it. 
+#[test] +#[ignore] +#[cfg(not(target_arch = "wasm32"))] +fn test_tbtc_block_header_sync() { + let coins = json!([tbtc_with_spv_conf()]); + + let mm_bob = MarketMakerIt::start( + json!({ + "gui": "nogui", + "netid": 9998, + "myipaddr": env::var ("BOB_TRADE_IP") .ok(), + "rpcip": env::var ("BOB_TRADE_IP") .ok(), + "passphrase": "bob passphrase", + "coins": coins, + "rpc_password": "pass", + }), + "pass".into(), + local_start!("bob"), + ) + .unwrap(); + let (_dump_log, _dump_dashboard) = mm_bob.mm_dump(); + log!("log path: {}", mm_bob.log_path.display()); + + let utxo_bob = block_on(enable_utxo_v2_electrum(&mm_bob, "tBTC-TEST", tbtc_electrums(), 100000)); + log!("enable UTXO bob {:?}", utxo_bob); + + block_on(mm_bob.stop()).unwrap(); +} diff --git a/mm2src/mm2_main/src/mm2_tests/electrums.rs b/mm2src/mm2_main/src/mm2_tests/electrums.rs index 20728e4f23..c2855dbfe4 100644 --- a/mm2src/mm2_main/src/mm2_tests/electrums.rs +++ b/mm2src/mm2_main/src/mm2_tests/electrums.rs @@ -38,6 +38,25 @@ pub fn morty_electrums() -> Vec { ] } +#[allow(dead_code)] +#[cfg(target_arch = "wasm32")] +pub fn btc_electrums() -> Vec { + vec![ + json!({ "url": "electrum1.cipig.net:30000", "protocol": "WSS" }), + json!({ "url": "electrum2.cipig.net:30000", "protocol": "WSS" }), + json!({ "url": "electrum3.cipig.net:30000", "protocol": "WSS" }), + ] +} + +#[cfg(not(target_arch = "wasm32"))] +pub fn btc_electrums() -> Vec { + vec![ + json!({ "url": "electrum1.cipig.net:10000" }), + json!({ "url": "electrum2.cipig.net:10000" }), + json!({ "url": "electrum3.cipig.net:10000" }), + ] +} + #[allow(dead_code)] #[cfg(target_arch = "wasm32")] pub fn tbtc_electrums() -> Vec { diff --git a/mm2src/mm2_main/src/mm2_tests/lightning_tests.rs b/mm2src/mm2_main/src/mm2_tests/lightning_tests.rs index a44a4fe5bf..19711a08fc 100644 --- a/mm2src/mm2_main/src/mm2_tests/lightning_tests.rs +++ b/mm2src/mm2_main/src/mm2_tests/lightning_tests.rs @@ -7,7 +7,7 @@ const T_BTC_ELECTRUMS: &[&str] = &[ 
"electrum3.cipig.net:10068", ]; -fn start_lightning_nodes() -> (MarketMakerIt, MarketMakerIt, String, String) { +fn start_lightning_nodes(enable_0_confs: bool) -> (MarketMakerIt, MarketMakerIt, String, String) { let node_1_seed = "become nominee mountain person volume business diet zone govern voice debris hidden"; let node_2_seed = "february coast tortoise grab shadow vast volcano affair ordinary gesture brass oxygen"; @@ -36,30 +36,24 @@ fn start_lightning_nodes() -> (MarketMakerIt, MarketMakerIt, String, String) { "coin": "tBTC-TEST-lightning", "mm2": 1, "decimals": 11, - "our_channels_config": { + "our_channels_configs": { "inbound_channels_confirmations": 1 }, "counterparty_channel_config_limits": { - "outbound_channels_confirmations": 1 + "outbound_channels_confirmations": 1, + // If true, this enables sending payments between the 2 nodes straight away without waiting for on-chain confirmations + // if the other node added this node as trusted. It also overrides "outbound_channels_confirmations". 
+ "allow_outbound_0conf": enable_0_confs }, "protocol": { "type": "LIGHTNING", "protocol_data":{ "platform": "tBTC-TEST-segwit", "network": "testnet", - "confirmations": { - "background": { - "default_fee_per_kb": 1012, - "n_blocks": 12 - }, - "normal": { - "default_fee_per_kb": 8000, - "n_blocks": 6 - }, - "high_priority": { - "default_fee_per_kb": 20000, - "n_blocks": 1 - } + "confirmation_targets": { + "background": 12, + "normal": 6, + "high_priority": 1 } } } @@ -83,7 +77,8 @@ fn start_lightning_nodes() -> (MarketMakerIt, MarketMakerIt, String, String) { let (_dump_log, _dump_dashboard) = mm_node_1.mm_dump(); log!("bob log path: {}", mm_node_1.log_path.display()); - let _electrum = block_on(enable_electrum(&mm_node_1, "tBTC-TEST-segwit", false, T_BTC_ELECTRUMS)); + let electrum = block_on(enable_electrum(&mm_node_1, "tBTC-TEST-segwit", false, T_BTC_ELECTRUMS)); + log!("Node 1 tBTC address: {}", electrum.address); let enable_lightning_1 = block_on(enable_lightning(&mm_node_1, "tBTC-TEST-lightning")); let node_1_address = enable_lightning_1["result"]["address"].as_str().unwrap().to_string(); @@ -105,7 +100,8 @@ fn start_lightning_nodes() -> (MarketMakerIt, MarketMakerIt, String, String) { let (_dump_log, _dump_dashboard) = mm_node_2.mm_dump(); log!("alice log path: {}", mm_node_2.log_path.display()); - let _electrum = block_on(enable_electrum(&mm_node_2, "tBTC-TEST-segwit", false, T_BTC_ELECTRUMS)); + let electrum = block_on(enable_electrum(&mm_node_2, "tBTC-TEST-segwit", false, T_BTC_ELECTRUMS)); + log!("Node 2 tBTC address: {}", electrum.address); let enable_lightning_2 = block_on(enable_lightning(&mm_node_2, "tBTC-TEST-lightning")); let node_2_address = enable_lightning_2["result"]["address"].as_str().unwrap().to_string(); @@ -148,19 +144,10 @@ fn test_enable_lightning() { "protocol_data":{ "platform": "tBTC-TEST-segwit", "network": "testnet", - "confirmations": { - "background": { - "default_fee_per_kb": 1012, - "n_blocks": 12 - }, - "normal": { - 
"default_fee_per_kb": 8000, - "n_blocks": 6 - }, - "high_priority": { - "default_fee_per_kb": 20000, - "n_blocks": 1 - } + "confirmation_targets": { + "background": 12, + "normal": 6, + "high_priority": 1 } } } @@ -202,7 +189,7 @@ fn test_enable_lightning() { #[test] #[cfg(not(target_arch = "wasm32"))] fn test_connect_to_lightning_node() { - let (mm_node_1, mm_node_2, node_1_id, _) = start_lightning_nodes(); + let (mm_node_1, mm_node_2, node_1_id, _) = start_lightning_nodes(false); let node_1_address = format!("{}@{}:9735", node_1_id, mm_node_1.ip.to_string()); let connect = block_on(mm_node_2.rpc(&json! ({ @@ -225,10 +212,11 @@ fn test_connect_to_lightning_node() { } #[test] +// This test is ignored because it requires refilling the tBTC addresses with test coins periodically. #[ignore] #[cfg(not(target_arch = "wasm32"))] fn test_open_channel() { - let (mm_node_1, mut mm_node_2, node_1_id, node_2_id) = start_lightning_nodes(); + let (mm_node_1, mut mm_node_2, node_1_id, node_2_id) = start_lightning_nodes(false); let node_1_address = format!("{}@{}:9735", node_1_id, mm_node_1.ip.to_string()); let open_channel = block_on(mm_node_2.rpc(&json! ({ @@ -239,8 +227,8 @@ fn test_open_channel() { "coin": "tBTC-TEST-lightning", "node_address": node_1_address, "amount": { - "type":"Exact", - "value":0.00002, + "type": "Exact", + "value": 0.0002, }, }, }))) @@ -252,7 +240,7 @@ fn test_open_channel() { let list_channels_node_1 = block_on(mm_node_1.rpc(&json! ({ "userpass": mm_node_1.userpass, "mmrpc": "2.0", - "method": "list_channels", + "method": "list_open_channels_by_filter", "params": { "coin": "tBTC-TEST-lightning", }, @@ -281,7 +269,7 @@ fn test_open_channel() { let list_channels_node_2 = block_on(mm_node_2.rpc(&json! 
({ "userpass": mm_node_2.userpass, "mmrpc": "2.0", - "method": "list_channels", + "method": "list_open_channels_by_filter", "params": { "coin": "tBTC-TEST-lightning", }, @@ -303,13 +291,125 @@ fn test_open_channel() { ); assert_eq!( list_channels_node_2_res["result"]["open_channels"][0]["balance_msat"], - 2000000 + 20000000 ); block_on(mm_node_1.stop()).unwrap(); block_on(mm_node_2.stop()).unwrap(); } +#[test] +// This test is ignored because it requires refilling the tBTC addresses with test coins periodically. +#[ignore] +#[cfg(not(target_arch = "wasm32"))] +// This also tests 0_confs_channels +fn test_send_payment() { + let (mut mm_node_2, mm_node_1, node_2_id, node_1_id) = start_lightning_nodes(true); + let node_1_address = format!("{}@{}:9735", node_1_id, mm_node_1.ip.to_string()); + + let add_trusted_node = block_on(mm_node_1.rpc(&json! ({ + "userpass": mm_node_1.userpass, + "mmrpc": "2.0", + "method": "add_trusted_node", + "params": { + "coin": "tBTC-TEST-lightning", + "node_id": node_2_id + }, + }))) + .unwrap(); + assert!(add_trusted_node.0.is_success(), "!open_channel: {}", add_trusted_node.1); + + let open_channel = block_on(mm_node_2.rpc(&json! ({ + "userpass": mm_node_2.userpass, + "mmrpc": "2.0", + "method": "open_channel", + "params": { + "coin": "tBTC-TEST-lightning", + "node_address": node_1_address, + "amount": { + "type": "Exact", + "value": 0.0002, + }, + }, + }))) + .unwrap(); + assert!(open_channel.0.is_success(), "!open_channel: {}", open_channel.1); + + block_on(mm_node_2.wait_for_log(60., |log| log.contains("Received message ChannelReady"))).unwrap(); + + let send_payment = block_on(mm_node_2.rpc(&json! 
({ + "userpass": mm_node_2.userpass, + "mmrpc": "2.0", + "method": "send_payment", + "params": { + "coin": "tBTC-TEST-lightning", + "payment": { + "type": "keysend", + "destination": node_1_id, + "amount_in_msat": 1000, + "expiry": 24 + } + }, + }))) + .unwrap(); + assert!(send_payment.0.is_success(), "!send_payment: {}", send_payment.1); + + let send_payment_res: Json = json::from_str(&send_payment.1).unwrap(); + log!("send_payment_res {:?}", send_payment_res); + let payment_hash = send_payment_res["result"]["payment_hash"].as_str().unwrap(); + + block_on(mm_node_2.wait_for_log(60., |log| log.contains("Successfully sent payment"))).unwrap(); + + // Check payment on the sending node side + let get_payment_details = block_on(mm_node_2.rpc(&json! ({ + "userpass": mm_node_2.userpass, + "mmrpc": "2.0", + "method": "get_payment_details", + "params": { + "coin": "tBTC-TEST-lightning", + "payment_hash": payment_hash + }, + }))) + .unwrap(); + assert!( + get_payment_details.0.is_success(), + "!get_payment_details: {}", + get_payment_details.1 + ); + + let get_payment_details_res: Json = json::from_str(&get_payment_details.1).unwrap(); + let payment = &get_payment_details_res["result"]["payment_details"]; + assert_eq!(payment["status"], "succeeded"); + assert_eq!(payment["amount_in_msat"], 1000); + assert_eq!(payment["payment_type"]["type"], "Outbound Payment"); + + // Check payment on the receiving node side + let get_payment_details = block_on(mm_node_1.rpc(&json! 
({ + "userpass": mm_node_1.userpass, + "mmrpc": "2.0", + "method": "get_payment_details", + "params": { + "coin": "tBTC-TEST-lightning", + "payment_hash": payment_hash + }, + }))) + .unwrap(); + assert!( + get_payment_details.0.is_success(), + "!get_payment_details: {}", + get_payment_details.1 + ); + + let get_payment_details_res: Json = json::from_str(&get_payment_details.1).unwrap(); + let payment = &get_payment_details_res["result"]["payment_details"]; + assert_eq!(payment["status"], "succeeded"); + assert_eq!(payment["amount_in_msat"], 1000); + assert_eq!(payment["payment_type"]["type"], "Inbound Payment"); + + block_on(mm_node_1.stop()).unwrap(); + block_on(mm_node_2.stop()).unwrap(); +} + #[test] #[cfg(not(target_arch = "wasm32"))] fn test_sign_verify_message_lightning() { @@ -346,19 +446,10 @@ fn test_sign_verify_message_lightning() { "protocol_data":{ "platform": "tBTC-TEST-segwit", "network": "testnet", - "confirmations": { - "background": { - "default_fee_per_kb": 1012, - "n_blocks": 12 - }, - "normal": { - "default_fee_per_kb": 8000, - "n_blocks": 6 - }, - "high_priority": { - "default_fee_per_kb": 20000, - "n_blocks": 1 - } + "confirmation_targets": { + "background": 12, + "normal": 6, + "high_priority": 1 } } } diff --git a/mm2src/mm2_main/src/mm2_tests/structs.rs b/mm2src/mm2_main/src/mm2_tests/structs.rs index 6482444f83..214201489c 100644 --- a/mm2src/mm2_main/src/mm2_tests/structs.rs +++ b/mm2src/mm2_main/src/mm2_tests/structs.rs @@ -622,6 +622,13 @@ pub struct ZcoinActivationResult { pub wallet_balance: EnableCoinBalance, } +#[derive(Debug, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct UtxoStandardActivationResult { + pub current_block: u64, + pub wallet_balance: EnableCoinBalance, +} + #[derive(Debug, Deserialize)] #[serde(deny_unknown_fields)] pub struct InitTaskResult { @@ -643,6 +650,14 @@ pub enum InitZcoinStatus { UserActionRequired(Json), } +#[derive(Debug, Deserialize)] +#[serde(deny_unknown_fields, tag = "status", content = 
"details")] +pub enum InitUtxoStatus { + Ready(MmRpcResult), + InProgress(Json), + UserActionRequired(Json), +} + #[derive(Debug, Deserialize)] #[serde(deny_unknown_fields, tag = "status", content = "details")] pub enum WithdrawStatus { diff --git a/mm2src/mm2_main/src/rpc/dispatcher/dispatcher.rs b/mm2src/mm2_main/src/rpc/dispatcher/dispatcher.rs index fbcdd19606..fa6e951adb 100644 --- a/mm2src/mm2_main/src/rpc/dispatcher/dispatcher.rs +++ b/mm2src/mm2_main/src/rpc/dispatcher/dispatcher.rs @@ -37,9 +37,10 @@ use serde_json::{self as json, Value as Json}; use std::net::SocketAddr; cfg_native! { - use coins::lightning::{close_channel, connect_to_lightning_node, generate_invoice, get_channel_details, - get_claimable_balances, get_payment_details, list_closed_channels_by_filter, list_open_channels_by_filter, list_payments_by_filter, open_channel, - send_payment, LightningCoin}; + use coins::lightning::{add_trusted_node, close_channel, connect_to_lightning_node, generate_invoice, get_channel_details, + get_claimable_balances, get_payment_details, list_closed_channels_by_filter, list_open_channels_by_filter, + list_payments_by_filter, list_trusted_nodes, open_channel, remove_trusted_node, send_payment, update_channel, + LightningCoin}; use coins::{SolanaCoin, SplToken}; use coins::z_coin::ZCoin; } @@ -179,9 +180,14 @@ async fn dispatcher_v2(request: MmRpcRequest, ctx: MmArc) -> DispatcherResult handle_mmrpc(ctx, request, withdraw_user_action).await, #[cfg(not(target_arch = "wasm32"))] native_only_methods => match native_only_methods { + "add_trusted_node" => handle_mmrpc(ctx, request, add_trusted_node).await, "close_channel" => handle_mmrpc(ctx, request, close_channel).await, "connect_to_lightning_node" => handle_mmrpc(ctx, request, connect_to_lightning_node).await, "enable_lightning" => handle_mmrpc(ctx, request, enable_l2::).await, + "enable_solana_with_tokens" => { + handle_mmrpc(ctx, request, enable_platform_coin_with_tokens::).await + }, + "enable_spl" => 
handle_mmrpc(ctx, request, enable_token::).await, "generate_invoice" => handle_mmrpc(ctx, request, generate_invoice).await, "get_channel_details" => handle_mmrpc(ctx, request, get_channel_details).await, "get_claimable_balances" => handle_mmrpc(ctx, request, get_claimable_balances).await, @@ -192,12 +198,11 @@ async fn dispatcher_v2(request: MmRpcRequest, ctx: MmArc) -> DispatcherResult handle_mmrpc(ctx, request, list_closed_channels_by_filter).await, "list_open_channels_by_filter" => handle_mmrpc(ctx, request, list_open_channels_by_filter).await, "list_payments_by_filter" => handle_mmrpc(ctx, request, list_payments_by_filter).await, + "list_trusted_nodes" => handle_mmrpc(ctx, request, list_trusted_nodes).await, "open_channel" => handle_mmrpc(ctx, request, open_channel).await, + "remove_trusted_node" => handle_mmrpc(ctx, request, remove_trusted_node).await, "send_payment" => handle_mmrpc(ctx, request, send_payment).await, - "enable_solana_with_tokens" => { - handle_mmrpc(ctx, request, enable_platform_coin_with_tokens::).await - }, - "enable_spl" => handle_mmrpc(ctx, request, enable_token::).await, + "update_channel" => handle_mmrpc(ctx, request, update_channel).await, "z_coin_tx_history" => handle_mmrpc(ctx, request, coins::my_tx_history_v2::z_coin_tx_history_rpc).await, _ => MmError::err(DispatcherError::NoSuchMethod), }, diff --git a/mm2src/mm2_main/src/rpc/lp_commands/lp_commands_legacy.rs b/mm2src/mm2_main/src/rpc/lp_commands/lp_commands_legacy.rs index ad4b6c527c..158ca32a79 100644 --- a/mm2src/mm2_main/src/rpc/lp_commands/lp_commands_legacy.rs +++ b/mm2src/mm2_main/src/rpc/lp_commands/lp_commands_legacy.rs @@ -71,6 +71,11 @@ pub async fn disable_coin(ctx: MmArc, req: Json) -> Result>, St .map_err(|e| ERRL!("{}", e)); } + // If the coin is a Lightning Coin, we need to drop it's background processor first to + // persist the latest state to the filesystem. 
+ #[cfg(not(target_arch = "wasm32"))] + ctx.background_processors.lock().unwrap().remove(&ticker); + try_s!(disable_coin_impl(&ctx, &ticker).await); let res = json!({ "result": { diff --git a/mm2src/mm2_test_helpers/src/for_tests.rs b/mm2src/mm2_test_helpers/src/for_tests.rs index 1da3095820..2372c228d3 100644 --- a/mm2src/mm2_test_helpers/src/for_tests.rs +++ b/mm2src/mm2_test_helpers/src/for_tests.rs @@ -277,6 +277,56 @@ pub fn atom_testnet_conf() -> Json { }) } +pub fn btc_with_spv_conf() -> Json { + json!({ + "coin": "BTC", + "asset":"BTC", + "pubtype": 0, + "p2shtype": 5, + "wiftype": 128, + "segwit": true, + "bech32_hrp": "bc", + "txfee": 0, + "estimate_fee_mode": "ECONOMICAL", + "required_confirmations": 0, + "enable_spv_proof": true, + "protocol": { + "type": "UTXO" + }, + "block_headers_verification_params": { + "difficulty_check": true, + "constant_difficulty": false, + "difficulty_algorithm": "Bitcoin Mainnet", + "genesis_block_header": "010000006fe28c0ab6f1b372c1a6a246ae63f74f931e8365e15a089c68d6190000000000982051fd1e4ba744bbbe680e1fee14677ba1a3c3540bf7b1cdb606e857233e0e61bc6649ffff001d01e36299" + } + }) +} + +pub fn tbtc_with_spv_conf() -> Json { + json!({ + "coin": "tBTC-TEST", + "asset":"tBTC-TEST", + "pubtype": 0, + "p2shtype": 5, + "wiftype": 128, + "segwit": true, + "bech32_hrp": "tb", + "txfee": 0, + "estimate_fee_mode": "ECONOMICAL", + "required_confirmations": 0, + "enable_spv_proof": true, + "protocol": { + "type": "UTXO" + }, + "block_headers_verification_params": { + "difficulty_check": true, + "constant_difficulty": false, + "difficulty_algorithm": "Bitcoin Testnet", + "genesis_block_header": "0100000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000bac8b0fa927c0ac8234287e33c5f74d38d354820e24756ad709d7038fc5f31f020e7494dffff001d03e4b672" + } + }) +} + #[cfg(target_arch = "wasm32")] pub fn mm_ctx_with_custom_db() -> MmArc { MmCtxBuilder::new().with_test_db_namespace().into_mm_arc() } @@ -1676,3 +1726,43 @@ pub async fn 
enable_tendermint(mm: &MarketMakerIt, coin: &str, rpc_urls: &[&str] ); json::from_str(&request.1).unwrap() } + +pub async fn init_utxo_electrum(mm: &MarketMakerIt, coin: &str, servers: Vec) -> Json { + let request = mm + .rpc(&json! ({ + "userpass": mm.userpass, + "method": "init_utxo", + "mmrpc": "2.0", + "params": { + "ticker": coin, + "activation_params": { + "mode": { + "rpc": "Electrum", + "rpc_data": { + "servers": servers + } + } + }, + } + })) + .await + .unwrap(); + assert_eq!(request.0, StatusCode::OK, "'init_z_coin' failed: {}", request.1); + json::from_str(&request.1).unwrap() +} + +pub async fn init_utxo_status(mm: &MarketMakerIt, task_id: u64) -> Json { + let request = mm + .rpc(&json! ({ + "userpass": mm.userpass, + "method": "init_utxo_status", + "mmrpc": "2.0", + "params": { + "task_id": task_id, + } + })) + .await + .unwrap(); + assert_eq!(request.0, StatusCode::OK, "'init_utxo_status' failed: {}", request.1); + json::from_str(&request.1).unwrap() +}