From 177f1643a537a9a5c10abd77edef8bfd9c7c57b6 Mon Sep 17 00:00:00 2001 From: Peter Shugalev Date: Wed, 28 Aug 2024 15:19:03 +0200 Subject: [PATCH 01/31] Initial spark name architecture --- src/chain.h | 5 ++ src/consensus/params.h | 3 + src/primitives/transaction.h | 3 +- src/sparkname.cpp | 110 +++++++++++++++++++++++++++++++++++ src/sparkname.h | 76 ++++++++++++++++++++++++ 5 files changed, 196 insertions(+), 1 deletion(-) create mode 100644 src/sparkname.cpp create mode 100644 src/sparkname.h diff --git a/src/chain.h b/src/chain.h index a62f14cb41..74a105393b 100644 --- a/src/chain.h +++ b/src/chain.h @@ -268,6 +268,11 @@ class CBlockIndex //! std::map {feature name} -> {block number when feature is re-enabled again, parameter} ActiveSporkMap activeDisablingSporks; + //! List of spark names that were created or extended in this block. Map of spark name to + std::map> addedSparkNames; + //! List of spark names that were removed in this block + std::map> removedSparkNames; + void SetNull() { phashBlock = NULL; diff --git a/src/consensus/params.h b/src/consensus/params.h index a2c7e48d8c..8566ae35bd 100644 --- a/src/consensus/params.h +++ b/src/consensus/params.h @@ -264,6 +264,9 @@ struct Params { int nSparkStartBlock; + int nSparkNamesStartBlock; + int nSparkNamesFee[21]; + int nLelantusGracefulPeriod; // Lelantus Blacklist diff --git a/src/primitives/transaction.h b/src/primitives/transaction.h index ed9bee3465..50cb5e7b55 100644 --- a/src/primitives/transaction.h +++ b/src/primitives/transaction.h @@ -36,7 +36,8 @@ enum { TRANSACTION_QUORUM_COMMITMENT = 6, TRANSACTION_SPORK = 7, TRANSACTION_LELANTUS = 8, - TRANSACTION_SPARK = 9 + TRANSACTION_SPARK = 9, + TRANSACTION_ALIAS = 10, }; /** An outpoint - a combination of a transaction hash and an index n into its vout */ diff --git a/src/sparkname.cpp b/src/sparkname.cpp new file mode 100644 index 0000000000..5ceaadf079 --- /dev/null +++ b/src/sparkname.cpp @@ -0,0 +1,110 @@ +#include "chain.h" +#include "libspark/spend_transaction.h" +#include "script/standard.h" +#include "base58.h" +#include "sparkname.h" + +CSparkNameManager *CSparkNameManager::sharedAliasManager = new CSparkNameManager(); + +bool CSparkNameManager::BlockConnected(CBlockIndex *pindex) +{ + for (const auto &entry : pindex->addedSparkNames) + sparkNames[entry.first] = entry.second; + + for (const auto &entry : pindex->removedSparkNames) + sparkNames.erase(entry.first); + + return true; +} + +bool CSparkNameManager::BlockDisconnected(CBlockIndex *pindex) +{ + for (const auto &entry : pindex->addedSparkNames) + sparkNames.erase(entry.first); + + for (const auto &entry : pindex->removedSparkNames) + sparkNames[entry.first] = entry.second; + + return true; +} + +std::set CSparkNameManager::GetSparkNames(int nHeight) +{ + std::set result; + for (const auto &entry : sparkNames) + if (entry.second.second >= nHeight) + result.insert(entry.first); + + return result; +} + +bool CSparkNameManager::GetSparkAddress(const std::string &name, int nHeight, spark::Address &address) +{ + auto it = sparkNames.find(name); + if (it == sparkNames.end() || it->second.second < nHeight) { + address = it->second.first; + return true; + } + else { + return false; + } +} + +bool CSparkNameManager::CheckSparkNameTx(const CTransaction &tx, int nHeight, CValidationState &state) +{ + const Consensus::Params &consensusParams = Params().GetConsensus(); + + if (!tx.IsSparkSpend()) + return state.Error("CheckSparkNameTx: not a spark name tx"); + + CSparkNameTxData sparkNameData; + + CDataStream 
serializedSpark(SER_NETWORK, PROTOCOL_VERSION); + serializedSpark.write((const char *)tx.vExtraPayload.data(), tx.vExtraPayload.size()); + const spark::Params *params = spark::Params::get_default(); + spark::SpendTransaction spendTransaction(params); + try { + serializedSpark >> spendTransaction; + if (serializedSpark.size() == 0) + // silently ignore + return true; + + serializedSpark >> sparkNameData; + } + catch (const std::exception &) { + return state.DoS(100, error("CheckSparkNameTx: failed to deserialize spend")); + } + + if (nHeight < consensusParams.nSparkNamesStartBlock) + return state.DoS(100, error("CheckSparkNameTx: spark names are not allowed before block %d", consensusParams.nSparkStartBlock)); + + if (sparkNameData.name.size() < 1 || sparkNameData.name.size() > 20) + return state.DoS(100, error("CheckSparkNameTx: invalid name length")); + + for (char c: sparkNameData.name) + if (!isalnum(c) && c != '-') + return state.DoS(100, error("CheckSparkNameTx: invalid name")); + + constexpr int nBlockPerYear = 365*24*24; // 24 blocks per hour + int nYears = (sparkNameData.sparkNameValidityBlocks + nBlockPerYear-1) / nBlockPerYear; + + if (sparkNameData.sparkNameValidityBlocks > nBlockPerYear * 5) + return state.DoS(100, error("CheckSparkNameTx: can't be valid for more than 5 years")); + + CAmount nameFee = consensusParams.nSparkNamesFee[sparkNameData.name.size()] * nYears; + CScript devPayoutScript = GetScriptForDestination(CBitcoinAddress(consensusParams.stage3DevelopmentFundAddress).Get()); + bool payoutFound = false; + for (const CTxOut &txout: tx.vout) + if (txout.scriptPubKey == devPayoutScript && txout.nValue >= nameFee) { + payoutFound = true; + break; + } + + if (!payoutFound) + return state.DoS(100, error("CheckSparkNameTx: name fee is either missing or insufficient")); + + if (sparkNames.count(sparkNameData.name) > 0 && sparkNames[sparkNameData.name].first.encode() != sparkNameData.sparkAddress.encode()) + return state.DoS(100, error("CheckSparkNameTx: name already exists")); + + return true; +} \ No newline at end of file diff --git a/src/sparkname.h b/src/sparkname.h new file mode 100644 index 0000000000..15c475b363 --- /dev/null +++ b/src/sparkname.h @@ -0,0 +1,76 @@ +#ifndef FIFO_SPARKNAME_H +#define FIFO_SPARKNAME_H + +#include +#include "consensus/validation.h" +#include "primitives/transaction.h" +#include "evo/evodb.h" +#include "libspark/keys.h" + +/* + * Spark alias transaction data. This is to be stored in the transaction's extra data field + * right after Spark data. The transaction is considered a Spark alias transaction if it spends + * to a transparent output designated as an alias output, has this data in the extra data field + * after Spark data, and has spent enough to cover the alias fee. 
+ */ +class CSparkNameTxData +{ +public: + static const uint16_t CURRENT_VERSION = 1; + +public: + uint16_t nVersion{CURRENT_VERSION}; // version + uint256 inputsHash; + + // 1-20 symbols, only alphanumeric characters and hyphens + std::string name; + // destination address for the alias + spark::Address sparkAddress; + // proof of ownership of the spark address + std::vector addressOwnershipProof; + // number of blocks the spark name is valid for + uint32_t sparkNameValidityBlocks{0}; + + ADD_SERIALIZE_METHODS; + + template + void SerializationOp(Stream &s, Operation ser_action) + { + READWRITE(nVersion); + READWRITE(inputsHash); + READWRITE(name); + READWRITE(sparkAddress); + READWRITE(addressOwnershipProof); + READWRITE(sparkNameValidityBlocks); + } +}; + +class CSparkNameManager +{ +private: + static CSparkNameManager *sharedAliasManager; + + std::map> sparkNames; + +public: + CSparkNameManager() {} + + // update the state with contents of spark name transactions containted in block + bool BlockConnected(CBlockIndex *pindex); + bool BlockDisconnected(CBlockIndex *pindex); + + bool CheckSparkNameTx(const CTransaction &tx, int nHeight, CValidationState &state); + + // test if the spark name tx is valid + bool IsSparkNameValid(const CTransaction &tx, CValidationState &state); + + // return all valid names + std::set GetSparkNames(int nHeight); + + // return the address associated with the spark name + bool GetSparkAddress(const std::string &name, int nHeight, spark::Address &address); + + static CSparkNameManager *GetAliasManager() { return sharedAliasManager; }; +}; + +#endif // FIRO_SPARKNAME_H \ No newline at end of file From 3eb4c37a5eb32e38ea77c24f53e18767ed211e0d Mon Sep 17 00:00:00 2001 From: levonpetrosyan93 Date: Sat, 5 Oct 2024 06:00:07 +0400 Subject: [PATCH 02/31] Spark address ownership proofs implemented. 
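The proof added here is a small Schnorr-style argument over the address components Q1 and Q2 and the diversifier hash H = SparkUtils::hash_div(d), bound to a caller-supplied message m through the new OWNERSHIP_V1 transcript label. A minimal usage sketch, assuming only the declarations added to libspark/keys.h below and mirroring the ownership_test.cpp that lands in the next patch (OwnershipProof itself is also added there):

    const spark::Params* params = spark::Params::get_test();

    // Derive the key hierarchy and a diversified address.
    spark::SpendKey spend_key(params);
    spark::FullViewKey full_view_key(spend_key);
    spark::IncomingViewKey incoming_view_key(full_view_key);
    spark::Address address(incoming_view_key, 12345);      // diversifier i

    // Message the proof is bound to; later patches bind it to a transaction hash.
    spark::Scalar m;
    m.randomize();

    spark::OwnershipProof proof;
    address.prove_own(m, spend_key, incoming_view_key, proof);
    bool ok = address.verify_own(m, proof);                 // verifier needs only the address and m
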
--- src/Makefile.am | 1 + src/Makefile.test.include | 1 + src/libspark/keys.cpp | 66 +++++++++++++++++++++++++++++++++++++++ src/libspark/keys.h | 11 +++++++ src/libspark/util.h | 1 + 5 files changed, 80 insertions(+) diff --git a/src/Makefile.am b/src/Makefile.am index ba939bc51f..ab3c2702fb 100644 --- a/src/Makefile.am +++ b/src/Makefile.am @@ -684,6 +684,7 @@ libspark_a_SOURCES = \ libspark/hash.cpp \ libspark/mint_transaction.h \ libspark/mint_transaction.cpp \ + libspark/ownership_proof.h \ libspark/spend_transaction.h \ libspark/spend_transaction.cpp \ libspark/f4grumble.h \ diff --git a/src/Makefile.test.include b/src/Makefile.test.include index 2abce15d3c..e4b5fb388a 100644 --- a/src/Makefile.test.include +++ b/src/Makefile.test.include @@ -92,6 +92,7 @@ BITCOIN_TESTS = \ liblelantus/test/sigma_extended_test.cpp \ libspark/test/transcript_test.cpp \ libspark/test/schnorr_test.cpp \ + libspark/test/ownership_test.cpp \ libspark/test/chaum_test.cpp \ libspark/test/bpplus_test.cpp \ libspark/test/grootle_test.cpp \ diff --git a/src/libspark/keys.cpp b/src/libspark/keys.cpp index 791c05a2bf..456e45e203 100644 --- a/src/libspark/keys.cpp +++ b/src/libspark/keys.cpp @@ -1,5 +1,6 @@ #include "keys.h" #include "../hash.h" +#include "transcript.h" namespace spark { @@ -243,4 +244,69 @@ unsigned char Address::decode(const std::string& str) { return network; } +Scalar Address::challenge(const Scalar& m, const GroupElement& A, const GroupElement& H) const { + Transcript transcript(LABEL_TRANSCRIPT_OWNERSHIP); + transcript.add("G", this->params->get_G()); + transcript.add("F", this->params->get_F()); + transcript.add("H", H); + transcript.add("A", A); + transcript.add("m", m); + transcript.add("d", this->d); + transcript.add("Q1", this->Q1); + transcript.add("Q2", this->Q2); + + return transcript.challenge("c"); +} + + +void Address::prove_own(const Scalar& m, + const SpendKey& spend_key, + const IncomingViewKey& incomingViewKey, + OwnershipProof& proof) const { + Scalar a, b, c; + a.randomize(); + b.randomize(); + c.randomize(); + + GroupElement H = SparkUtils::hash_div(this->d); + proof.A = H * a + this->params->get_G() * b + this->params->get_F() * c; + + if (proof.A.isInfinity()) { + throw std::invalid_argument("Bad Proof construction!"); + } + + Scalar x = challenge(m, proof.A, H); + + if (x.isZero()) { + throw std::invalid_argument("Unexpected challenge!"); + } + + Scalar x_sqr = x.square(); + + uint64_t i = incomingViewKey.get_diversifier(this->d); + proof.t1 = a + x * spend_key.get_s1(); + proof.t2 = b + x_sqr * spend_key.get_r(); + proof.t3 = c + x_sqr * (SparkUtils::hash_Q2(spend_key.get_s1(), i) + spend_key.get_s2()); +} + +bool Address::verify_own(const Scalar& m, + OwnershipProof& proof) const { + if (proof.A.isInfinity()) { + throw std::invalid_argument("Bad Ownership Proof!"); + } + + GroupElement H = SparkUtils::hash_div(this->d); + Scalar x = challenge(m, proof.A, H); + if (x.isZero()) { + throw std::invalid_argument("Unexpected challenge!"); + } + + Scalar x_sqr = x.square(); + + GroupElement left = proof.A + this->Q1 * x + this->Q2 * x_sqr; + GroupElement right = H * proof.t1 + this->params->get_G() * proof.t2 + this->params->get_F() * proof.t3; + + return left == right; +} + } diff --git a/src/libspark/keys.h b/src/libspark/keys.h index 4af8b25687..285e37ce41 100644 --- a/src/libspark/keys.h +++ b/src/libspark/keys.h @@ -4,6 +4,8 @@ #include "f4grumble.h" #include "params.h" #include "util.h" +#include "../uint256.h" +#include "ownership_proof.h" namespace spark { @@ -82,6 
+84,15 @@ class Address { std::string encode(const unsigned char network) const; unsigned char decode(const std::string& str); + Scalar challenge(const Scalar& m, const GroupElement& A, const GroupElement& H) const; + void prove_own(const Scalar& m, + const SpendKey& spend_key, + const IncomingViewKey& incomingViewKey, + OwnershipProof& proof) const; + + bool verify_own(const Scalar& m, + OwnershipProof& proof) const; + private: const Params* params; std::vector d; diff --git a/src/libspark/util.h b/src/libspark/util.h index 015c67e074..19b8c9298b 100644 --- a/src/libspark/util.h +++ b/src/libspark/util.h @@ -30,6 +30,7 @@ const std::string LABEL_TRANSCRIPT_BPPLUS = "BULLETPROOF_PLUS_V1"; const std::string LABEL_TRANSCRIPT_CHAUM = "CHAUM_V1"; const std::string LABEL_TRANSCRIPT_GROOTLE = "GROOTLE_V1"; const std::string LABEL_TRANSCRIPT_SCHNORR = "SCHNORR_V1"; +const std::string LABEL_TRANSCRIPT_OWNERSHIP = "OWNERSHIP_V1"; // Generator labels const std::string LABEL_GENERATOR_F = "F"; From 7534405985308b3aea9eeb2a691f0ad7c303b18b Mon Sep 17 00:00:00 2001 From: levonpetrosyan93 Date: Sun, 6 Oct 2024 04:55:10 +0400 Subject: [PATCH 03/31] Missing files added --- src/libspark/ownership_proof.h | 31 ++++++++ src/libspark/test/ownership_test.cpp | 110 +++++++++++++++++++++++++++ 2 files changed, 141 insertions(+) create mode 100644 src/libspark/ownership_proof.h create mode 100644 src/libspark/test/ownership_test.cpp diff --git a/src/libspark/ownership_proof.h b/src/libspark/ownership_proof.h new file mode 100644 index 0000000000..f8a881f821 --- /dev/null +++ b/src/libspark/ownership_proof.h @@ -0,0 +1,31 @@ +#ifndef FIRO_LIBSPARK_OWNERSHIP_PROOF_H +#define FIRO_LIBSPARK_OWNERSHIP_PROOF_H + +#include "params.h" + +namespace spark { + +class OwnershipProof{ +public: + inline std::size_t memoryRequired() const { + return Scalar::memoryRequired() * 3 + GroupElement::memoryRequired(); + } + + ADD_SERIALIZE_METHODS; + template + inline void SerializationOp(Stream& s, Operation ser_action) { + READWRITE(A); + READWRITE(t1); + READWRITE(t2); + READWRITE(t3); + } + +public: + GroupElement A; + Scalar t1; + Scalar t2; + Scalar t3; +}; +} + +#endif diff --git a/src/libspark/test/ownership_test.cpp b/src/libspark/test/ownership_test.cpp new file mode 100644 index 0000000000..5f23dcf6b5 --- /dev/null +++ b/src/libspark/test/ownership_test.cpp @@ -0,0 +1,110 @@ +#include "../keys.h" +#include "../../test/test_bitcoin.h" +#include + +namespace spark { + +BOOST_FIXTURE_TEST_SUITE(spark_address_ownership_tests, BasicTestingSetup) + +BOOST_AUTO_TEST_CASE(serialization) +{ + Scalar m; + m.randomize(); + + OwnershipProof proof; + + const Params* params; + params = Params::get_test(); + + // Generate keys + SpendKey spend_key(params); + FullViewKey full_view_key(spend_key); + IncomingViewKey incoming_view_key(full_view_key); + + // Generate address + const uint64_t i = 12345; + Address address(incoming_view_key, i); + address.prove_own(m, spend_key, incoming_view_key, proof); + + CDataStream serialized(SER_NETWORK, PROTOCOL_VERSION); + serialized << proof; + + OwnershipProof deserialized; + serialized >> deserialized; + + BOOST_CHECK(proof.A == deserialized.A); + BOOST_CHECK(proof.t1 == deserialized.t1); + BOOST_CHECK(proof.t2 == deserialized.t2); + BOOST_CHECK(proof.t3 == deserialized.t3); + +} + +BOOST_AUTO_TEST_CASE(completeness) +{ + Scalar m; + m.randomize(); + + OwnershipProof proof; + + const Params* params; + params = Params::get_test(); + + // Generate keys + SpendKey spend_key(params); + FullViewKey 
full_view_key(spend_key); + IncomingViewKey incoming_view_key(full_view_key); + + // Generate address + const uint64_t i = 12345; + Address address(incoming_view_key, i); + address.prove_own(m, spend_key, incoming_view_key, proof); + + CDataStream serialized(SER_NETWORK, PROTOCOL_VERSION); + serialized << proof; + + OwnershipProof deserialized; + serialized >> deserialized; + + BOOST_CHECK(address.verify_own(m, deserialized)); +} + +BOOST_AUTO_TEST_CASE(bad_proofs) +{ + Scalar m; + m.randomize(); + + OwnershipProof proof; + + const Params* params; + params = Params::get_test(); + + // Generate keys + SpendKey spend_key(params); + FullViewKey full_view_key(spend_key); + IncomingViewKey incoming_view_key(full_view_key); + + // Generate address + const uint64_t i = 12345; + Address address(incoming_view_key, i); + address.prove_own(m, spend_key, incoming_view_key, proof); + + OwnershipProof evil_proof1 = proof; + evil_proof1.A.randomize(); + BOOST_CHECK(!address.verify_own(m, evil_proof1)); + + OwnershipProof evil_proof2 = proof; + evil_proof2.t1.randomize(); + BOOST_CHECK(!address.verify_own(m, evil_proof2)); + + OwnershipProof evil_proof3 = proof; + evil_proof3.t2.randomize(); + BOOST_CHECK(!address.verify_own(m, evil_proof3)); + + OwnershipProof evil_proof4 = proof; + evil_proof4.t3.randomize(); + BOOST_CHECK(!address.verify_own(m, evil_proof4)); +} + +BOOST_AUTO_TEST_SUITE_END() + +} From 5e14aa444a3274dc3e4cc5f293206a3f35c9a071 Mon Sep 17 00:00:00 2001 From: Peter Shugalev Date: Sun, 13 Oct 2024 16:32:36 +0200 Subject: [PATCH 04/31] Check the ownership proof for spark names: initial implementation --- src/Makefile.am | 2 ++ src/secp256k1/src/cpp/Scalar.cpp | 12 ++++---- src/sparkname.cpp | 51 +++++++++++++++++++++++++++++++- src/sparkname.h | 5 +++- 4 files changed, 62 insertions(+), 8 deletions(-) diff --git a/src/Makefile.am b/src/Makefile.am index ab3c2702fb..057f0c128c 100644 --- a/src/Makefile.am +++ b/src/Makefile.am @@ -244,6 +244,7 @@ BITCOIN_CORE_H = \ sigma.h \ lelantus.h \ spark/state.h \ + sparkname.h \ blacklists.h \ coin_containers.h \ firo_params.h \ @@ -424,6 +425,7 @@ libbitcoin_server_a_SOURCES = \ bip47/paymentcode.cpp \ spark/state.cpp \ spark/primitives.cpp \ + sparkname.cpp \ coin_containers.cpp \ mtpstate.cpp \ $(BITCOIN_CORE_H) diff --git a/src/secp256k1/src/cpp/Scalar.cpp b/src/secp256k1/src/cpp/Scalar.cpp index be0b66515d..0d8f3e0285 100644 --- a/src/secp256k1/src/cpp/Scalar.cpp +++ b/src/secp256k1/src/cpp/Scalar.cpp @@ -214,7 +214,7 @@ Scalar& Scalar::randomize() { do { if (RAND_bytes(temp, 32) != 1) { - throw "Unable to generate random Scalar"; + throw std::runtime_error("Unable to generate random Scalar"); } generate(temp); } while (!this->isMember()); // we need to ensure, generated value is valid @@ -256,7 +256,7 @@ Scalar Scalar::hash(const unsigned char* data, size_t len) { secp256k1_scalar result; secp256k1_scalar_set_b32(&result,hash,&overflow); if (overflow) { - throw "Scalar: hashing overflowed"; + throw std::runtime_error("Scalar: hashing overflowed"); } Scalar result_(&result); result_.mod_p(); @@ -292,7 +292,7 @@ unsigned const char* Scalar::deserialize(unsigned const char* buffer) { secp256k1_scalar_set_b32(reinterpret_cast(value_), buffer, &overflow); if (overflow) { - throw "Scalar: decoding overflowed"; + throw std::runtime_error("Scalar: decoding overflowed"); } return buffer + 32; @@ -314,7 +314,7 @@ std::string Scalar::GetHex() const { void Scalar::SetHex(const std::string& str) { if (str.size() != 64) { - throw "Scalar: decoding invalid 
length"; + throw std::runtime_error("Scalar: decoding invalid length"); } std::array buffer; @@ -325,7 +325,7 @@ void Scalar::SetHex(const std::string& str) { if (::isxdigit(hexs[0]) && ::isxdigit(hexs[1])) { buffer[i] = strtol(hexs.c_str(), NULL, 16); } else { - throw "Scalar: decoding invalid hex"; + throw std::runtime_error("Scalar: decoding invalid hex"); } } @@ -334,7 +334,7 @@ void Scalar::SetHex(const std::string& str) { secp256k1_scalar_set_b32(reinterpret_cast(value_), buffer.data(), &overflow); if (overflow) { - throw "Scalar: decoding overflowed"; + throw std::runtime_error("Scalar: decoding overflowed"); } } diff --git a/src/sparkname.cpp b/src/sparkname.cpp index 5ceaadf079..f46af0526d 100644 --- a/src/sparkname.cpp +++ b/src/sparkname.cpp @@ -1,5 +1,8 @@ #include "chain.h" #include "libspark/spend_transaction.h" +#include "libspark/ownership_proof.h" +#include "libspark/keys.h" +#include "spark/state.h" #include "script/standard.h" #include "base58.h" #include "sparkname.h" @@ -63,12 +66,14 @@ bool CSparkNameManager::CheckSparkNameTx(const CTransaction &tx, int nHeight, CV serializedSpark.write((const char *)tx.vExtraPayload.data(), tx.vExtraPayload.size()); const spark::Params *params = spark::Params::get_default(); spark::SpendTransaction spendTransaction(params); + size_t sparkNameDataPos; try { serializedSpark >> spendTransaction; if (serializedSpark.size() == 0) // silently ignore return true; + sparkNameDataPos = tx.vExtraPayload.size() - serializedSpark.size(); serializedSpark >> sparkNameData; } catch (const std::exception &) { @@ -103,8 +108,52 @@ bool CSparkNameManager::CheckSparkNameTx(const CTransaction &tx, int nHeight, CV if (!payoutFound) return state.DoS(100, error("CheckSparkNameTx: name fee is either missing or insufficient")); - if (sparkNames.count(sparkNameData.name) > 0 && sparkNames[sparkNameData.name].first.encode() != sparkNameData.sparkAddress.encode()) + unsigned char sparkNetworkType = spark::GetNetworkType(); + if (sparkNames.count(sparkNameData.name) > 0 && + sparkNames[sparkNameData.name].first.encode(sparkNetworkType) != sparkNameData.sparkAddress) return state.DoS(100, error("CheckSparkNameTx: name already exists")); + // calculate the hash of the all the transaction except the spark ownership proof + CMutableTransaction txMutable(tx); + CSparkNameTxData sparkNameDataCopy = sparkNameData; + + txMutable.vExtraPayload.erase(txMutable.vExtraPayload.begin() + sparkNameDataPos, txMutable.vExtraPayload.end()); + sparkNameDataCopy.addressOwnershipProof.clear(); + CDataStream serializedSparkNameData(SER_NETWORK, PROTOCOL_VERSION); + serializedSparkNameData << sparkNameDataCopy; + txMutable.vExtraPayload.insert(txMutable.vExtraPayload.end(), serializedSparkNameData.begin(), serializedSparkNameData.end()); + + CHashWriter ss(SER_GETHASH, PROTOCOL_VERSION); + ss << txMutable; + spark::OwnershipProof ownershipProof; + + try { + CDataStream ownershipProofStream(SER_NETWORK, PROTOCOL_VERSION); + ownershipProofStream.write((const char *)sparkNameData.addressOwnershipProof.data(), sparkNameData.addressOwnershipProof.size()); + ownershipProofStream >> ownershipProof; + } + catch (const std::exception &) { + return state.DoS(100, error("CheckSparkNameTx: failed to deserialize ownership proof")); + } + + spark::Scalar m; + try { + m.SetHex(ss.GetHash().ToString()); + } + catch (const std::exception &) { + return state.DoS(100, error("CheckSparkNameTx: hash is out of range")); + } + + spark::Address sparkAddress; + try { + 
sparkAddress.decode(sparkNameData.sparkAddress); + } + catch (const std::exception &) { + return state.DoS(100, error("CheckSparkNameTx: cannot decode spark address")); + } + + if (!sparkAddress.verify_own(m, ownershipProof)) + return state.DoS(100, error("CheckSparkNameTx: ownership proof is invalid")); + return true; } \ No newline at end of file diff --git a/src/sparkname.h b/src/sparkname.h index 15c475b363..1db2af6f03 100644 --- a/src/sparkname.h +++ b/src/sparkname.h @@ -25,11 +25,13 @@ class CSparkNameTxData // 1-20 symbols, only alphanumeric characters and hyphens std::string name; // destination address for the alias - spark::Address sparkAddress; + std::string sparkAddress; // proof of ownership of the spark address std::vector addressOwnershipProof; // number of blocks the spark name is valid for uint32_t sparkNameValidityBlocks{0}; + // failsafe if the hash of the transaction data is can't be converted to a scalar for proof creation/verification + uint32_t hashFailsafe{0}; ADD_SERIALIZE_METHODS; @@ -42,6 +44,7 @@ class CSparkNameTxData READWRITE(sparkAddress); READWRITE(addressOwnershipProof); READWRITE(sparkNameValidityBlocks); + READWRITE(hashFailsafe); } }; From e520cc36134084b44754ac19861fe9a7fc603f0d Mon Sep 17 00:00:00 2001 From: Peter Shugalev Date: Sun, 17 Nov 2024 23:45:58 +0100 Subject: [PATCH 05/31] Fixes to the core part of spark names --- src/Makefile.qt.include | 12 +- src/qt/forms/createsparkname.ui | 227 ++++++++++++++++++++++++++++++++ src/sparkname.cpp | 37 ++++-- src/sparkname.h | 4 + src/txmempool.h | 3 + src/validation.cpp | 1 + 6 files changed, 266 insertions(+), 18 deletions(-) create mode 100644 src/qt/forms/createsparkname.ui diff --git a/src/Makefile.qt.include b/src/Makefile.qt.include index 613cc3178e..f6278971fa 100644 --- a/src/Makefile.qt.include +++ b/src/Makefile.qt.include @@ -119,7 +119,8 @@ QT_FORMS_UI = \ qt/forms/sendtopcodedialog.ui \ qt/forms/signverifymessagedialog.ui \ qt/forms/transactiondescdialog.ui \ - qt/forms/lelantusdialog.ui + qt/forms/lelantusdialog.ui \ + qt/forms/createsparkname.ui QT_MOC_CPP = \ qt/moc_addressbookpage.cpp \ @@ -181,7 +182,8 @@ QT_MOC_CPP = \ qt/moc_automintmodel.cpp \ qt/moc_automintnotification.cpp \ qt/moc_pcodemodel.cpp \ - qt/moc_sparkmodel.cpp + qt/moc_sparkmodel.cpp \ + qt/moc_createsparkname.cpp BITCOIN_MM = \ qt/macdockiconhandler.mm \ @@ -264,7 +266,8 @@ BITCOIN_QT_H = \ qt/lelantusdialog.h \ qt/lelantuscoincontroldialog.h \ qt/automintmodel.h \ - qt/sparkmodel.h + qt/sparkmodel.h \ + qt/createsparkname.h RES_ICONS = \ qt/res/icons/add.png \ @@ -454,7 +457,8 @@ BITCOIN_QT_WALLET_CPP = \ qt/lelantusdialog.cpp \ qt/lelantuscoincontroldialog.cpp \ qt/automintmodel.cpp \ - qt/sparkmodel.cpp + qt/sparkmodel.cpp \ + qt/createsparkname.cpp BITCOIN_QT_CPP = $(BITCOIN_QT_BASE_CPP) if TARGET_WINDOWS diff --git a/src/qt/forms/createsparkname.ui b/src/qt/forms/createsparkname.ui new file mode 100644 index 0000000000..eac9d1b271 --- /dev/null +++ b/src/qt/forms/createsparkname.ui @@ -0,0 +1,227 @@ + + + Dialog + + + Qt::WindowModal + + + + 0 + 0 + 752 + 393 + + + + + 0 + 0 + + + + Create spark name + + + + + 380 + 340 + 341 + 32 + + + + Qt::Horizontal + + + QDialogButtonBox::Cancel|QDialogButtonBox::Ok + + + + + + 20 + 110 + 711 + 51 + + + + + 0 + 0 + + + + Spark name is an alias to spark address. It lets you associate your address with easy to understand and rememberable name of your choice. Only alphanumberic symbols and dash are allowed. 
+ + + true + + + + + + 80 + 190 + 121 + 18 + + + + Spark name: + + + Qt::AlignRight|Qt::AlignTrailing|Qt::AlignVCenter + + + + + + 220 + 180 + 251 + 31 + + + + + + + 480 + 190 + 191 + 18 + + + + 1-20 characters + + + + + + 20 + 270 + 701 + 71 + + + + <html><head/><body><p>To get this spark name you must pay a fee of <span style=" font-weight:600;">1 FIRO</span>. Fee depends on spark name lengths, shorter names are more expensive. Fee can be payed only with private funds.</p></body></html> + + + true + + + + + + 70 + 230 + 151 + 18 + + + + Number of years: + + + + + + 220 + 220 + 51 + 31 + + + + + + + 10 + 30 + 181 + 20 + + + + Spark address: + + + Qt::AlignRight|Qt::AlignTrailing|Qt::AlignVCenter + + + + + + 210 + 20 + 521 + 31 + + + + + + + 610 + 60 + 121 + 27 + + + + Generate new + + + buttonBox + label + label_2 + plainTextEdit + label_3 + label_4 + label_5 + plainTextEdit_2 + plainTextEdit_3 + pushButton + label_6 + + + + + buttonBox + accepted() + Dialog + accept() + + + 248 + 254 + + + 157 + 274 + + + + + buttonBox + rejected() + Dialog + reject() + + + 316 + 260 + + + 286 + 274 + + + + + diff --git a/src/sparkname.cpp b/src/sparkname.cpp index f46af0526d..068c5deaa4 100644 --- a/src/sparkname.cpp +++ b/src/sparkname.cpp @@ -53,6 +53,27 @@ bool CSparkNameManager::GetSparkAddress(const std::string &name, int nHeight, sp } } +bool CSparkNameManager::ParseSparkNameTxData(const CTransaction &tx, spark::SpendTransaction &sparkTx, CSparkNameTxData &sparkNameData, size_t &sparkNameDataPos) +{ + CDataStream serializedSpark(SER_NETWORK, PROTOCOL_VERSION); + serializedSpark.write((const char *)tx.vExtraPayload.data(), tx.vExtraPayload.size()); + try { + serializedSpark >> sparkTx; + if (serializedSpark.size() == 0) { + // silently ignore, it's not a critical error to not have a spark name tx part + return false; + } + + sparkNameDataPos = tx.vExtraPayload.size() - serializedSpark.size(); + serializedSpark >> sparkNameData; + } + catch (const std::exception &) { + return false; + } + + return true; +} + bool CSparkNameManager::CheckSparkNameTx(const CTransaction &tx, int nHeight, CValidationState &state) { const Consensus::Params &consensusParams = Params().GetConsensus(); @@ -61,24 +82,12 @@ bool CSparkNameManager::CheckSparkNameTx(const CTransaction &tx, int nHeight, CV return state.Error("CheckSparkNameTx: not a spark name tx"); CSparkNameTxData sparkNameData; - - CDataStream serializedSpark(SER_NETWORK, PROTOCOL_VERSION); - serializedSpark.write((const char *)tx.vExtraPayload.data(), tx.vExtraPayload.size()); const spark::Params *params = spark::Params::get_default(); spark::SpendTransaction spendTransaction(params); size_t sparkNameDataPos; - try { - serializedSpark >> spendTransaction; - if (serializedSpark.size() == 0) - // silently ignore - return true; - sparkNameDataPos = tx.vExtraPayload.size() - serializedSpark.size(); - serializedSpark >> sparkNameData; - } - catch (const std::exception &) { - return state.DoS(100, error("CheckSparkNameTx: failed to deserialize spend")); - } + if (!ParseSparkNameTxData(tx, spendTransaction, sparkNameData, sparkNameDataPos)) + return state.DoS(100, error("CheckSparkNameTx: failed to parse spark name tx")); if (nHeight < consensusParams.nSparkNamesStartBlock) return state.DoS(100, error("CheckSparkNameTx: spark names are not allowed before block %d", consensusParams.nSparkStartBlock)); diff --git a/src/sparkname.h b/src/sparkname.h index 1db2af6f03..323d9f05e7 100644 --- a/src/sparkname.h +++ b/src/sparkname.h @@ -58,6 +58,10 @@ class 
CSparkNameManager public: CSparkNameManager() {} + // Parse spark name transaction data from the transaction. Sets fCriticalError to false if there is no name data found + // but the transaction is otherwise valid. Returns true if the transaction is a valid spark name transaction. + static bool ParseSparkNameTxData(const CTransaction &tx, spark::SpendTransaction &sparkTx, CSparkNameTxData &sparkNameData, size_t &sparkNameDataPos); + // update the state with contents of spark name transactions containted in block bool BlockConnected(CBlockIndex *pindex); bool BlockDisconnected(CBlockIndex *pindex); diff --git a/src/txmempool.h b/src/txmempool.h index c83ebb29a0..f15e61a211 100644 --- a/src/txmempool.h +++ b/src/txmempool.h @@ -523,6 +523,9 @@ class CTxMemPool lelantus::CLelantusMempoolState lelantusState; spark::CSparkMempoolState sparkState; + + std::set sparkNames; // used to rule out duplicate names + private: typedef std::map cacheMap; diff --git a/src/validation.cpp b/src/validation.cpp index 5cba58e368..90dd9d0d56 100644 --- a/src/validation.cpp +++ b/src/validation.cpp @@ -1011,6 +1011,7 @@ bool AcceptToMemoryPoolWorker(CTxMemPool& pool, CValidationState& state, const C return state.Invalid(false, REJECT_CONFLICT, "txn-mempool-conflict"); } } + } BOOST_FOREACH(const CTxOut &txout, tx.vout) From a5c6f142c1bc06d4d8b61657e585fc0e2a049a75 Mon Sep 17 00:00:00 2001 From: Peter Shugalev Date: Sun, 15 Dec 2024 22:37:55 +0100 Subject: [PATCH 06/31] Added additional field (core) --- contrib/bitcoin-qt.pro | 1 + src/Makefile.qt.include | 6 +- src/qt/addressbookpage.h | 1 + src/qt/createsparknamepage.cpp | 23 ++ src/qt/createsparknamepage.h | 25 ++ src/qt/forms/createsparkname.ui | 474 ++++++++++++++++++++------------ src/sparkname.cpp | 3 + src/sparkname.h | 3 + 8 files changed, 358 insertions(+), 178 deletions(-) create mode 100644 src/qt/createsparknamepage.cpp create mode 100644 src/qt/createsparknamepage.h diff --git a/contrib/bitcoin-qt.pro b/contrib/bitcoin-qt.pro index 7fe9e0a676..5fd22afd82 100644 --- a/contrib/bitcoin-qt.pro +++ b/contrib/bitcoin-qt.pro @@ -19,6 +19,7 @@ FORMS += \ ../src/qt/forms/transactiondescdialog.ui \ ../src/qt/forms/zerocoinpage.ui \ ../src/qt/forms/sendcoinsdialog.ui \ + ../src/qt/forms/createsparkname.ui RESOURCES += \ ../src/qt/bitcoin.qrc diff --git a/src/Makefile.qt.include b/src/Makefile.qt.include index f6278971fa..09b8a561f7 100644 --- a/src/Makefile.qt.include +++ b/src/Makefile.qt.include @@ -183,7 +183,7 @@ QT_MOC_CPP = \ qt/moc_automintnotification.cpp \ qt/moc_pcodemodel.cpp \ qt/moc_sparkmodel.cpp \ - qt/moc_createsparkname.cpp + qt/moc_createsparknamepage.cpp BITCOIN_MM = \ qt/macdockiconhandler.mm \ @@ -267,7 +267,7 @@ BITCOIN_QT_H = \ qt/lelantuscoincontroldialog.h \ qt/automintmodel.h \ qt/sparkmodel.h \ - qt/createsparkname.h + qt/createsparknamepage.h RES_ICONS = \ qt/res/icons/add.png \ @@ -458,7 +458,7 @@ BITCOIN_QT_WALLET_CPP = \ qt/lelantuscoincontroldialog.cpp \ qt/automintmodel.cpp \ qt/sparkmodel.cpp \ - qt/createsparkname.cpp + qt/createsparknamepage.cpp BITCOIN_QT_CPP = $(BITCOIN_QT_BASE_CPP) if TARGET_WINDOWS diff --git a/src/qt/addressbookpage.h b/src/qt/addressbookpage.h index 7ddbb65b5e..fca3286699 100644 --- a/src/qt/addressbookpage.h +++ b/src/qt/addressbookpage.h @@ -61,6 +61,7 @@ public Q_SLOTS: private: Ui::AddressBookPage *ui; + const PlatformStyle *platformStyle; AddressTableModel *model; Mode mode; Tabs tab; diff --git a/src/qt/createsparknamepage.cpp b/src/qt/createsparknamepage.cpp new file mode 100644 index 
0000000000..de234b91c2 --- /dev/null +++ b/src/qt/createsparknamepage.cpp @@ -0,0 +1,23 @@ +#if defined(HAVE_CONFIG_H) +#include "config/bitcoin-config.h" +#endif + +#include "createsparknamepage.h" +#include "ui_createsparkname.h" + +#include "platformstyle.h" + +#include +#include + +CreateSparkNamePage::CreateSparkNamePage(const PlatformStyle *platformStyle, QWidget *parent) : + QDialog(parent), + ui(new Ui::CreateSparkNamePage) +{ + ui->setupUi(this); +} + +CreateSparkNamePage::~CreateSparkNamePage() +{ + delete ui; +} diff --git a/src/qt/createsparknamepage.h b/src/qt/createsparknamepage.h new file mode 100644 index 0000000000..bb5590bb8a --- /dev/null +++ b/src/qt/createsparknamepage.h @@ -0,0 +1,25 @@ +#ifndef _QT_CREATESPARKNAMEPAGE_H +#define _QT_CREATESPARKNAMEPAGE_H + +#include + +namespace Ui { + class CreateSparkNamePage; +} + +class PlatformStyle; + +class CreateSparkNamePage : public QDialog +{ + Q_OBJECT + +public: + explicit CreateSparkNamePage(const PlatformStyle *platformStyle, QWidget *parent = 0); + ~CreateSparkNamePage(); + +private: + Ui::CreateSparkNamePage *ui; + +}; + +#endif // _QT_CREATESPARKNAMEPAGE_H \ No newline at end of file diff --git a/src/qt/forms/createsparkname.ui b/src/qt/forms/createsparkname.ui index eac9d1b271..f461915205 100644 --- a/src/qt/forms/createsparkname.ui +++ b/src/qt/forms/createsparkname.ui @@ -1,7 +1,7 @@ - Dialog - + CreateSparkNamePage + Qt::WindowModal @@ -9,192 +9,316 @@ 0 0 - 752 - 393 + 922 + 550 - - 0 - 0 + + 1 + 1 + + + 906 + 550 + + + + + 906 + 550 + + Create spark name - - - - 380 - 340 - 341 - 32 - - - - Qt::Horizontal - - - QDialogButtonBox::Cancel|QDialogButtonBox::Ok - - - - - - 20 - 110 - 711 - 51 - - - - - 0 - 0 - - - - Spark name is an alias to spark address. It lets you associate your address with easy to understand and rememberable name of your choice. Only alphanumberic symbols and dash are allowed. - - - true - - - - - - 80 - 190 - 121 - 18 - - - - Spark name: - - - Qt::AlignRight|Qt::AlignTrailing|Qt::AlignVCenter - - - - - - 220 - 180 - 251 - 31 - - - - - - - 480 - 190 - 191 - 18 - - - - 1-20 characters - - - - - - 20 - 270 - 701 - 71 - - - - <html><head/><body><p>To get this spark name you must pay a fee of <span style=" font-weight:600;">1 FIRO</span>. Fee depends on spark name lengths, shorter names are more expensive. Fee can be payed only with private funds.</p></body></html> - - - true - - - - - - 70 - 230 - 151 - 18 - - - - Number of years: - - - - - - 220 - 220 - 51 - 31 - - - - - - - 10 - 30 - 181 - 20 - - - - Spark address: - - - Qt::AlignRight|Qt::AlignTrailing|Qt::AlignVCenter - - - - - - 210 - 20 - 521 - 31 - - - - - - - 610 - 60 - 121 - 27 - - - - Generate new - - - buttonBox - label - label_2 - plainTextEdit - label_3 - label_4 - label_5 - plainTextEdit_2 - plainTextEdit_3 - pushButton - label_6 + + true + + + true + + + + + + QLayout::SetDefaultConstraint + + + + + 20 + + + + + + 0 + 0 + + + + Spark Names serve as a convenient way to associate your Spark Address with a memorable name of your choice. This way you can share your Spark Names to other users for them to send funds to you (for e.g. @sparky) instead of a long Spark Address while protecting your privacy. 
+ + + true + + + + + + + + + + + QLayout::SetDefaultConstraint + + + + + Qt::Horizontal + + + + 40 + 20 + + + + + + + + Spark address: + + + Qt::AlignRight|Qt::AlignTrailing|Qt::AlignVCenter + + + + + + + + + + + + + + 10 + + + + + Qt::Horizontal + + + + 40 + 20 + + + + + + + + Generate new + + + + + + + + + + + QLayout::SetFixedSize + + + + + Qt::Horizontal + + + + 40 + 20 + + + + + + + + Spark name: + + + Qt::AlignRight|Qt::AlignTrailing|Qt::AlignVCenter + + + + + + + + + + + + + + 1-20 characters + + + + + + + Qt::Horizontal + + + + 40 + 20 + + + + + + + + + + + + 30 + + + + + + + Qt::Horizontal + + + + 40 + 20 + + + + + + + + Number of years: + + + Qt::AlignRight|Qt::AlignTrailing|Qt::AlignVCenter + + + + + + + + + + + + + + Qt::Horizontal + + + + 40 + 20 + + + + + + + + + + + + 20 + + + + + <html><head/><body><p>To get this spark name you must pay a fee of <span style=" font-weight:600;">1 FIRO</span>. Fee depends on spark name lengths, shorter names are more expensive. Fee can be paid only with private funds.</p></body></html> + + + true + + + + + + + + + + + Additional information you want to associate with this spark name (max. 1024 symbols): + + + + + + + + + 20 + + + + + + 0 + 50 + + + + + 16777215 + 16777215 + + + + + + + + + + Qt::Horizontal + + + QDialogButtonBox::Cancel|QDialogButtonBox::Ok + + + + + + buttonBox accepted() - Dialog + CreateSparkNamePage accept() @@ -210,7 +334,7 @@ buttonBox rejected() - Dialog + CreateSparkNamePage reject() diff --git a/src/sparkname.cpp b/src/sparkname.cpp index 068c5deaa4..603f83bf73 100644 --- a/src/sparkname.cpp +++ b/src/sparkname.cpp @@ -117,6 +117,9 @@ bool CSparkNameManager::CheckSparkNameTx(const CTransaction &tx, int nHeight, CV if (!payoutFound) return state.DoS(100, error("CheckSparkNameTx: name fee is either missing or insufficient")); + if (sparkNameData.additionalInfo.size() > 1024) + return state.DoS(100, error("CheckSparkNameTx: additional info is too long")); + unsigned char sparkNetworkType = spark::GetNetworkType(); if (sparkNames.count(sparkNameData.name) > 0 && sparkNames[sparkNameData.name].first.encode(sparkNetworkType) != sparkNameData.sparkAddress) diff --git a/src/sparkname.h b/src/sparkname.h index 323d9f05e7..6e29a17e44 100644 --- a/src/sparkname.h +++ b/src/sparkname.h @@ -30,6 +30,8 @@ class CSparkNameTxData std::vector addressOwnershipProof; // number of blocks the spark name is valid for uint32_t sparkNameValidityBlocks{0}; + // additional information, string, up to 1024 symbols. Can be used for future extensions (e.g. 
for storing a web link) + std::string additionalInfo; // failsafe if the hash of the transaction data is can't be converted to a scalar for proof creation/verification uint32_t hashFailsafe{0}; @@ -44,6 +46,7 @@ class CSparkNameTxData READWRITE(sparkAddress); READWRITE(addressOwnershipProof); READWRITE(sparkNameValidityBlocks); + READWRITE(additionalInfo); READWRITE(hashFailsafe); } }; From 450bc656358507e0e8b3dc46a3ae6174aad30ef9 Mon Sep 17 00:00:00 2001 From: Peter Shugalev Date: Sat, 21 Dec 2024 22:36:37 +0100 Subject: [PATCH 07/31] Consensus parameters for spark names --- src/chainparams.cpp | 23 +++++++++++++++++++++++ src/consensus/params.h | 2 +- 2 files changed, 24 insertions(+), 1 deletion(-) diff --git a/src/chainparams.cpp b/src/chainparams.cpp index 4c4137be35..e0e6971646 100644 --- a/src/chainparams.cpp +++ b/src/chainparams.cpp @@ -170,6 +170,13 @@ static Consensus::LLMQParams llmq400_85 = { .keepOldConnections = 5, }; +static std::array standardSparkNamesFee = { + -1, + 1000, + 100, + 10, 10, 10, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 +}; /** * Main network @@ -481,6 +488,10 @@ class CMainParams : public CChainParams { // exchange address consensus.nExchangeAddressStartBlock = consensus.nSparkStartBlock; + + // spark names + consensus.nSparkNamesStartBlock = INT_MAX; + consensus.nSparkNamesFee = standardSparkNamesFee; } virtual bool SkipUndoForBlock(int nHeight) const { @@ -783,6 +794,10 @@ class CTestNetParams : public CChainParams { // exchange address consensus.nExchangeAddressStartBlock = 147000; + + // spark names + consensus.nSparkNamesStartBlock = INT_MAX; + consensus.nSparkNamesFee = standardSparkNamesFee; } }; @@ -1027,6 +1042,10 @@ class CDevNetParams : public CChainParams { // exchange address consensus.nExchangeAddressStartBlock = 2500; + + // spark names + consensus.nSparkNamesStartBlock = 3500; + consensus.nSparkNamesFee = standardSparkNamesFee; } }; @@ -1267,6 +1286,10 @@ class CRegTestParams : public CChainParams { consensus.nPPSwitchTime = INT_MAX; consensus.nPPBlockNumber = INT_MAX; consensus.nInitialPPDifficulty = 0x2000ffff; + + // spark names + consensus.nSparkNamesStartBlock = 2000; + consensus.nSparkNamesFee = standardSparkNamesFee; } void UpdateBIP9Parameters(Consensus::DeploymentPos d, int64_t nStartTime, int64_t nTimeout) diff --git a/src/consensus/params.h b/src/consensus/params.h index 8566ae35bd..c1f95f0426 100644 --- a/src/consensus/params.h +++ b/src/consensus/params.h @@ -265,7 +265,7 @@ struct Params { int nSparkStartBlock; int nSparkNamesStartBlock; - int nSparkNamesFee[21]; + std::array nSparkNamesFee; int nLelantusGracefulPeriod; From cd6b4f3ced5da8a421610120097f2b2687e16026 Mon Sep 17 00:00:00 2001 From: Peter Shugalev Date: Sun, 22 Dec 2024 20:30:49 +0100 Subject: [PATCH 08/31] Fixed mempool bug --- src/sparkname.cpp | 2 +- src/sparkname.h | 4 ++-- src/validation.cpp | 6 +++++- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/src/sparkname.cpp b/src/sparkname.cpp index 603f83bf73..220cdcbc66 100644 --- a/src/sparkname.cpp +++ b/src/sparkname.cpp @@ -7,7 +7,7 @@ #include "base58.h" #include "sparkname.h" -CSparkNameManager *CSparkNameManager::sharedAliasManager = new CSparkNameManager(); +CSparkNameManager *CSparkNameManager::sharedSparkNameManager = new CSparkNameManager(); bool CSparkNameManager::BlockConnected(CBlockIndex *pindex) { diff --git a/src/sparkname.h b/src/sparkname.h index 6e29a17e44..a8e26f50e7 100644 --- a/src/sparkname.h +++ b/src/sparkname.h @@ -54,7 +54,7 @@ class CSparkNameTxData class 
CSparkNameManager { private: - static CSparkNameManager *sharedAliasManager; + static CSparkNameManager *sharedSparkNameManager; std::map> sparkNames; @@ -80,7 +80,7 @@ class CSparkNameManager // return the address associated with the spark name bool GetSparkAddress(const std::string &name, int nHeight, spark::Address &address); - static CSparkNameManager *GetAliasManager() { return sharedAliasManager; }; + static CSparkNameManager *GetInstance() { return sharedSparkNameManager; }; }; #endif // FIRO_SPARKNAME_H \ No newline at end of file diff --git a/src/validation.cpp b/src/validation.cpp index 90dd9d0d56..b7229313c0 100644 --- a/src/validation.cpp +++ b/src/validation.cpp @@ -51,6 +51,7 @@ #include "definition.h" #include "utiltime.h" #include "mtpstate.h" +#include "sparkname.h" #include "coins.h" @@ -1011,7 +1012,10 @@ bool AcceptToMemoryPoolWorker(CTxMemPool& pool, CValidationState& state, const C return state.Invalid(false, REJECT_CONFLICT, "txn-mempool-conflict"); } } - + + CSparkNameManager *sparkNameManager = CSparkNameManager::GetInstance(); + if (!sparkNameManager->CheckSparkNameTx(tx, chainActive.Height(), state)) + return false; } BOOST_FOREACH(const CTxOut &txout, tx.vout) From 200df4471367c5eac2f0547c26ac57fe315793bd Mon Sep 17 00:00:00 2001 From: Peter Shugalev Date: Sun, 12 Jan 2025 22:34:18 +0100 Subject: [PATCH 09/31] Fixes in spark name conflict resolution --- src/spark/state.h | 4 ++++ src/sparkname.cpp | 5 ++++- src/sparkname.h | 10 +++++++++- src/txmempool.h | 2 +- src/validation.cpp | 11 ++++++++++- 5 files changed, 28 insertions(+), 4 deletions(-) diff --git a/src/spark/state.h b/src/spark/state.h index c8883875ef..b3d414dff4 100644 --- a/src/spark/state.h +++ b/src/spark/state.h @@ -10,6 +10,7 @@ #include "../libspark/mint_transaction.h" #include "../libspark/spend_transaction.h" #include "primitives.h" +#include "sparkname.h" namespace spark_mintspend { class spark_mintspend_test; } @@ -28,6 +29,9 @@ class CSparkTxInfo { std::unordered_map spentLTags; std::unordered_map ltagTxhash; + // spark names + std::map sparkNames; + // information about transactions in the block is complete bool fInfoIsComplete; diff --git a/src/sparkname.cpp b/src/sparkname.cpp index 220cdcbc66..e1cc7d6077 100644 --- a/src/sparkname.cpp +++ b/src/sparkname.cpp @@ -74,7 +74,7 @@ bool CSparkNameManager::ParseSparkNameTxData(const CTransaction &tx, spark::Spen return true; } -bool CSparkNameManager::CheckSparkNameTx(const CTransaction &tx, int nHeight, CValidationState &state) +bool CSparkNameManager::CheckSparkNameTx(const CTransaction &tx, int nHeight, CValidationState &state, CSparkNameTxData *outSparkNameData) { const Consensus::Params &consensusParams = Params().GetConsensus(); @@ -89,6 +89,9 @@ bool CSparkNameManager::CheckSparkNameTx(const CTransaction &tx, int nHeight, CV if (!ParseSparkNameTxData(tx, spendTransaction, sparkNameData, sparkNameDataPos)) return state.DoS(100, error("CheckSparkNameTx: failed to parse spark name tx")); + if (outSparkNameData) + *outSparkNameData = sparkNameData; + if (nHeight < consensusParams.nSparkNamesStartBlock) return state.DoS(100, error("CheckSparkNameTx: spark names are not allowed before block %d", consensusParams.nSparkStartBlock)); diff --git a/src/sparkname.h b/src/sparkname.h index a8e26f50e7..8a57123878 100644 --- a/src/sparkname.h +++ b/src/sparkname.h @@ -69,7 +69,7 @@ class CSparkNameManager bool BlockConnected(CBlockIndex *pindex); bool BlockDisconnected(CBlockIndex *pindex); - bool CheckSparkNameTx(const CTransaction &tx, int nHeight, 
CValidationState &state); + bool CheckSparkNameTx(const CTransaction &tx, int nHeight, CValidationState &state, CSparkNameTxData *outSparkNameData = nullptr); // test if the spark name tx is valid bool IsSparkNameValid(const CTransaction &tx, CValidationState &state); @@ -80,6 +80,14 @@ class CSparkNameManager // return the address associated with the spark name bool GetSparkAddress(const std::string &name, int nHeight, spark::Address &address); + // resolution of conflicts (e.g. for mempool) + // TxSet is a set of transactions that might be in conflict with the txData. Should implement contains() method + template + static bool IsInConflict(CSparkNameTxData &txData, const TxSet &txSet) + { + return txSet.find(txData.name) != txSet.cend(); + } + static CSparkNameManager *GetInstance() { return sharedSparkNameManager; }; }; diff --git a/src/txmempool.h b/src/txmempool.h index f15e61a211..16370c32ad 100644 --- a/src/txmempool.h +++ b/src/txmempool.h @@ -524,7 +524,7 @@ class CTxMemPool lelantus::CLelantusMempoolState lelantusState; spark::CSparkMempoolState sparkState; - std::set sparkNames; // used to rule out duplicate names + std::map sparkNames; // used to rule out duplicate names private: typedef std::map cacheMap; diff --git a/src/validation.cpp b/src/validation.cpp index b7229313c0..9609811d68 100644 --- a/src/validation.cpp +++ b/src/validation.cpp @@ -929,6 +929,8 @@ bool AcceptToMemoryPoolWorker(CTxMemPool& pool, CValidationState& state, const C std::vector sparkMintCoins; std::vector sparkUsedLTags; + CSparkNameTxData sparkNameData; + { LOCK(pool.cs); if (tx.IsSigmaSpend()) { @@ -1014,8 +1016,12 @@ bool AcceptToMemoryPoolWorker(CTxMemPool& pool, CValidationState& state, const C } CSparkNameManager *sparkNameManager = CSparkNameManager::GetInstance(); - if (!sparkNameManager->CheckSparkNameTx(tx, chainActive.Height(), state)) + if (!sparkNameManager->CheckSparkNameTx(tx, chainActive.Height(), state, &sparkNameData)) return false; + + if (CSparkNameManager::IsInConflict(sparkNameData, pool.sparkNames)) { + return state.Invalid(false, REJECT_CONFLICT, "txn-mempool-conflict"); + } } BOOST_FOREACH(const CTxOut &txout, tx.vout) @@ -1625,6 +1631,9 @@ bool AcceptToMemoryPoolWorker(CTxMemPool& pool, CValidationState& state, const C pool.sparkState.AddSpendToMempool(usedLTag, hash); } + if (!sparkNameData.name.empty()) + pool.sparkNames[sparkNameData.name] = hash; + #ifdef ENABLE_WALLET if (!GetBoolArg("-disablewallet", false) && pwalletMain->sparkWallet) { LogPrintf("Adding spends to wallet from Mempool..\n"); From c1970e75e57b5045a4b0394a5a5f557901fac0bb Mon Sep 17 00:00:00 2001 From: levoncrypto Date: Thu, 16 Jan 2025 00:55:10 +0400 Subject: [PATCH 10/31] RPCs for spark names --- src/rpc/blockchain.cpp | 88 ++++++++++++++++++++++++++++++++++++++++++ src/sparkname.cpp | 42 ++++++++++++++++++++ src/sparkname.h | 4 ++ 3 files changed, 134 insertions(+) diff --git a/src/rpc/blockchain.cpp b/src/rpc/blockchain.cpp index 6578d87eb3..1ce4ee5788 100644 --- a/src/rpc/blockchain.cpp +++ b/src/rpc/blockchain.cpp @@ -25,6 +25,7 @@ #include "evo/providertx.h" #include "evo/deterministicmns.h" #include "evo/cbtx.h" +#include "../sparkname.h" #include "llmq/quorums_chainlocks.h" #include "llmq/quorums_instantsend.h" @@ -176,6 +177,91 @@ UniValue getblockcount(const JSONRPCRequest& request) return chainActive.Height(); } +UniValue getsparknames(const JSONRPCRequest &request) +{ + if (request.fHelp || request.params.size() > 1) { + throw std::runtime_error( + "getsparknames ( height )\n" + "\nReturns a 
list of all Spark names.\n" + "\nArguments:\n" + "1. height (numeric, optional) The block height to filter Spark names (default is the spark names start block height).\n" + "\nResult:\n" + "[\n" + " \"name1\", (string) The Spark name and address\n" + " \"name2\", (string) Another Spark name and address\n" + " ...\n" + "]\n" + "\nExamples:\n" + + HelpExampleCli("getsparknames", "1000") + + HelpExampleRpc("getsparknames", "1000") + ); + } + + LOCK(cs_main); + + const Consensus::Params &consensusParams = Params().GetConsensus(); + int nHeight = consensusParams.nSparkNamesStartBlock; + if (request.params.size() == 1) { + nHeight = request.params[0].get_int(); + } + CSparkNameManager *sparkNameManager = CSparkNameManager::GetInstance(); + std::set sparkNames = sparkNameManager->GetSparkNames(nHeight); + UniValue result(UniValue::VARR); + for (const auto &name : sparkNames) { + result.push_back(name); + unsigned char network = spark::GetNetworkType(); + spark::Address SparkAddr; + sparkNameManager->GetSparkAddress(name, chainActive.Tip()->nHeight, SparkAddr); + std::string strAddress = SparkAddr.encode(network); + result.push_back(strAddress); + } + return result; +} + +UniValue getsparknamedata(const JSONRPCRequest& request) +{ + if (request.fHelp || request.params.size() > 1) { + throw std::runtime_error( + "getsparknamedata ( sparkname )\n" + "\nReturns info about spark name.\n" + "\nArguments:\n" + "Spark name (string)\n" + "\nResult:\n" + "[\n" + "1. Address (string)\n" + "2. Block Height (string)\n" + "3. TxId (string)\n" + "]\n" + "\nExamples:\n" + + HelpExampleCli("getsparknamedata", "sparkname") + + HelpExampleRpc("getsparknamedata", "sparkname") + ); + } + + LOCK(cs_main); + + std::string sparkName = request.params[0].get_str(); + CSparkNameManager *sparkNameManager = CSparkNameManager::GetInstance(); + + spark::Address SparkAddr; + sparkNameManager->GetSparkAddress(sparkName, chainActive.Tip()->nHeight, SparkAddr); + + UniValue result(UniValue::VARR); + unsigned char network = spark::GetNetworkType(); + + std::string strAddress = SparkAddr.encode(network); + result.push_back(strAddress); + + uint64_t nameBlockHeight = sparkNameManager->GetSparkNameBlockHeight(sparkName); + result.push_back(nameBlockHeight); + + std::string sparkNameTxId = sparkNameManager->GetSparkNameTxID(sparkName); + result.push_back(sparkNameTxId); + + return result; + +} + UniValue getbestblockhash(const JSONRPCRequest& request) { if (request.fHelp || request.params.size() != 0) @@ -1658,6 +1744,8 @@ static const CRPCCommand commands[] = { "blockchain", "getblockchaininfo", &getblockchaininfo, true, {} }, { "blockchain", "getbestblockhash", &getbestblockhash, true, {} }, { "blockchain", "getblockcount", &getblockcount, true, {} }, + { "blockchain", "getsparknames", &getsparknames, true, {} }, + { "blockchain", "getsparknamedata", &getsparknamedata, true, {} }, { "blockchain", "getblock", &getblock, true, {"blockhash","verbose"} }, { "blockchain", "getblockhash", &getblockhash, true, {"height"} }, { "blockchain", "getblockhashes", &getblockhashes, true, {"high", "low"} }, diff --git a/src/sparkname.cpp b/src/sparkname.cpp index e1cc7d6077..98f85c9753 100644 --- a/src/sparkname.cpp +++ b/src/sparkname.cpp @@ -6,6 +6,7 @@ #include "script/standard.h" #include "base58.h" #include "sparkname.h" +#include "validation.h" CSparkNameManager *CSparkNameManager::sharedSparkNameManager = new CSparkNameManager(); @@ -53,6 +54,47 @@ bool CSparkNameManager::GetSparkAddress(const std::string &name, int nHeight, sp } } +uint64_t 
CSparkNameManager::GetSparkNameBlockHeight(const std::string &name) const +{ + auto it = sparkNames.find(name); + if (it == sparkNames.end()) + throw std::runtime_error("Spark name not found: " + name); + + size_t height = it->second.second; + return height; +} + +std::string CSparkNameManager::GetSparkNameTxID(const std::string &name) const +{ + auto it = sparkNames.find(name); + if (it == sparkNames.end()) + throw std::runtime_error("Spark name not found: " + name); + + uint32_t blockHeight = it->second.second; + + CBlockIndex* pBlockIndex = chainActive[blockHeight]; + if (!pBlockIndex) + throw std::runtime_error("Block not found at height: " + std::to_string(blockHeight)); + + CBlock block; + if (!ReadBlockFromDisk(block, pBlockIndex, Params().GetConsensus())) + throw std::runtime_error("Failed to read block from disk."); + + CSparkNameManager *sparkNameManager = CSparkNameManager::GetInstance(); + for (const CTransactionRef& tx : block.vtx) + { + CSparkNameTxData sparkNameData; + CValidationState state; + + if (sparkNameManager->CheckSparkNameTx(*tx, blockHeight, state, &sparkNameData)) + { + return (*tx).GetHash().ToString(); + } + } + + throw std::runtime_error("Spark name transaction not found for: " + name); +} + bool CSparkNameManager::ParseSparkNameTxData(const CTransaction &tx, spark::SpendTransaction &sparkTx, CSparkNameTxData &sparkNameData, size_t &sparkNameDataPos) { CDataStream serializedSpark(SER_NETWORK, PROTOCOL_VERSION); diff --git a/src/sparkname.h b/src/sparkname.h index 8a57123878..173980b14b 100644 --- a/src/sparkname.h +++ b/src/sparkname.h @@ -89,6 +89,10 @@ class CSparkNameManager } static CSparkNameManager *GetInstance() { return sharedSparkNameManager; }; + + uint64_t GetSparkNameBlockHeight(const std::string &name) const; + + std::string GetSparkNameTxID(const std::string &name) const; }; #endif // FIRO_SPARKNAME_H \ No newline at end of file From 6878f4f03011752520b422e21cf850db03878661 Mon Sep 17 00:00:00 2001 From: Peter Shugalev Date: Wed, 15 Jan 2025 22:01:11 +0100 Subject: [PATCH 11/31] Additional API for spark names tx creation --- src/sparkname.cpp | 37 +++++++++++++++++++++++++++++++++++++ src/sparkname.h | 3 +++ 2 files changed, 40 insertions(+) diff --git a/src/sparkname.cpp b/src/sparkname.cpp index e1cc7d6077..a9f576c736 100644 --- a/src/sparkname.cpp +++ b/src/sparkname.cpp @@ -171,4 +171,41 @@ bool CSparkNameManager::CheckSparkNameTx(const CTransaction &tx, int nHeight, CV return state.DoS(100, error("CheckSparkNameTx: ownership proof is invalid")); return true; +} + +void CSparkNameManager::AppendSparkNameTxData(CMutableTransaction &txSparkSpend, CSparkNameTxData &sparkNameData, const spark::SpendKey &spendKey, const spark::IncomingViewKey &incomingViewKey) +{ + for (uint32_t n=0; ; n++) { + sparkNameData.addressOwnershipProof.clear(); + sparkNameData.hashFailsafe = n; + + CMutableTransaction txCopy(txSparkSpend); + CDataStream serializedSparkNameData(SER_NETWORK, PROTOCOL_VERSION); + serializedSparkNameData << sparkNameData; + txCopy.vExtraPayload.insert(txCopy.vExtraPayload.end(), serializedSparkNameData.begin(), serializedSparkNameData.end()); + + CHashWriter ss(SER_GETHASH, PROTOCOL_VERSION); + ss << txCopy; + + spark::Scalar m; + try { + m.SetHex(ss.GetHash().ToString()); + } + catch (const std::exception &) { + continue; // increase hashFailSafe and try again + } + + spark::Address sparkAddress; + spark::OwnershipProof ownershipProof; + + sparkAddress.decode(sparkNameData.sparkAddress); + sparkAddress.prove_own(m, spendKey, 
incomingViewKey, ownershipProof); + + CDataStream ownershipProofStream(SER_NETWORK, PROTOCOL_VERSION); + ownershipProofStream << ownershipProof; + + txSparkSpend.vExtraPayload.insert(txSparkSpend.vExtraPayload.end(), ownershipProofStream.begin(), ownershipProofStream.end()); + + break; + } } \ No newline at end of file diff --git a/src/sparkname.h b/src/sparkname.h index 8a57123878..beb45d3d27 100644 --- a/src/sparkname.h +++ b/src/sparkname.h @@ -88,6 +88,9 @@ class CSparkNameManager return txSet.find(txData.name) != txSet.cend(); } + // fill missing CSparkNameTxData fields and append spark name tx data to the transaction + void AppendSparkNameTxData(CMutableTransaction &txSparkSpend, CSparkNameTxData &sparkNameData, const spark::SpendKey &spendKey, const spark::IncomingViewKey &incomingViewKey); + static CSparkNameManager *GetInstance() { return sharedSparkNameManager; }; }; From de07857f892c95b0aeaf24202561d952a9443f05 Mon Sep 17 00:00:00 2001 From: Peter Shugalev Date: Sun, 19 Jan 2025 23:19:14 +0100 Subject: [PATCH 12/31] Changed way of checking spark name tx --- src/spark/state.cpp | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/src/spark/state.cpp b/src/spark/state.cpp index 4eebb613e4..09a5469392 100644 --- a/src/spark/state.cpp +++ b/src/spark/state.cpp @@ -1,4 +1,5 @@ #include "state.h" +#include "sparkname.h" #include "../validation.h" #include "../batchproof_container.h" @@ -774,6 +775,18 @@ bool CheckSparkTransaction( isCheckWallet, fStatefulSigmaCheck, sparkTxInfo)) { return false; } + + CSparkNameManager *sparkNameManager = CSparkNameManager::GetInstance(); + CSparkNameTxData sparkTxData; + if (sparkNameManager->CheckSparkNameTx(tx, nHeight, state, &sparkTxData)) { + if (!sparkTxData.name.empty() && sparkTxInfo && !sparkTxInfo->fInfoIsComplete) { + sparkTxInfo->sparkNames[sparkTxData.name] = sparkTxData; + } + } + else { + return false; + } + } catch (const std::exception &x) { return state.Error(x.what()); From 5a320a7e04ffd8fac13f6d40fc0b1a28a1ba8a0e Mon Sep 17 00:00:00 2001 From: Peter Shugalev Date: Mon, 20 Jan 2025 00:10:57 +0100 Subject: [PATCH 13/31] Wallet API for spark name transaction creation --- src/qt/createsparknamepage.cpp | 1 + src/qt/createsparknamepage.h | 4 ++++ src/spark/sparkwallet.cpp | 28 ++++++++++++++++++++++++++++ src/spark/sparkwallet.h | 6 ++++++ src/sparkname.cpp | 2 +- 5 files changed, 40 insertions(+), 1 deletion(-) diff --git a/src/qt/createsparknamepage.cpp b/src/qt/createsparknamepage.cpp index de234b91c2..9642ec1ecf 100644 --- a/src/qt/createsparknamepage.cpp +++ b/src/qt/createsparknamepage.cpp @@ -21,3 +21,4 @@ CreateSparkNamePage::~CreateSparkNamePage() { delete ui; } + diff --git a/src/qt/createsparknamepage.h b/src/qt/createsparknamepage.h index bb5590bb8a..dd15712920 100644 --- a/src/qt/createsparknamepage.h +++ b/src/qt/createsparknamepage.h @@ -3,6 +3,9 @@ #include +#include "libspark/keys.h" +#include "primitives/transaction.h" + namespace Ui { class CreateSparkNamePage; } @@ -20,6 +23,7 @@ class CreateSparkNamePage : public QDialog private: Ui::CreateSparkNamePage *ui; + CTransactionRef CreateSparkNameTransaction(const std::string &name, const spark::Address &address, const std::string &additionalInfo); }; #endif // _QT_CREATESPARKNAMEPAGE_H \ No newline at end of file diff --git a/src/spark/sparkwallet.cpp b/src/spark/sparkwallet.cpp index 382cb6c62d..e147c0f7f7 100644 --- a/src/spark/sparkwallet.cpp +++ b/src/spark/sparkwallet.cpp @@ -8,6 +8,7 @@ #include "../policy/policy.h" #include "../script/sign.h" #include 
"state.h" +#include "sparkname.h" #include @@ -1587,6 +1588,33 @@ CWalletTx CSparkWallet::CreateSparkSpendTransaction( return wtxNew; } +CWalletTx CSparkWallet::CreateSparkNameTransaction(CSparkNameTxData &nameData, CAmount fee, const CCoinControl *coinConrol) { + CRecipient devPayout; + devPayout.nAmount = fee; + devPayout.scriptPubKey = GetScriptForDestination(CBitcoinAddress(Params().GetConsensus().stage3DevelopmentFundAddress).Get()); + devPayout.fSubtractFeeFromAmount = false; + + CAmount txFee; + CWalletTx wtxSparkSpend = CreateSparkSpendTransaction({devPayout}, {}, txFee, coinConrol); + + const spark::Params* params = spark::Params::get_default(); + spark::SpendKey spendKey(params); + try { + spendKey = std::move(generateSpendKey(params)); + } catch (std::exception& e) { + throw std::runtime_error(_("Unable to generate spend key.")); + } + + if (spendKey == spark::SpendKey(params)) + throw std::runtime_error(_("Unable to generate spend key, looks the wallet is locked.")); + + CMutableTransaction tx = CMutableTransaction(*wtxSparkSpend.tx); + CSparkNameManager::GetInstance()->AppendSparkNameTxData(tx, nameData, spendKey, fullViewKey); + + wtxSparkSpend.tx = MakeTransactionRef(std::move(tx)); + return wtxSparkSpend; +} + template static CAmount CalculateBalance(Iterator begin, Iterator end) { CAmount balance(0); diff --git a/src/spark/sparkwallet.h b/src/spark/sparkwallet.h index 8f707a5f94..331ae5ef85 100644 --- a/src/spark/sparkwallet.h +++ b/src/spark/sparkwallet.h @@ -11,6 +11,7 @@ #include "../libspark/spend_transaction.h" #include "../wallet/walletdb.h" #include "../sync.h" +#include "../sparkname.h" class CRecipient; class CReserveKey; @@ -139,6 +140,11 @@ class CSparkWallet { std::size_t utxoNum, const CCoinControl *coinControl); + CWalletTx CreateSparkNameTransaction( + CSparkNameTxData &nameData, + CAmount fee, + const CCoinControl *coinControl = NULL); + // Returns the list of pairs of coins and metadata for that coin, std::list GetAvailableSparkCoins(const CCoinControl *coinControl = NULL) const; diff --git a/src/sparkname.cpp b/src/sparkname.cpp index a9f576c736..0ce5bd5e35 100644 --- a/src/sparkname.cpp +++ b/src/sparkname.cpp @@ -208,4 +208,4 @@ void CSparkNameManager::AppendSparkNameTxData(CMutableTransaction &txSparkSpend, break; } -} \ No newline at end of file +} From 5ab7f4dc8d9c88d33813b6a7497f1506e1c92254 Mon Sep 17 00:00:00 2001 From: Peter Shugalev Date: Wed, 22 Jan 2025 23:59:45 +0100 Subject: [PATCH 14/31] API changes for spark name tx creation --- src/qt/walletmodel.cpp | 56 +++++++++++++++++++++++++++++++++++++++ src/qt/walletmodel.h | 6 +++++ src/spark/sparkwallet.cpp | 5 ++-- src/spark/sparkwallet.h | 3 ++- src/wallet/wallet.cpp | 16 +++++++++++ src/wallet/wallet.h | 6 +++++ 6 files changed, 88 insertions(+), 4 deletions(-) diff --git a/src/qt/walletmodel.cpp b/src/qt/walletmodel.cpp index 66a8538941..095f7ce4e2 100644 --- a/src/qt/walletmodel.cpp +++ b/src/qt/walletmodel.cpp @@ -1544,6 +1544,62 @@ WalletModel::SendCoinsReturn WalletModel::prepareSpendSparkTransaction(WalletMod return SendCoinsReturn(OK); } +WalletModel::SendCoinsReturn WalletModel::prepareSparkNameTransaction(WalletModelTransaction &transaction, CSparkNameTxData &sparkNameData, CAmount sparkNameFee, const CCoinControl* coinControl) +{ + CAmount nBalance; + std::tie(nBalance, std::ignore) = getSparkBalance(); + + if (sparkNameFee > nBalance) { + return AmountExceedsBalance; + } + + { + LOCK2(cs_main, wallet->cs_wallet); + + CAmount nFeeRequired = 0; + CWalletTx *newTx = 
transaction.getTransaction(); + try { + *newTx = wallet->CreateSparkNameTransaction(sparkNameData, sparkNameFee, nFeeRequired, coinControl); + } + catch (InsufficientFunds const&) { + transaction.setTransactionFee(nFeeRequired); + if (sparkNameFee + nFeeRequired > nBalance) { + return SendCoinsReturn(AmountWithFeeExceedsBalance); + } + return SendCoinsReturn(AmountExceedsBalance); + } + catch (std::runtime_error const& e) { + Q_EMIT message( + tr("Spend Spark"), + QString::fromStdString(e.what()), + CClientUIInterface::MSG_ERROR); + + return TransactionCreationFailed; + } + catch (std::invalid_argument const& e) { + Q_EMIT message( + tr("Spend Spark"), + QString::fromStdString(e.what()), + CClientUIInterface::MSG_ERROR); + + return TransactionCreationFailed; + } + if (nFeeRequired > maxTxFee) { + return AbsurdFee; + } + + int changePos = -1; + for (size_t i = 0; i != newTx->tx->vout.size(); i++) { + if (!newTx->tx->vout[i].scriptPubKey.IsSparkSMint()) changePos = i; + } + + transaction.setTransactionFee(nFeeRequired); + transaction.reassignAmounts(changePos); + } + + return SendCoinsReturn(OK); +} + WalletModel::SendCoinsReturn WalletModel::mintSparkCoins(std::vector &transactions, std::vector >& wtxAndFee, std::list& reserveKeys) { QByteArray transaction_array; /* store serialized transaction */ diff --git a/src/qt/walletmodel.h b/src/qt/walletmodel.h index f7e85ef62c..c2aeba10d2 100644 --- a/src/qt/walletmodel.h +++ b/src/qt/walletmodel.h @@ -196,6 +196,12 @@ class WalletModel : public QObject SendCoinsReturn spendSparkCoins( WalletModelTransaction &transaction); + + SendCoinsReturn prepareSparkNameTransaction( + WalletModelTransaction &transaction, + CSparkNameTxData &sparkNameData, + CAmount sparkNameFee, + const CCoinControl *coinControl); SendCoinsReturn mintSparkCoins( std::vector &transactions, diff --git a/src/spark/sparkwallet.cpp b/src/spark/sparkwallet.cpp index e147c0f7f7..4b1f8632d5 100644 --- a/src/spark/sparkwallet.cpp +++ b/src/spark/sparkwallet.cpp @@ -1588,13 +1588,12 @@ CWalletTx CSparkWallet::CreateSparkSpendTransaction( return wtxNew; } -CWalletTx CSparkWallet::CreateSparkNameTransaction(CSparkNameTxData &nameData, CAmount fee, const CCoinControl *coinConrol) { +CWalletTx CSparkWallet::CreateSparkNameTransaction(CSparkNameTxData &nameData, CAmount sparkNameFee, CAmount &txFee, const CCoinControl *coinConrol) { CRecipient devPayout; - devPayout.nAmount = fee; + devPayout.nAmount = sparkNameFee; devPayout.scriptPubKey = GetScriptForDestination(CBitcoinAddress(Params().GetConsensus().stage3DevelopmentFundAddress).Get()); devPayout.fSubtractFeeFromAmount = false; - CAmount txFee; CWalletTx wtxSparkSpend = CreateSparkSpendTransaction({devPayout}, {}, txFee, coinConrol); const spark::Params* params = spark::Params::get_default(); diff --git a/src/spark/sparkwallet.h b/src/spark/sparkwallet.h index 331ae5ef85..77c5a39f2d 100644 --- a/src/spark/sparkwallet.h +++ b/src/spark/sparkwallet.h @@ -142,7 +142,8 @@ class CSparkWallet { CWalletTx CreateSparkNameTransaction( CSparkNameTxData &nameData, - CAmount fee, + CAmount sparkNamefee, + CAmount &txFee, const CCoinControl *coinControl = NULL); // Returns the list of pairs of coins and metadata for that coin, diff --git a/src/wallet/wallet.cpp b/src/wallet/wallet.cpp index 4273885fac..5679866f71 100644 --- a/src/wallet/wallet.cpp +++ b/src/wallet/wallet.cpp @@ -5869,6 +5869,22 @@ CWalletTx CWallet::CreateSparkSpendTransaction( return sparkWallet->CreateSparkSpendTransaction(recipients, privateRecipients, fee, coinControl); } 
+CWalletTx CWallet::CreateSparkNameTransaction( + CSparkNameTxData &sparkNameData, + CAmount sparkNameFee, + CAmount &txFee, + const CCoinControl *coinControl) +{ + // sanity check + EnsureMintWalletAvailable(); + + if (IsLocked()) { + throw std::runtime_error(_("Wallet locked")); + } + + return sparkWallet->CreateSparkNameTransaction(sparkNameData, sparkNameFee, txFee, coinControl); +} + CWalletTx CWallet::SpendAndStoreSpark( const std::vector& recipients, const std::vector>& privateRecipients, diff --git a/src/wallet/wallet.h b/src/wallet/wallet.h index f9397f5150..f98773fb3f 100644 --- a/src/wallet/wallet.h +++ b/src/wallet/wallet.h @@ -1102,6 +1102,12 @@ class CWallet : public CCryptoKeyStore, public CValidationInterface CAmount &fee, const CCoinControl *coinControl = NULL); + CWalletTx CreateSparkNameTransaction( + CSparkNameTxData &sparkNameData, + CAmount sparkNameFee, + CAmount &txFee, + const CCoinControl *coinControl = NULL); + CWalletTx SpendAndStoreSpark( const std::vector& recipients, const std::vector>& privateRecipients, From 6309af47e60e8a4ec2a0cdcd33852ee14975c4db Mon Sep 17 00:00:00 2001 From: Peter Shugalev Date: Sun, 26 Jan 2025 00:30:10 +0100 Subject: [PATCH 15/31] Added registersparkname RPC call --- src/wallet/rpcwallet.cpp | 70 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 70 insertions(+) diff --git a/src/wallet/rpcwallet.cpp b/src/wallet/rpcwallet.cpp index a5ade6b12f..2092de90a2 100644 --- a/src/wallet/rpcwallet.cpp +++ b/src/wallet/rpcwallet.cpp @@ -3895,6 +3895,75 @@ UniValue spendspark(const JSONRPCRequest& request) return wtx.GetHash().GetHex(); } +UniValue registersparkname(const JSONRPCRequest& request) { + CWallet * const pwallet = GetWalletForJSONRPCRequest(request); + if (!EnsureWalletIsAvailable(pwallet, request.fHelp)) { + return NullUniValue; + } + + if (request.fHelp || request.params.size() > 0) { + throw std::runtime_error( + "registersparkname \"name\" \"sparkaddress\" [\"additionalData\"]\n"); + } + + EnsureWalletIsUnlocked(pwallet); + EnsureSparkWalletIsAvailable(); + + // Ensure spark is already activated on the network so users will not lose their coins + // because other nodes would treat it as garbage data.
+ if (!spark::IsSparkAllowed()) { + throw JSONRPCError(RPC_WALLET_ERROR, "Spark is not activated yet"); + } + + const auto &consensusParams = Params().GetConsensus(); + + if (request.params.size() < 2 || request.params.size() > 3) + throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameters"); + + std::string sparkName = request.params[0].get_str(); + std::string sparkAddress = request.params[1].get_str(); + std::string additionalData; + + if (request.params.size() >= 3) + additionalData = request.params[2].get_str(); + + if (sparkName.empty() || sparkName.size() > 20) + throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid spark name"); + + CSparkNameTxData sparkNameData; + sparkNameData.name = sparkName; + sparkNameData.sparkAddress = sparkAddress; + sparkNameData.additionalInfo = additionalData; + + CAmount sparkNameFee = consensusParams.nSparkNamesFee[sparkName.size()]; + CAmount fee; + CWalletTx wtx; + try { + wtx = pwallet->CreateSparkNameTransaction(sparkNameData, sparkNameFee, fee); + } catch (const std::exception &) { + throw JSONRPCError(RPC_WALLET_ERROR, "Spark name registration failed."); + } + + // commit + try { + CValidationState state; + CReserveKey reserveKey(pwallet); + pwallet->CommitTransaction(wtx, reserveKey, g_connman.get(), state); + } + catch (const std::exception &) { + auto error = _( + "Error: The transaction was rejected! This might happen if some of " + "the coins in your wallet were already spent, such as if you used " + "a copy of wallet.dat and coins were spent in the copy but not " + "marked as spent here." + ); + + std::throw_with_nested(std::runtime_error(error)); + } + + return wtx.GetHash().GetHex(); +} + UniValue lelantustospark(const JSONRPCRequest& request) { CWallet * const pwallet = GetWalletForJSONRPCRequest(request); if (!EnsureWalletIsAvailable(pwallet, request.fHelp)) { @@ -5765,6 +5834,7 @@ static const CRPCCommand commands[] = { "wallet", "lelantustospark", &lelantustospark, false }, { "wallet", "identifysparkcoins", &identifysparkcoins, false }, { "wallet", "getsparkcoinaddr", &getsparkcoinaddr, false }, + { "wallet", "registersparkname", ®istersparkname, false }, //bip47 From a560e384516b9cae36fed5fbce37a9251e8cc717 Mon Sep 17 00:00:00 2001 From: levoncrypto Date: Mon, 27 Jan 2025 13:45:19 +0400 Subject: [PATCH 16/31] Spark activation check for RPC --- src/rpc/blockchain.cpp | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/rpc/blockchain.cpp b/src/rpc/blockchain.cpp index 1ce4ee5788..c7ebd8c2cf 100644 --- a/src/rpc/blockchain.cpp +++ b/src/rpc/blockchain.cpp @@ -199,6 +199,10 @@ UniValue getsparknames(const JSONRPCRequest &request) LOCK(cs_main); + if (!spark::IsSparkAllowed()) { + throw JSONRPCError(RPC_WALLET_ERROR, "Spark is not activated yet"); + } + const Consensus::Params &consensusParams = Params().GetConsensus(); int nHeight = consensusParams.nSparkNamesStartBlock; if (request.params.size() == 1) { @@ -240,6 +244,10 @@ UniValue getsparknamedata(const JSONRPCRequest& request) LOCK(cs_main); + if (!spark::IsSparkAllowed()) { + throw JSONRPCError(RPC_WALLET_ERROR, "Spark is not activated yet"); + } + std::string sparkName = request.params[0].get_str(); CSparkNameManager *sparkNameManager = CSparkNameManager::GetInstance(); @@ -259,7 +267,6 @@ UniValue getsparknamedata(const JSONRPCRequest& request) result.push_back(sparkNameTxId); return result; - } UniValue getbestblockhash(const JSONRPCRequest& request) From 503a397f4120cd48bd5df4cad5d280742b44f18e Mon Sep 17 00:00:00 2001 From: Peter Shugalev Date: Fri, 
31 Jan 2025 11:38:21 +0100 Subject: [PATCH 17/31] Make spark names case-insensitive --- src/sparkname.cpp | 29 +++++++++++++++++++++++++++++ src/sparkname.h | 9 ++++++++- src/validation.cpp | 2 +- 3 files changed, 38 insertions(+), 2 deletions(-) diff --git a/src/sparkname.cpp b/src/sparkname.cpp index 0ce5bd5e35..5f93cd4165 100644 --- a/src/sparkname.cpp +++ b/src/sparkname.cpp @@ -209,3 +209,32 @@ void CSparkNameManager::AppendSparkNameTxData(CMutableTransaction &txSparkSpend, break; } } + +std::string CSparkNameManager::ToUpper(const std::string &str) +{ + std::string result = str; + std::transform(result.begin(), result.end(), result.begin(), ::toupper); + return result; +} + +bool CSparkNameManager::AddSparkName(const std::string &name, const spark::Address &address, uint32_t validityBlocks) +{ + std::string upperName = ToUpper(name); + + if (sparkNames.count(upperName) > 0) + return false; + + sparkNames[upperName] = std::make_pair(address, validityBlocks); + return true; +} + +bool CSparkNameManager::RemoveSparkName(const std::string &name) +{ + std::string upperName = ToUpper(name); + + if (sparkNames.count(upperName) == 0) + return false; + + sparkNames.erase(upperName); + return true; +} diff --git a/src/sparkname.h b/src/sparkname.h index beb45d3d27..0464879bc1 100644 --- a/src/sparkname.h +++ b/src/sparkname.h @@ -85,13 +85,20 @@ class CSparkNameManager template static bool IsInConflict(CSparkNameTxData &txData, const TxSet &txSet) { - return txSet.find(txData.name) != txSet.cend(); + std::string upperName = ToUpper(txData.name); + return txSet.find(upperName) != txSet.cend(); } // fill missing CSparkNameTxData fields and append spark name tx data to the transaction void AppendSparkNameTxData(CMutableTransaction &txSparkSpend, CSparkNameTxData &sparkNameData, const spark::SpendKey &spendKey, const spark::IncomingViewKey &incomingViewKey); + // add and remove spark name + bool AddSparkName(const std::string &name, const spark::Address &address, uint32_t validityBlocks); + bool RemoveSparkName(const std::string &name); + static CSparkNameManager *GetInstance() { return sharedSparkNameManager; }; + + static std::string ToUpper(const std::string &sparkName); }; #endif // FIRO_SPARKNAME_H \ No newline at end of file diff --git a/src/validation.cpp b/src/validation.cpp index 9609811d68..9d2be25030 100644 --- a/src/validation.cpp +++ b/src/validation.cpp @@ -1632,7 +1632,7 @@ bool AcceptToMemoryPoolWorker(CTxMemPool& pool, CValidationState& state, const C } if (!sparkNameData.name.empty()) - pool.sparkNames[sparkNameData.name] = hash; + pool.sparkNames[CSparkNameManager::ToUpper(sparkNameData.name)] = hash; #ifdef ENABLE_WALLET if (!GetBoolArg("-disablewallet", false) && pwalletMain->sparkWallet) { From ca5c1bab9a2164896b6519bf48dbd1d16ddb40b6 Mon Sep 17 00:00:00 2001 From: Peter Shugalev Date: Sun, 2 Feb 2025 23:04:58 +0100 Subject: [PATCH 18/31] Spark name RPC fixes --- src/rpc/client.cpp | 3 +++ src/sparkname.cpp | 17 +++++++++++------ src/wallet/rpcwallet.cpp | 17 +++++++++++------ 3 files changed, 25 insertions(+), 12 deletions(-) diff --git a/src/rpc/client.cpp b/src/rpc/client.cpp index 629dde9a14..384cbf303e 100644 --- a/src/rpc/client.cpp +++ b/src/rpc/client.cpp @@ -201,6 +201,9 @@ static const CRPCConvertParam vRPCConvertParams[] = { "spendspark", 0 }, { "spendspark", 1 }, + // Spark names + { "registersparkname", 2 }, + /* Evo spork */ { "spork", 2, "features"}, /* BIP47 */ diff --git a/src/sparkname.cpp b/src/sparkname.cpp index ca2fcf47d7..e9f9c7b2b3 100644 --- 
a/src/sparkname.cpp +++ b/src/sparkname.cpp @@ -147,10 +147,10 @@ bool CSparkNameManager::CheckSparkNameTx(const CTransaction &tx, int nHeight, CV constexpr int nBlockPerYear = 365*24*24; // 24 blocks per hour int nYears = (sparkNameData.sparkNameValidityBlocks + nBlockPerYear-1) / nBlockPerYear; - if (sparkNameData.sparkNameValidityBlocks > nBlockPerYear * 5) - return state.DoS(100, error("CheckSparkNameTx: can't be valid for more than 5 years")); + if (sparkNameData.sparkNameValidityBlocks > nBlockPerYear * 10) + return state.DoS(100, error("CheckSparkNameTx: can't be valid for more than 10 years")); - CAmount nameFee = consensusParams.nSparkNamesFee[sparkNameData.name.size()] * nYears; + CAmount nameFee = consensusParams.nSparkNamesFee[sparkNameData.name.size()] * COIN * nYears; CScript devPayoutScript = GetScriptForDestination(CBitcoinAddress(consensusParams.stage3DevelopmentFundAddress).Get()); bool payoutFound = false; for (const CTxOut &txout: tx.vout) @@ -201,7 +201,7 @@ bool CSparkNameManager::CheckSparkNameTx(const CTransaction &tx, int nHeight, CV return state.DoS(100, error("CheckSparkNameTx: hash is out of range")); } - spark::Address sparkAddress; + spark::Address sparkAddress(spark::Params::get_default()); try { sparkAddress.decode(sparkNameData.sparkAddress); } @@ -237,7 +237,7 @@ void CSparkNameManager::AppendSparkNameTxData(CMutableTransaction &txSparkSpend, continue; // increase hashFailSafe and try again } - spark::Address sparkAddress; + spark::Address sparkAddress(spark::Params::get_default()); spark::OwnershipProof ownershipProof; sparkAddress.decode(sparkNameData.sparkAddress); @@ -246,7 +246,12 @@ void CSparkNameManager::AppendSparkNameTxData(CMutableTransaction &txSparkSpend, CDataStream ownershipProofStream(SER_NETWORK, PROTOCOL_VERSION); ownershipProofStream << ownershipProof; - txSparkSpend.vExtraPayload.insert(txSparkSpend.vExtraPayload.end(), ownershipProofStream.begin(), ownershipProofStream.end()); + sparkNameData.addressOwnershipProof.assign(ownershipProofStream.begin(), ownershipProofStream.end()); + + CDataStream sparkNameDataStream(SER_NETWORK, PROTOCOL_VERSION); + sparkNameDataStream << sparkNameData; + + txSparkSpend.vExtraPayload.insert(txSparkSpend.vExtraPayload.end(), sparkNameDataStream.begin(), sparkNameDataStream.end()); break; } diff --git a/src/wallet/rpcwallet.cpp b/src/wallet/rpcwallet.cpp index 2092de90a2..b489efa3c6 100644 --- a/src/wallet/rpcwallet.cpp +++ b/src/wallet/rpcwallet.cpp @@ -3901,9 +3901,9 @@ UniValue registersparkname(const JSONRPCRequest& request) { return NullUniValue; } - if (request.fHelp || request.params.size() > 0) { + if (request.fHelp || request.params.size() < 3 || request.params.size() > 4) { throw std::runtime_error( - "registersparkname \"name\" \"sparkaddress\" [\"additionalData\"]\n"); + "registersparkname \"name\" \"sparkaddress\" years [\"additionalData\"]\n"); } EnsureWalletIsUnlocked(pwallet); @@ -3917,15 +3917,19 @@ UniValue registersparkname(const JSONRPCRequest& request) { const auto &consensusParams = Params().GetConsensus(); - if (request.params.size() < 2 || request.params.size() > 3) + if (request.params.size() < 3 || request.params.size() > 4) throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameters"); std::string sparkName = request.params[0].get_str(); std::string sparkAddress = request.params[1].get_str(); std::string additionalData; - if (request.params.size() >= 3) - additionalData = request.params[2].get_str(); + int numberOfYears = request.params[2].get_int(); + if (numberOfYears < 1 || 
numberOfYears > 10) + throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid number of years"); + + if (request.params.size() >= 4) + additionalData = request.params[3].get_str(); if (sparkName.empty() || sparkName.size() > 20) throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid spark name"); @@ -3934,8 +3938,9 @@ UniValue registersparkname(const JSONRPCRequest& request) { sparkNameData.name = sparkName; sparkNameData.sparkAddress = sparkAddress; sparkNameData.additionalInfo = additionalData; + sparkNameData.sparkNameValidityBlocks = numberOfYears * 365*24*24; - CAmount sparkNameFee = consensusParams.nSparkNamesFee[sparkName.size()]; + CAmount sparkNameFee = consensusParams.nSparkNamesFee[sparkName.size()]*COIN; CAmount fee; CWalletTx wtx; try { From 13093eec8b4a5f8a10624b044c84bc781ac9bf9f Mon Sep 17 00:00:00 2001 From: Peter Shugalev Date: Mon, 3 Feb 2025 10:16:46 +0100 Subject: [PATCH 19/31] Faster lookup for spark name by spark address --- src/libspark/keys.h | 4 ++++ src/sparkname.cpp | 26 +++++++++++++++++++++----- src/sparkname.h | 5 +++-- src/validation.cpp | 2 +- 4 files changed, 29 insertions(+), 8 deletions(-) diff --git a/src/libspark/keys.h b/src/libspark/keys.h index 285e37ce41..8e29d9b381 100644 --- a/src/libspark/keys.h +++ b/src/libspark/keys.h @@ -93,6 +93,10 @@ class Address { bool verify_own(const Scalar& m, OwnershipProof& proof) const; + bool operator < (const Address &other) const { + return encode(0) < other.encode(0); + } + private: const Params* params; std::vector d; diff --git a/src/sparkname.cpp b/src/sparkname.cpp index e9f9c7b2b3..454e2793aa 100644 --- a/src/sparkname.cpp +++ b/src/sparkname.cpp @@ -97,12 +97,15 @@ std::string CSparkNameManager::GetSparkNameTxID(const std::string &name) const bool CSparkNameManager::ParseSparkNameTxData(const CTransaction &tx, spark::SpendTransaction &sparkTx, CSparkNameTxData &sparkNameData, size_t &sparkNameDataPos) { + sparkNameDataPos = 0; CDataStream serializedSpark(SER_NETWORK, PROTOCOL_VERSION); serializedSpark.write((const char *)tx.vExtraPayload.data(), tx.vExtraPayload.size()); try { serializedSpark >> sparkTx; if (serializedSpark.size() == 0) { // silently ignore, it's not a critical error to not have a spark name tx part + // sparkNameDataPos pointing to the end of the tx payload means there is no spark name tx data + sparkNameDataPos = tx.vExtraPayload.size(); return false; } @@ -120,6 +123,9 @@ bool CSparkNameManager::CheckSparkNameTx(const CTransaction &tx, int nHeight, CV { const Consensus::Params &consensusParams = Params().GetConsensus(); + if (outSparkNameData) + outSparkNameData->name.clear(); + if (!tx.IsSparkSpend()) return state.Error("CheckSparkNameTx: not a spark name tx"); @@ -128,8 +134,14 @@ bool CSparkNameManager::CheckSparkNameTx(const CTransaction &tx, int nHeight, CV spark::SpendTransaction spendTransaction(params); size_t sparkNameDataPos; - if (!ParseSparkNameTxData(tx, spendTransaction, sparkNameData, sparkNameDataPos)) - return state.DoS(100, error("CheckSparkNameTx: failed to parse spark name tx")); + if (!ParseSparkNameTxData(tx, spendTransaction, sparkNameData, sparkNameDataPos)) { + if (sparkNameDataPos == tx.vExtraPayload.size()) { + return true; // no payload, not an error at all + } + else { + return state.DoS(100, error("CheckSparkNameTx: failed to parse spark name tx")); + } + } if (outSparkNameData) *outSparkNameData = sparkNameData; @@ -268,20 +280,24 @@ bool CSparkNameManager::AddSparkName(const std::string &name, const spark::Addre { std::string upperName = ToUpper(name); - if 
(sparkNames.count(upperName) > 0) + if (sparkNames.count(upperName) > 0 || sparkNameAddresses.count(address) > 0) return false; sparkNames[upperName] = std::make_pair(address, validityBlocks); + sparkNameAddresses[address] = name; + return true; } -bool CSparkNameManager::RemoveSparkName(const std::string &name) +bool CSparkNameManager::RemoveSparkName(const std::string &name, const spark::Address &address) { std::string upperName = ToUpper(name); - if (sparkNames.count(upperName) == 0) + if (sparkNames.count(upperName) == 0 || sparkNameAddresses.count(address) == 0) return false; sparkNames.erase(upperName); + sparkNameAddresses.erase(address); + return true; } diff --git a/src/sparkname.h b/src/sparkname.h index e37d807579..1f7591a6a3 100644 --- a/src/sparkname.h +++ b/src/sparkname.h @@ -57,6 +57,7 @@ class CSparkNameManager static CSparkNameManager *sharedSparkNameManager; std::map> sparkNames; + std::map sparkNameAddresses; public: CSparkNameManager() {} @@ -94,14 +95,14 @@ class CSparkNameManager // add and remove spark name bool AddSparkName(const std::string &name, const spark::Address &address, uint32_t validityBlocks); - bool RemoveSparkName(const std::string &name); + bool RemoveSparkName(const std::string &name, const spark::Address &address); static CSparkNameManager *GetInstance() { return sharedSparkNameManager; }; uint64_t GetSparkNameBlockHeight(const std::string &name) const; std::string GetSparkNameTxID(const std::string &name) const; - + static std::string ToUpper(const std::string &sparkName); }; diff --git a/src/validation.cpp b/src/validation.cpp index 9d2be25030..c0eea235db 100644 --- a/src/validation.cpp +++ b/src/validation.cpp @@ -1019,7 +1019,7 @@ bool AcceptToMemoryPoolWorker(CTxMemPool& pool, CValidationState& state, const C if (!sparkNameManager->CheckSparkNameTx(tx, chainActive.Height(), state, &sparkNameData)) return false; - if (CSparkNameManager::IsInConflict(sparkNameData, pool.sparkNames)) { + if (!sparkNameData.name.empty() && CSparkNameManager::IsInConflict(sparkNameData, pool.sparkNames)) { return state.Invalid(false, REJECT_CONFLICT, "txn-mempool-conflict"); } } From 335b332fd396d51e280ab835ba9bd4478303b882 Mon Sep 17 00:00:00 2001 From: Peter Shugalev Date: Mon, 3 Feb 2025 10:46:43 +0100 Subject: [PATCH 20/31] Fixes for spark name/address lookup --- src/sparkname.cpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/sparkname.cpp b/src/sparkname.cpp index 454e2793aa..3d15450989 100644 --- a/src/sparkname.cpp +++ b/src/sparkname.cpp @@ -280,7 +280,9 @@ bool CSparkNameManager::AddSparkName(const std::string &name, const spark::Addre { std::string upperName = ToUpper(name); - if (sparkNames.count(upperName) > 0 || sparkNameAddresses.count(address) > 0) + if (sparkNames.count(upperName) > 0 && address.encode(0) != sparkNames[upperName].first.encode(0)) + return false; + else if (sparkNameAddresses.count(address) > 0) return false; sparkNames[upperName] = std::make_pair(address, validityBlocks); From 2a59c734b9de94f7a0921c7754d5df687ee74f63 Mon Sep 17 00:00:00 2001 From: Peter Shugalev Date: Mon, 3 Feb 2025 22:38:58 +0100 Subject: [PATCH 21/31] Improvements for duplicated address detection --- src/spark/state.cpp | 7 +++++++ src/sparkname.h | 20 ++++++++++++++++++++ src/txmempool.h | 2 +- src/validation.cpp | 7 +++++-- 4 files changed, 33 insertions(+), 3 deletions(-) diff --git a/src/spark/state.cpp b/src/spark/state.cpp index 09a5469392..6ec4257f27 100644 --- a/src/spark/state.cpp +++ b/src/spark/state.cpp @@ -780,6 +780,13 @@ 
bool CheckSparkTransaction( CSparkNameTxData sparkTxData; if (sparkNameManager->CheckSparkNameTx(tx, nHeight, state, &sparkTxData)) { if (!sparkTxData.name.empty() && sparkTxInfo && !sparkTxInfo->fInfoIsComplete) { + // Check if the block already contains conflicting spark name + if (CSparkNameManager::IsInConflict(sparkTxData, sparkTxInfo->sparkNames, + [=](decltype(sparkTxInfo->sparkNames)::const_iterator it)->std::string { + return it->second.sparkAddress; + })) + return false; + sparkTxInfo->sparkNames[sparkTxData.name] = sparkTxData; } } diff --git a/src/sparkname.h b/src/sparkname.h index 1f7591a6a3..70d0a48b8b 100644 --- a/src/sparkname.h +++ b/src/sparkname.h @@ -7,6 +7,10 @@ #include "evo/evodb.h" #include "libspark/keys.h" +namespace spark { + unsigned char GetNetworkType(); +} + /* * Spark alias transaction data. This is to be stored in the transaction's extra data field * right after Spark data. The transaction is considered a Spark alias transaction if it spends @@ -90,6 +94,22 @@ class CSparkNameManager return txSet.find(upperName) != txSet.cend(); } + template + static bool IsInConflict(CSparkNameTxData &txData, const TxSet &txSet, std::function getAddress) + { + std::string upperName = ToUpper(txData.name); + if (txSet.find(upperName) != txSet.cend()) + return true; + + for (typename TxSet::const_iterator it = txSet.cbegin(); it != txSet.cend(); ++it) + { + if (getAddress(it) == txData.sparkAddress) + return true; + } + + return false; + } + // fill missing CSparkNameTxData fields and append spark name tx data to the transaction void AppendSparkNameTxData(CMutableTransaction &txSparkSpend, CSparkNameTxData &sparkNameData, const spark::SpendKey &spendKey, const spark::IncomingViewKey &incomingViewKey); diff --git a/src/txmempool.h b/src/txmempool.h index 16370c32ad..97fca8e471 100644 --- a/src/txmempool.h +++ b/src/txmempool.h @@ -524,7 +524,7 @@ class CTxMemPool lelantus::CLelantusMempoolState lelantusState; spark::CSparkMempoolState sparkState; - std::map sparkNames; // used to rule out duplicate names + std::map> sparkNames; // used to rule out duplicate names private: typedef std::map cacheMap; diff --git a/src/validation.cpp b/src/validation.cpp index c0eea235db..134474565d 100644 --- a/src/validation.cpp +++ b/src/validation.cpp @@ -1019,7 +1019,10 @@ bool AcceptToMemoryPoolWorker(CTxMemPool& pool, CValidationState& state, const C if (!sparkNameManager->CheckSparkNameTx(tx, chainActive.Height(), state, &sparkNameData)) return false; - if (!sparkNameData.name.empty() && CSparkNameManager::IsInConflict(sparkNameData, pool.sparkNames)) { + if (!sparkNameData.name.empty() && + CSparkNameManager::IsInConflict(sparkNameData, pool.sparkNames, [=](decltype(pool.sparkNames)::const_iterator it)->std::string { + return it->second.first; + })) { return state.Invalid(false, REJECT_CONFLICT, "txn-mempool-conflict"); } } @@ -1632,7 +1635,7 @@ bool AcceptToMemoryPoolWorker(CTxMemPool& pool, CValidationState& state, const C } if (!sparkNameData.name.empty()) - pool.sparkNames[CSparkNameManager::ToUpper(sparkNameData.name)] = hash; + pool.sparkNames[CSparkNameManager::ToUpper(sparkNameData.name)] = {sparkNameData.sparkAddress, hash}; #ifdef ENABLE_WALLET if (!GetBoolArg("-disablewallet", false) && pwalletMain->sparkWallet) { From 672b7001bab13a8858c68c170bc769f4932bab4b Mon Sep 17 00:00:00 2001 From: Peter Shugalev Date: Tue, 4 Feb 2025 23:01:16 +0100 Subject: [PATCH 22/31] Fixes for spark name state --- src/spark/state.cpp | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git 
a/src/spark/state.cpp b/src/spark/state.cpp index 6ec4257f27..c3e602dc21 100644 --- a/src/spark/state.cpp +++ b/src/spark/state.cpp @@ -309,6 +309,15 @@ bool ConnectBlockSpark( } } + if (!pblock->sparkTxInfo->sparkNames.empty()) { + CSparkNameManager *sparkNameManager = CSparkNameManager::GetInstance(); + for (const auto &sparkName : pblock->sparkTxInfo->sparkNames) { + spark::Address address(spark::Params::get_default()); + address.decode(sparkName.second.sparkAddress); + sparkNameManager->AddSparkName(sparkName.first, address, pindexNew->nHeight + sparkName.second.sparkNameValidityBlocks); + } + } + // generate hash if we need it if (updateHash) { unsigned char hash_result[CSHA256::OUTPUT_SIZE]; From e243b7d0b9bf4f325689ad6148daa7700c40cc4b Mon Sep 17 00:00:00 2001 From: Peter Shugalev Date: Wed, 5 Feb 2025 18:24:44 +0100 Subject: [PATCH 23/31] Block index entries for spark names --- src/chain.h | 13 ++++++++++--- src/spark/state.cpp | 13 ++++++++++--- src/sparkname.cpp | 35 +++++++++++++++++++++++++++++------ src/sparkname.h | 9 +++++---- src/txdb.cpp | 3 +++ 5 files changed, 57 insertions(+), 16 deletions(-) diff --git a/src/chain.h b/src/chain.h index 74a105393b..ed5931102f 100644 --- a/src/chain.h +++ b/src/chain.h @@ -269,9 +269,9 @@ class CBlockIndex ActiveSporkMap activeDisablingSporks; //! List of spark names that were created or extended in this block. Map of spark name to - std::map> addedSparkNames; - //! List of spark names that were removed in this block - std::map> removedSparkNames; + std::map> addedSparkNames; + //! List of spark names that were removed in this block because of expiration + std::map> removedSparkNames; void SetNull() { @@ -315,6 +315,8 @@ class CBlockIndex sigmaSpentSerials.clear(); lelantusSpentSerials.clear(); activeDisablingSporks.clear(); + addedSparkNames.clear(); + removedSparkNames.clear(); } CBlockIndex() @@ -589,6 +591,11 @@ class CDiskBlockIndex : public CBlockIndex READWRITE(activeDisablingSporks); } nDiskBlockVersion = nVersion; + + if (!(s.GetType() & SER_GETHASH) && nHeight >= params.nSparkNamesStartBlock) { + READWRITE(addedSparkNames); + READWRITE(removedSparkNames); + } } uint256 GetBlockHash() const diff --git a/src/spark/state.cpp b/src/spark/state.cpp index c3e602dc21..aeb0c6a3b4 100644 --- a/src/spark/state.cpp +++ b/src/spark/state.cpp @@ -33,6 +33,7 @@ bool BuildSparkStateFromIndex(CChain *chain) { for (CBlockIndex *blockIndex = chain->Genesis(); blockIndex; blockIndex=chain->Next(blockIndex)) { sparkState.AddBlock(blockIndex); + CSparkNameManager::GetInstance()->AddBlock(blockIndex); } // DEBUG LogPrintf( @@ -312,9 +313,7 @@ bool ConnectBlockSpark( if (!pblock->sparkTxInfo->sparkNames.empty()) { CSparkNameManager *sparkNameManager = CSparkNameManager::GetInstance(); for (const auto &sparkName : pblock->sparkTxInfo->sparkNames) { - spark::Address address(spark::Params::get_default()); - address.decode(sparkName.second.sparkAddress); - sparkNameManager->AddSparkName(sparkName.first, address, pindexNew->nHeight + sparkName.second.sparkNameValidityBlocks); + pindexNew->addedSparkNames[sparkName.first] = {sparkName.second.sparkAddress, pindexNew->nHeight + sparkName.second.sparkNameValidityBlocks}; } } @@ -330,6 +329,11 @@ bool ConnectBlockSpark( else if (!fJustCheck) { sparkState.AddBlock(pindexNew); } + + CSparkNameManager *sparkNameManager = CSparkNameManager::GetInstance(); + pindexNew->removedSparkNames = sparkNameManager->RemoveSparkNamesLosingValidity(pindexNew->nHeight); + sparkNameManager->AddBlock(pindexNew); + return true; } 
@@ -373,6 +377,9 @@ void RemoveSpendReferencingBlock(CTxMemPool& pool, CBlockIndex* blockIndex) { } void DisconnectTipSpark(CBlock& block, CBlockIndex *pindexDelete) { + CSparkNameManager *sparkNameManager = CSparkNameManager::GetInstance(); + sparkNameManager->RemoveBlock(pindexDelete); + sparkState.RemoveBlock(pindexDelete); // Also remove from mempool spends that reference given block hash. diff --git a/src/sparkname.cpp b/src/sparkname.cpp index 3d15450989..2d2aafeb80 100644 --- a/src/sparkname.cpp +++ b/src/sparkname.cpp @@ -10,10 +10,13 @@ CSparkNameManager *CSparkNameManager::sharedSparkNameManager = new CSparkNameManager(); -bool CSparkNameManager::BlockConnected(CBlockIndex *pindex) +bool CSparkNameManager::AddBlock(CBlockIndex *pindex) { - for (const auto &entry : pindex->addedSparkNames) - sparkNames[entry.first] = entry.second; + for (const auto &entry : pindex->addedSparkNames) { + spark::Address address; + address.decode(entry.second.first); + sparkNames[entry.first] = {address, entry.second.second}; + } for (const auto &entry : pindex->removedSparkNames) sparkNames.erase(entry.first); @@ -21,13 +24,16 @@ bool CSparkNameManager::BlockConnected(CBlockIndex *pindex) return true; } -bool CSparkNameManager::BlockDisconnected(CBlockIndex *pindex) +bool CSparkNameManager::RemoveBlock(CBlockIndex *pindex) { for (const auto &entry : pindex->addedSparkNames) sparkNames.erase(entry.first); - for (const auto &entry : pindex->removedSparkNames) - sparkNames[entry.first] = entry.second; + for (const auto &entry : pindex->removedSparkNames) { + spark::Address address; + address.decode(entry.second.first); + sparkNames[entry.first] = {address, entry.second.second}; + } return true; } @@ -303,3 +309,20 @@ bool CSparkNameManager::RemoveSparkName(const std::string &name, const spark::Ad return true; } + +std::map> CSparkNameManager::RemoveSparkNamesLosingValidity(int nHeight) +{ + std::map> result; + + for (auto it = sparkNames.begin(); it != sparkNames.end();) + if (it->second.second >= nHeight) { + std::string sparkAddressStr = it->second.first.encode(spark::GetNetworkType()); + result[it->first] = {sparkAddressStr, it->second.second}; + sparkNameAddresses.erase(it->second.first); + it = sparkNames.erase(it); + } + else + ++it; + + return result; +} diff --git a/src/sparkname.h b/src/sparkname.h index 70d0a48b8b..0605b7d43b 100644 --- a/src/sparkname.h +++ b/src/sparkname.h @@ -70,10 +70,6 @@ class CSparkNameManager // but the transaction is otherwise valid. Returns true if the transaction is a valid spark name transaction. 
static bool ParseSparkNameTxData(const CTransaction &tx, spark::SpendTransaction &sparkTx, CSparkNameTxData &sparkNameData, size_t &sparkNameDataPos); - // update the state with contents of spark name transactions containted in block - bool BlockConnected(CBlockIndex *pindex); - bool BlockDisconnected(CBlockIndex *pindex); - bool CheckSparkNameTx(const CTransaction &tx, int nHeight, CValidationState &state, CSparkNameTxData *outSparkNameData = nullptr); // test if the spark name tx is valid @@ -123,6 +119,11 @@ class CSparkNameManager std::string GetSparkNameTxID(const std::string &name) const; + std::map> RemoveSparkNamesLosingValidity(int nHeight); + + bool AddBlock(CBlockIndex *pindex); + bool RemoveBlock(CBlockIndex *pindex); + static std::string ToUpper(const std::string &sparkName); }; diff --git a/src/txdb.cpp b/src/txdb.cpp index 2ba448dffa..0e2cbea845 100644 --- a/src/txdb.cpp +++ b/src/txdb.cpp @@ -429,6 +429,9 @@ bool CBlockTreeDB::LoadBlockIndexGuts(boost::functionactiveDisablingSporks = diskindex.activeDisablingSporks; + pindexNew->addedSparkNames = diskindex.addedSparkNames; + pindexNew->removedSparkNames = diskindex.removedSparkNames; + if (fCheckPoWForAllBlocks) { if (!CheckProofOfWork(pindexNew->GetBlockPoWHash(), pindexNew->nBits, consensusParams)) return error("LoadBlockIndex(): CheckProofOfWork failed: %s", pindexNew->ToString()); From 88438ec92dfdf8826816ac6a1df03c30d3c8fd9d Mon Sep 17 00:00:00 2001 From: Peter Shugalev Date: Thu, 6 Feb 2025 18:29:02 +0100 Subject: [PATCH 24/31] Make dot (.) a legit symbol in spark name --- src/sparkname.cpp | 20 ++++++++++++++------ src/sparkname.h | 6 ++++-- 2 files changed, 18 insertions(+), 8 deletions(-) diff --git a/src/sparkname.cpp b/src/sparkname.cpp index 2d2aafeb80..40f9a8bf4a 100644 --- a/src/sparkname.cpp +++ b/src/sparkname.cpp @@ -155,12 +155,8 @@ bool CSparkNameManager::CheckSparkNameTx(const CTransaction &tx, int nHeight, CV if (nHeight < consensusParams.nSparkNamesStartBlock) return state.DoS(100, error("CheckSparkNameTx: spark names are not allowed before block %d", consensusParams.nSparkStartBlock)); - if (sparkNameData.name.size() < 1 || sparkNameData.name.size() > 20) - return state.DoS(100, error("CheckSparkNameTx: invalid name length")); - - for (char c: sparkNameData.name) - if (!isalnum(c) && c != '-') - return state.DoS(100, error("CheckSparkNameTx: invalid name")); + if (!IsSparkNameValid(sparkNameData.name)) + return state.DoS(100, error("CheckSparkNameTx: invalid name")); constexpr int nBlockPerYear = 365*24*24; // 24 blocks per hour int nYears = (sparkNameData.sparkNameValidityBlocks + nBlockPerYear-1) / nBlockPerYear; @@ -326,3 +322,15 @@ std::map> CSparkNameManager::Remov return result; } + +bool CSparkNameManager::IsSparkNameValid(const std::string &name) +{ + if (name.size() < 1 || name.size() > maximumSparkNameLength) + return false; + + for (char c: name) + if (!isalnum(c) && c != '-' && c != '.') + return false; + + return true; +} \ No newline at end of file diff --git a/src/sparkname.h b/src/sparkname.h index 0605b7d43b..2919f01aec 100644 --- a/src/sparkname.h +++ b/src/sparkname.h @@ -64,6 +64,8 @@ class CSparkNameManager std::map sparkNameAddresses; public: + static const unsigned maximumSparkNameLength = 20; + CSparkNameManager() {} // Parse spark name transaction data from the transaction. 
Sets fCriticalError to false if there is no name data found @@ -72,8 +74,8 @@ class CSparkNameManager bool CheckSparkNameTx(const CTransaction &tx, int nHeight, CValidationState &state, CSparkNameTxData *outSparkNameData = nullptr); - // test if the spark name tx is valid - bool IsSparkNameValid(const CTransaction &tx, CValidationState &state); + // test if the spark name is valid + static bool IsSparkNameValid(const std::string &name); // return all valid names std::set GetSparkNames(int nHeight); From b739e1eb90ab4a3c7f3cb0d6fa300dde6c20b4fe Mon Sep 17 00:00:00 2001 From: Peter Shugalev Date: Sun, 9 Feb 2025 16:16:54 +0100 Subject: [PATCH 25/31] Spark name block number for testnet --- src/chainparams.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chainparams.cpp b/src/chainparams.cpp index e0e6971646..42bc6fe650 100644 --- a/src/chainparams.cpp +++ b/src/chainparams.cpp @@ -796,7 +796,7 @@ class CTestNetParams : public CChainParams { consensus.nExchangeAddressStartBlock = 147000; // spark names - consensus.nSparkNamesStartBlock = INT_MAX; + consensus.nSparkNamesStartBlock = 174000; consensus.nSparkNamesFee = standardSparkNamesFee; } }; From fb4d49da075e455388e2df67809cecdf5346f20c Mon Sep 17 00:00:00 2001 From: Peter Shugalev Date: Sun, 9 Feb 2025 23:58:35 +0100 Subject: [PATCH 26/31] Fixed restoration of old spark name data if block is disconnected --- src/chain.h | 7 ++-- src/rpc/blockchain.cpp | 19 ++++----- src/spark/state.cpp | 16 ++++++-- src/sparkname.cpp | 87 +++++++++++++++------------------------- src/sparkname.h | 48 ++++++++++++++++++---- src/txmempool.cpp | 9 +++++ src/wallet/rpcwallet.cpp | 10 ++++- 7 files changed, 115 insertions(+), 81 deletions(-) diff --git a/src/chain.h b/src/chain.h index ed5931102f..e73ae2b05d 100644 --- a/src/chain.h +++ b/src/chain.h @@ -22,6 +22,7 @@ #include "chainparams.h" #include "coin_containers.h" #include "streams.h" +#include "sparkname.h" #include #include @@ -268,10 +269,10 @@ class CBlockIndex //! std::map {feature name} -> {block number when feature is re-enabled again, parameter} ActiveSporkMap activeDisablingSporks; - //! List of spark names that were created or extended in this block. Map of spark name to - std::map> addedSparkNames; + //! List of spark names that were created or extended in this block. Map of spark name to + std::map addedSparkNames; //! 
List of spark names that were removed in this block because of expiration - std::map> removedSparkNames; + std::map removedSparkNames; void SetNull() { diff --git a/src/rpc/blockchain.cpp b/src/rpc/blockchain.cpp index c7ebd8c2cf..ef81f1933b 100644 --- a/src/rpc/blockchain.cpp +++ b/src/rpc/blockchain.cpp @@ -204,7 +204,7 @@ UniValue getsparknames(const JSONRPCRequest &request) } const Consensus::Params &consensusParams = Params().GetConsensus(); - int nHeight = consensusParams.nSparkNamesStartBlock; + int nHeight = chainActive.Height(); if (request.params.size() == 1) { nHeight = request.params[0].get_int(); } @@ -213,11 +213,9 @@ UniValue getsparknames(const JSONRPCRequest &request) UniValue result(UniValue::VARR); for (const auto &name : sparkNames) { result.push_back(name); - unsigned char network = spark::GetNetworkType(); - spark::Address SparkAddr; - sparkNameManager->GetSparkAddress(name, chainActive.Tip()->nHeight, SparkAddr); - std::string strAddress = SparkAddr.encode(network); - result.push_back(strAddress); + std::string SparkAddr; + if (sparkNameManager->GetSparkAddress(name, nHeight, SparkAddr)) + result.push_back(SparkAddr); } return result; } @@ -251,20 +249,19 @@ UniValue getsparknamedata(const JSONRPCRequest& request) std::string sparkName = request.params[0].get_str(); CSparkNameManager *sparkNameManager = CSparkNameManager::GetInstance(); - spark::Address SparkAddr; + std::string SparkAddr; sparkNameManager->GetSparkAddress(sparkName, chainActive.Tip()->nHeight, SparkAddr); UniValue result(UniValue::VARR); unsigned char network = spark::GetNetworkType(); - std::string strAddress = SparkAddr.encode(network); - result.push_back(strAddress); + result.push_back(SparkAddr); uint64_t nameBlockHeight = sparkNameManager->GetSparkNameBlockHeight(sparkName); result.push_back(nameBlockHeight); - std::string sparkNameTxId = sparkNameManager->GetSparkNameTxID(sparkName); - result.push_back(sparkNameTxId); + std::string sparkNameData = sparkNameManager->GetSparkNameAdditionalData(sparkName); + result.push_back(sparkNameData); return result; } diff --git a/src/spark/state.cpp b/src/spark/state.cpp index aeb0c6a3b4..11efea63c5 100644 --- a/src/spark/state.cpp +++ b/src/spark/state.cpp @@ -242,6 +242,9 @@ bool ConnectBlockSpark( CBlockIndex *pindexNew, const CBlock *pblock, bool fJustCheck) { + + bool fBackupRewrittenSparkNames = false; + // Add spark transaction information to index if (pblock && pblock->sparkTxInfo) { if (!fJustCheck) { @@ -313,8 +316,15 @@ bool ConnectBlockSpark( if (!pblock->sparkTxInfo->sparkNames.empty()) { CSparkNameManager *sparkNameManager = CSparkNameManager::GetInstance(); for (const auto &sparkName : pblock->sparkTxInfo->sparkNames) { - pindexNew->addedSparkNames[sparkName.first] = {sparkName.second.sparkAddress, pindexNew->nHeight + sparkName.second.sparkNameValidityBlocks}; + pindexNew->addedSparkNames[sparkName.first] = + CSparkNameBlockIndexData(sparkName.second.name, + sparkName.second.sparkAddress, + pindexNew->nHeight + sparkName.second.sparkNameValidityBlocks, + sparkName.second.additionalInfo); } + + // names were added, backup rewritten names if necessary + fBackupRewrittenSparkNames = true; } // generate hash if we need it @@ -332,7 +342,7 @@ bool ConnectBlockSpark( CSparkNameManager *sparkNameManager = CSparkNameManager::GetInstance(); pindexNew->removedSparkNames = sparkNameManager->RemoveSparkNamesLosingValidity(pindexNew->nHeight); - sparkNameManager->AddBlock(pindexNew); + sparkNameManager->AddBlock(pindexNew, fBackupRewrittenSparkNames); 
return true; } @@ -803,7 +813,7 @@ bool CheckSparkTransaction( })) return false; - sparkTxInfo->sparkNames[sparkTxData.name] = sparkTxData; + sparkTxInfo->sparkNames[CSparkNameManager::ToUpper(sparkTxData.name)] = sparkTxData; } } else { diff --git a/src/sparkname.cpp b/src/sparkname.cpp index 40f9a8bf4a..715a2aa607 100644 --- a/src/sparkname.cpp +++ b/src/sparkname.cpp @@ -10,29 +10,28 @@ CSparkNameManager *CSparkNameManager::sharedSparkNameManager = new CSparkNameManager(); -bool CSparkNameManager::AddBlock(CBlockIndex *pindex) +bool CSparkNameManager::AddBlock(CBlockIndex *pindex, bool fBackupRewrittenEntries) { + for (const auto &entry : pindex->removedSparkNames) + sparkNames.erase(ToUpper(entry.first)); + for (const auto &entry : pindex->addedSparkNames) { - spark::Address address; - address.decode(entry.second.first); - sparkNames[entry.first] = {address, entry.second.second}; + std::string upperName = ToUpper(entry.first); + if (sparkNames.count(upperName) > 0 && fBackupRewrittenEntries) + pindex->removedSparkNames[upperName] = sparkNames[upperName]; + sparkNames[upperName] = entry.second; } - for (const auto &entry : pindex->removedSparkNames) - sparkNames.erase(entry.first); - return true; } bool CSparkNameManager::RemoveBlock(CBlockIndex *pindex) { for (const auto &entry : pindex->addedSparkNames) - sparkNames.erase(entry.first); + sparkNames.erase(ToUpper(entry.first)); for (const auto &entry : pindex->removedSparkNames) { - spark::Address address; - address.decode(entry.second.first); - sparkNames[entry.first] = {address, entry.second.second}; + sparkNames[ToUpper(entry.first)] = entry.second; } return true; @@ -42,17 +41,17 @@ std::set CSparkNameManager::GetSparkNames(int nHeight) { std::set result; for (const auto &entry : sparkNames) - if (entry.second.second >= nHeight) - result.insert(entry.first); + if (entry.second.sparkNameValidityHeight > nHeight) + result.insert(entry.second.name); return result; } -bool CSparkNameManager::GetSparkAddress(const std::string &name, int nHeight, spark::Address &address) +bool CSparkNameManager::GetSparkAddress(const std::string &name, int nHeight, std::string &address) { - auto it = sparkNames.find(name); - if (it == sparkNames.end() || it->second.second < nHeight) { - address = it->second.first; + auto it = sparkNames.find(ToUpper(name)); + if (it != sparkNames.end() && it->second.sparkNameValidityHeight > nHeight) { + address = it->second.sparkAddress; return true; } else { @@ -66,39 +65,17 @@ uint64_t CSparkNameManager::GetSparkNameBlockHeight(const std::string &name) con if (it == sparkNames.end()) throw std::runtime_error("Spark name not found: " + name); - size_t height = it->second.second; + size_t height = it->second.sparkNameValidityHeight; return height; } -std::string CSparkNameManager::GetSparkNameTxID(const std::string &name) const +std::string CSparkNameManager::GetSparkNameAdditionalData(const std::string &name) const { auto it = sparkNames.find(ToUpper(name)); if (it == sparkNames.end()) throw std::runtime_error("Spark name not found: " + name); - uint32_t blockHeight = it->second.second; - - CBlockIndex* pBlockIndex = chainActive[blockHeight]; - if (!pBlockIndex) - throw std::runtime_error("Block not found at height: " + std::to_string(blockHeight)); - - CBlock block; - if (!ReadBlockFromDisk(block, pBlockIndex, Params().GetConsensus())) - throw std::runtime_error("Failed to read block from disk."); - - CSparkNameManager *sparkNameManager = CSparkNameManager::GetInstance(); - for (const CTransactionRef& tx : block.vtx)
- { - CSparkNameTxData sparkNameData; - CValidationState state; - - if (sparkNameManager->CheckSparkNameTx(*tx, blockHeight, state, &sparkNameData)) - { - return (*tx).GetHash().ToString(); - } - } - - throw std::runtime_error("Spark name transaction not found for: " + name); + return it->second.additionalInfo; } bool CSparkNameManager::ParseSparkNameTxData(const CTransaction &tx, spark::SpendTransaction &sparkTx, CSparkNameTxData &sparkNameData, size_t &sparkNameDataPos) @@ -180,8 +157,8 @@ bool CSparkNameManager::CheckSparkNameTx(const CTransaction &tx, int nHeight, CV return state.DoS(100, error("CheckSparkNameTx: additional info is too long")); unsigned char sparkNetworkType = spark::GetNetworkType(); - if (sparkNames.count(sparkNameData.name) > 0 && - sparkNames[sparkNameData.name].first.encode(sparkNetworkType) != sparkNameData.sparkAddress) + if (sparkNames.count(ToUpper(sparkNameData.name)) > 0 && + sparkNames[ToUpper(sparkNameData.name)].sparkAddress != sparkNameData.sparkAddress) return state.DoS(100, error("CheckSparkNameTx: name already exists")); // calculate the hash of the all the transaction except the spark ownership proof @@ -278,22 +255,22 @@ std::string CSparkNameManager::ToUpper(const std::string &str) return result; } -bool CSparkNameManager::AddSparkName(const std::string &name, const spark::Address &address, uint32_t validityBlocks) +bool CSparkNameManager::AddSparkName(const std::string &name, const std::string &address, uint32_t validityBlocks, const std::string &additionalInfo) { std::string upperName = ToUpper(name); - if (sparkNames.count(upperName) > 0 && address.encode(0) != sparkNames[upperName].first.encode(0)) + if (sparkNames.count(upperName) > 0 && address != sparkNames[upperName].sparkAddress) return false; else if (sparkNameAddresses.count(address) > 0) return false; - sparkNames[upperName] = std::make_pair(address, validityBlocks); - sparkNameAddresses[address] = name; + sparkNames[upperName] = CSparkNameBlockIndexData(name, address, validityBlocks, additionalInfo); + sparkNameAddresses[address] = upperName; return true; } -bool CSparkNameManager::RemoveSparkName(const std::string &name, const spark::Address &address) +bool CSparkNameManager::RemoveSparkName(const std::string &name, const std::string &address) { std::string upperName = ToUpper(name); @@ -306,15 +283,15 @@ bool CSparkNameManager::RemoveSparkName(const std::string &name, const spark::Ad return true; } -std::map> CSparkNameManager::RemoveSparkNamesLosingValidity(int nHeight) +std::map CSparkNameManager::RemoveSparkNamesLosingValidity(int nHeight) { - std::map> result; + std::map result; for (auto it = sparkNames.begin(); it != sparkNames.end();) - if (it->second.second >= nHeight) { - std::string sparkAddressStr = it->second.first.encode(spark::GetNetworkType()); - result[it->first] = {sparkAddressStr, it->second.second}; - sparkNameAddresses.erase(it->second.first); + if (nHeight >= it->second.sparkNameValidityHeight) { + std::string sparkAddressStr = it->second.sparkAddress; + sparkNameAddresses.erase(sparkAddressStr); + result[it->first] = it->second; it = sparkNames.erase(it); } else diff --git a/src/sparkname.h b/src/sparkname.h index 2919f01aec..2b4686d744 100644 --- a/src/sparkname.h +++ b/src/sparkname.h @@ -6,6 +6,7 @@ #include "primitives/transaction.h" #include "evo/evodb.h" #include "libspark/keys.h" +#include "libspark/spend_transaction.h" namespace spark { unsigned char GetNetworkType(); @@ -55,13 +56,44 @@ class CSparkNameTxData } }; +/* + * Spark name record as it appears 
in the block index data. This is used to keep track of the added/removed + * spark names in a block. + */ + +struct CSparkNameBlockIndexData { + // 1-20 symbols, only alphanumeric characters and hyphens + std::string name; + // destination address for the alias + std::string sparkAddress; + // spark name is valid until this block height + uint32_t sparkNameValidityHeight{0}; + // additional information + std::string additionalInfo; + + CSparkNameBlockIndexData() {} + CSparkNameBlockIndexData(const std::string _name, const std::string _sparkAddress, uint32_t _sparkNameValidityHeight, const std::string _additionalInfo) + : name(_name), sparkAddress(_sparkAddress), sparkNameValidityHeight(_sparkNameValidityHeight), additionalInfo(_additionalInfo) {} + + ADD_SERIALIZE_METHODS; + + template + void SerializationOp(Stream &s, Operation ser_action) + { + READWRITE(name); + READWRITE(sparkAddress); + READWRITE(sparkNameValidityHeight); + READWRITE(additionalInfo); + } +}; + class CSparkNameManager { private: static CSparkNameManager *sharedSparkNameManager; - std::map> sparkNames; - std::map sparkNameAddresses; + std::map sparkNames; + std::map sparkNameAddresses; public: static const unsigned maximumSparkNameLength = 20; @@ -81,7 +113,7 @@ class CSparkNameManager std::set GetSparkNames(int nHeight); // return the address associated with the spark name - bool GetSparkAddress(const std::string &name, int nHeight, spark::Address &address); + bool GetSparkAddress(const std::string &name, int nHeight, std::string &address); // resolution of conflicts (e.g. for mempool) // TxSet is a set of transactions that might be in conflict with the txData. Should implement contains() method @@ -112,18 +144,18 @@ class CSparkNameManager void AppendSparkNameTxData(CMutableTransaction &txSparkSpend, CSparkNameTxData &sparkNameData, const spark::SpendKey &spendKey, const spark::IncomingViewKey &incomingViewKey); // add and remove spark name - bool AddSparkName(const std::string &name, const spark::Address &address, uint32_t validityBlocks); - bool RemoveSparkName(const std::string &name, const spark::Address &address); + bool AddSparkName(const std::string &name, const std::string &address, uint32_t validityBlocks, const std::string &additionalInfo); + bool RemoveSparkName(const std::string &name, const std::string &address); static CSparkNameManager *GetInstance() { return sharedSparkNameManager; }; uint64_t GetSparkNameBlockHeight(const std::string &name) const; - std::string GetSparkNameTxID(const std::string &name) const; + std::string GetSparkNameAdditionalData(const std::string &name) const; - std::map> RemoveSparkNamesLosingValidity(int nHeight); + std::map RemoveSparkNamesLosingValidity(int nHeight); - bool AddBlock(CBlockIndex *pindex); + bool AddBlock(CBlockIndex *pindex, bool fBackupRewrittenEntries = false); bool RemoveBlock(CBlockIndex *pindex); static std::string ToUpper(const std::string &sparkName); diff --git a/src/txmempool.cpp b/src/txmempool.cpp index 2d3c92cc07..2ba4b625d0 100644 --- a/src/txmempool.cpp +++ b/src/txmempool.cpp @@ -624,6 +624,15 @@ void CTxMemPool::removeUnchecked(txiter it, MemPoolRemovalReason reason) } catch (CBadTxIn&) { } + + // remove all the spark name transactions referencing this tx + for (auto it = sparkNames.begin(); it!=sparkNames.end();) { + if (it->second.second == tx.GetHash()) { + it = sparkNames.erase(it); + } else { + ++it; + } + } } BOOST_FOREACH(const CTxOut &txout, tx.vout) diff --git a/src/wallet/rpcwallet.cpp b/src/wallet/rpcwallet.cpp index 
b489efa3c6..204fcc7230 100644 --- a/src/wallet/rpcwallet.cpp +++ b/src/wallet/rpcwallet.cpp @@ -3940,6 +3940,13 @@ UniValue registersparkname(const JSONRPCRequest& request) { sparkNameData.additionalInfo = additionalData; sparkNameData.sparkNameValidityBlocks = numberOfYears * 365*24*24; + CSparkNameManager *sparkNameManager = CSparkNameManager::GetInstance(); + std::string address; + if (sparkNameManager->GetSparkAddress(sparkName, chainActive.Height(), address)) { + if (sparkAddress != address) + throw JSONRPCError(RPC_INVALID_PARAMETER, "Spark name already registered"); + } + CAmount sparkNameFee = consensusParams.nSparkNamesFee[sparkName.size()]*COIN; CAmount fee; CWalletTx wtx; @@ -3953,7 +3960,8 @@ UniValue registersparkname(const JSONRPCRequest& request) { try { CValidationState state; CReserveKey reserveKey(pwallet); - pwallet->CommitTransaction(wtx, reserveKey, g_connman.get(), state); + if (!pwallet->CommitTransaction(wtx, reserveKey, g_connman.get(), state)) + throw JSONRPCError(RPC_WALLET_ERROR, "CommitTransaction failed: " + FormatStateMessage(state)); } catch (const std::exception &) { auto error = _( From 344050db8d0a2d96c9decc4a4f583021536a2b8e Mon Sep 17 00:00:00 2001 From: Peter Shugalev Date: Mon, 10 Feb 2025 21:59:54 +0100 Subject: [PATCH 27/31] API for quick check of spark name transaction validity before the creation --- src/sparkname.cpp | 30 ++++++++++++++++++++++++++++++ src/sparkname.h | 3 +++ src/wallet/rpcwallet.cpp | 8 +++----- 3 files changed, 36 insertions(+), 5 deletions(-) diff --git a/src/sparkname.cpp b/src/sparkname.cpp index 715a2aa607..5fff04a4c5 100644 --- a/src/sparkname.cpp +++ b/src/sparkname.cpp @@ -206,6 +206,36 @@ bool CSparkNameManager::CheckSparkNameTx(const CTransaction &tx, int nHeight, CV return true; } +bool CSparkNameManager::ValidateSparkNameData(const CSparkNameTxData &sparkNameData, std::string &errorDescription) +{ + errorDescription.clear(); + + if (!IsSparkNameValid(sparkNameData.name)) + errorDescription = "invalid spark name"; + + else if (sparkNameData.additionalInfo.size() > 1024) + errorDescription = "additional info is too long"; + + else if (sparkNameData.sparkNameValidityBlocks > 365*24*24*10) + errorDescription = "transaction can't be valid for more than 10 years"; + + else if (sparkNames.count(ToUpper(sparkNameData.name)) > 0 && + sparkNames[ToUpper(sparkNameData.name)].sparkAddress != sparkNameData.sparkAddress) + errorDescription = "name already exists with another spark address as a destination"; + + else if (sparkNameAddresses.count(sparkNameData.sparkAddress) > 0 && + sparkNameAddresses[sparkNameData.sparkAddress] != ToUpper(sparkNameData.name)) + errorDescription = "spark address is already used for another name"; + + else { + LOCK(mempool.cs); + if (mempool.sparkNames.count(ToUpper(sparkNameData.name)) > 0) + errorDescription = "spark name transaction with that name is already in the mempool"; + } + + return errorDescription.empty(); +} + void CSparkNameManager::AppendSparkNameTxData(CMutableTransaction &txSparkSpend, CSparkNameTxData &sparkNameData, const spark::SpendKey &spendKey, const spark::IncomingViewKey &incomingViewKey) { for (uint32_t n=0; ; n++) { diff --git a/src/sparkname.h b/src/sparkname.h index 2b4686d744..bfaa05521f 100644 --- a/src/sparkname.h +++ b/src/sparkname.h @@ -140,6 +140,9 @@ class CSparkNameManager return false; } + // check the possibility to register a new spark name, return true if it's possible + bool ValidateSparkNameData(const CSparkNameTxData &sparkNameData, std::string 
&errorDescription); + // fill missing CSparkNameTxData fields and append spark name tx data to the transaction void AppendSparkNameTxData(CMutableTransaction &txSparkSpend, CSparkNameTxData &sparkNameData, const spark::SpendKey &spendKey, const spark::IncomingViewKey &incomingViewKey); diff --git a/src/wallet/rpcwallet.cpp b/src/wallet/rpcwallet.cpp index 204fcc7230..b4082f6b43 100644 --- a/src/wallet/rpcwallet.cpp +++ b/src/wallet/rpcwallet.cpp @@ -3941,11 +3941,9 @@ UniValue registersparkname(const JSONRPCRequest& request) { sparkNameData.sparkNameValidityBlocks = numberOfYears * 365*24*24; CSparkNameManager *sparkNameManager = CSparkNameManager::GetInstance(); - std::string address; - if (sparkNameManager->GetSparkAddress(sparkName, chainActive.Height(), address)) { - if (sparkAddress != address) - throw JSONRPCError(RPC_INVALID_PARAMETER, "Spark name already registered"); - } + std::string errorDescription; + if (!sparkNameManager->ValidateSparkNameData(sparkNameData, errorDescription)) + throw JSONRPCError(RPC_INVALID_PARAMETER, "Error creating spark name: "+ errorDescription); CAmount sparkNameFee = consensusParams.nSparkNamesFee[sparkName.size()]*COIN; CAmount fee; From a2366fb82bc0db0b5fa83e79b952bc22ad7f20b8 Mon Sep 17 00:00:00 2001 From: levonpetrosyan93 Date: Tue, 11 Feb 2025 15:54:50 +0400 Subject: [PATCH 28/31] added isAddressMine function --- src/spark/sparkwallet.cpp | 4 ++++ src/spark/sparkwallet.h | 1 + 2 files changed, 5 insertions(+) diff --git a/src/spark/sparkwallet.cpp b/src/spark/sparkwallet.cpp index 4b1f8632d5..219de3f120 100644 --- a/src/spark/sparkwallet.cpp +++ b/src/spark/sparkwallet.cpp @@ -265,6 +265,10 @@ bool CSparkWallet::isAddressMine(const std::string& encodedAddr) { return false; } + return isAddressMine(address); +} + +bool CSparkWallet::isAddressMine(const spark::Address& address) { for (const auto& itr : addresses) { if (itr.second.get_Q1() == address.get_Q1() && itr.second.get_Q2() == address.get_Q2()) return true; diff --git a/src/spark/sparkwallet.h b/src/spark/sparkwallet.h index 77c5a39f2d..e5c317a269 100644 --- a/src/spark/sparkwallet.h +++ b/src/spark/sparkwallet.h @@ -45,6 +45,7 @@ class CSparkWallet { // get address for a diversifier spark::Address getAddress(const int32_t& i); bool isAddressMine(const std::string& encodedAddr); + bool isAddressMine(const spark::Address& address); bool isChangeAddress(const uint64_t& i) const; // list spark mint, mint metadata in memory and in db should be the same at this moment, so get from memory From 2f47a1d36236f4215429f27c4328b342a960d777 Mon Sep 17 00:00:00 2001 From: Peter Shugalev Date: Tue, 11 Feb 2025 13:33:12 +0100 Subject: [PATCH 29/31] Check if the address belongs to the wallet before creating spark name transaction --- src/spark/sparkwallet.cpp | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/spark/sparkwallet.cpp b/src/spark/sparkwallet.cpp index 219de3f120..365a9a972f 100644 --- a/src/spark/sparkwallet.cpp +++ b/src/spark/sparkwallet.cpp @@ -1611,6 +1611,16 @@ CWalletTx CSparkWallet::CreateSparkNameTransaction(CSparkNameTxData &nameData, C if (spendKey == spark::SpendKey(params)) throw std::runtime_error(_("Unable to generate spend key, looks the wallet is locked.")); + spark::Address address(spark::Params::get_default()); + try { + address.decode(nameData.sparkAddress); + } catch (std::exception& e) { + throw std::runtime_error(_("Invalid spark address")); + } + + if (!isAddressMine(address)) + throw std::runtime_error(_("Spark address doesn't belong to the wallet")); + 
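// (Illustrative aside, not part of the patch.) A minimal sketch of the pre-flight check a
// caller might run before building a spark name transaction, using the interfaces added in
// this series (CSparkNameManager::ValidateSparkNameData and the ownership check above).
// The values assigned below are placeholders; error handling and wallet locking are omitted.
//
//   CSparkNameTxData nameData;
//   nameData.name = "my-name";                    // 1-20 alphanumeric characters or '-'
//   nameData.sparkAddress = encodedSparkAddress;  // assumed: spark address already encoded as a string
//   nameData.sparkNameValidityBlocks = 365*24*24; // roughly one year at 24 blocks per hour
//
//   std::string error;
//   if (!CSparkNameManager::GetInstance()->ValidateSparkNameData(nameData, error))
//       throw std::runtime_error("spark name rejected: " + error);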
CMutableTransaction tx = CMutableTransaction(*wtxSparkSpend.tx); CSparkNameManager::GetInstance()->AppendSparkNameTxData(tx, nameData, spendKey, fullViewKey); From 8d973d024f9116c3acbbeea744abce5bc0b13122 Mon Sep 17 00:00:00 2001 From: Peter Shugalev Date: Thu, 13 Feb 2025 23:20:57 +0100 Subject: [PATCH 30/31] Fixed fee calculation for spark name --- src/spark/sparkwallet.cpp | 5 ++++- src/sparkname.cpp | 3 ++- src/sparkname.h | 2 +- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/src/spark/sparkwallet.cpp b/src/spark/sparkwallet.cpp index 365a9a972f..cc6ac73699 100644 --- a/src/spark/sparkwallet.cpp +++ b/src/spark/sparkwallet.cpp @@ -1622,7 +1622,10 @@ CWalletTx CSparkWallet::CreateSparkNameTransaction(CSparkNameTxData &nameData, C throw std::runtime_error(_("Spark address doesn't belong to the wallet")); CMutableTransaction tx = CMutableTransaction(*wtxSparkSpend.tx); - CSparkNameManager::GetInstance()->AppendSparkNameTxData(tx, nameData, spendKey, fullViewKey); + size_t additionalSize = 0; + CSparkNameManager::GetInstance()->AppendSparkNameTxData(tx, nameData, spendKey, fullViewKey, additionalSize); + + txFee += CWallet::GetMinimumFee(additionalSize, nTxConfirmTarget, mempool); wtxSparkSpend.tx = MakeTransactionRef(std::move(tx)); return wtxSparkSpend; diff --git a/src/sparkname.cpp b/src/sparkname.cpp index 5fff04a4c5..c010c896c3 100644 --- a/src/sparkname.cpp +++ b/src/sparkname.cpp @@ -236,7 +236,7 @@ bool CSparkNameManager::ValidateSparkNameData(const CSparkNameTxData &sparkNameD return errorDescription.empty(); } -void CSparkNameManager::AppendSparkNameTxData(CMutableTransaction &txSparkSpend, CSparkNameTxData &sparkNameData, const spark::SpendKey &spendKey, const spark::IncomingViewKey &incomingViewKey) +void CSparkNameManager::AppendSparkNameTxData(CMutableTransaction &txSparkSpend, CSparkNameTxData &sparkNameData, const spark::SpendKey &spendKey, const spark::IncomingViewKey &incomingViewKey, size_t &additionalSize) { for (uint32_t n=0; ; n++) { sparkNameData.addressOwnershipProof.clear(); @@ -272,6 +272,7 @@ void CSparkNameManager::AppendSparkNameTxData(CMutableTransaction &txSparkSpend, CDataStream sparkNameDataStream(SER_NETWORK, PROTOCOL_VERSION); sparkNameDataStream << sparkNameData; + additionalSize = sparkNameDataStream.size(); txSparkSpend.vExtraPayload.insert(txSparkSpend.vExtraPayload.end(), sparkNameDataStream.begin(), sparkNameDataStream.end()); break; diff --git a/src/sparkname.h b/src/sparkname.h index bfaa05521f..65b0a8263d 100644 --- a/src/sparkname.h +++ b/src/sparkname.h @@ -144,7 +144,7 @@ class CSparkNameManager bool ValidateSparkNameData(const CSparkNameTxData &sparkNameData, std::string &errorDescription); // fill missing CSparkNameTxData fields and append spark name tx data to the transaction - void AppendSparkNameTxData(CMutableTransaction &txSparkSpend, CSparkNameTxData &sparkNameData, const spark::SpendKey &spendKey, const spark::IncomingViewKey &incomingViewKey); + void AppendSparkNameTxData(CMutableTransaction &txSparkSpend, CSparkNameTxData &sparkNameData, const spark::SpendKey &spendKey, const spark::IncomingViewKey &incomingViewKey, size_t &additionalSize); // add and remove spark name bool AddSparkName(const std::string &name, const std::string &address, uint32_t validityBlocks, const std::string &additionalInfo); From d4ccf02fa1a0bb2453992c0b716eb804b474109f Mon Sep 17 00:00:00 2001 From: levoncrypto Date: Sun, 16 Feb 2025 00:37:34 +0400 Subject: [PATCH 31/31] Fix for spark names RPC --- src/rpc/blockchain.cpp | 21 
++++++++-------------
 1 file changed, 8 insertions(+), 13 deletions(-)

diff --git a/src/rpc/blockchain.cpp b/src/rpc/blockchain.cpp
index ef81f1933b..9733fb6c7f 100644
--- a/src/rpc/blockchain.cpp
+++ b/src/rpc/blockchain.cpp
@@ -179,21 +179,19 @@ UniValue getblockcount(const JSONRPCRequest& request)
 
 UniValue getsparknames(const JSONRPCRequest &request)
 {
-    if (request.fHelp || request.params.size() > 1) {
+    if (request.fHelp || request.params.size() != 0) {
         throw std::runtime_error(
-            "getsparknames ( height )\n"
+            "getsparknames\n"
             "\nReturns a list of all Spark names.\n"
-            "\nArguments:\n"
-            "1. height (numeric, optional) The block height to filter Spark names (default is the spark names start block height).\n"
             "\nResult:\n"
             "[\n"
-            "  \"name1\", (string) The Spark name and address\n"
-            "  \"name2\", (string) Another Spark name and address\n"
+            "  \"Name\" (string)\n"
+            "  \"Address\" (string)\n"
             "  ...\n"
             "]\n"
             "\nExamples:\n"
-            + HelpExampleCli("getsparknames", "1000")
-            + HelpExampleRpc("getsparknames", "1000")
+            + HelpExampleCli("getsparknames", "")
+            + HelpExampleRpc("getsparknames", "")
         );
     }
 
@@ -205,9 +203,6 @@ UniValue getsparknames(const JSONRPCRequest &request)
     const Consensus::Params &consensusParams = Params().GetConsensus();
 
     int nHeight = chainActive.Height();
-    if (request.params.size() == 1) {
-        nHeight = request.params[0].get_int();
-    }
     CSparkNameManager *sparkNameManager = CSparkNameManager::GetInstance();
     std::set<std::string> sparkNames = sparkNameManager->GetSparkNames(nHeight);
     UniValue result(UniValue::VARR);
@@ -231,8 +226,8 @@ UniValue getsparknamedata(const JSONRPCRequest& request)
             "\nResult:\n"
             "[\n"
             "1. Address (string)\n"
-            "2. Block Height (string)\n"
-            "3. TxId (string)\n"
+            "2. Block Height (int)\n"
+            "3. Additional info (string)\n"
             "]\n"
             "\nExamples:\n"
            + HelpExampleCli("getsparknamedata", "sparkname")
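// (Illustrative aside, not part of the patch.) Given the result fields documented above
// (Address, Block Height, Additional info), a sketch of how a caller could assemble the same
// reply from the CSparkNameManager getters introduced in this series. The exact UniValue
// layout of the real getsparknamedata implementation may differ; "name" is assumed to hold
// the requested spark name.
//
//   CSparkNameManager *mgr = CSparkNameManager::GetInstance();
//   UniValue result(UniValue::VARR);
//   std::string address;
//   if (mgr->GetSparkAddress(name, chainActive.Height(), address)) {
//       result.push_back(address);                                      // 1. Address
//       result.push_back((int64_t)mgr->GetSparkNameBlockHeight(name));  // 2. Block Height
//       result.push_back(mgr->GetSparkNameAdditionalData(name));        // 3. Additional info
//   }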