Skip to content

Commit

Permalink
Spark sync optimize (#1417)
Browse files Browse the repository at this point in the history
* Spark sync optimize

* Spark batching optimized
  • Loading branch information
levonpetrosyan93 authored Mar 19, 2024
1 parent e2162aa commit 17aab98
Show file tree
Hide file tree
Showing 8 changed files with 60 additions and 28 deletions.
16 changes: 14 additions & 2 deletions src/libspark/bpplus.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -351,6 +351,18 @@ bool BPPlus::verify(const std::vector<std::vector<GroupElement>>& unpadded_C, co
scalars.emplace_back(ZERO);
}

std::vector<std::vector<unsigned char>> serialized_Gi;
serialized_Gi.resize(Gi.size());
std::vector<std::vector<unsigned char>> serialized_Hi;
serialized_Hi.resize(Hi.size());
// Serialize and cache Gi and Hi vectors
for (std::size_t i = 0; i < Gi.size(); i++) {
serialized_Gi[i].resize(GroupElement::serialize_size);
Gi[i].serialize(serialized_Gi[i].data());
serialized_Hi[i].resize(GroupElement::serialize_size);
Hi[i].serialize(serialized_Hi[i].data());
}

// Process each proof and add to the batch
for (std::size_t k_proofs = 0; k_proofs < N_proofs; k_proofs++) {
const BPPlusProof proof = proofs[k_proofs];
Expand All @@ -367,8 +379,8 @@ bool BPPlus::verify(const std::vector<std::vector<GroupElement>>& unpadded_C, co
Transcript transcript(LABEL_TRANSCRIPT_BPPLUS);
transcript.add("G", G);
transcript.add("H", H);
transcript.add("Gi", Gi);
transcript.add("Hi", Hi);
transcript.add("Gi", serialized_Gi);
transcript.add("Hi", serialized_Hi);
transcript.add("N", Scalar(N));
transcript.add("C", unpadded_C[k_proofs]);
transcript.add("A", proof.A);
Expand Down
5 changes: 3 additions & 2 deletions src/libspark/spend_transaction.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ SpendTransaction::SpendTransaction(
const SpendKey& spend_key,
const std::vector<InputCoinData>& inputs,
const std::unordered_map<uint64_t, CoverSetData>& cover_set_data,
const std::unordered_map<uint64_t, std::vector<Coin>>& cover_sets,
const uint64_t f,
const uint64_t vout,
const std::vector<OutputCoinData>& outputs
Expand Down Expand Up @@ -55,10 +56,10 @@ SpendTransaction::SpendTransaction(
// Parse out cover set data for this spend
uint64_t set_id = inputs[u].cover_set_id;
this->cover_set_ids.emplace_back(set_id);
if (cover_set_data.count(set_id) == 0)
if (cover_set_data.count(set_id) == 0 || cover_sets.count(set_id) == 0)
throw std::invalid_argument("Required set is not passed");

const auto& cover_set = cover_set_data.at(set_id).cover_set;
const auto& cover_set = cover_sets.at(set_id);
std::size_t set_size = cover_set.size();
if (set_size > N)
throw std::invalid_argument("Wrong set size");
Expand Down
5 changes: 3 additions & 2 deletions src/libspark/spend_transaction.h
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ struct InputCoinData {
};

struct CoverSetData {
std::vector<Coin> cover_set; // set of coins used as a cover set for the spend
std::size_t cover_set_size; // number of coins in the cover set used for the spend
std::vector<unsigned char> cover_set_representation; // a unique representation for the ordered elements of the partial `cover_set` used in the spend
};

Expand All @@ -47,6 +47,7 @@ class SpendTransaction {
const SpendKey& spend_key,
const std::vector<InputCoinData>& inputs,
const std::unordered_map<uint64_t, CoverSetData>& cover_set_data,
const std::unordered_map<uint64_t, std::vector<Coin>>& cover_sets,
const uint64_t f,
const uint64_t vout,
const std::vector<OutputCoinData>& outputs
Expand Down Expand Up @@ -97,7 +98,7 @@ class SpendTransaction {

void setCoverSets(const std::unordered_map<uint64_t, CoverSetData>& cover_set_data) {
for (const auto& data : cover_set_data) {
this->cover_set_sizes[data.first] = data.second.cover_set.size();
this->cover_set_sizes[data.first] = data.second.cover_set_size;
this->cover_set_representations[data.first] = data.second.cover_set_representation;
}
}
Expand Down
9 changes: 5 additions & 4 deletions src/libspark/test/spend_transaction_test.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,8 @@ BOOST_AUTO_TEST_CASE(generate_verify)
std::vector<InputCoinData> spend_coin_data;
std::unordered_map<uint64_t, CoverSetData> cover_set_data;
const std::size_t w = spend_indices.size();
std::unordered_map<uint64_t, std::vector<Coin>> cover_sets;

for (std::size_t u = 0; u < w; u++) {
IdentifiedCoinData identified_coin_data = in_coins[spend_indices[u]].identify(incoming_view_key);
RecoveredCoinData recovered_coin_data = in_coins[spend_indices[u]].recover(full_view_key, identified_coin_data);
Expand All @@ -74,9 +76,10 @@ BOOST_AUTO_TEST_CASE(generate_verify)
spend_coin_data.back().cover_set_id = cover_set_id;

CoverSetData setData;
setData.cover_set = in_coins;
setData.cover_set_size = in_coins.size();
setData.cover_set_representation = random_char_vector();
cover_set_data[cover_set_id] = setData;
cover_sets[cover_set_id] = in_coins;
spend_coin_data.back().index = spend_indices[u];
spend_coin_data.back().k = identified_coin_data.k;
spend_coin_data.back().s = recovered_coin_data.s;
Expand Down Expand Up @@ -118,16 +121,14 @@ BOOST_AUTO_TEST_CASE(generate_verify)
spend_key,
spend_coin_data,
cover_set_data,
cover_sets,
f,
0,
out_coin_data
);

// Verify
transaction.setCoverSets(cover_set_data);
std::unordered_map<uint64_t, std::vector<Coin>> cover_sets;
for (const auto set_data : cover_set_data)
cover_sets[set_data.first] = set_data.second.cover_set;
BOOST_CHECK(SpendTransaction::verify(transaction, cover_sets));
}

Expand Down
10 changes: 10 additions & 0 deletions src/libspark/transcript.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,16 @@ void Transcript::add(const std::string label, const std::vector<unsigned char>&
include_data(data);
}

// Append a vector of arbitrary byte strings (e.g. pre-serialized group
// elements or scalars) to the transcript. The vector flag and element count
// are bound first so that differently-shaped inputs cannot collide.
void Transcript::add(const std::string label, const std::vector<std::vector<unsigned char>>& data) {
    include_flag(FLAG_VECTOR);
    size(data.size());
    include_label(label);
    // Bind each element in order; order matters for challenge derivation.
    for (const auto& entry : data) {
        include_data(entry);
    }
}

// Produce a challenge
Scalar Transcript::challenge(const std::string label) {
// Ensure we can properly populate a scalar
Expand Down
1 change: 1 addition & 0 deletions src/libspark/transcript.h
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ class Transcript {
void add(const std::string, const GroupElement&);
void add(const std::string, const std::vector<GroupElement>&);
void add(const std::string, const std::vector<unsigned char>&);
void add(const std::string label, const std::vector<std::vector<unsigned char>>& data);
Scalar challenge(const std::string);

private:
Expand Down
8 changes: 5 additions & 3 deletions src/spark/sparkwallet.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -1451,6 +1451,7 @@ CWalletTx CSparkWallet::CreateSparkSpendTransaction(
std::vector<spark::InputCoinData> inputs;
std::map<uint64_t, uint256> idAndBlockHashes;
std::unordered_map<uint64_t, spark::CoverSetData> cover_set_data;
std::unordered_map<uint64_t, std::vector<spark::Coin>> cover_sets;
for (auto& coin : estimated.second) {
spark::CSparkState::SparkCoinGroupInfo nextCoinGroupInfo;
uint64_t groupId = coin.nId;
Expand All @@ -1475,18 +1476,19 @@ CWalletTx CSparkWallet::CreateSparkSpendTransaction(
_("Has to have at least two mint coins with at least 1 confirmation in order to spend a coin"));

spark::CoverSetData coverSetData;
coverSetData.cover_set = set;
coverSetData.cover_set_size = set.size();
coverSetData.cover_set_representation = setHash;
coverSetData.cover_set_representation.insert(coverSetData.cover_set_representation.end(), sig.begin(), sig.end());
cover_set_data[groupId] = coverSetData;
cover_sets[groupId] = set;
idAndBlockHashes[groupId] = blockHash;
}


spark::InputCoinData inputCoinData;
inputCoinData.cover_set_id = groupId;
std::size_t index = 0;
if (!getIndex(coin.coin, cover_set_data[groupId].cover_set, index))
if (!getIndex(coin.coin, cover_sets[groupId], index))
throw std::runtime_error(
_("No such coin in set"));
inputCoinData.index = index;
Expand All @@ -1507,7 +1509,7 @@ CWalletTx CSparkWallet::CreateSparkSpendTransaction(

}

spark::SpendTransaction spendTransaction(params, fullViewKey, spendKey, inputs, cover_set_data, fee, transparentOut, privOutputs);
spark::SpendTransaction spendTransaction(params, fullViewKey, spendKey, inputs, cover_set_data, cover_sets, fee, transparentOut, privOutputs);
spendTransaction.setBlockHashes(idAndBlockHashes);
CDataStream serialized(SER_NETWORK, PROTOCOL_VERSION);
serialized << spendTransaction;
Expand Down
34 changes: 19 additions & 15 deletions src/spark/state.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -462,14 +462,13 @@ bool CheckSparkSMintTransaction(
CSparkTxInfo* sparkTxInfo) {

LogPrintf("CheckSparkSMintTransaction txHash = %s\n", hashTx.ToString());
out_coins.clear();
for (const auto& out : vout) {
const auto& script = out.scriptPubKey;
if (script.IsSparkMint() || script.IsSparkSMint()) {
if (script.IsSparkSMint()) {
try {
spark::Coin coin(Params::get_default());
ParseSparkMintCoin(script, coin);
out_coins.push_back(coin);
out_coins.emplace_back(coin);
} catch (const std::exception &) {
return state.DoS(100,
false,
Expand Down Expand Up @@ -557,11 +556,11 @@ bool CheckSparkSpendTransaction(
bool passVerify = false;

uint64_t Vout = 0;
std::vector<CTxOut> vout;
std::size_t private_num = 0;
for (const CTxOut &txout : tx.vout) {
const auto& script = txout.scriptPubKey;
if (!script.empty() && script.IsSparkSMint()) {
vout.push_back(txout);
private_num++;
} else if (script.IsSparkMint() ||
script.IsLelantusMint() ||
script.IsLelantusJMint() ||
Expand All @@ -570,19 +569,23 @@ bool CheckSparkSpendTransaction(
} else {
Vout += txout.nValue;
}

}

if (vout.size() > ::Params().GetConsensus().nMaxSparkOutLimitPerTx)
if (private_num > ::Params().GetConsensus().nMaxSparkOutLimitPerTx)
return false;

std::vector<Coin> out_coins;
if (!CheckSparkSMintTransaction(vout, state, hashTx, fStatefulSigmaCheck, out_coins, sparkTxInfo))
out_coins.reserve(private_num);
if (!CheckSparkSMintTransaction(tx.vout, state, hashTx, fStatefulSigmaCheck, out_coins, sparkTxInfo))
return false;
spend->setOutCoins(out_coins);
std::unordered_map<uint64_t, std::vector<Coin>> cover_sets;
std::unordered_map<uint64_t, CoverSetData> cover_set_data;
const auto idAndBlockHashes = spend->getBlockHashes();

BatchProofContainer* batchProofContainer = BatchProofContainer::get_instance();
bool useBatching = batchProofContainer->fCollectProofs && !isVerifyDB && !isCheckWallet && sparkTxInfo && !sparkTxInfo->fInfoIsComplete;

for (const auto& idAndHash : idAndBlockHashes) {
CSparkState::SparkCoinGroupInfo coinGroup;
if (!sparkState.GetCoinGroupInfo(idAndHash.first, coinGroup))
Expand All @@ -598,6 +601,8 @@ bool CheckSparkSpendTransaction(
std::vector<unsigned char> set_hash = GetAnonymitySetHash(index, idAndHash.first);

std::vector<Coin> cover_set;
cover_set.reserve(coinGroup.nCoins);
std::size_t set_size = 0;
// Build a vector with all the public coins with given id before
// the block on which the spend occurred.
// This list of public coins is required by function "Verify" of spend.
Expand All @@ -613,7 +618,9 @@ bool CheckSparkSpendTransaction(
BOOST_FOREACH(
const auto& coin,
index->sparkMintedCoins[id]) {
cover_set.push_back(coin);
set_size++;
if (!useBatching)
cover_set.push_back(coin);
}
}
}
Expand All @@ -624,12 +631,12 @@ bool CheckSparkSpendTransaction(
}

CoverSetData setData;
setData.cover_set = cover_set;
setData.cover_set_size = set_size;
if (!set_hash.empty())
setData.cover_set_representation = set_hash;
setData.cover_set_representation.insert(setData.cover_set_representation.end(), txHashForMetadata.begin(), txHashForMetadata.end());

cover_sets[idAndHash.first] = cover_set;
cover_sets[idAndHash.first] = std::move(cover_set);
cover_set_data [idAndHash.first] = setData;
}
spend->setCoverSets(cover_set_data);
Expand All @@ -641,9 +648,6 @@ bool CheckSparkSpendTransaction(
return state.DoS(100,
error("CheckSparkSpendTransaction: No cover set found."));
}

BatchProofContainer* batchProofContainer = BatchProofContainer::get_instance();
bool useBatching = batchProofContainer->fCollectProofs && !isVerifyDB && !isCheckWallet && sparkTxInfo && !sparkTxInfo->fInfoIsComplete;

// if we are collecting proofs, skip verification and collect proofs
// add proofs into container
Expand Down Expand Up @@ -1216,7 +1220,7 @@ int CSparkState::GetCoinSetForSpend(
}

SparkCoinGroupInfo &coinGroup = coinGroups[coinGroupID];

coins_out.reserve(coinGroup.nCoins);
int numberOfCoins = 0;
for (CBlockIndex *block = coinGroup.lastBlock;; block = block->pprev) {

Expand Down

0 comments on commit 17aab98

Please sign in to comment.