diff --git a/src/bucket/BucketIndexImpl.cpp b/src/bucket/BucketIndexImpl.cpp
index 0db0104e4d..a24f6ae515 100644
--- a/src/bucket/BucketIndexImpl.cpp
+++ b/src/bucket/BucketIndexImpl.cpp
@@ -82,7 +82,6 @@ BucketIndexImpl<IndexT>::BucketIndexImpl(BucketManager& bm,
         xdr::xdr_traits<BucketEntry>::serial_size(BucketEntry{});
     auto fileSize = fs::size(filename.string());
     auto estimatedNumElems = fileSize / estimatedLedgerEntrySize;
-    size_t estimatedIndexEntries;
 
     // Initialize bloom filter for range index
     if constexpr (std::is_same<IndexT, RangeIndex>::value)
@@ -105,7 +104,7 @@ BucketIndexImpl<IndexT>::BucketIndexImpl(BucketManager& bm,
         params.random_seed = shortHash::getShortHashInitKey();
         params.compute_optimal_parameters();
         mData.filter = std::make_unique<bloom_filter>(params);
-        estimatedIndexEntries = fileSize / mData.pageSize;
+        auto estimatedIndexEntries = fileSize / mData.pageSize;
         CLOG_DEBUG(
             Bucket,
             "Bloom filter initialized with params: projected element count "
@@ -115,13 +114,11 @@ BucketIndexImpl<IndexT>::BucketIndexImpl(BucketManager& bm,
             params.false_positive_probability,
             params.optimal_parameters.number_of_hashes,
             params.optimal_parameters.table_size);
-    }
-    else
-    {
-        estimatedIndexEntries = estimatedNumElems;
-    }
 
-    mData.keysToOffset.reserve(estimatedIndexEntries);
+        // We don't have a good way of estimating IndividualIndex size, so
+        // only reserve range indexes
+        mData.keysToOffset.reserve(estimatedIndexEntries);
+    }
 
     XDRInputFileStream in;
     in.open(filename.string());
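
The change below is a minimal, self-contained sketch of the pattern this diff moves to, not the stellar-core code itself: the page-count estimate (`fileSize / pageSize`) only makes sense for the page-based range index, so both the estimate and the `reserve()` call are scoped inside the `if constexpr` branch, and the individual index is left to grow on demand. All names here (`RangeIndex`, `IndividualIndex`, `PageIndex`, `mKeysToOffset`, `kPageSize`) are hypothetical stand-ins.

```cpp
#include <cstddef>
#include <type_traits>
#include <utility>
#include <vector>

struct RangeIndex {};      // one entry per fixed-size page of the bucket file
struct IndividualIndex {}; // one entry per bucket entry; count is hard to estimate

template <class IndexT> struct PageIndex
{
    static constexpr std::size_t kPageSize = 16 * 1024;
    std::vector<std::pair<std::size_t, std::size_t>> mKeysToOffset;

    explicit PageIndex(std::size_t fileSize)
    {
        if constexpr (std::is_same<IndexT, RangeIndex>::value)
        {
            // Pages are fixed-size, so fileSize / pageSize is a reasonable
            // upper bound on the number of index entries; reserve up front.
            auto estimatedIndexEntries = fileSize / kPageSize;
            mKeysToOffset.reserve(estimatedIndexEntries);
        }
        // IndividualIndex: entry sizes vary, so there is no reliable
        // estimate and the vector is left to grow on demand.
    }
};

int main()
{
    PageIndex<RangeIndex> rangeIdx(1 << 20);            // reserves ~64 entries
    PageIndex<IndividualIndex> individualIdx(1 << 20);  // no reserve
}
```

Scoping the variable inside the branch also removes the uninitialized `size_t` declaration at function scope, which is what the deleted line in the first hunk was for.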