161 changes: 103 additions & 58 deletions src/consensus/chain.cpp
@@ -138,8 +138,9 @@ std::vector<ConstBlockPtr> Chain::GetSortedSubgraph(const ConstBlockPtr& pblock)
return result;
}

void Chain::CheckTxPartition(Vertex& b, float ms_hashrate) {
if (b.minerChainHeight <= GetParams().sortitionThreshold) {
void Chain::CheckTxPartition(Vertex& b) {
auto msLinkHeight = GetVertex(b.cblock->GetMilestoneHash())->height;
if (msLinkHeight <= GetParams().sortitionThreshold) {
if (b.cblock->IsRegistration()) {
if (b.cblock->GetTransactionSize() > 1) {
memset(&b.validity[1], Vertex::Validity::INVALID, b.validity.size() - 1);
@@ -162,17 +163,18 @@ void Chain::CheckTxPartition(Vertex& b, float ms_hashrate) {
// Construct a cumulator for the block if it is not cached
Cumulator cum;

ConstBlockPtr cursor = b.cblock;
Vertex blk_cursor = b;
VertexPtr previous;
while (!cum.Full()) {
previous = GetVertex(cursor->GetPrevHash());
while (blk_cursor.height > (b.height - GetCumulatorCapacity()) && blk_cursor.height > 0) {
previous = GetVertex(blk_cursor.cblock->GetPrevHash());

if (!previous) {
// should not happen
throw std::logic_error("Cannot find " + std::to_string(cursor->GetPrevHash()) + " in cumulatorMap.");
throw std::logic_error("Cannot find " + std::to_string(blk_cursor.cblock->GetPrevHash()) +
" in cumulatorMap.");
}
cum.Add(previous->cblock, false);
cursor = previous->cblock;
cum.Add(*previous, *this, false);
blk_cursor = *previous;
}

cumulatorMap_.emplace(b.cblock->GetPrevHash(), cum);
@@ -182,7 +184,7 @@ void Chain::CheckTxPartition(Vertex& b, float ms_hashrate) {
Cumulator& cum = nodeHandler.mapped();

// Allowed distance
auto allowed = CalculateAllowedDist(cum, ms_hashrate);
auto allowed = CalculateAllowedDist(cum, msLinkHeight);

// Distances of the transaction hashes and previous block hash
const auto& txns = b.cblock->GetTransactions();
@@ -201,11 +203,20 @@ void Chain::CheckTxPartition(Vertex& b, float ms_hashrate) {
}

// Update key for the cumulator
cum.Add(b.cblock, true);
cum.Add(b, *this, true);
nodeHandler.key() = b.cblock->GetHash();
cumulatorMap_.insert(std::move(nodeHandler));
}
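
The reworked partition check no longer takes a milestone hashrate; it keys the sortition window off the milestone link's height and off the miner's share of recent level sets tracked by the Cumulator. A minimal standalone sketch of that idea, using plain STL containers in place of the project's Cumulator/Vertex types (all names and numbers below are illustrative, not the actual API):

```cpp
#include <algorithm>
#include <cstddef>
#include <deque>
#include <iostream>

// One entry per level set: {level-set size, blocks of this miner chain in it}.
struct Segment {
    std::size_t lvsSize;
    std::size_t minerBlocks;
};

// Miner's share of the last `window` level sets (newest at the back).
double MinerShare(const std::deque<Segment>& segments, std::size_t window) {
    double lvs = 0, miner = 0;
    std::size_t n = std::min(window, segments.size());
    for (auto it = segments.rbegin(); it != segments.rbegin() + n; ++it) {
        lvs   += it->lvsSize;
        miner += it->minerBlocks;
    }
    return lvs > 0 ? miner / lvs : 0;
}

int main() {
    std::deque<Segment> segments{{10, 1}, {12, 2}, {8, 1}, {10, 2}};
    double share   = MinerShare(segments, 3);   // share over the last 3 level sets
    double allowed = share * 0.01 * 1e18;       // times sortitionCoefficient * maxTarget (made-up values)
    std::cout << share << " " << allowed << "\n";
}
```
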

const Cumulator* Chain::GetCumulator(const uint256& h) const {
auto it = cumulatorMap_.find(h);
if (it == cumulatorMap_.end()) {
return nullptr;
}

return &it->second;
}

VertexPtr Chain::Verify(const ConstBlockPtr& pblock) {
auto height = GetChainHead()->height + 1;

@@ -216,6 +227,7 @@ VertexPtr Chain::Verify(const ConstBlockPtr& pblock) {

std::vector<VertexPtr> vtcs;
std::vector<VertexWPtr> wvtcs;
std::unordered_set<Cumulator*> cumulators;
RegChange regChange;
TXOC txoc;
vtcs.reserve(blocksToValidate.size());
@@ -230,8 +242,9 @@ VertexPtr Chain::Verify(const ConstBlockPtr& pblock) {
// validate each block in order
for (auto& vtx : vtcs) {
vtx->height = height;

const auto& blkHash = vtx->cblock->GetHash();
if (vtx->cblock->IsFirstRegistration()) {
const auto& blkHash = vtx->cblock->GetHash();
prevRedempHashMap_.insert_or_assign(blkHash, const_cast<uint256&&>(blkHash));
vtx->isRedeemed = Vertex::NOT_YET_REDEEMED;
regChange.Create(blkHash, blkHash);
@@ -261,6 +274,11 @@ VertexPtr Chain::Verify(const ConstBlockPtr& pblock) {
vtx->UpdateReward(GetPrevReward(*vtx));
}
verifying_.insert({vtx->cblock->GetHash(), vtx});

auto cum = cumulatorMap_.find(blkHash);
if (cum != cumulatorMap_.end()) {
cumulators.insert(&(cum->second));
}
}

CreateNextMilestone(GetChainHead(), *vtcs.back(), std::move(wvtcs), std::move(regChange), std::move(txoc));
@@ -322,7 +340,7 @@ std::pair<TXOC, TXOC> Chain::Validate(Vertex& vertex, RegChange& regChange) {
// txns with invalid distance will have validity == INVALID, and others are left unchanged
VertexPtr prevMs = DAG->GetMsVertex(vertex.cblock->GetMilestoneHash());
assert(prevMs);
CheckTxPartition(vertex, prevMs->snapshot->hashRate);
CheckTxPartition(vertex);

// check utxo
// txns with valid utxo will have validity == VALID, and others are left unchanged
@@ -520,6 +538,16 @@ VertexPtr Chain::GetMsVertexCache(const uint256& msHash) const {
return nullptr;
}

MilestonePtr Chain::GetMsVertex(size_t height) const {
size_t leastHeightCached = GetLeastHeightCached();

if (height < leastHeightCached) {
return STORE->GetMilestoneAt(height)->snapshot;
} else {
return milestones_[height - leastHeightCached];
}
}

void Chain::PopOldest(const std::vector<uint256>& vtxToRemove, const TXOC& txocToRemove) {
for (const auto& lvsh : vtxToRemove) {
// Modify redemption status for those prev regs in DB
@@ -588,77 +616,94 @@ bool Chain::IsTxFitsLedger(const ConstTxPtr& tx) const {
// Cumulator
////////////////////

void Cumulator::Add(const ConstBlockPtr& block, bool ascending) {
const auto& chainwork = block->GetChainWork();
uint32_t chainwork_comp = chainwork.GetCompact();
size_t GetCumulatorCapacity() {
static size_t const cap = GetParams().punctualityThred + GetParams().sortitionThreshold;
return cap;
}

if (timestamps.size() < GetParams().sortitionThreshold) {
sum += chainwork;
} else {
arith_uint256 subtrahend = arith_uint256().SetCompact(chainworks.front().first);
sum += (chainwork - subtrahend);
void Cumulator::Add(const Vertex& block, const Chain& chain, bool ascending) {
auto msHeight = block.height;
assert(msHeight > 0);

// Pop the first element if the counter is already 1,
// or decrease the counter of the first element by 1
if (chainworks.front().second == 1) {
chainworks.pop_front();
} else {
chainworks.front().second--;
// Update queue
if (ascending) {
// Align sizes_ with the current milestone height
auto back_height = sizes_.back().first;
while (back_height++ < msHeight) {
// This happens when a level set contains
// no block from this miner chain.

if (Full()) {
sizes_.pop_front();
}

sizes_.emplace_back(back_height, std::make_pair(chain.GetMsVertex(back_height)->lvsSize, 0));
}

timestamps.pop_front();
}
sizes_.back().second.second++;

if (ascending) {
if (!chainworks.empty() && chainworks.back().first == chainwork_comp) {
chainworks.back().second++;
} else {
chainworks.emplace_back(chainwork_comp, 1);
}
timestamps.emplace_back(block->GetTime());
} else {
if (!chainworks.empty() && chainworks.front().first == chainwork_comp) {
chainworks.front().second++;
} else {
chainworks.emplace_front(chainwork_comp, 1);
auto front_height = sizes_.front().first;
while (front_height-- > msHeight) {
sizes_.emplace_front(front_height, std::make_pair(chain.GetMsVertex(front_height)->lvsSize, 0));

// We don't check whether the queue is full here.
// It's the caller's responsibility to make sure that
// the capacity is not exceeded when adding elements
// backwards.
}
timestamps.emplace_front(block->GetTime());

sizes_.front().second.second++;
}
}
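
The ascending path of Add has to keep sizes_ contiguous in milestone height: if a block's milestone link jumps several heights ahead, the intervening level sets (which contain no block of this miner chain) are appended with a zero miner count, and the oldest entries are evicted once the window holds GetCumulatorCapacity() level sets. A simplified, self-contained sketch of just that bookkeeping (the names and the lvsSizeAt parameter are assumptions, not the project's API):

```cpp
#include <cstddef>
#include <deque>
#include <utility>

// {height, {level-set size, miner-chain blocks in that level set}}
using Entry = std::pair<std::size_t, std::pair<std::size_t, std::size_t>>;

// Assumes a non-empty window; `lvsSizeAt` stands in for the level-set size
// that the real code fetches via chain.GetMsVertex(height)->lvsSize.
void AddAscending(std::deque<Entry>& sizes, std::size_t msHeight,
                  std::size_t lvsSizeAt, std::size_t capacity) {
    auto back = sizes.back().first;
    while (back++ < msHeight) {
        // Level sets skipped by this miner chain get a zero miner count.
        if (sizes.size() == capacity) {
            sizes.pop_front();  // keep the window at `capacity` level sets
        }
        sizes.emplace_back(back, std::pair<std::size_t, std::size_t>{lvsSizeAt, 0});
    }
    // Count the new block against the level set of its milestone link.
    sizes.back().second.second++;
}
```

The descending path deliberately skips the eviction step, since the caller in CheckTxPartition only walks back a bounded number of level sets while rebuilding an uncached cumulator.
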

arith_uint256 Cumulator::Sum() const {
return sum;
}
double Cumulator::Percentage(size_t height) const {
if (sum_cache_.size() > GetParams().punctualityThred) {
sum_cache_.erase(sizes_.back().first - GetParams().punctualityThred);
}

auto sums_it = sum_cache_.find(height);
if (sums_it == sum_cache_.end()) {
auto cursor = sizes_.rbegin();
while (cursor->first != height) {
cursor++;
}

auto sums = cursor->second;
cursor++;
while (cursor->first > (height - GetParams().sortitionThreshold)) {
sums.first += cursor->second.first;
sums.second += cursor->second.second;

cursor++;
}

sum_cache_.emplace(height, sums);

uint32_t Cumulator::TimeSpan() const {
return timestamps.back() - timestamps.front();
return static_cast<double>(sums.second) / sums.first;
}

return sums_it->second.second / sums_it->second.first;
}
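
Percentage walks the window backwards from `height`, sums the level-set sizes and the miner-chain segment sizes over the last sortitionThreshold heights, and memoizes the pair so the walk is done at most once per height. A tiny worked example with made-up numbers:

```cpp
#include <cassert>

int main() {
    // Illustrative values only: the last three level sets have sizes
    // 10, 12 and 8, of which 1, 2 and 1 blocks belong to this miner chain.
    double lvsSum   = 10 + 12 + 8;   // 30
    double minerSum = 1 + 2 + 1;     // 4
    double percentage = minerSum / lvsSum;
    assert(percentage > 0.13 && percentage < 0.14);
    return 0;
}
```
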

bool Cumulator::Full() const {
return timestamps.size() == GetParams().sortitionThreshold;
return sizes_.size() == GetCumulatorCapacity();
}

bool Cumulator::Empty() const {
return timestamps.empty();
return sizes_.empty();
}

void Cumulator::Clear() {
chainworks.clear();
timestamps.clear();
sum = 0;
sizes_.clear();
}

std::string std::to_string(const Cumulator& cum) {
std::string s;
s += " Cumulator { \n";
s += " chainworks { \n";
for (auto& e : cum.chainworks) {
s += strprintf(" { %s, %s }\n", arith_uint256().SetCompact(e.first).GetLow64(), e.second);
}
s += " }\n";
s += " timestamps { \n";
for (auto& t : cum.timestamps) {
s += strprintf(" %s\n", t);
s += " sizes { \n";
for (auto& e : cum.sizes_) {
s += strprintf(" { %s, %s, %s}\n", e.first, e.second.first, e.second.second);
}
s += " }\n";
s += " }";
53 changes: 31 additions & 22 deletions src/consensus/chain.h
@@ -13,41 +13,48 @@
#include <optional>
#include <vector>

class Chain;

class Cumulator;
namespace std {
string to_string(const Cumulator& b);
} // namespace std

size_t GetCumulatorCapacity();

class Cumulator {
public:
void Add(const ConstBlockPtr& block, bool ascending);
arith_uint256 Sum() const;
uint32_t TimeSpan() const;
void Add(const Vertex& block, const Chain&, bool ascending);
double Percentage(size_t height) const;
bool Full() const;
bool Empty() const;
void Clear();

friend std::string std::to_string(const Cumulator&);
friend struct std::hash<Cumulator>;

private:
// Elements in chainworks:
// {chainwork, counter of consecutive chainworks that are equal}
// For example, the queue of chainworks
// { 1, 1, 3, 2, 2, 2, 2, 2, 2, 2 }
// are stored as:
// { {1, 2}, {3, 1}, {2, 7} }
std::deque<std::pair<uint32_t, uint16_t>> chainworks;
std::deque<uint32_t> timestamps;
arith_uint256 sum = 0;
// <
// level set height,
// <
// size of the level set,
// size of the segment of the miner chain contained in the level set
// >
// >
std::deque<std::pair<size_t, std::pair<size_t, size_t>>> sizes_;

// Caches the sums of the level set segments of length sortitionThreshold,
// so that each sum need only be calculated at most once at each height.
mutable std::unordered_map<size_t, std::pair<double, double>> sum_cache_;
};
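
To make the nested-pair layout of sizes_ concrete, here is a hand-built example with illustrative values (heights 118 and 119, level sets of 8 and 12 blocks, of which 1 and 2 belong to this miner chain):

```cpp
#include <cstddef>
#include <deque>
#include <utility>

std::deque<std::pair<std::size_t, std::pair<std::size_t, std::size_t>>> sizes{
    {118, {8, 1}},   // height 118: level set of 8 blocks, 1 on this miner chain
    {119, {12, 2}},  // height 119: level set of 12 blocks, 2 on this miner chain
};
```
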

/** Hasher for unordered_map */
template <>
struct std::hash<Cumulator> {
size_t operator()(const Cumulator& x) const {
return x.Sum().GetCompact() ^ x.TimeSpan();
}
};
// template <>
// struct std::hash<Cumulator> {
// size_t operator()(const Cumulator& x) const {
// return (size_t) x.lvs_sum_ ^ (size_t) x.miner_sum_;
//}
//};

class Chain {
public:
@@ -78,6 +85,7 @@ class Chain {
VertexPtr GetVertexCache(const uint256&) const;
VertexPtr GetVertex(const uint256&) const;
VertexPtr GetMsVertexCache(const uint256&) const;
MilestonePtr GetMsVertex(size_t height) const;

/** Gets a list of blocks to verify by post-order DFS */
std::vector<ConstBlockPtr> GetSortedSubgraph(const ConstBlockPtr& pblock);
@@ -110,6 +118,8 @@ class Chain {
milestones_.emplace_back(ms.snapshot);
}

const Cumulator* GetCumulator(const uint256&) const;

/**
* Off-line verification (building ledger) on a level set
* performed when we add a milestone block to this chain.
@@ -188,7 +198,7 @@ class Chain {
std::optional<TXOC> ValidateRedemption(Vertex&, RegChange&);
bool ValidateTx(const Transaction&, uint32_t index, TXOC&, Coin& fee);
TXOC ValidateTxns(Vertex&);
void CheckTxPartition(Vertex&, float);
void CheckTxPartition(Vertex&);

Coin GetPrevReward(const Vertex& vtx) const {
return GetVertex(vtx.cblock->GetPrevHash())->cumulativeReward;
@@ -202,9 +212,8 @@

typedef std::unique_ptr<Chain> ChainPtr;

inline double CalculateAllowedDist(const Cumulator& cum, float msHashRate) {
return cum.Sum().GetDouble() / std::max(cum.TimeSpan(), (uint32_t) 1) / msHashRate *
(GetParams().sortitionCoefficient * GetParams().maxTarget.GetDouble());
inline double CalculateAllowedDist(const Cumulator& cum, size_t msHeight) {
return cum.Percentage(msHeight) * (GetParams().sortitionCoefficient * GetParams().maxTarget.GetDouble());
}
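
With this change the allowed distance is hashrate-free: it scales the sortition cap directly by the miner's share of recent level sets instead of dividing accumulated chainwork by the elapsed time and milestone hashrate. A rough numeric illustration under assumed parameters (the coefficient and target values below are placeholders, not the chain's real parameters):

```cpp
#include <cmath>
#include <iostream>

int main() {
    double percentage           = 4.0 / 30.0;          // miner's share, as in the example above
    double sortitionCoefficient = 0.01;                // placeholder value
    double maxTarget            = std::pow(2.0, 224);  // placeholder value
    double allowed = percentage * sortitionCoefficient * maxTarget;
    std::cout << allowed << "\n";
}
```
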

#endif // EPIC_CHAIN_H
4 changes: 4 additions & 0 deletions src/consensus/dag_manager.cpp
@@ -774,3 +774,7 @@ void DAGManager::UpdateStatOnLvsStored(const MilestonePtr& pms) {
stat_.tStart = pms->GetLevelSet().front().lock()->cblock->GetTime();
}
}

const Cumulator* DAGManager::GetCumulator(const uint256& h) const {
return GetBestChain().GetCumulator(h);
}
2 changes: 2 additions & 0 deletions src/consensus/dag_manager.h
@@ -114,6 +114,8 @@ class DAGManager {

StatData GetStatData() const;

const Cumulator* GetCumulator(const uint256&) const;

/**
* Blocks the main thread from going forward
* until DAG completes all the tasks