Merge pull request #728 from evoskuil/master
Adapt to store change of point table removal.
evoskuil authored Feb 4, 2025
2 parents 4016527 + e7cf641 commit fa0cbba
Showing 5 changed files with 42 additions and 129 deletions.
console/executor_dumps.cpp: 4 changes (0 additions & 4 deletions)
@@ -92,7 +92,6 @@ void executor::dump_body_sizes() const
query_.header_body_size() %
query_.txs_body_size() %
query_.tx_body_size() %
- query_.point_body_size() %
query_.input_body_size() %
query_.output_body_size() %
query_.puts_body_size() %
@@ -112,7 +111,6 @@ void executor::dump_records() const
logger(format(BN_MEASURE_RECORDS) %
query_.header_records() %
query_.tx_records() %
- query_.point_records() %
query_.candidate_records() %
query_.confirmed_records() %
query_.spend_records() %
@@ -127,7 +125,6 @@ void executor::dump_buckets() const
query_.header_buckets() %
query_.txs_buckets() %
query_.tx_buckets() %
- query_.point_buckets() %
query_.spend_buckets() %
query_.prevout_buckets() %
query_.strong_tx_buckets() %
@@ -144,7 +141,6 @@ void executor::dump_collisions() const
(to_double(query_.header_records()) / query_.header_buckets()) %
(to_double(query_.header_records()) / query_.txs_buckets()) %
(to_double(query_.tx_records()) / query_.tx_buckets()) %
- (to_double(query_.point_records()) / query_.point_buckets()) %
(to_double(query_.spend_records()) / query_.spend_buckets()) %
(to_double(query_.prevout_records()) / query_.prevout_buckets()) %
(to_double(query_.strong_tx_records()) / query_.strong_tx_buckets()) %
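The four deletions above drop the point entry from the size, record, bucket, and collision dumps; the collision report's arithmetic is unchanged: each rate is a table's record count divided by its bucket count (its load factor). A minimal standalone sketch of that arithmetic with invented counts (not real query_ values), plus the standard uniform-hashing estimate of how many buckets end up non-empty, which is what scan_buckets() measures directly:

#include <cmath>
#include <cstdio>

int main()
{
    // Hypothetical counts standing in for query_.tx_records() / query_.tx_buckets().
    const double records = 1'250'000'000.0;
    const double buckets = 550'000'000.0;

    // Collision rate as reported by dump_collisions(): average records per bucket.
    const auto load_factor = records / buckets;

    // Under uniform hashing the expected fraction of non-empty buckets is
    // approximately 1 - e^(-n/m); the bucket scans measure this fraction directly.
    const auto expected_filled = 1.0 - std::exp(-load_factor);

    std::printf("load factor %.3f, expected filled %.3f\n",
        load_factor, expected_filled);
    return 0;
}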
console/executor_scans.cpp: 59 changes (0 additions & 59 deletions)
@@ -190,29 +190,6 @@ void executor::scan_buckets() const

// ------------------------------------------------------------------------

- filled = zero;
- bucket = max_size_t;
- start = logger::now();
- while (!cancel_ && (++bucket < query_.point_buckets()))
- {
- const auto top = query_.top_point(bucket);
- if (!top.is_terminal())
- ++filled;
-
- if (is_zero(bucket % tx_frequency))
- logger(format("point" BN_READ_ROW) % bucket %
- duration_cast<seconds>(logger::now() - start).count());
- }
-
- if (cancel_)
- logger(BN_OPERATION_CANCELED);
-
- span = duration_cast<seconds>(logger::now() - start);
- logger(format("point" BN_READ_ROW) % (to_double(filled) / bucket) %
- span.count());
-
- // ------------------------------------------------------------------------
-
filled = zero;
bucket = max_size_t;
start = logger::now();
@@ -381,42 +358,6 @@ void executor::scan_collisions() const
strong_tx.clear();
strong_tx.shrink_to_fit();

- // point
- // ------------------------------------------------------------------------
-
- index = max_size_t;
- start = logger::now();
- const auto point_buckets = query_.point_buckets();
- const auto point_records = query_.point_records();
- std_vector<size_t> point(point_buckets, empty);
- while (!cancel_ && (++index < point_records))
- {
- const tx_link link{ possible_narrow_cast<tx_link::integer>(index) };
- ++point.at(hash(query_.get_point_key(link.value)) % point_buckets);
-
- if (is_zero(index % tx_frequency))
- logger(format("point" BN_READ_ROW) % index %
- duration_cast<seconds>(logger::now() - start).count());
- }
-
- if (cancel_)
- logger(BN_OPERATION_CANCELED);
-
- // ........................................................................
-
- const auto point_count = count(point);
- span = duration_cast<seconds>(logger::now() - start);
- logger(format("point: %1% in %2%s buckets %3% filled %4% rate %5%") %
- index % span.count() % point_buckets % point_count %
- (to_double(point_count) / point_buckets));
-
- for (const auto& entry: dump(point))
- logger(format("point: %1% frequency: %2%") %
- entry.first % entry.second);
-
- point.clear();
- point.shrink_to_fit();
-
// spend
// ------------------------------------------------------------------------

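The deleted blocks above were the point-table halves of scan_buckets() and scan_collisions(): the first walked every bucket and counted how many were occupied, the second hashed every record key into a per-bucket counter and dumped an occupancy histogram. A self-contained sketch of that measurement technique, using std::hash over synthetic keys in place of the store's get_point_key()/hash() helpers (assumptions, for illustration only):

#include <cstdint>
#include <cstdio>
#include <functional>
#include <map>
#include <vector>

int main()
{
    // Hypothetical sizes; the removed code used query_.point_buckets()/point_records().
    const std::size_t buckets = 97;
    const std::size_t records = 500;

    // One counter per bucket, as in the removed std_vector<size_t> point(...).
    std::vector<std::size_t> occupancy(buckets, 0);
    for (std::size_t index = 0; index < records; ++index)
    {
        // Synthetic key; the real scan hashed the key stored for each record.
        const auto key = index * 2654435761u;
        ++occupancy.at(std::hash<std::uint64_t>{}(key) % buckets);
    }

    // Filled rate: fraction of buckets that received at least one record.
    std::size_t filled = 0;
    for (const auto count : occupancy)
        if (count > 0)
            ++filled;

    // Frequency table: how many buckets hold exactly k records.
    std::map<std::size_t, std::size_t> frequency;
    for (const auto count : occupancy)
        ++frequency[count];

    std::printf("filled rate %.3f\n", static_cast<double>(filled) / buckets);
    for (const auto& entry : frequency)
        std::printf("occupancy %zu: %zu buckets\n", entry.first, entry.second);
    return 0;
}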
console/localize.hpp: 68 changes (32 additions & 36 deletions)
@@ -73,30 +73,28 @@ namespace node {
" header :%1%\n" \
" txs :%2%\n" \
" tx :%3%\n" \
" point :%4%\n" \
" input :%5%\n" \
" output :%6%\n" \
" puts :%7%\n" \
" candidate :%8%\n" \
" confirmed :%9%\n" \
" spend :%10%\n" \
" prevout :%11%\n" \
" strong_tx :%12%\n" \
" valid_tx :%13%\n" \
" valid_bk :%14%\n" \
" address :%15%\n" \
" neutrino :%16%"
" input :%4%\n" \
" output :%5%\n" \
" puts :%6%\n" \
" candidate :%7%\n" \
" confirmed :%8%\n" \
" spend :%9%\n" \
" prevout :%10%\n" \
" strong_tx :%11%\n" \
" valid_tx :%12%\n" \
" valid_bk :%13%\n" \
" address :%14%\n" \
" neutrino :%15%"
#define BN_MEASURE_RECORDS \
"Table records...\n" \
" header :%1%\n" \
" tx :%2%\n" \
" point :%3%\n" \
" candidate :%4%\n" \
" confirmed :%5%\n" \
" spend :%6%\n" \
" prevout :%7%\n" \
" strong_tx :%8%\n" \
" address :%9%"
" candidate :%3%\n" \
" confirmed :%4%\n" \
" spend :%5%\n" \
" prevout :%6%\n" \
" strong_tx :%7%\n" \
" address :%8%"
#define BN_MEASURE_SLABS \
"Table slabs..."
#define BN_MEASURE_SLABS_ROW \
@@ -110,27 +108,25 @@ namespace node {
" header :%1%\n" \
" txs :%2%\n" \
" tx :%3%\n" \
" point :%4%\n" \
" spend :%5%\n" \
" prevout :%6%\n" \
" strong_tx :%7%\n" \
" valid_tx :%8%\n" \
" valid_bk :%9%\n" \
" address :%10%\n" \
" neutrino :%11%"
" spend :%4%\n" \
" prevout :%5%\n" \
" strong_tx :%6%\n" \
" valid_tx :%7%\n" \
" valid_bk :%8%\n" \
" address :%9%\n" \
" neutrino :%10%"
#define BN_MEASURE_COLLISION_RATES \
"Collision rates...\n" \
" header :%1%\n" \
" txs :%2%\n" \
" tx :%3%\n" \
" point :%4%\n" \
" spend :%5%\n" \
" prevout :%6%\n" \
" strong_tx :%7%\n" \
" valid_tx :%8%\n" \
" valid_bk :%9%\n" \
" address :%10%\n" \
" neutrino :%11%"
" spend :%4%\n" \
" prevout :%5%\n" \
" strong_tx :%6%\n" \
" valid_tx :%7%\n" \
" valid_bk :%8%\n" \
" address :%9%\n" \
" neutrino :%10%"
#define BN_MEASURE_PROGRESS_START \
"Thinking..."
#define BN_MEASURE_PROGRESS \
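These messages are Boost.Format strings, so removing the point row shifts every later positional placeholder down by one, and each call site (see executor_dumps.cpp above) must feed exactly one fewer argument. A minimal sketch of that pairing, with a trimmed stand-in for BN_MEASURE_RECORDS and invented values:

#include <boost/format.hpp>
#include <iostream>

int main()
{
    // Trimmed stand-in for BN_MEASURE_RECORDS: candidate is now %3%, not %4%.
    const char* measure_records =
        "Table records...\n"
        "   header    :%1%\n"
        "   tx        :%2%\n"
        "   candidate :%3%";

    // One feed per placeholder, mirroring the query_.header_records() % ... chains.
    std::cout << boost::format(measure_records)
        % 880'000u
        % 1'150'000'000u
        % 430'000u << std::endl;
    return 0;
}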
src/chasers/chaser_check.cpp: 18 changes (10 additions & 8 deletions)
@@ -209,13 +209,12 @@ void chaser_check::do_confirmable(height_t height) NOEXCEPT
{
BC_ASSERT(stranded());

- // Confirmations are ordered, but notification order isn't guaranteed.
- if (confirmed_ > height)
- confirmed_ = height;
+ // Confirmations are ordered and notification order is guaranteed.
+ confirmed_ = height;

// The full set of requested hashes has been confirmed.
if (confirmed_ == requested_)
- do_headers(height_t{});
+ do_headers(height);
}

void chaser_check::do_checked(height_t height) NOEXCEPT
@@ -234,14 +233,17 @@ void chaser_check::do_bump(height_t) NOEXCEPT
return;

const auto& query = archive();
+ auto height = position();

// TODO: query.is_associated() is expensive (hashmap search).
// Skip checked blocks starting immediately after last checked.
- while (!closed() && query.is_associated(
- query.to_candidate(add1(position()))))
- set_position(add1(position()));
+ while (!closed() &&
+ query.is_associated(query.to_candidate((height = add1(height)))))
+ {
+ set_position(height);
+ }

- do_headers(height_t{});
+ do_headers(sub1(height));
}

// add headers
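The do_bump() rewrite above replaces the repeated position()/add1(position()) calls with a local cursor: height is advanced inside the loop condition before the association check, so on exit it sits one past the last associated candidate and do_headers() receives sub1(height). A standalone sketch of that cursor pattern, with a plain vector standing in for the store's is_associated() lookup (an assumption, not the node's API):

#include <cstdio>
#include <vector>

int main()
{
    // true means the candidate block at this height is already associated (checked).
    const std::vector<bool> associated{ true, true, true, true, false, false };

    std::size_t position = 0; // last handled height, as with position()/set_position().
    auto height = position;

    // Advance the cursor first, then test it; stop at the first unassociated height.
    while (++height < associated.size() && associated[height])
        position = height; // corresponds to set_position(height);

    // On exit height is one past the last associated block, hence sub1(height).
    std::printf("last associated %zu, next gap at %zu\n", position, height);
    return 0;
}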
src/parser.cpp: 22 changes (0 additions & 22 deletions)
@@ -82,11 +82,6 @@ parser::parser(system::chain::selection context) NOEXCEPT
configured.database.output_size = 25'300'000'000;
configured.database.output_rate = 5;

- // Full size too big for mini, so reduced to compressed size.
- configured.database.point_buckets = 546'188'501;
- configured.database.point_size = 8'389'074'978;
- configured.database.point_rate = 5;
-
configured.database.puts_size = 6'300'000'000;
configured.database.puts_rate = 5;

@@ -694,23 +689,6 @@ options_metadata parser::load_settings() THROWS
"The percentage expansion of the output table body, defaults to '5'."
)

- /* point */
- (
- "database.point_buckets",
- value<uint32_t>(&configured.database.point_buckets),
- "The number of buckets in the point table head, defaults to '546188501'."
- )
- (
- "database.point_size",
- value<uint64_t>(&configured.database.point_size),
- "The minimum allocation of the point table body, defaults to '8389074978'."
- )
- (
- "database.point_rate",
- value<uint16_t>(&configured.database.point_rate),
- "The percentage expansion of the point table body, defaults to '5'."
- )
-
/* puts */
(
"database.puts_size",
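With the point table removed, its three settings (database.point_buckets, database.point_size, database.point_rate) disappear from both the defaults above and load_settings(), leaving the surviving tables' buckets/size/rate style options. A minimal Boost.Program_options sketch of that declaration pattern, using a freestanding settings struct and names invented for illustration rather than the node's configuration types:

#include <boost/program_options.hpp>
#include <cstdint>
#include <iostream>

namespace po = boost::program_options;

// Hypothetical stand-in for the node's database settings object.
struct database_settings
{
    uint64_t puts_size{};
    uint16_t puts_rate{};
};

int main(int argc, char* argv[])
{
    database_settings configured{};
    po::options_description description("settings");
    description.add_options()
    (
        "database.puts_size",
        po::value<uint64_t>(&configured.puts_size)->default_value(6'300'000'000),
        "The minimum allocation of the puts table body."
    )
    (
        "database.puts_rate",
        po::value<uint16_t>(&configured.puts_rate)->default_value(5),
        "The percentage expansion of the puts table body."
    );

    po::variables_map variables;
    po::store(po::parse_command_line(argc, argv, description), variables);
    po::notify(variables);

    std::cout << "puts_size " << configured.puts_size
        << ", puts_rate " << configured.puts_rate << std::endl;
    return 0;
}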
