Mirror of https://github.com/Zygo/bees.git, synced 2025-05-18 05:45:45 +02:00
resolve: drop support for old-style compressed BeesAddr
No public version of bees ever created old-style compressed hash table
entries. Remove the code that supports them.

Signed-off-by: Zygo Blaxell <bees@furryterror.org>
parent 27125b8140
commit 591a44e59a
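For readers unfamiliar with the two entry formats, here is a minimal, self-contained sketch of the distinction the commit message draws. The bit layout, constants, and helper names below (compressed_flag, offset_mask, make_old_style, make_new_style) are invented for illustration and are not the real BeesAddress encoding; the point is only that an old-style entry marks a block as compressed without saying where it sits inside the compressed extent, while a new-style entry records that offset as well, so the resolver can check one candidate block instead of scanning the whole extent (which is what the resolver hunks below rely on).

// Hypothetical sketch -- not the real BeesAddress layout.
#include <cassert>
#include <cstdint>
#include <iostream>

namespace sketch {
	// Assumed encoding, not the real one: the low 12 bits of an
	// otherwise block-aligned physical address hold a flag bit and a
	// small offset field (stored +1 so "no offset" stays distinguishable).
	const uint64_t compressed_flag = 0x800;
	const uint64_t offset_mask     = 0x7ff;

	// Old-style entry: marked compressed, block offset not recorded.
	uint64_t make_old_style(uint64_t extent_addr) {
		return extent_addr | compressed_flag;
	}

	// New-style entry: marked compressed, block offset recorded too.
	uint64_t make_new_style(uint64_t extent_addr, uint64_t block_in_extent) {
		assert(block_in_extent + 1 <= offset_mask);
		return extent_addr | compressed_flag | (block_in_extent + 1);
	}

	bool is_compressed(uint64_t addr)             { return (addr & compressed_flag) != 0; }
	bool has_compressed_offset(uint64_t addr)     { return (addr & offset_mask) != 0; }
	uint64_t get_compressed_offset(uint64_t addr) { return (addr & offset_mask) - 1; }
}

int main() {
	uint64_t old_style = sketch::make_old_style(0x1234000);
	uint64_t new_style = sketch::make_new_style(0x1234000, 3);
	// An old-style entry only says "somewhere in this compressed extent";
	// a new-style entry pinpoints one block within it.
	std::cout << sketch::is_compressed(old_style) << " "
	          << sketch::has_compressed_offset(old_style) << "\n";   // 1 0
	std::cout << sketch::is_compressed(new_style) << " "
	          << sketch::get_compressed_offset(new_style) << "\n";   // 1 3
	return 0;
}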
@@ -269,8 +269,7 @@ BeesHashTable::prefetch_loop()
 		out << "\n";
 	}
 
-	size_t uncompressed_count = occupied_count - compressed_count;
-	size_t legacy_count = compressed_count - compressed_offset_count;
+	size_t uncompressed_count = occupied_count - compressed_offset_count;
 
 	ostringstream graph_blob;
 
@@ -281,9 +280,7 @@ BeesHashTable::prefetch_loop()
 	graph_blob
 		<< "\nHash table page occupancy histogram (" << occupied_count << "/" << total_count << " cells occupied, " << (occupied_count * 100 / total_count) << "%)\n"
 		<< out.str() << "0% | 25% | 50% | 75% | 100% page fill\n"
-		<< "compressed " << compressed_count << " (" << percent(compressed_count, occupied_count) << ")"
-		<< " new-style " << compressed_offset_count << " (" << percent(compressed_offset_count, occupied_count) << ")"
-		<< " old-style " << legacy_count << " (" << percent(legacy_count, occupied_count) << ")\n"
+		<< "compressed " << compressed_count << " (" << percent(compressed_count, occupied_count) << ")\n"
 		<< "uncompressed " << uncompressed_count << " (" << percent(uncompressed_count, occupied_count) << ")"
 		<< " unaligned_eof " << unaligned_eof_count << " (" << percent(unaligned_eof_count, occupied_count) << ")"
 		<< " toxic " << toxic_count << " (" << percent(toxic_count, occupied_count) << ")";
@@ -98,16 +98,14 @@ BeesResolver::adjust_offset(const BeesFileRange &haystack, const BeesBlockData &
 		return BeesBlockData();
 	}
 
-	off_t lower_offset = haystack.begin();
-	off_t upper_offset = haystack.end();
+	off_t haystack_offset = haystack.begin();
 	bool is_compressed_offset = false;
 	bool is_exact = false;
-	bool is_legacy = false;
 	if (m_addr.is_compressed()) {
 		BtrfsExtentWalker ew(haystack.fd(), haystack.begin(), m_ctx->root_fd());
 		BEESTRACE("haystack extent data " << ew);
 		Extent e = ew.current();
-		if (m_addr.has_compressed_offset()) {
+		THROW_CHECK1(runtime_error, m_addr, m_addr.has_compressed_offset());
 		off_t coff = m_addr.get_compressed_offset();
 		if (e.offset() > coff) {
 			// this extent begins after the target block
@@ -120,31 +118,23 @@ BeesResolver::adjust_offset(const BeesFileRange &haystack, const BeesBlockData &
 			BEESCOUNT(adjust_offset_high);
 			return BeesBlockData();
 		}
-		lower_offset = e.begin() + coff;
-		upper_offset = lower_offset + BLOCK_SIZE_CLONE;
+		haystack_offset = e.begin() + coff;
 		BEESCOUNT(adjust_offset_hit);
 		is_compressed_offset = true;
-		} else {
-			lower_offset = e.begin();
-			upper_offset = e.end();
-			BEESCOUNT(adjust_legacy);
-			is_legacy = true;
-		}
 	} else {
 		BEESCOUNT(adjust_exact);
 		is_exact = true;
 	}
 
-	BEESTRACE("Checking haystack " << haystack << " offsets " << to_hex(lower_offset) << ".." << to_hex(upper_offset));
+	BEESTRACE("Checking haystack " << haystack << " offset " << to_hex(haystack_offset));
 
 	// Check all the blocks in the list
-	for (off_t haystack_offset = lower_offset; haystack_offset < upper_offset; haystack_offset += BLOCK_SIZE_CLONE) {
 	THROW_CHECK1(out_of_range, haystack_offset, (haystack_offset & BLOCK_MASK_CLONE) == 0);
 
 	// Straw cannot extend beyond end of haystack
 	if (haystack_offset + needle.size() > haystack_size) {
 		BEESCOUNT(adjust_needle_too_long);
-		break;
+		return BeesBlockData();
 	}
 
 	// Read the haystack
@@ -159,7 +149,6 @@ BeesResolver::adjust_offset(const BeesFileRange &haystack, const BeesBlockData &
 		m_found_data = true;
 		m_found_hash = true;
 		if (is_compressed_offset) BEESCOUNT(adjust_compressed_offset_correct);
-		if (is_legacy) BEESCOUNT(adjust_legacy_correct);
 		if (is_exact) BEESCOUNT(adjust_exact_correct);
 		return straw;
 	}
@@ -167,7 +156,7 @@ BeesResolver::adjust_offset(const BeesFileRange &haystack, const BeesBlockData &
 	if (straw.hash() != needle.hash()) {
 		// Not the same hash or data, try next block
 		BEESCOUNT(adjust_miss);
-		continue;
+		return BeesBlockData();
 	}
 
 	// Found the hash but not the data. Yay!
@@ -176,12 +165,10 @@ BeesResolver::adjust_offset(const BeesFileRange &haystack, const BeesBlockData &
 		<< "\tneedle " << needle << "\n"
 		<< "\tstraw " << straw);
 	BEESCOUNT(hash_collision);
-	}
 
 	// Ran out of offsets to try
 	BEESCOUNT(adjust_no_match);
 	if (is_compressed_offset) BEESCOUNT(adjust_compressed_offset_wrong);
-	if (is_legacy) BEESCOUNT(adjust_legacy_wrong);
 	if (is_exact) BEESCOUNT(adjust_exact_wrong);
 	m_wrong_data = true;
 	return BeesBlockData();
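Taken together, the resolver hunks above replace a scan over a candidate offset range with a single computed offset. Below is a rough sketch of the resulting control flow under simplified assumptions: Addr, Block, read_block() and the numeric values are stand-ins invented for illustration and are not the bees API; only the shape of the logic follows the diff.

// Rough sketch of the post-commit control flow of BeesResolver::adjust_offset,
// with simplified stand-in types; not the real implementation.
#include <cstdint>
#include <iostream>
#include <optional>
#include <string>

struct Addr {
	bool    compressed = false;
	int64_t compressed_offset = 0;   // block offset within the extent
};

struct Block {
	int64_t  offset = 0;
	uint64_t hash = 0;
};

// Hypothetical stand-in for reading and hashing one block of the haystack.
static Block read_block(int64_t offset) {
	return Block{offset, static_cast<uint64_t>(offset) * 2654435761u};
}

// Before this commit, a compressed entry without a stored offset fell back to
// scanning a whole [lower_offset, upper_offset) range in BLOCK_SIZE_CLONE
// steps.  Afterwards there is exactly one candidate offset to check.
static std::optional<Block> adjust_offset_sketch(const Addr &addr,
                                                 int64_t extent_begin,
                                                 const Block &needle)
{
	int64_t haystack_offset = extent_begin;
	if (addr.compressed) {
		// Old-style entries had no compressed offset; that fallback
		// (and its adjust_legacy counters) is what the commit removes.
		haystack_offset = extent_begin + addr.compressed_offset;
	}

	Block straw = read_block(haystack_offset);
	if (straw.hash != needle.hash) {
		return std::nullopt;             // was: continue to the next block
	}
	return straw;                        // single candidate, checked once
}

int main() {
	Addr addr{true, 3};
	Block needle = read_block(4096 + 3);
	auto found = adjust_offset_sketch(addr, 4096, needle);
	std::cout << (found ? "match at " + std::to_string(found->offset)
	                    : std::string("no match")) << "\n";
	return 0;
}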