Skip to content

Commit 3a3b2c7

Browse files
authored
Merge pull request #263 from BigVan/zfile_build_check
[bugfix] enhance bounds checking when building the jump table
2 parents cc42223 + c69b0d3 commit 3a3b2c7

File tree

1 file changed

+22
-16
lines changed

1 file changed

+22
-16
lines changed

src/overlaybd/zfile/zfile.cpp

Lines changed: 22 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -181,7 +181,8 @@ class CompressionFile : public VirtualReadOnlyFile {
181181
return deltas.size();
182182
}
183183

184-
int build(const uint32_t *ibuf, size_t n, off_t offset_begin, uint32_t block_size) {
184+
int build(const uint32_t *ibuf, size_t n, off_t offset_begin, uint32_t block_size,
185+
bool enable_crc) {
185186
partial_offset.clear();
186187
deltas.clear();
187188
group_size = (uinttype_max + 1) / block_size;
@@ -190,7 +191,11 @@ class CompressionFile : public VirtualReadOnlyFile {
190191
auto raw_offset = offset_begin;
191192
partial_offset.push_back(raw_offset);
192193
deltas.push_back(0);
194+
size_t min_blksize = (enable_crc ? sizeof(uint32_t) : 0);
193195
for (ssize_t i = 1; i < (ssize_t)n + 1; i++) {
196+
if (ibuf[i - 1] <= min_blksize) {
197+
LOG_ERRNO_RETURN(EIO, -1, "unexpected block size(id: `):", i - 1, ibuf[i - 1]);
198+
}
194199
raw_offset += ibuf[i - 1];
195200
if ((i % group_size) == 0) {
196201
partial_offset.push_back(raw_offset);
@@ -199,7 +204,7 @@ class CompressionFile : public VirtualReadOnlyFile {
199204
}
200205
if ((uint64_t)deltas[i - 1] + ibuf[i - 1] >= (uint64_t)uinttype_max) {
201206
LOG_ERROR_RETURN(ERANGE, -1, "build block[`] length failed `+` > ` (exceed)",
202-
i-1, deltas[i-1], ibuf[i-1], (uint64_t)uinttype_max);
207+
i - 1, deltas[i - 1], ibuf[i - 1], (uint64_t)uinttype_max);
203208
}
204209
deltas.push_back(deltas[i - 1] + ibuf[i - 1]);
205210
}
@@ -260,13 +265,14 @@ class CompressionFile : public VirtualReadOnlyFile {
260265
LOG_WARN("trim and reload. (idx: `, offset: `, len: `)", idx, begin_offset, read_size);
261266
int trim_res = m_zfile->m_file->trim(begin_offset, read_size);
262267
if (trim_res < 0) {
263-
LOG_ERRNO_RETURN(0, -1, "trim block failed. (idx: `, offset: `, len: `)",
264-
idx, begin_offset, read_size);
268+
LOG_ERRNO_RETURN(0, -1, "trim block failed. (idx: `, offset: `, len: `)", idx,
269+
begin_offset, read_size);
265270
}
266271
auto readn = m_zfile->m_file->pread(m_buf + m_buf_offset, read_size, begin_offset);
267272
if (readn != (ssize_t)read_size) {
268-
LOG_ERRNO_RETURN(0, -1, "read compressed blocks failed. (idx: `, offset: `, len: `)",
269-
idx, begin_offset, read_size);
273+
LOG_ERRNO_RETURN(0, -1,
274+
"read compressed blocks failed. (idx: `, offset: `, len: `)", idx,
275+
begin_offset, read_size);
270276
}
271277
return 0;
272278
}
@@ -351,8 +357,9 @@ class CompressionFile : public VirtualReadOnlyFile {
351357
compressed_size = m_reader->compressed_size();
352358
if ((size_t)(m_reader->m_buf_offset) + compressed_size > sizeof(m_buf)) {
353359
m_reader->m_eno = ERANGE;
354-
LOG_ERRNO_RETURN(0, -1, "inner buffer offset (`) + compressed size (`) overflow.",
355-
m_reader->m_buf_offset, compressed_size);
360+
LOG_ERRNO_RETURN(0, -1,
361+
"inner buffer offset (`) + compressed size (`) overflow.",
362+
m_reader->m_buf_offset, compressed_size);
356363
}
357364

358365
if (blk_idx == m_reader->m_begin_idx) {
@@ -439,15 +446,15 @@ class CompressionFile : public VirtualReadOnlyFile {
439446
if (count <= 0)
440447
return 0;
441448
if (offset + count > m_ht.original_file_size) {
442-
LOG_ERRNO_RETURN(ERANGE, -1, "pread range exceed (` > `)",
443-
offset + count, m_ht.original_file_size);
449+
LOG_ERRNO_RETURN(ERANGE, -1, "pread range exceed (` > `)", offset + count,
450+
m_ht.original_file_size);
444451
}
445452
ssize_t readn = 0; // final will equal to count
446453
unsigned char raw[MAX_READ_SIZE];
447454
BlockReader br(this, offset, count);
448455
for (auto &block : br) {
449456
if (buf == nullptr) {
450-
//used for prefetch; no copy, no decompress;
457+
// used for prefetch; no copy, no decompress;
451458
readn += block.cp_len;
452459
continue;
453460
}
@@ -506,7 +513,7 @@ static int write_header_trailer(IFile *file, bool is_header, bool is_sealed, boo
506513
CompressionFile::HeaderTrailer *pht, off_t offset = -1);
507514

508515
ssize_t compress_data(ICompressor *compressor, const unsigned char *buf, size_t count,
509-
unsigned char *dest_buf, size_t dest_len, bool gen_crc) {
516+
unsigned char *dest_buf, size_t dest_len, bool gen_crc) {
510517

511518
ssize_t compressed_len = 0;
512519
auto ret = compressor->compress((const unsigned char *)buf, count, dest_buf, dest_len);
@@ -721,8 +728,8 @@ bool load_jump_table(IFile *file, CompressionFile::HeaderTrailer *pheader_traile
721728
LOG_ERRNO_RETURN(0, false, "failed to read index");
722729
}
723730
ret = jump_table.build(ibuf.get(), pht->index_size,
724-
CompressionFile::HeaderTrailer::SPACE + pht->opt.dict_size,
725-
pht->opt.block_size);
731+
CompressionFile::HeaderTrailer::SPACE + pht->opt.dict_size,
732+
pht->opt.block_size, pht->opt.verify);
726733
if (ret != 0) {
727734
LOG_ERRNO_RETURN(0, false, "failed to build jump table");
728735
}
@@ -745,8 +752,7 @@ IFile *zfile_open_ro(IFile *file, bool verify, bool ownership) {
745752
auto res = file->fallocate(0, 0, -1);
746753
LOG_ERROR("failed to load jump table, fallocate result: `", res);
747754
if (res < 0) {
748-
LOG_ERRNO_RETURN(0, nullptr,
749-
"failed to load jump table and failed to evict");
755+
LOG_ERRNO_RETURN(0, nullptr, "failed to load jump table and failed to evict");
750756
}
751757
if (retry--) {
752758
LOG_INFO("retry loading jump table");

0 commit comments

Comments (0)