Skip to content
2 changes: 2 additions & 0 deletions compiler-rt/lib/sanitizer_common/sanitizer_common.h
Original file line number Diff line number Diff line change
Expand Up @@ -275,6 +275,8 @@ bool IsAccessibleMemoryRange(uptr beg, uptr size);
// the source range cannot be read, in which case the contents of `dest` are
// undefined.
bool TryMemCpy(void *dest, const void *src, uptr n);
// Copies accessible memory, and zero fill inaccessible.
void MemCpyAccessible(void *dest, const void *src, uptr n);

// Error report formatting.
const char *StripPathPrefix(const char *filepath,
Expand Down
26 changes: 26 additions & 0 deletions compiler-rt/lib/sanitizer_common/sanitizer_common_libcdep.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -219,6 +219,32 @@ static void StopStackDepotBackgroundThread() {
static void StopStackDepotBackgroundThread() {}
#endif

void MemCpyAccessible(void *dest, const void *src, uptr n) {
if (TryMemCpy(dest, src, n))
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I am not fully sure about this optimization. In the case where there are holes this makes it more expensive. Does the caller generally know that?

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Not sure as well. On some examples fast pass 4x faster than page by page.

return;

const uptr page_size = GetPageSize();
uptr b = reinterpret_cast<uptr>(src);
uptr b_up = RoundUpTo(b, page_size);

uptr e = reinterpret_cast<uptr>(src) + n;
uptr e_down = RoundDownTo(e, page_size);

auto copy_or_zero = [dest, src](uptr beg, uptr end) {
const uptr udest = reinterpret_cast<uptr>(dest);
const uptr usrc = reinterpret_cast<uptr>(src);
void *d = reinterpret_cast<void *>(udest + (beg - usrc));
const uptr size = end - beg;
if (!TryMemCpy(d, reinterpret_cast<void *>(beg), size))
internal_memset(d, 0, size);
};

copy_or_zero(b, b_up);
for (uptr p = b_up; p < e_down; p += page_size)
copy_or_zero(p, p + page_size);
copy_or_zero(e_down, e);
}

} // namespace __sanitizer

SANITIZER_INTERFACE_WEAK_DEF(void, __sanitizer_sandbox_on_notify,
Expand Down
37 changes: 37 additions & 0 deletions compiler-rt/lib/sanitizer_common/tests/sanitizer_posix_test.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -127,6 +127,43 @@ TEST(SanitizerCommon, TryMemCpyNull) {
EXPECT_FALSE(TryMemCpy(dst.data(), nullptr, dst.size()));
}

TEST(SanitizerCommon, MemCpyAccessible) {
const int page_num = 1000;
const int page_size = GetPageSize();
InternalMmapVector<char> src(page_num * page_size);
std::iota(src.begin(), src.end(), 123);
std::vector<char> dst;
std::vector<char> exp = {src.begin(), src.end()};

// Protect some pages.
for (int i = 7; i < page_num; i *= 2) {
mprotect(src.data() + i * page_size, page_size, PROT_NONE);
std::fill(exp.data() + i * page_size, exp.data() + (i + 1) * page_size, 0);
}

dst.assign(src.size(), 0);
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

nit '\0'

EXPECT_FALSE(TryMemCpy(dst.data(), src.data(), dst.size()));

// Full page aligned range with mprotect pages.
dst.assign(src.size(), 0);
MemCpyAccessible(dst.data(), src.data(), dst.size());
EXPECT_TRUE(std::equal(dst.begin(), dst.end(), exp.begin()));

// Misaligned range with mprotect pages.
size_t offb = 3;
size_t offe = 7;
dst.assign(src.size() - offb - offe, 0);
MemCpyAccessible(dst.data(), src.data() + offb, dst.size());
EXPECT_TRUE(std::equal(dst.begin(), dst.end(), exp.begin() + offb));

// Misaligned range with ends in mprotect pages.
offb = 3 + 7 * page_size;
offe = 7 + 14 * page_size;
dst.assign(src.size() - offb - offe, 0);
MemCpyAccessible(dst.data(), src.data() + offb, dst.size());
EXPECT_TRUE(std::equal(dst.begin(), dst.end(), exp.begin() + offb));
}

} // namespace __sanitizer

#endif // SANITIZER_POSIX
Loading