Skip to content

Commit c04bd49

Browse files
vitalybuka and AdvenamTacet
authored and committed
rename variables
1 parent 271fbb0 commit c04bd49

File tree

1 file changed

+78
-97
lines changed

1 file changed

+78
-97
lines changed

compiler-rt/lib/asan/asan_poisoning.cpp

Lines changed: 78 additions & 97 deletions
Original file line number · Diff line number · Diff line change
@@ -16,6 +16,7 @@
1616
#include "asan_report.h"
1717
#include "asan_stack.h"
1818
#include "sanitizer_common/sanitizer_atomic.h"
19+
#include "sanitizer_common/sanitizer_common.h"
1920
#include "sanitizer_common/sanitizer_flags.h"
2021
#include "sanitizer_common/sanitizer_interface_internal.h"
2122
#include "sanitizer_common/sanitizer_libc.h"
@@ -588,32 +589,28 @@ static void SetContainerGranule(uptr ptr, u8 n) {
588589
// Performs a byte-by-byte copy of ASan annotations (shadow memory values).
589590
// Result may be different due to ASan limitations, but result cannot lead
590591
// to false positives (more memory than requested may get unpoisoned).
591-
static void SlowCopyContainerAnnotations(uptr src_storage_beg,
592-
uptr src_storage_end,
593-
uptr dst_storage_beg,
594-
uptr dst_storage_end) {
592+
static void SlowCopyContainerAnnotations(uptr src_beg, uptr src_end,
593+
uptr dst_beg, uptr dst_end) {
595594
constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;
596-
uptr dst_internal_end = RoundDownTo(dst_storage_end, granularity);
597-
uptr src_ptr = src_storage_beg;
598-
uptr dst_ptr = dst_storage_beg;
595+
uptr dst_end_down = RoundDownTo(dst_end, granularity);
596+
uptr src_ptr = src_beg;
597+
uptr dst_ptr = dst_beg;
599598

600-
while (dst_ptr < dst_storage_end) {
601-
uptr next_new = RoundUpTo(dst_ptr + 1, granularity);
602-
uptr granule_begin = next_new - granularity;
599+
while (dst_ptr < dst_end) {
600+
uptr granule_beg = RoundDownTo(dst_ptr, granularity);
601+
uptr granule_end = granule_beg + granularity;
603602
uptr unpoisoned_bytes = 0;
604603

605-
for (; dst_ptr != next_new && dst_ptr != dst_storage_end;
606-
++dst_ptr, ++src_ptr) {
607-
if (!AddressIsPoisoned(src_ptr)) {
608-
unpoisoned_bytes = dst_ptr - granule_begin + 1;
609-
}
604+
for (; dst_ptr != granule_end && dst_ptr != dst_end; ++dst_ptr, ++src_ptr) {
605+
if (!AddressIsPoisoned(src_ptr))
606+
unpoisoned_bytes = dst_ptr - granule_beg + 1;
610607
}
611-
if (dst_ptr < dst_storage_end || dst_ptr == dst_internal_end ||
612-
AddressIsPoisoned(dst_storage_end)) {
613-
if (unpoisoned_bytes != 0 || granule_begin >= dst_storage_beg) {
614-
SetContainerGranule(granule_begin, unpoisoned_bytes);
615-
} else if (!AddressIsPoisoned(dst_storage_beg)) {
616-
SetContainerGranule(granule_begin, dst_storage_beg - granule_begin);
608+
if (dst_ptr < dst_end || dst_ptr == dst_end_down ||
609+
AddressIsPoisoned(dst_end)) {
610+
if (unpoisoned_bytes != 0 || granule_beg >= dst_beg) {
611+
SetContainerGranule(granule_beg, unpoisoned_bytes);
612+
} else if (!AddressIsPoisoned(dst_beg)) {
613+
SetContainerGranule(granule_beg, dst_beg - granule_beg);
617614
}
618615
}
619616
}
@@ -623,69 +620,62 @@ static void SlowCopyContainerAnnotations(uptr src_storage_beg,
623620
// going through bytes in reversed order, but not reversing annotations.
624621
// Result may be different due to ASan limitations, but result cannot lead
625622
// to false positives (more memory than requested may get unpoisoned).
626-
static void SlowReversedCopyContainerAnnotations(uptr src_storage_beg,
627-
uptr src_storage_end,
628-
uptr dst_storage_beg,
629-
uptr dst_storage_end) {
623+
static void SlowReversedCopyContainerAnnotations(uptr src_beg, uptr src_end,
624+
uptr dst_beg, uptr dst_end) {
630625
constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;
631-
uptr dst_internal_beg = RoundDownTo(dst_storage_beg, granularity);
632-
uptr dst_internal_end = RoundDownTo(dst_storage_end, granularity);
633-
uptr src_ptr = src_storage_end;
634-
uptr dst_ptr = dst_storage_end;
626+
uptr dst_end_down = RoundDownTo(dst_end, granularity);
627+
uptr src_ptr = src_end;
628+
uptr dst_ptr = dst_end;
635629

636-
while (dst_ptr > dst_storage_beg) {
637-
uptr granule_begin = RoundDownTo(dst_ptr - 1, granularity);
630+
while (dst_ptr > dst_beg) {
631+
uptr granule_beg = RoundDownTo(dst_ptr - 1, granularity);
638632
uptr unpoisoned_bytes = 0;
639633

640-
for (; dst_ptr != granule_begin && dst_ptr != dst_storage_beg;
641-
--dst_ptr, --src_ptr) {
634+
for (; dst_ptr != granule_beg && dst_ptr != dst_beg; --dst_ptr, --src_ptr) {
642635
if (unpoisoned_bytes == 0 && !AddressIsPoisoned(src_ptr - 1)) {
643-
unpoisoned_bytes = dst_ptr - granule_begin;
636+
unpoisoned_bytes = dst_ptr - granule_beg;
644637
}
645638
}
646639

647-
if (dst_ptr >= dst_internal_end && !AddressIsPoisoned(dst_storage_end)) {
640+
if (dst_ptr >= dst_end_down && !AddressIsPoisoned(dst_end)) {
648641
continue;
649642
}
650643

651-
if (granule_begin == dst_ptr || unpoisoned_bytes != 0) {
652-
SetContainerGranule(granule_begin, unpoisoned_bytes);
653-
} else if (!AddressIsPoisoned(dst_storage_beg)) {
654-
SetContainerGranule(granule_begin, dst_storage_beg - granule_begin);
644+
if (granule_beg == dst_ptr || unpoisoned_bytes != 0) {
645+
SetContainerGranule(granule_beg, unpoisoned_bytes);
646+
} else if (!AddressIsPoisoned(dst_beg)) {
647+
SetContainerGranule(granule_beg, dst_beg - granule_beg);
655648
}
656649
}
657650
}
658651

659652
// A helper function for __sanitizer_copy_contiguous_container_annotations,
660653
// has assumption about begin and end of the container.
661654
// Should not be used stand alone.
662-
static void CopyContainerFirstGranuleAnnotation(uptr src_storage_begin,
663-
uptr dst_storage_begin) {
655+
static void CopyContainerFirstGranuleAnnotation(uptr src_beg, uptr dst_beg) {
664656
constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;
665657
// First granule
666-
uptr dst_external_begin = RoundDownTo(dst_storage_begin, granularity);
667-
uptr src_external_begin = RoundDownTo(src_storage_begin, granularity);
668-
if (!AddressIsPoisoned(src_storage_begin)) {
669-
*(u8 *)MemToShadow(dst_external_begin) =
670-
*(u8 *)MemToShadow(src_external_begin);
671-
} else if (!AddressIsPoisoned(dst_storage_begin)) {
672-
SetContainerGranule(dst_external_begin,
673-
dst_storage_begin - dst_external_begin);
658+
uptr dst_external_beg = RoundDownTo(dst_beg, granularity);
659+
uptr src_external_beg = RoundDownTo(src_beg, granularity);
660+
if (!AddressIsPoisoned(src_beg)) {
661+
*(u8 *)MemToShadow(dst_external_beg) = *(u8 *)MemToShadow(src_external_beg);
662+
} else if (!AddressIsPoisoned(dst_beg)) {
663+
SetContainerGranule(dst_external_beg, dst_beg - dst_external_beg);
674664
}
675665
}
676666

677667
// A helper function for __sanitizer_copy_contiguous_container_annotations,
678668
// has assumption about begin and end of the container.
679669
// Should not be used stand alone.
680-
static void CopyContainerLastGranuleAnnotation(uptr src_storage_end,
681-
uptr dst_internal_end) {
670+
static void CopyContainerLastGranuleAnnotation(uptr src_end,
671+
uptr dst_end_down) {
682672
constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;
683673
// Last granule
684-
uptr src_internal_end = RoundDownTo(src_storage_end, granularity);
685-
if (AddressIsPoisoned(src_storage_end)) {
686-
*(u8 *)MemToShadow(dst_internal_end) = *(u8 *)MemToShadow(src_internal_end);
674+
uptr src_internal_end = RoundDownTo(src_end, granularity);
675+
if (AddressIsPoisoned(src_end)) {
676+
*(u8 *)MemToShadow(dst_end_down) = *(u8 *)MemToShadow(src_internal_end);
687677
} else {
688-
SetContainerGranule(dst_internal_end, src_storage_end - src_internal_end);
678+
SetContainerGranule(dst_end_down, src_end - src_internal_end);
689679
}
690680
}
691681

@@ -696,38 +686,34 @@ static void CopyContainerLastGranuleAnnotation(uptr src_storage_end,
696686
// However, it does not move memory content itself, only annotations.
697687
// If the buffers aren't aligned (the distance between buffers isn't
698688
// granule-aligned)
699-
// // src_storage_beg % granularity != dst_storage_beg % granularity
689+
// // src_beg % granularity != dst_beg % granularity
700690
// the function handles this by going byte by byte, slowing down performance.
701691
// The old buffer annotations are not removed. If necessary,
702692
// user can unpoison old buffer with __asan_unpoison_memory_region.
703-
void __sanitizer_copy_contiguous_container_annotations(const void *src_begin_p,
693+
void __sanitizer_copy_contiguous_container_annotations(const void *src_beg_p,
704694
const void *src_end_p,
705-
const void *dst_begin_p,
695+
const void *dst_beg_p,
706696
const void *dst_end_p) {
707697
if (!flags()->detect_container_overflow)
708698
return;
709699

710-
VPrintf(3, "contiguous_container_src: %p %p\n", src_begin_p, src_end_p);
711-
VPrintf(3, "contiguous_container_dst: %p %p\n", dst_begin_p, dst_end_p);
700+
VPrintf(3, "contiguous_container_src: %p %p\n", src_beg_p, src_end_p);
701+
VPrintf(3, "contiguous_container_dst: %p %p\n", dst_beg_p, dst_end_p);
712702

713-
uptr src_storage_begin = reinterpret_cast<uptr>(src_begin_p);
714-
uptr src_storage_end = reinterpret_cast<uptr>(src_end_p);
715-
uptr dst_storage_begin = reinterpret_cast<uptr>(dst_begin_p);
716-
uptr dst_storage_end = reinterpret_cast<uptr>(dst_end_p);
703+
uptr src_beg = reinterpret_cast<uptr>(src_beg_p);
704+
uptr src_end = reinterpret_cast<uptr>(src_end_p);
705+
uptr dst_beg = reinterpret_cast<uptr>(dst_beg_p);
706+
uptr dst_end = reinterpret_cast<uptr>(dst_end_p);
717707

718708
constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;
719709

720-
if (src_storage_begin > src_storage_end ||
721-
(dst_storage_end - dst_storage_begin) !=
722-
(src_storage_end - src_storage_begin)) {
710+
if (src_beg > src_end || (dst_end - dst_beg) != (src_end - src_beg)) {
723711
GET_STACK_TRACE_FATAL_HERE;
724712
ReportBadParamsToCopyContiguousContainerAnnotations(
725-
src_storage_begin, src_storage_end, dst_storage_begin, dst_storage_end,
726-
&stack);
713+
src_beg, src_end, dst_beg, dst_end, &stack);
727714
}
728715

729-
if (src_storage_begin == src_storage_end ||
730-
src_storage_begin == dst_storage_begin)
716+
if (src_beg == src_end || src_beg == dst_beg)
731717
return;
732718
// Due to support for overlapping buffers, we may have to copy elements
733719
// in reversed order, when destination buffer starts in the middle of
@@ -738,51 +724,46 @@ void __sanitizer_copy_contiguous_container_annotations(const void *src_begin_p,
738724
//
739725
// The only remaining edge cases involve edge granules,
740726
// when the container starts or ends within a granule.
741-
uptr src_external_end = RoundUpTo(src_storage_end, granularity);
742-
bool copy_in_reversed_order = src_storage_begin < dst_storage_begin &&
743-
dst_storage_begin <= src_external_end;
744-
if (src_storage_begin % granularity != dst_storage_begin % granularity ||
745-
RoundDownTo(dst_storage_end - 1, granularity) <= dst_storage_begin) {
727+
uptr src_end_up = RoundUpTo(src_end, granularity);
728+
bool copy_in_reversed_order = src_beg < dst_beg && dst_beg <= src_end_up;
729+
if (src_beg % granularity != dst_beg % granularity ||
730+
RoundDownTo(dst_end - 1, granularity) <= dst_beg) {
746731
if (copy_in_reversed_order) {
747-
SlowReversedCopyContainerAnnotations(src_storage_begin, src_storage_end,
748-
dst_storage_begin, dst_storage_end);
732+
SlowReversedCopyContainerAnnotations(src_beg, src_end, dst_beg, dst_end);
749733
} else {
750-
SlowCopyContainerAnnotations(src_storage_begin, src_storage_end,
751-
dst_storage_begin, dst_storage_end);
734+
SlowCopyContainerAnnotations(src_beg, src_end, dst_beg, dst_end);
752735
}
753736
return;
754737
}
755738

756739
// As buffers are granule-aligned, we can just copy annotations of granules
757740
// from the middle.
758-
uptr dst_internal_begin = RoundUpTo(dst_storage_begin, granularity);
759-
uptr dst_internal_end = RoundDownTo(dst_storage_end, granularity);
741+
uptr dst_beg_up = RoundUpTo(dst_beg, granularity);
742+
uptr dst_end_down = RoundDownTo(dst_end, granularity);
760743
if (copy_in_reversed_order) {
761-
if (dst_internal_end != dst_storage_end &&
762-
AddressIsPoisoned(dst_storage_end)) {
763-
CopyContainerLastGranuleAnnotation(src_storage_end, dst_internal_end);
744+
if (dst_end_down != dst_end && AddressIsPoisoned(dst_end)) {
745+
CopyContainerLastGranuleAnnotation(src_end, dst_end_down);
764746
}
765747
} else {
766-
if (dst_internal_begin != dst_storage_begin) {
767-
CopyContainerFirstGranuleAnnotation(src_storage_begin, dst_storage_begin);
748+
if (dst_beg_up != dst_beg) {
749+
CopyContainerFirstGranuleAnnotation(src_beg, dst_beg);
768750
}
769751
}
770752

771-
if (dst_internal_end > dst_internal_begin) {
772-
uptr src_internal_begin = RoundUpTo(src_storage_begin, granularity);
773-
__builtin_memmove((u8 *)MemToShadow(dst_internal_begin),
774-
(u8 *)MemToShadow(src_internal_begin),
775-
(dst_internal_end - dst_internal_begin) / granularity);
753+
if (dst_end_down > dst_beg_up) {
754+
uptr src_internal_beg = RoundUpTo(src_beg, granularity);
755+
__builtin_memmove((u8 *)MemToShadow(dst_beg_up),
756+
(u8 *)MemToShadow(src_internal_beg),
757+
(dst_end_down - dst_beg_up) / granularity);
776758
}
777759

778760
if (copy_in_reversed_order) {
779-
if (dst_internal_begin != dst_storage_begin) {
780-
CopyContainerFirstGranuleAnnotation(src_storage_begin, dst_storage_begin);
761+
if (dst_beg_up != dst_beg) {
762+
CopyContainerFirstGranuleAnnotation(src_beg, dst_beg);
781763
}
782764
} else {
783-
if (dst_internal_end != dst_storage_end &&
784-
AddressIsPoisoned(dst_storage_end)) {
785-
CopyContainerLastGranuleAnnotation(src_storage_end, dst_internal_end);
765+
if (dst_end_down != dst_end && AddressIsPoisoned(dst_end)) {
766+
CopyContainerLastGranuleAnnotation(src_end, dst_end_down);
786767
}
787768
}
788769
}

0 commit comments

Comments (0)