@@ -576,8 +576,46 @@ void __sanitizer_annotate_double_ended_contiguous_container(
   }
 }
 
-// This function moves annotation from one buffer to another.
-// Old buffer is unpoisoned at the end.
+static bool WithinOneGranule(uptr p, uptr q) {
+  if (p == q)
+    return true;
+  return RoundDownTo(p, ASAN_SHADOW_GRANULARITY) ==
+         RoundDownTo(q - 1, ASAN_SHADOW_GRANULARITY);
+}
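+// For example, with 8-byte granules, WithinOneGranule(0x10, 0x18) is true:
+// bytes [0x10, 0x18) all sit in the granule starting at 0x10. However,
+// WithinOneGranule(0x10, 0x19) is false, since byte 0x18 starts the next
+// granule.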
+
+static void PoisonContainer(uptr storage_beg, uptr storage_end) {
+  constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;
+  uptr internal_beg = RoundUpTo(storage_beg, granularity);
+  uptr external_beg = RoundDownTo(storage_beg, granularity);
+  uptr internal_end = RoundDownTo(storage_end, granularity);
+
+  if (internal_end > internal_beg)
+    PoisonShadow(internal_beg, internal_end - internal_beg,
+                 kAsanContiguousContainerOOBMagic);
+  // The new buffer may start in the middle of a granule.
+  if (internal_beg != storage_beg && internal_beg < internal_end &&
+      !AddressIsPoisoned(storage_beg)) {
+    *(u8 *)MemToShadow(external_beg) =
+        static_cast<u8>(storage_beg - external_beg);
+  }
+  // The new buffer may end in the middle of a granule.
+  if (internal_end != storage_end && AddressIsPoisoned(storage_end)) {
+    *(u8 *)MemToShadow(internal_end) =
+        static_cast<u8>(kAsanContiguousContainerOOBMagic);
+  }
+}
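+// Shadow encoding recap: a shadow byte of 0 marks a fully addressable
+// granule, a value k in [1, granularity) marks only the first k bytes of the
+// granule as addressable, and kAsanContiguousContainerOOBMagic poisons the
+// whole granule. PoisonContainer poisons exactly [storage_beg, storage_end),
+// aiming to leave bytes outside that range unchanged.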
+
+// This function copies ASan memory annotations (poisoned/unpoisoned states)
+// from one buffer to another.
+// Its main purpose is to help with relocating trivially relocatable objects,
+// whose memory may be poisoned, without calling the copy constructor.
+// However, it does not move the memory content itself, only the annotations.
+// If the buffers aren't aligned, i.e. the distance between them isn't
+// granule-aligned:
+//     old_storage_beg % granularity != new_storage_beg % granularity
+// the function handles this by going byte by byte, which slows it down.
+// The old buffer annotations are not removed. If necessary, the user can
+// unpoison the old buffer with __asan_unpoison_memory_region.
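+// A minimal usage sketch (buffer names are hypothetical):
+//   // ...relocate the bytes from old_buf to new_buf by some means that
+//   // does not trip ASan, then mirror the annotations:
+//   __sanitizer_move_contiguous_container_annotations(
+//       old_buf, old_buf + n, new_buf, new_buf + n);
+//   // Old annotations stay; clear them if the old buffer will be reused:
+//   __asan_unpoison_memory_region(old_buf, n);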
 void __sanitizer_move_contiguous_container_annotations(
     const void *old_storage_beg_p, const void *old_storage_end_p,
     const void *new_storage_beg_p, const void *new_storage_end_p) {
@@ -606,6 +644,9 @@ void __sanitizer_move_contiguous_container_annotations(
         &stack);
   }
 
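+  // An empty container has no annotations to copy.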
+  if (old_storage_beg == old_storage_end)
+    return;
+
   uptr new_internal_beg = RoundUpTo(new_storage_beg, granularity);
   uptr old_internal_beg = RoundUpTo(old_storage_beg, granularity);
   uptr new_external_beg = RoundDownTo(new_storage_beg, granularity);
@@ -615,93 +656,94 @@ void __sanitizer_move_contiguous_container_annotations(
 
   // At the very beginning we poison the whole buffer.
   // Later we unpoison what is necessary.
-  PoisonShadow(new_internal_beg, new_internal_end - new_internal_beg,
-               kAsanContiguousContainerOOBMagic);
-  if (new_internal_beg != new_storage_beg) {
-    uptr new_unpoisoned = *(u8 *)MemToShadow(new_external_beg);
-    if (new_unpoisoned > (new_storage_beg - new_external_beg)) {
-      *(u8 *)MemToShadow(new_external_beg) =
-          static_cast<u8>(new_storage_beg - new_external_beg);
-    }
-  }
-  if (new_internal_end != new_storage_end) {
-    uptr new_unpoisoned = *(u8 *)MemToShadow(new_internal_end);
-    if (new_unpoisoned <= (new_storage_end - new_internal_end)) {
-      *(u8 *)MemToShadow(new_external_beg) =
-          static_cast<u8>(kAsanContiguousContainerOOBMagic);
-    }
-  }
+  PoisonContainer(new_storage_beg, new_storage_end);
 
   // There are two cases.
   // 1) Distance between buffers is granule-aligned.
-  // 2) It's not aligned, that case is slower.
+  // 2) It's not aligned, and therefore requires going byte by byte.
   if (old_storage_beg % granularity == new_storage_beg % granularity) {
     // When buffers are aligned in the same way, we can just copy shadow memory,
-    // except first and last granule.
-    __builtin_memcpy((u8 *)MemToShadow(new_internal_beg),
-                     (u8 *)MemToShadow(old_internal_beg),
-                     (new_internal_end - new_internal_beg) / granularity);
-    // In first granule we cannot poison anything before beginning of the
-    // container.
-    if (new_internal_beg != new_storage_beg) {
-      uptr old_unpoisoned = *(u8 *)MemToShadow(old_external_beg);
-      uptr new_unpoisoned = *(u8 *)MemToShadow(new_external_beg);
-
-      if (old_unpoisoned > old_storage_beg - old_external_beg) {
-        *(u8 *)MemToShadow(new_external_beg) = old_unpoisoned;
-      } else if (new_unpoisoned > new_storage_beg - new_external_beg) {
-        *(u8 *)MemToShadow(new_external_beg) =
-            new_storage_beg - new_external_beg;
-      }
-    }
-    // In last granule we cannot poison anything after the end of the container.
-    if (new_internal_end != new_storage_end) {
-      uptr old_unpoisoned = *(u8 *)MemToShadow(old_internal_end);
-      uptr new_unpoisoned = *(u8 *)MemToShadow(new_internal_end);
-      if (new_unpoisoned <= new_storage_end - new_internal_end &&
-          old_unpoisoned < new_unpoisoned) {
-        *(u8 *)MemToShadow(new_internal_end) = old_unpoisoned;
+    // except the first and the last granule.
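+    // Shadow maps one byte per granule, so the interior copy below moves
+    // (new_internal_end - new_internal_beg) / granularity shadow bytes,
+    // e.g. 10 shadow bytes for an 80-byte interior with 8-byte granules.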
+    if (new_internal_end > new_internal_beg)
+      __builtin_memcpy((u8 *)MemToShadow(new_internal_beg),
+                       (u8 *)MemToShadow(old_internal_beg),
+                       (new_internal_end - new_internal_beg) / granularity);
+    // If the beginning and the end of the storage are aligned, we are done.
+    // Otherwise, we have to handle remaining granules.
+    if (new_internal_beg != new_storage_beg ||
+        new_internal_end != new_storage_end) {
+      if (WithinOneGranule(new_storage_beg, new_storage_end)) {
+        if (new_internal_end == new_storage_end) {
+          if (!AddressIsPoisoned(old_storage_beg)) {
+            *(u8 *)MemToShadow(new_external_beg) =
+                *(u8 *)MemToShadow(old_external_beg);
+          } else if (!AddressIsPoisoned(new_storage_beg)) {
+            *(u8 *)MemToShadow(new_external_beg) =
+                new_storage_beg - new_external_beg;
+          }
+        } else if (AddressIsPoisoned(new_storage_end)) {
+          if (!AddressIsPoisoned(old_storage_beg)) {
+            *(u8 *)MemToShadow(new_external_beg) =
+                AddressIsPoisoned(old_storage_end)
+                    ? *(u8 *)MemToShadow(old_internal_end)
+                    : new_storage_end - new_external_beg;
+          } else if (!AddressIsPoisoned(new_storage_beg)) {
+            *(u8 *)MemToShadow(new_external_beg) =
+                (new_storage_beg == new_external_beg)
+                    ? static_cast<u8>(kAsanContiguousContainerOOBMagic)
+                    : new_storage_beg - new_external_beg;
+          }
+        }
+      } else {
+        // Buffer is not within one granule!
+        if (new_internal_beg != new_storage_beg) {
+          if (!AddressIsPoisoned(old_storage_beg)) {
+            *(u8 *)MemToShadow(new_external_beg) =
+                *(u8 *)MemToShadow(old_external_beg);
+          } else if (!AddressIsPoisoned(new_storage_beg)) {
+            *(u8 *)MemToShadow(new_external_beg) =
+                new_storage_beg - new_external_beg;
+          }
+        }
+        if (new_internal_end != new_storage_end &&
+            AddressIsPoisoned(new_storage_end)) {
+          *(u8 *)MemToShadow(new_internal_end) =
+              AddressIsPoisoned(old_storage_end)
+                  ? *(u8 *)MemToShadow(old_internal_end)
+                  : old_storage_end - old_internal_end;
+        }
       }
     }
   } else {
     // If buffers are not aligned, we have to go byte by byte.
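+    // E.g. with 8-byte granules, if old_storage_beg % 8 == 3 while
+    // new_storage_beg % 8 == 5, no old shadow byte lines up with a new one,
+    // so each new granule's shadow value is recomputed from the poisoning
+    // state of the corresponding old bytes.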
     uptr old_ptr = old_storage_beg;
     uptr new_ptr = new_storage_beg;
     uptr next_new;
-    for (; new_ptr + granularity <= new_storage_end;) {
+    for (; new_ptr < new_storage_end;) {
       next_new = RoundUpTo(new_ptr + 1, granularity);
       uptr unpoison_to = 0;
-      for (; new_ptr != next_new; ++new_ptr, ++old_ptr) {
+      for (; new_ptr != next_new && new_ptr != new_storage_end;
+           ++new_ptr, ++old_ptr) {
         if (!AddressIsPoisoned(old_ptr)) {
           unpoison_to = new_ptr + 1;
         }
       }
-      if (unpoison_to != 0) {
-        uptr granule_beg = new_ptr - granularity;
-        uptr value = unpoison_to - granule_beg;
-        *(u8 *)MemToShadow(granule_beg) = static_cast<u8>(value);
-      }
-    }
-    // Only case left is the end of the container in the middle of a granule.
-    // If memory after the end is unpoisoned, we cannot change anything.
-    // But if it's poisoned, we should unpoison as little as possible.
-    if (new_ptr != new_storage_end && AddressIsPoisoned(new_storage_end)) {
-      uptr unpoison_to = 0;
-      for (; new_ptr != new_storage_end; ++new_ptr, ++old_ptr) {
-        if (!AddressIsPoisoned(old_ptr)) {
-          unpoison_to = new_ptr + 1;
+      if (new_ptr < new_storage_end || new_ptr == new_internal_end ||
+          AddressIsPoisoned(new_storage_end)) {
+        uptr granule_beg = RoundDownTo(new_ptr - 1, granularity);
+        if (unpoison_to != 0) {
+          uptr value =
+              (unpoison_to == next_new) ? 0 : unpoison_to - granule_beg;
+          *(u8 *)MemToShadow(granule_beg) = static_cast<u8>(value);
+        } else {
+          *(u8 *)MemToShadow(granule_beg) =
+              (granule_beg >= new_storage_beg)
+                  ? static_cast<u8>(kAsanContiguousContainerOOBMagic)
+                  : new_storage_beg - granule_beg;
+        }
         }
       }
-      if (unpoison_to != 0) {
-        uptr granule_beg = RoundDownTo(new_storage_end, granularity);
-        uptr value = unpoison_to - granule_beg;
-        *(u8 *)MemToShadow(granule_beg) = static_cast<u8>(value);
-      }
     }
   }
-
-  __asan_unpoison_memory_region((void *)old_storage_beg,
-                                old_storage_end - old_storage_beg);
 }
 
 static const void *FindBadAddress(uptr begin, uptr end, bool poisoned) {