@@ -576,32 +576,66 @@ void __sanitizer_annotate_double_ended_contiguous_container(
   }
 }

+// Checks if two pointers fall within the same memory granule.
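+// E.g., with 8-byte granules, [0x10, 0x18) lies within one granule, while
+// [0x10, 0x19) does not; q is treated as a past-the-end pointer (hence q - 1).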
 static bool WithinOneGranule(uptr p, uptr q) {
   if (p == q)
     return true;
   return RoundDownTo(p, ASAN_SHADOW_GRANULARITY) ==
          RoundDownTo(q - 1, ASAN_SHADOW_GRANULARITY);
 }

-static void PoisonContainer(uptr storage_beg, uptr storage_end) {
+// Copies an ASan memory annotation (a shadow memory value)
+// from one granule to another.
+static void CopyGranuleAnnotation(uptr dst, uptr src) {
+  *(u8 *)MemToShadow(dst) = *(u8 *)MemToShadow(src);
+}
+
+// Marks the specified number of bytes in a granule as accessible or
+// poisons the whole granule with the kAsanContiguousContainerOOBMagic value.
+static void AnnotateContainerGranuleAccessibleBytes(uptr ptr, u8 n) {
   constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;
-  uptr internal_beg = RoundUpTo(storage_beg, granularity);
-  uptr external_beg = RoundDownTo(storage_beg, granularity);
-  uptr internal_end = RoundDownTo(storage_end, granularity);
-
-  if (internal_end > internal_beg)
-    PoisonShadow(internal_beg, internal_end - internal_beg,
-                 kAsanContiguousContainerOOBMagic);
-  // The new buffer may start in the middle of a granule.
-  if (internal_beg != storage_beg && internal_beg < internal_end &&
-      !AddressIsPoisoned(storage_beg)) {
-    *(u8 *)MemToShadow(external_beg) =
-        static_cast<u8>(storage_beg - external_beg);
+  if (n == granularity) {
+    *(u8 *)MemToShadow(ptr) = 0;
+  } else if (n == 0) {
+    *(u8 *)MemToShadow(ptr) = static_cast<u8>(kAsanContiguousContainerOOBMagic);
+  } else {
+    *(u8 *)MemToShadow(ptr) = n;
   }
-  // The new buffer may end in the middle of a granule.
-  if (internal_end != storage_end && AddressIsPoisoned(storage_end)) {
-    *(u8 *)MemToShadow(internal_end) =
-        static_cast<u8>(kAsanContiguousContainerOOBMagic);
+}
+
+// Performs a byte-by-byte copy of ASan annotations (shadow memory values).
+// The result may differ from the original annotations due to ASan limitations,
+// but it cannot lead to false positives (at most, more memory than requested
+// is unpoisoned).
+static void SlowCopyContainerAnnotations(uptr old_storage_beg,
+                                         uptr old_storage_end,
+                                         uptr new_storage_beg,
+                                         uptr new_storage_end) {
+  constexpr uptr granularity = ASAN_SHADOW_GRANULARITY;
+  uptr new_internal_end = RoundDownTo(new_storage_end, granularity);
+  uptr old_ptr = old_storage_beg;
+  uptr new_ptr = new_storage_beg;
+
+  while (new_ptr < new_storage_end) {
+    uptr next_new = RoundUpTo(new_ptr + 1, granularity);
+    uptr granule_begin = next_new - granularity;
+    uptr unpoisoned_bytes = 0;
+
+    for (; new_ptr != next_new && new_ptr != new_storage_end;
+         ++new_ptr, ++old_ptr) {
+      if (!AddressIsPoisoned(old_ptr)) {
+        unpoisoned_bytes = new_ptr - granule_begin + 1;
+      }
+    }
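+    // Skip the last granule when the container ends mid-granule and the byte
+    // just past it is addressable; writing a shadow value there could poison
+    // memory beyond the container.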
+    if (new_ptr < new_storage_end || new_ptr == new_internal_end ||
+        AddressIsPoisoned(new_storage_end)) {
+      if (unpoisoned_bytes != 0 || granule_begin >= new_storage_beg) {
+        AnnotateContainerGranuleAccessibleBytes(granule_begin,
+                                                unpoisoned_bytes);
+      } else if (!AddressIsPoisoned(new_storage_beg)) {
+        AnnotateContainerGranuleAccessibleBytes(
+            granule_begin, new_storage_beg - granule_begin);
+      }
+    }
   }
 }

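A note on the shadow encoding these helpers manipulate: one shadow byte describes one granule, where 0 means the whole granule is addressable, a value 1..7 means only that many leading bytes are, and kAsanContiguousContainerOOBMagic marks the granule as poisoned container capacity. The following self-contained sketch models that policy outside ASan; kGranularity and kContainerOOBMagic are local stand-ins for ASAN_SHADOW_GRANULARITY and kAsanContiguousContainerOOBMagic (0xfc is assumed here to match asan_internal.h).

#include <cstdint>
#include <cstdio>

constexpr unsigned kGranularity = 8;         // stand-in for ASAN_SHADOW_GRANULARITY
constexpr uint8_t kContainerOOBMagic = 0xfc; // stand-in for kAsanContiguousContainerOOBMagic

// Same policy as AnnotateContainerGranuleAccessibleBytes, but writing to a
// plain value instead of real shadow memory.
uint8_t EncodeGranule(unsigned accessible_bytes) {
  if (accessible_bytes == kGranularity)
    return 0;                  // whole granule addressable
  if (accessible_bytes == 0)
    return kContainerOOBMagic; // whole granule poisoned
  return static_cast<uint8_t>(accessible_bytes); // first N bytes addressable
}

int main() {
  for (unsigned n = 0; n <= kGranularity; ++n)
    std::printf("accessible=%u -> shadow=0x%02x\n", n,
                static_cast<unsigned>(EncodeGranule(n)));
}
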
@@ -616,7 +650,7 @@ static void PoisonContainer(uptr storage_beg, uptr storage_end) {
 // the function handles this by going byte by byte, slowing down performance.
 // The old buffer annotations are not removed. If necessary,
 // the user can unpoison the old buffer with __asan_unpoison_memory_region.
-void __sanitizer_move_contiguous_container_annotations(
+void __sanitizer_copy_contiguous_container_annotations(
     const void *old_storage_beg_p, const void *old_storage_end_p,
     const void *new_storage_beg_p, const void *new_storage_end_p) {
   if (!flags()->detect_container_overflow)
@@ -639,109 +673,52 @@ void __sanitizer_move_contiguous_container_annotations(
       (old_storage_end - old_storage_beg) !=
           (new_storage_end - new_storage_beg)) {
     GET_STACK_TRACE_FATAL_HERE;
-    ReportBadParamsToMoveContiguousContainerAnnotations(
+    ReportBadParamsToCopyContiguousContainerAnnotations(
         old_storage_beg, old_storage_end, new_storage_beg, new_storage_end,
         &stack);
   }

   if (old_storage_beg == old_storage_end)
     return;

+  if (old_storage_beg % granularity != new_storage_beg % granularity ||
+      WithinOneGranule(new_storage_beg, new_storage_end)) {
+    SlowCopyContainerAnnotations(old_storage_beg, old_storage_end,
+                                 new_storage_beg, new_storage_end);
+    return;
+  }
+
   uptr new_internal_beg = RoundUpTo(new_storage_beg, granularity);
-  uptr old_internal_beg = RoundUpTo(old_storage_beg, granularity);
-  uptr new_external_beg = RoundDownTo(new_storage_beg, granularity);
-  uptr old_external_beg = RoundDownTo(old_storage_beg, granularity);
   uptr new_internal_end = RoundDownTo(new_storage_end, granularity);
-  uptr old_internal_end = RoundDownTo(old_storage_end, granularity);
-
-  // At the very beginning we poison the whole buffer.
-  // Later we unpoison what is necessary.
-  PoisonContainer(new_storage_beg, new_storage_end);
-
-  // There are two cases.
-  // 1) Distance between buffers is granule-aligned.
-  // 2) It's not aligned, and therefore requires going byte by byte.
-  if (old_storage_beg % granularity == new_storage_beg % granularity) {
-    // When buffers are aligned in the same way, we can just copy shadow memory,
-    // except the first and the last granule.
-    if (new_internal_end > new_internal_beg)
-      __builtin_memcpy((u8 *)MemToShadow(new_internal_beg),
-                       (u8 *)MemToShadow(old_internal_beg),
-                       (new_internal_end - new_internal_beg) / granularity);
-    // If the beginning and the end of the storage are aligned, we are done.
-    // Otherwise, we have to handle remaining granules.
-    if (new_internal_beg != new_storage_beg ||
-        new_internal_end != new_storage_end) {
-      if (WithinOneGranule(new_storage_beg, new_storage_end)) {
-        if (new_internal_end == new_storage_end) {
-          if (!AddressIsPoisoned(old_storage_beg)) {
-            *(u8 *)MemToShadow(new_external_beg) =
-                *(u8 *)MemToShadow(old_external_beg);
-          } else if (!AddressIsPoisoned(new_storage_beg)) {
-            *(u8 *)MemToShadow(new_external_beg) =
-                new_storage_beg - new_external_beg;
-          }
-        } else if (AddressIsPoisoned(new_storage_end)) {
-          if (!AddressIsPoisoned(old_storage_beg)) {
-            *(u8 *)MemToShadow(new_external_beg) =
-                AddressIsPoisoned(old_storage_end)
-                    ? *(u8 *)MemToShadow(old_internal_end)
-                    : new_storage_end - new_external_beg;
-          } else if (!AddressIsPoisoned(new_storage_beg)) {
-            *(u8 *)MemToShadow(new_external_beg) =
-                (new_storage_beg == new_external_beg)
-                    ? static_cast<u8>(kAsanContiguousContainerOOBMagic)
-                    : new_storage_beg - new_external_beg;
-          }
-        }
-      } else {
-        // Buffer is not within one granule!
-        if (new_internal_beg != new_storage_beg) {
-          if (!AddressIsPoisoned(old_storage_beg)) {
-            *(u8 *)MemToShadow(new_external_beg) =
-                *(u8 *)MemToShadow(old_external_beg);
-          } else if (!AddressIsPoisoned(new_storage_beg)) {
-            *(u8 *)MemToShadow(new_external_beg) =
-                new_storage_beg - new_external_beg;
-          }
-        }
-        if (new_internal_end != new_storage_end &&
-            AddressIsPoisoned(new_storage_end)) {
-          *(u8 *)MemToShadow(new_internal_end) =
-              AddressIsPoisoned(old_storage_end)
-                  ? *(u8 *)MemToShadow(old_internal_end)
-                  : old_storage_end - old_internal_end;
-        }
-      }
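+  // The buffers' offsets within a granule match here, so the shadow of every
+  // granule lying fully inside the new container can be copied with a single
+  // memcpy.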
+  if (new_internal_end > new_internal_beg) {
+    uptr old_internal_beg = RoundUpTo(old_storage_beg, granularity);
+    __builtin_memcpy((u8 *)MemToShadow(new_internal_beg),
+                     (u8 *)MemToShadow(old_internal_beg),
+                     (new_internal_end - new_internal_beg) / granularity);
+  }
+  // The only remaining cases involve edge granules when the container starts or
+  // ends within a granule. We already know that the container's start and end
+  // points lie in different granules.
+  if (new_internal_beg != new_storage_beg) {
+    // First granule.
+    uptr new_external_beg = RoundDownTo(new_storage_beg, granularity);
+    uptr old_external_beg = RoundDownTo(old_storage_beg, granularity);
+    if (!AddressIsPoisoned(old_storage_beg)) {
+      CopyGranuleAnnotation(new_external_beg, old_external_beg);
+    } else if (!AddressIsPoisoned(new_storage_beg)) {
+      AnnotateContainerGranuleAccessibleBytes(
+          new_external_beg, new_storage_beg - new_external_beg);
     }
-  } else {
-    // If buffers are not aligned, we have to go byte by byte.
-    uptr old_ptr = old_storage_beg;
-    uptr new_ptr = new_storage_beg;
-    uptr next_new;
-    for (; new_ptr < new_storage_end;) {
-      next_new = RoundUpTo(new_ptr + 1, granularity);
-      uptr unpoison_to = 0;
-      for (; new_ptr != next_new && new_ptr != new_storage_end;
-           ++new_ptr, ++old_ptr) {
-        if (!AddressIsPoisoned(old_ptr)) {
-          unpoison_to = new_ptr + 1;
-        }
-      }
-      if (new_ptr < new_storage_end || new_ptr == new_internal_end ||
-          AddressIsPoisoned(new_storage_end)) {
-        uptr granule_beg = RoundDownTo(new_ptr - 1, granularity);
-        if (unpoison_to != 0) {
-          uptr value =
-              (unpoison_to == next_new) ? 0 : unpoison_to - granule_beg;
-          *(u8 *)MemToShadow(granule_beg) = static_cast<u8>(value);
-        } else {
-          *(u8 *)MemToShadow(granule_beg) =
-              (granule_beg >= new_storage_beg)
-                  ? static_cast<u8>(kAsanContiguousContainerOOBMagic)
-                  : new_storage_beg - granule_beg;
-        }
-      }
+  }
+  if (new_internal_end != new_storage_end &&
+      AddressIsPoisoned(new_storage_end)) {
+    // Last granule.
+    uptr old_internal_end = RoundDownTo(old_storage_end, granularity);
+    if (AddressIsPoisoned(old_storage_end)) {
+      CopyGranuleAnnotation(new_internal_end, old_internal_end);
+    } else {
+      AnnotateContainerGranuleAccessibleBytes(
+          new_internal_end, old_storage_end - old_internal_end);
     }
   }
 }
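
To make the call pattern concrete, here is a minimal usage sketch; it is an illustration, not code from this commit. It assumes a build with -fsanitize=address, that <sanitizer/common_interface_defs.h> declares __sanitizer_copy_contiguous_container_annotations (introduced by this change) alongside the existing __sanitizer_annotate_contiguous_container, and that <sanitizer/asan_interface.h> provides __asan_unpoison_memory_region; kCap and kSize are made up for the example.

#include <sanitizer/asan_interface.h>
#include <sanitizer/common_interface_defs.h>

#include <cstddef>
#include <cstring>

int main() {
  constexpr size_t kCap = 32, kSize = 13;

  // A container-style buffer: bytes [kSize, kCap) are poisoned spare capacity.
  char *old_buf = new char[kCap];
  __sanitizer_annotate_contiguous_container(old_buf, old_buf + kCap,
                                            old_buf + kCap, old_buf + kSize);

  // Relocate to a same-sized buffer: copy the live bytes, then the annotations.
  char *new_buf = new char[kCap];
  std::memcpy(new_buf, old_buf, kSize);
  __sanitizer_copy_contiguous_container_annotations(
      old_buf, old_buf + kCap, new_buf, new_buf + kCap);

  // Annotations are copied, not moved: unpoison the old buffer before freeing,
  // as the comment above the function suggests.
  __asan_unpoison_memory_region(old_buf, kCap);
  delete[] old_buf;

  __asan_unpoison_memory_region(new_buf, kCap);
  delete[] new_buf;
}

Since both new[] allocations here share a granule offset of zero, the copy takes the memcpy fast path above; buffer pairs with mismatched offsets fall back to SlowCopyContainerAnnotations.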