@@ -28,7 +28,7 @@ static const u64 kAllocaRedzoneMask = 31UL;
 // For small size classes inline PoisonShadow for better performance.
 ALWAYS_INLINE void SetShadow(uptr ptr, uptr size, uptr class_id, u64 magic) {
   CHECK(AddrIsAlignedByGranularity(ptr + size));
-  u64 *shadow = reinterpret_cast<u64 *>(MemToShadow(ptr));
+  u64* shadow = reinterpret_cast<u64*>(MemToShadow(ptr));
   if (ASAN_SHADOW_SCALE == 3 && class_id <= 6) {
     // This code expects ASAN_SHADOW_SCALE=3.
     for (uptr i = 0; i < (((uptr)1) << class_id); i++) {
@@ -47,7 +47,7 @@ ALWAYS_INLINE void SetShadow(uptr ptr, uptr size, uptr class_id, u64 magic) {
   }
 }
 
-FakeStack *FakeStack::Create(uptr stack_size_log) {
+FakeStack* FakeStack::Create(uptr stack_size_log) {
   static uptr kMinStackSizeLog = 16;
   static uptr kMaxStackSizeLog = FIRST_32_SECOND_64(24, 28);
   if (stack_size_log < kMinStackSizeLog)
@@ -57,7 +57,7 @@ FakeStack *FakeStack::Create(uptr stack_size_log) {
   CHECK_LE(kMaxStackFrameSizeLog, stack_size_log);
   uptr size = RequiredSize(stack_size_log);
   uptr padded_size = size + kMaxStackFrameSize;
-  void *true_res = reinterpret_cast<void *>(
+  void* true_res = reinterpret_cast<void*>(
       flags()->uar_noreserve ? MmapNoReserveOrDie(padded_size, "FakeStack")
                              : MmapOrDie(padded_size, "FakeStack"));
   // GetFrame() requires the property that
@@ -66,20 +66,20 @@ FakeStack *FakeStack::Create(uptr stack_size_log) {
   // We didn't use MmapAlignedOrDieOnFatalError, because it requires that the
   // *size* is a power of 2, which is an overly strong condition.
   static_assert(alignof(FakeStack) <= kMaxStackFrameSize);
-  FakeStack *res = reinterpret_cast<FakeStack *>(
+  FakeStack* res = reinterpret_cast<FakeStack*>(
       RoundUpTo(
           (uptr)true_res + kFlagsOffset + SizeRequiredForFlags(stack_size_log),
           kMaxStackFrameSize) -
       kFlagsOffset - SizeRequiredForFlags(stack_size_log));
   res->true_start = true_res;
   res->stack_size_log_ = stack_size_log;
-  u8 *p = reinterpret_cast<u8 *>(res);
+  u8* p = reinterpret_cast<u8*>(res);
   VReport(1,
           "T%d: FakeStack created: %p -- %p stack_size_log: %zd; "
           "mmapped %zdK, noreserve=%d, true_start: %p, start of first frame: "
           "0x%zx\n",
-          GetCurrentTidOrInvalid(), (void *)p,
-          (void *)(p + FakeStack::RequiredSize(stack_size_log)), stack_size_log,
+          GetCurrentTidOrInvalid(), (void*)p,
+          (void*)(p + FakeStack::RequiredSize(stack_size_log)), stack_size_log,
           size >> 10, flags()->uar_noreserve, res->true_start,
           res->GetFrame(stack_size_log, /*class_id*/ 0, /*pos*/ 0));
   return res;
@@ -109,14 +109,14 @@ void FakeStack::PoisonAll(u8 magic) {
 #if !defined(_MSC_VER) || defined(__clang__)
 ALWAYS_INLINE USED
 #endif
-FakeFrame *FakeStack::Allocate(uptr stack_size_log, uptr class_id,
-                               uptr real_stack) {
+FakeFrame* FakeStack::Allocate(uptr stack_size_log, uptr class_id,
+                               uptr real_stack) {
   CHECK_LT(class_id, kNumberOfSizeClasses);
   if (needs_gc_)
     GC(real_stack);
-  uptr &hint_position = hint_position_[class_id];
+  uptr& hint_position = hint_position_[class_id];
   const int num_iter = NumberOfFrames(stack_size_log, class_id);
-  u8 *flags = GetFlags(stack_size_log, class_id);
+  u8* flags = GetFlags(stack_size_log, class_id);
   for (int i = 0; i < num_iter; i++) {
     uptr pos = ModuloNumberOfFrames(stack_size_log, class_id, hint_position++);
     // This part is tricky. On one hand, checking and setting flags[pos]
@@ -126,22 +126,24 @@ FakeFrame *FakeStack::Allocate(uptr stack_size_log, uptr class_id,
     // and so will not touch this particular byte. So, it is safe to do this
     // with regular non-atomic load and store (at least I was not able to make
     // this code crash).
-    if (flags[pos]) continue;
+    if (flags[pos])
+      continue;
     flags[pos] = 1;
-    FakeFrame *res = reinterpret_cast<FakeFrame *>(
-        GetFrame(stack_size_log, class_id, pos));
+    FakeFrame* res =
+        reinterpret_cast<FakeFrame*>(GetFrame(stack_size_log, class_id, pos));
     res->real_stack = real_stack;
     *SavedFlagPtr(reinterpret_cast<uptr>(res), class_id) = &flags[pos];
     return res;
   }
-  return nullptr; // We are out of fake stack.
+  return nullptr;  // We are out of fake stack.
 }
 
-uptr FakeStack::AddrIsInFakeStack(uptr ptr, uptr *frame_beg, uptr *frame_end) {
+uptr FakeStack::AddrIsInFakeStack(uptr ptr, uptr* frame_beg, uptr* frame_end) {
   uptr stack_size_log = this->stack_size_log();
   uptr beg = reinterpret_cast<uptr>(GetFrame(stack_size_log, 0, 0));
   uptr end = reinterpret_cast<uptr>(this) + RequiredSize(stack_size_log);
-  if (ptr < beg || ptr >= end) return 0;
+  if (ptr < beg || ptr >= end)
+    return 0;
   uptr class_id = (ptr - beg) >> stack_size_log;
   uptr base = beg + (class_id << stack_size_log);
   CHECK_LE(base, ptr);
@@ -153,9 +155,7 @@ uptr FakeStack::AddrIsInFakeStack(uptr ptr, uptr *frame_beg, uptr *frame_end) {
   return res;
 }
 
-void FakeStack::HandleNoReturn() {
-  needs_gc_ = true;
-}
+void FakeStack::HandleNoReturn() { needs_gc_ = true; }
 
 // Hack: The statement below is not true if we take into account sigaltstack or
 // makecontext. It should be possible to make GC to discard wrong stack frame if
@@ -170,7 +170,7 @@ void FakeStack::HandleNoReturn() {
 // We do it based on their 'real_stack' values -- everything that is lower
 // than the current real_stack is garbage.
 NOINLINE void FakeStack::GC(uptr real_stack) {
-  AsanThread *curr_thread = GetCurrentThread();
+  AsanThread* curr_thread = GetCurrentThread();
   if (!curr_thread)
     return;  // Try again when we have a thread.
   auto top = curr_thread->stack_top();
@@ -179,12 +179,13 @@ NOINLINE void FakeStack::GC(uptr real_stack) {
     return;  // Not the default stack.
 
   for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++) {
-    u8 *flags = GetFlags(stack_size_log(), class_id);
+    u8* flags = GetFlags(stack_size_log(), class_id);
     for (uptr i = 0, n = NumberOfFrames(stack_size_log(), class_id); i < n;
          i++) {
-      if (flags[i] == 0) continue;  // not allocated.
-      FakeFrame *ff = reinterpret_cast<FakeFrame *>(
-          GetFrame(stack_size_log(), class_id, i));
+      if (flags[i] == 0)
+        continue;  // not allocated.
+      FakeFrame* ff =
+          reinterpret_cast<FakeFrame*>(GetFrame(stack_size_log(), class_id, i));
       // GC only on the default stack.
       if (bottom < ff->real_stack && ff->real_stack < real_stack) {
         flags[i] = 0;
@@ -197,59 +198,57 @@ NOINLINE void FakeStack::GC(uptr real_stack) {
   needs_gc_ = false;
 }
 
-void FakeStack::ForEachFakeFrame(RangeIteratorCallback callback, void *arg) {
+void FakeStack::ForEachFakeFrame(RangeIteratorCallback callback, void* arg) {
   for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++) {
-    u8 *flags = GetFlags(stack_size_log(), class_id);
+    u8* flags = GetFlags(stack_size_log(), class_id);
     for (uptr i = 0, n = NumberOfFrames(stack_size_log(), class_id); i < n;
          i++) {
-      if (flags[i] == 0) continue;  // not allocated.
-      FakeFrame *ff = reinterpret_cast<FakeFrame *>(
-          GetFrame(stack_size_log(), class_id, i));
+      if (flags[i] == 0)
+        continue;  // not allocated.
+      FakeFrame* ff =
+          reinterpret_cast<FakeFrame*>(GetFrame(stack_size_log(), class_id, i));
       uptr begin = reinterpret_cast<uptr>(ff);
       callback(begin, begin + FakeStack::BytesInSizeClass(class_id), arg);
     }
   }
 }
 
 #if (SANITIZER_LINUX && !SANITIZER_ANDROID) || SANITIZER_FUCHSIA
-static THREADLOCAL FakeStack *fake_stack_tls;
+static THREADLOCAL FakeStack* fake_stack_tls;
 
-FakeStack *GetTLSFakeStack() {
-  return fake_stack_tls;
-}
-void SetTLSFakeStack(FakeStack *fs) {
-  fake_stack_tls = fs;
-}
+FakeStack* GetTLSFakeStack() { return fake_stack_tls; }
+void SetTLSFakeStack(FakeStack* fs) { fake_stack_tls = fs; }
 #else
-FakeStack *GetTLSFakeStack() { return 0; }
-void SetTLSFakeStack(FakeStack *fs) { }
+FakeStack* GetTLSFakeStack() { return 0; }
+void SetTLSFakeStack(FakeStack* fs) {}
 #endif  // (SANITIZER_LINUX && !SANITIZER_ANDROID) || SANITIZER_FUCHSIA
 
-static FakeStack *GetFakeStack() {
-  AsanThread *t = GetCurrentThread();
-  if (!t) return nullptr;
+static FakeStack* GetFakeStack() {
+  AsanThread* t = GetCurrentThread();
+  if (!t)
+    return nullptr;
   return t->get_or_create_fake_stack();
 }
 
-static FakeStack *GetFakeStackFast() {
-  if (FakeStack *fs = GetTLSFakeStack())
+static FakeStack* GetFakeStackFast() {
+  if (FakeStack* fs = GetTLSFakeStack())
     return fs;
   if (!__asan_option_detect_stack_use_after_return)
     return nullptr;
   return GetFakeStack();
 }
 
-static FakeStack *GetFakeStackFastAlways() {
-  if (FakeStack *fs = GetTLSFakeStack())
+static FakeStack* GetFakeStackFastAlways() {
+  if (FakeStack* fs = GetTLSFakeStack())
     return fs;
   return GetFakeStack();
 }
 
 static ALWAYS_INLINE uptr OnMalloc(uptr class_id, uptr size) {
-  FakeStack *fs = GetFakeStackFast();
+  FakeStack* fs = GetFakeStackFast();
   if (!fs)
     return 0;
-  FakeFrame *ff =
+  FakeFrame* ff =
       fs->Allocate(fs->stack_size_log(), class_id, GET_CURRENT_FRAME());
   if (!ff)
     return 0;  // Out of fake stack.
@@ -259,10 +258,10 @@ static ALWAYS_INLINE uptr OnMalloc(uptr class_id, uptr size) {
259258}
260259
261260static ALWAYS_INLINE uptr OnMallocAlways (uptr class_id, uptr size) {
262- FakeStack * fs = GetFakeStackFastAlways ();
261+ FakeStack* fs = GetFakeStackFastAlways ();
263262 if (!fs)
264263 return 0 ;
265- FakeFrame * ff =
264+ FakeFrame* ff =
266265 fs->Allocate (fs->stack_size_log (), class_id, GET_CURRENT_FRAME ());
267266 if (!ff)
268267 return 0 ; // Out of fake stack.
@@ -276,17 +275,17 @@ static ALWAYS_INLINE void OnFree(uptr ptr, uptr class_id, uptr size) {
   SetShadow(ptr, size, class_id, kMagic8);
 }
 
-} // namespace __asan
+}  // namespace __asan
 
 // ---------------------- Interface ---------------- {{{1
 using namespace __asan;
 #define DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(class_id)                       \
   extern "C" SANITIZER_INTERFACE_ATTRIBUTE uptr                                \
-      __asan_stack_malloc_##class_id(uptr size) {                              \
+  __asan_stack_malloc_##class_id(uptr size) {                                  \
     return OnMalloc(class_id, size);                                           \
   }                                                                            \
   extern "C" SANITIZER_INTERFACE_ATTRIBUTE uptr                                \
-      __asan_stack_malloc_always_##class_id(uptr size) {                       \
+  __asan_stack_malloc_always_##class_id(uptr size) {                           \
     return OnMallocAlways(class_id, size);                                     \
   }                                                                            \
   extern "C" SANITIZER_INTERFACE_ATTRIBUTE void __asan_stack_free_##class_id(  \
@@ -311,21 +310,25 @@ extern "C" {
 // -asan-use-after-return=never, after modal UAR flag lands
 // (https://github.com/google/sanitizers/issues/1394)
 SANITIZER_INTERFACE_ATTRIBUTE
-void *__asan_get_current_fake_stack() { return GetFakeStackFast(); }
+void* __asan_get_current_fake_stack() { return GetFakeStackFast(); }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-void *__asan_addr_is_in_fake_stack(void *fake_stack, void *addr, void **beg,
-                                   void **end) {
-  FakeStack *fs = reinterpret_cast<FakeStack*>(fake_stack);
-  if (!fs) return nullptr;
+void* __asan_addr_is_in_fake_stack(void* fake_stack, void* addr, void** beg,
+                                   void** end) {
+  FakeStack* fs = reinterpret_cast<FakeStack*>(fake_stack);
+  if (!fs)
+    return nullptr;
   uptr frame_beg, frame_end;
-  FakeFrame *frame = reinterpret_cast<FakeFrame *>(fs->AddrIsInFakeStack(
+  FakeFrame* frame = reinterpret_cast<FakeFrame*>(fs->AddrIsInFakeStack(
       reinterpret_cast<uptr>(addr), &frame_beg, &frame_end));
-  if (!frame) return nullptr;
+  if (!frame)
+    return nullptr;
   if (frame->magic != kCurrentStackFrameMagic)
     return nullptr;
-  if (beg) *beg = reinterpret_cast<void *>(frame_beg);
-  if (end) *end = reinterpret_cast<void *>(frame_end);
+  if (beg)
+    *beg = reinterpret_cast<void*>(frame_beg);
+  if (end)
+    *end = reinterpret_cast<void*>(frame_end);
   return reinterpret_cast<void *>(frame->real_stack);
 }
 
@@ -344,9 +347,9 @@ void __asan_alloca_poison(uptr addr, uptr size) {
 
 SANITIZER_INTERFACE_ATTRIBUTE
 void __asan_allocas_unpoison(uptr top, uptr bottom) {
-  if ((!top) || (top > bottom)) return;
-  REAL(memset)
-  (reinterpret_cast<void *>(MemToShadow(top)), 0,
-   (bottom - top) / ASAN_SHADOW_GRANULARITY);
+  if ((!top) || (top > bottom))
+    return;
+  REAL(memset)(reinterpret_cast<void*>(MemToShadow(top)), 0,
+               (bottom - top) / ASAN_SHADOW_GRANULARITY);
 }
-} // extern "C"
+}  // extern "C"