@@ -265,7 +265,7 @@ ErrorOr<Vector<FlatPtr, 32>> ProcessorBase::capture_stack_trace(Thread& thread,
265265 return stack_trace;
266266}
267267
268- void ProcessorBase::exit_trap(TrapFrame& trap)
268+ void ProcessorBase::exit_trap(TrapFrame& trap, bool handling_exception)
269269{
270270 VERIFY_INTERRUPTS_DISABLED();
271271 VERIFY(&Processor::current() == this);
@@ -277,23 +277,26 @@ void ProcessorBase::exit_trap(TrapFrame& trap)
277277 // ScopedCritical here.
278278 m_in_critical = m_in_critical + 1;
279279
280- m_in_irq = 0;
280+ bool only_update_state = handling_exception && trap.next_trap;
281281
282+ auto* current_thread = Processor::current_thread();
283+ if (!only_update_state) {
284+ m_in_irq = 0;
282285#if ARCH(X86_64)
283- auto* self = static_cast<Processor*>(this);
284- if (is_smp_enabled())
285- self->smp_process_pending_messages();
286+ auto* self = static_cast<Processor*>(this);
287+ if (is_smp_enabled())
288+ self->smp_process_pending_messages();
286289#endif
287290
288- auto* current_thread = Processor::current_thread();
289- if (current_thread) {
290- SpinlockLocker thread_lock(current_thread->get_lock());
291- current_thread->check_dispatch_pending_signal(YieldBehavior::FlagYield);
292- }
291+ if (current_thread) {
292+ SpinlockLocker thread_lock(current_thread->get_lock());
293+ current_thread->check_dispatch_pending_signal(YieldBehavior::FlagYield);
294+ }
293295
294- // Process the deferred call queue. Among other things, this ensures
295- // that any pending thread unblocks happen before we enter the scheduler.
296- m_deferred_call_pool.execute_pending();
296+ // Process the deferred call queue. Among other things, this ensures
297+ // that any pending thread unblocks happen before we enter the scheduler.
298+ m_deferred_call_pool.execute_pending();
299+ }
297300
298301 if (current_thread) {
299302 auto& current_trap = current_thread->current_trap();
@@ -320,7 +323,7 @@ void ProcessorBase::exit_trap(TrapFrame& trap)
320323 // We don't want context switches to happen until we're explicitly
321324 // triggering a switch in check_invoke_scheduler.
322325 m_in_critical = m_in_critical - 1;
323- if (!m_in_irq && !m_in_critical)
326+ if (!only_update_state && !m_in_irq && !m_in_critical)
324327 check_invoke_scheduler();
325328}
326329
0 commit comments