@@ -540,6 +540,7 @@ add_to_trace(
     assert(func == NULL || func->func_code == (PyObject *)code); \
     instr = trace_stack[trace_stack_depth].instr;
 
+
 /* Returns the length of the trace on success,
  * 0 if it failed to produce a worthwhile trace,
  * and -1 on an error.
@@ -560,8 +561,10 @@ translate_bytecode_to_trace(
     _Py_BloomFilter_Add(dependencies, initial_code);
     _Py_CODEUNIT *initial_instr = instr;
     int trace_length = 0;
-    // Leave space for possible trailing _EXIT_TRACE
-    int max_length = buffer_size - 2;
+    // Leave space for possible trailing _EXIT_TRACE and estimated exit stubs
+    // Reserve 20% of buffer space for exit stubs (empirically sufficient)
+    int max_exit_stubs = (buffer_size * 20) / 100;  // 20% for exit stubs
+    int max_length = buffer_size - 2 - max_exit_stubs;
     struct {
         PyFunctionObject *func;
         PyCodeObject *code;
@@ -647,16 +650,7 @@ translate_bytecode_to_trace(
             assert(!OPCODE_HAS_DEOPT(opcode));
         }
 
-        if (OPCODE_HAS_EXIT(opcode)) {
-            // Make space for side exit and final _EXIT_TRACE:
-            RESERVE_RAW(2, "_EXIT_TRACE");
-            max_length--;
-        }
-        if (OPCODE_HAS_ERROR(opcode)) {
-            // Make space for error stub and final _EXIT_TRACE:
-            RESERVE_RAW(2, "_ERROR_POP_N");
-            max_length--;
-        }
+        // Note: Exit stub space is pre-reserved in max_length calculation above
         switch (opcode) {
             case POP_JUMP_IF_NONE:
             case POP_JUMP_IF_NOT_NONE:
@@ -731,9 +725,11 @@ translate_bytecode_to_trace(
             {
                 const struct opcode_macro_expansion *expansion = &_PyOpcode_macro_expansion[opcode];
                 if (expansion->nuops > 0) {
-                    // Reserve space for nuops (+ _SET_IP + _EXIT_TRACE)
+                    // Reserve space for nuops
                    int nuops = expansion->nuops;
-                    RESERVE(nuops + 1); /* One extra for exit */
+
+                    // Reserve space for nuops (exit stub space already pre-reserved)
+                    RESERVE(nuops);
                    int16_t last_op = expansion->uops[nuops - 1].uop;
                    if (last_op == _RETURN_VALUE || last_op == _RETURN_GENERATOR || last_op == _YIELD_VALUE) {
                        // Check for trace stack underflow now:
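For context, here is a minimal standalone sketch (not part of the patch) of the arithmetic behind the new max_length calculation; buffer_size = 512 is a hypothetical value chosen purely for illustration, not a value taken from CPython.

/* Sketch only: shows how the 20% exit-stub reservation shrinks max_length. */
#include <stdio.h>

int main(void)
{
    int buffer_size = 512;                              /* hypothetical trace buffer size */
    int max_exit_stubs = (buffer_size * 20) / 100;      /* 20% reserved up front -> 102 */
    int max_length = buffer_size - 2 - max_exit_stubs;  /* minus 2 slots for a possible trailing _EXIT_TRACE -> 408 */
    printf("buffer_size=%d max_exit_stubs=%d max_length=%d\n",
           buffer_size, max_exit_stubs, max_length);
    return 0;
}

The effect of the patch is to pay this reservation once, up front, instead of decrementing max_length per opcode as the removed OPCODE_HAS_EXIT / OPCODE_HAS_ERROR branches did.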