@@ -709,13 +709,29 @@ _PyJIT_translate_single_bytecode_to_trace(
         case POP_JUMP_IF_FALSE:
         case POP_JUMP_IF_TRUE:
         {
+            int counter = target_instr[1].cache;
+            int direction = _Py_popcount32(counter);
             _Py_CODEUNIT *computed_next_instr_without_modifiers = target_instr + 1 + _PyOpcode_Caches[_PyOpcode_Deopt[opcode]];
             _Py_CODEUNIT *computed_next_instr = computed_next_instr_without_modifiers + (computed_next_instr_without_modifiers->op.code == NOT_TAKEN);
             _Py_CODEUNIT *computed_jump_instr = computed_next_instr_without_modifiers + oparg;
             assert(next_instr == computed_next_instr || next_instr == computed_jump_instr);
             int jump_happened = computed_jump_instr == next_instr;
             uint32_t uopcode = BRANCH_TO_GUARD[opcode - POP_JUMP_IF_FALSE][jump_happened];
-            ADD_TO_TRACE(uopcode, 0, 0, INSTR_IP(jump_happened ? computed_next_instr : computed_jump_instr, old_code));
+            ADD_TO_TRACE(uopcode, 0, 0, INSTR_IP(jump_happened ? computed_next_instr : computed_jump_instr, old_code));
+            // Branch is biased to jumping, but the jump did not happen.
+            // We are likely in a bad trace, so we should retrace later.
+            if ((direction > 10 && !jump_happened) ||
+                // Branch is biased to not jumping, but the jump happened.
+                // Again, we are likely in a bad trace, so we should retrace later.
+                (direction < 6 && jump_happened) ||
+                // Finally, the branch is not heavily biased either way,
+                // so we should not trace through it at all.
+                // This prevents trace explosion.
+                (direction >= 6 && direction <= 10)
+            ) {
+                ADD_TO_TRACE(_EXIT_TRACE, 0, 0, INSTR_IP(next_instr, old_code));
+                goto full;
+            }
             break;
         }
         case JUMP_BACKWARD_JIT:
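
For context, here is a minimal standalone sketch of the bias heuristic the new lines apply. It assumes the POP_JUMP_IF_* cache entry is a 16-bit branch history in which each execution shifts in one bit recording whether the jump was taken, so its popcount is the number of taken branches among the last 16 executions. popcount32, record_branch, and should_exit_trace are hypothetical helpers used only for illustration; they are not part of the patch.

/*
 * Sketch of the branch-bias check above, under the assumption that the
 * POP_JUMP_IF_* cache word is a 16-bit taken/not-taken shift register.
 */
#include <stdio.h>
#include <stdint.h>

/* Portable stand-in for _Py_popcount32. */
static int popcount32(uint32_t x) {
    int n = 0;
    while (x) {
        n += x & 1;
        x >>= 1;
    }
    return n;
}

/* Record one execution of the branch: shift in whether the jump was taken. */
static uint16_t record_branch(uint16_t history, int taken) {
    return (uint16_t)((history << 1) | (taken != 0));
}

/* Mirror of the patch's decision: nonzero means "exit the trace here". */
static int should_exit_trace(uint16_t history, int jump_happened) {
    int direction = popcount32(history);
    if (direction > 10 && !jump_happened)  return 1;  /* biased to jumping, but it did not jump */
    if (direction < 6 && jump_happened)    return 1;  /* biased to not jumping, but it jumped   */
    if (direction >= 6 && direction <= 10) return 1;  /* no strong bias: do not trace through   */
    return 0;  /* branch behaved as its bias predicts: keep tracing */
}

int main(void) {
    uint16_t history = 0;
    /* Simulate a branch that was taken in 15 of its last 16 executions. */
    for (int i = 0; i < 16; i++) {
        history = record_branch(history, i != 0);
    }
    printf("taken count = %d\n", popcount32(history));                           /* 15 */
    printf("exit when the jump happens?        %d\n", should_exit_trace(history, 1)); /* 0: matches bias */
    printf("exit when the jump does not happen? %d\n", should_exit_trace(history, 0)); /* 1: contradicts bias */
    return 0;
}

With these thresholds, the tracer only follows a branch that was taken in more than 10, or fewer than 6, of its last 16 executions; any disagreement with that bias, or the absence of a clear bias, emits _EXIT_TRACE so the trace ends at the branch instead of exploding into both paths.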