From b87613f21474ea848fec435cbfe63d8cb1c7c44c Mon Sep 17 00:00:00 2001 From: Mariusz Felisiak Date: Tue, 18 Nov 2025 11:32:15 +0100 Subject: [PATCH 1/8] Add missing backticks in os and decimal docs (#141699) --- Doc/library/decimal.rst | 2 +- Doc/library/os.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Doc/library/decimal.rst b/Doc/library/decimal.rst index 985153b5443f5c..ba882f10bbe2b8 100644 --- a/Doc/library/decimal.rst +++ b/Doc/library/decimal.rst @@ -264,7 +264,7 @@ allows the settings to be changed. This approach meets the needs of most applications. For more advanced work, it may be useful to create alternate contexts using the -Context() constructor. To make an alternate active, use the :func:`setcontext` +:meth:`Context` constructor. To make an alternate active, use the :func:`setcontext` function. In accordance with the standard, the :mod:`decimal` module provides two ready to diff --git a/Doc/library/os.rst b/Doc/library/os.rst index dbc3c92c8798b5..7dc6c177268ec2 100644 --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -558,7 +558,7 @@ process and user. .. function:: initgroups(username, gid, /) - Call the system initgroups() to initialize the group access list with all of + Call the system ``initgroups()`` to initialize the group access list with all of the groups of which the specified username is a member, plus the specified group id. From b420f6be53efdf40f552c94f19a7ce85f882b5e2 Mon Sep 17 00:00:00 2001 From: Mark Shannon Date: Tue, 18 Nov 2025 13:31:48 +0000 Subject: [PATCH 2/8] GH-139109: Support switch/case dispatch with the tracing interpreter. 
(GH-141703) --- .github/workflows/jit.yml | 26 +- Include/internal/pycore_magic_number.h | 3 +- Include/internal/pycore_opcode_metadata.h | 9 +- Include/internal/pycore_optimizer.h | 2 +- Include/internal/pycore_uop_ids.h | 1 + Include/opcode_ids.h | 47 +- Lib/_opcode_metadata.py | 47 +- Python/bytecodes.c | 9 +- Python/ceval.c | 4 + Python/ceval_macros.h | 10 +- Python/executor_cases.c.h | 2 + Python/generated_cases.c.h | 113 +-- Python/instrumentation.c | 4 +- Python/opcode_targets.h | 910 +++++++++++----------- Python/optimizer_cases.c.h | 2 + Tools/cases_generator/analyzer.py | 7 +- Tools/cases_generator/target_generator.py | 4 +- Tools/cases_generator/tier1_generator.py | 2 +- 18 files changed, 617 insertions(+), 585 deletions(-) diff --git a/.github/workflows/jit.yml b/.github/workflows/jit.yml index 3349eb042425dd..62325250bd368e 100644 --- a/.github/workflows/jit.yml +++ b/.github/workflows/jit.yml @@ -57,10 +57,9 @@ jobs: fail-fast: false matrix: target: -# To re-enable later when we support these. -# - i686-pc-windows-msvc/msvc -# - x86_64-pc-windows-msvc/msvc -# - aarch64-pc-windows-msvc/msvc + - i686-pc-windows-msvc/msvc + - x86_64-pc-windows-msvc/msvc + - aarch64-pc-windows-msvc/msvc - x86_64-apple-darwin/clang - aarch64-apple-darwin/clang - x86_64-unknown-linux-gnu/gcc @@ -71,16 +70,15 @@ jobs: llvm: - 21 include: -# To re-enable later when we support these. 
-# - target: i686-pc-windows-msvc/msvc -# architecture: Win32 -# runner: windows-2022 -# - target: x86_64-pc-windows-msvc/msvc -# architecture: x64 -# runner: windows-2022 -# - target: aarch64-pc-windows-msvc/msvc -# architecture: ARM64 -# runner: windows-11-arm + - target: i686-pc-windows-msvc/msvc + architecture: Win32 + runner: windows-2022 + - target: x86_64-pc-windows-msvc/msvc + architecture: x64 + runner: windows-2022 + - target: aarch64-pc-windows-msvc/msvc + architecture: ARM64 + runner: windows-11-arm - target: x86_64-apple-darwin/clang architecture: x86_64 runner: macos-15-intel diff --git a/Include/internal/pycore_magic_number.h b/Include/internal/pycore_magic_number.h index 7ec7bd1c695516..2fb46a6df50bb3 100644 --- a/Include/internal/pycore_magic_number.h +++ b/Include/internal/pycore_magic_number.h @@ -286,6 +286,7 @@ Known values: Python 3.15a1 3653 (Fix handling of opcodes that may leave operands on the stack when optimizing LOAD_FAST) Python 3.15a1 3654 (Fix missing exception handlers in logical expression) Python 3.15a1 3655 (Fix miscompilation of some module-level annotations) + Python 3.15a1 3656 (Add TRACE_RECORD instruction, for platforms with switch based interpreter) Python 3.16 will start with 3700 @@ -299,7 +300,7 @@ PC/launcher.c must also be updated. */ -#define PYC_MAGIC_NUMBER 3655 +#define PYC_MAGIC_NUMBER 3656 /* This is equivalent to converting PYC_MAGIC_NUMBER to 2 bytes (little-endian) and then appending b'\r\n'. 
*/ #define PYC_MAGIC_NUMBER_TOKEN \ diff --git a/Include/internal/pycore_opcode_metadata.h b/Include/internal/pycore_opcode_metadata.h index 548627dc7982ec..cca88818c575df 100644 --- a/Include/internal/pycore_opcode_metadata.h +++ b/Include/internal/pycore_opcode_metadata.h @@ -488,6 +488,8 @@ int _PyOpcode_num_popped(int opcode, int oparg) { return 1; case TO_BOOL_STR: return 1; + case TRACE_RECORD: + return 0; case UNARY_INVERT: return 1; case UNARY_NEGATIVE: @@ -971,6 +973,8 @@ int _PyOpcode_num_pushed(int opcode, int oparg) { return 1; case TO_BOOL_STR: return 1; + case TRACE_RECORD: + return 0; case UNARY_INVERT: return 1; case UNARY_NEGATIVE: @@ -1287,6 +1291,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[267] = { [TO_BOOL_LIST] = { true, INSTR_FMT_IXC00, HAS_EXIT_FLAG | HAS_ESCAPES_FLAG }, [TO_BOOL_NONE] = { true, INSTR_FMT_IXC00, HAS_EXIT_FLAG }, [TO_BOOL_STR] = { true, INSTR_FMT_IXC00, HAS_EXIT_FLAG | HAS_ESCAPES_FLAG }, + [TRACE_RECORD] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [UNARY_INVERT] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [UNARY_NEGATIVE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [UNARY_NOT] = { true, INSTR_FMT_IX, HAS_PURE_FLAG }, @@ -1738,6 +1743,7 @@ const char *_PyOpcode_OpName[267] = { [TO_BOOL_LIST] = "TO_BOOL_LIST", [TO_BOOL_NONE] = "TO_BOOL_NONE", [TO_BOOL_STR] = "TO_BOOL_STR", + [TRACE_RECORD] = "TRACE_RECORD", [UNARY_INVERT] = "UNARY_INVERT", [UNARY_NEGATIVE] = "UNARY_NEGATIVE", [UNARY_NOT] = "UNARY_NOT", @@ -1809,7 +1815,6 @@ const uint8_t _PyOpcode_Deopt[256] = { [230] = 230, [231] = 231, [232] = 232, - [233] = 233, [BINARY_OP] = BINARY_OP, [BINARY_OP_ADD_FLOAT] = BINARY_OP, [BINARY_OP_ADD_INT] = BINARY_OP, @@ -2025,6 +2030,7 @@ const uint8_t _PyOpcode_Deopt[256] = { [TO_BOOL_LIST] = TO_BOOL, [TO_BOOL_NONE] = TO_BOOL, [TO_BOOL_STR] = TO_BOOL, + [TRACE_RECORD] = TRACE_RECORD, [UNARY_INVERT] = UNARY_INVERT, [UNARY_NEGATIVE] = UNARY_NEGATIVE, 
[UNARY_NOT] = UNARY_NOT, @@ -2070,7 +2076,6 @@ const uint8_t _PyOpcode_Deopt[256] = { case 230: \ case 231: \ case 232: \ - case 233: \ ; struct pseudo_targets { uint8_t as_sequence; diff --git a/Include/internal/pycore_optimizer.h b/Include/internal/pycore_optimizer.h index 0307a174e77346..e7177552cf666e 100644 --- a/Include/internal/pycore_optimizer.h +++ b/Include/internal/pycore_optimizer.h @@ -364,7 +364,7 @@ extern void _Py_ClearExecutorDeletionList(PyInterpreterState *interp); int _PyJit_translate_single_bytecode_to_trace(PyThreadState *tstate, _PyInterpreterFrame *frame, _Py_CODEUNIT *next_instr, int stop_tracing_opcode); -int +PyAPI_FUNC(int) _PyJit_TryInitializeTracing(PyThreadState *tstate, _PyInterpreterFrame *frame, _Py_CODEUNIT *curr_instr, _Py_CODEUNIT *start_instr, _Py_CODEUNIT *close_loop_instr, int curr_stackdepth, int chain_depth, _PyExitData *exit, diff --git a/Include/internal/pycore_uop_ids.h b/Include/internal/pycore_uop_ids.h index 7a33a5b84fd21a..c38f28f9db161b 100644 --- a/Include/internal/pycore_uop_ids.h +++ b/Include/internal/pycore_uop_ids.h @@ -352,6 +352,7 @@ extern "C" { #define _TO_BOOL_LIST 550 #define _TO_BOOL_NONE TO_BOOL_NONE #define _TO_BOOL_STR 551 +#define _TRACE_RECORD TRACE_RECORD #define _UNARY_INVERT UNARY_INVERT #define _UNARY_NEGATIVE UNARY_NEGATIVE #define _UNARY_NOT UNARY_NOT diff --git a/Include/opcode_ids.h b/Include/opcode_ids.h index 1d5c74adefcd35..0d066c169019a7 100644 --- a/Include/opcode_ids.h +++ b/Include/opcode_ids.h @@ -213,28 +213,29 @@ extern "C" { #define UNPACK_SEQUENCE_LIST 207 #define UNPACK_SEQUENCE_TUPLE 208 #define UNPACK_SEQUENCE_TWO_TUPLE 209 -#define INSTRUMENTED_END_FOR 234 -#define INSTRUMENTED_POP_ITER 235 -#define INSTRUMENTED_END_SEND 236 -#define INSTRUMENTED_FOR_ITER 237 -#define INSTRUMENTED_INSTRUCTION 238 -#define INSTRUMENTED_JUMP_FORWARD 239 -#define INSTRUMENTED_NOT_TAKEN 240 -#define INSTRUMENTED_POP_JUMP_IF_TRUE 241 -#define INSTRUMENTED_POP_JUMP_IF_FALSE 242 -#define 
INSTRUMENTED_POP_JUMP_IF_NONE 243 -#define INSTRUMENTED_POP_JUMP_IF_NOT_NONE 244 -#define INSTRUMENTED_RESUME 245 -#define INSTRUMENTED_RETURN_VALUE 246 -#define INSTRUMENTED_YIELD_VALUE 247 -#define INSTRUMENTED_END_ASYNC_FOR 248 -#define INSTRUMENTED_LOAD_SUPER_ATTR 249 -#define INSTRUMENTED_CALL 250 -#define INSTRUMENTED_CALL_KW 251 -#define INSTRUMENTED_CALL_FUNCTION_EX 252 -#define INSTRUMENTED_JUMP_BACKWARD 253 -#define INSTRUMENTED_LINE 254 -#define ENTER_EXECUTOR 255 +#define INSTRUMENTED_END_FOR 233 +#define INSTRUMENTED_POP_ITER 234 +#define INSTRUMENTED_END_SEND 235 +#define INSTRUMENTED_FOR_ITER 236 +#define INSTRUMENTED_INSTRUCTION 237 +#define INSTRUMENTED_JUMP_FORWARD 238 +#define INSTRUMENTED_NOT_TAKEN 239 +#define INSTRUMENTED_POP_JUMP_IF_TRUE 240 +#define INSTRUMENTED_POP_JUMP_IF_FALSE 241 +#define INSTRUMENTED_POP_JUMP_IF_NONE 242 +#define INSTRUMENTED_POP_JUMP_IF_NOT_NONE 243 +#define INSTRUMENTED_RESUME 244 +#define INSTRUMENTED_RETURN_VALUE 245 +#define INSTRUMENTED_YIELD_VALUE 246 +#define INSTRUMENTED_END_ASYNC_FOR 247 +#define INSTRUMENTED_LOAD_SUPER_ATTR 248 +#define INSTRUMENTED_CALL 249 +#define INSTRUMENTED_CALL_KW 250 +#define INSTRUMENTED_CALL_FUNCTION_EX 251 +#define INSTRUMENTED_JUMP_BACKWARD 252 +#define INSTRUMENTED_LINE 253 +#define ENTER_EXECUTOR 254 +#define TRACE_RECORD 255 #define ANNOTATIONS_PLACEHOLDER 256 #define JUMP 257 #define JUMP_IF_FALSE 258 @@ -249,7 +250,7 @@ extern "C" { #define HAVE_ARGUMENT 43 #define MIN_SPECIALIZED_OPCODE 129 -#define MIN_INSTRUMENTED_OPCODE 234 +#define MIN_INSTRUMENTED_OPCODE 233 #ifdef __cplusplus } diff --git a/Lib/_opcode_metadata.py b/Lib/_opcode_metadata.py index f168d169a32948..e681cb17e43e04 100644 --- a/Lib/_opcode_metadata.py +++ b/Lib/_opcode_metadata.py @@ -208,8 +208,9 @@ 'CACHE': 0, 'RESERVED': 17, 'RESUME': 128, - 'INSTRUMENTED_LINE': 254, - 'ENTER_EXECUTOR': 255, + 'INSTRUMENTED_LINE': 253, + 'ENTER_EXECUTOR': 254, + 'TRACE_RECORD': 255, 'BINARY_SLICE': 1, 'BUILD_TEMPLATE': 2, 
'CALL_FUNCTION_EX': 4, @@ -328,26 +329,26 @@ 'UNPACK_EX': 118, 'UNPACK_SEQUENCE': 119, 'YIELD_VALUE': 120, - 'INSTRUMENTED_END_FOR': 234, - 'INSTRUMENTED_POP_ITER': 235, - 'INSTRUMENTED_END_SEND': 236, - 'INSTRUMENTED_FOR_ITER': 237, - 'INSTRUMENTED_INSTRUCTION': 238, - 'INSTRUMENTED_JUMP_FORWARD': 239, - 'INSTRUMENTED_NOT_TAKEN': 240, - 'INSTRUMENTED_POP_JUMP_IF_TRUE': 241, - 'INSTRUMENTED_POP_JUMP_IF_FALSE': 242, - 'INSTRUMENTED_POP_JUMP_IF_NONE': 243, - 'INSTRUMENTED_POP_JUMP_IF_NOT_NONE': 244, - 'INSTRUMENTED_RESUME': 245, - 'INSTRUMENTED_RETURN_VALUE': 246, - 'INSTRUMENTED_YIELD_VALUE': 247, - 'INSTRUMENTED_END_ASYNC_FOR': 248, - 'INSTRUMENTED_LOAD_SUPER_ATTR': 249, - 'INSTRUMENTED_CALL': 250, - 'INSTRUMENTED_CALL_KW': 251, - 'INSTRUMENTED_CALL_FUNCTION_EX': 252, - 'INSTRUMENTED_JUMP_BACKWARD': 253, + 'INSTRUMENTED_END_FOR': 233, + 'INSTRUMENTED_POP_ITER': 234, + 'INSTRUMENTED_END_SEND': 235, + 'INSTRUMENTED_FOR_ITER': 236, + 'INSTRUMENTED_INSTRUCTION': 237, + 'INSTRUMENTED_JUMP_FORWARD': 238, + 'INSTRUMENTED_NOT_TAKEN': 239, + 'INSTRUMENTED_POP_JUMP_IF_TRUE': 240, + 'INSTRUMENTED_POP_JUMP_IF_FALSE': 241, + 'INSTRUMENTED_POP_JUMP_IF_NONE': 242, + 'INSTRUMENTED_POP_JUMP_IF_NOT_NONE': 243, + 'INSTRUMENTED_RESUME': 244, + 'INSTRUMENTED_RETURN_VALUE': 245, + 'INSTRUMENTED_YIELD_VALUE': 246, + 'INSTRUMENTED_END_ASYNC_FOR': 247, + 'INSTRUMENTED_LOAD_SUPER_ATTR': 248, + 'INSTRUMENTED_CALL': 249, + 'INSTRUMENTED_CALL_KW': 250, + 'INSTRUMENTED_CALL_FUNCTION_EX': 251, + 'INSTRUMENTED_JUMP_BACKWARD': 252, 'ANNOTATIONS_PLACEHOLDER': 256, 'JUMP': 257, 'JUMP_IF_FALSE': 258, @@ -362,4 +363,4 @@ } HAVE_ARGUMENT = 43 -MIN_INSTRUMENTED_OPCODE = 234 +MIN_INSTRUMENTED_OPCODE = 233 diff --git a/Python/bytecodes.c b/Python/bytecodes.c index 565eaa7a599175..12ee506e4f2bc4 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -5636,10 +5636,12 @@ dummy_func( DISPATCH(); } - label(record_previous_inst) { + inst(TRACE_RECORD, (--)) { #if _Py_TIER2 assert(IS_JIT_TRACING()); - int 
opcode = next_instr->op.code; + next_instr = this_instr; + frame->instr_ptr = prev_instr; + opcode = next_instr->op.code; bool stop_tracing = (opcode == WITH_EXCEPT_START || opcode == RERAISE || opcode == CLEANUP_THROW || opcode == PUSH_EXC_INFO || opcode == INTERPRETER_EXIT); @@ -5675,7 +5677,8 @@ dummy_func( } DISPATCH_GOTO_NON_TRACING(); #else - Py_FatalError("JIT label executed in non-jit build."); + (void)prev_instr; + Py_FatalError("JIT instruction executed in non-jit build."); #endif } diff --git a/Python/ceval.c b/Python/ceval.c index 25294ebd993f6c..14fef42ea967d6 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -1179,6 +1179,10 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int uint8_t opcode; /* Current opcode */ int oparg; /* Current opcode argument, if any */ assert(tstate->current_frame == NULL || tstate->current_frame->stackpointer != NULL); +#if !USE_COMPUTED_GOTOS + uint8_t tracing_mode = 0; + uint8_t dispatch_code; +#endif #endif _PyEntryFrame entry; diff --git a/Python/ceval_macros.h b/Python/ceval_macros.h index 05a2760671e847..c30638c221a03f 100644 --- a/Python/ceval_macros.h +++ b/Python/ceval_macros.h @@ -134,8 +134,8 @@ # define LABEL(name) name: #else # define TARGET(op) case op: TARGET_##op: -# define DISPATCH_GOTO() goto dispatch_opcode -# define DISPATCH_GOTO_NON_TRACING() goto dispatch_opcode +# define DISPATCH_GOTO() dispatch_code = opcode | tracing_mode ; goto dispatch_opcode +# define DISPATCH_GOTO_NON_TRACING() dispatch_code = opcode; goto dispatch_opcode # define JUMP_TO_LABEL(name) goto name; # define JUMP_TO_PREDICTED(name) goto PREDICTED_##name; # define LABEL(name) name: @@ -148,9 +148,9 @@ # define LEAVE_TRACING() \ DISPATCH_TABLE_VAR = DISPATCH_TABLE; #else -# define IS_JIT_TRACING() (0) -# define ENTER_TRACING() -# define LEAVE_TRACING() +# define IS_JIT_TRACING() (tracing_mode != 0) +# define ENTER_TRACING() tracing_mode = 255 +# define LEAVE_TRACING() tracing_mode = 0 #endif /* 
PRE_DISPATCH_GOTO() does lltrace if enabled. Normally a no-op */ diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h index 6796abf84ac5f4..e1edd20b778d27 100644 --- a/Python/executor_cases.c.h +++ b/Python/executor_cases.c.h @@ -7579,5 +7579,7 @@ break; } + /* _TRACE_RECORD is not a viable micro-op for tier 2 because it uses the 'this_instr' variable */ + #undef TIER_TWO diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index 0d4678df68ce2d..b83b7c528e9150 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -11,7 +11,7 @@ #if !_Py_TAIL_CALL_INTERP #if !USE_COMPUTED_GOTOS dispatch_opcode: - switch (opcode) + switch (dispatch_code) #endif { #endif /* _Py_TAIL_CALL_INTERP */ @@ -11683,6 +11683,68 @@ DISPATCH(); } + TARGET(TRACE_RECORD) { + #if _Py_TAIL_CALL_INTERP + int opcode = TRACE_RECORD; + (void)(opcode); + #endif + _Py_CODEUNIT* const prev_instr = frame->instr_ptr; + _Py_CODEUNIT* const this_instr = next_instr; + (void)this_instr; + frame->instr_ptr = next_instr; + next_instr += 1; + INSTRUCTION_STATS(TRACE_RECORD); + opcode = TRACE_RECORD; + #if _Py_TIER2 + assert(IS_JIT_TRACING()); + next_instr = this_instr; + frame->instr_ptr = prev_instr; + opcode = next_instr->op.code; + bool stop_tracing = (opcode == WITH_EXCEPT_START || + opcode == RERAISE || opcode == CLEANUP_THROW || + opcode == PUSH_EXC_INFO || opcode == INTERPRETER_EXIT); + _PyFrame_SetStackPointer(frame, stack_pointer); + int full = !_PyJit_translate_single_bytecode_to_trace(tstate, frame, next_instr, stop_tracing ? 
_DEOPT : 0); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (full) { + LEAVE_TRACING(); + _PyFrame_SetStackPointer(frame, stack_pointer); + int err = stop_tracing_and_jit(tstate, frame); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (err < 0) { + JUMP_TO_LABEL(error); + } + DISPATCH_GOTO_NON_TRACING(); + } + _PyThreadStateImpl *_tstate = (_PyThreadStateImpl *)tstate; + if ((_tstate->jit_tracer_state.prev_state.instr->op.code == CALL_LIST_APPEND && + opcode == POP_TOP) || + (_tstate->jit_tracer_state.prev_state.instr->op.code == BINARY_OP_INPLACE_ADD_UNICODE && + opcode == STORE_FAST)) { + _tstate->jit_tracer_state.prev_state.instr_is_super = true; + } + else { + _tstate->jit_tracer_state.prev_state.instr = next_instr; + } + PyObject *prev_code = PyStackRef_AsPyObjectBorrow(frame->f_executable); + if (_tstate->jit_tracer_state.prev_state.instr_code != (PyCodeObject *)prev_code) { + _PyFrame_SetStackPointer(frame, stack_pointer); + Py_SETREF(_tstate->jit_tracer_state.prev_state.instr_code, (PyCodeObject*)Py_NewRef((prev_code))); + stack_pointer = _PyFrame_GetStackPointer(frame); + } + _tstate->jit_tracer_state.prev_state.instr_frame = frame; + _tstate->jit_tracer_state.prev_state.instr_oparg = oparg; + _tstate->jit_tracer_state.prev_state.instr_stacklevel = PyStackRef_IsNone(frame->f_executable) ? 
2 : STACK_LEVEL(); + if (_PyOpcode_Caches[_PyOpcode_Deopt[opcode]]) { + (&next_instr[1])->counter = trigger_backoff_counter(); + } + DISPATCH_GOTO_NON_TRACING(); + #else + (void)prev_instr; + Py_FatalError("JIT instruction executed in non-jit build."); + #endif + } + TARGET(UNARY_INVERT) { #if _Py_TAIL_CALL_INTERP int opcode = UNARY_INVERT; @@ -12254,55 +12316,6 @@ JUMP_TO_LABEL(error); DISPATCH(); } - LABEL(record_previous_inst) - { - #if _Py_TIER2 - assert(IS_JIT_TRACING()); - int opcode = next_instr->op.code; - bool stop_tracing = (opcode == WITH_EXCEPT_START || - opcode == RERAISE || opcode == CLEANUP_THROW || - opcode == PUSH_EXC_INFO || opcode == INTERPRETER_EXIT); - _PyFrame_SetStackPointer(frame, stack_pointer); - int full = !_PyJit_translate_single_bytecode_to_trace(tstate, frame, next_instr, stop_tracing ? _DEOPT : 0); - stack_pointer = _PyFrame_GetStackPointer(frame); - if (full) { - LEAVE_TRACING(); - _PyFrame_SetStackPointer(frame, stack_pointer); - int err = stop_tracing_and_jit(tstate, frame); - stack_pointer = _PyFrame_GetStackPointer(frame); - if (err < 0) { - JUMP_TO_LABEL(error); - } - DISPATCH_GOTO_NON_TRACING(); - } - _PyThreadStateImpl *_tstate = (_PyThreadStateImpl *)tstate; - if ((_tstate->jit_tracer_state.prev_state.instr->op.code == CALL_LIST_APPEND && - opcode == POP_TOP) || - (_tstate->jit_tracer_state.prev_state.instr->op.code == BINARY_OP_INPLACE_ADD_UNICODE && - opcode == STORE_FAST)) { - _tstate->jit_tracer_state.prev_state.instr_is_super = true; - } - else { - _tstate->jit_tracer_state.prev_state.instr = next_instr; - } - PyObject *prev_code = PyStackRef_AsPyObjectBorrow(frame->f_executable); - if (_tstate->jit_tracer_state.prev_state.instr_code != (PyCodeObject *)prev_code) { - _PyFrame_SetStackPointer(frame, stack_pointer); - Py_SETREF(_tstate->jit_tracer_state.prev_state.instr_code, (PyCodeObject*)Py_NewRef((prev_code))); - stack_pointer = _PyFrame_GetStackPointer(frame); - } - _tstate->jit_tracer_state.prev_state.instr_frame = 
frame; - _tstate->jit_tracer_state.prev_state.instr_oparg = oparg; - _tstate->jit_tracer_state.prev_state.instr_stacklevel = PyStackRef_IsNone(frame->f_executable) ? 2 : STACK_LEVEL(); - if (_PyOpcode_Caches[_PyOpcode_Deopt[opcode]]) { - (&next_instr[1])->counter = trigger_backoff_counter(); - } - DISPATCH_GOTO_NON_TRACING(); - #else - Py_FatalError("JIT label executed in non-jit build."); - #endif - } - LABEL(stop_tracing) { #if _Py_TIER2 diff --git a/Python/instrumentation.c b/Python/instrumentation.c index 81e46a331e0b9e..72b7433022fdea 100644 --- a/Python/instrumentation.c +++ b/Python/instrumentation.c @@ -191,7 +191,7 @@ is_instrumented(int opcode) { assert(opcode != 0); assert(opcode != RESERVED); - return opcode != ENTER_EXECUTOR && opcode >= MIN_INSTRUMENTED_OPCODE; + return opcode < ENTER_EXECUTOR && opcode >= MIN_INSTRUMENTED_OPCODE; } #ifndef NDEBUG @@ -526,7 +526,7 @@ valid_opcode(int opcode) if (IS_VALID_OPCODE(opcode) && opcode != CACHE && opcode != RESERVED && - opcode < 255) + opcode < 254) { return true; } diff --git a/Python/opcode_targets.h b/Python/opcode_targets.h index 1b9196503b570b..b2fa7d01e8f6c2 100644 --- a/Python/opcode_targets.h +++ b/Python/opcode_targets.h @@ -233,7 +233,6 @@ static void *opcode_targets_table[256] = { &&_unknown_opcode, &&_unknown_opcode, &&_unknown_opcode, - &&_unknown_opcode, &&TARGET_INSTRUMENTED_END_FOR, &&TARGET_INSTRUMENTED_POP_ITER, &&TARGET_INSTRUMENTED_END_SEND, @@ -256,220 +255,220 @@ static void *opcode_targets_table[256] = { &&TARGET_INSTRUMENTED_JUMP_BACKWARD, &&TARGET_INSTRUMENTED_LINE, &&TARGET_ENTER_EXECUTOR, + &&TARGET_TRACE_RECORD, }; #if _Py_TIER2 static void *opcode_tracing_targets_table[256] = { - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - 
&&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, 
- &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&_unknown_opcode, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + 
&&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, &&_unknown_opcode, &&_unknown_opcode, &&_unknown_opcode, &&_unknown_opcode, 
&&_unknown_opcode, &&_unknown_opcode, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - 
&&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, &&_unknown_opcode, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, 
+ &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, &&_unknown_opcode, &&_unknown_opcode, &&_unknown_opcode, @@ -493,28 +492,29 @@ static void *opcode_tracing_targets_table[256] = { &&_unknown_opcode, &&_unknown_opcode, &&_unknown_opcode, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, - &&record_previous_inst, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, + &&TARGET_TRACE_RECORD, }; #endif #else /* _Py_TAIL_CALL_INTERP */ @@ -528,7 +528,6 @@ Py_PRESERVE_NONE_CC static PyObject *_TAIL_CALL_error(TAIL_CALL_PARAMS); Py_PRESERVE_NONE_CC static PyObject *_TAIL_CALL_exception_unwind(TAIL_CALL_PARAMS); Py_PRESERVE_NONE_CC static PyObject *_TAIL_CALL_exit_unwind(TAIL_CALL_PARAMS); Py_PRESERVE_NONE_CC static PyObject *_TAIL_CALL_start_frame(TAIL_CALL_PARAMS); -Py_PRESERVE_NONE_CC static PyObject 
*_TAIL_CALL_record_previous_inst(TAIL_CALL_PARAMS); Py_PRESERVE_NONE_CC static PyObject *_TAIL_CALL_stop_tracing(TAIL_CALL_PARAMS); Py_PRESERVE_NONE_CC static PyObject *_TAIL_CALL_BINARY_OP(TAIL_CALL_PARAMS); @@ -746,6 +745,7 @@ Py_PRESERVE_NONE_CC static PyObject *_TAIL_CALL_TO_BOOL_INT(TAIL_CALL_PARAMS); Py_PRESERVE_NONE_CC static PyObject *_TAIL_CALL_TO_BOOL_LIST(TAIL_CALL_PARAMS); Py_PRESERVE_NONE_CC static PyObject *_TAIL_CALL_TO_BOOL_NONE(TAIL_CALL_PARAMS); Py_PRESERVE_NONE_CC static PyObject *_TAIL_CALL_TO_BOOL_STR(TAIL_CALL_PARAMS); +Py_PRESERVE_NONE_CC static PyObject *_TAIL_CALL_TRACE_RECORD(TAIL_CALL_PARAMS); Py_PRESERVE_NONE_CC static PyObject *_TAIL_CALL_UNARY_INVERT(TAIL_CALL_PARAMS); Py_PRESERVE_NONE_CC static PyObject *_TAIL_CALL_UNARY_NEGATIVE(TAIL_CALL_PARAMS); Py_PRESERVE_NONE_CC static PyObject *_TAIL_CALL_UNARY_NOT(TAIL_CALL_PARAMS); @@ -983,6 +983,7 @@ static py_tail_call_funcptr instruction_funcptr_handler_table[256] = { [TO_BOOL_LIST] = _TAIL_CALL_TO_BOOL_LIST, [TO_BOOL_NONE] = _TAIL_CALL_TO_BOOL_NONE, [TO_BOOL_STR] = _TAIL_CALL_TO_BOOL_STR, + [TRACE_RECORD] = _TAIL_CALL_TRACE_RECORD, [UNARY_INVERT] = _TAIL_CALL_UNARY_INVERT, [UNARY_NEGATIVE] = _TAIL_CALL_UNARY_NEGATIVE, [UNARY_NOT] = _TAIL_CALL_UNARY_NOT, @@ -1023,234 +1024,234 @@ static py_tail_call_funcptr instruction_funcptr_handler_table[256] = { [230] = _TAIL_CALL_UNKNOWN_OPCODE, [231] = _TAIL_CALL_UNKNOWN_OPCODE, [232] = _TAIL_CALL_UNKNOWN_OPCODE, - [233] = _TAIL_CALL_UNKNOWN_OPCODE, }; static py_tail_call_funcptr instruction_funcptr_tracing_table[256] = { - [BINARY_OP] = _TAIL_CALL_record_previous_inst, - [BINARY_OP_ADD_FLOAT] = _TAIL_CALL_record_previous_inst, - [BINARY_OP_ADD_INT] = _TAIL_CALL_record_previous_inst, - [BINARY_OP_ADD_UNICODE] = _TAIL_CALL_record_previous_inst, - [BINARY_OP_EXTEND] = _TAIL_CALL_record_previous_inst, - [BINARY_OP_INPLACE_ADD_UNICODE] = _TAIL_CALL_record_previous_inst, - [BINARY_OP_MULTIPLY_FLOAT] = _TAIL_CALL_record_previous_inst, - 
[BINARY_OP_MULTIPLY_INT] = _TAIL_CALL_record_previous_inst, - [BINARY_OP_SUBSCR_DICT] = _TAIL_CALL_record_previous_inst, - [BINARY_OP_SUBSCR_GETITEM] = _TAIL_CALL_record_previous_inst, - [BINARY_OP_SUBSCR_LIST_INT] = _TAIL_CALL_record_previous_inst, - [BINARY_OP_SUBSCR_LIST_SLICE] = _TAIL_CALL_record_previous_inst, - [BINARY_OP_SUBSCR_STR_INT] = _TAIL_CALL_record_previous_inst, - [BINARY_OP_SUBSCR_TUPLE_INT] = _TAIL_CALL_record_previous_inst, - [BINARY_OP_SUBTRACT_FLOAT] = _TAIL_CALL_record_previous_inst, - [BINARY_OP_SUBTRACT_INT] = _TAIL_CALL_record_previous_inst, - [BINARY_SLICE] = _TAIL_CALL_record_previous_inst, - [BUILD_INTERPOLATION] = _TAIL_CALL_record_previous_inst, - [BUILD_LIST] = _TAIL_CALL_record_previous_inst, - [BUILD_MAP] = _TAIL_CALL_record_previous_inst, - [BUILD_SET] = _TAIL_CALL_record_previous_inst, - [BUILD_SLICE] = _TAIL_CALL_record_previous_inst, - [BUILD_STRING] = _TAIL_CALL_record_previous_inst, - [BUILD_TEMPLATE] = _TAIL_CALL_record_previous_inst, - [BUILD_TUPLE] = _TAIL_CALL_record_previous_inst, - [CACHE] = _TAIL_CALL_record_previous_inst, - [CALL] = _TAIL_CALL_record_previous_inst, - [CALL_ALLOC_AND_ENTER_INIT] = _TAIL_CALL_record_previous_inst, - [CALL_BOUND_METHOD_EXACT_ARGS] = _TAIL_CALL_record_previous_inst, - [CALL_BOUND_METHOD_GENERAL] = _TAIL_CALL_record_previous_inst, - [CALL_BUILTIN_CLASS] = _TAIL_CALL_record_previous_inst, - [CALL_BUILTIN_FAST] = _TAIL_CALL_record_previous_inst, - [CALL_BUILTIN_FAST_WITH_KEYWORDS] = _TAIL_CALL_record_previous_inst, - [CALL_BUILTIN_O] = _TAIL_CALL_record_previous_inst, - [CALL_FUNCTION_EX] = _TAIL_CALL_record_previous_inst, - [CALL_INTRINSIC_1] = _TAIL_CALL_record_previous_inst, - [CALL_INTRINSIC_2] = _TAIL_CALL_record_previous_inst, - [CALL_ISINSTANCE] = _TAIL_CALL_record_previous_inst, - [CALL_KW] = _TAIL_CALL_record_previous_inst, - [CALL_KW_BOUND_METHOD] = _TAIL_CALL_record_previous_inst, - [CALL_KW_NON_PY] = _TAIL_CALL_record_previous_inst, - [CALL_KW_PY] = 
_TAIL_CALL_record_previous_inst, - [CALL_LEN] = _TAIL_CALL_record_previous_inst, - [CALL_LIST_APPEND] = _TAIL_CALL_record_previous_inst, - [CALL_METHOD_DESCRIPTOR_FAST] = _TAIL_CALL_record_previous_inst, - [CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = _TAIL_CALL_record_previous_inst, - [CALL_METHOD_DESCRIPTOR_NOARGS] = _TAIL_CALL_record_previous_inst, - [CALL_METHOD_DESCRIPTOR_O] = _TAIL_CALL_record_previous_inst, - [CALL_NON_PY_GENERAL] = _TAIL_CALL_record_previous_inst, - [CALL_PY_EXACT_ARGS] = _TAIL_CALL_record_previous_inst, - [CALL_PY_GENERAL] = _TAIL_CALL_record_previous_inst, - [CALL_STR_1] = _TAIL_CALL_record_previous_inst, - [CALL_TUPLE_1] = _TAIL_CALL_record_previous_inst, - [CALL_TYPE_1] = _TAIL_CALL_record_previous_inst, - [CHECK_EG_MATCH] = _TAIL_CALL_record_previous_inst, - [CHECK_EXC_MATCH] = _TAIL_CALL_record_previous_inst, - [CLEANUP_THROW] = _TAIL_CALL_record_previous_inst, - [COMPARE_OP] = _TAIL_CALL_record_previous_inst, - [COMPARE_OP_FLOAT] = _TAIL_CALL_record_previous_inst, - [COMPARE_OP_INT] = _TAIL_CALL_record_previous_inst, - [COMPARE_OP_STR] = _TAIL_CALL_record_previous_inst, - [CONTAINS_OP] = _TAIL_CALL_record_previous_inst, - [CONTAINS_OP_DICT] = _TAIL_CALL_record_previous_inst, - [CONTAINS_OP_SET] = _TAIL_CALL_record_previous_inst, - [CONVERT_VALUE] = _TAIL_CALL_record_previous_inst, - [COPY] = _TAIL_CALL_record_previous_inst, - [COPY_FREE_VARS] = _TAIL_CALL_record_previous_inst, - [DELETE_ATTR] = _TAIL_CALL_record_previous_inst, - [DELETE_DEREF] = _TAIL_CALL_record_previous_inst, - [DELETE_FAST] = _TAIL_CALL_record_previous_inst, - [DELETE_GLOBAL] = _TAIL_CALL_record_previous_inst, - [DELETE_NAME] = _TAIL_CALL_record_previous_inst, - [DELETE_SUBSCR] = _TAIL_CALL_record_previous_inst, - [DICT_MERGE] = _TAIL_CALL_record_previous_inst, - [DICT_UPDATE] = _TAIL_CALL_record_previous_inst, - [END_ASYNC_FOR] = _TAIL_CALL_record_previous_inst, - [END_FOR] = _TAIL_CALL_record_previous_inst, - [END_SEND] = _TAIL_CALL_record_previous_inst, - 
[ENTER_EXECUTOR] = _TAIL_CALL_record_previous_inst, - [EXIT_INIT_CHECK] = _TAIL_CALL_record_previous_inst, - [EXTENDED_ARG] = _TAIL_CALL_record_previous_inst, - [FORMAT_SIMPLE] = _TAIL_CALL_record_previous_inst, - [FORMAT_WITH_SPEC] = _TAIL_CALL_record_previous_inst, - [FOR_ITER] = _TAIL_CALL_record_previous_inst, - [FOR_ITER_GEN] = _TAIL_CALL_record_previous_inst, - [FOR_ITER_LIST] = _TAIL_CALL_record_previous_inst, - [FOR_ITER_RANGE] = _TAIL_CALL_record_previous_inst, - [FOR_ITER_TUPLE] = _TAIL_CALL_record_previous_inst, - [GET_AITER] = _TAIL_CALL_record_previous_inst, - [GET_ANEXT] = _TAIL_CALL_record_previous_inst, - [GET_AWAITABLE] = _TAIL_CALL_record_previous_inst, - [GET_ITER] = _TAIL_CALL_record_previous_inst, - [GET_LEN] = _TAIL_CALL_record_previous_inst, - [GET_YIELD_FROM_ITER] = _TAIL_CALL_record_previous_inst, - [IMPORT_FROM] = _TAIL_CALL_record_previous_inst, - [IMPORT_NAME] = _TAIL_CALL_record_previous_inst, - [INSTRUMENTED_CALL] = _TAIL_CALL_record_previous_inst, - [INSTRUMENTED_CALL_FUNCTION_EX] = _TAIL_CALL_record_previous_inst, - [INSTRUMENTED_CALL_KW] = _TAIL_CALL_record_previous_inst, - [INSTRUMENTED_END_ASYNC_FOR] = _TAIL_CALL_record_previous_inst, - [INSTRUMENTED_END_FOR] = _TAIL_CALL_record_previous_inst, - [INSTRUMENTED_END_SEND] = _TAIL_CALL_record_previous_inst, - [INSTRUMENTED_FOR_ITER] = _TAIL_CALL_record_previous_inst, - [INSTRUMENTED_INSTRUCTION] = _TAIL_CALL_record_previous_inst, - [INSTRUMENTED_JUMP_BACKWARD] = _TAIL_CALL_record_previous_inst, - [INSTRUMENTED_JUMP_FORWARD] = _TAIL_CALL_record_previous_inst, - [INSTRUMENTED_LINE] = _TAIL_CALL_record_previous_inst, - [INSTRUMENTED_LOAD_SUPER_ATTR] = _TAIL_CALL_record_previous_inst, - [INSTRUMENTED_NOT_TAKEN] = _TAIL_CALL_record_previous_inst, - [INSTRUMENTED_POP_ITER] = _TAIL_CALL_record_previous_inst, - [INSTRUMENTED_POP_JUMP_IF_FALSE] = _TAIL_CALL_record_previous_inst, - [INSTRUMENTED_POP_JUMP_IF_NONE] = _TAIL_CALL_record_previous_inst, - [INSTRUMENTED_POP_JUMP_IF_NOT_NONE] = 
_TAIL_CALL_record_previous_inst, - [INSTRUMENTED_POP_JUMP_IF_TRUE] = _TAIL_CALL_record_previous_inst, - [INSTRUMENTED_RESUME] = _TAIL_CALL_record_previous_inst, - [INSTRUMENTED_RETURN_VALUE] = _TAIL_CALL_record_previous_inst, - [INSTRUMENTED_YIELD_VALUE] = _TAIL_CALL_record_previous_inst, - [INTERPRETER_EXIT] = _TAIL_CALL_record_previous_inst, - [IS_OP] = _TAIL_CALL_record_previous_inst, - [JUMP_BACKWARD] = _TAIL_CALL_record_previous_inst, - [JUMP_BACKWARD_JIT] = _TAIL_CALL_record_previous_inst, - [JUMP_BACKWARD_NO_INTERRUPT] = _TAIL_CALL_record_previous_inst, - [JUMP_BACKWARD_NO_JIT] = _TAIL_CALL_record_previous_inst, - [JUMP_FORWARD] = _TAIL_CALL_record_previous_inst, - [LIST_APPEND] = _TAIL_CALL_record_previous_inst, - [LIST_EXTEND] = _TAIL_CALL_record_previous_inst, - [LOAD_ATTR] = _TAIL_CALL_record_previous_inst, - [LOAD_ATTR_CLASS] = _TAIL_CALL_record_previous_inst, - [LOAD_ATTR_CLASS_WITH_METACLASS_CHECK] = _TAIL_CALL_record_previous_inst, - [LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN] = _TAIL_CALL_record_previous_inst, - [LOAD_ATTR_INSTANCE_VALUE] = _TAIL_CALL_record_previous_inst, - [LOAD_ATTR_METHOD_LAZY_DICT] = _TAIL_CALL_record_previous_inst, - [LOAD_ATTR_METHOD_NO_DICT] = _TAIL_CALL_record_previous_inst, - [LOAD_ATTR_METHOD_WITH_VALUES] = _TAIL_CALL_record_previous_inst, - [LOAD_ATTR_MODULE] = _TAIL_CALL_record_previous_inst, - [LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = _TAIL_CALL_record_previous_inst, - [LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = _TAIL_CALL_record_previous_inst, - [LOAD_ATTR_PROPERTY] = _TAIL_CALL_record_previous_inst, - [LOAD_ATTR_SLOT] = _TAIL_CALL_record_previous_inst, - [LOAD_ATTR_WITH_HINT] = _TAIL_CALL_record_previous_inst, - [LOAD_BUILD_CLASS] = _TAIL_CALL_record_previous_inst, - [LOAD_COMMON_CONSTANT] = _TAIL_CALL_record_previous_inst, - [LOAD_CONST] = _TAIL_CALL_record_previous_inst, - [LOAD_DEREF] = _TAIL_CALL_record_previous_inst, - [LOAD_FAST] = _TAIL_CALL_record_previous_inst, - [LOAD_FAST_AND_CLEAR] = _TAIL_CALL_record_previous_inst, - 
[LOAD_FAST_BORROW] = _TAIL_CALL_record_previous_inst, - [LOAD_FAST_BORROW_LOAD_FAST_BORROW] = _TAIL_CALL_record_previous_inst, - [LOAD_FAST_CHECK] = _TAIL_CALL_record_previous_inst, - [LOAD_FAST_LOAD_FAST] = _TAIL_CALL_record_previous_inst, - [LOAD_FROM_DICT_OR_DEREF] = _TAIL_CALL_record_previous_inst, - [LOAD_FROM_DICT_OR_GLOBALS] = _TAIL_CALL_record_previous_inst, - [LOAD_GLOBAL] = _TAIL_CALL_record_previous_inst, - [LOAD_GLOBAL_BUILTIN] = _TAIL_CALL_record_previous_inst, - [LOAD_GLOBAL_MODULE] = _TAIL_CALL_record_previous_inst, - [LOAD_LOCALS] = _TAIL_CALL_record_previous_inst, - [LOAD_NAME] = _TAIL_CALL_record_previous_inst, - [LOAD_SMALL_INT] = _TAIL_CALL_record_previous_inst, - [LOAD_SPECIAL] = _TAIL_CALL_record_previous_inst, - [LOAD_SUPER_ATTR] = _TAIL_CALL_record_previous_inst, - [LOAD_SUPER_ATTR_ATTR] = _TAIL_CALL_record_previous_inst, - [LOAD_SUPER_ATTR_METHOD] = _TAIL_CALL_record_previous_inst, - [MAKE_CELL] = _TAIL_CALL_record_previous_inst, - [MAKE_FUNCTION] = _TAIL_CALL_record_previous_inst, - [MAP_ADD] = _TAIL_CALL_record_previous_inst, - [MATCH_CLASS] = _TAIL_CALL_record_previous_inst, - [MATCH_KEYS] = _TAIL_CALL_record_previous_inst, - [MATCH_MAPPING] = _TAIL_CALL_record_previous_inst, - [MATCH_SEQUENCE] = _TAIL_CALL_record_previous_inst, - [NOP] = _TAIL_CALL_record_previous_inst, - [NOT_TAKEN] = _TAIL_CALL_record_previous_inst, - [POP_EXCEPT] = _TAIL_CALL_record_previous_inst, - [POP_ITER] = _TAIL_CALL_record_previous_inst, - [POP_JUMP_IF_FALSE] = _TAIL_CALL_record_previous_inst, - [POP_JUMP_IF_NONE] = _TAIL_CALL_record_previous_inst, - [POP_JUMP_IF_NOT_NONE] = _TAIL_CALL_record_previous_inst, - [POP_JUMP_IF_TRUE] = _TAIL_CALL_record_previous_inst, - [POP_TOP] = _TAIL_CALL_record_previous_inst, - [PUSH_EXC_INFO] = _TAIL_CALL_record_previous_inst, - [PUSH_NULL] = _TAIL_CALL_record_previous_inst, - [RAISE_VARARGS] = _TAIL_CALL_record_previous_inst, - [RERAISE] = _TAIL_CALL_record_previous_inst, - [RESERVED] = _TAIL_CALL_record_previous_inst, - 
[RESUME] = _TAIL_CALL_record_previous_inst, - [RESUME_CHECK] = _TAIL_CALL_record_previous_inst, - [RETURN_GENERATOR] = _TAIL_CALL_record_previous_inst, - [RETURN_VALUE] = _TAIL_CALL_record_previous_inst, - [SEND] = _TAIL_CALL_record_previous_inst, - [SEND_GEN] = _TAIL_CALL_record_previous_inst, - [SETUP_ANNOTATIONS] = _TAIL_CALL_record_previous_inst, - [SET_ADD] = _TAIL_CALL_record_previous_inst, - [SET_FUNCTION_ATTRIBUTE] = _TAIL_CALL_record_previous_inst, - [SET_UPDATE] = _TAIL_CALL_record_previous_inst, - [STORE_ATTR] = _TAIL_CALL_record_previous_inst, - [STORE_ATTR_INSTANCE_VALUE] = _TAIL_CALL_record_previous_inst, - [STORE_ATTR_SLOT] = _TAIL_CALL_record_previous_inst, - [STORE_ATTR_WITH_HINT] = _TAIL_CALL_record_previous_inst, - [STORE_DEREF] = _TAIL_CALL_record_previous_inst, - [STORE_FAST] = _TAIL_CALL_record_previous_inst, - [STORE_FAST_LOAD_FAST] = _TAIL_CALL_record_previous_inst, - [STORE_FAST_STORE_FAST] = _TAIL_CALL_record_previous_inst, - [STORE_GLOBAL] = _TAIL_CALL_record_previous_inst, - [STORE_NAME] = _TAIL_CALL_record_previous_inst, - [STORE_SLICE] = _TAIL_CALL_record_previous_inst, - [STORE_SUBSCR] = _TAIL_CALL_record_previous_inst, - [STORE_SUBSCR_DICT] = _TAIL_CALL_record_previous_inst, - [STORE_SUBSCR_LIST_INT] = _TAIL_CALL_record_previous_inst, - [SWAP] = _TAIL_CALL_record_previous_inst, - [TO_BOOL] = _TAIL_CALL_record_previous_inst, - [TO_BOOL_ALWAYS_TRUE] = _TAIL_CALL_record_previous_inst, - [TO_BOOL_BOOL] = _TAIL_CALL_record_previous_inst, - [TO_BOOL_INT] = _TAIL_CALL_record_previous_inst, - [TO_BOOL_LIST] = _TAIL_CALL_record_previous_inst, - [TO_BOOL_NONE] = _TAIL_CALL_record_previous_inst, - [TO_BOOL_STR] = _TAIL_CALL_record_previous_inst, - [UNARY_INVERT] = _TAIL_CALL_record_previous_inst, - [UNARY_NEGATIVE] = _TAIL_CALL_record_previous_inst, - [UNARY_NOT] = _TAIL_CALL_record_previous_inst, - [UNPACK_EX] = _TAIL_CALL_record_previous_inst, - [UNPACK_SEQUENCE] = _TAIL_CALL_record_previous_inst, - [UNPACK_SEQUENCE_LIST] = 
_TAIL_CALL_record_previous_inst, - [UNPACK_SEQUENCE_TUPLE] = _TAIL_CALL_record_previous_inst, - [UNPACK_SEQUENCE_TWO_TUPLE] = _TAIL_CALL_record_previous_inst, - [WITH_EXCEPT_START] = _TAIL_CALL_record_previous_inst, - [YIELD_VALUE] = _TAIL_CALL_record_previous_inst, + [BINARY_OP] = _TAIL_CALL_TRACE_RECORD, + [BINARY_OP_ADD_FLOAT] = _TAIL_CALL_TRACE_RECORD, + [BINARY_OP_ADD_INT] = _TAIL_CALL_TRACE_RECORD, + [BINARY_OP_ADD_UNICODE] = _TAIL_CALL_TRACE_RECORD, + [BINARY_OP_EXTEND] = _TAIL_CALL_TRACE_RECORD, + [BINARY_OP_INPLACE_ADD_UNICODE] = _TAIL_CALL_TRACE_RECORD, + [BINARY_OP_MULTIPLY_FLOAT] = _TAIL_CALL_TRACE_RECORD, + [BINARY_OP_MULTIPLY_INT] = _TAIL_CALL_TRACE_RECORD, + [BINARY_OP_SUBSCR_DICT] = _TAIL_CALL_TRACE_RECORD, + [BINARY_OP_SUBSCR_GETITEM] = _TAIL_CALL_TRACE_RECORD, + [BINARY_OP_SUBSCR_LIST_INT] = _TAIL_CALL_TRACE_RECORD, + [BINARY_OP_SUBSCR_LIST_SLICE] = _TAIL_CALL_TRACE_RECORD, + [BINARY_OP_SUBSCR_STR_INT] = _TAIL_CALL_TRACE_RECORD, + [BINARY_OP_SUBSCR_TUPLE_INT] = _TAIL_CALL_TRACE_RECORD, + [BINARY_OP_SUBTRACT_FLOAT] = _TAIL_CALL_TRACE_RECORD, + [BINARY_OP_SUBTRACT_INT] = _TAIL_CALL_TRACE_RECORD, + [BINARY_SLICE] = _TAIL_CALL_TRACE_RECORD, + [BUILD_INTERPOLATION] = _TAIL_CALL_TRACE_RECORD, + [BUILD_LIST] = _TAIL_CALL_TRACE_RECORD, + [BUILD_MAP] = _TAIL_CALL_TRACE_RECORD, + [BUILD_SET] = _TAIL_CALL_TRACE_RECORD, + [BUILD_SLICE] = _TAIL_CALL_TRACE_RECORD, + [BUILD_STRING] = _TAIL_CALL_TRACE_RECORD, + [BUILD_TEMPLATE] = _TAIL_CALL_TRACE_RECORD, + [BUILD_TUPLE] = _TAIL_CALL_TRACE_RECORD, + [CACHE] = _TAIL_CALL_TRACE_RECORD, + [CALL] = _TAIL_CALL_TRACE_RECORD, + [CALL_ALLOC_AND_ENTER_INIT] = _TAIL_CALL_TRACE_RECORD, + [CALL_BOUND_METHOD_EXACT_ARGS] = _TAIL_CALL_TRACE_RECORD, + [CALL_BOUND_METHOD_GENERAL] = _TAIL_CALL_TRACE_RECORD, + [CALL_BUILTIN_CLASS] = _TAIL_CALL_TRACE_RECORD, + [CALL_BUILTIN_FAST] = _TAIL_CALL_TRACE_RECORD, + [CALL_BUILTIN_FAST_WITH_KEYWORDS] = _TAIL_CALL_TRACE_RECORD, + [CALL_BUILTIN_O] = _TAIL_CALL_TRACE_RECORD, + [CALL_FUNCTION_EX] 
= _TAIL_CALL_TRACE_RECORD, + [CALL_INTRINSIC_1] = _TAIL_CALL_TRACE_RECORD, + [CALL_INTRINSIC_2] = _TAIL_CALL_TRACE_RECORD, + [CALL_ISINSTANCE] = _TAIL_CALL_TRACE_RECORD, + [CALL_KW] = _TAIL_CALL_TRACE_RECORD, + [CALL_KW_BOUND_METHOD] = _TAIL_CALL_TRACE_RECORD, + [CALL_KW_NON_PY] = _TAIL_CALL_TRACE_RECORD, + [CALL_KW_PY] = _TAIL_CALL_TRACE_RECORD, + [CALL_LEN] = _TAIL_CALL_TRACE_RECORD, + [CALL_LIST_APPEND] = _TAIL_CALL_TRACE_RECORD, + [CALL_METHOD_DESCRIPTOR_FAST] = _TAIL_CALL_TRACE_RECORD, + [CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = _TAIL_CALL_TRACE_RECORD, + [CALL_METHOD_DESCRIPTOR_NOARGS] = _TAIL_CALL_TRACE_RECORD, + [CALL_METHOD_DESCRIPTOR_O] = _TAIL_CALL_TRACE_RECORD, + [CALL_NON_PY_GENERAL] = _TAIL_CALL_TRACE_RECORD, + [CALL_PY_EXACT_ARGS] = _TAIL_CALL_TRACE_RECORD, + [CALL_PY_GENERAL] = _TAIL_CALL_TRACE_RECORD, + [CALL_STR_1] = _TAIL_CALL_TRACE_RECORD, + [CALL_TUPLE_1] = _TAIL_CALL_TRACE_RECORD, + [CALL_TYPE_1] = _TAIL_CALL_TRACE_RECORD, + [CHECK_EG_MATCH] = _TAIL_CALL_TRACE_RECORD, + [CHECK_EXC_MATCH] = _TAIL_CALL_TRACE_RECORD, + [CLEANUP_THROW] = _TAIL_CALL_TRACE_RECORD, + [COMPARE_OP] = _TAIL_CALL_TRACE_RECORD, + [COMPARE_OP_FLOAT] = _TAIL_CALL_TRACE_RECORD, + [COMPARE_OP_INT] = _TAIL_CALL_TRACE_RECORD, + [COMPARE_OP_STR] = _TAIL_CALL_TRACE_RECORD, + [CONTAINS_OP] = _TAIL_CALL_TRACE_RECORD, + [CONTAINS_OP_DICT] = _TAIL_CALL_TRACE_RECORD, + [CONTAINS_OP_SET] = _TAIL_CALL_TRACE_RECORD, + [CONVERT_VALUE] = _TAIL_CALL_TRACE_RECORD, + [COPY] = _TAIL_CALL_TRACE_RECORD, + [COPY_FREE_VARS] = _TAIL_CALL_TRACE_RECORD, + [DELETE_ATTR] = _TAIL_CALL_TRACE_RECORD, + [DELETE_DEREF] = _TAIL_CALL_TRACE_RECORD, + [DELETE_FAST] = _TAIL_CALL_TRACE_RECORD, + [DELETE_GLOBAL] = _TAIL_CALL_TRACE_RECORD, + [DELETE_NAME] = _TAIL_CALL_TRACE_RECORD, + [DELETE_SUBSCR] = _TAIL_CALL_TRACE_RECORD, + [DICT_MERGE] = _TAIL_CALL_TRACE_RECORD, + [DICT_UPDATE] = _TAIL_CALL_TRACE_RECORD, + [END_ASYNC_FOR] = _TAIL_CALL_TRACE_RECORD, + [END_FOR] = _TAIL_CALL_TRACE_RECORD, + [END_SEND] = 
_TAIL_CALL_TRACE_RECORD, + [ENTER_EXECUTOR] = _TAIL_CALL_TRACE_RECORD, + [EXIT_INIT_CHECK] = _TAIL_CALL_TRACE_RECORD, + [EXTENDED_ARG] = _TAIL_CALL_TRACE_RECORD, + [FORMAT_SIMPLE] = _TAIL_CALL_TRACE_RECORD, + [FORMAT_WITH_SPEC] = _TAIL_CALL_TRACE_RECORD, + [FOR_ITER] = _TAIL_CALL_TRACE_RECORD, + [FOR_ITER_GEN] = _TAIL_CALL_TRACE_RECORD, + [FOR_ITER_LIST] = _TAIL_CALL_TRACE_RECORD, + [FOR_ITER_RANGE] = _TAIL_CALL_TRACE_RECORD, + [FOR_ITER_TUPLE] = _TAIL_CALL_TRACE_RECORD, + [GET_AITER] = _TAIL_CALL_TRACE_RECORD, + [GET_ANEXT] = _TAIL_CALL_TRACE_RECORD, + [GET_AWAITABLE] = _TAIL_CALL_TRACE_RECORD, + [GET_ITER] = _TAIL_CALL_TRACE_RECORD, + [GET_LEN] = _TAIL_CALL_TRACE_RECORD, + [GET_YIELD_FROM_ITER] = _TAIL_CALL_TRACE_RECORD, + [IMPORT_FROM] = _TAIL_CALL_TRACE_RECORD, + [IMPORT_NAME] = _TAIL_CALL_TRACE_RECORD, + [INSTRUMENTED_CALL] = _TAIL_CALL_TRACE_RECORD, + [INSTRUMENTED_CALL_FUNCTION_EX] = _TAIL_CALL_TRACE_RECORD, + [INSTRUMENTED_CALL_KW] = _TAIL_CALL_TRACE_RECORD, + [INSTRUMENTED_END_ASYNC_FOR] = _TAIL_CALL_TRACE_RECORD, + [INSTRUMENTED_END_FOR] = _TAIL_CALL_TRACE_RECORD, + [INSTRUMENTED_END_SEND] = _TAIL_CALL_TRACE_RECORD, + [INSTRUMENTED_FOR_ITER] = _TAIL_CALL_TRACE_RECORD, + [INSTRUMENTED_INSTRUCTION] = _TAIL_CALL_TRACE_RECORD, + [INSTRUMENTED_JUMP_BACKWARD] = _TAIL_CALL_TRACE_RECORD, + [INSTRUMENTED_JUMP_FORWARD] = _TAIL_CALL_TRACE_RECORD, + [INSTRUMENTED_LINE] = _TAIL_CALL_TRACE_RECORD, + [INSTRUMENTED_LOAD_SUPER_ATTR] = _TAIL_CALL_TRACE_RECORD, + [INSTRUMENTED_NOT_TAKEN] = _TAIL_CALL_TRACE_RECORD, + [INSTRUMENTED_POP_ITER] = _TAIL_CALL_TRACE_RECORD, + [INSTRUMENTED_POP_JUMP_IF_FALSE] = _TAIL_CALL_TRACE_RECORD, + [INSTRUMENTED_POP_JUMP_IF_NONE] = _TAIL_CALL_TRACE_RECORD, + [INSTRUMENTED_POP_JUMP_IF_NOT_NONE] = _TAIL_CALL_TRACE_RECORD, + [INSTRUMENTED_POP_JUMP_IF_TRUE] = _TAIL_CALL_TRACE_RECORD, + [INSTRUMENTED_RESUME] = _TAIL_CALL_TRACE_RECORD, + [INSTRUMENTED_RETURN_VALUE] = _TAIL_CALL_TRACE_RECORD, + [INSTRUMENTED_YIELD_VALUE] = _TAIL_CALL_TRACE_RECORD, + 
[INTERPRETER_EXIT] = _TAIL_CALL_TRACE_RECORD, + [IS_OP] = _TAIL_CALL_TRACE_RECORD, + [JUMP_BACKWARD] = _TAIL_CALL_TRACE_RECORD, + [JUMP_BACKWARD_JIT] = _TAIL_CALL_TRACE_RECORD, + [JUMP_BACKWARD_NO_INTERRUPT] = _TAIL_CALL_TRACE_RECORD, + [JUMP_BACKWARD_NO_JIT] = _TAIL_CALL_TRACE_RECORD, + [JUMP_FORWARD] = _TAIL_CALL_TRACE_RECORD, + [LIST_APPEND] = _TAIL_CALL_TRACE_RECORD, + [LIST_EXTEND] = _TAIL_CALL_TRACE_RECORD, + [LOAD_ATTR] = _TAIL_CALL_TRACE_RECORD, + [LOAD_ATTR_CLASS] = _TAIL_CALL_TRACE_RECORD, + [LOAD_ATTR_CLASS_WITH_METACLASS_CHECK] = _TAIL_CALL_TRACE_RECORD, + [LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN] = _TAIL_CALL_TRACE_RECORD, + [LOAD_ATTR_INSTANCE_VALUE] = _TAIL_CALL_TRACE_RECORD, + [LOAD_ATTR_METHOD_LAZY_DICT] = _TAIL_CALL_TRACE_RECORD, + [LOAD_ATTR_METHOD_NO_DICT] = _TAIL_CALL_TRACE_RECORD, + [LOAD_ATTR_METHOD_WITH_VALUES] = _TAIL_CALL_TRACE_RECORD, + [LOAD_ATTR_MODULE] = _TAIL_CALL_TRACE_RECORD, + [LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = _TAIL_CALL_TRACE_RECORD, + [LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = _TAIL_CALL_TRACE_RECORD, + [LOAD_ATTR_PROPERTY] = _TAIL_CALL_TRACE_RECORD, + [LOAD_ATTR_SLOT] = _TAIL_CALL_TRACE_RECORD, + [LOAD_ATTR_WITH_HINT] = _TAIL_CALL_TRACE_RECORD, + [LOAD_BUILD_CLASS] = _TAIL_CALL_TRACE_RECORD, + [LOAD_COMMON_CONSTANT] = _TAIL_CALL_TRACE_RECORD, + [LOAD_CONST] = _TAIL_CALL_TRACE_RECORD, + [LOAD_DEREF] = _TAIL_CALL_TRACE_RECORD, + [LOAD_FAST] = _TAIL_CALL_TRACE_RECORD, + [LOAD_FAST_AND_CLEAR] = _TAIL_CALL_TRACE_RECORD, + [LOAD_FAST_BORROW] = _TAIL_CALL_TRACE_RECORD, + [LOAD_FAST_BORROW_LOAD_FAST_BORROW] = _TAIL_CALL_TRACE_RECORD, + [LOAD_FAST_CHECK] = _TAIL_CALL_TRACE_RECORD, + [LOAD_FAST_LOAD_FAST] = _TAIL_CALL_TRACE_RECORD, + [LOAD_FROM_DICT_OR_DEREF] = _TAIL_CALL_TRACE_RECORD, + [LOAD_FROM_DICT_OR_GLOBALS] = _TAIL_CALL_TRACE_RECORD, + [LOAD_GLOBAL] = _TAIL_CALL_TRACE_RECORD, + [LOAD_GLOBAL_BUILTIN] = _TAIL_CALL_TRACE_RECORD, + [LOAD_GLOBAL_MODULE] = _TAIL_CALL_TRACE_RECORD, + [LOAD_LOCALS] = _TAIL_CALL_TRACE_RECORD, + [LOAD_NAME] = 
_TAIL_CALL_TRACE_RECORD, + [LOAD_SMALL_INT] = _TAIL_CALL_TRACE_RECORD, + [LOAD_SPECIAL] = _TAIL_CALL_TRACE_RECORD, + [LOAD_SUPER_ATTR] = _TAIL_CALL_TRACE_RECORD, + [LOAD_SUPER_ATTR_ATTR] = _TAIL_CALL_TRACE_RECORD, + [LOAD_SUPER_ATTR_METHOD] = _TAIL_CALL_TRACE_RECORD, + [MAKE_CELL] = _TAIL_CALL_TRACE_RECORD, + [MAKE_FUNCTION] = _TAIL_CALL_TRACE_RECORD, + [MAP_ADD] = _TAIL_CALL_TRACE_RECORD, + [MATCH_CLASS] = _TAIL_CALL_TRACE_RECORD, + [MATCH_KEYS] = _TAIL_CALL_TRACE_RECORD, + [MATCH_MAPPING] = _TAIL_CALL_TRACE_RECORD, + [MATCH_SEQUENCE] = _TAIL_CALL_TRACE_RECORD, + [NOP] = _TAIL_CALL_TRACE_RECORD, + [NOT_TAKEN] = _TAIL_CALL_TRACE_RECORD, + [POP_EXCEPT] = _TAIL_CALL_TRACE_RECORD, + [POP_ITER] = _TAIL_CALL_TRACE_RECORD, + [POP_JUMP_IF_FALSE] = _TAIL_CALL_TRACE_RECORD, + [POP_JUMP_IF_NONE] = _TAIL_CALL_TRACE_RECORD, + [POP_JUMP_IF_NOT_NONE] = _TAIL_CALL_TRACE_RECORD, + [POP_JUMP_IF_TRUE] = _TAIL_CALL_TRACE_RECORD, + [POP_TOP] = _TAIL_CALL_TRACE_RECORD, + [PUSH_EXC_INFO] = _TAIL_CALL_TRACE_RECORD, + [PUSH_NULL] = _TAIL_CALL_TRACE_RECORD, + [RAISE_VARARGS] = _TAIL_CALL_TRACE_RECORD, + [RERAISE] = _TAIL_CALL_TRACE_RECORD, + [RESERVED] = _TAIL_CALL_TRACE_RECORD, + [RESUME] = _TAIL_CALL_TRACE_RECORD, + [RESUME_CHECK] = _TAIL_CALL_TRACE_RECORD, + [RETURN_GENERATOR] = _TAIL_CALL_TRACE_RECORD, + [RETURN_VALUE] = _TAIL_CALL_TRACE_RECORD, + [SEND] = _TAIL_CALL_TRACE_RECORD, + [SEND_GEN] = _TAIL_CALL_TRACE_RECORD, + [SETUP_ANNOTATIONS] = _TAIL_CALL_TRACE_RECORD, + [SET_ADD] = _TAIL_CALL_TRACE_RECORD, + [SET_FUNCTION_ATTRIBUTE] = _TAIL_CALL_TRACE_RECORD, + [SET_UPDATE] = _TAIL_CALL_TRACE_RECORD, + [STORE_ATTR] = _TAIL_CALL_TRACE_RECORD, + [STORE_ATTR_INSTANCE_VALUE] = _TAIL_CALL_TRACE_RECORD, + [STORE_ATTR_SLOT] = _TAIL_CALL_TRACE_RECORD, + [STORE_ATTR_WITH_HINT] = _TAIL_CALL_TRACE_RECORD, + [STORE_DEREF] = _TAIL_CALL_TRACE_RECORD, + [STORE_FAST] = _TAIL_CALL_TRACE_RECORD, + [STORE_FAST_LOAD_FAST] = _TAIL_CALL_TRACE_RECORD, + [STORE_FAST_STORE_FAST] = _TAIL_CALL_TRACE_RECORD, + 
[STORE_GLOBAL] = _TAIL_CALL_TRACE_RECORD, + [STORE_NAME] = _TAIL_CALL_TRACE_RECORD, + [STORE_SLICE] = _TAIL_CALL_TRACE_RECORD, + [STORE_SUBSCR] = _TAIL_CALL_TRACE_RECORD, + [STORE_SUBSCR_DICT] = _TAIL_CALL_TRACE_RECORD, + [STORE_SUBSCR_LIST_INT] = _TAIL_CALL_TRACE_RECORD, + [SWAP] = _TAIL_CALL_TRACE_RECORD, + [TO_BOOL] = _TAIL_CALL_TRACE_RECORD, + [TO_BOOL_ALWAYS_TRUE] = _TAIL_CALL_TRACE_RECORD, + [TO_BOOL_BOOL] = _TAIL_CALL_TRACE_RECORD, + [TO_BOOL_INT] = _TAIL_CALL_TRACE_RECORD, + [TO_BOOL_LIST] = _TAIL_CALL_TRACE_RECORD, + [TO_BOOL_NONE] = _TAIL_CALL_TRACE_RECORD, + [TO_BOOL_STR] = _TAIL_CALL_TRACE_RECORD, + [TRACE_RECORD] = _TAIL_CALL_TRACE_RECORD, + [UNARY_INVERT] = _TAIL_CALL_TRACE_RECORD, + [UNARY_NEGATIVE] = _TAIL_CALL_TRACE_RECORD, + [UNARY_NOT] = _TAIL_CALL_TRACE_RECORD, + [UNPACK_EX] = _TAIL_CALL_TRACE_RECORD, + [UNPACK_SEQUENCE] = _TAIL_CALL_TRACE_RECORD, + [UNPACK_SEQUENCE_LIST] = _TAIL_CALL_TRACE_RECORD, + [UNPACK_SEQUENCE_TUPLE] = _TAIL_CALL_TRACE_RECORD, + [UNPACK_SEQUENCE_TWO_TUPLE] = _TAIL_CALL_TRACE_RECORD, + [WITH_EXCEPT_START] = _TAIL_CALL_TRACE_RECORD, + [YIELD_VALUE] = _TAIL_CALL_TRACE_RECORD, [121] = _TAIL_CALL_UNKNOWN_OPCODE, [122] = _TAIL_CALL_UNKNOWN_OPCODE, [123] = _TAIL_CALL_UNKNOWN_OPCODE, @@ -1281,6 +1282,5 @@ static py_tail_call_funcptr instruction_funcptr_tracing_table[256] = { [230] = _TAIL_CALL_UNKNOWN_OPCODE, [231] = _TAIL_CALL_UNKNOWN_OPCODE, [232] = _TAIL_CALL_UNKNOWN_OPCODE, - [233] = _TAIL_CALL_UNKNOWN_OPCODE, }; #endif /* _Py_TAIL_CALL_INTERP */ diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h index 01263fe8c7a78f..9ebd113df2dabf 100644 --- a/Python/optimizer_cases.c.h +++ b/Python/optimizer_cases.c.h @@ -3483,3 +3483,5 @@ break; } + /* _TRACE_RECORD is not a viable micro-op for tier 2 */ + diff --git a/Tools/cases_generator/analyzer.py b/Tools/cases_generator/analyzer.py index d39013db4f7fd6..93aa4899fe6ec8 100644 --- a/Tools/cases_generator/analyzer.py +++ b/Tools/cases_generator/analyzer.py @@ -1195,8 
+1195,9 @@ def assign_opcodes( # This is an historical oddity. instmap["BINARY_OP_INPLACE_ADD_UNICODE"] = 3 - instmap["INSTRUMENTED_LINE"] = 254 - instmap["ENTER_EXECUTOR"] = 255 + instmap["INSTRUMENTED_LINE"] = 253 + instmap["ENTER_EXECUTOR"] = 254 + instmap["TRACE_RECORD"] = 255 instrumented = [name for name in instructions if name.startswith("INSTRUMENTED")] @@ -1221,7 +1222,7 @@ def assign_opcodes( # Specialized ops appear in their own section # Instrumented opcodes are at the end of the valid range min_internal = instmap["RESUME"] + 1 - min_instrumented = 254 - (len(instrumented) - 1) + min_instrumented = 254 - len(instrumented) assert min_internal + len(specialized) < min_instrumented next_opcode = 1 diff --git a/Tools/cases_generator/target_generator.py b/Tools/cases_generator/target_generator.py index 36fa1d7fa4908b..f633f704485819 100644 --- a/Tools/cases_generator/target_generator.py +++ b/Tools/cases_generator/target_generator.py @@ -34,7 +34,7 @@ def write_opcode_targets(analysis: Analysis, out: CWriter) -> None: targets = ["&&_unknown_opcode,\n"] * 256 for name, op in analysis.opmap.items(): if op < 256: - targets[op] = f"&&record_previous_inst,\n" + targets[op] = f"&&TARGET_TRACE_RECORD,\n" out.emit("#if _Py_TIER2\n") out.emit("static void *opcode_tracing_targets_table[256] = {\n") for target in targets: @@ -84,7 +84,7 @@ def write_tailcall_dispatch_table(analysis: Analysis, out: CWriter) -> None: # Emit the tracing dispatch table. 
out.emit("static py_tail_call_funcptr instruction_funcptr_tracing_table[256] = {\n") for name in sorted(analysis.instructions.keys()): - out.emit(f"[{name}] = _TAIL_CALL_record_previous_inst,\n") + out.emit(f"[{name}] = _TAIL_CALL_TRACE_RECORD,\n") named_values = analysis.opmap.values() for rest in range(256): if rest not in named_values: diff --git a/Tools/cases_generator/tier1_generator.py b/Tools/cases_generator/tier1_generator.py index 94ffb0118f0786..c7ff5de681e6fa 100644 --- a/Tools/cases_generator/tier1_generator.py +++ b/Tools/cases_generator/tier1_generator.py @@ -160,7 +160,7 @@ def generate_tier1( #if !_Py_TAIL_CALL_INTERP #if !USE_COMPUTED_GOTOS dispatch_opcode: - switch (opcode) + switch (dispatch_code) #endif {{ #endif /* _Py_TAIL_CALL_INTERP */ From f46785f8bc118e0efb840af1e520777b1baa03d9 Mon Sep 17 00:00:00 2001 From: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com> Date: Tue, 18 Nov 2025 16:42:13 +0200 Subject: [PATCH 3/8] gh-133879: Copyedit "What's new in Python 3.15" (#141717) --- .../pending-removal-in-future.rst | 2 +- Doc/whatsnew/3.15.rst | 36 +++++++++---------- 2 files changed, 19 insertions(+), 19 deletions(-) diff --git a/Doc/deprecations/pending-removal-in-future.rst b/Doc/deprecations/pending-removal-in-future.rst index 7ed430625f305e..301867416701ea 100644 --- a/Doc/deprecations/pending-removal-in-future.rst +++ b/Doc/deprecations/pending-removal-in-future.rst @@ -76,7 +76,7 @@ although there is currently no date scheduled for their removal. * :mod:`mailbox`: Use of StringIO input and text mode is deprecated, use BytesIO and binary mode instead. -* :mod:`os`: Calling :func:`os.register_at_fork` in multi-threaded process. +* :mod:`os`: Calling :func:`os.register_at_fork` in a multi-threaded process. * :class:`!pydoc.ErrorDuringImport`: A tuple value for *exc_info* parameter is deprecated, use an exception instance. 
diff --git a/Doc/whatsnew/3.15.rst b/Doc/whatsnew/3.15.rst index cf5bef15203b23..24cc7e2d7eb911 100644 --- a/Doc/whatsnew/3.15.rst +++ b/Doc/whatsnew/3.15.rst @@ -316,9 +316,7 @@ Other language changes and compression. Common code patterns which can be optimized with :func:`~bytearray.take_bytes` are listed below. - (Contributed by Cody Maloney in :gh:`139871`.) - - .. list-table:: Suggested Optimizing Refactors + .. list-table:: Suggested optimizing refactors :header-rows: 1 * - Description @@ -387,10 +385,12 @@ Other language changes buffer.resize(n) data = buffer.take_bytes() + (Contributed by Cody Maloney in :gh:`139871`.) + * Many functions related to compiling or parsing Python code, such as :func:`compile`, :func:`ast.parse`, :func:`symtable.symtable`, - and :func:`importlib.abc.InspectLoader.source_to_code`, now allow to pass - the module name. It is needed to unambiguous :ref:`filter ` + and :func:`importlib.abc.InspectLoader.source_to_code`, now allow the module + name to be passed. It is needed to unambiguously :ref:`filter ` syntax warnings by module name. (Contributed by Serhiy Storchaka in :gh:`135801`.) @@ -776,6 +776,17 @@ unittest (Contributed by Garry Cairns in :gh:`134567`.) +venv +---- + +* On POSIX platforms, platlib directories will be created if needed when + creating virtual environments, instead of using ``lib64 -> lib`` symlink. + This means purelib and platlib of virtual environments no longer share the + same ``lib`` directory on platforms where :data:`sys.platlibdir` is not + equal to ``lib``. + (Contributed by Rui Xi in :gh:`133951`.) + + warnings -------- @@ -788,17 +799,6 @@ warnings (Contributed by Serhiy Storchaka in :gh:`135801`.) -venv ----- - -* On POSIX platforms, platlib directories will be created if needed when - creating virtual environments, instead of using ``lib64 -> lib`` symlink. 
- This means purelib and platlib of virtual environments no longer share the - same ``lib`` directory on platforms where :data:`sys.platlibdir` is not - equal to ``lib``. - (Contributed by Rui Xi in :gh:`133951`.) - - xml.parsers.expat ----------------- @@ -1242,7 +1242,7 @@ Porting to Python 3.15 This section lists previously described changes and other bugfixes that may require changes to your code. -* :class:`sqlite3.Connection` APIs has been cleaned up. +* :class:`sqlite3.Connection` APIs have been cleaned up. * All parameters of :func:`sqlite3.connect` except *database* are now keyword-only. * The first three parameters of methods :meth:`~sqlite3.Connection.create_function` @@ -1262,7 +1262,7 @@ that may require changes to your code. * :meth:`~mmap.mmap.resize` has been removed on platforms that don't support the underlying syscall, instead of raising a :exc:`SystemError`. -* Resource warning is now emitted for unclosed +* A resource warning is now emitted for an unclosed :func:`xml.etree.ElementTree.iterparse` iterator if it opened a file. Use its :meth:`!close` method or the :func:`contextlib.closing` context manager to close it. 
From a52c39e2608557a710784d5876150578d2ae5183 Mon Sep 17 00:00:00 2001 From: Pablo Galindo Salgado Date: Tue, 18 Nov 2025 15:14:16 +0000 Subject: [PATCH 4/8] gh-135953: Refactor test_sampling_profiler into multiple files (#141689) --- .../test_profiling/test_sampling_profiler.py | 3360 ----------------- .../test_sampling_profiler/__init__.py | 9 + .../test_sampling_profiler/helpers.py | 101 + .../test_sampling_profiler/mocks.py | 38 + .../test_sampling_profiler/test_advanced.py | 264 ++ .../test_sampling_profiler/test_cli.py | 664 ++++ .../test_sampling_profiler/test_collectors.py | 896 +++++ .../test_integration.py | 804 ++++ .../test_sampling_profiler/test_modes.py | 514 +++ .../test_sampling_profiler/test_profiler.py | 656 ++++ Makefile.pre.in | 1 + 11 files changed, 3947 insertions(+), 3360 deletions(-) delete mode 100644 Lib/test/test_profiling/test_sampling_profiler.py create mode 100644 Lib/test/test_profiling/test_sampling_profiler/__init__.py create mode 100644 Lib/test/test_profiling/test_sampling_profiler/helpers.py create mode 100644 Lib/test/test_profiling/test_sampling_profiler/mocks.py create mode 100644 Lib/test/test_profiling/test_sampling_profiler/test_advanced.py create mode 100644 Lib/test/test_profiling/test_sampling_profiler/test_cli.py create mode 100644 Lib/test/test_profiling/test_sampling_profiler/test_collectors.py create mode 100644 Lib/test/test_profiling/test_sampling_profiler/test_integration.py create mode 100644 Lib/test/test_profiling/test_sampling_profiler/test_modes.py create mode 100644 Lib/test/test_profiling/test_sampling_profiler/test_profiler.py diff --git a/Lib/test/test_profiling/test_sampling_profiler.py b/Lib/test/test_profiling/test_sampling_profiler.py deleted file mode 100644 index c2cc2ddd48a02c..00000000000000 --- a/Lib/test/test_profiling/test_sampling_profiler.py +++ /dev/null @@ -1,3360 +0,0 @@ -"""Tests for the sampling profiler (profiling.sampling).""" - -import contextlib -import io -import json -import 
marshal -import os -import shutil -import socket -import subprocess -import sys -import tempfile -import unittest -from collections import namedtuple -from unittest import mock - -from profiling.sampling.pstats_collector import PstatsCollector -from profiling.sampling.stack_collector import ( - CollapsedStackCollector, - FlamegraphCollector, -) -from profiling.sampling.gecko_collector import GeckoCollector - -from test.support.os_helper import unlink -from test.support import ( - force_not_colorized_test_class, - SHORT_TIMEOUT, - script_helper, - os_helper, - SuppressCrashReport, -) -from test.support.socket_helper import find_unused_port -from test.support import requires_subprocess, is_emscripten -from test.support import captured_stdout, captured_stderr - -PROCESS_VM_READV_SUPPORTED = False - -try: - from _remote_debugging import PROCESS_VM_READV_SUPPORTED - import _remote_debugging -except ImportError: - raise unittest.SkipTest( - "Test only runs when _remote_debugging is available" - ) -else: - import profiling.sampling - from profiling.sampling.sample import SampleProfiler - - - -class MockFrameInfo: - """Mock FrameInfo for testing since the real one isn't accessible.""" - - def __init__(self, filename, lineno, funcname): - self.filename = filename - self.lineno = lineno - self.funcname = funcname - - def __repr__(self): - return f"MockFrameInfo(filename='{self.filename}', lineno={self.lineno}, funcname='{self.funcname}')" - - -class MockThreadInfo: - """Mock ThreadInfo for testing since the real one isn't accessible.""" - - def __init__(self, thread_id, frame_info, status=0): # Default to THREAD_STATE_RUNNING (0) - self.thread_id = thread_id - self.frame_info = frame_info - self.status = status - - def __repr__(self): - return f"MockThreadInfo(thread_id={self.thread_id}, frame_info={self.frame_info}, status={self.status})" - - -class MockInterpreterInfo: - """Mock InterpreterInfo for testing since the real one isn't accessible.""" - - def __init__(self, 
interpreter_id, threads): - self.interpreter_id = interpreter_id - self.threads = threads - - def __repr__(self): - return f"MockInterpreterInfo(interpreter_id={self.interpreter_id}, threads={self.threads})" - - -skip_if_not_supported = unittest.skipIf( - ( - sys.platform != "darwin" - and sys.platform != "linux" - and sys.platform != "win32" - ), - "Test only runs on Linux, Windows and MacOS", -) - -SubprocessInfo = namedtuple('SubprocessInfo', ['process', 'socket']) - - -@contextlib.contextmanager -def test_subprocess(script): - # Find an unused port for socket communication - port = find_unused_port() - - # Inject socket connection code at the beginning of the script - socket_code = f''' -import socket -_test_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) -_test_sock.connect(('localhost', {port})) -_test_sock.sendall(b"ready") -''' - - # Combine socket code with user script - full_script = socket_code + script - - # Create server socket to wait for process to be ready - server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - server_socket.bind(("localhost", port)) - server_socket.settimeout(SHORT_TIMEOUT) - server_socket.listen(1) - - proc = subprocess.Popen( - [sys.executable, "-c", full_script], - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, - ) - - client_socket = None - try: - # Wait for process to connect and send ready signal - client_socket, _ = server_socket.accept() - server_socket.close() - response = client_socket.recv(1024) - if response != b"ready": - raise RuntimeError(f"Unexpected response from subprocess: {response}") - - yield SubprocessInfo(proc, client_socket) - finally: - if client_socket is not None: - client_socket.close() - if proc.poll() is None: - proc.kill() - proc.wait() - - -def close_and_unlink(file): - file.close() - unlink(file.name) - - -class TestSampleProfilerComponents(unittest.TestCase): - """Unit tests for individual 
profiler components.""" - - def test_mock_frame_info_with_empty_and_unicode_values(self): - """Test MockFrameInfo handles empty strings, unicode characters, and very long names correctly.""" - # Test with empty strings - frame = MockFrameInfo("", 0, "") - self.assertEqual(frame.filename, "") - self.assertEqual(frame.lineno, 0) - self.assertEqual(frame.funcname, "") - self.assertIn("filename=''", repr(frame)) - - # Test with unicode characters - frame = MockFrameInfo("文件.py", 42, "函数名") - self.assertEqual(frame.filename, "文件.py") - self.assertEqual(frame.funcname, "函数名") - - # Test with very long names - long_filename = "x" * 1000 + ".py" - long_funcname = "func_" + "x" * 1000 - frame = MockFrameInfo(long_filename, 999999, long_funcname) - self.assertEqual(frame.filename, long_filename) - self.assertEqual(frame.lineno, 999999) - self.assertEqual(frame.funcname, long_funcname) - - def test_pstats_collector_with_extreme_intervals_and_empty_data(self): - """Test PstatsCollector handles zero/large intervals, empty frames, None thread IDs, and duplicate frames.""" - # Test with zero interval - collector = PstatsCollector(sample_interval_usec=0) - self.assertEqual(collector.sample_interval_usec, 0) - - # Test with very large interval - collector = PstatsCollector(sample_interval_usec=1000000000) - self.assertEqual(collector.sample_interval_usec, 1000000000) - - # Test collecting empty frames list - collector = PstatsCollector(sample_interval_usec=1000) - collector.collect([]) - self.assertEqual(len(collector.result), 0) - - # Test collecting frames with None thread id - test_frames = [MockInterpreterInfo(0, [MockThreadInfo(None, [MockFrameInfo("file.py", 10, "func")])])] - collector.collect(test_frames) - # Should still process the frames - self.assertEqual(len(collector.result), 1) - - # Test collecting duplicate frames in same sample - test_frames = [ - MockInterpreterInfo( - 0, # interpreter_id - [MockThreadInfo( - 1, - [ - MockFrameInfo("file.py", 10, "func1"), - 
MockFrameInfo("file.py", 10, "func1"), # Duplicate - ], - )] - ) - ] - collector = PstatsCollector(sample_interval_usec=1000) - collector.collect(test_frames) - # Should count both occurrences - self.assertEqual( - collector.result[("file.py", 10, "func1")]["cumulative_calls"], 2 - ) - - def test_pstats_collector_single_frame_stacks(self): - """Test PstatsCollector with single-frame call stacks to trigger len(frames) <= 1 branch.""" - collector = PstatsCollector(sample_interval_usec=1000) - - # Test with exactly one frame (should trigger the <= 1 condition) - single_frame = [MockInterpreterInfo(0, [MockThreadInfo(1, [MockFrameInfo("single.py", 10, "single_func")])])] - collector.collect(single_frame) - - # Should record the single frame with inline call - self.assertEqual(len(collector.result), 1) - single_key = ("single.py", 10, "single_func") - self.assertIn(single_key, collector.result) - self.assertEqual(collector.result[single_key]["direct_calls"], 1) - self.assertEqual(collector.result[single_key]["cumulative_calls"], 1) - - # Test with empty frames (should also trigger <= 1 condition) - empty_frames = [MockInterpreterInfo(0, [MockThreadInfo(1, [])])] - collector.collect(empty_frames) - - # Should not add any new entries - self.assertEqual( - len(collector.result), 1 - ) # Still just the single frame - - # Test mixed single and multi-frame stacks - mixed_frames = [ - MockInterpreterInfo( - 0, - [ - MockThreadInfo( - 1, - [MockFrameInfo("single2.py", 20, "single_func2")], - ), # Single frame - MockThreadInfo( - 2, - [ # Multi-frame stack - MockFrameInfo("multi.py", 30, "multi_func1"), - MockFrameInfo("multi.py", 40, "multi_func2"), - ], - ), - ] - ), - ] - collector.collect(mixed_frames) - - # Should have recorded all functions - self.assertEqual( - len(collector.result), 4 - ) # single + single2 + multi1 + multi2 - - # Verify single frame handling - single2_key = ("single2.py", 20, "single_func2") - self.assertIn(single2_key, collector.result) - 
self.assertEqual(collector.result[single2_key]["direct_calls"], 1) - self.assertEqual(collector.result[single2_key]["cumulative_calls"], 1) - - # Verify multi-frame handling still works - multi1_key = ("multi.py", 30, "multi_func1") - multi2_key = ("multi.py", 40, "multi_func2") - self.assertIn(multi1_key, collector.result) - self.assertIn(multi2_key, collector.result) - self.assertEqual(collector.result[multi1_key]["direct_calls"], 1) - self.assertEqual( - collector.result[multi2_key]["cumulative_calls"], 1 - ) # Called from multi1 - - def test_collapsed_stack_collector_with_empty_and_deep_stacks(self): - """Test CollapsedStackCollector handles empty frames, single-frame stacks, and very deep call stacks.""" - collector = CollapsedStackCollector() - - # Test with empty frames - collector.collect([]) - self.assertEqual(len(collector.stack_counter), 0) - - # Test with single frame stack - test_frames = [MockInterpreterInfo(0, [MockThreadInfo(1, [("file.py", 10, "func")])])] - collector.collect(test_frames) - self.assertEqual(len(collector.stack_counter), 1) - ((path, thread_id), count), = collector.stack_counter.items() - self.assertEqual(path, (("file.py", 10, "func"),)) - self.assertEqual(thread_id, 1) - self.assertEqual(count, 1) - - # Test with very deep stack - deep_stack = [(f"file{i}.py", i, f"func{i}") for i in range(100)] - test_frames = [MockInterpreterInfo(0, [MockThreadInfo(1, deep_stack)])] - collector = CollapsedStackCollector() - collector.collect(test_frames) - # One aggregated path with 100 frames (reversed) - ((path_tuple, thread_id),), = (collector.stack_counter.keys(),) - self.assertEqual(len(path_tuple), 100) - self.assertEqual(path_tuple[0], ("file99.py", 99, "func99")) - self.assertEqual(path_tuple[-1], ("file0.py", 0, "func0")) - self.assertEqual(thread_id, 1) - - def test_pstats_collector_basic(self): - """Test basic PstatsCollector functionality.""" - collector = PstatsCollector(sample_interval_usec=1000) - - # Test empty state - 
self.assertEqual(len(collector.result), 0) - self.assertEqual(len(collector.stats), 0) - - # Test collecting sample data - test_frames = [ - MockInterpreterInfo( - 0, - [MockThreadInfo( - 1, - [ - MockFrameInfo("file.py", 10, "func1"), - MockFrameInfo("file.py", 20, "func2"), - ], - )] - ) - ] - collector.collect(test_frames) - - # Should have recorded calls for both functions - self.assertEqual(len(collector.result), 2) - self.assertIn(("file.py", 10, "func1"), collector.result) - self.assertIn(("file.py", 20, "func2"), collector.result) - - # Top-level function should have direct call - self.assertEqual( - collector.result[("file.py", 10, "func1")]["direct_calls"], 1 - ) - self.assertEqual( - collector.result[("file.py", 10, "func1")]["cumulative_calls"], 1 - ) - - # Calling function should have cumulative call but no direct calls - self.assertEqual( - collector.result[("file.py", 20, "func2")]["cumulative_calls"], 1 - ) - self.assertEqual( - collector.result[("file.py", 20, "func2")]["direct_calls"], 0 - ) - - def test_pstats_collector_create_stats(self): - """Test PstatsCollector stats creation.""" - collector = PstatsCollector( - sample_interval_usec=1000000 - ) # 1 second intervals - - test_frames = [ - MockInterpreterInfo( - 0, - [MockThreadInfo( - 1, - [ - MockFrameInfo("file.py", 10, "func1"), - MockFrameInfo("file.py", 20, "func2"), - ], - )] - ) - ] - collector.collect(test_frames) - collector.collect(test_frames) # Collect twice - - collector.create_stats() - - # Check stats format: (direct_calls, cumulative_calls, tt, ct, callers) - func1_stats = collector.stats[("file.py", 10, "func1")] - self.assertEqual(func1_stats[0], 2) # direct_calls (top of stack) - self.assertEqual(func1_stats[1], 2) # cumulative_calls - self.assertEqual( - func1_stats[2], 2.0 - ) # tt (total time - 2 samples * 1 sec) - self.assertEqual(func1_stats[3], 2.0) # ct (cumulative time) - - func2_stats = collector.stats[("file.py", 20, "func2")] - self.assertEqual( - func2_stats[0], 0 
- ) # direct_calls (never top of stack) - self.assertEqual( - func2_stats[1], 2 - ) # cumulative_calls (appears in stack) - self.assertEqual(func2_stats[2], 0.0) # tt (no direct calls) - self.assertEqual(func2_stats[3], 2.0) # ct (cumulative time) - - def test_collapsed_stack_collector_basic(self): - collector = CollapsedStackCollector() - - # Test empty state - self.assertEqual(len(collector.stack_counter), 0) - - # Test collecting sample data - test_frames = [ - MockInterpreterInfo(0, [MockThreadInfo(1, [("file.py", 10, "func1"), ("file.py", 20, "func2")])]) - ] - collector.collect(test_frames) - - # Should store one reversed path - self.assertEqual(len(collector.stack_counter), 1) - ((path, thread_id), count), = collector.stack_counter.items() - expected_tree = (("file.py", 20, "func2"), ("file.py", 10, "func1")) - self.assertEqual(path, expected_tree) - self.assertEqual(thread_id, 1) - self.assertEqual(count, 1) - - def test_collapsed_stack_collector_export(self): - collapsed_out = tempfile.NamedTemporaryFile(delete=False) - self.addCleanup(close_and_unlink, collapsed_out) - - collector = CollapsedStackCollector() - - test_frames1 = [ - MockInterpreterInfo(0, [MockThreadInfo(1, [("file.py", 10, "func1"), ("file.py", 20, "func2")])]) - ] - test_frames2 = [ - MockInterpreterInfo(0, [MockThreadInfo(1, [("file.py", 10, "func1"), ("file.py", 20, "func2")])]) - ] # Same stack - test_frames3 = [MockInterpreterInfo(0, [MockThreadInfo(1, [("other.py", 5, "other_func")])])] - - collector.collect(test_frames1) - collector.collect(test_frames2) - collector.collect(test_frames3) - - with (captured_stdout(), captured_stderr()): - collector.export(collapsed_out.name) - # Check file contents - with open(collapsed_out.name, "r") as f: - content = f.read() - - lines = content.strip().split("\n") - self.assertEqual(len(lines), 2) # Two unique stacks - - # Check collapsed format: tid:X;file:func:line;file:func:line count - stack1_expected = "tid:1;file.py:func2:20;file.py:func1:10 
2" - stack2_expected = "tid:1;other.py:other_func:5 1" - - self.assertIn(stack1_expected, lines) - self.assertIn(stack2_expected, lines) - - def test_flamegraph_collector_basic(self): - """Test basic FlamegraphCollector functionality.""" - collector = FlamegraphCollector() - - # Empty collector should produce 'No Data' - data = collector._convert_to_flamegraph_format() - # With string table, name is now an index - resolve it using the strings array - strings = data.get("strings", []) - name_index = data.get("name", 0) - resolved_name = strings[name_index] if isinstance(name_index, int) and 0 <= name_index < len(strings) else str(name_index) - self.assertIn(resolved_name, ("No Data", "No significant data")) - - # Test collecting sample data - test_frames = [ - MockInterpreterInfo( - 0, - [MockThreadInfo(1, [("file.py", 10, "func1"), ("file.py", 20, "func2")])], - ) - ] - collector.collect(test_frames) - - # Convert and verify structure: func2 -> func1 with counts = 1 - data = collector._convert_to_flamegraph_format() - # Expect promotion: root is the single child (func2), with func1 as its only child - strings = data.get("strings", []) - name_index = data.get("name", 0) - name = strings[name_index] if isinstance(name_index, int) and 0 <= name_index < len(strings) else str(name_index) - self.assertIsInstance(name, str) - self.assertTrue(name.startswith("Program Root: ")) - self.assertIn("func2 (file.py:20)", name) # formatted name - children = data.get("children", []) - self.assertEqual(len(children), 1) - child = children[0] - child_name_index = child.get("name", 0) - child_name = strings[child_name_index] if isinstance(child_name_index, int) and 0 <= child_name_index < len(strings) else str(child_name_index) - self.assertIn("func1 (file.py:10)", child_name) # formatted name - self.assertEqual(child["value"], 1) - - def test_flamegraph_collector_export(self): - """Test flamegraph HTML export functionality.""" - flamegraph_out = tempfile.NamedTemporaryFile( - 
suffix=".html", delete=False - ) - self.addCleanup(close_and_unlink, flamegraph_out) - - collector = FlamegraphCollector() - - # Create some test data (use Interpreter/Thread objects like runtime) - test_frames1 = [ - MockInterpreterInfo( - 0, - [MockThreadInfo(1, [("file.py", 10, "func1"), ("file.py", 20, "func2")])], - ) - ] - test_frames2 = [ - MockInterpreterInfo( - 0, - [MockThreadInfo(1, [("file.py", 10, "func1"), ("file.py", 20, "func2")])], - ) - ] # Same stack - test_frames3 = [ - MockInterpreterInfo(0, [MockThreadInfo(1, [("other.py", 5, "other_func")])]) - ] - - collector.collect(test_frames1) - collector.collect(test_frames2) - collector.collect(test_frames3) - - # Export flamegraph - with (captured_stdout(), captured_stderr()): - collector.export(flamegraph_out.name) - - # Verify file was created and contains valid data - self.assertTrue(os.path.exists(flamegraph_out.name)) - self.assertGreater(os.path.getsize(flamegraph_out.name), 0) - - # Check file contains HTML content - with open(flamegraph_out.name, "r", encoding="utf-8") as f: - content = f.read() - - # Should be valid HTML - self.assertIn("", content.lower()) - self.assertIn(" 0) - self.assertGreater(mock_collector.collect.call_count, 0) - self.assertLessEqual(mock_collector.collect.call_count, 3) - - def test_sample_profiler_missed_samples_warning(self): - """Test that the profiler warns about missed samples when sampling is too slow.""" - from profiling.sampling.sample import SampleProfiler - - mock_unwinder = mock.MagicMock() - mock_unwinder.get_stack_trace.return_value = [ - ( - 1, - [ - mock.MagicMock( - filename="test.py", lineno=10, funcname="test_func" - ) - ], - ) - ] - - with mock.patch( - "_remote_debugging.RemoteUnwinder" - ) as mock_unwinder_class: - mock_unwinder_class.return_value = mock_unwinder - - # Use very short interval that we'll miss - profiler = SampleProfiler( - pid=12345, sample_interval_usec=1000, all_threads=False - ) # 1ms interval - - mock_collector = 
mock.MagicMock() - - # Simulate slow sampling where we miss many samples - times = [ - 0.0, - 0.1, - 0.2, - 0.3, - 0.4, - 0.5, - 0.6, - 0.7, - ] # Extra time points to avoid StopIteration - - with mock.patch("time.perf_counter", side_effect=times): - with io.StringIO() as output: - with mock.patch("sys.stdout", output): - profiler.sample(mock_collector, duration_sec=0.5) - - result = output.getvalue() - - # Should warn about missed samples - self.assertIn("Warning: missed", result) - self.assertIn("samples from the expected total", result) - - -@force_not_colorized_test_class -class TestPrintSampledStats(unittest.TestCase): - """Test the print_sampled_stats function.""" - - def setUp(self): - """Set up test data.""" - # Mock stats data - self.mock_stats = mock.MagicMock() - self.mock_stats.stats = { - ("file1.py", 10, "func1"): ( - 100, - 100, - 0.5, - 0.5, - {}, - ), # cc, nc, tt, ct, callers - ("file2.py", 20, "func2"): (50, 50, 0.25, 0.3, {}), - ("file3.py", 30, "func3"): (200, 200, 1.5, 2.0, {}), - ("file4.py", 40, "func4"): ( - 10, - 10, - 0.001, - 0.001, - {}, - ), # millisecond range - ("file5.py", 50, "func5"): ( - 5, - 5, - 0.000001, - 0.000002, - {}, - ), # microsecond range - } - - def test_print_sampled_stats_basic(self): - """Test basic print_sampled_stats functionality.""" - from profiling.sampling.sample import print_sampled_stats - - # Capture output - with io.StringIO() as output: - with mock.patch("sys.stdout", output): - print_sampled_stats(self.mock_stats, sample_interval_usec=100) - - result = output.getvalue() - - # Check header is present - self.assertIn("Profile Stats:", result) - self.assertIn("nsamples", result) - self.assertIn("tottime", result) - self.assertIn("cumtime", result) - - # Check functions are present - self.assertIn("func1", result) - self.assertIn("func2", result) - self.assertIn("func3", result) - - def test_print_sampled_stats_sorting(self): - """Test different sorting options.""" - from profiling.sampling.sample import 
print_sampled_stats - - # Test sort by calls - with io.StringIO() as output: - with mock.patch("sys.stdout", output): - print_sampled_stats( - self.mock_stats, sort=0, sample_interval_usec=100 - ) - - result = output.getvalue() - lines = result.strip().split("\n") - - # Find the data lines (skip header) - data_lines = [l for l in lines if "file" in l and ".py" in l] - # func3 should be first (200 calls) - self.assertIn("func3", data_lines[0]) - - # Test sort by time - with io.StringIO() as output: - with mock.patch("sys.stdout", output): - print_sampled_stats( - self.mock_stats, sort=1, sample_interval_usec=100 - ) - - result = output.getvalue() - lines = result.strip().split("\n") - - data_lines = [l for l in lines if "file" in l and ".py" in l] - # func3 should be first (1.5s time) - self.assertIn("func3", data_lines[0]) - - def test_print_sampled_stats_limit(self): - """Test limiting output rows.""" - from profiling.sampling.sample import print_sampled_stats - - with io.StringIO() as output: - with mock.patch("sys.stdout", output): - print_sampled_stats( - self.mock_stats, limit=2, sample_interval_usec=100 - ) - - result = output.getvalue() - - # Count function entries in the main stats section (not in summary) - lines = result.split("\n") - # Find where the main stats section ends (before summary) - main_section_lines = [] - for line in lines: - if "Summary of Interesting Functions:" in line: - break - main_section_lines.append(line) - - # Count function entries only in main section - func_count = sum( - 1 - for line in main_section_lines - if "func" in line and ".py" in line - ) - self.assertEqual(func_count, 2) - - def test_print_sampled_stats_time_units(self): - """Test proper time unit selection.""" - from profiling.sampling.sample import print_sampled_stats - - with io.StringIO() as output: - with mock.patch("sys.stdout", output): - print_sampled_stats(self.mock_stats, sample_interval_usec=100) - - result = output.getvalue() - - # Should use seconds for 
the header since max time is > 1s - self.assertIn("tottime (s)", result) - self.assertIn("cumtime (s)", result) - - # Test with only microsecond-range times - micro_stats = mock.MagicMock() - micro_stats.stats = { - ("file1.py", 10, "func1"): (100, 100, 0.000005, 0.000010, {}), - } - - with io.StringIO() as output: - with mock.patch("sys.stdout", output): - print_sampled_stats(micro_stats, sample_interval_usec=100) - - result = output.getvalue() - - # Should use microseconds - self.assertIn("tottime (μs)", result) - self.assertIn("cumtime (μs)", result) - - def test_print_sampled_stats_summary(self): - """Test summary section generation.""" - from profiling.sampling.sample import print_sampled_stats - - with io.StringIO() as output: - with mock.patch("sys.stdout", output): - print_sampled_stats( - self.mock_stats, - show_summary=True, - sample_interval_usec=100, - ) - - result = output.getvalue() - - # Check summary sections are present - self.assertIn("Summary of Interesting Functions:", result) - self.assertIn( - "Functions with Highest Direct/Cumulative Ratio (Hot Spots):", - result, - ) - self.assertIn( - "Functions with Highest Call Frequency (Indirect Calls):", result - ) - self.assertIn( - "Functions with Highest Call Magnification (Cumulative/Direct):", - result, - ) - - def test_print_sampled_stats_no_summary(self): - """Test disabling summary output.""" - from profiling.sampling.sample import print_sampled_stats - - with io.StringIO() as output: - with mock.patch("sys.stdout", output): - print_sampled_stats( - self.mock_stats, - show_summary=False, - sample_interval_usec=100, - ) - - result = output.getvalue() - - # Summary should not be present - self.assertNotIn("Summary of Interesting Functions:", result) - - def test_print_sampled_stats_empty_stats(self): - """Test with empty stats.""" - from profiling.sampling.sample import print_sampled_stats - - empty_stats = mock.MagicMock() - empty_stats.stats = {} - - with io.StringIO() as output: - with 
mock.patch("sys.stdout", output): - print_sampled_stats(empty_stats, sample_interval_usec=100) - - result = output.getvalue() - - # Should still print header - self.assertIn("Profile Stats:", result) - - def test_print_sampled_stats_sample_percentage_sorting(self): - """Test sample percentage sorting options.""" - from profiling.sampling.sample import print_sampled_stats - - # Add a function with high sample percentage (more direct calls than func3's 200) - self.mock_stats.stats[("expensive.py", 60, "expensive_func")] = ( - 300, # direct calls (higher than func3's 200) - 300, # cumulative calls - 1.0, # total time - 1.0, # cumulative time - {}, - ) - - # Test sort by sample percentage - with io.StringIO() as output: - with mock.patch("sys.stdout", output): - print_sampled_stats( - self.mock_stats, sort=3, sample_interval_usec=100 - ) # sample percentage - - result = output.getvalue() - lines = result.strip().split("\n") - - data_lines = [l for l in lines if ".py" in l and "func" in l] - # expensive_func should be first (highest sample percentage) - self.assertIn("expensive_func", data_lines[0]) - - def test_print_sampled_stats_with_recursive_calls(self): - """Test print_sampled_stats with recursive calls where nc != cc.""" - from profiling.sampling.sample import print_sampled_stats - - # Create stats with recursive calls (nc != cc) - recursive_stats = mock.MagicMock() - recursive_stats.stats = { - # (direct_calls, cumulative_calls, tt, ct, callers) - recursive function - ("recursive.py", 10, "factorial"): ( - 5, # direct_calls - 10, # cumulative_calls (appears more times in stack due to recursion) - 0.5, - 0.6, - {}, - ), - ("normal.py", 20, "normal_func"): ( - 3, # direct_calls - 3, # cumulative_calls (same as direct for non-recursive) - 0.2, - 0.2, - {}, - ), - } - - with io.StringIO() as output: - with mock.patch("sys.stdout", output): - print_sampled_stats(recursive_stats, sample_interval_usec=100) - - result = output.getvalue() - - # Should display recursive 
calls as "5/10" format - self.assertIn("5/10", result) # nc/cc format for recursive calls - self.assertIn("3", result) # just nc for non-recursive calls - self.assertIn("factorial", result) - self.assertIn("normal_func", result) - - def test_print_sampled_stats_with_zero_call_counts(self): - """Test print_sampled_stats with zero call counts to trigger division protection.""" - from profiling.sampling.sample import print_sampled_stats - - # Create stats with zero call counts - zero_stats = mock.MagicMock() - zero_stats.stats = { - ("file.py", 10, "zero_calls"): (0, 0, 0.0, 0.0, {}), # Zero calls - ("file.py", 20, "normal_func"): ( - 5, - 5, - 0.1, - 0.1, - {}, - ), # Normal function - } - - with io.StringIO() as output: - with mock.patch("sys.stdout", output): - print_sampled_stats(zero_stats, sample_interval_usec=100) - - result = output.getvalue() - - # Should handle zero call counts gracefully - self.assertIn("zero_calls", result) - self.assertIn("zero_calls", result) - self.assertIn("normal_func", result) - - def test_print_sampled_stats_sort_by_name(self): - """Test sort by function name option.""" - from profiling.sampling.sample import print_sampled_stats - - with io.StringIO() as output: - with mock.patch("sys.stdout", output): - print_sampled_stats( - self.mock_stats, sort=-1, sample_interval_usec=100 - ) # sort by name - - result = output.getvalue() - lines = result.strip().split("\n") - - # Find the data lines (skip header and summary) - # Data lines start with whitespace and numbers, and contain filename:lineno(function) - data_lines = [] - for line in lines: - # Skip header lines and summary sections - if ( - line.startswith(" ") - and "(" in line - and ")" in line - and not line.startswith( - " 1." - ) # Skip summary lines that start with times - and not line.startswith( - " 0." 
- ) # Skip summary lines that start with times - and not "per call" in line # Skip summary lines - and not "calls" in line # Skip summary lines - and not "total time" in line # Skip summary lines - and not "cumulative time" in line - ): # Skip summary lines - data_lines.append(line) - - # Extract just the function names for comparison - func_names = [] - import re - - for line in data_lines: - # Function name is between the last ( and ), accounting for ANSI color codes - match = re.search(r"\(([^)]+)\)$", line) - if match: - func_name = match.group(1) - # Remove ANSI color codes - func_name = re.sub(r"\x1b\[[0-9;]*m", "", func_name) - func_names.append(func_name) - - # Verify we extracted function names and they are sorted - self.assertGreater( - len(func_names), 0, "Should have extracted some function names" - ) - self.assertEqual( - func_names, - sorted(func_names), - f"Function names {func_names} should be sorted alphabetically", - ) - - def test_print_sampled_stats_with_zero_time_functions(self): - """Test summary sections with functions that have zero time.""" - from profiling.sampling.sample import print_sampled_stats - - # Create stats with zero-time functions - zero_time_stats = mock.MagicMock() - zero_time_stats.stats = { - ("file1.py", 10, "zero_time_func"): ( - 5, - 5, - 0.0, - 0.0, - {}, - ), # Zero time - ("file2.py", 20, "normal_func"): ( - 3, - 3, - 0.1, - 0.1, - {}, - ), # Normal time - } - - with io.StringIO() as output: - with mock.patch("sys.stdout", output): - print_sampled_stats( - zero_time_stats, - show_summary=True, - sample_interval_usec=100, - ) - - result = output.getvalue() - - # Should handle zero-time functions gracefully in summary - self.assertIn("Summary of Interesting Functions:", result) - self.assertIn("zero_time_func", result) - self.assertIn("normal_func", result) - - def test_print_sampled_stats_with_malformed_qualified_names(self): - """Test summary generation with function names that don't contain colons.""" - from 
profiling.sampling.sample import print_sampled_stats - - # Create stats with function names that would create malformed qualified names - malformed_stats = mock.MagicMock() - malformed_stats.stats = { - # Function name without clear module separation - ("no_colon_func", 10, "func"): (3, 3, 0.1, 0.1, {}), - ("", 20, "empty_filename_func"): (2, 2, 0.05, 0.05, {}), - ("normal.py", 30, "normal_func"): (5, 5, 0.2, 0.2, {}), - } - - with io.StringIO() as output: - with mock.patch("sys.stdout", output): - print_sampled_stats( - malformed_stats, - show_summary=True, - sample_interval_usec=100, - ) - - result = output.getvalue() - - # Should handle malformed names gracefully in summary aggregation - self.assertIn("Summary of Interesting Functions:", result) - # All function names should appear somewhere in the output - self.assertIn("func", result) - self.assertIn("empty_filename_func", result) - self.assertIn("normal_func", result) - - def test_print_sampled_stats_with_recursive_call_stats_creation(self): - """Test create_stats with recursive call data to trigger total_rec_calls branch.""" - collector = PstatsCollector(sample_interval_usec=1000000) # 1 second - - # Simulate recursive function data where total_rec_calls would be set - # We need to manually manipulate the collector result to test this branch - collector.result = { - ("recursive.py", 10, "factorial"): { - "total_rec_calls": 3, # Non-zero recursive calls - "direct_calls": 5, - "cumulative_calls": 10, - }, - ("normal.py", 20, "normal_func"): { - "total_rec_calls": 0, # Zero recursive calls - "direct_calls": 2, - "cumulative_calls": 5, - }, - } - - collector.create_stats() - - # Check that recursive calls are handled differently from non-recursive - factorial_stats = collector.stats[("recursive.py", 10, "factorial")] - normal_stats = collector.stats[("normal.py", 20, "normal_func")] - - # factorial should use cumulative_calls (10) as nc - self.assertEqual( - factorial_stats[1], 10 - ) # nc should be 
cumulative_calls - self.assertEqual(factorial_stats[0], 5) # cc should be direct_calls - - # normal_func should use cumulative_calls as nc - self.assertEqual(normal_stats[1], 5) # nc should be cumulative_calls - self.assertEqual(normal_stats[0], 2) # cc should be direct_calls - - -@skip_if_not_supported -@unittest.skipIf( - sys.platform == "linux" and not PROCESS_VM_READV_SUPPORTED, - "Test only runs on Linux with process_vm_readv support", -) -class TestRecursiveFunctionProfiling(unittest.TestCase): - """Test profiling of recursive functions and complex call patterns.""" - - def test_recursive_function_call_counting(self): - """Test that recursive function calls are counted correctly.""" - collector = PstatsCollector(sample_interval_usec=1000) - - # Simulate a recursive call pattern: fibonacci(5) calling itself - recursive_frames = [ - MockInterpreterInfo( - 0, - [MockThreadInfo( - 1, - [ # First sample: deep in recursion - MockFrameInfo("fib.py", 10, "fibonacci"), - MockFrameInfo("fib.py", 10, "fibonacci"), # recursive call - MockFrameInfo( - "fib.py", 10, "fibonacci" - ), # deeper recursion - MockFrameInfo("fib.py", 10, "fibonacci"), # even deeper - MockFrameInfo("main.py", 5, "main"), # main caller - ], - )] - ), - MockInterpreterInfo( - 0, - [MockThreadInfo( - 1, - [ # Second sample: different recursion depth - MockFrameInfo("fib.py", 10, "fibonacci"), - MockFrameInfo("fib.py", 10, "fibonacci"), # recursive call - MockFrameInfo("main.py", 5, "main"), # main caller - ], - )] - ), - MockInterpreterInfo( - 0, - [MockThreadInfo( - 1, - [ # Third sample: back to deeper recursion - MockFrameInfo("fib.py", 10, "fibonacci"), - MockFrameInfo("fib.py", 10, "fibonacci"), - MockFrameInfo("fib.py", 10, "fibonacci"), - MockFrameInfo("main.py", 5, "main"), - ], - )] - ), - ] - - for frames in recursive_frames: - collector.collect([frames]) - - collector.create_stats() - - # Check that recursive calls are counted properly - fib_key = ("fib.py", 10, "fibonacci") - main_key = 
("main.py", 5, "main") - - self.assertIn(fib_key, collector.stats) - self.assertIn(main_key, collector.stats) - - # Fibonacci should have many calls due to recursion - fib_stats = collector.stats[fib_key] - direct_calls, cumulative_calls, tt, ct, callers = fib_stats - - # Should have recorded multiple calls (9 total appearances in samples) - self.assertEqual(cumulative_calls, 9) - self.assertGreater(tt, 0) # Should have some total time - self.assertGreater(ct, 0) # Should have some cumulative time - - # Main should have fewer calls - main_stats = collector.stats[main_key] - main_direct_calls, main_cumulative_calls = main_stats[0], main_stats[1] - self.assertEqual(main_direct_calls, 0) # Never directly executing - self.assertEqual(main_cumulative_calls, 3) # Appears in all 3 samples - - def test_nested_function_hierarchy(self): - """Test profiling of deeply nested function calls.""" - collector = PstatsCollector(sample_interval_usec=1000) - - # Simulate a deep call hierarchy - deep_call_frames = [ - MockInterpreterInfo( - 0, - [MockThreadInfo( - 1, - [ - MockFrameInfo("level1.py", 10, "level1_func"), - MockFrameInfo("level2.py", 20, "level2_func"), - MockFrameInfo("level3.py", 30, "level3_func"), - MockFrameInfo("level4.py", 40, "level4_func"), - MockFrameInfo("level5.py", 50, "level5_func"), - MockFrameInfo("main.py", 5, "main"), - ], - )] - ), - MockInterpreterInfo( - 0, - [MockThreadInfo( - 1, - [ # Same hierarchy sampled again - MockFrameInfo("level1.py", 10, "level1_func"), - MockFrameInfo("level2.py", 20, "level2_func"), - MockFrameInfo("level3.py", 30, "level3_func"), - MockFrameInfo("level4.py", 40, "level4_func"), - MockFrameInfo("level5.py", 50, "level5_func"), - MockFrameInfo("main.py", 5, "main"), - ], - )] - ), - ] - - for frames in deep_call_frames: - collector.collect([frames]) - - collector.create_stats() - - # All levels should be recorded - for level in range(1, 6): - key = (f"level{level}.py", level * 10, f"level{level}_func") - self.assertIn(key, 
collector.stats) - - stats = collector.stats[key] - direct_calls, cumulative_calls, tt, ct, callers = stats - - # Each level should appear in stack twice (2 samples) - self.assertEqual(cumulative_calls, 2) - - # Only level1 (deepest) should have direct calls - if level == 1: - self.assertEqual(direct_calls, 2) - else: - self.assertEqual(direct_calls, 0) - - # Deeper levels should have lower cumulative time than higher levels - # (since they don't include time from functions they call) - if level == 1: # Deepest level with most time - self.assertGreater(ct, 0) - - def test_alternating_call_patterns(self): - """Test profiling with alternating call patterns.""" - collector = PstatsCollector(sample_interval_usec=1000) - - # Simulate alternating execution paths - pattern_frames = [ - # Pattern A: path through func_a - MockInterpreterInfo( - 0, - [MockThreadInfo( - 1, - [ - MockFrameInfo("module.py", 10, "func_a"), - MockFrameInfo("module.py", 30, "shared_func"), - MockFrameInfo("main.py", 5, "main"), - ], - )] - ), - # Pattern B: path through func_b - MockInterpreterInfo( - 0, - [MockThreadInfo( - 1, - [ - MockFrameInfo("module.py", 20, "func_b"), - MockFrameInfo("module.py", 30, "shared_func"), - MockFrameInfo("main.py", 5, "main"), - ], - )] - ), - # Pattern A again - MockInterpreterInfo( - 0, - [MockThreadInfo( - 1, - [ - MockFrameInfo("module.py", 10, "func_a"), - MockFrameInfo("module.py", 30, "shared_func"), - MockFrameInfo("main.py", 5, "main"), - ], - )] - ), - # Pattern B again - MockInterpreterInfo( - 0, - [MockThreadInfo( - 1, - [ - MockFrameInfo("module.py", 20, "func_b"), - MockFrameInfo("module.py", 30, "shared_func"), - MockFrameInfo("main.py", 5, "main"), - ], - )] - ), - ] - - for frames in pattern_frames: - collector.collect([frames]) - - collector.create_stats() - - # Check that both paths are recorded equally - func_a_key = ("module.py", 10, "func_a") - func_b_key = ("module.py", 20, "func_b") - shared_key = ("module.py", 30, "shared_func") - 
main_key = ("main.py", 5, "main") - - # func_a and func_b should each be directly executing twice - self.assertEqual(collector.stats[func_a_key][0], 2) # direct_calls - self.assertEqual(collector.stats[func_a_key][1], 2) # cumulative_calls - self.assertEqual(collector.stats[func_b_key][0], 2) # direct_calls - self.assertEqual(collector.stats[func_b_key][1], 2) # cumulative_calls - - # shared_func should appear in all samples (4 times) but never directly executing - self.assertEqual(collector.stats[shared_key][0], 0) # direct_calls - self.assertEqual(collector.stats[shared_key][1], 4) # cumulative_calls - - # main should appear in all samples but never directly executing - self.assertEqual(collector.stats[main_key][0], 0) # direct_calls - self.assertEqual(collector.stats[main_key][1], 4) # cumulative_calls - - def test_collapsed_stack_with_recursion(self): - """Test collapsed stack collector with recursive patterns.""" - collector = CollapsedStackCollector() - - # Recursive call pattern - recursive_frames = [ - MockInterpreterInfo( - 0, - [MockThreadInfo( - 1, - [ - ("factorial.py", 10, "factorial"), - ("factorial.py", 10, "factorial"), # recursive - ("factorial.py", 10, "factorial"), # deeper - ("main.py", 5, "main"), - ], - )] - ), - MockInterpreterInfo( - 0, - [MockThreadInfo( - 1, - [ - ("factorial.py", 10, "factorial"), - ("factorial.py", 10, "factorial"), # different depth - ("main.py", 5, "main"), - ], - )] - ), - ] - - for frames in recursive_frames: - collector.collect([frames]) - - # Should capture both call paths - self.assertEqual(len(collector.stack_counter), 2) - - # First path should be longer (deeper recursion) than the second - path_tuples = list(collector.stack_counter.keys()) - paths = [p[0] for p in path_tuples] # Extract just the call paths - lengths = [len(p) for p in paths] - self.assertNotEqual(lengths[0], lengths[1]) - - # Both should contain factorial calls - self.assertTrue(any(any(f[2] == "factorial" for f in p) for p in paths)) - - # 
Verify total occurrences via aggregation - factorial_key = ("factorial.py", 10, "factorial") - main_key = ("main.py", 5, "main") - - def total_occurrences(func): - total = 0 - for (path, thread_id), count in collector.stack_counter.items(): - total += sum(1 for f in path if f == func) * count - return total - - self.assertEqual(total_occurrences(factorial_key), 5) - self.assertEqual(total_occurrences(main_key), 2) - - -@requires_subprocess() -@skip_if_not_supported -class TestSampleProfilerIntegration(unittest.TestCase): - @classmethod - def setUpClass(cls): - cls.test_script = ''' -import time -import os - -def slow_fibonacci(n): - """Recursive fibonacci - should show up prominently in profiler.""" - if n <= 1: - return n - return slow_fibonacci(n-1) + slow_fibonacci(n-2) - -def cpu_intensive_work(): - """CPU intensive work that should show in profiler.""" - result = 0 - for i in range(10000): - result += i * i - if i % 100 == 0: - result = result % 1000000 - return result - -def medium_computation(): - """Medium complexity function.""" - result = 0 - for i in range(100): - result += i * i - return result - -def fast_loop(): - """Fast simple loop.""" - total = 0 - for i in range(50): - total += i - return total - -def nested_calls(): - """Test nested function calls.""" - def level1(): - def level2(): - return medium_computation() - return level2() - return level1() - -def main_loop(): - """Main test loop with different execution paths.""" - iteration = 0 - - while True: - iteration += 1 - - # Different execution paths - focus on CPU intensive work - if iteration % 3 == 0: - # Very CPU intensive - result = cpu_intensive_work() - elif iteration % 5 == 0: - # Expensive recursive operation - result = slow_fibonacci(12) - else: - # Medium operation - result = nested_calls() - - # No sleep - keep CPU busy - -if __name__ == "__main__": - main_loop() -''' - - def test_sampling_basic_functionality(self): - with ( - test_subprocess(self.test_script) as subproc, - 
io.StringIO() as captured_output, - mock.patch("sys.stdout", captured_output), - ): - try: - profiling.sampling.sample.sample( - subproc.process.pid, - duration_sec=2, - sample_interval_usec=1000, # 1ms - show_summary=False, - ) - except PermissionError: - self.skipTest("Insufficient permissions for remote profiling") - - output = captured_output.getvalue() - - # Basic checks on output - self.assertIn("Captured", output) - self.assertIn("samples", output) - self.assertIn("Profile Stats", output) - - # Should see some of our test functions - self.assertIn("slow_fibonacci", output) - - def test_sampling_with_pstats_export(self): - pstats_out = tempfile.NamedTemporaryFile( - suffix=".pstats", delete=False - ) - self.addCleanup(close_and_unlink, pstats_out) - - with test_subprocess(self.test_script) as subproc: - # Suppress profiler output when testing file export - with ( - io.StringIO() as captured_output, - mock.patch("sys.stdout", captured_output), - ): - try: - profiling.sampling.sample.sample( - subproc.process.pid, - duration_sec=1, - filename=pstats_out.name, - sample_interval_usec=10000, - ) - except PermissionError: - self.skipTest( - "Insufficient permissions for remote profiling" - ) - - # Verify file was created and contains valid data - self.assertTrue(os.path.exists(pstats_out.name)) - self.assertGreater(os.path.getsize(pstats_out.name), 0) - - # Try to load the stats file - with open(pstats_out.name, "rb") as f: - stats_data = marshal.load(f) - - # Should be a dictionary with the sampled marker - self.assertIsInstance(stats_data, dict) - self.assertIn(("__sampled__",), stats_data) - self.assertTrue(stats_data[("__sampled__",)]) - - # Should have some function data - function_entries = [ - k for k in stats_data.keys() if k != ("__sampled__",) - ] - self.assertGreater(len(function_entries), 0) - - def test_sampling_with_collapsed_export(self): - collapsed_file = tempfile.NamedTemporaryFile( - suffix=".txt", delete=False - ) - 
self.addCleanup(close_and_unlink, collapsed_file) - - with ( - test_subprocess(self.test_script) as subproc, - ): - # Suppress profiler output when testing file export - with ( - io.StringIO() as captured_output, - mock.patch("sys.stdout", captured_output), - ): - try: - profiling.sampling.sample.sample( - subproc.process.pid, - duration_sec=1, - filename=collapsed_file.name, - output_format="collapsed", - sample_interval_usec=10000, - ) - except PermissionError: - self.skipTest( - "Insufficient permissions for remote profiling" - ) - - # Verify file was created and contains valid data - self.assertTrue(os.path.exists(collapsed_file.name)) - self.assertGreater(os.path.getsize(collapsed_file.name), 0) - - # Check file format - with open(collapsed_file.name, "r") as f: - content = f.read() - - lines = content.strip().split("\n") - self.assertGreater(len(lines), 0) - - # Each line should have format: stack_trace count - for line in lines: - parts = line.rsplit(" ", 1) - self.assertEqual(len(parts), 2) - - stack_trace, count_str = parts - self.assertGreater(len(stack_trace), 0) - self.assertTrue(count_str.isdigit()) - self.assertGreater(int(count_str), 0) - - # Stack trace should contain semicolon-separated entries - if ";" in stack_trace: - stack_parts = stack_trace.split(";") - for part in stack_parts: - # Each part should be file:function:line - self.assertIn(":", part) - - def test_sampling_all_threads(self): - with ( - test_subprocess(self.test_script) as subproc, - # Suppress profiler output - io.StringIO() as captured_output, - mock.patch("sys.stdout", captured_output), - ): - try: - profiling.sampling.sample.sample( - subproc.process.pid, - duration_sec=1, - all_threads=True, - sample_interval_usec=10000, - show_summary=False, - ) - except PermissionError: - self.skipTest("Insufficient permissions for remote profiling") - - # Just verify that sampling completed without error - # We're not testing output format here - - def test_sample_target_script(self): - 
script_file = tempfile.NamedTemporaryFile(delete=False) - script_file.write(self.test_script.encode("utf-8")) - script_file.flush() - self.addCleanup(close_and_unlink, script_file) - - test_args = ["profiling.sampling.sample", "-d", "1", script_file.name] - - with ( - mock.patch("sys.argv", test_args), - io.StringIO() as captured_output, - mock.patch("sys.stdout", captured_output), - ): - try: - profiling.sampling.sample.main() - except PermissionError: - self.skipTest("Insufficient permissions for remote profiling") - - output = captured_output.getvalue() - - # Basic checks on output - self.assertIn("Captured", output) - self.assertIn("samples", output) - self.assertIn("Profile Stats", output) - - # Should see some of our test functions - self.assertIn("slow_fibonacci", output) - - def test_sample_target_module(self): - tempdir = tempfile.TemporaryDirectory(delete=False) - self.addCleanup(lambda x: shutil.rmtree(x), tempdir.name) - - module_path = os.path.join(tempdir.name, "test_module.py") - - with open(module_path, "w") as f: - f.write(self.test_script) - - test_args = ["profiling.sampling.sample", "-d", "1", "-m", "test_module"] - - with ( - mock.patch("sys.argv", test_args), - io.StringIO() as captured_output, - mock.patch("sys.stdout", captured_output), - # Change to temp directory so subprocess can find the module - contextlib.chdir(tempdir.name), - ): - try: - profiling.sampling.sample.main() - except PermissionError: - self.skipTest("Insufficient permissions for remote profiling") - - output = captured_output.getvalue() - - # Basic checks on output - self.assertIn("Captured", output) - self.assertIn("samples", output) - self.assertIn("Profile Stats", output) - - # Should see some of our test functions - self.assertIn("slow_fibonacci", output) - - -@skip_if_not_supported -@unittest.skipIf( - sys.platform == "linux" and not PROCESS_VM_READV_SUPPORTED, - "Test only runs on Linux with process_vm_readv support", -) -class 
TestSampleProfilerErrorHandling(unittest.TestCase): - def test_invalid_pid(self): - with self.assertRaises((OSError, RuntimeError)): - profiling.sampling.sample.sample(-1, duration_sec=1) - - def test_process_dies_during_sampling(self): - with test_subprocess("import time; time.sleep(0.5); exit()") as subproc: - with ( - io.StringIO() as captured_output, - mock.patch("sys.stdout", captured_output), - ): - try: - profiling.sampling.sample.sample( - subproc.process.pid, - duration_sec=2, # Longer than process lifetime - sample_interval_usec=50000, - ) - except PermissionError: - self.skipTest( - "Insufficient permissions for remote profiling" - ) - - output = captured_output.getvalue() - - self.assertIn("Error rate", output) - - def test_invalid_output_format(self): - with self.assertRaises(ValueError): - profiling.sampling.sample.sample( - os.getpid(), - duration_sec=1, - output_format="invalid_format", - ) - - def test_invalid_output_format_with_mocked_profiler(self): - """Test invalid output format with proper mocking to avoid permission issues.""" - with mock.patch( - "profiling.sampling.sample.SampleProfiler" - ) as mock_profiler_class: - mock_profiler = mock.MagicMock() - mock_profiler_class.return_value = mock_profiler - - with self.assertRaises(ValueError) as cm: - profiling.sampling.sample.sample( - 12345, - duration_sec=1, - output_format="unknown_format", - ) - - # Should raise ValueError with the invalid format name - self.assertIn( - "Invalid output format: unknown_format", str(cm.exception) - ) - - def test_is_process_running(self): - with test_subprocess("import time; time.sleep(1000)") as subproc: - try: - profiler = SampleProfiler(pid=subproc.process.pid, sample_interval_usec=1000, all_threads=False) - except PermissionError: - self.skipTest( - "Insufficient permissions to read the stack trace" - ) - self.assertTrue(profiler._is_process_running()) - self.assertIsNotNone(profiler.unwinder.get_stack_trace()) - subproc.process.kill() - 
subproc.process.wait() - self.assertRaises(ProcessLookupError, profiler.unwinder.get_stack_trace) - - # Exit the context manager to ensure the process is terminated - self.assertFalse(profiler._is_process_running()) - self.assertRaises(ProcessLookupError, profiler.unwinder.get_stack_trace) - - @unittest.skipUnless(sys.platform == "linux", "Only valid on Linux") - def test_esrch_signal_handling(self): - with test_subprocess("import time; time.sleep(1000)") as subproc: - try: - unwinder = _remote_debugging.RemoteUnwinder(subproc.process.pid) - except PermissionError: - self.skipTest( - "Insufficient permissions to read the stack trace" - ) - initial_trace = unwinder.get_stack_trace() - self.assertIsNotNone(initial_trace) - - subproc.process.kill() - - # Wait for the process to die and try to get another trace - subproc.process.wait() - - with self.assertRaises(ProcessLookupError): - unwinder.get_stack_trace() - - def test_valid_output_formats(self): - """Test that all valid output formats are accepted.""" - valid_formats = ["pstats", "collapsed", "flamegraph", "gecko"] - - tempdir = tempfile.TemporaryDirectory(delete=False) - self.addCleanup(shutil.rmtree, tempdir.name) - - - with (contextlib.chdir(tempdir.name), captured_stdout(), captured_stderr()): - for fmt in valid_formats: - try: - # This will likely fail with permissions, but the format should be valid - profiling.sampling.sample.sample( - os.getpid(), - duration_sec=0.1, - output_format=fmt, - filename=f"test_{fmt}.out", - ) - except (OSError, RuntimeError, PermissionError): - # Expected errors - we just want to test format validation - pass - - def test_script_error_treatment(self): - script_file = tempfile.NamedTemporaryFile("w", delete=False, suffix=".py") - script_file.write("open('nonexistent_file.txt')\n") - script_file.close() - self.addCleanup(os.unlink, script_file.name) - - result = subprocess.run( - [sys.executable, "-m", "profiling.sampling.sample", "-d", "1", script_file.name], - 
capture_output=True, - text=True, - ) - output = result.stdout + result.stderr - - if "PermissionError" in output: - self.skipTest("Insufficient permissions for remote profiling") - self.assertNotIn("Script file not found", output) - self.assertIn("No such file or directory: 'nonexistent_file.txt'", output) - - -class TestSampleProfilerCLI(unittest.TestCase): - def _setup_sync_mocks(self, mock_socket, mock_popen): - """Helper to set up socket and process mocks for coordinator tests.""" - # Mock the sync socket with context manager support - mock_sock_instance = mock.MagicMock() - mock_sock_instance.getsockname.return_value = ("127.0.0.1", 12345) - - # Mock the connection with context manager support - mock_conn = mock.MagicMock() - mock_conn.recv.return_value = b"ready" - mock_conn.__enter__.return_value = mock_conn - mock_conn.__exit__.return_value = None - - # Mock accept() to return (connection, address) and support indexing - mock_accept_result = mock.MagicMock() - mock_accept_result.__getitem__.return_value = mock_conn # [0] returns the connection - mock_sock_instance.accept.return_value = mock_accept_result - - # Mock socket with context manager support - mock_sock_instance.__enter__.return_value = mock_sock_instance - mock_sock_instance.__exit__.return_value = None - mock_socket.return_value = mock_sock_instance - - # Mock the subprocess - mock_process = mock.MagicMock() - mock_process.pid = 12345 - mock_process.poll.return_value = None - mock_popen.return_value = mock_process - return mock_process - - def _verify_coordinator_command(self, mock_popen, expected_target_args): - """Helper to verify the coordinator command was called correctly.""" - args, kwargs = mock_popen.call_args - coordinator_cmd = args[0] - self.assertEqual(coordinator_cmd[0], sys.executable) - self.assertEqual(coordinator_cmd[1], "-m") - self.assertEqual(coordinator_cmd[2], "profiling.sampling._sync_coordinator") - self.assertEqual(coordinator_cmd[3], "12345") # port - # cwd is 
coordinator_cmd[4] - self.assertEqual(coordinator_cmd[5:], expected_target_args) - - @unittest.skipIf(is_emscripten, "socket.SO_REUSEADDR does not exist") - def test_cli_module_argument_parsing(self): - test_args = ["profiling.sampling.sample", "-m", "mymodule"] - - with ( - mock.patch("sys.argv", test_args), - mock.patch("profiling.sampling.sample.sample") as mock_sample, - mock.patch("subprocess.Popen") as mock_popen, - mock.patch("socket.socket") as mock_socket, - ): - self._setup_sync_mocks(mock_socket, mock_popen) - profiling.sampling.sample.main() - - self._verify_coordinator_command(mock_popen, ("-m", "mymodule")) - mock_sample.assert_called_once_with( - 12345, - sort=2, # default sort (sort_value from args.sort) - sample_interval_usec=100, - duration_sec=10, - filename=None, - all_threads=False, - limit=15, - show_summary=True, - output_format="pstats", - realtime_stats=False, - mode=0, - native=False, - gc=True, - ) - - @unittest.skipIf(is_emscripten, "socket.SO_REUSEADDR does not exist") - def test_cli_module_with_arguments(self): - test_args = ["profiling.sampling.sample", "-m", "mymodule", "arg1", "arg2", "--flag"] - - with ( - mock.patch("sys.argv", test_args), - mock.patch("profiling.sampling.sample.sample") as mock_sample, - mock.patch("subprocess.Popen") as mock_popen, - mock.patch("socket.socket") as mock_socket, - ): - self._setup_sync_mocks(mock_socket, mock_popen) - profiling.sampling.sample.main() - - self._verify_coordinator_command(mock_popen, ("-m", "mymodule", "arg1", "arg2", "--flag")) - mock_sample.assert_called_once_with( - 12345, - sort=2, - sample_interval_usec=100, - duration_sec=10, - filename=None, - all_threads=False, - limit=15, - show_summary=True, - output_format="pstats", - realtime_stats=False, - mode=0, - native=False, - gc=True, - ) - - @unittest.skipIf(is_emscripten, "socket.SO_REUSEADDR does not exist") - def test_cli_script_argument_parsing(self): - test_args = ["profiling.sampling.sample", "myscript.py"] - - with ( - 
mock.patch("sys.argv", test_args), - mock.patch("profiling.sampling.sample.sample") as mock_sample, - mock.patch("subprocess.Popen") as mock_popen, - mock.patch("socket.socket") as mock_socket, - ): - self._setup_sync_mocks(mock_socket, mock_popen) - profiling.sampling.sample.main() - - self._verify_coordinator_command(mock_popen, ("myscript.py",)) - mock_sample.assert_called_once_with( - 12345, - sort=2, - sample_interval_usec=100, - duration_sec=10, - filename=None, - all_threads=False, - limit=15, - show_summary=True, - output_format="pstats", - realtime_stats=False, - mode=0, - native=False, - gc=True, - ) - - @unittest.skipIf(is_emscripten, "socket.SO_REUSEADDR does not exist") - def test_cli_script_with_arguments(self): - test_args = ["profiling.sampling.sample", "myscript.py", "arg1", "arg2", "--flag"] - - with ( - mock.patch("sys.argv", test_args), - mock.patch("profiling.sampling.sample.sample") as mock_sample, - mock.patch("subprocess.Popen") as mock_popen, - mock.patch("socket.socket") as mock_socket, - ): - # Use the helper to set up mocks consistently - mock_process = self._setup_sync_mocks(mock_socket, mock_popen) - # Override specific behavior for this test - mock_process.wait.side_effect = [subprocess.TimeoutExpired(test_args, 0.1), None] - - profiling.sampling.sample.main() - - # Verify the coordinator command was called - args, kwargs = mock_popen.call_args - coordinator_cmd = args[0] - self.assertEqual(coordinator_cmd[0], sys.executable) - self.assertEqual(coordinator_cmd[1], "-m") - self.assertEqual(coordinator_cmd[2], "profiling.sampling._sync_coordinator") - self.assertEqual(coordinator_cmd[3], "12345") # port - # cwd is coordinator_cmd[4] - self.assertEqual(coordinator_cmd[5:], ("myscript.py", "arg1", "arg2", "--flag")) - - def test_cli_mutually_exclusive_pid_module(self): - test_args = ["profiling.sampling.sample", "-p", "12345", "-m", "mymodule"] - - with ( - mock.patch("sys.argv", test_args), - mock.patch("sys.stderr", io.StringIO()) as 
mock_stderr, - self.assertRaises(SystemExit) as cm, - ): - profiling.sampling.sample.main() - - self.assertEqual(cm.exception.code, 2) # argparse error - error_msg = mock_stderr.getvalue() - self.assertIn("not allowed with argument", error_msg) - - def test_cli_mutually_exclusive_pid_script(self): - test_args = ["profiling.sampling.sample", "-p", "12345", "myscript.py"] - - with ( - mock.patch("sys.argv", test_args), - mock.patch("sys.stderr", io.StringIO()) as mock_stderr, - self.assertRaises(SystemExit) as cm, - ): - profiling.sampling.sample.main() - - self.assertEqual(cm.exception.code, 2) # argparse error - error_msg = mock_stderr.getvalue() - self.assertIn("only one target type can be specified", error_msg) - - def test_cli_no_target_specified(self): - test_args = ["profiling.sampling.sample", "-d", "5"] - - with ( - mock.patch("sys.argv", test_args), - mock.patch("sys.stderr", io.StringIO()) as mock_stderr, - self.assertRaises(SystemExit) as cm, - ): - profiling.sampling.sample.main() - - self.assertEqual(cm.exception.code, 2) # argparse error - error_msg = mock_stderr.getvalue() - self.assertIn("one of the arguments", error_msg) - - @unittest.skipIf(is_emscripten, "socket.SO_REUSEADDR does not exist") - def test_cli_module_with_profiler_options(self): - test_args = [ - "profiling.sampling.sample", "-i", "1000", "-d", "30", "-a", - "--sort-tottime", "-l", "20", "-m", "mymodule", - ] - - with ( - mock.patch("sys.argv", test_args), - mock.patch("profiling.sampling.sample.sample") as mock_sample, - mock.patch("subprocess.Popen") as mock_popen, - mock.patch("socket.socket") as mock_socket, - ): - self._setup_sync_mocks(mock_socket, mock_popen) - profiling.sampling.sample.main() - - self._verify_coordinator_command(mock_popen, ("-m", "mymodule")) - mock_sample.assert_called_once_with( - 12345, - sort=1, # sort-tottime - sample_interval_usec=1000, - duration_sec=30, - filename=None, - all_threads=True, - limit=20, - show_summary=True, - output_format="pstats", - 
realtime_stats=False, - mode=0, - native=False, - gc=True, - ) - - @unittest.skipIf(is_emscripten, "socket.SO_REUSEADDR does not exist") - def test_cli_script_with_profiler_options(self): - """Test script with various profiler options.""" - test_args = [ - "profiling.sampling.sample", "-i", "2000", "-d", "60", - "--collapsed", "-o", "output.txt", - "myscript.py", "scriptarg", - ] - - with ( - mock.patch("sys.argv", test_args), - mock.patch("profiling.sampling.sample.sample") as mock_sample, - mock.patch("subprocess.Popen") as mock_popen, - mock.patch("socket.socket") as mock_socket, - ): - self._setup_sync_mocks(mock_socket, mock_popen) - profiling.sampling.sample.main() - - self._verify_coordinator_command(mock_popen, ("myscript.py", "scriptarg")) - # Verify profiler options were passed correctly - mock_sample.assert_called_once_with( - 12345, - sort=2, # default sort - sample_interval_usec=2000, - duration_sec=60, - filename="output.txt", - all_threads=False, - limit=15, - show_summary=True, - output_format="collapsed", - realtime_stats=False, - mode=0, - native=False, - gc=True, - ) - - def test_cli_empty_module_name(self): - test_args = ["profiling.sampling.sample", "-m"] - - with ( - mock.patch("sys.argv", test_args), - mock.patch("sys.stderr", io.StringIO()) as mock_stderr, - self.assertRaises(SystemExit) as cm, - ): - profiling.sampling.sample.main() - - self.assertEqual(cm.exception.code, 2) # argparse error - error_msg = mock_stderr.getvalue() - self.assertIn("argument -m/--module: expected one argument", error_msg) - - @unittest.skipIf(is_emscripten, "socket.SO_REUSEADDR does not exist") - def test_cli_long_module_option(self): - test_args = ["profiling.sampling.sample", "--module", "mymodule", "arg1"] - - with ( - mock.patch("sys.argv", test_args), - mock.patch("profiling.sampling.sample.sample") as mock_sample, - mock.patch("subprocess.Popen") as mock_popen, - mock.patch("socket.socket") as mock_socket, - ): - self._setup_sync_mocks(mock_socket, 
mock_popen) - profiling.sampling.sample.main() - - self._verify_coordinator_command(mock_popen, ("-m", "mymodule", "arg1")) - - def test_cli_complex_script_arguments(self): - test_args = [ - "profiling.sampling.sample", "script.py", - "--input", "file.txt", "-v", "--output=/tmp/out", "positional" - ] - - with ( - mock.patch("sys.argv", test_args), - mock.patch("profiling.sampling.sample.sample") as mock_sample, - mock.patch("profiling.sampling.sample._run_with_sync") as mock_run_with_sync, - ): - mock_process = mock.MagicMock() - mock_process.pid = 12345 - mock_process.wait.side_effect = [subprocess.TimeoutExpired(test_args, 0.1), None] - mock_process.poll.return_value = None - mock_run_with_sync.return_value = mock_process - - profiling.sampling.sample.main() - - mock_run_with_sync.assert_called_once_with(( - sys.executable, "script.py", - "--input", "file.txt", "-v", "--output=/tmp/out", "positional", - )) - - def test_cli_collapsed_format_validation(self): - """Test that CLI properly validates incompatible options with collapsed format.""" - test_cases = [ - # Test sort options are invalid with collapsed - ( - ["profiling.sampling.sample", "--collapsed", "--sort-nsamples", "-p", "12345"], - "sort", - ), - ( - ["profiling.sampling.sample", "--collapsed", "--sort-tottime", "-p", "12345"], - "sort", - ), - ( - [ - "profiling.sampling.sample", - "--collapsed", - "--sort-cumtime", - "-p", - "12345", - ], - "sort", - ), - ( - [ - "profiling.sampling.sample", - "--collapsed", - "--sort-sample-pct", - "-p", - "12345", - ], - "sort", - ), - ( - [ - "profiling.sampling.sample", - "--collapsed", - "--sort-cumul-pct", - "-p", - "12345", - ], - "sort", - ), - ( - ["profiling.sampling.sample", "--collapsed", "--sort-name", "-p", "12345"], - "sort", - ), - # Test limit option is invalid with collapsed - (["profiling.sampling.sample", "--collapsed", "-l", "20", "-p", "12345"], "limit"), - ( - ["profiling.sampling.sample", "--collapsed", "--limit", "20", "-p", "12345"], - 
"limit", - ), - # Test no-summary option is invalid with collapsed - ( - ["profiling.sampling.sample", "--collapsed", "--no-summary", "-p", "12345"], - "summary", - ), - ] - - for test_args, expected_error_keyword in test_cases: - with ( - mock.patch("sys.argv", test_args), - mock.patch("sys.stderr", io.StringIO()) as mock_stderr, - self.assertRaises(SystemExit) as cm, - ): - profiling.sampling.sample.main() - - self.assertEqual(cm.exception.code, 2) # argparse error code - error_msg = mock_stderr.getvalue() - self.assertIn("error:", error_msg) - self.assertIn("--pstats format", error_msg) - - def test_cli_default_collapsed_filename(self): - """Test that collapsed format gets a default filename when not specified.""" - test_args = ["profiling.sampling.sample", "--collapsed", "-p", "12345"] - - with ( - mock.patch("sys.argv", test_args), - mock.patch("profiling.sampling.sample.sample") as mock_sample, - ): - profiling.sampling.sample.main() - - # Check that filename was set to default collapsed format - mock_sample.assert_called_once() - call_args = mock_sample.call_args[1] - self.assertEqual(call_args["output_format"], "collapsed") - self.assertEqual(call_args["filename"], "collapsed.12345.txt") - - def test_cli_custom_output_filenames(self): - """Test custom output filenames for both formats.""" - test_cases = [ - ( - ["profiling.sampling.sample", "--pstats", "-o", "custom.pstats", "-p", "12345"], - "custom.pstats", - "pstats", - ), - ( - ["profiling.sampling.sample", "--collapsed", "-o", "custom.txt", "-p", "12345"], - "custom.txt", - "collapsed", - ), - ] - - for test_args, expected_filename, expected_format in test_cases: - with ( - mock.patch("sys.argv", test_args), - mock.patch("profiling.sampling.sample.sample") as mock_sample, - ): - profiling.sampling.sample.main() - - mock_sample.assert_called_once() - call_args = mock_sample.call_args[1] - self.assertEqual(call_args["filename"], expected_filename) - self.assertEqual(call_args["output_format"], 
expected_format) - - def test_cli_missing_required_arguments(self): - """Test that CLI requires PID argument.""" - with ( - mock.patch("sys.argv", ["profiling.sampling.sample"]), - mock.patch("sys.stderr", io.StringIO()), - ): - with self.assertRaises(SystemExit): - profiling.sampling.sample.main() - - def test_cli_mutually_exclusive_format_options(self): - """Test that pstats and collapsed options are mutually exclusive.""" - with ( - mock.patch( - "sys.argv", - ["profiling.sampling.sample", "--pstats", "--collapsed", "-p", "12345"], - ), - mock.patch("sys.stderr", io.StringIO()), - ): - with self.assertRaises(SystemExit): - profiling.sampling.sample.main() - - def test_argument_parsing_basic(self): - test_args = ["profiling.sampling.sample", "-p", "12345"] - - with ( - mock.patch("sys.argv", test_args), - mock.patch("profiling.sampling.sample.sample") as mock_sample, - ): - profiling.sampling.sample.main() - - mock_sample.assert_called_once_with( - 12345, - sample_interval_usec=100, - duration_sec=10, - filename=None, - all_threads=False, - limit=15, - sort=2, - show_summary=True, - output_format="pstats", - realtime_stats=False, - mode=0, - native=False, - gc=True, - ) - - def test_sort_options(self): - sort_options = [ - ("--sort-nsamples", 0), - ("--sort-tottime", 1), - ("--sort-cumtime", 2), - ("--sort-sample-pct", 3), - ("--sort-cumul-pct", 4), - ("--sort-name", -1), - ] - - for option, expected_sort_value in sort_options: - test_args = ["profiling.sampling.sample", option, "-p", "12345"] - - with ( - mock.patch("sys.argv", test_args), - mock.patch("profiling.sampling.sample.sample") as mock_sample, - ): - profiling.sampling.sample.main() - - mock_sample.assert_called_once() - call_args = mock_sample.call_args[1] - self.assertEqual( - call_args["sort"], - expected_sort_value, - ) - mock_sample.reset_mock() - - -class TestCpuModeFiltering(unittest.TestCase): - """Test CPU mode filtering functionality (--mode=cpu).""" - - def test_mode_validation(self): - 
"""Test that CLI validates mode choices correctly.""" - # Invalid mode choice should raise SystemExit - test_args = ["profiling.sampling.sample", "--mode", "invalid", "-p", "12345"] - - with ( - mock.patch("sys.argv", test_args), - mock.patch("sys.stderr", io.StringIO()) as mock_stderr, - self.assertRaises(SystemExit) as cm, - ): - profiling.sampling.sample.main() - - self.assertEqual(cm.exception.code, 2) # argparse error - error_msg = mock_stderr.getvalue() - self.assertIn("invalid choice", error_msg) - - def test_frames_filtered_with_skip_idle(self): - """Test that frames are actually filtered when skip_idle=True.""" - # Import thread status flags - try: - from _remote_debugging import THREAD_STATUS_HAS_GIL, THREAD_STATUS_ON_CPU - except ImportError: - THREAD_STATUS_HAS_GIL = (1 << 0) - THREAD_STATUS_ON_CPU = (1 << 1) - - # Create mock frames with different thread statuses - class MockThreadInfoWithStatus: - def __init__(self, thread_id, frame_info, status): - self.thread_id = thread_id - self.frame_info = frame_info - self.status = status - - # Create test data: active thread (HAS_GIL | ON_CPU), idle thread (neither), and another active thread - ACTIVE_STATUS = THREAD_STATUS_HAS_GIL | THREAD_STATUS_ON_CPU # Has GIL and on CPU - IDLE_STATUS = 0 # Neither has GIL nor on CPU - - test_frames = [ - MockInterpreterInfo(0, [ - MockThreadInfoWithStatus(1, [MockFrameInfo("active1.py", 10, "active_func1")], ACTIVE_STATUS), - MockThreadInfoWithStatus(2, [MockFrameInfo("idle.py", 20, "idle_func")], IDLE_STATUS), - MockThreadInfoWithStatus(3, [MockFrameInfo("active2.py", 30, "active_func2")], ACTIVE_STATUS), - ]) - ] - - # Test with skip_idle=True - should only process running threads - collector_skip = PstatsCollector(sample_interval_usec=1000, skip_idle=True) - collector_skip.collect(test_frames) - - # Should only have functions from running threads (status 0) - active1_key = ("active1.py", 10, "active_func1") - active2_key = ("active2.py", 30, "active_func2") - idle_key 
= ("idle.py", 20, "idle_func") - - self.assertIn(active1_key, collector_skip.result) - self.assertIn(active2_key, collector_skip.result) - self.assertNotIn(idle_key, collector_skip.result) # Idle thread should be filtered out - - # Test with skip_idle=False - should process all threads - collector_no_skip = PstatsCollector(sample_interval_usec=1000, skip_idle=False) - collector_no_skip.collect(test_frames) - - # Should have functions from all threads - self.assertIn(active1_key, collector_no_skip.result) - self.assertIn(active2_key, collector_no_skip.result) - self.assertIn(idle_key, collector_no_skip.result) # Idle thread should be included - - @requires_subprocess() - def test_cpu_mode_integration_filtering(self): - """Integration test: CPU mode should only capture active threads, not idle ones.""" - # Script with one mostly-idle thread and one CPU-active thread - cpu_vs_idle_script = ''' -import time -import threading - -cpu_ready = threading.Event() - -def idle_worker(): - time.sleep(999999) - -def cpu_active_worker(): - cpu_ready.set() - x = 1 - while True: - x += 1 - -def main(): - # Start both threads - idle_thread = threading.Thread(target=idle_worker) - cpu_thread = threading.Thread(target=cpu_active_worker) - idle_thread.start() - cpu_thread.start() - - # Wait for CPU thread to be running, then signal test - cpu_ready.wait() - _test_sock.sendall(b"threads_ready") - - idle_thread.join() - cpu_thread.join() - -main() - -''' - with test_subprocess(cpu_vs_idle_script) as subproc: - # Wait for signal that threads are running - response = subproc.socket.recv(1024) - self.assertEqual(response, b"threads_ready") - - with ( - io.StringIO() as captured_output, - mock.patch("sys.stdout", captured_output), - ): - try: - profiling.sampling.sample.sample( - subproc.process.pid, - duration_sec=2.0, - sample_interval_usec=5000, - mode=1, # CPU mode - show_summary=False, - all_threads=True, - ) - except (PermissionError, RuntimeError) as e: - self.skipTest("Insufficient 
permissions for remote profiling") - - cpu_mode_output = captured_output.getvalue() - - # Test wall-clock mode (mode=0) - should capture both functions - with ( - io.StringIO() as captured_output, - mock.patch("sys.stdout", captured_output), - ): - try: - profiling.sampling.sample.sample( - subproc.process.pid, - duration_sec=2.0, - sample_interval_usec=5000, - mode=0, # Wall-clock mode - show_summary=False, - all_threads=True, - ) - except (PermissionError, RuntimeError) as e: - self.skipTest("Insufficient permissions for remote profiling") - - wall_mode_output = captured_output.getvalue() - - # Verify both modes captured samples - self.assertIn("Captured", cpu_mode_output) - self.assertIn("samples", cpu_mode_output) - self.assertIn("Captured", wall_mode_output) - self.assertIn("samples", wall_mode_output) - - # CPU mode should strongly favor cpu_active_worker over mostly_idle_worker - self.assertIn("cpu_active_worker", cpu_mode_output) - self.assertNotIn("idle_worker", cpu_mode_output) - - # Wall-clock mode should capture both types of work - self.assertIn("cpu_active_worker", wall_mode_output) - self.assertIn("idle_worker", wall_mode_output) - - def test_cpu_mode_with_no_samples(self): - """Test that CPU mode handles no samples gracefully when no samples are collected.""" - # Mock a collector that returns empty stats - mock_collector = mock.MagicMock() - mock_collector.stats = {} - mock_collector.create_stats = mock.MagicMock() - - with ( - io.StringIO() as captured_output, - mock.patch("sys.stdout", captured_output), - mock.patch("profiling.sampling.sample.PstatsCollector", return_value=mock_collector), - mock.patch("profiling.sampling.sample.SampleProfiler") as mock_profiler_class, - ): - mock_profiler = mock.MagicMock() - mock_profiler_class.return_value = mock_profiler - - profiling.sampling.sample.sample( - 12345, # dummy PID - duration_sec=0.5, - sample_interval_usec=5000, - mode=1, # CPU mode - show_summary=False, - all_threads=True, - ) - - output = 
captured_output.getvalue() - - # Should see the "No samples were collected" message - self.assertIn("No samples were collected", output) - self.assertIn("CPU mode", output) - - -class TestGilModeFiltering(unittest.TestCase): - """Test GIL mode filtering functionality (--mode=gil).""" - - def test_gil_mode_validation(self): - """Test that CLI accepts gil mode choice correctly.""" - test_args = ["profiling.sampling.sample", "--mode", "gil", "-p", "12345"] - - with ( - mock.patch("sys.argv", test_args), - mock.patch("profiling.sampling.sample.sample") as mock_sample, - ): - try: - profiling.sampling.sample.main() - except SystemExit: - pass # Expected due to invalid PID - - # Should have attempted to call sample with mode=2 (GIL mode) - mock_sample.assert_called_once() - call_args = mock_sample.call_args[1] - self.assertEqual(call_args["mode"], 2) # PROFILING_MODE_GIL - - def test_gil_mode_sample_function_call(self): - """Test that sample() function correctly uses GIL mode.""" - with ( - mock.patch("profiling.sampling.sample.SampleProfiler") as mock_profiler, - mock.patch("profiling.sampling.sample.PstatsCollector") as mock_collector, - ): - # Mock the profiler instance - mock_instance = mock.Mock() - mock_profiler.return_value = mock_instance - - # Mock the collector instance - mock_collector_instance = mock.Mock() - mock_collector.return_value = mock_collector_instance - - # Call sample with GIL mode and a filename to avoid pstats creation - profiling.sampling.sample.sample( - 12345, - mode=2, # PROFILING_MODE_GIL - duration_sec=1, - sample_interval_usec=1000, - filename="test_output.txt", - ) - - # Verify SampleProfiler was created with correct mode - mock_profiler.assert_called_once() - call_args = mock_profiler.call_args - self.assertEqual(call_args[1]['mode'], 2) # mode parameter - - # Verify profiler.sample was called - mock_instance.sample.assert_called_once() - - # Verify collector.export was called since we provided a filename - 
mock_collector_instance.export.assert_called_once_with("test_output.txt") - - def test_gil_mode_collector_configuration(self): - """Test that collectors are configured correctly for GIL mode.""" - with ( - mock.patch("profiling.sampling.sample.SampleProfiler") as mock_profiler, - mock.patch("profiling.sampling.sample.PstatsCollector") as mock_collector, - captured_stdout(), captured_stderr() - ): - # Mock the profiler instance - mock_instance = mock.Mock() - mock_profiler.return_value = mock_instance - - # Call sample with GIL mode - profiling.sampling.sample.sample( - 12345, - mode=2, # PROFILING_MODE_GIL - output_format="pstats", - ) - - # Verify collector was created with skip_idle=True (since mode != WALL) - mock_collector.assert_called_once() - call_args = mock_collector.call_args[1] - self.assertTrue(call_args['skip_idle']) - - def test_gil_mode_with_collapsed_format(self): - """Test GIL mode with collapsed stack format.""" - with ( - mock.patch("profiling.sampling.sample.SampleProfiler") as mock_profiler, - mock.patch("profiling.sampling.sample.CollapsedStackCollector") as mock_collector, - ): - # Mock the profiler instance - mock_instance = mock.Mock() - mock_profiler.return_value = mock_instance - - # Call sample with GIL mode and collapsed format - profiling.sampling.sample.sample( - 12345, - mode=2, # PROFILING_MODE_GIL - output_format="collapsed", - filename="test_output.txt", - ) - - # Verify collector was created with skip_idle=True - mock_collector.assert_called_once() - call_args = mock_collector.call_args[1] - self.assertTrue(call_args['skip_idle']) - - def test_gil_mode_cli_argument_parsing(self): - """Test CLI argument parsing for GIL mode with various options.""" - test_args = [ - "profiling.sampling.sample", - "--mode", "gil", - "--interval", "500", - "--duration", "5", - "-p", "12345" - ] - - with ( - mock.patch("sys.argv", test_args), - mock.patch("profiling.sampling.sample.sample") as mock_sample, - ): - try: - 
profiling.sampling.sample.main() - except SystemExit: - pass # Expected due to invalid PID - - # Verify all arguments were parsed correctly - mock_sample.assert_called_once() - call_args = mock_sample.call_args[1] - self.assertEqual(call_args["mode"], 2) # GIL mode - self.assertEqual(call_args["sample_interval_usec"], 500) - self.assertEqual(call_args["duration_sec"], 5) - - @requires_subprocess() - def test_gil_mode_integration_behavior(self): - """Integration test: GIL mode should capture GIL-holding threads.""" - # Create a test script with GIL-releasing operations - gil_test_script = ''' -import time -import threading - -gil_ready = threading.Event() - -def gil_releasing_work(): - time.sleep(999999) - -def gil_holding_work(): - gil_ready.set() - x = 1 - while True: - x += 1 - -def main(): - # Start both threads - idle_thread = threading.Thread(target=gil_releasing_work) - cpu_thread = threading.Thread(target=gil_holding_work) - idle_thread.start() - cpu_thread.start() - - # Wait for GIL-holding thread to be running, then signal test - gil_ready.wait() - _test_sock.sendall(b"threads_ready") - - idle_thread.join() - cpu_thread.join() - -main() -''' - with test_subprocess(gil_test_script) as subproc: - # Wait for signal that threads are running - response = subproc.socket.recv(1024) - self.assertEqual(response, b"threads_ready") - - with ( - io.StringIO() as captured_output, - mock.patch("sys.stdout", captured_output), - ): - try: - profiling.sampling.sample.sample( - subproc.process.pid, - duration_sec=2.0, - sample_interval_usec=5000, - mode=2, # GIL mode - show_summary=False, - all_threads=True, - ) - except (PermissionError, RuntimeError) as e: - self.skipTest("Insufficient permissions for remote profiling") - - gil_mode_output = captured_output.getvalue() - - # Test wall-clock mode for comparison - with ( - io.StringIO() as captured_output, - mock.patch("sys.stdout", captured_output), - ): - try: - profiling.sampling.sample.sample( - subproc.process.pid, - 
duration_sec=0.5, - sample_interval_usec=5000, - mode=0, # Wall-clock mode - show_summary=False, - all_threads=True, - ) - except (PermissionError, RuntimeError) as e: - self.skipTest("Insufficient permissions for remote profiling") - - wall_mode_output = captured_output.getvalue() - - # GIL mode should primarily capture GIL-holding work - # (Note: actual behavior depends on threading implementation) - self.assertIn("gil_holding_work", gil_mode_output) - - # Wall-clock mode should capture both types of work - self.assertIn("gil_holding_work", wall_mode_output) - - def test_mode_constants_are_defined(self): - """Test that all profiling mode constants are properly defined.""" - self.assertEqual(profiling.sampling.sample.PROFILING_MODE_WALL, 0) - self.assertEqual(profiling.sampling.sample.PROFILING_MODE_CPU, 1) - self.assertEqual(profiling.sampling.sample.PROFILING_MODE_GIL, 2) - - def test_parse_mode_function(self): - """Test the _parse_mode function with all valid modes.""" - self.assertEqual(profiling.sampling.sample._parse_mode("wall"), 0) - self.assertEqual(profiling.sampling.sample._parse_mode("cpu"), 1) - self.assertEqual(profiling.sampling.sample._parse_mode("gil"), 2) - - # Test invalid mode raises KeyError - with self.assertRaises(KeyError): - profiling.sampling.sample._parse_mode("invalid") - - -@requires_subprocess() -@skip_if_not_supported -class TestGCFrameTracking(unittest.TestCase): - """Tests for GC frame tracking in the sampling profiler.""" - - @classmethod - def setUpClass(cls): - """Create a static test script with GC frames and CPU-intensive work.""" - cls.gc_test_script = ''' -import gc - -class ExpensiveGarbage: - """Class that triggers GC with expensive finalizer (callback).""" - def __init__(self): - self.cycle = self - - def __del__(self): - # CPU-intensive work in the finalizer callback - result = 0 - for i in range(100000): - result += i * i - if i % 1000 == 0: - result = result % 1000000 - -def main_loop(): - """Main loop that triggers GC 
with expensive callback.""" - while True: - ExpensiveGarbage() - gc.collect() - -if __name__ == "__main__": - main_loop() -''' - - def test_gc_frames_enabled(self): - """Test that GC frames appear when gc tracking is enabled.""" - with ( - test_subprocess(self.gc_test_script) as subproc, - io.StringIO() as captured_output, - mock.patch("sys.stdout", captured_output), - ): - try: - profiling.sampling.sample.sample( - subproc.process.pid, - duration_sec=1, - sample_interval_usec=5000, - show_summary=False, - native=False, - gc=True, - ) - except PermissionError: - self.skipTest("Insufficient permissions for remote profiling") - - output = captured_output.getvalue() - - # Should capture samples - self.assertIn("Captured", output) - self.assertIn("samples", output) - - # GC frames should be present - self.assertIn("", output) - - def test_gc_frames_disabled(self): - """Test that GC frames do not appear when gc tracking is disabled.""" - with ( - test_subprocess(self.gc_test_script) as subproc, - io.StringIO() as captured_output, - mock.patch("sys.stdout", captured_output), - ): - try: - profiling.sampling.sample.sample( - subproc.process.pid, - duration_sec=1, - sample_interval_usec=5000, - show_summary=False, - native=False, - gc=False, - ) - except PermissionError: - self.skipTest("Insufficient permissions for remote profiling") - - output = captured_output.getvalue() - - # Should capture samples - self.assertIn("Captured", output) - self.assertIn("samples", output) - - # GC frames should NOT be present - self.assertNotIn("", output) - - -@requires_subprocess() -@skip_if_not_supported -class TestNativeFrameTracking(unittest.TestCase): - """Tests for native frame tracking in the sampling profiler.""" - - @classmethod - def setUpClass(cls): - """Create a static test script with native frames and CPU-intensive work.""" - cls.native_test_script = ''' -import operator - -def main_loop(): - while True: - # Native code in the middle of the stack: - operator.call(inner) - 
-def inner(): - # Python code at the top of the stack: - for _ in range(1_000_0000): - pass - -if __name__ == "__main__": - main_loop() -''' - - def test_native_frames_enabled(self): - """Test that native frames appear when native tracking is enabled.""" - collapsed_file = tempfile.NamedTemporaryFile( - suffix=".txt", delete=False - ) - self.addCleanup(close_and_unlink, collapsed_file) - - with ( - test_subprocess(self.native_test_script) as subproc, - ): - # Suppress profiler output when testing file export - with ( - io.StringIO() as captured_output, - mock.patch("sys.stdout", captured_output), - ): - try: - profiling.sampling.sample.sample( - subproc.process.pid, - duration_sec=1, - filename=collapsed_file.name, - output_format="collapsed", - sample_interval_usec=1000, - native=True, - ) - except PermissionError: - self.skipTest("Insufficient permissions for remote profiling") - - # Verify file was created and contains valid data - self.assertTrue(os.path.exists(collapsed_file.name)) - self.assertGreater(os.path.getsize(collapsed_file.name), 0) - - # Check file format - with open(collapsed_file.name, "r") as f: - content = f.read() - - lines = content.strip().split("\n") - self.assertGreater(len(lines), 0) - - stacks = [line.rsplit(" ", 1)[0] for line in lines] - - # Most samples should have native code in the middle of the stack: - self.assertTrue(any(";;" in stack for stack in stacks)) - - # No samples should have native code at the top of the stack: - self.assertFalse(any(stack.endswith(";") for stack in stacks)) - - def test_native_frames_disabled(self): - """Test that native frames do not appear when native tracking is disabled.""" - with ( - test_subprocess(self.native_test_script) as subproc, - io.StringIO() as captured_output, - mock.patch("sys.stdout", captured_output), - ): - try: - profiling.sampling.sample.sample( - subproc.process.pid, - duration_sec=1, - sample_interval_usec=5000, - show_summary=False, - ) - except PermissionError: - 
self.skipTest("Insufficient permissions for remote profiling") - output = captured_output.getvalue() - # Native frames should NOT be present: - self.assertNotIn("", output) - - -@requires_subprocess() -@skip_if_not_supported -class TestProcessPoolExecutorSupport(unittest.TestCase): - """ - Test that ProcessPoolExecutor works correctly with profiling.sampling. - """ - - def test_process_pool_executor_pickle(self): - # gh-140729: test use ProcessPoolExecutor.map() can sampling - test_script = ''' -import concurrent.futures - -def worker(x): - return x * 2 - -if __name__ == "__main__": - with concurrent.futures.ProcessPoolExecutor() as executor: - results = list(executor.map(worker, [1, 2, 3])) - print(f"Results: {results}") -''' - with os_helper.temp_dir() as temp_dir: - script = script_helper.make_script( - temp_dir, 'test_process_pool_executor_pickle', test_script - ) - with SuppressCrashReport(): - with script_helper.spawn_python( - "-m", "profiling.sampling.sample", - "-d", "5", - "-i", "100000", - script, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - text=True - ) as proc: - try: - stdout, stderr = proc.communicate(timeout=SHORT_TIMEOUT) - except subprocess.TimeoutExpired: - proc.kill() - stdout, stderr = proc.communicate() - - if "PermissionError" in stderr: - self.skipTest("Insufficient permissions for remote profiling") - - self.assertIn("Results: [2, 4, 6]", stdout) - self.assertNotIn("Can't pickle", stderr) -if __name__ == "__main__": - unittest.main() diff --git a/Lib/test/test_profiling/test_sampling_profiler/__init__.py b/Lib/test/test_profiling/test_sampling_profiler/__init__.py new file mode 100644 index 00000000000000..616ae5b49f03ef --- /dev/null +++ b/Lib/test/test_profiling/test_sampling_profiler/__init__.py @@ -0,0 +1,9 @@ +"""Tests for the sampling profiler (profiling.sampling).""" + +import os +from test.support import load_package_tests + + +def load_tests(*args): + """Load all tests from this subpackage.""" + return 
load_package_tests(os.path.dirname(__file__), *args) diff --git a/Lib/test/test_profiling/test_sampling_profiler/helpers.py b/Lib/test/test_profiling/test_sampling_profiler/helpers.py new file mode 100644 index 00000000000000..abd5a7377ad68e --- /dev/null +++ b/Lib/test/test_profiling/test_sampling_profiler/helpers.py @@ -0,0 +1,101 @@ +"""Helper utilities for sampling profiler tests.""" + +import contextlib +import socket +import subprocess +import sys +import unittest +from collections import namedtuple + +from test.support import SHORT_TIMEOUT +from test.support.socket_helper import find_unused_port +from test.support.os_helper import unlink + + +PROCESS_VM_READV_SUPPORTED = False + +try: + from _remote_debugging import PROCESS_VM_READV_SUPPORTED # noqa: F401 + import _remote_debugging # noqa: F401 +except ImportError: + raise unittest.SkipTest( + "Test only runs when _remote_debugging is available" + ) +else: + import profiling.sampling # noqa: F401 + from profiling.sampling.sample import SampleProfiler # noqa: F401 + + +skip_if_not_supported = unittest.skipIf( + ( + sys.platform != "darwin" + and sys.platform != "linux" + and sys.platform != "win32" + ), + "Test only runs on Linux, Windows and MacOS", +) + +SubprocessInfo = namedtuple("SubprocessInfo", ["process", "socket"]) + + +@contextlib.contextmanager +def test_subprocess(script): + """Context manager to create a test subprocess with socket synchronization. 
+ + Args: + script: Python code to execute in the subprocess + + Yields: + SubprocessInfo: Named tuple with process and socket objects + """ + # Find an unused port for socket communication + port = find_unused_port() + + # Inject socket connection code at the beginning of the script + socket_code = f""" +import socket +_test_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) +_test_sock.connect(('localhost', {port})) +_test_sock.sendall(b"ready") +""" + + # Combine socket code with user script + full_script = socket_code + script + + # Create server socket to wait for process to be ready + server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + server_socket.bind(("localhost", port)) + server_socket.settimeout(SHORT_TIMEOUT) + server_socket.listen(1) + + proc = subprocess.Popen( + [sys.executable, "-c", full_script], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + + client_socket = None + try: + # Wait for process to connect and send ready signal + client_socket, _ = server_socket.accept() + server_socket.close() + response = client_socket.recv(1024) + if response != b"ready": + raise RuntimeError( + f"Unexpected response from subprocess: {response}" + ) + + yield SubprocessInfo(proc, client_socket) + finally: + if client_socket is not None: + client_socket.close() + if proc.poll() is None: + proc.kill() + proc.wait() + + +def close_and_unlink(file): + """Close a file and unlink it from the filesystem.""" + file.close() + unlink(file.name) diff --git a/Lib/test/test_profiling/test_sampling_profiler/mocks.py b/Lib/test/test_profiling/test_sampling_profiler/mocks.py new file mode 100644 index 00000000000000..9f1cd5b83e0856 --- /dev/null +++ b/Lib/test/test_profiling/test_sampling_profiler/mocks.py @@ -0,0 +1,38 @@ +"""Mock classes for sampling profiler tests.""" + + +class MockFrameInfo: + """Mock FrameInfo for testing since the real one isn't accessible.""" + + 
def __init__(self, filename, lineno, funcname): + self.filename = filename + self.lineno = lineno + self.funcname = funcname + + def __repr__(self): + return f"MockFrameInfo(filename='{self.filename}', lineno={self.lineno}, funcname='{self.funcname}')" + + +class MockThreadInfo: + """Mock ThreadInfo for testing since the real one isn't accessible.""" + + def __init__( + self, thread_id, frame_info, status=0 + ): # Default to THREAD_STATE_RUNNING (0) + self.thread_id = thread_id + self.frame_info = frame_info + self.status = status + + def __repr__(self): + return f"MockThreadInfo(thread_id={self.thread_id}, frame_info={self.frame_info}, status={self.status})" + + +class MockInterpreterInfo: + """Mock InterpreterInfo for testing since the real one isn't accessible.""" + + def __init__(self, interpreter_id, threads): + self.interpreter_id = interpreter_id + self.threads = threads + + def __repr__(self): + return f"MockInterpreterInfo(interpreter_id={self.interpreter_id}, threads={self.threads})" diff --git a/Lib/test/test_profiling/test_sampling_profiler/test_advanced.py b/Lib/test/test_profiling/test_sampling_profiler/test_advanced.py new file mode 100644 index 00000000000000..578fb51bc0c9ef --- /dev/null +++ b/Lib/test/test_profiling/test_sampling_profiler/test_advanced.py @@ -0,0 +1,264 @@ +"""Tests for advanced sampling profiler features (GC tracking, native frames, ProcessPoolExecutor support).""" + +import io +import os +import subprocess +import tempfile +import unittest +from unittest import mock + +try: + import _remote_debugging # noqa: F401 + import profiling.sampling + import profiling.sampling.sample +except ImportError: + raise unittest.SkipTest( + "Test only runs when _remote_debugging is available" + ) + +from test.support import ( + SHORT_TIMEOUT, + SuppressCrashReport, + os_helper, + requires_subprocess, + script_helper, +) + +from .helpers import close_and_unlink, skip_if_not_supported, test_subprocess + + +@requires_subprocess() 
+@skip_if_not_supported +class TestGCFrameTracking(unittest.TestCase): + """Tests for GC frame tracking in the sampling profiler.""" + + @classmethod + def setUpClass(cls): + """Create a static test script with GC frames and CPU-intensive work.""" + cls.gc_test_script = ''' +import gc + +class ExpensiveGarbage: + """Class that triggers GC with expensive finalizer (callback).""" + def __init__(self): + self.cycle = self + + def __del__(self): + # CPU-intensive work in the finalizer callback + result = 0 + for i in range(100000): + result += i * i + if i % 1000 == 0: + result = result % 1000000 + +def main_loop(): + """Main loop that triggers GC with expensive callback.""" + while True: + ExpensiveGarbage() + gc.collect() + +if __name__ == "__main__": + main_loop() +''' + + def test_gc_frames_enabled(self): + """Test that GC frames appear when gc tracking is enabled.""" + with ( + test_subprocess(self.gc_test_script) as subproc, + io.StringIO() as captured_output, + mock.patch("sys.stdout", captured_output), + ): + try: + profiling.sampling.sample.sample( + subproc.process.pid, + duration_sec=1, + sample_interval_usec=5000, + show_summary=False, + native=False, + gc=True, + ) + except PermissionError: + self.skipTest("Insufficient permissions for remote profiling") + + output = captured_output.getvalue() + + # Should capture samples + self.assertIn("Captured", output) + self.assertIn("samples", output) + + # GC frames should be present + self.assertIn("", output) + + def test_gc_frames_disabled(self): + """Test that GC frames do not appear when gc tracking is disabled.""" + with ( + test_subprocess(self.gc_test_script) as subproc, + io.StringIO() as captured_output, + mock.patch("sys.stdout", captured_output), + ): + try: + profiling.sampling.sample.sample( + subproc.process.pid, + duration_sec=1, + sample_interval_usec=5000, + show_summary=False, + native=False, + gc=False, + ) + except PermissionError: + self.skipTest("Insufficient permissions for remote 
profiling") + + output = captured_output.getvalue() + + # Should capture samples + self.assertIn("Captured", output) + self.assertIn("samples", output) + + # GC frames should NOT be present + self.assertNotIn("", output) + + +@requires_subprocess() +@skip_if_not_supported +class TestNativeFrameTracking(unittest.TestCase): + """Tests for native frame tracking in the sampling profiler.""" + + @classmethod + def setUpClass(cls): + """Create a static test script with native frames and CPU-intensive work.""" + cls.native_test_script = """ +import operator + +def main_loop(): + while True: + # Native code in the middle of the stack: + operator.call(inner) + +def inner(): + # Python code at the top of the stack: + for _ in range(1_000_0000): + pass + +if __name__ == "__main__": + main_loop() +""" + + def test_native_frames_enabled(self): + """Test that native frames appear when native tracking is enabled.""" + collapsed_file = tempfile.NamedTemporaryFile( + suffix=".txt", delete=False + ) + self.addCleanup(close_and_unlink, collapsed_file) + + with ( + test_subprocess(self.native_test_script) as subproc, + ): + # Suppress profiler output when testing file export + with ( + io.StringIO() as captured_output, + mock.patch("sys.stdout", captured_output), + ): + try: + profiling.sampling.sample.sample( + subproc.process.pid, + duration_sec=1, + filename=collapsed_file.name, + output_format="collapsed", + sample_interval_usec=1000, + native=True, + ) + except PermissionError: + self.skipTest( + "Insufficient permissions for remote profiling" + ) + + # Verify file was created and contains valid data + self.assertTrue(os.path.exists(collapsed_file.name)) + self.assertGreater(os.path.getsize(collapsed_file.name), 0) + + # Check file format + with open(collapsed_file.name, "r") as f: + content = f.read() + + lines = content.strip().split("\n") + self.assertGreater(len(lines), 0) + + stacks = [line.rsplit(" ", 1)[0] for line in lines] + + # Most samples should have native code in 
the middle of the stack: + self.assertTrue(any(";;" in stack for stack in stacks)) + + # No samples should have native code at the top of the stack: + self.assertFalse(any(stack.endswith(";") for stack in stacks)) + + def test_native_frames_disabled(self): + """Test that native frames do not appear when native tracking is disabled.""" + with ( + test_subprocess(self.native_test_script) as subproc, + io.StringIO() as captured_output, + mock.patch("sys.stdout", captured_output), + ): + try: + profiling.sampling.sample.sample( + subproc.process.pid, + duration_sec=1, + sample_interval_usec=5000, + show_summary=False, + ) + except PermissionError: + self.skipTest("Insufficient permissions for remote profiling") + output = captured_output.getvalue() + # Native frames should NOT be present: + self.assertNotIn("", output) + + +@requires_subprocess() +@skip_if_not_supported +class TestProcessPoolExecutorSupport(unittest.TestCase): + """ + Test that ProcessPoolExecutor works correctly with profiling.sampling. 
+ """ + + def test_process_pool_executor_pickle(self): + # gh-140729: test use ProcessPoolExecutor.map() can sampling + test_script = """ +import concurrent.futures + +def worker(x): + return x * 2 + +if __name__ == "__main__": + with concurrent.futures.ProcessPoolExecutor() as executor: + results = list(executor.map(worker, [1, 2, 3])) + print(f"Results: {results}") +""" + with os_helper.temp_dir() as temp_dir: + script = script_helper.make_script( + temp_dir, "test_process_pool_executor_pickle", test_script + ) + with SuppressCrashReport(): + with script_helper.spawn_python( + "-m", + "profiling.sampling.sample", + "-d", + "5", + "-i", + "100000", + script, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True, + ) as proc: + try: + stdout, stderr = proc.communicate( + timeout=SHORT_TIMEOUT + ) + except subprocess.TimeoutExpired: + proc.kill() + stdout, stderr = proc.communicate() + + if "PermissionError" in stderr: + self.skipTest("Insufficient permissions for remote profiling") + + self.assertIn("Results: [2, 4, 6]", stdout) + self.assertNotIn("Can't pickle", stderr) diff --git a/Lib/test/test_profiling/test_sampling_profiler/test_cli.py b/Lib/test/test_profiling/test_sampling_profiler/test_cli.py new file mode 100644 index 00000000000000..5833920d1b96f3 --- /dev/null +++ b/Lib/test/test_profiling/test_sampling_profiler/test_cli.py @@ -0,0 +1,664 @@ +"""Tests for sampling profiler CLI argument parsing and functionality.""" + +import io +import subprocess +import sys +import unittest +from unittest import mock + +try: + import _remote_debugging # noqa: F401 + import profiling.sampling + import profiling.sampling.sample +except ImportError: + raise unittest.SkipTest( + "Test only runs when _remote_debugging is available" + ) + +from test.support import is_emscripten + + +class TestSampleProfilerCLI(unittest.TestCase): + def _setup_sync_mocks(self, mock_socket, mock_popen): + """Helper to set up socket and process mocks for coordinator tests.""" + # Mock 
the sync socket with context manager support + mock_sock_instance = mock.MagicMock() + mock_sock_instance.getsockname.return_value = ("127.0.0.1", 12345) + + # Mock the connection with context manager support + mock_conn = mock.MagicMock() + mock_conn.recv.return_value = b"ready" + mock_conn.__enter__.return_value = mock_conn + mock_conn.__exit__.return_value = None + + # Mock accept() to return (connection, address) and support indexing + mock_accept_result = mock.MagicMock() + mock_accept_result.__getitem__.return_value = ( + mock_conn # [0] returns the connection + ) + mock_sock_instance.accept.return_value = mock_accept_result + + # Mock socket with context manager support + mock_sock_instance.__enter__.return_value = mock_sock_instance + mock_sock_instance.__exit__.return_value = None + mock_socket.return_value = mock_sock_instance + + # Mock the subprocess + mock_process = mock.MagicMock() + mock_process.pid = 12345 + mock_process.poll.return_value = None + mock_popen.return_value = mock_process + return mock_process + + def _verify_coordinator_command(self, mock_popen, expected_target_args): + """Helper to verify the coordinator command was called correctly.""" + args, kwargs = mock_popen.call_args + coordinator_cmd = args[0] + self.assertEqual(coordinator_cmd[0], sys.executable) + self.assertEqual(coordinator_cmd[1], "-m") + self.assertEqual( + coordinator_cmd[2], "profiling.sampling._sync_coordinator" + ) + self.assertEqual(coordinator_cmd[3], "12345") # port + # cwd is coordinator_cmd[4] + self.assertEqual(coordinator_cmd[5:], expected_target_args) + + @unittest.skipIf(is_emscripten, "socket.SO_REUSEADDR does not exist") + def test_cli_module_argument_parsing(self): + test_args = ["profiling.sampling.sample", "-m", "mymodule"] + + with ( + mock.patch("sys.argv", test_args), + mock.patch("profiling.sampling.sample.sample") as mock_sample, + mock.patch("subprocess.Popen") as mock_popen, + mock.patch("socket.socket") as mock_socket, + ): + 
self._setup_sync_mocks(mock_socket, mock_popen) + profiling.sampling.sample.main() + + self._verify_coordinator_command(mock_popen, ("-m", "mymodule")) + mock_sample.assert_called_once_with( + 12345, + sort=2, # default sort (sort_value from args.sort) + sample_interval_usec=100, + duration_sec=10, + filename=None, + all_threads=False, + limit=15, + show_summary=True, + output_format="pstats", + realtime_stats=False, + mode=0, + native=False, + gc=True, + ) + + @unittest.skipIf(is_emscripten, "socket.SO_REUSEADDR does not exist") + def test_cli_module_with_arguments(self): + test_args = [ + "profiling.sampling.sample", + "-m", + "mymodule", + "arg1", + "arg2", + "--flag", + ] + + with ( + mock.patch("sys.argv", test_args), + mock.patch("profiling.sampling.sample.sample") as mock_sample, + mock.patch("subprocess.Popen") as mock_popen, + mock.patch("socket.socket") as mock_socket, + ): + self._setup_sync_mocks(mock_socket, mock_popen) + profiling.sampling.sample.main() + + self._verify_coordinator_command( + mock_popen, ("-m", "mymodule", "arg1", "arg2", "--flag") + ) + mock_sample.assert_called_once_with( + 12345, + sort=2, + sample_interval_usec=100, + duration_sec=10, + filename=None, + all_threads=False, + limit=15, + show_summary=True, + output_format="pstats", + realtime_stats=False, + mode=0, + native=False, + gc=True, + ) + + @unittest.skipIf(is_emscripten, "socket.SO_REUSEADDR does not exist") + def test_cli_script_argument_parsing(self): + test_args = ["profiling.sampling.sample", "myscript.py"] + + with ( + mock.patch("sys.argv", test_args), + mock.patch("profiling.sampling.sample.sample") as mock_sample, + mock.patch("subprocess.Popen") as mock_popen, + mock.patch("socket.socket") as mock_socket, + ): + self._setup_sync_mocks(mock_socket, mock_popen) + profiling.sampling.sample.main() + + self._verify_coordinator_command(mock_popen, ("myscript.py",)) + mock_sample.assert_called_once_with( + 12345, + sort=2, + sample_interval_usec=100, + duration_sec=10, + 
filename=None, + all_threads=False, + limit=15, + show_summary=True, + output_format="pstats", + realtime_stats=False, + mode=0, + native=False, + gc=True, + ) + + @unittest.skipIf(is_emscripten, "socket.SO_REUSEADDR does not exist") + def test_cli_script_with_arguments(self): + test_args = [ + "profiling.sampling.sample", + "myscript.py", + "arg1", + "arg2", + "--flag", + ] + + with ( + mock.patch("sys.argv", test_args), + mock.patch("profiling.sampling.sample.sample") as mock_sample, + mock.patch("subprocess.Popen") as mock_popen, + mock.patch("socket.socket") as mock_socket, + ): + # Use the helper to set up mocks consistently + mock_process = self._setup_sync_mocks(mock_socket, mock_popen) + # Override specific behavior for this test + mock_process.wait.side_effect = [ + subprocess.TimeoutExpired(test_args, 0.1), + None, + ] + + profiling.sampling.sample.main() + + # Verify the coordinator command was called + args, kwargs = mock_popen.call_args + coordinator_cmd = args[0] + self.assertEqual(coordinator_cmd[0], sys.executable) + self.assertEqual(coordinator_cmd[1], "-m") + self.assertEqual( + coordinator_cmd[2], "profiling.sampling._sync_coordinator" + ) + self.assertEqual(coordinator_cmd[3], "12345") # port + # cwd is coordinator_cmd[4] + self.assertEqual( + coordinator_cmd[5:], ("myscript.py", "arg1", "arg2", "--flag") + ) + + def test_cli_mutually_exclusive_pid_module(self): + test_args = [ + "profiling.sampling.sample", + "-p", + "12345", + "-m", + "mymodule", + ] + + with ( + mock.patch("sys.argv", test_args), + mock.patch("sys.stderr", io.StringIO()) as mock_stderr, + self.assertRaises(SystemExit) as cm, + ): + profiling.sampling.sample.main() + + self.assertEqual(cm.exception.code, 2) # argparse error + error_msg = mock_stderr.getvalue() + self.assertIn("not allowed with argument", error_msg) + + def test_cli_mutually_exclusive_pid_script(self): + test_args = ["profiling.sampling.sample", "-p", "12345", "myscript.py"] + + with ( + mock.patch("sys.argv", 
test_args), + mock.patch("sys.stderr", io.StringIO()) as mock_stderr, + self.assertRaises(SystemExit) as cm, + ): + profiling.sampling.sample.main() + + self.assertEqual(cm.exception.code, 2) # argparse error + error_msg = mock_stderr.getvalue() + self.assertIn("only one target type can be specified", error_msg) + + def test_cli_no_target_specified(self): + test_args = ["profiling.sampling.sample", "-d", "5"] + + with ( + mock.patch("sys.argv", test_args), + mock.patch("sys.stderr", io.StringIO()) as mock_stderr, + self.assertRaises(SystemExit) as cm, + ): + profiling.sampling.sample.main() + + self.assertEqual(cm.exception.code, 2) # argparse error + error_msg = mock_stderr.getvalue() + self.assertIn("one of the arguments", error_msg) + + @unittest.skipIf(is_emscripten, "socket.SO_REUSEADDR does not exist") + def test_cli_module_with_profiler_options(self): + test_args = [ + "profiling.sampling.sample", + "-i", + "1000", + "-d", + "30", + "-a", + "--sort-tottime", + "-l", + "20", + "-m", + "mymodule", + ] + + with ( + mock.patch("sys.argv", test_args), + mock.patch("profiling.sampling.sample.sample") as mock_sample, + mock.patch("subprocess.Popen") as mock_popen, + mock.patch("socket.socket") as mock_socket, + ): + self._setup_sync_mocks(mock_socket, mock_popen) + profiling.sampling.sample.main() + + self._verify_coordinator_command(mock_popen, ("-m", "mymodule")) + mock_sample.assert_called_once_with( + 12345, + sort=1, # sort-tottime + sample_interval_usec=1000, + duration_sec=30, + filename=None, + all_threads=True, + limit=20, + show_summary=True, + output_format="pstats", + realtime_stats=False, + mode=0, + native=False, + gc=True, + ) + + @unittest.skipIf(is_emscripten, "socket.SO_REUSEADDR does not exist") + def test_cli_script_with_profiler_options(self): + """Test script with various profiler options.""" + test_args = [ + "profiling.sampling.sample", + "-i", + "2000", + "-d", + "60", + "--collapsed", + "-o", + "output.txt", + "myscript.py", + "scriptarg", 
+ ] + + with ( + mock.patch("sys.argv", test_args), + mock.patch("profiling.sampling.sample.sample") as mock_sample, + mock.patch("subprocess.Popen") as mock_popen, + mock.patch("socket.socket") as mock_socket, + ): + self._setup_sync_mocks(mock_socket, mock_popen) + profiling.sampling.sample.main() + + self._verify_coordinator_command( + mock_popen, ("myscript.py", "scriptarg") + ) + # Verify profiler options were passed correctly + mock_sample.assert_called_once_with( + 12345, + sort=2, # default sort + sample_interval_usec=2000, + duration_sec=60, + filename="output.txt", + all_threads=False, + limit=15, + show_summary=True, + output_format="collapsed", + realtime_stats=False, + mode=0, + native=False, + gc=True, + ) + + def test_cli_empty_module_name(self): + test_args = ["profiling.sampling.sample", "-m"] + + with ( + mock.patch("sys.argv", test_args), + mock.patch("sys.stderr", io.StringIO()) as mock_stderr, + self.assertRaises(SystemExit) as cm, + ): + profiling.sampling.sample.main() + + self.assertEqual(cm.exception.code, 2) # argparse error + error_msg = mock_stderr.getvalue() + self.assertIn("argument -m/--module: expected one argument", error_msg) + + @unittest.skipIf(is_emscripten, "socket.SO_REUSEADDR does not exist") + def test_cli_long_module_option(self): + test_args = [ + "profiling.sampling.sample", + "--module", + "mymodule", + "arg1", + ] + + with ( + mock.patch("sys.argv", test_args), + mock.patch("profiling.sampling.sample.sample") as mock_sample, + mock.patch("subprocess.Popen") as mock_popen, + mock.patch("socket.socket") as mock_socket, + ): + self._setup_sync_mocks(mock_socket, mock_popen) + profiling.sampling.sample.main() + + self._verify_coordinator_command( + mock_popen, ("-m", "mymodule", "arg1") + ) + + def test_cli_complex_script_arguments(self): + test_args = [ + "profiling.sampling.sample", + "script.py", + "--input", + "file.txt", + "-v", + "--output=/tmp/out", + "positional", + ] + + with ( + mock.patch("sys.argv", test_args), 
+ mock.patch("profiling.sampling.sample.sample") as mock_sample, + mock.patch( + "profiling.sampling.sample._run_with_sync" + ) as mock_run_with_sync, + ): + mock_process = mock.MagicMock() + mock_process.pid = 12345 + mock_process.wait.side_effect = [ + subprocess.TimeoutExpired(test_args, 0.1), + None, + ] + mock_process.poll.return_value = None + mock_run_with_sync.return_value = mock_process + + profiling.sampling.sample.main() + + mock_run_with_sync.assert_called_once_with( + ( + sys.executable, + "script.py", + "--input", + "file.txt", + "-v", + "--output=/tmp/out", + "positional", + ) + ) + + def test_cli_collapsed_format_validation(self): + """Test that CLI properly validates incompatible options with collapsed format.""" + test_cases = [ + # Test sort options are invalid with collapsed + ( + [ + "profiling.sampling.sample", + "--collapsed", + "--sort-nsamples", + "-p", + "12345", + ], + "sort", + ), + ( + [ + "profiling.sampling.sample", + "--collapsed", + "--sort-tottime", + "-p", + "12345", + ], + "sort", + ), + ( + [ + "profiling.sampling.sample", + "--collapsed", + "--sort-cumtime", + "-p", + "12345", + ], + "sort", + ), + ( + [ + "profiling.sampling.sample", + "--collapsed", + "--sort-sample-pct", + "-p", + "12345", + ], + "sort", + ), + ( + [ + "profiling.sampling.sample", + "--collapsed", + "--sort-cumul-pct", + "-p", + "12345", + ], + "sort", + ), + ( + [ + "profiling.sampling.sample", + "--collapsed", + "--sort-name", + "-p", + "12345", + ], + "sort", + ), + # Test limit option is invalid with collapsed + ( + [ + "profiling.sampling.sample", + "--collapsed", + "-l", + "20", + "-p", + "12345", + ], + "limit", + ), + ( + [ + "profiling.sampling.sample", + "--collapsed", + "--limit", + "20", + "-p", + "12345", + ], + "limit", + ), + # Test no-summary option is invalid with collapsed + ( + [ + "profiling.sampling.sample", + "--collapsed", + "--no-summary", + "-p", + "12345", + ], + "summary", + ), + ] + + for test_args, expected_error_keyword in 
test_cases: + with ( + mock.patch("sys.argv", test_args), + mock.patch("sys.stderr", io.StringIO()) as mock_stderr, + self.assertRaises(SystemExit) as cm, + ): + profiling.sampling.sample.main() + + self.assertEqual(cm.exception.code, 2) # argparse error code + error_msg = mock_stderr.getvalue() + self.assertIn("error:", error_msg) + self.assertIn("--pstats format", error_msg) + + def test_cli_default_collapsed_filename(self): + """Test that collapsed format gets a default filename when not specified.""" + test_args = ["profiling.sampling.sample", "--collapsed", "-p", "12345"] + + with ( + mock.patch("sys.argv", test_args), + mock.patch("profiling.sampling.sample.sample") as mock_sample, + ): + profiling.sampling.sample.main() + + # Check that filename was set to default collapsed format + mock_sample.assert_called_once() + call_args = mock_sample.call_args[1] + self.assertEqual(call_args["output_format"], "collapsed") + self.assertEqual(call_args["filename"], "collapsed.12345.txt") + + def test_cli_custom_output_filenames(self): + """Test custom output filenames for both formats.""" + test_cases = [ + ( + [ + "profiling.sampling.sample", + "--pstats", + "-o", + "custom.pstats", + "-p", + "12345", + ], + "custom.pstats", + "pstats", + ), + ( + [ + "profiling.sampling.sample", + "--collapsed", + "-o", + "custom.txt", + "-p", + "12345", + ], + "custom.txt", + "collapsed", + ), + ] + + for test_args, expected_filename, expected_format in test_cases: + with ( + mock.patch("sys.argv", test_args), + mock.patch("profiling.sampling.sample.sample") as mock_sample, + ): + profiling.sampling.sample.main() + + mock_sample.assert_called_once() + call_args = mock_sample.call_args[1] + self.assertEqual(call_args["filename"], expected_filename) + self.assertEqual(call_args["output_format"], expected_format) + + def test_cli_missing_required_arguments(self): + """Test that CLI requires PID argument.""" + with ( + mock.patch("sys.argv", ["profiling.sampling.sample"]), + 
mock.patch("sys.stderr", io.StringIO()), + ): + with self.assertRaises(SystemExit): + profiling.sampling.sample.main() + + def test_cli_mutually_exclusive_format_options(self): + """Test that pstats and collapsed options are mutually exclusive.""" + with ( + mock.patch( + "sys.argv", + [ + "profiling.sampling.sample", + "--pstats", + "--collapsed", + "-p", + "12345", + ], + ), + mock.patch("sys.stderr", io.StringIO()), + ): + with self.assertRaises(SystemExit): + profiling.sampling.sample.main() + + def test_argument_parsing_basic(self): + test_args = ["profiling.sampling.sample", "-p", "12345"] + + with ( + mock.patch("sys.argv", test_args), + mock.patch("profiling.sampling.sample.sample") as mock_sample, + ): + profiling.sampling.sample.main() + + mock_sample.assert_called_once_with( + 12345, + sample_interval_usec=100, + duration_sec=10, + filename=None, + all_threads=False, + limit=15, + sort=2, + show_summary=True, + output_format="pstats", + realtime_stats=False, + mode=0, + native=False, + gc=True, + ) + + def test_sort_options(self): + sort_options = [ + ("--sort-nsamples", 0), + ("--sort-tottime", 1), + ("--sort-cumtime", 2), + ("--sort-sample-pct", 3), + ("--sort-cumul-pct", 4), + ("--sort-name", -1), + ] + + for option, expected_sort_value in sort_options: + test_args = ["profiling.sampling.sample", option, "-p", "12345"] + + with ( + mock.patch("sys.argv", test_args), + mock.patch("profiling.sampling.sample.sample") as mock_sample, + ): + profiling.sampling.sample.main() + + mock_sample.assert_called_once() + call_args = mock_sample.call_args[1] + self.assertEqual( + call_args["sort"], + expected_sort_value, + ) + mock_sample.reset_mock() diff --git a/Lib/test/test_profiling/test_sampling_profiler/test_collectors.py b/Lib/test/test_profiling/test_sampling_profiler/test_collectors.py new file mode 100644 index 00000000000000..4a24256203c187 --- /dev/null +++ b/Lib/test/test_profiling/test_sampling_profiler/test_collectors.py @@ -0,0 +1,896 @@ +"""Tests 
for sampling profiler collector components.""" + +import json +import marshal +import os +import tempfile +import unittest + +try: + import _remote_debugging # noqa: F401 + from profiling.sampling.pstats_collector import PstatsCollector + from profiling.sampling.stack_collector import ( + CollapsedStackCollector, + FlamegraphCollector, + ) + from profiling.sampling.gecko_collector import GeckoCollector +except ImportError: + raise unittest.SkipTest( + "Test only runs when _remote_debugging is available" + ) + +from test.support import captured_stdout, captured_stderr + +from .mocks import MockFrameInfo, MockThreadInfo, MockInterpreterInfo +from .helpers import close_and_unlink + + +class TestSampleProfilerComponents(unittest.TestCase): + """Unit tests for individual profiler components.""" + + def test_mock_frame_info_with_empty_and_unicode_values(self): + """Test MockFrameInfo handles empty strings, unicode characters, and very long names correctly.""" + # Test with empty strings + frame = MockFrameInfo("", 0, "") + self.assertEqual(frame.filename, "") + self.assertEqual(frame.lineno, 0) + self.assertEqual(frame.funcname, "") + self.assertIn("filename=''", repr(frame)) + + # Test with unicode characters + frame = MockFrameInfo("文件.py", 42, "函数名") + self.assertEqual(frame.filename, "文件.py") + self.assertEqual(frame.funcname, "函数名") + + # Test with very long names + long_filename = "x" * 1000 + ".py" + long_funcname = "func_" + "x" * 1000 + frame = MockFrameInfo(long_filename, 999999, long_funcname) + self.assertEqual(frame.filename, long_filename) + self.assertEqual(frame.lineno, 999999) + self.assertEqual(frame.funcname, long_funcname) + + def test_pstats_collector_with_extreme_intervals_and_empty_data(self): + """Test PstatsCollector handles zero/large intervals, empty frames, None thread IDs, and duplicate frames.""" + # Test with zero interval + collector = PstatsCollector(sample_interval_usec=0) + self.assertEqual(collector.sample_interval_usec, 0) + + # Test 
with very large interval + collector = PstatsCollector(sample_interval_usec=1000000000) + self.assertEqual(collector.sample_interval_usec, 1000000000) + + # Test collecting empty frames list + collector = PstatsCollector(sample_interval_usec=1000) + collector.collect([]) + self.assertEqual(len(collector.result), 0) + + # Test collecting frames with None thread id + test_frames = [ + MockInterpreterInfo( + 0, + [MockThreadInfo(None, [MockFrameInfo("file.py", 10, "func")])], + ) + ] + collector.collect(test_frames) + # Should still process the frames + self.assertEqual(len(collector.result), 1) + + # Test collecting duplicate frames in same sample + test_frames = [ + MockInterpreterInfo( + 0, # interpreter_id + [ + MockThreadInfo( + 1, + [ + MockFrameInfo("file.py", 10, "func1"), + MockFrameInfo("file.py", 10, "func1"), # Duplicate + ], + ) + ], + ) + ] + collector = PstatsCollector(sample_interval_usec=1000) + collector.collect(test_frames) + # Should count both occurrences + self.assertEqual( + collector.result[("file.py", 10, "func1")]["cumulative_calls"], 2 + ) + + def test_pstats_collector_single_frame_stacks(self): + """Test PstatsCollector with single-frame call stacks to trigger len(frames) <= 1 branch.""" + collector = PstatsCollector(sample_interval_usec=1000) + + # Test with exactly one frame (should trigger the <= 1 condition) + single_frame = [ + MockInterpreterInfo( + 0, + [ + MockThreadInfo( + 1, [MockFrameInfo("single.py", 10, "single_func")] + ) + ], + ) + ] + collector.collect(single_frame) + + # Should record the single frame with inline call + self.assertEqual(len(collector.result), 1) + single_key = ("single.py", 10, "single_func") + self.assertIn(single_key, collector.result) + self.assertEqual(collector.result[single_key]["direct_calls"], 1) + self.assertEqual(collector.result[single_key]["cumulative_calls"], 1) + + # Test with empty frames (should also trigger <= 1 condition) + empty_frames = [MockInterpreterInfo(0, [MockThreadInfo(1, [])])] + 
collector.collect(empty_frames) + + # Should not add any new entries + self.assertEqual( + len(collector.result), 1 + ) # Still just the single frame + + # Test mixed single and multi-frame stacks + mixed_frames = [ + MockInterpreterInfo( + 0, + [ + MockThreadInfo( + 1, + [MockFrameInfo("single2.py", 20, "single_func2")], + ), # Single frame + MockThreadInfo( + 2, + [ # Multi-frame stack + MockFrameInfo("multi.py", 30, "multi_func1"), + MockFrameInfo("multi.py", 40, "multi_func2"), + ], + ), + ], + ), + ] + collector.collect(mixed_frames) + + # Should have recorded all functions + self.assertEqual( + len(collector.result), 4 + ) # single + single2 + multi1 + multi2 + + # Verify single frame handling + single2_key = ("single2.py", 20, "single_func2") + self.assertIn(single2_key, collector.result) + self.assertEqual(collector.result[single2_key]["direct_calls"], 1) + self.assertEqual(collector.result[single2_key]["cumulative_calls"], 1) + + # Verify multi-frame handling still works + multi1_key = ("multi.py", 30, "multi_func1") + multi2_key = ("multi.py", 40, "multi_func2") + self.assertIn(multi1_key, collector.result) + self.assertIn(multi2_key, collector.result) + self.assertEqual(collector.result[multi1_key]["direct_calls"], 1) + self.assertEqual( + collector.result[multi2_key]["cumulative_calls"], 1 + ) # Called from multi1 + + def test_collapsed_stack_collector_with_empty_and_deep_stacks(self): + """Test CollapsedStackCollector handles empty frames, single-frame stacks, and very deep call stacks.""" + collector = CollapsedStackCollector() + + # Test with empty frames + collector.collect([]) + self.assertEqual(len(collector.stack_counter), 0) + + # Test with single frame stack + test_frames = [ + MockInterpreterInfo( + 0, [MockThreadInfo(1, [("file.py", 10, "func")])] + ) + ] + collector.collect(test_frames) + self.assertEqual(len(collector.stack_counter), 1) + (((path, thread_id), count),) = collector.stack_counter.items() + self.assertEqual(path, (("file.py", 
10, "func"),)) + self.assertEqual(thread_id, 1) + self.assertEqual(count, 1) + + # Test with very deep stack + deep_stack = [(f"file{i}.py", i, f"func{i}") for i in range(100)] + test_frames = [MockInterpreterInfo(0, [MockThreadInfo(1, deep_stack)])] + collector = CollapsedStackCollector() + collector.collect(test_frames) + # One aggregated path with 100 frames (reversed) + (((path_tuple, thread_id),),) = (collector.stack_counter.keys(),) + self.assertEqual(len(path_tuple), 100) + self.assertEqual(path_tuple[0], ("file99.py", 99, "func99")) + self.assertEqual(path_tuple[-1], ("file0.py", 0, "func0")) + self.assertEqual(thread_id, 1) + + def test_pstats_collector_basic(self): + """Test basic PstatsCollector functionality.""" + collector = PstatsCollector(sample_interval_usec=1000) + + # Test empty state + self.assertEqual(len(collector.result), 0) + self.assertEqual(len(collector.stats), 0) + + # Test collecting sample data + test_frames = [ + MockInterpreterInfo( + 0, + [ + MockThreadInfo( + 1, + [ + MockFrameInfo("file.py", 10, "func1"), + MockFrameInfo("file.py", 20, "func2"), + ], + ) + ], + ) + ] + collector.collect(test_frames) + + # Should have recorded calls for both functions + self.assertEqual(len(collector.result), 2) + self.assertIn(("file.py", 10, "func1"), collector.result) + self.assertIn(("file.py", 20, "func2"), collector.result) + + # Top-level function should have direct call + self.assertEqual( + collector.result[("file.py", 10, "func1")]["direct_calls"], 1 + ) + self.assertEqual( + collector.result[("file.py", 10, "func1")]["cumulative_calls"], 1 + ) + + # Calling function should have cumulative call but no direct calls + self.assertEqual( + collector.result[("file.py", 20, "func2")]["cumulative_calls"], 1 + ) + self.assertEqual( + collector.result[("file.py", 20, "func2")]["direct_calls"], 0 + ) + + def test_pstats_collector_create_stats(self): + """Test PstatsCollector stats creation.""" + collector = PstatsCollector( + 
sample_interval_usec=1000000 + ) # 1 second intervals + + test_frames = [ + MockInterpreterInfo( + 0, + [ + MockThreadInfo( + 1, + [ + MockFrameInfo("file.py", 10, "func1"), + MockFrameInfo("file.py", 20, "func2"), + ], + ) + ], + ) + ] + collector.collect(test_frames) + collector.collect(test_frames) # Collect twice + + collector.create_stats() + + # Check stats format: (direct_calls, cumulative_calls, tt, ct, callers) + func1_stats = collector.stats[("file.py", 10, "func1")] + self.assertEqual(func1_stats[0], 2) # direct_calls (top of stack) + self.assertEqual(func1_stats[1], 2) # cumulative_calls + self.assertEqual( + func1_stats[2], 2.0 + ) # tt (total time - 2 samples * 1 sec) + self.assertEqual(func1_stats[3], 2.0) # ct (cumulative time) + + func2_stats = collector.stats[("file.py", 20, "func2")] + self.assertEqual( + func2_stats[0], 0 + ) # direct_calls (never top of stack) + self.assertEqual( + func2_stats[1], 2 + ) # cumulative_calls (appears in stack) + self.assertEqual(func2_stats[2], 0.0) # tt (no direct calls) + self.assertEqual(func2_stats[3], 2.0) # ct (cumulative time) + + def test_collapsed_stack_collector_basic(self): + collector = CollapsedStackCollector() + + # Test empty state + self.assertEqual(len(collector.stack_counter), 0) + + # Test collecting sample data + test_frames = [ + MockInterpreterInfo( + 0, + [ + MockThreadInfo( + 1, [("file.py", 10, "func1"), ("file.py", 20, "func2")] + ) + ], + ) + ] + collector.collect(test_frames) + + # Should store one reversed path + self.assertEqual(len(collector.stack_counter), 1) + (((path, thread_id), count),) = collector.stack_counter.items() + expected_tree = (("file.py", 20, "func2"), ("file.py", 10, "func1")) + self.assertEqual(path, expected_tree) + self.assertEqual(thread_id, 1) + self.assertEqual(count, 1) + + def test_collapsed_stack_collector_export(self): + collapsed_out = tempfile.NamedTemporaryFile(delete=False) + self.addCleanup(close_and_unlink, collapsed_out) + + collector = 
CollapsedStackCollector() + + test_frames1 = [ + MockInterpreterInfo( + 0, + [ + MockThreadInfo( + 1, [("file.py", 10, "func1"), ("file.py", 20, "func2")] + ) + ], + ) + ] + test_frames2 = [ + MockInterpreterInfo( + 0, + [ + MockThreadInfo( + 1, [("file.py", 10, "func1"), ("file.py", 20, "func2")] + ) + ], + ) + ] # Same stack + test_frames3 = [ + MockInterpreterInfo( + 0, [MockThreadInfo(1, [("other.py", 5, "other_func")])] + ) + ] + + collector.collect(test_frames1) + collector.collect(test_frames2) + collector.collect(test_frames3) + + with captured_stdout(), captured_stderr(): + collector.export(collapsed_out.name) + # Check file contents + with open(collapsed_out.name, "r") as f: + content = f.read() + + lines = content.strip().split("\n") + self.assertEqual(len(lines), 2) # Two unique stacks + + # Check collapsed format: tid:X;file:func:line;file:func:line count + stack1_expected = "tid:1;file.py:func2:20;file.py:func1:10 2" + stack2_expected = "tid:1;other.py:other_func:5 1" + + self.assertIn(stack1_expected, lines) + self.assertIn(stack2_expected, lines) + + def test_flamegraph_collector_basic(self): + """Test basic FlamegraphCollector functionality.""" + collector = FlamegraphCollector() + + # Empty collector should produce 'No Data' + data = collector._convert_to_flamegraph_format() + # With string table, name is now an index - resolve it using the strings array + strings = data.get("strings", []) + name_index = data.get("name", 0) + resolved_name = ( + strings[name_index] + if isinstance(name_index, int) and 0 <= name_index < len(strings) + else str(name_index) + ) + self.assertIn(resolved_name, ("No Data", "No significant data")) + + # Test collecting sample data + test_frames = [ + MockInterpreterInfo( + 0, + [ + MockThreadInfo( + 1, [("file.py", 10, "func1"), ("file.py", 20, "func2")] + ) + ], + ) + ] + collector.collect(test_frames) + + # Convert and verify structure: func2 -> func1 with counts = 1 + data = collector._convert_to_flamegraph_format() + 
# Expect promotion: root is the single child (func2), with func1 as its only child + strings = data.get("strings", []) + name_index = data.get("name", 0) + name = ( + strings[name_index] + if isinstance(name_index, int) and 0 <= name_index < len(strings) + else str(name_index) + ) + self.assertIsInstance(name, str) + self.assertTrue(name.startswith("Program Root: ")) + self.assertIn("func2 (file.py:20)", name) # formatted name + children = data.get("children", []) + self.assertEqual(len(children), 1) + child = children[0] + child_name_index = child.get("name", 0) + child_name = ( + strings[child_name_index] + if isinstance(child_name_index, int) + and 0 <= child_name_index < len(strings) + else str(child_name_index) + ) + self.assertIn("func1 (file.py:10)", child_name) # formatted name + self.assertEqual(child["value"], 1) + + def test_flamegraph_collector_export(self): + """Test flamegraph HTML export functionality.""" + flamegraph_out = tempfile.NamedTemporaryFile( + suffix=".html", delete=False + ) + self.addCleanup(close_and_unlink, flamegraph_out) + + collector = FlamegraphCollector() + + # Create some test data (use Interpreter/Thread objects like runtime) + test_frames1 = [ + MockInterpreterInfo( + 0, + [ + MockThreadInfo( + 1, [("file.py", 10, "func1"), ("file.py", 20, "func2")] + ) + ], + ) + ] + test_frames2 = [ + MockInterpreterInfo( + 0, + [ + MockThreadInfo( + 1, [("file.py", 10, "func1"), ("file.py", 20, "func2")] + ) + ], + ) + ] # Same stack + test_frames3 = [ + MockInterpreterInfo( + 0, [MockThreadInfo(1, [("other.py", 5, "other_func")])] + ) + ] + + collector.collect(test_frames1) + collector.collect(test_frames2) + collector.collect(test_frames3) + + # Export flamegraph + with captured_stdout(), captured_stderr(): + collector.export(flamegraph_out.name) + + # Verify file was created and contains valid data + self.assertTrue(os.path.exists(flamegraph_out.name)) + self.assertGreater(os.path.getsize(flamegraph_out.name), 0) + + # Check file contains 
HTML content + with open(flamegraph_out.name, "r", encoding="utf-8") as f: + content = f.read() + + # Should be valid HTML + self.assertIn("", content.lower()) + self.assertIn(" 0) + self.assertGreater(mock_collector.collect.call_count, 0) + self.assertLessEqual(mock_collector.collect.call_count, 3) + + def test_sample_profiler_missed_samples_warning(self): + """Test that the profiler warns about missed samples when sampling is too slow.""" + + mock_unwinder = mock.MagicMock() + mock_unwinder.get_stack_trace.return_value = [ + ( + 1, + [ + mock.MagicMock( + filename="test.py", lineno=10, funcname="test_func" + ) + ], + ) + ] + + with mock.patch( + "_remote_debugging.RemoteUnwinder" + ) as mock_unwinder_class: + mock_unwinder_class.return_value = mock_unwinder + + # Use very short interval that we'll miss + profiler = SampleProfiler( + pid=12345, sample_interval_usec=1000, all_threads=False + ) # 1ms interval + + mock_collector = mock.MagicMock() + + # Simulate slow sampling where we miss many samples + times = [ + 0.0, + 0.1, + 0.2, + 0.3, + 0.4, + 0.5, + 0.6, + 0.7, + ] # Extra time points to avoid StopIteration + + with mock.patch("time.perf_counter", side_effect=times): + with io.StringIO() as output: + with mock.patch("sys.stdout", output): + profiler.sample(mock_collector, duration_sec=0.5) + + result = output.getvalue() + + # Should warn about missed samples + self.assertIn("Warning: missed", result) + self.assertIn("samples from the expected total", result) + + +@force_not_colorized_test_class +class TestPrintSampledStats(unittest.TestCase): + """Test the print_sampled_stats function.""" + + def setUp(self): + """Set up test data.""" + # Mock stats data + self.mock_stats = mock.MagicMock() + self.mock_stats.stats = { + ("file1.py", 10, "func1"): ( + 100, + 100, + 0.5, + 0.5, + {}, + ), # cc, nc, tt, ct, callers + ("file2.py", 20, "func2"): (50, 50, 0.25, 0.3, {}), + ("file3.py", 30, "func3"): (200, 200, 1.5, 2.0, {}), + ("file4.py", 40, "func4"): ( + 10, + 
10, + 0.001, + 0.001, + {}, + ), # millisecond range + ("file5.py", 50, "func5"): ( + 5, + 5, + 0.000001, + 0.000002, + {}, + ), # microsecond range + } + + def test_print_sampled_stats_basic(self): + """Test basic print_sampled_stats functionality.""" + + # Capture output + with io.StringIO() as output: + with mock.patch("sys.stdout", output): + print_sampled_stats(self.mock_stats, sample_interval_usec=100) + + result = output.getvalue() + + # Check header is present + self.assertIn("Profile Stats:", result) + self.assertIn("nsamples", result) + self.assertIn("tottime", result) + self.assertIn("cumtime", result) + + # Check functions are present + self.assertIn("func1", result) + self.assertIn("func2", result) + self.assertIn("func3", result) + + def test_print_sampled_stats_sorting(self): + """Test different sorting options.""" + + # Test sort by calls + with io.StringIO() as output: + with mock.patch("sys.stdout", output): + print_sampled_stats( + self.mock_stats, sort=0, sample_interval_usec=100 + ) + + result = output.getvalue() + lines = result.strip().split("\n") + + # Find the data lines (skip header) + data_lines = [l for l in lines if "file" in l and ".py" in l] + # func3 should be first (200 calls) + self.assertIn("func3", data_lines[0]) + + # Test sort by time + with io.StringIO() as output: + with mock.patch("sys.stdout", output): + print_sampled_stats( + self.mock_stats, sort=1, sample_interval_usec=100 + ) + + result = output.getvalue() + lines = result.strip().split("\n") + + data_lines = [l for l in lines if "file" in l and ".py" in l] + # func3 should be first (1.5s time) + self.assertIn("func3", data_lines[0]) + + def test_print_sampled_stats_limit(self): + """Test limiting output rows.""" + + with io.StringIO() as output: + with mock.patch("sys.stdout", output): + print_sampled_stats( + self.mock_stats, limit=2, sample_interval_usec=100 + ) + + result = output.getvalue() + + # Count function entries in the main stats section (not in summary) + 
lines = result.split("\n") + # Find where the main stats section ends (before summary) + main_section_lines = [] + for line in lines: + if "Summary of Interesting Functions:" in line: + break + main_section_lines.append(line) + + # Count function entries only in main section + func_count = sum( + 1 + for line in main_section_lines + if "func" in line and ".py" in line + ) + self.assertEqual(func_count, 2) + + def test_print_sampled_stats_time_units(self): + """Test proper time unit selection.""" + + with io.StringIO() as output: + with mock.patch("sys.stdout", output): + print_sampled_stats(self.mock_stats, sample_interval_usec=100) + + result = output.getvalue() + + # Should use seconds for the header since max time is > 1s + self.assertIn("tottime (s)", result) + self.assertIn("cumtime (s)", result) + + # Test with only microsecond-range times + micro_stats = mock.MagicMock() + micro_stats.stats = { + ("file1.py", 10, "func1"): (100, 100, 0.000005, 0.000010, {}), + } + + with io.StringIO() as output: + with mock.patch("sys.stdout", output): + print_sampled_stats(micro_stats, sample_interval_usec=100) + + result = output.getvalue() + + # Should use microseconds + self.assertIn("tottime (μs)", result) + self.assertIn("cumtime (μs)", result) + + def test_print_sampled_stats_summary(self): + """Test summary section generation.""" + + with io.StringIO() as output: + with mock.patch("sys.stdout", output): + print_sampled_stats( + self.mock_stats, + show_summary=True, + sample_interval_usec=100, + ) + + result = output.getvalue() + + # Check summary sections are present + self.assertIn("Summary of Interesting Functions:", result) + self.assertIn( + "Functions with Highest Direct/Cumulative Ratio (Hot Spots):", + result, + ) + self.assertIn( + "Functions with Highest Call Frequency (Indirect Calls):", result + ) + self.assertIn( + "Functions with Highest Call Magnification (Cumulative/Direct):", + result, + ) + + def test_print_sampled_stats_no_summary(self): + """Test 
disabling summary output.""" + + with io.StringIO() as output: + with mock.patch("sys.stdout", output): + print_sampled_stats( + self.mock_stats, + show_summary=False, + sample_interval_usec=100, + ) + + result = output.getvalue() + + # Summary should not be present + self.assertNotIn("Summary of Interesting Functions:", result) + + def test_print_sampled_stats_empty_stats(self): + """Test with empty stats.""" + + empty_stats = mock.MagicMock() + empty_stats.stats = {} + + with io.StringIO() as output: + with mock.patch("sys.stdout", output): + print_sampled_stats(empty_stats, sample_interval_usec=100) + + result = output.getvalue() + + # Should still print header + self.assertIn("Profile Stats:", result) + + def test_print_sampled_stats_sample_percentage_sorting(self): + """Test sample percentage sorting options.""" + + # Add a function with high sample percentage (more direct calls than func3's 200) + self.mock_stats.stats[("expensive.py", 60, "expensive_func")] = ( + 300, # direct calls (higher than func3's 200) + 300, # cumulative calls + 1.0, # total time + 1.0, # cumulative time + {}, + ) + + # Test sort by sample percentage + with io.StringIO() as output: + with mock.patch("sys.stdout", output): + print_sampled_stats( + self.mock_stats, sort=3, sample_interval_usec=100 + ) # sample percentage + + result = output.getvalue() + lines = result.strip().split("\n") + + data_lines = [l for l in lines if ".py" in l and "func" in l] + # expensive_func should be first (highest sample percentage) + self.assertIn("expensive_func", data_lines[0]) + + def test_print_sampled_stats_with_recursive_calls(self): + """Test print_sampled_stats with recursive calls where nc != cc.""" + + # Create stats with recursive calls (nc != cc) + recursive_stats = mock.MagicMock() + recursive_stats.stats = { + # (direct_calls, cumulative_calls, tt, ct, callers) - recursive function + ("recursive.py", 10, "factorial"): ( + 5, # direct_calls + 10, # cumulative_calls (appears more times in 
stack due to recursion) + 0.5, + 0.6, + {}, + ), + ("normal.py", 20, "normal_func"): ( + 3, # direct_calls + 3, # cumulative_calls (same as direct for non-recursive) + 0.2, + 0.2, + {}, + ), + } + + with io.StringIO() as output: + with mock.patch("sys.stdout", output): + print_sampled_stats(recursive_stats, sample_interval_usec=100) + + result = output.getvalue() + + # Should display recursive calls as "5/10" format + self.assertIn("5/10", result) # nc/cc format for recursive calls + self.assertIn("3", result) # just nc for non-recursive calls + self.assertIn("factorial", result) + self.assertIn("normal_func", result) + + def test_print_sampled_stats_with_zero_call_counts(self): + """Test print_sampled_stats with zero call counts to trigger division protection.""" + + # Create stats with zero call counts + zero_stats = mock.MagicMock() + zero_stats.stats = { + ("file.py", 10, "zero_calls"): (0, 0, 0.0, 0.0, {}), # Zero calls + ("file.py", 20, "normal_func"): ( + 5, + 5, + 0.1, + 0.1, + {}, + ), # Normal function + } + + with io.StringIO() as output: + with mock.patch("sys.stdout", output): + print_sampled_stats(zero_stats, sample_interval_usec=100) + + result = output.getvalue() + + # Should handle zero call counts gracefully + self.assertIn("zero_calls", result) + self.assertIn("zero_calls", result) + self.assertIn("normal_func", result) + + def test_print_sampled_stats_sort_by_name(self): + """Test sort by function name option.""" + + with io.StringIO() as output: + with mock.patch("sys.stdout", output): + print_sampled_stats( + self.mock_stats, sort=-1, sample_interval_usec=100 + ) # sort by name + + result = output.getvalue() + lines = result.strip().split("\n") + + # Find the data lines (skip header and summary) + # Data lines start with whitespace and numbers, and contain filename:lineno(function) + data_lines = [] + for line in lines: + # Skip header lines and summary sections + if ( + line.startswith(" ") + and "(" in line + and ")" in line + and not 
line.startswith( + " 1." + ) # Skip summary lines that start with times + and not line.startswith( + " 0." + ) # Skip summary lines that start with times + and not "per call" in line # Skip summary lines + and not "calls" in line # Skip summary lines + and not "total time" in line # Skip summary lines + and not "cumulative time" in line + ): # Skip summary lines + data_lines.append(line) + + # Extract just the function names for comparison + func_names = [] + import re + + for line in data_lines: + # Function name is between the last ( and ), accounting for ANSI color codes + match = re.search(r"\(([^)]+)\)$", line) + if match: + func_name = match.group(1) + # Remove ANSI color codes + func_name = re.sub(r"\x1b\[[0-9;]*m", "", func_name) + func_names.append(func_name) + + # Verify we extracted function names and they are sorted + self.assertGreater( + len(func_names), 0, "Should have extracted some function names" + ) + self.assertEqual( + func_names, + sorted(func_names), + f"Function names {func_names} should be sorted alphabetically", + ) + + def test_print_sampled_stats_with_zero_time_functions(self): + """Test summary sections with functions that have zero time.""" + + # Create stats with zero-time functions + zero_time_stats = mock.MagicMock() + zero_time_stats.stats = { + ("file1.py", 10, "zero_time_func"): ( + 5, + 5, + 0.0, + 0.0, + {}, + ), # Zero time + ("file2.py", 20, "normal_func"): ( + 3, + 3, + 0.1, + 0.1, + {}, + ), # Normal time + } + + with io.StringIO() as output: + with mock.patch("sys.stdout", output): + print_sampled_stats( + zero_time_stats, + show_summary=True, + sample_interval_usec=100, + ) + + result = output.getvalue() + + # Should handle zero-time functions gracefully in summary + self.assertIn("Summary of Interesting Functions:", result) + self.assertIn("zero_time_func", result) + self.assertIn("normal_func", result) + + def test_print_sampled_stats_with_malformed_qualified_names(self): + """Test summary generation with function names 
that don't contain colons.""" + + # Create stats with function names that would create malformed qualified names + malformed_stats = mock.MagicMock() + malformed_stats.stats = { + # Function name without clear module separation + ("no_colon_func", 10, "func"): (3, 3, 0.1, 0.1, {}), + ("", 20, "empty_filename_func"): (2, 2, 0.05, 0.05, {}), + ("normal.py", 30, "normal_func"): (5, 5, 0.2, 0.2, {}), + } + + with io.StringIO() as output: + with mock.patch("sys.stdout", output): + print_sampled_stats( + malformed_stats, + show_summary=True, + sample_interval_usec=100, + ) + + result = output.getvalue() + + # Should handle malformed names gracefully in summary aggregation + self.assertIn("Summary of Interesting Functions:", result) + # All function names should appear somewhere in the output + self.assertIn("func", result) + self.assertIn("empty_filename_func", result) + self.assertIn("normal_func", result) + + def test_print_sampled_stats_with_recursive_call_stats_creation(self): + """Test create_stats with recursive call data to trigger total_rec_calls branch.""" + collector = PstatsCollector(sample_interval_usec=1000000) # 1 second + + # Simulate recursive function data where total_rec_calls would be set + # We need to manually manipulate the collector result to test this branch + collector.result = { + ("recursive.py", 10, "factorial"): { + "total_rec_calls": 3, # Non-zero recursive calls + "direct_calls": 5, + "cumulative_calls": 10, + }, + ("normal.py", 20, "normal_func"): { + "total_rec_calls": 0, # Zero recursive calls + "direct_calls": 2, + "cumulative_calls": 5, + }, + } + + collector.create_stats() + + # Check that recursive calls are handled differently from non-recursive + factorial_stats = collector.stats[("recursive.py", 10, "factorial")] + normal_stats = collector.stats[("normal.py", 20, "normal_func")] + + # factorial should use cumulative_calls (10) as nc + self.assertEqual( + factorial_stats[1], 10 + ) # nc should be cumulative_calls + 
self.assertEqual(factorial_stats[0], 5) # cc should be direct_calls + + # normal_func should use cumulative_calls as nc + self.assertEqual(normal_stats[1], 5) # nc should be cumulative_calls + self.assertEqual(normal_stats[0], 2) # cc should be direct_calls diff --git a/Makefile.pre.in b/Makefile.pre.in index dd28ff5d2a3ed1..59c3c808794cf3 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -2692,6 +2692,7 @@ TESTSUBDIRS= idlelib/idle_test \ test/test_pathlib/support \ test/test_peg_generator \ test/test_profiling \ + test/test_profiling/test_sampling_profiler \ test/test_pydoc \ test/test_pyrepl \ test/test_string \ From 4695ec109d07c9bfd9eb7d91d6285c974a4331a7 Mon Sep 17 00:00:00 2001 From: Petr Viktorin Date: Tue, 18 Nov 2025 16:33:52 +0100 Subject: [PATCH 5/8] gh-138189: Link references to type slots (GH-141410) Link references to type slots --- Doc/c-api/structures.rst | 8 +++----- Doc/c-api/type.rst | 16 ++++++++-------- Doc/c-api/typeobj.rst | 2 +- Doc/howto/isolating-extensions.rst | 2 +- 4 files changed, 13 insertions(+), 15 deletions(-) diff --git a/Doc/c-api/structures.rst b/Doc/c-api/structures.rst index 414dfdc84e61c9..b4e7cb1d77e1a3 100644 --- a/Doc/c-api/structures.rst +++ b/Doc/c-api/structures.rst @@ -698,14 +698,12 @@ The following flags can be used with :c:member:`PyMemberDef.flags`: entry indicates an offset from the subclass-specific data, rather than from ``PyObject``. - Can only be used as part of :c:member:`Py_tp_members ` + Can only be used as part of the :c:data:`Py_tp_members` :c:type:`slot ` when creating a class using negative :c:member:`~PyType_Spec.basicsize`. It is mandatory in that case. - - This flag is only used in :c:type:`PyType_Slot`. 
- When setting :c:member:`~PyTypeObject.tp_members` during - class creation, Python clears it and sets + When setting :c:member:`~PyTypeObject.tp_members` from the slot during + class creation, Python clears the flag and sets :c:member:`PyMemberDef.offset` to the offset from the ``PyObject`` struct. .. index:: diff --git a/Doc/c-api/type.rst b/Doc/c-api/type.rst index b608f815160f76..c7946e3190f01b 100644 --- a/Doc/c-api/type.rst +++ b/Doc/c-api/type.rst @@ -383,8 +383,8 @@ The following functions and structs are used to create The *bases* argument can be used to specify base classes; it can either be only one class or a tuple of classes. - If *bases* is ``NULL``, the *Py_tp_bases* slot is used instead. - If that also is ``NULL``, the *Py_tp_base* slot is used instead. + If *bases* is ``NULL``, the :c:data:`Py_tp_bases` slot is used instead. + If that also is ``NULL``, the :c:data:`Py_tp_base` slot is used instead. If that also is ``NULL``, the new type derives from :class:`object`. The *module* argument can be used to record the module in which the new @@ -590,9 +590,9 @@ The following functions and structs are used to create :c:type:`PyAsyncMethods` with an added ``Py_`` prefix. For example, use: - * ``Py_tp_dealloc`` to set :c:member:`PyTypeObject.tp_dealloc` - * ``Py_nb_add`` to set :c:member:`PyNumberMethods.nb_add` - * ``Py_sq_length`` to set :c:member:`PySequenceMethods.sq_length` + * :c:data:`Py_tp_dealloc` to set :c:member:`PyTypeObject.tp_dealloc` + * :c:data:`Py_nb_add` to set :c:member:`PyNumberMethods.nb_add` + * :c:data:`Py_sq_length` to set :c:member:`PySequenceMethods.sq_length` An additional slot is supported that does not correspond to a :c:type:`!PyTypeObject` struct field: @@ -611,7 +611,7 @@ The following functions and structs are used to create If it is not possible to switch to a ``MANAGED`` flag (for example, for vectorcall or to support Python older than 3.12), specify the - offset in :c:member:`Py_tp_members `. 
+ offset in :c:data:`Py_tp_members`. See :ref:`PyMemberDef documentation ` for details. @@ -639,7 +639,7 @@ The following functions and structs are used to create .. versionchanged:: 3.14 The field :c:member:`~PyTypeObject.tp_vectorcall` can now set - using ``Py_tp_vectorcall``. See the field's documentation + using :c:data:`Py_tp_vectorcall`. See the field's documentation for details. .. c:member:: void *pfunc @@ -649,7 +649,7 @@ The following functions and structs are used to create *pfunc* values may not be ``NULL``, except for the following slots: - * ``Py_tp_doc`` + * :c:data:`Py_tp_doc` * :c:data:`Py_tp_token` (for clarity, prefer :c:data:`Py_TP_USE_SPEC` rather than ``NULL``) diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst index 34d19acdf17868..49fe02d919df8b 100644 --- a/Doc/c-api/typeobj.rst +++ b/Doc/c-api/typeobj.rst @@ -2273,7 +2273,7 @@ and :c:data:`PyType_Type` effectively act as defaults.) This field should be set to ``NULL`` and treated as read-only. Python will fill it in when the type is :c:func:`initialized `. - For dynamically created classes, the ``Py_tp_bases`` + For dynamically created classes, the :c:data:`Py_tp_bases` :c:type:`slot ` can be used instead of the *bases* argument of :c:func:`PyType_FromSpecWithBases`. The argument form is preferred. diff --git a/Doc/howto/isolating-extensions.rst b/Doc/howto/isolating-extensions.rst index 7da6dc8a39795e..6092c75f48fdef 100644 --- a/Doc/howto/isolating-extensions.rst +++ b/Doc/howto/isolating-extensions.rst @@ -353,7 +353,7 @@ garbage collection protocol. That is, heap types should: - Have the :c:macro:`Py_TPFLAGS_HAVE_GC` flag. -- Define a traverse function using ``Py_tp_traverse``, which +- Define a traverse function using :c:data:`Py_tp_traverse`, which visits the type (e.g. using ``Py_VISIT(Py_TYPE(self))``). 
Please refer to the documentation of From 600f3feb234219c9a9998e30ea653a2afb1f8116 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Tue, 18 Nov 2025 17:13:13 +0100 Subject: [PATCH 6/8] gh-141070: Add PyUnstable_Object_Dump() function (#141072) * Promote _PyObject_Dump() as a public function. * Keep _PyObject_Dump() alias to PyUnstable_Object_Dump() for backward compatibility. * Replace _PyObject_Dump() with PyUnstable_Object_Dump(). Co-authored-by: Peter Bierma Co-authored-by: Kumar Aditya Co-authored-by: Petr Viktorin --- Doc/c-api/object.rst | 29 +++++++++++ Doc/whatsnew/3.15.rst | 12 +++-- Include/cpython/object.h | 14 +++-- .../pycore_global_objects_fini_generated.h | 2 +- Lib/test/test_capi/test_object.py | 52 +++++++++++++++++++ ...-11-05-21-48-31.gh-issue-141070.mkrhjQ.rst | 2 + Modules/_testcapi/object.c | 25 +++++++++ Objects/object.c | 4 +- Objects/unicodeobject.c | 3 +- Python/gc.c | 2 +- Python/pythonrun.c | 8 +-- 11 files changed, 135 insertions(+), 18 deletions(-) create mode 100644 Misc/NEWS.d/next/C_API/2025-11-05-21-48-31.gh-issue-141070.mkrhjQ.rst diff --git a/Doc/c-api/object.rst b/Doc/c-api/object.rst index 96353266ac7300..76971c46c1696b 100644 --- a/Doc/c-api/object.rst +++ b/Doc/c-api/object.rst @@ -85,6 +85,35 @@ Object Protocol instead of the :func:`repr`. +.. c:function:: void PyUnstable_Object_Dump(PyObject *op) + + Dump an object *op* to ``stderr``. This should only be used for debugging. + + The output is intended to try dumping objects even after memory corruption: + + * Information is written starting with fields that are the least likely to + crash when accessed. + * This function can be called without an :term:`attached thread state`, but + it's not recommended to do so: it can cause deadlocks. + * An object that does not belong to the current interpreter may be dumped, + but this may also cause crashes or unintended behavior. + * Implement a heuristic to detect if the object memory has been freed. 
Don't + display the object contents in this case, only its memory address. + * The output format may change at any time. + + Example of output: + + .. code-block:: output + + object address : 0x7f80124702c0 + object refcount : 2 + object type : 0x9902e0 + object type name: str + object repr : 'abcdef' + + .. versionadded:: next + + .. c:function:: int PyObject_HasAttrWithError(PyObject *o, PyObject *attr_name) Returns ``1`` if *o* has the attribute *attr_name*, and ``0`` otherwise. diff --git a/Doc/whatsnew/3.15.rst b/Doc/whatsnew/3.15.rst index 24cc7e2d7eb911..5a98297d3f8847 100644 --- a/Doc/whatsnew/3.15.rst +++ b/Doc/whatsnew/3.15.rst @@ -1084,19 +1084,23 @@ New features (Contributed by Victor Stinner in :gh:`129813`.) +* Add a new :c:func:`PyImport_CreateModuleFromInitfunc` C-API for creating + a module from a *spec* and *initfunc*. + (Contributed by Itamar Oren in :gh:`116146`.) + * Add :c:func:`PyTuple_FromArray` to create a :class:`tuple` from an array. (Contributed by Victor Stinner in :gh:`111489`.) +* Add :c:func:`PyUnstable_Object_Dump` to dump an object to ``stderr``. + It should only be used for debugging. + (Contributed by Victor Stinner in :gh:`141070`.) + * Add :c:func:`PyUnstable_ThreadState_SetStackProtection` and :c:func:`PyUnstable_ThreadState_ResetStackProtection` functions to set the stack protection base address and stack protection size of a Python thread state. (Contributed by Victor Stinner in :gh:`139653`.) -* Add a new :c:func:`PyImport_CreateModuleFromInitfunc` C-API for creating - a module from a *spec* and *initfunc*. - (Contributed by Itamar Oren in :gh:`116146`.) 
- Changed C APIs -------------- diff --git a/Include/cpython/object.h b/Include/cpython/object.h index d64298232e705c..130a105de42150 100644 --- a/Include/cpython/object.h +++ b/Include/cpython/object.h @@ -295,7 +295,10 @@ PyAPI_FUNC(PyObject *) PyType_GetDict(PyTypeObject *); PyAPI_FUNC(int) PyObject_Print(PyObject *, FILE *, int); PyAPI_FUNC(void) _Py_BreakPoint(void); -PyAPI_FUNC(void) _PyObject_Dump(PyObject *); +PyAPI_FUNC(void) PyUnstable_Object_Dump(PyObject *); + +// Alias for backward compatibility +#define _PyObject_Dump PyUnstable_Object_Dump PyAPI_FUNC(PyObject*) _PyObject_GetAttrId(PyObject *, _Py_Identifier *); @@ -387,10 +390,11 @@ PyAPI_FUNC(PyObject *) _PyObject_FunctionStr(PyObject *); process with a message on stderr if the given condition fails to hold, but compile away to nothing if NDEBUG is defined. - However, before aborting, Python will also try to call _PyObject_Dump() on - the given object. This may be of use when investigating bugs in which a - particular object is corrupt (e.g. buggy a tp_visit method in an extension - module breaking the garbage collector), to help locate the broken objects. + However, before aborting, Python will also try to call + PyUnstable_Object_Dump() on the given object. This may be of use when + investigating bugs in which a particular object is corrupt (e.g. a buggy + tp_visit method in an extension module breaking the garbage collector), to + help locate the broken objects. The WITH_MSG variant allows you to supply an additional message that Python will attempt to print to stderr, after the object dump.
*/ diff --git a/Include/internal/pycore_global_objects_fini_generated.h b/Include/internal/pycore_global_objects_fini_generated.h index ecef4364cc32df..c3968aff8f3b8d 100644 --- a/Include/internal/pycore_global_objects_fini_generated.h +++ b/Include/internal/pycore_global_objects_fini_generated.h @@ -13,7 +13,7 @@ static inline void _PyStaticObject_CheckRefcnt(PyObject *obj) { if (!_Py_IsImmortal(obj)) { fprintf(stderr, "Immortal Object has less refcnt than expected.\n"); - _PyObject_Dump(obj); + PyUnstable_Object_Dump(obj); } } #endif diff --git a/Lib/test/test_capi/test_object.py b/Lib/test/test_capi/test_object.py index d4056727d07fbf..c5040913e9e1f1 100644 --- a/Lib/test/test_capi/test_object.py +++ b/Lib/test/test_capi/test_object.py @@ -1,4 +1,5 @@ import enum +import os import sys import textwrap import unittest @@ -13,6 +14,9 @@ _testcapi = import_helper.import_module('_testcapi') _testinternalcapi = import_helper.import_module('_testinternalcapi') +NULL = None +STDERR_FD = 2 + class Constant(enum.IntEnum): Py_CONSTANT_NONE = 0 @@ -247,5 +251,53 @@ def func(x): func(object()) + def pyobject_dump(self, obj, release_gil=False): + pyobject_dump = _testcapi.pyobject_dump + + try: + old_stderr = os.dup(STDERR_FD) + except OSError as exc: + # os.dup(STDERR_FD) is not supported on WASI + self.skipTest(f"os.dup() failed with {exc!r}") + + filename = os_helper.TESTFN + try: + try: + with open(filename, "wb") as fp: + fd = fp.fileno() + os.dup2(fd, STDERR_FD) + pyobject_dump(obj, release_gil) + finally: + os.dup2(old_stderr, STDERR_FD) + os.close(old_stderr) + + with open(filename) as fp: + return fp.read().rstrip() + finally: + os_helper.unlink(filename) + + def test_pyobject_dump(self): + # test string object + str_obj = 'test string' + output = self.pyobject_dump(str_obj) + hex_regex = r'(0x)?[0-9a-fA-F]+' + regex = ( + fr"object address : {hex_regex}\n" + r"object refcount : [0-9]+\n" + fr"object type : {hex_regex}\n" + r"object type name: str\n" + r"object repr 
: 'test string'" + ) + self.assertRegex(output, regex) + + # release the GIL + output = self.pyobject_dump(str_obj, release_gil=True) + self.assertRegex(output, regex) + + # test NULL object + output = self.pyobject_dump(NULL) + self.assertRegex(output, r'') + + if __name__ == "__main__": unittest.main() diff --git a/Misc/NEWS.d/next/C_API/2025-11-05-21-48-31.gh-issue-141070.mkrhjQ.rst b/Misc/NEWS.d/next/C_API/2025-11-05-21-48-31.gh-issue-141070.mkrhjQ.rst new file mode 100644 index 00000000000000..39cfcf73404ebf --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2025-11-05-21-48-31.gh-issue-141070.mkrhjQ.rst @@ -0,0 +1,2 @@ +Add :c:func:`PyUnstable_Object_Dump` to dump an object to ``stderr``. It should +only be used for debugging. Patch by Victor Stinner. diff --git a/Modules/_testcapi/object.c b/Modules/_testcapi/object.c index 798ef97c495aeb..4c9632c07a99f4 100644 --- a/Modules/_testcapi/object.c +++ b/Modules/_testcapi/object.c @@ -485,6 +485,30 @@ is_uniquely_referenced(PyObject *self, PyObject *op) } +static PyObject * +pyobject_dump(PyObject *self, PyObject *args) +{ + PyObject *op; + int release_gil = 0; + + if (!PyArg_ParseTuple(args, "O|i", &op, &release_gil)) { + return NULL; + } + NULLABLE(op); + + if (release_gil) { + Py_BEGIN_ALLOW_THREADS + PyUnstable_Object_Dump(op); + Py_END_ALLOW_THREADS + + } + else { + PyUnstable_Object_Dump(op); + } + Py_RETURN_NONE; +} + + static PyMethodDef test_methods[] = { {"call_pyobject_print", call_pyobject_print, METH_VARARGS}, {"pyobject_print_null", pyobject_print_null, METH_VARARGS}, @@ -511,6 +535,7 @@ static PyMethodDef test_methods[] = { {"test_py_is_funcs", test_py_is_funcs, METH_NOARGS}, {"clear_managed_dict", clear_managed_dict, METH_O, NULL}, {"is_uniquely_referenced", is_uniquely_referenced, METH_O}, + {"pyobject_dump", pyobject_dump, METH_VARARGS}, {NULL}, }; diff --git a/Objects/object.c b/Objects/object.c index 0540112d7d2acf..0a80c6edcf158c 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -713,7 +713,7 
@@ _PyObject_IsFreed(PyObject *op) /* For debugging convenience. See Misc/gdbinit for some useful gdb hooks */ void -_PyObject_Dump(PyObject* op) +PyUnstable_Object_Dump(PyObject* op) { if (_PyObject_IsFreed(op)) { /* It seems like the object memory has been freed: @@ -3150,7 +3150,7 @@ _PyObject_AssertFailed(PyObject *obj, const char *expr, const char *msg, /* This might succeed or fail, but we're about to abort, so at least try to provide any extra info we can: */ - _PyObject_Dump(obj); + PyUnstable_Object_Dump(obj); fprintf(stderr, "\n"); fflush(stderr); diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 4e8c132327b7d0..7f9f75126a9e56 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -547,7 +547,8 @@ unicode_check_encoding_errors(const char *encoding, const char *errors) } /* Disable checks during Python finalization. For example, it allows to - call _PyObject_Dump() during finalization for debugging purpose. */ + * call PyUnstable_Object_Dump() during finalization for debugging purpose. 
+ */ if (_PyInterpreterState_GetFinalizing(interp) != NULL) { return 0; } diff --git a/Python/gc.c b/Python/gc.c index 064f9406e0a17c..27364ecfdcd5c6 100644 --- a/Python/gc.c +++ b/Python/gc.c @@ -2237,7 +2237,7 @@ _PyGC_Fini(PyInterpreterState *interp) void _PyGC_Dump(PyGC_Head *g) { - _PyObject_Dump(FROM_GC(g)); + PyUnstable_Object_Dump(FROM_GC(g)); } diff --git a/Python/pythonrun.c b/Python/pythonrun.c index 49ce0a97d4742f..272be504a68fa1 100644 --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -1181,7 +1181,7 @@ _PyErr_Display(PyObject *file, PyObject *unused, PyObject *value, PyObject *tb) } if (print_exception_recursive(&ctx, value) < 0) { PyErr_Clear(); - _PyObject_Dump(value); + PyUnstable_Object_Dump(value); fprintf(stderr, "lost sys.stderr\n"); } Py_XDECREF(ctx.seen); @@ -1199,14 +1199,14 @@ PyErr_Display(PyObject *unused, PyObject *value, PyObject *tb) PyObject *file; if (PySys_GetOptionalAttr(&_Py_ID(stderr), &file) < 0) { PyObject *exc = PyErr_GetRaisedException(); - _PyObject_Dump(value); + PyUnstable_Object_Dump(value); fprintf(stderr, "lost sys.stderr\n"); - _PyObject_Dump(exc); + PyUnstable_Object_Dump(exc); Py_DECREF(exc); return; } if (file == NULL) { - _PyObject_Dump(value); + PyUnstable_Object_Dump(value); fprintf(stderr, "lost sys.stderr\n"); return; } From daafacf0053e9c329b0f96447258f628dd0bd6f1 Mon Sep 17 00:00:00 2001 From: Shamil Date: Tue, 18 Nov 2025 19:34:58 +0300 Subject: [PATCH 7/8] gh-42400: Fix buffer overflow in _Py_wrealpath() for very long paths (#141529) Co-authored-by: Victor Stinner --- .../Security/2025-11-13-22-31-56.gh-issue-42400.pqB5Kq.rst | 3 +++ Python/fileutils.c | 7 ++++--- 2 files changed, 7 insertions(+), 3 deletions(-) create mode 100644 Misc/NEWS.d/next/Security/2025-11-13-22-31-56.gh-issue-42400.pqB5Kq.rst diff --git a/Misc/NEWS.d/next/Security/2025-11-13-22-31-56.gh-issue-42400.pqB5Kq.rst b/Misc/NEWS.d/next/Security/2025-11-13-22-31-56.gh-issue-42400.pqB5Kq.rst new file mode 100644 index 
00000000000000..17dc241aef91d6 --- /dev/null +++ b/Misc/NEWS.d/next/Security/2025-11-13-22-31-56.gh-issue-42400.pqB5Kq.rst @@ -0,0 +1,3 @@ +Fix buffer overflow in ``_Py_wrealpath()`` for paths exceeding ``MAXPATHLEN`` bytes +by using dynamic memory allocation instead of fixed-size buffer. +Patch by Shamil Abdulaev. diff --git a/Python/fileutils.c b/Python/fileutils.c index 93abd70a34d420..0c1766b8804500 100644 --- a/Python/fileutils.c +++ b/Python/fileutils.c @@ -2118,7 +2118,6 @@ _Py_wrealpath(const wchar_t *path, wchar_t *resolved_path, size_t resolved_path_len) { char *cpath; - char cresolved_path[MAXPATHLEN]; wchar_t *wresolved_path; char *res; size_t r; @@ -2127,12 +2126,14 @@ _Py_wrealpath(const wchar_t *path, errno = EINVAL; return NULL; } - res = realpath(cpath, cresolved_path); + res = realpath(cpath, NULL); PyMem_RawFree(cpath); if (res == NULL) return NULL; - wresolved_path = Py_DecodeLocale(cresolved_path, &r); + wresolved_path = Py_DecodeLocale(res, &r); + free(res); + if (wresolved_path == NULL) { errno = EINVAL; return NULL; From 4cfa695c953e5dfdab99ade81cee960ddf4b106d Mon Sep 17 00:00:00 2001 From: Brandt Bucher Date: Tue, 18 Nov 2025 09:51:18 -0800 Subject: [PATCH 8/8] GH-141686: Break cycles created by JSONEncoder.iterencode (GH-141687) --- Lib/json/encoder.py | 30 +++++++++---------- ...-11-17-16-53-49.gh-issue-141686.V-xaoI.rst | 2 ++ 2 files changed, 16 insertions(+), 16 deletions(-) create mode 100644 Misc/NEWS.d/next/Library/2025-11-17-16-53-49.gh-issue-141686.V-xaoI.rst diff --git a/Lib/json/encoder.py b/Lib/json/encoder.py index 5cf6d64f3eade6..4c70e8b75ed132 100644 --- a/Lib/json/encoder.py +++ b/Lib/json/encoder.py @@ -264,17 +264,6 @@ def floatstr(o, allow_nan=self.allow_nan, def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot, - ## HACK: hand-optimized bytecode; turn globals into locals - ValueError=ValueError, - dict=dict, - float=float, - id=id, - 
int=int, - isinstance=isinstance, - list=list, - str=str, - tuple=tuple, - _intstr=int.__repr__, ): def _iterencode_list(lst, _current_indent_level): @@ -311,7 +300,7 @@ def _iterencode_list(lst, _current_indent_level): # Subclasses of int/float may override __repr__, but we still # want to encode them as integers/floats in JSON. One example # within the standard library is IntEnum. - yield buf + _intstr(value) + yield buf + int.__repr__(value) elif isinstance(value, float): # see comment above for int yield buf + _floatstr(value) @@ -374,7 +363,7 @@ def _iterencode_dict(dct, _current_indent_level): key = 'null' elif isinstance(key, int): # see comment for int/float in _make_iterencode - key = _intstr(key) + key = int.__repr__(key) elif _skipkeys: continue else: @@ -399,7 +388,7 @@ def _iterencode_dict(dct, _current_indent_level): yield 'false' elif isinstance(value, int): # see comment for int/float in _make_iterencode - yield _intstr(value) + yield int.__repr__(value) elif isinstance(value, float): # see comment for int/float in _make_iterencode yield _floatstr(value) @@ -434,7 +423,7 @@ def _iterencode(o, _current_indent_level): yield 'false' elif isinstance(o, int): # see comment for int/float in _make_iterencode - yield _intstr(o) + yield int.__repr__(o) elif isinstance(o, float): # see comment for int/float in _make_iterencode yield _floatstr(o) @@ -458,4 +447,13 @@ def _iterencode(o, _current_indent_level): raise if markers is not None: del markers[markerid] - return _iterencode + + def _iterencode_once(o, _current_indent_level): + nonlocal _iterencode, _iterencode_dict, _iterencode_list + try: + yield from _iterencode(o, _current_indent_level) + finally: + # Break reference cycles due to mutually recursive closures: + del _iterencode, _iterencode_dict, _iterencode_list + + return _iterencode_once diff --git a/Misc/NEWS.d/next/Library/2025-11-17-16-53-49.gh-issue-141686.V-xaoI.rst b/Misc/NEWS.d/next/Library/2025-11-17-16-53-49.gh-issue-141686.V-xaoI.rst new 
file mode 100644 index 00000000000000..87e9cb8d69bfbc --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-11-17-16-53-49.gh-issue-141686.V-xaoI.rst @@ -0,0 +1,2 @@ +Break reference cycles created by each call to :func:`json.dump` or +:meth:`json.JSONEncoder.iterencode`.