|
| 1 | +#ifndef Py_INTERNAL_STATS_H |
| 2 | +#define Py_INTERNAL_STATS_H |
| 3 | +#ifdef __cplusplus |
| 4 | +extern "C" { |
| 5 | +#endif |
| 6 | + |
| 7 | +#ifndef Py_BUILD_CORE |
| 8 | +# error "this header requires Py_BUILD_CORE define" |
| 9 | +#endif |
| 10 | + |
#include "pycore_structs.h"       // PyStats (presumably declares the _Py_stats struct types — confirm)
| 12 | + |
| 13 | + |
#ifdef Py_STATS

#include "pycore_bitutils.h"      // _Py_bit_length

/*
 * Statistics-gathering macros.  Every macro is guarded by a NULL check
 * on _Py_stats, so collection can be switched off at runtime by
 * clearing that pointer; when off, each expansion costs one branch.
 */

// Bump/decrement a per-opcode specialization counter (field picked by `name`).
#define STAT_INC(opname, name) do { if (_Py_stats) _Py_stats->opcode_stats[opname].specialization.name++; } while (0)
#define STAT_DEC(opname, name) do { if (_Py_stats) _Py_stats->opcode_stats[opname].specialization.name--; } while (0)
// Count one execution of instruction `opname`.
#define OPCODE_EXE_INC(opname) do { if (_Py_stats) _Py_stats->opcode_stats[opname].execution_count++; } while (0)
#define CALL_STAT_INC(name) do { if (_Py_stats) _Py_stats->call_stats.name++; } while (0)
#define OBJECT_STAT_INC(name) do { if (_Py_stats) _Py_stats->object_stats.name++; } while (0)
// Same as OBJECT_STAT_INC, but only when `cond` evaluates true.
#define OBJECT_STAT_INC_COND(name, cond) \
    do { if (_Py_stats && cond) _Py_stats->object_stats.name++; } while (0)
#define EVAL_CALL_STAT_INC(name) do { if (_Py_stats) _Py_stats->call_stats.eval_calls[name]++; } while (0)
// Count an eval call only when `callable` is a Python function object.
#define EVAL_CALL_STAT_INC_IF_FUNCTION(name, callable) \
    do { if (_Py_stats && PyFunction_Check(callable)) _Py_stats->call_stats.eval_calls[name]++; } while (0)
// Add `n` to a GC counter for generation `gen`.
#define GC_STAT_ADD(gen, name, n) do { if (_Py_stats) _Py_stats->gc_stats[(gen)].name += (n); } while (0)
// Optimizer (tier-2) counters.
#define OPT_STAT_INC(name) do { if (_Py_stats) _Py_stats->optimization_stats.name++; } while (0)
#define OPT_STAT_ADD(name, n) do { if (_Py_stats) _Py_stats->optimization_stats.name += (n); } while (0)
// Per-uop counter; the assert bounds `opname` to the opcode table size
// (512 here — presumably matches the uop table capacity; confirm).
#define UOP_STAT_INC(opname, name) do { if (_Py_stats) { assert(opname < 512); _Py_stats->optimization_stats.opcode[opname].name++; } } while (0)
// Count the (lastuop, uopcode) pair, then remember `uopcode` as the new
// predecessor.  Note: `lastuop` is written back even when stats are off.
#define UOP_PAIR_INC(uopcode, lastuop) \
    do { \
        if (lastuop && _Py_stats) { \
            _Py_stats->optimization_stats.opcode[lastuop].pair_count[uopcode]++; \
        } \
        lastuop = uopcode; \
    } while (0)
#define OPT_UNSUPPORTED_OPCODE(opname) do { if (_Py_stats) _Py_stats->optimization_stats.unsupported_opcode[opname]++; } while (0)
#define OPT_ERROR_IN_OPCODE(opname) do { if (_Py_stats) _Py_stats->optimization_stats.error_in_opcode[opname]++; } while (0)
// Power-of-two histogram: bucket = bit_length(length - 1), clamped to the
// last slot so oversized lengths never index out of range.
#define OPT_HIST(length, name) \
    do { \
        if (_Py_stats) { \
            int bucket = _Py_bit_length(length >= 1 ? length - 1 : 0); \
            bucket = (bucket >= _Py_UOP_HIST_SIZE) ? _Py_UOP_HIST_SIZE - 1 : bucket; \
            _Py_stats->optimization_stats.name[bucket]++; \
        } \
    } while (0)
#define RARE_EVENT_STAT_INC(name) do { if (_Py_stats) _Py_stats->rare_event_stats.name++; } while (0)
// Count a deferred specialization.  NOTE(review): relies on a local
// variable named `opcode` being in scope at the expansion site.
#define OPCODE_DEFERRED_INC(opname) do { if (_Py_stats && opcode == opname) _Py_stats->opcode_stats[opname].specialization.deferred++; } while (0)

// Export for '_opcode' shared extension
PyAPI_FUNC(PyObject*) _Py_GetSpecializationStats(void);

#else  // !Py_STATS: every stats macro compiles away to a no-op.
#define STAT_INC(opname, name) ((void)0)
#define STAT_DEC(opname, name) ((void)0)
#define OPCODE_EXE_INC(opname) ((void)0)
#define CALL_STAT_INC(name) ((void)0)
#define OBJECT_STAT_INC(name) ((void)0)
#define OBJECT_STAT_INC_COND(name, cond) ((void)0)
#define EVAL_CALL_STAT_INC(name) ((void)0)
#define EVAL_CALL_STAT_INC_IF_FUNCTION(name, callable) ((void)0)
#define GC_STAT_ADD(gen, name, n) ((void)0)
#define OPT_STAT_INC(name) ((void)0)
#define OPT_STAT_ADD(name, n) ((void)0)
#define UOP_STAT_INC(opname, name) ((void)0)
#define UOP_PAIR_INC(uopcode, lastuop) ((void)0)
#define OPT_UNSUPPORTED_OPCODE(opname) ((void)0)
#define OPT_ERROR_IN_OPCODE(opname) ((void)0)
#define OPT_HIST(length, name) ((void)0)
#define RARE_EVENT_STAT_INC(name) ((void)0)
#define OPCODE_DEFERRED_INC(opname) ((void)0)
#endif  // !Py_STATS
| 75 | + |
| 76 | + |
// Saturating increment of a per-interpreter rare-event counter: the
// uint8 count sticks at UINT8_MAX instead of wrapping.  Also feeds the
// global Py_STATS counter of the same name (a no-op unless Py_STATS is
// defined).  NOTE(review): the load and store are separate atomics, not
// one RMW, so concurrent increments may be lost — presumably acceptable
// for an approximate event count; confirm.
// Fix: removed the trailing `;` after `while (0)` (it broke use inside
// `if`/`else` by turning one statement into two) and the dangling final
// `\` that spliced the following line into the macro.
#define RARE_EVENT_INTERP_INC(interp, name) \
    do { \
        /* saturating add */ \
        int val = FT_ATOMIC_LOAD_UINT8_RELAXED(interp->rare_events.name); \
        if (val < UINT8_MAX) { \
            FT_ATOMIC_STORE_UINT8(interp->rare_events.name, val + 1); \
        } \
        RARE_EVENT_STAT_INC(name); \
    } while (0)

// Record a rare event against the current interpreter (obtained via
// PyInterpreterState_Get()); convenience wrapper over
// RARE_EVENT_INTERP_INC.
// Fix: removed the trailing `;` after `while (0)` and the dangling
// final `\`, so the expansion is a single statement safe in `if`/`else`.
#define RARE_EVENT_INC(name) \
    do { \
        PyInterpreterState *interp = PyInterpreterState_Get(); \
        RARE_EVENT_INTERP_INC(interp, name); \
    } while (0)

| 93 | + |
| 94 | +#ifdef __cplusplus |
| 95 | +} |
| 96 | +#endif |
| 97 | +#endif /* !Py_INTERNAL_STATS_H */ |