/* If the eval breaker is set then stay in tier 1.
* This avoids any potentially infinite loops
* involving _RESUME_CHECK */
- if (IS_JIT_TRACING() || _Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) {
+ if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) {
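+ // Re-install the original opcode and oparg from the executor so tier 1
+ // re-executes this instruction and services the pending event.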
opcode = executor->vm_data.opcode;
oparg = (oparg & ~255) | executor->vm_data.oparg;
next_instr = this_instr;
}
op(_FOR_ITER_TIER_TWO, (iter, null_or_index -- iter, null_or_index, next)) {
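+ // Advance past FOR_ITER and its inline cache entries before calling into the iterator.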
+ TIER2_JUMPBY(1 + INLINE_CACHE_ENTRIES_FOR_ITER);
_PyStackRef item = _PyForIter_VirtualIteratorNext(tstate, frame, iter, &null_or_index);
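+ // An invalid item signals either exhaustion or an error; PyStackRef_IsError tells them apart.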
if (!PyStackRef_IsValid(item)) {
if (PyStackRef_IsError(item)) {
assert(executor->vm_data.code == code);
assert(executor->vm_data.valid);
assert(tstate->current_executor == NULL);
- if (IS_JIT_TRACING() || _Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) {
+ if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) {
opcode = executor->vm_data.opcode;
oparg = (oparg & ~255) | executor->vm_data.oparg;
next_instr = this_instr;
target = INSTR_IP(target_instr, old_code);
// One for possible _DEOPT, one because _CHECK_VALIDITY itself might _DEOPT
- max_length-=2;
+ max_length -= 2;
if ((uint16_t)oparg != (uint64_t)oparg) {
goto full;
}
return 1;
}
- // Unsupported opcodes
- if (opcode == WITH_EXCEPT_START || opcode == RERAISE || opcode == CLEANUP_THROW) {
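+ // ENTER_EXECUTOR hides the original opcode behind an installed executor;
+ // bail out rather than trace through it.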
+ if (opcode == ENTER_EXECUTOR) {
goto full;
}
- RESERVE_RAW(1, "_CHECK_VALIDITY");
- ADD_TO_TRACE(_CHECK_VALIDITY, 0, 0, target);
-
- if (!OPCODE_HAS_NO_SAVE_IP(opcode)) {
- RESERVE_RAW(2, "_SET_IP");
- ADD_TO_TRACE(_SET_IP, 0, (uintptr_t)target_instr, target);
+ // Unsupported opcodes
+ if (opcode == WITH_EXCEPT_START || opcode == RERAISE || opcode == CLEANUP_THROW) {
+ goto full;
}
bool needs_guard_ip = _PyOpcode_NeedsGuardIp[opcode] &&
!(opcode == JUMP_BACKWARD_NO_INTERRUPT || opcode == JUMP_BACKWARD || opcode == JUMP_BACKWARD_JIT) &&
!(opcode == POP_JUMP_IF_TRUE || opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_NONE || opcode == POP_JUMP_IF_NOT_NONE);
- if (needs_guard_ip) {
- RESERVE_RAW(1, "_GUARD_IP");
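+ // Reserve space for the whole expansion and its bookkeeping uops in one
+ // shot, instead of piecemeal RESERVE_RAW calls.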
+ const struct opcode_macro_expansion *expansion = &_PyOpcode_macro_expansion[opcode];
+ RESERVE_RAW(expansion->nuops + needs_guard_ip + 3, "uop and various checks");
+
+ ADD_TO_TRACE(_CHECK_VALIDITY, 0, 0, target);
+
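+ // _SET_IP keeps frame->instr_ptr accurate for instructions that can deopt or escape.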
+ if (!OPCODE_HAS_NO_SAVE_IP(opcode)) {
+ ADD_TO_TRACE(_SET_IP, 0, (uintptr_t)target_instr, target);
}
/* Special case the first instruction,
 * so that we can guarantee forward progress */
if (OPCODE_HAS_EXIT(opcode)) {
// Make space for side exit and final _EXIT_TRACE:
- RESERVE_RAW(2, "_EXIT_TRACE");
max_length--;
}
if (OPCODE_HAS_ERROR(opcode)) {
// Make space for error stub and final _EXIT_TRACE:
- RESERVE_RAW(2, "_ERROR_POP_N");
max_length--;
}
int bitcount = counter & 1;
int jump_likely = bitcount;
uint32_t uopcode = BRANCH_TO_GUARD[opcode - POP_JUMP_IF_FALSE][jump_likely];
- ADD_TO_TRACE(uopcode, 0, 0, INSTR_IP(target_instr, old_code));
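+ // The branch destination sits oparg code units past the instruction that
+ // follows the jump and its caches.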
+ _Py_CODEUNIT *next_instr = target_instr + 1 + _PyOpcode_Caches[_PyOpcode_Deopt[opcode]];
+ _Py_CODEUNIT *false_target = next_instr + oparg;
+ ADD_TO_TRACE(uopcode, 0, 0, INSTR_IP(false_target, old_code));
break;
}
case JUMP_BACKWARD_JIT:
if (uop == _TIER2_RESUME_CHECK) {
target = next_inst;
}
-#ifdef Py_DEBUG
- else {
- uint32_t jump_target = next_inst + oparg;
- assert(_Py_GetBaseCodeUnit(old_code, jump_target).op.code == END_FOR);
- assert(_Py_GetBaseCodeUnit(old_code, jump_target+1).op.code == POP_ITER);
- }
-#endif
break;
case OPERAND1_1:
assert(trace[trace_length-1].opcode == uop);
if (!is_terminator(&tstate->interp->jit_tracer_code_buffer[trace_length-1])) {
// Undo the last few instructions.
trace_length = tstate->interp->jit_tracer_code_curr_size;
+ // We previously reserved one.
+ max_length += 1;
ADD_TO_TRACE(_EXIT_TRACE, 0, 0, target);
}
tstate->interp->jit_tracer_code_curr_size = trace_length;
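+ // Optimization is enabled unless PYTHON_UOPS_OPTIMIZE is explicitly set to 0.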
char *env_var = Py_GETENV("PYTHON_UOPS_OPTIMIZE");
bool is_noopt = true;
if (env_var == NULL || *env_var == '\0' || *env_var > '0') {
- is_noopt = true;
+ is_noopt = false;
}
int curr_stackentries = tstate->interp->jit_tracer_initial_stack_depth;
int length = interp->jit_tracer_code_curr_size;
+ // Trace too short, don't bother.
+ if (length <= 8) {
+ return 0;
+ }
assert(length > 0);
assert(length < UOP_MAX_TRACE_LENGTH);
OPT_STAT_INC(traces_created);