IRRef loopref; /* Reference of LOOP instruction (or 0). */
BCReg topslot; /* Number of slots for stack check (unless 0). */
- MSize gcsteps; /* Accumulated number of GC steps (per section). */
+ int32_t gcsteps; /* Accumulated number of GC steps (per section). */
GCtrace *T; /* Trace to assemble. */
GCtrace *parent; /* Parent trace (or NULL). */
asm_gencall(as, ci, args);
}
+static void asm_gc_check(ASMState *as);
+
+/* Explicit GC step. */
+static void asm_gcstep(ASMState *as, IRIns *ir)
+{
+  IRIns *ira;
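+  /* Count the allocations preceding this GC step, if their results are used. */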
+  for (ira = IR(as->stopins+1); ira < ir; ira++)
+    if ((ira->o == IR_TNEW || ira->o == IR_TDUP ||
+         (LJ_HASFFI && (ira->o == IR_CNEW || ira->o == IR_CNEWI))) &&
+        ra_used(ira))
+      as->gcsteps++;
+  if (as->gcsteps)
+    asm_gc_check(as);
+  as->gcsteps = 0x80000000;  /* Negative value prevents the implicit GC check further up. */
+}
+
/* -- PHI and loop handling ----------------------------------------------- */
/* Break a PHI cycle by renaming to a free register (evict if needed). */
}
}
-static void asm_gc_check(ASMState *as);
static void asm_loop_fixup(ASMState *as);
/* Middle part of a loop. */
/* Emit head of trace. */
RA_DBG_REF();
checkmclim(as);
- if (as->gcsteps) {
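+ /* gcsteps goes negative after an explicit IR_GCSTEP, which suppresses this implicit check. */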
+ if (as->gcsteps > 0) {
as->curins = as->T->snap[0].ref;
asm_snap_prep(as); /* The GC check is a guard. */
asm_gc_check(as);
asm_gencall(as, ci, args);
tmp1 = ra_releasetmp(as, ASMREF_TMP1);
tmp2 = ra_releasetmp(as, ASMREF_TMP2);
- emit_loadi(as, tmp2, (int32_t)as->gcsteps);
+ emit_loadi(as, tmp2, as->gcsteps);
/* Jump around GC step if GC total < GC threshold. */
emit_branch(as, ARMF_CC(ARMI_B, CC_LS), l_end);
emit_nm(as, ARMI_CMP, RID_TMP, tmp2);
case IR_USE: ra_alloc1(as, ir->op1, RSET_GPR); break;
case IR_PHI: asm_phi(as, ir); break;
case IR_HIOP: asm_hiop(as, ir); break;
+ case IR_GCSTEP: asm_gcstep(as, ir); break;
/* Guarded assertions. */
case IR_EQ: case IR_NE:
asm_gencall(as, ci, args);
emit_tsi(as, MIPSI_ADDIU, ra_releasetmp(as, ASMREF_TMP1), RID_JGL, -32768);
tmp = ra_releasetmp(as, ASMREF_TMP2);
- emit_loadi(as, tmp, (int32_t)as->gcsteps);
+ emit_loadi(as, tmp, as->gcsteps);
/* Jump around GC step if GC total < GC threshold. */
emit_branch(as, MIPSI_BNE, RID_TMP, RID_ZERO, l_end);
emit_dst(as, MIPSI_SLTU, RID_TMP, RID_TMP, tmp);
ra_alloc1(as, ir->op1, irt_isfp(ir->t) ? RSET_FPR : RSET_GPR); break;
case IR_PHI: asm_phi(as, ir); break;
case IR_HIOP: asm_hiop(as, ir); break;
+ case IR_GCSTEP: asm_gcstep(as, ir); break;
/* Guarded assertions. */
case IR_EQ: case IR_NE: asm_compeq(as, ir); break;
asm_gencall(as, ci, args);
emit_tai(as, PPCI_ADDI, ra_releasetmp(as, ASMREF_TMP1), RID_JGL, -32768);
tmp = ra_releasetmp(as, ASMREF_TMP2);
- emit_loadi(as, tmp, (int32_t)as->gcsteps);
+ emit_loadi(as, tmp, as->gcsteps);
/* Jump around GC step if GC total < GC threshold. */
emit_condbranch(as, PPCI_BC|PPCF_Y, CC_LT, l_end);
emit_ab(as, PPCI_CMPLW, RID_TMP, tmp);
ra_alloc1(as, ir->op1, irt_isfp(ir->t) ? RSET_FPR : RSET_GPR); break;
case IR_PHI: asm_phi(as, ir); break;
case IR_HIOP: asm_hiop(as, ir); break;
+ case IR_GCSTEP: asm_gcstep(as, ir); break;
/* Guarded assertions. */
case IR_EQ: case IR_NE:
asm_gencall(as, ci, args);
tmp = ra_releasetmp(as, ASMREF_TMP1);
emit_loada(as, tmp, J2G(as->J));
- emit_loadi(as, ra_releasetmp(as, ASMREF_TMP2), (int32_t)as->gcsteps);
+ emit_loadi(as, ra_releasetmp(as, ASMREF_TMP2), as->gcsteps);
/* Jump around GC step if GC total < GC threshold. */
emit_sjcc(as, CC_B, l_end);
emit_opgl(as, XO_ARITH(XOg_CMP), tmp, gc.threshold);
ra_alloc1(as, ir->op1, irt_isfp(ir->t) ? RSET_FPR : RSET_GPR); break;
case IR_PHI: asm_phi(as, ir); break;
case IR_HIOP: asm_hiop(as, ir); break;
+ case IR_GCSTEP: asm_gcstep(as, ir); break;
/* Guarded assertions. */
case IR_LT: case IR_GE: case IR_LE: case IR_GT:
/* Miscellaneous ops. */ \
_(NOP, N , ___, ___) \
_(BASE, N , lit, lit) \
+ _(GCSTEP, S , ___, ___) \
_(HIOP, S , ref, ref) \
_(LOOP, S , ___, ___) \
_(USE, S , ref, ___) \