lj_opt_dce.o: lj_opt_dce.c lj_obj.h lua.h luaconf.h lj_def.h lj_arch.h \
lj_ir.h lj_jit.h lj_iropt.h
lj_opt_fold.o: lj_opt_fold.c lj_obj.h lua.h luaconf.h lj_def.h lj_arch.h \
- lj_str.h lj_ir.h lj_jit.h lj_iropt.h lj_trace.h lj_dispatch.h lj_bc.h \
- lj_traceerr.h lj_vm.h lj_folddef.h
+ lj_str.h lj_tab.h lj_ir.h lj_jit.h lj_iropt.h lj_trace.h lj_dispatch.h \
+ lj_bc.h lj_traceerr.h lj_vm.h lj_folddef.h
lj_opt_loop.o: lj_opt_loop.c lj_obj.h lua.h luaconf.h lj_def.h lj_arch.h \
lj_err.h lj_errmsg.h lj_str.h lj_ir.h lj_jit.h lj_iropt.h lj_trace.h \
lj_dispatch.h lj_bc.h lj_traceerr.h lj_snap.h lj_vm.h
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_uload(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_fload(jit_State *J);
LJ_FUNC TRef LJ_FASTCALL lj_opt_fwd_tab_len(jit_State *J);
+LJ_FUNC int LJ_FASTCALL lj_opt_fwd_href_nokey(jit_State *J);
LJ_FUNC int lj_opt_fwd_wasnonnil(jit_State *J, IROpT loadop, IRRef xref);
/* Dead-store elimination. */
#if LJ_HASJIT
#include "lj_str.h"
+#include "lj_tab.h"
#include "lj_ir.h"
#include "lj_jit.h"
#include "lj_iropt.h"
LJFOLD(ALOAD any)
LJFOLDX(lj_opt_fwd_aload)
+/* An HLOAD of a KPTR can only result from the HREF folds below and always
+** refers to niltv. It must be eliminated here, since neither load
+** forwarding nor the backend can handle it. */
+LJFOLD(HLOAD KPTR)
+LJFOLDF(kfold_hload_kptr)
+{
+ UNUSED(J);
+ lua_assert(ir_kptr(fleft) == niltvg(J2G(J)));
+ return TREF_NIL;
+}
+
LJFOLD(HLOAD any)
LJFOLDX(lj_opt_fwd_hload)
return EMITFOLD;
}
+LJFOLD(HREF TNEW any)
+LJFOLDF(fwd_href_tnew)
+{
+ if (lj_opt_fwd_href_nokey(J))
+ return lj_ir_kptr(J, niltvg(J2G(J)));
+ return NEXTFOLD;
+}
+
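+/* Illustrative example: for a fragment like "local t = {}; local x = t.k"
+** the table is created by a TNEW in the same trace. If no conflicting
+** store is found, fwd_href_tnew folds the HREF to a KPTR of niltv, and
+** kfold_hload_kptr above then folds the dependent HLOAD to a nil
+** constant, eliminating the hash lookup from the trace entirely.
+*/
+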
+LJFOLD(HREF TDUP KPRI)
+LJFOLD(HREF TDUP KGC)
+LJFOLD(HREF TDUP KNUM)
+LJFOLDF(fwd_href_tdup)
+{
+ TValue keyv;
+ lj_ir_kvalue(J->L, &keyv, fright);
+ if (lj_tab_get(J->L, ir_ktab(IR(fleft->op1)), &keyv) == niltvg(J2G(J)) &&
+ lj_opt_fwd_href_nokey(J))
+ return lj_ir_kptr(J, niltvg(J2G(J)));
+ return NEXTFOLD;
+}
+
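+/* Illustrative example: for "local t = {a = 1}; local x = t.b" the TDUP
+** template table is a compile-time constant. If lj_tab_get on the
+** template yields nil for the constant key and no conflicting store
+** intervenes, the HREF folds to niltv just like the TNEW case above.
+*/
+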
/* We can safely FOLD/CSE array/hash refs and field loads, since there
** are no corresponding stores. But NEWREF may invalidate all of them.
** Lacking better disambiguation for table references, these optimizations
/* Some local macros to save typing. Undef'd at the end. */
#define IR(ref) (&J->cur.ir[(ref)])
#define fins (&J->fold.ins)
+#define fright (&J->fold.right)
/*
** Caveat #1: return value is not always a TRef -- only use with tref_ref().
return EMITFOLD;
}
+/* Check whether HREF of TNEW/TDUP can be folded to niltv. */
+int LJ_FASTCALL lj_opt_fwd_href_nokey(jit_State *J)
+{
+ IRRef lim = fins->op1; /* Search limit. */
+ IRRef ref;
+
+ /* The key for an ASTORE may end up in the hash part after a NEWREF. */
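+  /* (A NEWREF may grow and rehash the table, which can move an integer
+  ** key from the array part into the hash part. Any ASTORE issued before
+  ** the most recent NEWREF may therefore alias this HREF.) */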
+ if (irt_isnum(fright->t) && J->chain[IR_NEWREF] > lim) {
+ ref = J->chain[IR_ASTORE];
+ while (ref > lim) {
+ if (ref < J->chain[IR_NEWREF])
+ return 0; /* Conflict. */
+ ref = IR(ref)->prev;
+ }
+ }
+
+ /* Search for conflicting stores. */
+ ref = J->chain[IR_HSTORE];
+ while (ref > lim) {
+ IRIns *store = IR(ref);
+ if (aa_ahref(J, fins, IR(store->op1)) != ALIAS_NO)
+ return 0; /* Conflict. */
+ ref = store->prev;
+ }
+
+ return 1; /* No conflict. Can fold to niltv. */
+}
+
/* ASTORE/HSTORE elimination. */
TRef LJ_FASTCALL lj_opt_dse_ahstore(jit_State *J)
{
#undef IR
#undef fins
+#undef fright
#endif