else
*up = u0 % u1;
break;
- case MM_pow: *up = lj_carith_powi64(u0, u1, (id == CTID_UINT64)); break;
+ case MM_pow:
+ if (id == CTID_INT64)
+ *up = (uint64_t)lj_carith_powi64((int64_t)u0, (int64_t)u1);
+ else
+ *up = lj_carith_powu64(u0, u1);
+ break;
case MM_unm: *up = (uint64_t)-(int64_t)u0; break;
default: lua_assert(0); break;
}
/* -- 64 bit integer arithmetic helpers ----------------------------------- */
-/* 64 bit integer x^k. */
-uint64_t lj_carith_powi64(uint64_t x, uint64_t k, int isunsigned)
+/* Unsigned 64 bit x^k. */
+uint64_t lj_carith_powu64(uint64_t x, uint64_t k)
{
- uint64_t y = 0;
+ uint64_t y;
if (k == 0)
return 1;
- if (!isunsigned) {
- if ((int64_t)k < 0) {
- if (x == 0)
- return U64x(7fffffff,ffffffff);
- else if (x == 1)
- return 1;
- else if ((int64_t)x == -1)
- return (k & 1) ? -1 : 1;
- else
- return 0;
- }
- }
for (; (k & 1) == 0; k >>= 1) x *= x;
y = x;
if ((k >>= 1) != 0) {
return y;
}
+/* Signed 64 bit x^k. */
+int64_t lj_carith_powi64(int64_t x, int64_t k)
+{
+  /* x^0 == 1 for all x, including 0^0 (Lua convention). */
+  if (k == 0)
+    return 1;
+  if (k < 0) {
+    /* Negative exponent: result truncated toward zero, except:
+    ** 0^-k "overflows" to INT64_MAX, (+-1)^-k stays +-1 by parity.
+    */
+    if (x == 0)
+      return U64x(7fffffff,ffffffff);
+    else if (x == 1)
+      return 1;
+    else if (x == -1)
+      return (k & 1) ? -1 : 1;
+    else
+      return 0;  /* |x| >= 2, so |x^-k| < 1 truncates to 0. */
+  }
+  /* Non-negative exponent: same bit pattern as the unsigned power
+  ** (wraps modulo 2^64 on overflow, which is well-defined for uint64_t).
+  */
+  return (int64_t)lj_carith_powu64((uint64_t)x, (uint64_t)k);
+}
+
+
#endif
LJ_FUNC int lj_carith_op(lua_State *L, MMS mm);
-LJ_FUNC uint64_t lj_carith_powi64(uint64_t x, uint64_t k, int isunsigned);
+LJ_FUNC uint64_t lj_carith_powu64(uint64_t x, uint64_t k);
+LJ_FUNC int64_t lj_carith_powi64(int64_t x, int64_t k);
#endif
J->postproc = LJ_POST_FIXGUARD;
return TREF_TRUE;
} else if (mm == MM_pow) {
- tr = lj_ir_call(J, IRCALL_lj_carith_powi64, sp[0], sp[1],
- lj_ir_kint(J, (int)dt-(int)IRT_I64));
+ tr = lj_ir_call(J, dt == IRT_I64 ? IRCALL_lj_carith_powi64 :
+ IRCALL_lj_carith_powu64, sp[0], sp[1]);
} else {
if (mm == MM_div || mm == MM_mod)
return 0; /* NYI: integer div, mod. */
#define CCI_CASTU64 0x0200 /* Cast u64 result to number. */
#define CCI_NOFPRCLOBBER 0x0400 /* Does not clobber any FPRs. */
#define CCI_FASTCALL 0x0800 /* Fastcall convention. */
+#define CCI_STACK64 0x1000 /* Needs 64 bits per argument. */
/* Function definitions for CALL* instructions. */
#if LJ_HASFFI
#define IRCALLDEF_FFI(_) \
- _(lj_carith_powi64, 3, N, U64, CCI_NOFPRCLOBBER)
+ _(lj_carith_powi64, 2, N, I64, CCI_STACK64|CCI_NOFPRCLOBBER) \
+ _(lj_carith_powu64, 2, N, U64, CCI_STACK64|CCI_NOFPRCLOBBER)
#else
#define IRCALLDEF_FFI(_)
#endif