self.assertEqual(res, TIER2_THRESHOLD)
self.assertIsNotNone(ex)
uops = get_opnames(ex)
- self.assertIn("_LOAD_CONST_INLINE_BORROW", uops)
+ self.assertGreaterEqual(count_ops(ex, "_LOAD_CONST_INLINE_BORROW"), 2)
self.assertNotIn("_BINARY_OP_SUBSCR_DICT", uops)
def test_binary_subscr_frozendict_const_fold(self):
self.assertEqual(res, TIER2_THRESHOLD)
self.assertIsNotNone(ex)
uops = get_opnames(ex)
+ self.assertGreaterEqual(count_ops(ex, "_LOAD_CONST_INLINE_BORROW"), 3)
# lookup result is folded to constant 1, so comparison is optimized away
self.assertNotIn("_COMPARE_OP_INT", uops)
self.assertEqual(res, TIER2_THRESHOLD)
self.assertIsNotNone(ex)
uops = get_opnames(ex)
+ self.assertGreaterEqual(count_ops(ex, "_LOAD_CONST_INLINE_BORROW"), 3)
self.assertNotIn("_CONTAINS_OP_SET", uops)
+    def test_contains_op_frozendict_const_fold(self):
+        """'x' in FROZEN_DICT_CONST must be constant-folded by the tier-2 optimizer.
+
+        Both operands of the containment test are constants, so the optimizer
+        should evaluate the lookup at trace-optimization time and replace the
+        specialized containment uop with an inline borrowed constant.
+        """
+        def testfunc(n):
+            x = 0
+            for _ in range(n):
+                if 'x' in FROZEN_DICT_CONST:
+                    x += 1
+            return x
+
+        res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD)
+        # The branch was taken on every iteration (res == n), and a tier-2
+        # executor was actually created for the loop.
+        self.assertEqual(res, TIER2_THRESHOLD)
+        self.assertIsNotNone(ex)
+        uops = get_opnames(ex)
+        # Folding materializes results as inline borrowed constants, and the
+        # specialized dict-containment uop must be gone from the trace.
+        self.assertGreaterEqual(count_ops(ex, "_LOAD_CONST_INLINE_BORROW"), 3)
+        self.assertNotIn("_CONTAINS_OP_DICT", uops)
+
+    def test_not_contains_op_frozendict_const_fold(self):
+        """'z' not in FROZEN_DICT_CONST must be constant-folded as well.
+
+        Same as the positive-containment test above, but exercises the
+        negated form (oparg-inverted result) of the containment uop.
+        """
+        def testfunc(n):
+            x = 0
+            for _ in range(n):
+                if 'z' not in FROZEN_DICT_CONST:
+                    x += 1
+            return x
+
+        res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD)
+        # The branch was taken on every iteration (res == n), and a tier-2
+        # executor was actually created for the loop.
+        self.assertEqual(res, TIER2_THRESHOLD)
+        self.assertIsNotNone(ex)
+        uops = get_opnames(ex)
+        # Folding materializes results as inline borrowed constants, and the
+        # specialized dict-containment uop must be gone from the trace.
+        self.assertGreaterEqual(count_ops(ex, "_LOAD_CONST_INLINE_BORROW"), 3)
+        self.assertNotIn("_CONTAINS_OP_DICT", uops)
+
def test_binary_subscr_list_slice(self):
def testfunc(n):
x = 0
b = sym_new_type(ctx, &PyBool_Type);
l = left;
r = right;
+ if (sym_is_not_container(left) &&
+ sym_matches_type(right, &PyFrozenDict_Type)) {
+ REPLACE_OPCODE_IF_EVALUATES_PURE(left, right, b);
+ }
}
op(_LOAD_CONST, (-- value)) {
b = sym_new_type(ctx, &PyBool_Type);
l = left;
r = right;
+ if (sym_is_not_container(left) &&
+ sym_matches_type(right, &PyFrozenDict_Type)) {
+ if (
+ sym_is_safe_const(ctx, left) &&
+ sym_is_safe_const(ctx, right)
+ ) {
+ JitOptRef left_sym = left;
+ JitOptRef right_sym = right;
+ _PyStackRef left = sym_get_const_as_stackref(ctx, left_sym);
+ _PyStackRef right = sym_get_const_as_stackref(ctx, right_sym);
+ _PyStackRef b_stackref;
+ _PyStackRef l_stackref;
+ _PyStackRef r_stackref;
+ /* Start of uop copied from bytecodes for constant evaluation */
+ PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
+ PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
+ assert(PyAnyDict_CheckExact(right_o));
+ STAT_INC(CONTAINS_OP, hit);
+ int res = PyDict_Contains(right_o, left_o);
+ if (res < 0) {
+ JUMP_TO_LABEL(error);
+ }
+ b_stackref = (res ^ oparg) ? PyStackRef_True : PyStackRef_False;
+ l_stackref = left;
+ r_stackref = right;
+ /* End of uop copied from bytecodes for constant evaluation */
+ (void)l_stackref;
+ (void)r_stackref;
+ b = sym_new_const_steal(ctx, PyStackRef_AsPyObjectSteal(b_stackref));
+ if (sym_is_const(ctx, b)) {
+ PyObject *result = sym_get_const(ctx, b);
+ if (_Py_IsImmortal(result)) {
+ // Replace with _LOAD_CONST_INLINE_BORROW + _SWAP + _SWAP since we have two inputs and an immortal result
+ ADD_OP(_LOAD_CONST_INLINE_BORROW, 0, (uintptr_t)result);
+ ADD_OP(_SWAP, 3, 0);
+ ADD_OP(_SWAP, 2, 0);
+ }
+ }
+ CHECK_STACK_BOUNDS(1);
+ stack_pointer[-2] = b;
+ stack_pointer[-1] = l;
+ stack_pointer[0] = r;
+ stack_pointer += 1;
+ ASSERT_WITHIN_STACK_BOUNDS(__FILE__, __LINE__);
+ break;
+ }
+ }
CHECK_STACK_BOUNDS(1);
stack_pointer[-2] = b;
stack_pointer[-1] = l;