G2H_LEN_DW_TLB_INVALIDATE, 1);
}
-#define MAKE_INVAL_OP(type) ((type << XE_GUC_TLB_INVAL_TYPE_SHIFT) | \
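+/*
+ * Build the GuC TLB invalidation opcode: the invalidation @type, heavy
+ * mode, and a cache flush only when @flush_cache is true.
+ */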
+#define MAKE_INVAL_OP_FLUSH(type, flush_cache) ((type << XE_GUC_TLB_INVAL_TYPE_SHIFT) | \
XE_GUC_TLB_INVAL_MODE_HEAVY << XE_GUC_TLB_INVAL_MODE_SHIFT | \
- XE_GUC_TLB_INVAL_FLUSH_CACHE)
+ (flush_cache ? \
+ XE_GUC_TLB_INVAL_FLUSH_CACHE : 0))
+
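+/* Preserve the original behaviour: always request a cache flush. */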
+#define MAKE_INVAL_OP(type) MAKE_INVAL_OP_FLUSH(type, true)
static int send_tlb_inval_all(struct xe_tlb_inval *tlb_inval, u32 seqno)
{
ilog2(SZ_2M) + 1)));
xe_gt_assert(gt, IS_ALIGNED(start, length));
- action[len++] = MAKE_INVAL_OP(XE_GUC_TLB_INVAL_PAGE_SELECTIVE);
+ action[len++] = MAKE_INVAL_OP_FLUSH(XE_GUC_TLB_INVAL_PAGE_SELECTIVE, true);
action[len++] = asid;
action[len++] = lower_32_bits(start);
action[len++] = upper_32_bits(start);