* in the table.
*
* Note: There is an implicit assumption in the driver that compression and
- * coh_1way+ are mutually exclusive. If this is ever not true then userptr
- * and imported dma-buf from external device will have uncleared ccs state. See
- * also xe_bo_needs_ccs_pages().
+ * coh_1way+ are mutually exclusive for platforms prior to Xe3. Starting
+ * with Xe3, compression can be combined with coherency. When compression
+ * is combined with coherency, userptr and imported dma-buf from an
+ * external device would have uncleared ccs state, so such mappings are
+ * rejected at vm_bind time. See also xe_bo_needs_ccs_pages().
*/
#define XE2_PAT(no_promote, comp_en, l3clos, l3_policy, l4_policy, __coh_mode) \
{ \
REG_FIELD_PREP(XE2_L3_POLICY, l3_policy) | \
REG_FIELD_PREP(XE2_L4_POLICY, l4_policy) | \
REG_FIELD_PREP(XE2_COH_MODE, __coh_mode), \
- .coh_mode = (BUILD_BUG_ON_ZERO(__coh_mode && comp_en) || __coh_mode) ? \
- XE_COH_AT_LEAST_1WAY : XE_COH_NONE, \
+ .coh_mode = __coh_mode ? XE_COH_AT_LEAST_1WAY : XE_COH_NONE, \
.valid = 1 \
}
[31] = XE2_PAT( 0, 0, 3, 0, 3, 3 ),
};
+static const struct xe_pat_table_entry xe3_lpg_pat_table[] = {
+ [ 0] = XE2_PAT( 0, 0, 0, 0, 3, 0 ),
+ [ 1] = XE2_PAT( 0, 0, 0, 0, 3, 2 ),
+ [ 2] = XE2_PAT( 0, 0, 0, 0, 3, 3 ),
+ [ 3] = XE2_PAT( 0, 0, 0, 3, 3, 0 ),
+ [ 4] = XE2_PAT( 0, 0, 0, 3, 0, 2 ),
+ [ 5] = XE2_PAT( 0, 0, 0, 3, 3, 2 ),
+ [ 6] = XE2_PAT( 1, 0, 0, 1, 3, 0 ),
+ [ 7] = XE2_PAT( 0, 0, 0, 3, 0, 3 ),
+ [ 8] = XE2_PAT( 0, 0, 0, 3, 0, 0 ),
+ [ 9] = XE2_PAT( 0, 1, 0, 0, 3, 0 ),
+ [10] = XE2_PAT( 0, 1, 0, 3, 0, 0 ),
+ [11] = XE2_PAT( 1, 1, 0, 1, 3, 0 ),
+ [12] = XE2_PAT( 0, 1, 0, 3, 3, 0 ),
+ [13] = XE2_PAT( 0, 0, 0, 0, 0, 0 ),
+ [14] = XE2_PAT( 0, 1, 0, 0, 0, 0 ),
+ [15] = XE2_PAT( 1, 1, 0, 1, 1, 0 ),
+ [16] = XE2_PAT( 0, 1, 0, 0, 3, 2 ),
+ /* 17..19 are reserved; leave set to all 0's */
+ [20] = XE2_PAT( 0, 0, 1, 0, 3, 0 ),
+ [21] = XE2_PAT( 0, 1, 1, 0, 3, 0 ),
+ [22] = XE2_PAT( 0, 0, 1, 0, 3, 2 ),
+ [23] = XE2_PAT( 0, 0, 1, 0, 3, 3 ),
+ [24] = XE2_PAT( 0, 0, 2, 0, 3, 0 ),
+ [25] = XE2_PAT( 0, 1, 2, 0, 3, 0 ),
+ [26] = XE2_PAT( 0, 0, 2, 0, 3, 2 ),
+ [27] = XE2_PAT( 0, 0, 2, 0, 3, 3 ),
+ [28] = XE2_PAT( 0, 0, 3, 0, 3, 0 ),
+ [29] = XE2_PAT( 0, 1, 3, 0, 3, 0 ),
+ [30] = XE2_PAT( 0, 0, 3, 0, 3, 2 ),
+ [31] = XE2_PAT( 0, 0, 3, 0, 3, 3 ),
+};
/* Special PAT values programmed outside the main table */
static const struct xe_pat_table_entry xe2_pat_ats = XE2_PAT( 0, 0, 0, 0, 3, 3 );
static const struct xe_pat_table_entry xe2_pat_pta = XE2_PAT( 0, 0, 0, 0, 3, 0 );
void xe_pat_init_early(struct xe_device *xe)
{
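+ /* Default to no compressible WB PAT entry; platforms that provide one override this below. */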
+ xe->pat.idx[XE_CACHE_WB_COMPRESSION] = XE_PAT_INVALID_IDX;
if (GRAPHICS_VERx100(xe) == 3511) {
xe->pat.ops = &xe3p_xpc_pat_ops;
xe->pat.table = xe3p_xpc_pat_table;
xe->pat.idx[XE_CACHE_WB] = 2;
} else if (GRAPHICS_VER(xe) == 30 || GRAPHICS_VER(xe) == 20) {
xe->pat.ops = &xe2_pat_ops;
- xe->pat.table = xe2_pat_table;
+ if (GRAPHICS_VER(xe) == 30) {
+ xe->pat.table = xe3_lpg_pat_table;
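+ /* Entry 16 of xe3_lpg_pat_table is the compression-enabled WB entry. */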
+ xe->pat.idx[XE_CACHE_WB_COMPRESSION] = 16;
+ } else {
+ xe->pat.table = xe2_pat_table;
+ }
xe->pat.pat_ats = &xe2_pat_ats;
if (IS_DGFX(xe))
xe->pat.pat_pta = &xe2_pat_pta;
if (GRAPHICS_VER(xe) >= 20) {
drm_printf(p, "IDX[XE_CACHE_NONE_COMPRESSION] = %d\n",
xe->pat.idx[XE_CACHE_NONE_COMPRESSION]);
+ drm_printf(p, "IDX[XE_CACHE_WB_COMPRESSION] = %d\n",
+ xe->pat.idx[XE_CACHE_WB_COMPRESSION]);
}
return 0;
DRM_XE_VM_BIND_FLAG_CPU_ADDR_MIRROR;
u16 pat_index = (*bind_ops)[i].pat_index;
u16 coh_mode;
+ bool comp_en;
if (XE_IOCTL_DBG(xe, is_cpu_addr_mirror &&
(!xe_vm_in_fault_mode(vm) ||
pat_index = array_index_nospec(pat_index, xe->pat.n_entries);
(*bind_ops)[i].pat_index = pat_index;
coh_mode = xe_pat_index_get_coh_mode(xe, pat_index);
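+ /*
+  * comp_en is checked below so that userptr cannot be bound with a
+  * compression-enabled PAT index, which would leave its ccs state uncleared.
+  */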
+ comp_en = xe_pat_index_get_comp_en(xe, pat_index);
if (XE_IOCTL_DBG(xe, !coh_mode)) { /* hw reserved */
err = -EINVAL;
goto free_bind_ops;
op == DRM_XE_VM_BIND_OP_MAP_USERPTR) ||
XE_IOCTL_DBG(xe, coh_mode == XE_COH_NONE &&
op == DRM_XE_VM_BIND_OP_MAP_USERPTR) ||
+ XE_IOCTL_DBG(xe, comp_en &&
+ op == DRM_XE_VM_BIND_OP_MAP_USERPTR) ||
XE_IOCTL_DBG(xe, op == DRM_XE_VM_BIND_OP_MAP_USERPTR &&
!IS_ENABLED(CONFIG_DRM_GPUSVM)) ||
XE_IOCTL_DBG(xe, obj &&
u16 pat_index, u32 op, u32 bind_flags)
{
u16 coh_mode;
+ bool comp_en;
if (XE_IOCTL_DBG(xe, (bo->flags & XE_BO_FLAG_NO_COMPRESSION) &&
xe_pat_index_get_comp_en(xe, pat_index)))
return -EINVAL;
}
+ /*
+ * Ensure that imported buffer objects (dma-bufs) are not mapped with
+ * a PAT index that enables compression, since their ccs state would
+ * be left uncleared. See also xe_bo_needs_ccs_pages().
+ */
+ comp_en = xe_pat_index_get_comp_en(xe, pat_index);
+ if (XE_IOCTL_DBG(xe, bo->ttm.base.import_attach && comp_en))
+ return -EINVAL;
+
/* If a BO is protected it can only be mapped if the key is still valid */
if ((bind_flags & DRM_XE_VM_BIND_FLAG_CHECK_PXP) && xe_bo_is_protected(bo) &&
op != DRM_XE_VM_BIND_OP_UNMAP && op != DRM_XE_VM_BIND_OP_UNMAP_ALL)