* decoders in the BIOS would prevent a capable kernel (or
* other operating systems) from shutting down auto-generated
* regions and managing resources dynamically.
+ *
+ * Indicate that Normalized Addressing is enabled.
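+ * Regions assembled with this decoder inherit the flag and have
+ * SPA conversion disabled.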
*/
cxld->flags |= CXL_DECODER_F_LOCK;
+ cxld->flags |= CXL_DECODER_F_NORMALIZED_ADDRESSING;
ctx->hpa_range = hpa_range;
ctx->interleave_ways = ways;
return 0;
}
-static void cxl_region_set_lock(struct cxl_region *cxlr,
- struct cxl_decoder *cxld)
+static void cxl_region_setup_flags(struct cxl_region *cxlr,
+ struct cxl_decoder *cxld)
{
- if (!test_bit(CXL_DECODER_F_LOCK, &cxld->flags))
- return;
+ if (cxld->flags & CXL_DECODER_F_LOCK) {
+ set_bit(CXL_REGION_F_LOCK, &cxlr->flags);
+ clear_bit(CXL_REGION_F_NEEDS_RESET, &cxlr->flags);
+ }
- set_bit(CXL_REGION_F_LOCK, &cxlr->flags);
- clear_bit(CXL_REGION_F_NEEDS_RESET, &cxlr->flags);
+ if (cxld->flags & CXL_DECODER_F_NORMALIZED_ADDRESSING)
+ set_bit(CXL_REGION_F_NORMALIZED_ADDRESSING, &cxlr->flags);
}
/**
}
}
- cxl_region_set_lock(cxlr, cxld);
+ cxl_region_setup_flags(cxlr, cxld);
rc = cxl_rr_ep_add(cxl_rr, cxled);
if (rc) {
device_set_pm_not_required(dev);
dev->bus = &cxl_bus_type;
dev->type = &cxl_region_type;
- cxl_region_set_lock(cxlr, &cxlrd->cxlsd.cxld);
+ cxl_region_setup_flags(cxlr, &cxlrd->cxlsd.cxld);
return cxlr;
}
u8 eiw = 0;
int pos;
+ /*
+ * Conversion between SPA and DPA is not supported in
+ * Normalized Address mode.
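+ * Return ULLONG_MAX to indicate that no HPA is available.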
+ */
+ if (test_bit(CXL_REGION_F_NORMALIZED_ADDRESSING, &cxlr->flags))
+ return ULLONG_MAX;
+
for (int i = 0; i < p->nr_targets; i++) {
if (cxlmd == cxled_to_memdev(p->targets[i])) {
cxled = p->targets[i];
struct cxl_region_params *p = &cxlr->params;
struct dentry *dentry;
+ /*
+ * Do not enable poison injection in Normalized Address mode.
+ * Poison injection requires conversion between SPA and DPA, which
+ * is not supported in this mode.
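+ * Returning 0 skips attribute creation without signaling an error.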
+ */
+ if (test_bit(CXL_REGION_F_NORMALIZED_ADDRESSING, &cxlr->flags))
+ return 0;
+
/* Create poison attributes if all memdevs support the capabilities */
for (int i = 0; i < p->nr_targets; i++) {
struct cxl_endpoint_decoder *cxled = p->targets[i];
#define CXL_DECODER_F_TYPE3 BIT(3)
#define CXL_DECODER_F_LOCK BIT(4)
#define CXL_DECODER_F_ENABLE BIT(5)
-#define CXL_DECODER_F_MASK GENMASK(5, 0)
+#define CXL_DECODER_F_NORMALIZED_ADDRESSING BIT(6)
+#define CXL_DECODER_F_MASK GENMASK(6, 0)
enum cxl_decoder_type {
CXL_DECODER_DEVMEM = 2,
*/
#define CXL_REGION_F_LOCK 2
+/*
+ * Indicate Normalized Addressing. Used to disable SPA conversion when
+ * HPA != SPA and no address translation callback handler exists. The
+ * flag is needed by AMD Zen5 platforms.
+ */
+#define CXL_REGION_F_NORMALIZED_ADDRESSING 3
+
/**
* struct cxl_region - CXL region
* @dev: This region's device