RLCV ucode is required when firmware is loaded via the frontdoor (PSP) path.
1. Add two RLCV ucode IDs:
AMDGPU_UCODE_ID_RLC_IRAM_1 and AMDGPU_UCODE_ID_RLC_DRAM_1
2. Add struct rlc_firmware_header_v2_5 describing the offsets and sizes of the above two RLCV ucodes (see the sketch after this list).
3. Add two fw types to the psp_gfx_if interface used to interact with the ASP:
GFX_FW_TYPE_RLX6_UCODE_CORE1 - RLCV IRAM
GFX_FW_TYPE_RLX6_DRAM_BOOT_CORE1 - RLCV DRAM BOOT
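For reference, the new header extends v2.2 by embedding it as its first member, which is what lets the header-print path recover the v2.5 view with container_of(). Below is a minimal standalone sketch of that pattern; the two-field rlc_firmware_header_v2_2 stand-in and the local container_of macro are simplifications for illustration only, and only the rlc_1_* field names come from this patch:

  #include <stddef.h>
  #include <stdint.h>
  #include <stdio.h>

  /* simplified stand-in for the kernel's container_of() */
  #define container_of(ptr, type, member) \
          ((type *)((char *)(ptr) - offsetof(type, member)))

  /* stand-in with only two fields; the real v2_2 header has more */
  struct rlc_firmware_header_v2_2 {
          uint32_t rlc_iram_ucode_size_bytes;
          uint32_t rlc_iram_ucode_offset_bytes;
  };

  /* mirrors the new header: v2_2 is embedded as the first member */
  struct rlc_firmware_header_v2_5 {
          struct rlc_firmware_header_v2_2 v2_2;
          uint32_t rlc_1_iram_ucode_size_bytes;
          uint32_t rlc_1_iram_ucode_offset_bytes;
          uint32_t rlc_1_dram_ucode_size_bytes;
          uint32_t rlc_1_dram_ucode_offset_bytes;
  };

  int main(void)
  {
          struct rlc_firmware_header_v2_5 hdr = {
                  .rlc_1_iram_ucode_size_bytes = 0x2000, /* illustrative */
          };
          const struct rlc_firmware_header_v2_2 *v2_2 = &hdr.v2_2;
          /* since v2_2 is the first member, the v2.5 view is recovered in place */
          const struct rlc_firmware_header_v2_5 *v2_5 =
                  container_of(v2_2, struct rlc_firmware_header_v2_5, v2_2);

          printf("rlc_1 iram size: 0x%x bytes\n",
                 (unsigned int)v2_5->rlc_1_iram_ucode_size_bytes);
          return 0;
  }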
Signed-off-by: Feifei Xu <Feifei.Xu@amd.com>
Reviewed-by: Hawking Zhang <Hawking.Zhang@amd.com>
Signed-off-by: Alex Deucher <alexander.deucher@amd.com>
case AMDGPU_UCODE_ID_RLC_DRAM:
*type = GFX_FW_TYPE_RLC_DRAM_BOOT;
break;
+ case AMDGPU_UCODE_ID_RLC_IRAM_1:
+ *type = GFX_FW_TYPE_RLX6_UCODE_CORE1;
+ break;
+ case AMDGPU_UCODE_ID_RLC_DRAM_1:
+ *type = GFX_FW_TYPE_RLX6_DRAM_BOOT_CORE1;
+ break;
case AMDGPU_UCODE_ID_GLOBAL_TAP_DELAYS:
*type = GFX_FW_TYPE_GLOBAL_TAP_DELAYS;
break;
amdgpu_ucode_print_gfx_hdr(hdr);
break;
case AMDGPU_UCODE_ID_RLC_G:
+ case AMDGPU_UCODE_ID_RLC_DRAM_1:
+ case AMDGPU_UCODE_ID_RLC_IRAM_1:
hdr = (struct common_firmware_header *)adev->gfx.rlc_fw->data;
amdgpu_ucode_print_rlc_hdr(hdr);
break;
}
}
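+/*
+ * Parse the v2.5 RLC header: locate the core-1 (RLCV) IRAM/DRAM sections and,
+ * when firmware is loaded through the PSP (frontdoor), register them in the
+ * ucode list so they are included in the total firmware size.
+ */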
+static void amdgpu_gfx_rlc_init_microcode_v2_5(struct amdgpu_device *adev)
+{
+ const struct rlc_firmware_header_v2_5 *rlc_hdr;
+ struct amdgpu_firmware_info *info;
+
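+ /* core-1 (RLCV) IRAM/DRAM images are located via the v2.5 header offsets */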
+ rlc_hdr = (const struct rlc_firmware_header_v2_5 *)adev->gfx.rlc_fw->data;
+ adev->gfx.rlc.rlc_1_iram_ucode_size_bytes =
+ le32_to_cpu(rlc_hdr->rlc_1_iram_ucode_size_bytes);
+ adev->gfx.rlc.rlc_1_iram_ucode = (u8 *)rlc_hdr +
+ le32_to_cpu(rlc_hdr->rlc_1_iram_ucode_offset_bytes);
+ adev->gfx.rlc.rlc_1_dram_ucode_size_bytes =
+ le32_to_cpu(rlc_hdr->rlc_1_dram_ucode_size_bytes);
+ adev->gfx.rlc.rlc_1_dram_ucode = (u8 *)rlc_hdr +
+ le32_to_cpu(rlc_hdr->rlc_1_dram_ucode_offset_bytes);
+
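+ /* for frontdoor (PSP) loading, account for both core-1 sections */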
+ if (adev->firmware.load_type == AMDGPU_FW_LOAD_PSP) {
+ if (adev->gfx.rlc.rlc_1_iram_ucode_size_bytes) {
+ info = &adev->firmware.ucode[AMDGPU_UCODE_ID_RLC_IRAM_1];
+ info->ucode_id = AMDGPU_UCODE_ID_RLC_IRAM_1;
+ info->fw = adev->gfx.rlc_fw;
+ adev->firmware.fw_size +=
+ ALIGN(adev->gfx.rlc.rlc_1_iram_ucode_size_bytes, PAGE_SIZE);
+ }
+
+ if (adev->gfx.rlc.rlc_1_dram_ucode_size_bytes) {
+ info = &adev->firmware.ucode[AMDGPU_UCODE_ID_RLC_DRAM_1];
+ info->ucode_id = AMDGPU_UCODE_ID_RLC_DRAM_1;
+ info->fw = adev->gfx.rlc_fw;
+ adev->firmware.fw_size +=
+ ALIGN(adev->gfx.rlc.rlc_1_dram_ucode_size_bytes, PAGE_SIZE);
+ }
+ }
+}
+
int amdgpu_gfx_rlc_init_microcode(struct amdgpu_device *adev,
uint16_t version_major,
uint16_t version_minor)
amdgpu_gfx_rlc_init_microcode_v2_3(adev);
if (version_minor == 4)
amdgpu_gfx_rlc_init_microcode_v2_4(adev);
+ if (version_minor == 5)
+ amdgpu_gfx_rlc_init_microcode_v2_5(adev);
+
return 0;
}
u32 save_restore_list_srm_size_bytes;
u32 rlc_iram_ucode_size_bytes;
u32 rlc_dram_ucode_size_bytes;
+ u32 rlc_1_iram_ucode_size_bytes;
+ u32 rlc_1_dram_ucode_size_bytes;
u32 rlcp_ucode_size_bytes;
u32 rlcv_ucode_size_bytes;
u32 global_tap_delays_ucode_size_bytes;
u8 *save_restore_list_srm;
u8 *rlc_iram_ucode;
u8 *rlc_dram_ucode;
+ u8 *rlc_1_iram_ucode;
+ u8 *rlc_1_dram_ucode;
u8 *rlcp_ucode;
u8 *rlcv_ucode;
u8 *global_tap_delays_ucode;
container_of(rlc_hdr_v2_2, struct rlc_firmware_header_v2_3, v2_2);
const struct rlc_firmware_header_v2_4 *rlc_hdr_v2_4 =
container_of(rlc_hdr_v2_3, struct rlc_firmware_header_v2_4, v2_3);
+ const struct rlc_firmware_header_v2_5 *rlc_hdr_v2_5 =
+ container_of(rlc_hdr_v2_2, struct rlc_firmware_header_v2_5, v2_2);
switch (version_minor) {
case 0:
DRM_DEBUG("se3_tap_delays_ucode_offset_bytes: %u\n",
le32_to_cpu(rlc_hdr_v2_4->se3_tap_delays_ucode_offset_bytes));
break;
+ case 5:
+ /* base fields shared with rlc_hdr v2_2 */
+ DRM_DEBUG("rlc_iram_ucode_size_bytes: %u\n",
+ le32_to_cpu(rlc_hdr_v2_5->v2_2.rlc_iram_ucode_size_bytes));
+ DRM_DEBUG("rlc_iram_ucode_offset_bytes: %u\n",
+ le32_to_cpu(rlc_hdr_v2_5->v2_2.rlc_iram_ucode_offset_bytes));
+ DRM_DEBUG("rlc_dram_ucode_size_bytes: %u\n",
+ le32_to_cpu(rlc_hdr_v2_5->v2_2.rlc_dram_ucode_size_bytes));
+ DRM_DEBUG("rlc_dram_ucode_offset_bytes: %u\n",
+ le32_to_cpu(rlc_hdr_v2_5->v2_2.rlc_dram_ucode_offset_bytes));
+ /* fields added in rlc_hdr v2_5 */
+ DRM_DEBUG("rlc_1_iram_ucode_size_bytes: %u\n",
+ le32_to_cpu(rlc_hdr_v2_5->rlc_1_iram_ucode_size_bytes));
+ DRM_DEBUG("rlc_1_iram_ucode_offset_bytes: %u\n",
+ le32_to_cpu(rlc_hdr_v2_5->rlc_1_iram_ucode_offset_bytes));
+ DRM_DEBUG("rlc_1_dram_ucode_size_bytes: %u\n",
+ le32_to_cpu(rlc_hdr_v2_5->rlc_1_dram_ucode_size_bytes));
+ DRM_DEBUG("rlc_1_dram_ucode_offset_bytes: %u\n",
+ le32_to_cpu(rlc_hdr_v2_5->rlc_1_dram_ucode_offset_bytes));
+ break;
default:
DRM_ERROR("Unknown RLC v2 ucode: v2.%u\n", version_minor);
break;
return "RLC_IRAM";
case AMDGPU_UCODE_ID_RLC_DRAM:
return "RLC_DRAM";
+ case AMDGPU_UCODE_ID_RLC_IRAM_1:
+ return "RLC_IRAM_1";
+ case AMDGPU_UCODE_ID_RLC_DRAM_1:
+ return "RLC_DRAM_1";
case AMDGPU_UCODE_ID_RLC_G:
return "RLC_G";
case AMDGPU_UCODE_ID_RLC_P:
ucode->ucode_size = adev->gfx.rlc.rlc_dram_ucode_size_bytes;
ucode_addr = adev->gfx.rlc.rlc_dram_ucode;
break;
+ case AMDGPU_UCODE_ID_RLC_IRAM_1:
+ ucode->ucode_size = adev->gfx.rlc.rlc_1_iram_ucode_size_bytes;
+ ucode_addr = adev->gfx.rlc.rlc_1_iram_ucode;
+ break;
+ case AMDGPU_UCODE_ID_RLC_DRAM_1:
+ ucode->ucode_size = adev->gfx.rlc.rlc_1_dram_ucode_size_bytes;
+ ucode_addr = adev->gfx.rlc.rlc_1_dram_ucode;
+ break;
case AMDGPU_UCODE_ID_RLC_P:
ucode->ucode_size = adev->gfx.rlc.rlcp_ucode_size_bytes;
ucode_addr = adev->gfx.rlc.rlcp_ucode;
uint32_t se3_tap_delays_ucode_offset_bytes;
};
+/* version_major=2, version_minor=5 */
+struct rlc_firmware_header_v2_5 {
+ struct rlc_firmware_header_v2_2 v2_2;
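+ /* core-1 (RLCV) IRAM/DRAM sections, loaded via frontdoor (PSP) */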
+ uint32_t rlc_1_iram_ucode_size_bytes;
+ uint32_t rlc_1_iram_ucode_offset_bytes;
+ uint32_t rlc_1_dram_ucode_size_bytes;
+ uint32_t rlc_1_dram_ucode_offset_bytes;
+};
+
/* version_major=1, version_minor=0 */
struct sdma_firmware_header_v1_0 {
struct common_firmware_header header;
struct rlc_firmware_header_v2_2 rlc_v2_2;
struct rlc_firmware_header_v2_3 rlc_v2_3;
struct rlc_firmware_header_v2_4 rlc_v2_4;
+ struct rlc_firmware_header_v2_5 rlc_v2_5;
struct sdma_firmware_header_v1_0 sdma;
struct sdma_firmware_header_v1_1 sdma_v1_1;
struct sdma_firmware_header_v2_0 sdma_v2_0;
AMDGPU_UCODE_ID_RLC_RESTORE_LIST_SRM_MEM,
AMDGPU_UCODE_ID_RLC_IRAM,
AMDGPU_UCODE_ID_RLC_DRAM,
+ AMDGPU_UCODE_ID_RLC_IRAM_1,
+ AMDGPU_UCODE_ID_RLC_DRAM_1,
AMDGPU_UCODE_ID_RLC_P,
AMDGPU_UCODE_ID_RLC_V,
AMDGPU_UCODE_ID_RLC_G,
GFX_FW_TYPE_RS64_MEC_P1_STACK = 95, /* RS64 MEC stack P1 SOC21 */
GFX_FW_TYPE_RS64_MEC_P2_STACK = 96, /* RS64 MEC stack P2 SOC21 */
GFX_FW_TYPE_RS64_MEC_P3_STACK = 97, /* RS64 MEC stack P3 SOC21 */
+ GFX_FW_TYPE_RLX6_UCODE_CORE1 = 98, /* RLCV IRAM MI */
+ GFX_FW_TYPE_RLX6_DRAM_BOOT_CORE1 = 99, /* RLCV DRAM BOOT MI */
GFX_FW_TYPE_VPEC_FW1 = 100, /* VPEC FW1 To Save VPE */
GFX_FW_TYPE_VPEC_FW2 = 101, /* VPEC FW2 To Save VPE */
GFX_FW_TYPE_VPE = 102,