This array is within CPUNegativeOffsetState, which means the
last element of the array has an offset from env with the
smallest magnitude. This can be encoded into fewer bits
when generating TCG fast path memory references.
When we changed the NB_MMU_MODES to be a global constant,
rather than a per-target value, we pessimized the code
generated for targets which use only a few mmu indexes.
By inverting the array index, we counteract that.
Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Reviewed-by: Pierrick Bouvier <pierrick.bouvier@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
}
#ifdef CONFIG_TCG
+/*
+ * Invert the index order of the CPUTLBDescFast array so that lower
+ * mmu_idx have offsets from env with smaller magnitude.
+ */
+static inline int mmuidx_to_fast_index(int mmu_idx)
+{
+ return NB_MMU_MODES - 1 - mmu_idx;
+}
+
/*
 * Return the fast-path TLB descriptor for @mmu_idx on @cpu,
 * accounting for the inverted layout of the f[] array.
 */
static inline CPUTLBDescFast *cpu_tlb_fast(CPUState *cpu, int mmu_idx)
{
    int fast_index = mmuidx_to_fast_index(mmu_idx);

    return &cpu->neg.tlb.f[fast_index];
}
#endif
/*
 * Return the offset from env of the CPUTLBDescFast entry for mmu
 * index @which.  The result is negative, since CPUNegativeOffsetState
 * sits immediately before env; @s is currently unused.
 */
static int __attribute__((unused))
tlb_mask_table_ofs(TCGContext *s, int which)
{
    int fi = mmuidx_to_fast_index(which);

    return (offsetof(CPUNegativeOffsetState, tlb.f[fi]) -
            sizeof(CPUNegativeOffsetState));
}