arm: add basic mitigation for Cortex-A AES errata
author     Richard Earnshaw <rearnsha@arm.com>
           Thu, 21 Oct 2021 16:29:41 +0000 (17:29 +0100)
committer  Richard Earnshaw <rearnsha@arm.com>
           Thu, 20 Jan 2022 11:14:44 +0000 (11:14 +0000)
This patch adds the basic patterns for mitigation of the AES erratum
on Cortex-A57 and Cortex-A72, but no attempt is made at this point to
optimize the results for the cases where the erratum mitigation is not
needed.

The mitigation is achieved by guaranteeing that the input operands are
fed from a full-width (128-bit) operation: an identity operation (a
128-bit register-to-register move) is applied to each input value.
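
As an illustration of the effect (not part of the patch): for a simple
use of the AESE ACLE intrinsic, the mitigation shows up as an identity
vmov on each input register ahead of the AES instruction.  The function
below is a hypothetical example; the exact registers chosen depend on
register allocation.

    #include <arm_neon.h>

    /* When compiled with the crypto extension enabled and the erratum
       fix active (the command-line option is added elsewhere in this
       series), each AESE input is first passed through an identity
       move, approximately:

           vmov    q0, q0    @ aes_op_protect on operand 1
           vmov    q1, q1    @ aes_op_protect on operand 2
           aese.8  q0, q1
    */
    uint8x16_t
    aes_round (uint8x16_t data, uint8x16_t key)
    {
      return vaeseq_u8 (data, key);
    }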

gcc/ChangeLog:

* config/arm/crypto.md (crypto_<CRYPTO_AES:crypto_pattern>): Convert
to define_expand.  Add mitigation for the Cortex-A AES erratum
when enabled.
(*crypto_<CRYPTO_AES:crypto_pattern>_insn): New pattern, based
on original crypto_<CRYPTO_AES:crypto_pattern> insn.
(aes_op_protect): New pattern.
* config/arm/unspecs.md (unspec): Add UNSPEC_AES_PROTECT.

gcc/config/arm/crypto.md
gcc/config/arm/unspecs.md

diff --git a/gcc/config/arm/crypto.md b/gcc/config/arm/crypto.md
index 020dfba7dcfda838e40dd67e6fb6f6120a98c1ff..fbee1829ce8e2cc836f845e0cd18fb1055eb7da0 100644
--- a/gcc/config/arm/crypto.md
+++ b/gcc/config/arm/crypto.md
   [(set_attr "type" "<crypto_type>")]
 )
 
-(define_insn "crypto_<CRYPTO_AES:crypto_pattern>"
+(define_expand "crypto_<CRYPTO_AES:crypto_pattern>"
+  [(set (match_operand:<crypto_mode> 0 "register_operand" "=w")
+       (unspec:<crypto_mode>
+               [(xor:<crypto_mode>
+                    (match_operand:<crypto_mode> 1 "register_operand" "%0")
+                    (match_operand:<crypto_mode> 2 "register_operand" "w"))]
+       CRYPTO_AES))]
+  "TARGET_CRYPTO"
+{
+  if (fix_aes_erratum_1742098)
+    {
+      rtx op1_protect = gen_reg_rtx (V16QImode);
+      emit_insn (gen_aes_op_protect (op1_protect, operands[1]));
+      operands[1] = op1_protect;
+      rtx op2_protect = gen_reg_rtx (V16QImode);
+      emit_insn (gen_aes_op_protect (op2_protect, operands[2]));
+      operands[2] = op2_protect;
+    }
+  /* Fall through to default expansion.  */
+})
+
+(define_insn "*crypto_<CRYPTO_AES:crypto_pattern>_insn"
   [(set (match_operand:<crypto_mode> 0 "register_operand" "=w")
        (unspec:<crypto_mode>
         [(xor:<crypto_mode>
   [(set_attr "type" "<crypto_type>")]
 )
 
+; Mitigate against AES erratum on Cortex-A57 and Cortex-A72 by performing
+; a 128-bit operation on an operand producer.  This can be eliminated only
+; if we know that the operand was produced by a full-width operation.
+; V16QImode matches <crypto_mode> for the AES instructions.
+(define_insn "aes_op_protect"
+  [(set (match_operand:V16QI 0 "register_operand" "=w")
+       (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "0")]
+        UNSPEC_AES_PROTECT))]
+  "TARGET_CRYPTO && fix_aes_erratum_1742098"
+  "vmov\\t%q0, %q1"
+  [(set_attr "type" "neon_move_q")]
+)
+
 ;; When AESE/AESMC fusion is enabled we really want to keep the two together
 ;; and enforce the register dependency without scheduling or register
 ;; allocation messing up the order or introducing moves inbetween.
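
Schematically, when fix_aes_erratum_1742098 is set, the expander above
emits a sequence along these lines (here for crypto_aese) before falling
through to the *crypto_<CRYPTO_AES:crypto_pattern>_insn pattern; this is
pseudo-RTL, and the pseudo-register numbers are illustrative only:

    (set (reg:V16QI 100) (unspec:V16QI [(reg:V16QI 98)] UNSPEC_AES_PROTECT))
    (set (reg:V16QI 101) (unspec:V16QI [(reg:V16QI 99)] UNSPEC_AES_PROTECT))
    (set (reg:V16QI 102)
         (unspec:V16QI [(xor:V16QI (reg:V16QI 100) (reg:V16QI 101))]
          UNSPEC_AESE))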
diff --git a/gcc/config/arm/unspecs.md b/gcc/config/arm/unspecs.md
index 2782af08834805b8b5070eb557a4018a29dc7609..7748e78437943ca0cd0d8909330ea8d3b4948ae3 100644
--- a/gcc/config/arm/unspecs.md
+++ b/gcc/config/arm/unspecs.md
   UNSPEC_AESE
   UNSPEC_AESIMC
   UNSPEC_AESMC
+  UNSPEC_AES_PROTECT
   UNSPEC_SHA1C
   UNSPEC_SHA1M
   UNSPEC_SHA1P