This brings GCC to parity with LLVM commit
a64b3e92c7cb ("[RISCV] Re-define
sha256, Zksed, and Zksh intrinsics to use i32 types.").
The SHA-256, SM3 and SM4 instructions operate on 32-bit integers, and on
RV64 the upper 32 bits have no effect (the output is sign-extended from
the 32-bit result). In that sense, it is much more natural for these
intrinsics to operate on uint32_t than on XLEN-bit-wide integers.
This commit reworks the instructions and expansions, modeled on how
32-bit instructions such as ADDW are handled on RV64.
Before:
riscv_<op>_si: For RV32. Operates fully on uint32_t.
riscv_<op>_di: For RV64. Operates fully on uint64_t.
After:
*riscv_<op>_si: For RV32. Operates fully on uint32_t.
riscv_<op>_di_extended:
    For RV64. The input is uint32_t and the output is int64_t,
    sign-extended from the int32_t result
    (this models part of the <op> behavior).
riscv_<op>_si: Common. Operates fully on uint32_t.
    On RV32, "expands" to *riscv_<op>_si.
    On RV64, expands to riscv_<op>_di_extended *and*
    extracts the lower 32 bits from the int64_t result.
It also refines the definitions of the SHA-256, SM3 and SM4 intrinsics.
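As a hedged illustration (not part of the patch), the refined intrinsics
are used as plain uint32_t functions on both RV32 and RV64; the wrapper
name below is hypothetical:

    #include <stdint.h>

    uint32_t wrap_sha256sig0 (uint32_t rs1)
    {
      /* On RV32 this maps directly to the SImode insn; on RV64 it
         expands to riscv_sha256sig0_di_extended and the low 32 bits
         of the sign-extended result are extracted.  */
      return __builtin_riscv_sha256sig0 (rs1);
    }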
gcc/ChangeLog:
* config/riscv/crypto.md (riscv_sha256sig0_<mode>,
riscv_sha256sig1_<mode>, riscv_sha256sum0_<mode>,
riscv_sha256sum1_<mode>, riscv_sm3p0_<mode>, riscv_sm3p1_<mode>,
riscv_sm4ed_<mode>, riscv_sm4ks_<mode>): Remove and replace with
new insn/expansions.
(SHA256_OP, SM3_OP, SM4_OP): New iterators.
(sha256_op, sm3_op, sm4_op): New attributes for iteration.
(*riscv_<sha256_op>_si): New raw instruction for RV32.
(*riscv_<sm3_op>_si): Ditto.
(*riscv_<sm4_op>_si): Ditto.
(riscv_<sha256_op>_di_extended): New base instruction for RV64.
(riscv_<sm3_op>_di_extended): Ditto.
(riscv_<sm4_op>_di_extended): Ditto.
(riscv_<sha256_op>_si): New common instruction expansion.
(riscv_<sm3_op>_si): Ditto.
(riscv_<sm4_op>_si): Ditto.
* config/riscv/riscv-builtins.cc: Add availability "crypto_zknh",
"crypto_zksh" and "crypto_zksed". Remove availability
"crypto_zksh{32,64}" and "crypto_zksed{32,64}".
* config/riscv/riscv-ftypes.def: Remove unused function type.
* config/riscv/riscv-scalar-crypto.def: Make the SHA-256, SM3 and SM4
intrinsics operate on uint32_t.
gcc/testsuite/ChangeLog:
* gcc.target/riscv/zknh-sha256.c: Moved to...
* gcc.target/riscv/zknh-sha256-64.c: ...here. Test RV64.
* gcc.target/riscv/zknh-sha256-32.c: New test for RV32.
* gcc.target/riscv/zksh64.c: Change the type.
* gcc.target/riscv/zksed64.c: Ditto.
;; ZKNH - SHA256
-(define_insn "riscv_sha256sig0_<mode>"
- [(set (match_operand:X 0 "register_operand" "=r")
- (unspec:X [(match_operand:X 1 "register_operand" "r")]
- UNSPEC_SHA_256_SIG0))]
- "TARGET_ZKNH"
- "sha256sig0\t%0,%1"
- [(set_attr "type" "crypto")])
-
-(define_insn "riscv_sha256sig1_<mode>"
- [(set (match_operand:X 0 "register_operand" "=r")
- (unspec:X [(match_operand:X 1 "register_operand" "r")]
- UNSPEC_SHA_256_SIG1))]
- "TARGET_ZKNH"
- "sha256sig1\t%0,%1"
+(define_int_iterator SHA256_OP [
+ UNSPEC_SHA_256_SIG0 UNSPEC_SHA_256_SIG1
+ UNSPEC_SHA_256_SUM0 UNSPEC_SHA_256_SUM1])
+(define_int_attr sha256_op [
+ (UNSPEC_SHA_256_SIG0 "sha256sig0") (UNSPEC_SHA_256_SIG1 "sha256sig1")
+ (UNSPEC_SHA_256_SUM0 "sha256sum0") (UNSPEC_SHA_256_SUM1 "sha256sum1")])
+
+(define_insn "*riscv_<sha256_op>_si"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (unspec:SI [(match_operand:SI 1 "register_operand" "r")]
+ SHA256_OP))]
+ "TARGET_ZKNH && !TARGET_64BIT"
+ "<sha256_op>\t%0,%1"
[(set_attr "type" "crypto")])
-(define_insn "riscv_sha256sum0_<mode>"
- [(set (match_operand:X 0 "register_operand" "=r")
- (unspec:X [(match_operand:X 1 "register_operand" "r")]
- UNSPEC_SHA_256_SUM0))]
- "TARGET_ZKNH"
- "sha256sum0\t%0,%1"
+(define_insn "riscv_<sha256_op>_di_extended"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (sign_extend:DI
+ (unspec:SI [(match_operand:SI 1 "register_operand" "r")]
+ SHA256_OP)))]
+ "TARGET_ZKNH && TARGET_64BIT"
+ "<sha256_op>\t%0,%1"
[(set_attr "type" "crypto")])
-(define_insn "riscv_sha256sum1_<mode>"
- [(set (match_operand:X 0 "register_operand" "=r")
- (unspec:X [(match_operand:X 1 "register_operand" "r")]
- UNSPEC_SHA_256_SUM1))]
+(define_expand "riscv_<sha256_op>_si"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (unspec:SI [(match_operand:SI 1 "register_operand" "r")]
+ SHA256_OP))]
"TARGET_ZKNH"
- "sha256sum1\t%0,%1"
+ {
+ if (TARGET_64BIT)
+ {
+ rtx t = gen_reg_rtx (DImode);
+ emit_insn (gen_riscv_<sha256_op>_di_extended (t, operands[1]));
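+        /* Mark the SImode lowpart as a promoted subreg: the DImode
+           temporary is known to be sign-extended, so later passes may
+           delete redundant sign extensions of this result.  */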
+ t = gen_lowpart (SImode, t);
+ SUBREG_PROMOTED_VAR_P (t) = 1;
+ SUBREG_PROMOTED_SET (t, SRP_SIGNED);
+ emit_move_insn (operands[0], t);
+ DONE;
+ }
+ }
[(set_attr "type" "crypto")])
;; ZKNH - SHA512
;; ZKSH
-(define_insn "riscv_sm3p0_<mode>"
- [(set (match_operand:X 0 "register_operand" "=r")
- (unspec:X [(match_operand:X 1 "register_operand" "r")]
- UNSPEC_SM3_P0))]
- "TARGET_ZKSH"
- "sm3p0\t%0,%1"
+(define_int_iterator SM3_OP [UNSPEC_SM3_P0 UNSPEC_SM3_P1])
+(define_int_attr sm3_op [(UNSPEC_SM3_P0 "sm3p0") (UNSPEC_SM3_P1 "sm3p1")])
+
+(define_insn "*riscv_<sm3_op>_si"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (unspec:SI [(match_operand:SI 1 "register_operand" "r")]
+ SM3_OP))]
+ "TARGET_ZKSH && !TARGET_64BIT"
+ "<sm3_op>\t%0,%1"
[(set_attr "type" "crypto")])
-(define_insn "riscv_sm3p1_<mode>"
- [(set (match_operand:X 0 "register_operand" "=r")
- (unspec:X [(match_operand:X 1 "register_operand" "r")]
- UNSPEC_SM3_P1))]
+(define_insn "riscv_<sm3_op>_di_extended"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (sign_extend:DI
+ (unspec:SI [(match_operand:SI 1 "register_operand" "r")]
+ SM3_OP)))]
+ "TARGET_ZKSH && TARGET_64BIT"
+ "<sm3_op>\t%0,%1"
+ [(set_attr "type" "crypto")])
+
+(define_expand "riscv_<sm3_op>_si"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (unspec:SI [(match_operand:SI 1 "register_operand" "r")]
+ SM3_OP))]
"TARGET_ZKSH"
- "sm3p1\t%0,%1"
+ {
+ if (TARGET_64BIT)
+ {
+ rtx t = gen_reg_rtx (DImode);
+ emit_insn (gen_riscv_<sm3_op>_di_extended (t, operands[1]));
+ t = gen_lowpart (SImode, t);
+ SUBREG_PROMOTED_VAR_P (t) = 1;
+ SUBREG_PROMOTED_SET (t, SRP_SIGNED);
+ emit_move_insn (operands[0], t);
+ DONE;
+ }
+ }
[(set_attr "type" "crypto")])
;; ZKSED
-(define_insn "riscv_sm4ed_<mode>"
- [(set (match_operand:X 0 "register_operand" "=r")
- (unspec:X [(match_operand:X 1 "register_operand" "r")
- (match_operand:X 2 "register_operand" "r")
- (match_operand:SI 3 "register_operand" "D03")]
- UNSPEC_SM4_ED))]
- "TARGET_ZKSED"
- "sm4ed\t%0,%1,%2,%3"
+(define_int_iterator SM4_OP [UNSPEC_SM4_ED UNSPEC_SM4_KS])
+(define_int_attr sm4_op [(UNSPEC_SM4_ED "sm4ed") (UNSPEC_SM4_KS "sm4ks")])
+
+(define_insn "*riscv_<sm4_op>_si"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (unspec:SI [(match_operand:SI 1 "register_operand" "r")
+ (match_operand:SI 2 "register_operand" "r")
+ (match_operand:SI 3 "register_operand" "D03")]
+ SM4_OP))]
+ "TARGET_ZKSED && !TARGET_64BIT"
+ "<sm4_op>\t%0,%1,%2,%3"
[(set_attr "type" "crypto")])
-(define_insn "riscv_sm4ks_<mode>"
- [(set (match_operand:X 0 "register_operand" "=r")
- (unspec:X [(match_operand:X 1 "register_operand" "r")
- (match_operand:X 2 "register_operand" "r")
- (match_operand:SI 3 "register_operand" "D03")]
- UNSPEC_SM4_KS))]
+(define_insn "riscv_<sm4_op>_di_extended"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (sign_extend:DI
+ (unspec:SI [(match_operand:SI 1 "register_operand" "r")
+ (match_operand:SI 2 "register_operand" "r")
+ (match_operand:SI 3 "register_operand" "D03")]
+ SM4_OP)))]
+ "TARGET_ZKSED && TARGET_64BIT"
+ "<sm4_op>\t%0,%1,%2,%3"
+ [(set_attr "type" "crypto")])
+
+(define_expand "riscv_<sm4_op>_si"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (unspec:SI [(match_operand:SI 1 "register_operand" "r")
+ (match_operand:SI 2 "register_operand" "r")
+ (match_operand:SI 3 "register_operand" "D03")]
+ SM4_OP))]
"TARGET_ZKSED"
- "sm4ks\t%0,%1,%2,%3"
+ {
+ if (TARGET_64BIT)
+ {
+ rtx t = gen_reg_rtx (DImode);
+ emit_insn (gen_riscv_<sm4_op>_di_extended (t, operands[1], operands[2], operands[3]));
+ t = gen_lowpart (SImode, t);
+ SUBREG_PROMOTED_VAR_P (t) = 1;
+ SUBREG_PROMOTED_SET (t, SRP_SIGNED);
+ emit_move_insn (operands[0], t);
+ DONE;
+ }
+ }
[(set_attr "type" "crypto")])
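A minimal sketch of the intended payoff on RV64 (assuming -O2 and
-march=rv64gc_zknh; the exact output depends on the compiler build):
because the DImode pattern models the sign extension and the expander
marks the subreg as promoted, a widening use of the result should not
need a separate sext.w.

    #include <stdint.h>

    int64_t sig0_widened (uint32_t rs1)
    {
      /* Expected to compile to sha256sig0 with no extra extension:
         the instruction already sign-extends its 32-bit result.  */
      return (int32_t) __builtin_riscv_sha256sig0 (rs1);
    }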
AVAIL (crypto_zkne32, TARGET_ZKNE && !TARGET_64BIT)
AVAIL (crypto_zkne64, TARGET_ZKNE && TARGET_64BIT)
AVAIL (crypto_zkne_or_zknd, (TARGET_ZKNE || TARGET_ZKND) && TARGET_64BIT)
+AVAIL (crypto_zknh, TARGET_ZKNH)
AVAIL (crypto_zknh32, TARGET_ZKNH && !TARGET_64BIT)
AVAIL (crypto_zknh64, TARGET_ZKNH && TARGET_64BIT)
-AVAIL (crypto_zksh32, TARGET_ZKSH && !TARGET_64BIT)
-AVAIL (crypto_zksh64, TARGET_ZKSH && TARGET_64BIT)
-AVAIL (crypto_zksed32, TARGET_ZKSED && !TARGET_64BIT)
-AVAIL (crypto_zksed64, TARGET_ZKSED && TARGET_64BIT)
+AVAIL (crypto_zksh, TARGET_ZKSH)
+AVAIL (crypto_zksed, TARGET_ZKSED)
AVAIL (clmul_zbkc32_or_zbc32, (TARGET_ZBKC || TARGET_ZBC) && !TARGET_64BIT)
AVAIL (clmul_zbkc64_or_zbc64, (TARGET_ZBKC || TARGET_ZBC) && TARGET_64BIT)
AVAIL (clmulr_zbc32, TARGET_ZBC && !TARGET_64BIT)
DEF_RISCV_FTYPE (2, (UDI, UDI, USI))
DEF_RISCV_FTYPE (2, (UDI, UDI, UDI))
DEF_RISCV_FTYPE (3, (USI, USI, USI, USI))
-DEF_RISCV_FTYPE (3, (UDI, UDI, UDI, USI))
DIRECT_BUILTIN (aes64esm, RISCV_UDI_FTYPE_UDI_UDI, crypto_zkne64),
// ZKNH
-RISCV_BUILTIN (sha256sig0_si, "sha256sig0", RISCV_BUILTIN_DIRECT, RISCV_USI_FTYPE_USI, crypto_zknh32),
-RISCV_BUILTIN (sha256sig0_di, "sha256sig0", RISCV_BUILTIN_DIRECT, RISCV_UDI_FTYPE_UDI, crypto_zknh64),
-RISCV_BUILTIN (sha256sig1_si, "sha256sig1", RISCV_BUILTIN_DIRECT, RISCV_USI_FTYPE_USI, crypto_zknh32),
-RISCV_BUILTIN (sha256sig1_di, "sha256sig1", RISCV_BUILTIN_DIRECT, RISCV_UDI_FTYPE_UDI, crypto_zknh64),
-RISCV_BUILTIN (sha256sum0_si, "sha256sum0", RISCV_BUILTIN_DIRECT, RISCV_USI_FTYPE_USI, crypto_zknh32),
-RISCV_BUILTIN (sha256sum0_di, "sha256sum0", RISCV_BUILTIN_DIRECT, RISCV_UDI_FTYPE_UDI, crypto_zknh64),
-RISCV_BUILTIN (sha256sum1_si, "sha256sum1", RISCV_BUILTIN_DIRECT, RISCV_USI_FTYPE_USI, crypto_zknh32),
-RISCV_BUILTIN (sha256sum1_di, "sha256sum1", RISCV_BUILTIN_DIRECT, RISCV_UDI_FTYPE_UDI, crypto_zknh64),
+RISCV_BUILTIN (sha256sig0_si, "sha256sig0", RISCV_BUILTIN_DIRECT, RISCV_USI_FTYPE_USI, crypto_zknh),
+RISCV_BUILTIN (sha256sig1_si, "sha256sig1", RISCV_BUILTIN_DIRECT, RISCV_USI_FTYPE_USI, crypto_zknh),
+RISCV_BUILTIN (sha256sum0_si, "sha256sum0", RISCV_BUILTIN_DIRECT, RISCV_USI_FTYPE_USI, crypto_zknh),
+RISCV_BUILTIN (sha256sum1_si, "sha256sum1", RISCV_BUILTIN_DIRECT, RISCV_USI_FTYPE_USI, crypto_zknh),
DIRECT_BUILTIN (sha512sig0h, RISCV_USI_FTYPE_USI_USI, crypto_zknh32),
DIRECT_BUILTIN (sha512sig0l, RISCV_USI_FTYPE_USI_USI, crypto_zknh32),
DIRECT_BUILTIN (sha512sum1, RISCV_UDI_FTYPE_UDI, crypto_zknh64),
// ZKSH
-RISCV_BUILTIN (sm3p0_si, "sm3p0", RISCV_BUILTIN_DIRECT, RISCV_USI_FTYPE_USI, crypto_zksh32),
-RISCV_BUILTIN (sm3p0_di, "sm3p0", RISCV_BUILTIN_DIRECT, RISCV_UDI_FTYPE_UDI, crypto_zksh64),
-RISCV_BUILTIN (sm3p1_si, "sm3p1", RISCV_BUILTIN_DIRECT, RISCV_USI_FTYPE_USI, crypto_zksh32),
-RISCV_BUILTIN (sm3p1_di, "sm3p1", RISCV_BUILTIN_DIRECT, RISCV_UDI_FTYPE_UDI, crypto_zksh64),
+RISCV_BUILTIN (sm3p0_si, "sm3p0", RISCV_BUILTIN_DIRECT, RISCV_USI_FTYPE_USI, crypto_zksh),
+RISCV_BUILTIN (sm3p1_si, "sm3p1", RISCV_BUILTIN_DIRECT, RISCV_USI_FTYPE_USI, crypto_zksh),
// ZKSED
-RISCV_BUILTIN (sm4ed_si, "sm4ed", RISCV_BUILTIN_DIRECT, RISCV_USI_FTYPE_USI_USI_USI, crypto_zksed32),
-RISCV_BUILTIN (sm4ed_di, "sm4ed", RISCV_BUILTIN_DIRECT, RISCV_UDI_FTYPE_UDI_UDI_USI, crypto_zksed64),
-RISCV_BUILTIN (sm4ks_si, "sm4ks", RISCV_BUILTIN_DIRECT, RISCV_USI_FTYPE_USI_USI_USI, crypto_zksed32),
-RISCV_BUILTIN (sm4ks_di, "sm4ks", RISCV_BUILTIN_DIRECT, RISCV_UDI_FTYPE_UDI_UDI_USI, crypto_zksed64),
+RISCV_BUILTIN (sm4ed_si, "sm4ed", RISCV_BUILTIN_DIRECT, RISCV_USI_FTYPE_USI_USI_USI, crypto_zksed),
+RISCV_BUILTIN (sm4ks_si, "sm4ks", RISCV_BUILTIN_DIRECT, RISCV_USI_FTYPE_USI_USI_USI, crypto_zksed),
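For the three-operand SM4 intrinsics, the byte-select operand uses the
"D03" constraint, i.e. a constant in the range 0-3. A hedged usage
sketch (the helper name is hypothetical):

    #include <stdint.h>

    uint32_t sm4_round (uint32_t x, uint32_t rk)
    {
      /* The third operand (bs) must be a compile-time constant
         in [0, 3].  */
      return __builtin_riscv_sm4ed (x, rk, 2);
    }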
--- /dev/null
+/* { dg-do compile } */
+/* { dg-options "-O2 -march=rv32gc_zknh -mabi=ilp32d" } */
+/* { dg-skip-if "" { *-*-* } { "-g" "-flto"} } */
+
+#include "zknh-sha256-64.c"
+
+/* { dg-final { scan-assembler-times "sha256sig0" 1 } } */
+/* { dg-final { scan-assembler-times "sha256sig1" 1 } } */
+/* { dg-final { scan-assembler-times "sha256sum0" 1 } } */
+/* { dg-final { scan-assembler-times "sha256sum1" 1 } } */
/* { dg-options "-O2 -march=rv64gc_zknh -mabi=lp64" } */
/* { dg-skip-if "" { *-*-* } { "-g" "-flto"} } */
-unsigned long foo1(unsigned long rs1)
+unsigned int foo1(unsigned int rs1)
{
return __builtin_riscv_sha256sig0(rs1);
}
-unsigned long foo2(unsigned long rs1)
+unsigned int foo2(unsigned int rs1)
{
return __builtin_riscv_sha256sig1(rs1);
}
-unsigned long foo3(unsigned long rs1)
+unsigned int foo3(unsigned int rs1)
{
return __builtin_riscv_sha256sum0(rs1);
}
-unsigned long foo4(unsigned long rs1)
+unsigned int foo4(unsigned int rs1)
{
return __builtin_riscv_sha256sum1(rs1);
}
#include <stdint-gcc.h>
-uint64_t foo1(uint64_t rs1, uint64_t rs2, unsigned bs)
+uint32_t foo1(uint32_t rs1, uint32_t rs2, unsigned bs)
{
return __builtin_riscv_sm4ks(rs1,rs2,bs);
}
-uint64_t foo2(uint64_t rs1, uint64_t rs2, unsigned bs)
+uint32_t foo2(uint32_t rs1, uint32_t rs2, unsigned bs)
{
return __builtin_riscv_sm4ed(rs1,rs2,bs);
}
#include <stdint-gcc.h>
-uint64_t foo1(uint64_t rs1)
+uint32_t foo1(uint32_t rs1)
{
return __builtin_riscv_sm3p0(rs1);
}
-uint64_t foo2(uint64_t rs1)
+uint32_t foo2(uint32_t rs1)
{
return __builtin_riscv_sm3p1(rs1);
}