+2009-02-13 Ulrich Weigand <Ulrich.Weigand@de.ibm.com>
+
+ * config/spu/spu_internals.h (spu_sr, spu_sra, spu_srqw,
+ spu_srqwbyte, spu_srqwbytebc): Define.
+ * config/spu/spu-builtins.def (spu_sr, spu_sra, spu_srqw,
+ spu_srqwbyte, spu_srqwbytebc): New overloaded builtins.
+ * config/spu/spu.md ("shrqbybi_<mode>", "shrqbi_<mode>",
+ "shrqby_<mode>"): New insn-and-split patterns.
+ * config/spu/spu.c (expand_builtin_args): Determine and return
+ number of operands using spu_builtin_description data.
+ (spu_expand_builtin_1): Use it.
+
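For context (an illustration only, not part of the patch): the new overloaded intrinsics mirror the existing spu_sl* left-shift family. spu_sr and spu_sra are element-wise logical and arithmetic right shifts (they expand to the vlshr/vashr patterns), while spu_srqw, spu_srqwbyte and spu_srqwbytebc shift the entire quadword right by a bit count (only the low 3 bits are used, per the mask of 7 in shrqbi), a byte count, and the byte part of a bit count (count & -8 bits, per shrqbybi), respectively. A minimal usage sketch with made-up function names:

#include <spu_intrinsics.h>

/* Element-wise logical shift right of each 32-bit element.  */
vector unsigned int
halve_elements (vector unsigned int v)
{
  return spu_sr (v, 1);
}

/* Element-wise arithmetic shift right, preserving the sign bit.  */
vector signed short
scale_signed (vector signed short v, unsigned int n)
{
  return spu_sra (v, n);
}

/* Whole-quadword right shifts: by bits (0..7), by bytes, and by the
   byte part of a bit count (24 bits -> 3 bytes).  */
vector unsigned char
quadword_shifts (vector unsigned char v)
{
  v = spu_srqw (v, 3);
  v = spu_srqwbyte (v, 2);
  v = spu_srqwbytebc (v, 24);
  return v;
}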
2009-02-13 Steve Ellcey <sje@cup.hp.com>
PR target/38056
DEF_BUILTIN (SPU_SLQWBYTEBC_7, CODE_FOR_shlqbybi_ti, "spu_slqwbytebc_7", B_INTERNAL, _A3(SPU_BTI_UV16QI, SPU_BTI_UV16QI, SPU_BTI_UINTSI))
DEF_BUILTIN (SPU_SLQWBYTEBC_8, CODE_FOR_shlqbybi_ti, "spu_slqwbytebc_8", B_INTERNAL, _A3(SPU_BTI_V4SF, SPU_BTI_V4SF, SPU_BTI_UINTSI))
DEF_BUILTIN (SPU_SLQWBYTEBC_9, CODE_FOR_shlqbybi_ti, "spu_slqwbytebc_9", B_INTERNAL, _A3(SPU_BTI_V2DF, SPU_BTI_V2DF, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SR, CODE_FOR_nothing, "spu_sr", B_OVERLOAD, _A1(SPU_BTI_VOID))
+DEF_BUILTIN (SPU_SR_0, CODE_FOR_vlshrv8hi3, "spu_sr_0", B_INTERNAL, _A3(SPU_BTI_UV8HI, SPU_BTI_UV8HI, SPU_BTI_UV8HI))
+DEF_BUILTIN (SPU_SR_1, CODE_FOR_vlshrv8hi3, "spu_sr_1", B_INTERNAL, _A3(SPU_BTI_V8HI, SPU_BTI_V8HI, SPU_BTI_UV8HI))
+DEF_BUILTIN (SPU_SR_2, CODE_FOR_vlshrv4si3, "spu_sr_2", B_INTERNAL, _A3(SPU_BTI_UV4SI, SPU_BTI_UV4SI, SPU_BTI_UV4SI))
+DEF_BUILTIN (SPU_SR_3, CODE_FOR_vlshrv4si3, "spu_sr_3", B_INTERNAL, _A3(SPU_BTI_V4SI, SPU_BTI_V4SI, SPU_BTI_UV4SI))
+DEF_BUILTIN (SPU_SR_4, CODE_FOR_vlshrv8hi3, "spu_sr_4", B_INTERNAL, _A3(SPU_BTI_UV8HI, SPU_BTI_UV8HI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SR_5, CODE_FOR_vlshrv8hi3, "spu_sr_5", B_INTERNAL, _A3(SPU_BTI_V8HI, SPU_BTI_V8HI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SR_6, CODE_FOR_vlshrv4si3, "spu_sr_6", B_INTERNAL, _A3(SPU_BTI_UV4SI, SPU_BTI_UV4SI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SR_7, CODE_FOR_vlshrv4si3, "spu_sr_7", B_INTERNAL, _A3(SPU_BTI_V4SI, SPU_BTI_V4SI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRA, CODE_FOR_nothing, "spu_sra", B_OVERLOAD, _A1(SPU_BTI_VOID))
+DEF_BUILTIN (SPU_SRA_0, CODE_FOR_vashrv8hi3, "spu_sra_0", B_INTERNAL, _A3(SPU_BTI_UV8HI, SPU_BTI_UV8HI, SPU_BTI_UV8HI))
+DEF_BUILTIN (SPU_SRA_1, CODE_FOR_vashrv8hi3, "spu_sra_1", B_INTERNAL, _A3(SPU_BTI_V8HI, SPU_BTI_V8HI, SPU_BTI_UV8HI))
+DEF_BUILTIN (SPU_SRA_2, CODE_FOR_vashrv4si3, "spu_sra_2", B_INTERNAL, _A3(SPU_BTI_UV4SI, SPU_BTI_UV4SI, SPU_BTI_UV4SI))
+DEF_BUILTIN (SPU_SRA_3, CODE_FOR_vashrv4si3, "spu_sra_3", B_INTERNAL, _A3(SPU_BTI_V4SI, SPU_BTI_V4SI, SPU_BTI_UV4SI))
+DEF_BUILTIN (SPU_SRA_4, CODE_FOR_vashrv8hi3, "spu_sra_4", B_INTERNAL, _A3(SPU_BTI_UV8HI, SPU_BTI_UV8HI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRA_5, CODE_FOR_vashrv8hi3, "spu_sra_5", B_INTERNAL, _A3(SPU_BTI_V8HI, SPU_BTI_V8HI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRA_6, CODE_FOR_vashrv4si3, "spu_sra_6", B_INTERNAL, _A3(SPU_BTI_UV4SI, SPU_BTI_UV4SI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRA_7, CODE_FOR_vashrv4si3, "spu_sra_7", B_INTERNAL, _A3(SPU_BTI_V4SI, SPU_BTI_V4SI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQW, CODE_FOR_nothing, "spu_srqw", B_OVERLOAD, _A1(SPU_BTI_VOID))
+DEF_BUILTIN (SPU_SRQW_0, CODE_FOR_shrqbi_ti, "spu_srqw_0", B_INTERNAL, _A3(SPU_BTI_V2DI, SPU_BTI_V2DI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQW_1, CODE_FOR_shrqbi_ti, "spu_srqw_1", B_INTERNAL, _A3(SPU_BTI_UV2DI, SPU_BTI_UV2DI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQW_2, CODE_FOR_shrqbi_ti, "spu_srqw_2", B_INTERNAL, _A3(SPU_BTI_V4SI, SPU_BTI_V4SI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQW_3, CODE_FOR_shrqbi_ti, "spu_srqw_3", B_INTERNAL, _A3(SPU_BTI_UV4SI, SPU_BTI_UV4SI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQW_4, CODE_FOR_shrqbi_ti, "spu_srqw_4", B_INTERNAL, _A3(SPU_BTI_V8HI, SPU_BTI_V8HI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQW_5, CODE_FOR_shrqbi_ti, "spu_srqw_5", B_INTERNAL, _A3(SPU_BTI_UV8HI, SPU_BTI_UV8HI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQW_6, CODE_FOR_shrqbi_ti, "spu_srqw_6", B_INTERNAL, _A3(SPU_BTI_V16QI, SPU_BTI_V16QI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQW_7, CODE_FOR_shrqbi_ti, "spu_srqw_7", B_INTERNAL, _A3(SPU_BTI_UV16QI, SPU_BTI_UV16QI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQW_8, CODE_FOR_shrqbi_ti, "spu_srqw_8", B_INTERNAL, _A3(SPU_BTI_V4SF, SPU_BTI_V4SF, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQW_9, CODE_FOR_shrqbi_ti, "spu_srqw_9", B_INTERNAL, _A3(SPU_BTI_V2DF, SPU_BTI_V2DF, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQWBYTE, CODE_FOR_nothing, "spu_srqwbyte", B_OVERLOAD, _A1(SPU_BTI_VOID))
+DEF_BUILTIN (SPU_SRQWBYTE_0, CODE_FOR_shrqby_ti, "spu_srqwbyte_0", B_INTERNAL, _A3(SPU_BTI_V2DI, SPU_BTI_V2DI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQWBYTE_1, CODE_FOR_shrqby_ti, "spu_srqwbyte_1", B_INTERNAL, _A3(SPU_BTI_UV2DI, SPU_BTI_UV2DI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQWBYTE_2, CODE_FOR_shrqby_ti, "spu_srqwbyte_2", B_INTERNAL, _A3(SPU_BTI_V4SI, SPU_BTI_V4SI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQWBYTE_3, CODE_FOR_shrqby_ti, "spu_srqwbyte_3", B_INTERNAL, _A3(SPU_BTI_UV4SI, SPU_BTI_UV4SI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQWBYTE_4, CODE_FOR_shrqby_ti, "spu_srqwbyte_4", B_INTERNAL, _A3(SPU_BTI_V8HI, SPU_BTI_V8HI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQWBYTE_5, CODE_FOR_shrqby_ti, "spu_srqwbyte_5", B_INTERNAL, _A3(SPU_BTI_UV8HI, SPU_BTI_UV8HI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQWBYTE_6, CODE_FOR_shrqby_ti, "spu_srqwbyte_6", B_INTERNAL, _A3(SPU_BTI_V16QI, SPU_BTI_V16QI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQWBYTE_7, CODE_FOR_shrqby_ti, "spu_srqwbyte_7", B_INTERNAL, _A3(SPU_BTI_UV16QI, SPU_BTI_UV16QI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQWBYTE_8, CODE_FOR_shrqby_ti, "spu_srqwbyte_8", B_INTERNAL, _A3(SPU_BTI_V4SF, SPU_BTI_V4SF, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQWBYTE_9, CODE_FOR_shrqby_ti, "spu_srqwbyte_9", B_INTERNAL, _A3(SPU_BTI_V2DF, SPU_BTI_V2DF, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQWBYTEBC, CODE_FOR_nothing, "spu_srqwbytebc", B_OVERLOAD, _A1(SPU_BTI_VOID))
+DEF_BUILTIN (SPU_SRQWBYTEBC_0, CODE_FOR_shrqbybi_ti, "spu_srqwbytebc_0", B_INTERNAL, _A3(SPU_BTI_V2DI, SPU_BTI_V2DI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQWBYTEBC_1, CODE_FOR_shrqbybi_ti, "spu_srqwbytebc_1", B_INTERNAL, _A3(SPU_BTI_UV2DI, SPU_BTI_UV2DI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQWBYTEBC_2, CODE_FOR_shrqbybi_ti, "spu_srqwbytebc_2", B_INTERNAL, _A3(SPU_BTI_V4SI, SPU_BTI_V4SI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQWBYTEBC_3, CODE_FOR_shrqbybi_ti, "spu_srqwbytebc_3", B_INTERNAL, _A3(SPU_BTI_UV4SI, SPU_BTI_UV4SI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQWBYTEBC_4, CODE_FOR_shrqbybi_ti, "spu_srqwbytebc_4", B_INTERNAL, _A3(SPU_BTI_V8HI, SPU_BTI_V8HI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQWBYTEBC_5, CODE_FOR_shrqbybi_ti, "spu_srqwbytebc_5", B_INTERNAL, _A3(SPU_BTI_UV8HI, SPU_BTI_UV8HI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQWBYTEBC_6, CODE_FOR_shrqbybi_ti, "spu_srqwbytebc_6", B_INTERNAL, _A3(SPU_BTI_V16QI, SPU_BTI_V16QI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQWBYTEBC_7, CODE_FOR_shrqbybi_ti, "spu_srqwbytebc_7", B_INTERNAL, _A3(SPU_BTI_UV16QI, SPU_BTI_UV16QI, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQWBYTEBC_8, CODE_FOR_shrqbybi_ti, "spu_srqwbytebc_8", B_INTERNAL, _A3(SPU_BTI_V4SF, SPU_BTI_V4SF, SPU_BTI_UINTSI))
+DEF_BUILTIN (SPU_SRQWBYTEBC_9, CODE_FOR_shrqbybi_ti, "spu_srqwbytebc_9", B_INTERNAL, _A3(SPU_BTI_V2DF, SPU_BTI_V2DF, SPU_BTI_UINTSI))
DEF_BUILTIN (SPU_SPLATS, CODE_FOR_nothing, "spu_splats", B_OVERLOAD, _A1(SPU_BTI_VOID))
DEF_BUILTIN (SPU_SPLATS_0, CODE_FOR_spu_splats, "spu_splats_0", B_INTERNAL, _A2(SPU_BTI_UV16QI, SPU_BTI_UINTQI))
}
-static void
+static int
expand_builtin_args (struct spu_builtin_description *d, tree exp,
rtx target, rtx ops[])
{
if (d->parm[0] != SPU_BTI_VOID)
ops[i++] = target;
- for (a = 0; i < insn_data[icode].n_operands; i++, a++)
+ for (a = 0; d->parm[a+1] != SPU_BTI_END_OF_PARAMS; i++, a++)
{
tree arg = CALL_EXPR_ARG (exp, a);
if (arg == 0)
abort ();
ops[i] = expand_expr (arg, NULL_RTX, VOIDmode, 0);
}
+
+ /* The insn pattern may have additional operands (SCRATCH).
+ Return the number of actual non-SCRATCH operands. */
+ gcc_assert (i <= insn_data[icode].n_operands);
+ return i;
}
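A standalone sketch of the counting scheme above, with hypothetical names (BTI_VOID, BTI_END_OF_PARAMS, count_builtin_operands) standing in for the GCC internals: the operand count is taken from the builtin's own parameter list, terminated by an end marker, so match_scratch operands that a pattern declares on top of the user-visible operands are never requested from the call expression.

#include <assert.h>

#define BTI_VOID           0
#define BTI_END_OF_PARAMS -1

/* parm[0] describes the return type (BTI_VOID if none); parm[1..] the
   source arguments, terminated by BTI_END_OF_PARAMS.  Hypothetical
   stand-in for spu_builtin_description.parm.  */
static int
count_builtin_operands (const int parm[])
{
  int i = 0, a;
  if (parm[0] != BTI_VOID)
    i++;                        /* operand 0 receives the result       */
  for (a = 1; parm[a] != BTI_END_OF_PARAMS; a++)
    i++;                        /* one operand per source argument     */
  return i;
}

int
main (void)
{
  /* A builtin like spu_srqw_0: result + two arguments = 3 operands,
     even though the shrqbi_ti pattern also declares a scratch clobber,
     so the insn itself has 4 operands.  */
  static const int parm[] = { 1, 1, 2, BTI_END_OF_PARAMS };
  assert (count_builtin_operands (parm) == 3);
  return 0;
}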
static rtx
enum insn_code icode = d->icode;
enum machine_mode mode, tmode;
int i, p;
+ int n_operands;
tree return_type;
/* Set up ops[] with values from arglist. */
- expand_builtin_args (d, exp, target, ops);
+ n_operands = expand_builtin_args (d, exp, target, ops);
/* Handle the target operand which must be operand 0. */
i = 0;
return 0;
/* Handle the rest of the operands. */
- for (p = 1; i < insn_data[icode].n_operands; i++, p++)
+ for (p = 1; i < n_operands; i++, p++)
{
if (insn_data[d->icode].operand[i].mode != VOIDmode)
mode = insn_data[d->icode].operand[i].mode;
ops[i] = spu_force_reg (mode, ops[i]);
}
- switch (insn_data[icode].n_operands)
+ switch (n_operands)
{
case 0:
pat = GEN_FCN (icode) (0);
emit_insn (gen_subsi3(operands[5], GEN_INT(7), operands[2]));
})
+(define_insn_and_split "shrqbybi_<mode>"
+ [(set (match_operand:DTI 0 "spu_reg_operand" "=r,r")
+ (lshiftrt:DTI (match_operand:DTI 1 "spu_reg_operand" "r,r")
+ (and:SI (match_operand:SI 2 "spu_nonmem_operand" "r,I")
+ (const_int -8))))
+ (clobber (match_scratch:SI 3 "=&r,X"))]
+ ""
+ "#"
+ "reload_completed"
+ [(set (match_dup:DTI 0)
+ (lshiftrt:DTI (match_dup:DTI 1)
+ (and:SI (neg:SI (and:SI (match_dup:SI 3) (const_int -8)))
+ (const_int -8))))]
+ {
+ if (GET_CODE (operands[2]) == CONST_INT)
+ operands[3] = GEN_INT (7 - INTVAL (operands[2]));
+ else
+ emit_insn (gen_subsi3 (operands[3], GEN_INT (7), operands[2]));
+ }
+ [(set_attr "type" "shuf")])
+
(define_insn "rotqmbybi_<mode>"
[(set (match_operand:DTI 0 "spu_reg_operand" "=r,r")
(lshiftrt:DTI (match_operand:DTI 1 "spu_reg_operand" "r,r")
rotqmbyi\t%0,%1,-%H2"
[(set_attr "type" "shuf")])
+(define_insn_and_split "shrqbi_<mode>"
+ [(set (match_operand:DTI 0 "spu_reg_operand" "=r,r")
+ (lshiftrt:DTI (match_operand:DTI 1 "spu_reg_operand" "r,r")
+ (and:SI (match_operand:SI 2 "spu_nonmem_operand" "r,I")
+ (const_int 7))))
+ (clobber (match_scratch:SI 3 "=&r,X"))]
+ ""
+ "#"
+ "reload_completed"
+ [(set (match_dup:DTI 0)
+ (lshiftrt:DTI (match_dup:DTI 1)
+ (and:SI (neg:SI (match_dup:SI 3)) (const_int 7))))]
+ {
+ if (GET_CODE (operands[2]) == CONST_INT)
+ operands[3] = GEN_INT (-INTVAL (operands[2]));
+ else
+ emit_insn (gen_subsi3 (operands[3], GEN_INT (0), operands[2]));
+ }
+ [(set_attr "type" "shuf")])
+
(define_insn "rotqmbi_<mode>"
[(set (match_operand:DTI 0 "spu_reg_operand" "=r,r")
(lshiftrt:DTI (match_operand:DTI 1 "spu_reg_operand" "r,r")
rotqmbii\t%0,%1,-%E2"
[(set_attr "type" "shuf")])
+(define_insn_and_split "shrqby_<mode>"
+ [(set (match_operand:DTI 0 "spu_reg_operand" "=r,r")
+ (lshiftrt:DTI (match_operand:DTI 1 "spu_reg_operand" "r,r")
+ (mult:SI (match_operand:SI 2 "spu_nonmem_operand" "r,I")
+ (const_int 8))))
+ (clobber (match_scratch:SI 3 "=&r,X"))]
+ ""
+ "#"
+ "reload_completed"
+ [(set (match_dup:DTI 0)
+ (lshiftrt:DTI (match_dup:DTI 1)
+ (mult:SI (neg:SI (match_dup:SI 3)) (const_int 8))))]
+ {
+ if (GET_CODE (operands[2]) == CONST_INT)
+ operands[3] = GEN_INT (-INTVAL (operands[2]));
+ else
+ emit_insn (gen_subsi3 (operands[3], GEN_INT (0), operands[2]));
+ }
+ [(set_attr "type" "shuf")])
+
(define_insn "rotqmby_<mode>"
[(set (match_operand:DTI 0 "spu_reg_operand" "=r,r")
(lshiftrt:DTI (match_operand:DTI 1 "spu_reg_operand" "r,r")
#define spu_slqw(ra,rb) __builtin_spu_slqw(ra,rb)
#define spu_slqwbyte(ra,rb) __builtin_spu_slqwbyte(ra,rb)
#define spu_slqwbytebc(ra,rb) __builtin_spu_slqwbytebc(ra,rb)
+#define spu_sr(ra,rb) __builtin_spu_sr(ra,rb)
+#define spu_sra(ra,rb) __builtin_spu_sra(ra,rb)
+#define spu_srqw(ra,rb) __builtin_spu_srqw(ra,rb)
+#define spu_srqwbyte(ra,rb) __builtin_spu_srqwbyte(ra,rb)
+#define spu_srqwbytebc(ra,rb) __builtin_spu_srqwbytebc(ra,rb)
#define spu_extract(ra,pos) __builtin_spu_extract(ra,pos)
#define spu_insert(scalar,ra,pos) __builtin_spu_insert(scalar,ra,pos)
#define spu_promote(scalar,pos) __builtin_spu_promote(scalar,pos)
+2009-02-13 Ulrich Weigand <Ulrich.Weigand@de.ibm.com>
+
+ * gcc.target/spu/intrinsics-sr.c: New test.
+
2009-02-13 Steve Ellcey <sje@cup.hp.com>
PR target/38056
--- /dev/null
+/* { dg-do compile } */
+/* { dg-options "-std=c99" } */
+
+#include <spu_intrinsics.h>
+
+/* spu_sr */
+
+vector unsigned short test_sr_1 (vector unsigned short ra, vector unsigned short count)
+{
+ return spu_sr (ra, count);
+}
+
+vector signed short test_sr_2 (vector signed short ra, vector unsigned short count)
+{
+ return spu_sr (ra, count);
+}
+
+vector unsigned int test_sr_3 (vector unsigned int ra, vector unsigned int count)
+{
+ return spu_sr (ra, count);
+}
+
+vector signed int test_sr_4 (vector signed int ra, vector unsigned int count)
+{
+ return spu_sr (ra, count);
+}
+
+vector unsigned short test_sr_5 (vector unsigned short ra)
+{
+ return spu_sr (ra, 11);
+}
+
+vector signed short test_sr_6 (vector signed short ra)
+{
+ return spu_sr (ra, 11);
+}
+
+vector unsigned short test_sr_7 (vector unsigned short ra, unsigned int count)
+{
+ return spu_sr (ra, count);
+}
+
+vector signed short test_sr_8 (vector signed short ra, unsigned int count)
+{
+ return spu_sr (ra, count);
+}
+
+vector unsigned int test_sr_9 (vector unsigned int ra)
+{
+ return spu_sr (ra, 11);
+}
+
+vector signed int test_sr_10 (vector signed int ra)
+{
+ return spu_sr (ra, 11);
+}
+
+vector unsigned int test_sr_11 (vector unsigned int ra, unsigned int count)
+{
+ return spu_sr (ra, count);
+}
+
+vector signed int test_sr_12 (vector signed int ra, unsigned int count)
+{
+ return spu_sr (ra, count);
+}
+
+/* spu_sra */
+
+vector unsigned short test_sra_1 (vector unsigned short ra, vector unsigned short count)
+{
+ return spu_sra (ra, count);
+}
+
+vector signed short test_sra_2 (vector signed short ra, vector unsigned short count)
+{
+ return spu_sra (ra, count);
+}
+
+vector unsigned int test_sra_3 (vector unsigned int ra, vector unsigned int count)
+{
+ return spu_sra (ra, count);
+}
+
+vector signed int test_sra_4 (vector signed int ra, vector unsigned int count)
+{
+ return spu_sra (ra, count);
+}
+
+vector unsigned short test_sra_5 (vector unsigned short ra)
+{
+ return spu_sra (ra, 11);
+}
+
+vector signed short test_sra_6 (vector signed short ra)
+{
+ return spu_sra (ra, 11);
+}
+
+vector unsigned short test_sra_7 (vector unsigned short ra, unsigned int count)
+{
+ return spu_sra (ra, count);
+}
+
+vector signed short test_sra_8 (vector signed short ra, unsigned int count)
+{
+ return spu_sra (ra, count);
+}
+
+vector unsigned int test_sra_9 (vector unsigned int ra)
+{
+ return spu_sra (ra, 11);
+}
+
+vector signed int test_sra_10 (vector signed int ra)
+{
+ return spu_sra (ra, 11);
+}
+
+vector unsigned int test_sra_11 (vector unsigned int ra, unsigned int count)
+{
+ return spu_sra (ra, count);
+}
+
+vector signed int test_sra_12 (vector signed int ra, unsigned int count)
+{
+ return spu_sra (ra, count);
+}
+
+/* spu_srqw */
+
+vector unsigned char test_srqw_1 (vector unsigned char ra)
+{
+ return spu_srqw (ra, 5);
+}
+
+vector signed char test_srqw_2 (vector signed char ra)
+{
+ return spu_srqw (ra, 5);
+}
+
+vector unsigned short test_srqw_3 (vector unsigned short ra)
+{
+ return spu_srqw (ra, 5);
+}
+
+vector signed short test_srqw_4 (vector signed short ra)
+{
+ return spu_srqw (ra, 5);
+}
+
+vector unsigned int test_srqw_5 (vector unsigned int ra)
+{
+ return spu_srqw (ra, 5);
+}
+
+vector signed int test_srqw_6 (vector signed int ra)
+{
+ return spu_srqw (ra, 5);
+}
+
+vector unsigned long test_srqw_7 (vector unsigned long ra)
+{
+ return spu_srqw (ra, 5);
+}
+
+vector signed long test_srqw_8 (vector signed long ra)
+{
+ return spu_srqw (ra, 5);
+}
+
+vector unsigned long long test_srqw_9 (vector unsigned long long ra)
+{
+ return spu_srqw (ra, 5);
+}
+
+vector signed long long test_srqw_10 (vector signed long long ra)
+{
+ return spu_srqw (ra, 5);
+}
+
+vector float test_srqw_11 (vector float ra)
+{
+ return spu_srqw (ra, 5);
+}
+
+vector double test_srqw_12 (vector double ra)
+{
+ return spu_srqw (ra, 5);
+}
+
+vector unsigned char test_srqw_13 (vector unsigned char ra, unsigned int count)
+{
+ return spu_srqw (ra, count);
+}
+
+vector signed char test_srqw_14 (vector signed char ra, unsigned int count)
+{
+ return spu_srqw (ra, count);
+}
+
+vector unsigned short test_srqw_15 (vector unsigned short ra, unsigned int count)
+{
+ return spu_srqw (ra, count);
+}
+
+vector signed short test_srqw_16 (vector signed short ra, unsigned int count)
+{
+ return spu_srqw (ra, count);
+}
+
+vector unsigned int test_srqw_17 (vector unsigned int ra, unsigned int count)
+{
+ return spu_srqw (ra, count);
+}
+
+vector signed int test_srqw_18 (vector signed int ra, unsigned int count)
+{
+ return spu_srqw (ra, count);
+}
+
+vector unsigned long test_srqw_19 (vector unsigned long ra, unsigned int count)
+{
+ return spu_srqw (ra, count);
+}
+
+vector signed long test_srqw_20 (vector signed long ra, unsigned int count)
+{
+ return spu_srqw (ra, count);
+}
+
+vector unsigned long long test_srqw_21 (vector unsigned long long ra, unsigned int count)
+{
+ return spu_srqw (ra, count);
+}
+
+vector signed long long test_srqw_22 (vector signed long long ra, unsigned int count)
+{
+ return spu_srqw (ra, count);
+}
+
+vector float test_srqw_23 (vector float ra, unsigned int count)
+{
+ return spu_srqw (ra, count);
+}
+
+vector double test_srqw_24 (vector double ra, unsigned int count)
+{
+ return spu_srqw (ra, count);
+}
+
+/* spu_srqwbyte */
+
+vector unsigned char test_srqwbyte_1 (vector unsigned char ra)
+{
+ return spu_srqwbyte (ra, 5);
+}
+
+vector signed char test_srqwbyte_2 (vector signed char ra)
+{
+ return spu_srqwbyte (ra, 5);
+}
+
+vector unsigned short test_srqwbyte_3 (vector unsigned short ra)
+{
+ return spu_srqwbyte (ra, 5);
+}
+
+vector signed short test_srqwbyte_4 (vector signed short ra)
+{
+ return spu_srqwbyte (ra, 5);
+}
+
+vector unsigned int test_srqwbyte_5 (vector unsigned int ra)
+{
+ return spu_srqwbyte (ra, 5);
+}
+
+vector signed int test_srqwbyte_6 (vector signed int ra)
+{
+ return spu_srqwbyte (ra, 5);
+}
+
+vector unsigned long test_srqwbyte_7 (vector unsigned long ra)
+{
+ return spu_srqwbyte (ra, 5);
+}
+
+vector signed long test_srqwbyte_8 (vector signed long ra)
+{
+ return spu_srqwbyte (ra, 5);
+}
+
+vector unsigned long long test_srqwbyte_9 (vector unsigned long long ra)
+{
+ return spu_srqwbyte (ra, 5);
+}
+
+vector signed long long test_srqwbyte_10 (vector signed long long ra)
+{
+ return spu_srqwbyte (ra, 5);
+}
+
+vector float test_srqwbyte_11 (vector float ra)
+{
+ return spu_srqwbyte (ra, 5);
+}
+
+vector double test_srqwbyte_12 (vector double ra)
+{
+ return spu_srqwbyte (ra, 5);
+}
+
+vector unsigned char test_srqwbyte_13 (vector unsigned char ra, unsigned int count)
+{
+ return spu_srqwbyte (ra, count);
+}
+
+vector signed char test_srqwbyte_14 (vector signed char ra, unsigned int count)
+{
+ return spu_srqwbyte (ra, count);
+}
+
+vector unsigned short test_srqwbyte_15 (vector unsigned short ra, unsigned int count)
+{
+ return spu_srqwbyte (ra, count);
+}
+
+vector signed short test_srqwbyte_16 (vector signed short ra, unsigned int count)
+{
+ return spu_srqwbyte (ra, count);
+}
+
+vector unsigned int test_srqwbyte_17 (vector unsigned int ra, unsigned int count)
+{
+ return spu_srqwbyte (ra, count);
+}
+
+vector signed int test_srqwbyte_18 (vector signed int ra, unsigned int count)
+{
+ return spu_srqwbyte (ra, count);
+}
+
+vector unsigned long test_srqwbyte_19 (vector unsigned long ra, unsigned int count)
+{
+ return spu_srqwbyte (ra, count);
+}
+
+vector signed long test_srqwbyte_20 (vector signed long ra, unsigned int count)
+{
+ return spu_srqwbyte (ra, count);
+}
+
+vector unsigned long long test_srqwbyte_21 (vector unsigned long long ra, unsigned int count)
+{
+ return spu_srqwbyte (ra, count);
+}
+
+vector signed long long test_srqwbyte_22 (vector signed long long ra, unsigned int count)
+{
+ return spu_srqwbyte (ra, count);
+}
+
+vector float test_srqwbyte_23 (vector float ra, unsigned int count)
+{
+ return spu_srqwbyte (ra, count);
+}
+
+vector double test_srqwbyte_24 (vector double ra, unsigned int count)
+{
+ return spu_srqwbyte (ra, count);
+}
+
+/* spu_srqwbytebc */
+
+vector unsigned char test_srqwbytebc_1 (vector unsigned char ra)
+{
+ return spu_srqwbytebc (ra, 40);
+}
+
+vector signed char test_srqwbytebc_2 (vector signed char ra)
+{
+ return spu_srqwbytebc (ra, 40);
+}
+
+vector unsigned short test_srqwbytebc_3 (vector unsigned short ra)
+{
+ return spu_srqwbytebc (ra, 40);
+}
+
+vector signed short test_srqwbytebc_4 (vector signed short ra)
+{
+ return spu_srqwbytebc (ra, 40);
+}
+
+vector unsigned int test_srqwbytebc_5 (vector unsigned int ra)
+{
+ return spu_srqwbytebc (ra, 40);
+}
+
+vector signed int test_srqwbytebc_6 (vector signed int ra)
+{
+ return spu_srqwbytebc (ra, 40);
+}
+
+vector unsigned long test_srqwbytebc_7 (vector unsigned long ra)
+{
+ return spu_srqwbytebc (ra, 40);
+}
+
+vector signed long test_srqwbytebc_8 (vector signed long ra)
+{
+ return spu_srqwbytebc (ra, 40);
+}
+
+vector unsigned long long test_srqwbytebc_9 (vector unsigned long long ra)
+{
+ return spu_srqwbytebc (ra, 40);
+}
+
+vector signed long long test_srqwbytebc_10 (vector signed long long ra)
+{
+ return spu_srqwbytebc (ra, 40);
+}
+
+vector float test_srqwbytebc_11 (vector float ra)
+{
+ return spu_srqwbytebc (ra, 40);
+}
+
+vector double test_srqwbytebc_12 (vector double ra)
+{
+ return spu_srqwbytebc (ra, 40);
+}
+
+vector unsigned char test_srqwbytebc_13 (vector unsigned char ra, unsigned int count)
+{
+ return spu_srqwbytebc (ra, count);
+}
+
+vector signed char test_srqwbytebc_14 (vector signed char ra, unsigned int count)
+{
+ return spu_srqwbytebc (ra, count);
+}
+
+vector unsigned short test_srqwbytebc_15 (vector unsigned short ra, unsigned int count)
+{
+ return spu_srqwbytebc (ra, count);
+}
+
+vector signed short test_srqwbytebc_16 (vector signed short ra, unsigned int count)
+{
+ return spu_srqwbytebc (ra, count);
+}
+
+vector unsigned int test_srqwbytebc_17 (vector unsigned int ra, unsigned int count)
+{
+ return spu_srqwbytebc (ra, count);
+}
+
+vector signed int test_srqwbytebc_18 (vector signed int ra, unsigned int count)
+{
+ return spu_srqwbytebc (ra, count);
+}
+
+vector unsigned long test_srqwbytebc_19 (vector unsigned long ra, unsigned int count)
+{
+ return spu_srqwbytebc (ra, count);
+}
+
+vector signed long test_srqwbytebc_20 (vector signed long ra, unsigned int count)
+{
+ return spu_srqwbytebc (ra, count);
+}
+
+vector unsigned long long test_srqwbytebc_21 (vector unsigned long long ra, unsigned int count)
+{
+ return spu_srqwbytebc (ra, count);
+}
+
+vector signed long long test_srqwbytebc_22 (vector signed long long ra, unsigned int count)
+{
+ return spu_srqwbytebc (ra, count);
+}
+
+vector float test_srqwbytebc_23 (vector float ra, unsigned int count)
+{
+ return spu_srqwbytebc (ra, count);
+}
+
+vector double test_srqwbytebc_24 (vector double ra, unsigned int count)
+{
+ return spu_srqwbytebc (ra, count);
+}