2 ;; Copyright (C) 2002-2022 Free Software Foundation, Inc.
3 ;; Contributed by Aldy Hernandez (aldy@quesejoda.com)
5 ;; This file is part of GCC.
7 ;; GCC is free software; you can redistribute it and/or modify it
8 ;; under the terms of the GNU General Public License as published
9 ;; by the Free Software Foundation; either version 3, or (at your
10 ;; option) any later version.
12 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
13 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 ;; License for more details.
17 ;; You should have received a copy of the GNU General Public License
18 ;; along with GCC; see the file COPYING3. If not see
19 ;; <http://www.gnu.org/licenses/>.
21 (define_c_enum "unspec"
51 UNSPEC_VPACK_SIGN_SIGN_SAT
52 UNSPEC_VPACK_SIGN_UNS_SAT
53 UNSPEC_VPACK_UNS_UNS_SAT
54 UNSPEC_VPACK_UNS_UNS_MOD
55 UNSPEC_VPACK_UNS_UNS_MOD_DIRECT
77 UNSPEC_VUNPACK_HI_SIGN
78 UNSPEC_VUNPACK_LO_SIGN
79 UNSPEC_VUNPACK_HI_SIGN_DIRECT
80 UNSPEC_VUNPACK_LO_SIGN_DIRECT
83 UNSPEC_CONVERT_4F32_8I16
84 UNSPEC_CONVERT_4F32_8F16
147 UNSPEC_VSUMSWS_DIRECT
176 (define_c_enum "unspecv"
184 ;; Short vec int modes
185 (define_mode_iterator VIshort [V8HI V16QI])
187 (define_mode_iterator VF [V4SF])
188 ;; Vec modes, pity mode iterators are not composable
189 (define_mode_iterator V [V4SI V8HI V16QI V4SF])
190 ;; Vec modes for move/logical/permute ops, include vector types for move not
191 ;; otherwise handled by altivec (v2df, v2di, ti)
192 (define_mode_iterator VM [V4SI
200 (KF "FLOAT128_VECTOR_P (KFmode)")
201 (TF "FLOAT128_VECTOR_P (TFmode)")])
203 ;; Like VM, except don't do TImode
204 (define_mode_iterator VM2 [V4SI
211 (KF "FLOAT128_VECTOR_P (KFmode)")
212 (TF "FLOAT128_VECTOR_P (TFmode)")])
214 ;; Map the Vector convert single precision to double precision for integer
215 ;; versus floating point
216 (define_mode_attr VS_sxwsp [(V4SI "sxw") (V4SF "sp")])
218 ;; Specific iterator for parity which does not have a byte/half-word form, but
219 ;; does have a quad word form
220 (define_mode_iterator VParity [V4SI
225 (define_mode_attr VI_char [(V2DI "d") (V4SI "w") (V8HI "h") (V16QI "b")])
226 (define_mode_attr VI_scalar [(V2DI "DI") (V4SI "SI") (V8HI "HI") (V16QI "QI")])
227 (define_mode_attr VI_unit [(V16QI "VECTOR_UNIT_ALTIVEC_P (V16QImode)")
228 (V8HI "VECTOR_UNIT_ALTIVEC_P (V8HImode)")
229 (V4SI "VECTOR_UNIT_ALTIVEC_P (V4SImode)")
230 (V2DI "VECTOR_UNIT_P8_VECTOR_P (V2DImode)")
231 (V1TI "VECTOR_UNIT_ALTIVEC_P (V1TImode)")])
233 ;; Vector pack/unpack
234 (define_mode_iterator VP [V2DI V4SI V8HI])
235 (define_mode_attr VP_small [(V2DI "V4SI") (V4SI "V8HI") (V8HI "V16QI")])
236 (define_mode_attr VP_small_lc [(V2DI "v4si") (V4SI "v8hi") (V8HI "v16qi")])
237 (define_mode_attr VU_char [(V2DI "w") (V4SI "h") (V8HI "b")])
240 (define_mode_iterator VNEG [V4SI V2DI])
242 ;; Vector move instructions.
243 (define_insn "*altivec_mov<mode>"
244 [(set (match_operand:VM2 0 "nonimmediate_operand" "=Z,v,v,?Y,?*r,?*r,v,v,?*r")
245 (match_operand:VM2 1 "input_operand" "v,Z,v,*r,Y,*r,j,W,W"))]
246 "VECTOR_MEM_ALTIVEC_P (<MODE>mode)
247 && (register_operand (operands[0], <MODE>mode)
248 || register_operand (operands[1], <MODE>mode))"
257 * return output_vec_const_move (operands);
259 [(set_attr "type" "vecstore,vecload,veclogical,store,load,*,veclogical,*,*")
260 (set_attr "length" "*,*,*,20,20,20,*,8,32")])
262 ;; Unlike other altivec moves, allow the GPRs, since a normal use of TImode
263 ;; is for unions. However for plain data movement, slightly favor the vector
265 (define_insn "*altivec_movti"
266 [(set (match_operand:TI 0 "nonimmediate_operand" "=Z,v,v,?Y,?r,?r,v,v")
267 (match_operand:TI 1 "input_operand" "v,Z,v,r,Y,r,j,W"))]
268 "VECTOR_MEM_ALTIVEC_P (TImode)
269 && (register_operand (operands[0], TImode)
270 || register_operand (operands[1], TImode))"
279 * return output_vec_const_move (operands);"
280 [(set_attr "type" "vecstore,vecload,veclogical,store,load,*,veclogical,*")])
282 ;; Load up a vector with the most significant bit set by loading up -1 and
283 ;; doing a shift left
285 [(set (match_operand:VM 0 "altivec_register_operand")
286 (match_operand:VM 1 "easy_vector_constant_msb"))]
287 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && reload_completed"
290 rtx dest = operands[0];
295 switch (easy_altivec_constant (operands[1], <MODE>mode))
309 if (mode != <MODE>mode)
310 dest = gen_lowpart (mode, dest);
312 num_elements = GET_MODE_NUNITS (mode);
313 v = rtvec_alloc (num_elements);
314 for (i = 0; i < num_elements; i++)
315 RTVEC_ELT (v, i) = constm1_rtx;
317 rs6000_expand_vector_init (dest, gen_rtx_PARALLEL (mode, v));
318 emit_insn (gen_rtx_SET (dest, gen_rtx_ASHIFT (mode, dest, dest)));
323 [(set (match_operand:VM 0 "altivec_register_operand")
324 (match_operand:VM 1 "easy_vector_constant_add_self"))]
325 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && reload_completed"
326 [(set (match_dup 0) (match_dup 3))
327 (set (match_dup 0) (match_dup 4))]
329 rtx dup = gen_easy_altivec_constant (operands[1]);
331 machine_mode op_mode = <MODE>mode;
333 /* Divide the operand of the resulting VEC_DUPLICATE, and use
334 simplify_rtx to make a CONST_VECTOR. */
335 XEXP (dup, 0) = simplify_const_binary_operation (ASHIFTRT, QImode,
336 XEXP (dup, 0), const1_rtx);
337 const_vec = simplify_rtx (dup);
339 if (op_mode == V4SFmode)
342 operands[0] = gen_lowpart (op_mode, operands[0]);
344 if (GET_MODE (const_vec) == op_mode)
345 operands[3] = const_vec;
347 operands[3] = gen_lowpart (op_mode, const_vec);
348 operands[4] = gen_rtx_PLUS (op_mode, operands[0], operands[0]);
352 [(set (match_operand:VM 0 "altivec_register_operand")
353 (match_operand:VM 1 "easy_vector_constant_vsldoi"))]
354 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && can_create_pseudo_p ()"
355 [(set (match_dup 2) (match_dup 3))
356 (set (match_dup 4) (match_dup 5))
358 (unspec:VM [(match_dup 2)
363 rtx op1 = operands[1];
364 int elt = (BYTES_BIG_ENDIAN) ? 0 : GET_MODE_NUNITS (<MODE>mode) - 1;
365 HOST_WIDE_INT val = const_vector_elt_as_int (op1, elt);
366 rtx rtx_val = GEN_INT (val);
367 int shift = vspltis_shifted (op1);
369 gcc_assert (shift != 0);
370 operands[2] = gen_reg_rtx (<MODE>mode);
371 operands[3] = gen_const_vec_duplicate (<MODE>mode, rtx_val);
372 operands[4] = gen_reg_rtx (<MODE>mode);
376 operands[5] = CONSTM1_RTX (<MODE>mode);
377 operands[6] = GEN_INT (-shift);
381 operands[5] = CONST0_RTX (<MODE>mode);
382 operands[6] = GEN_INT (shift);
386 (define_insn "get_vrsave_internal"
387 [(set (match_operand:SI 0 "register_operand" "=r")
388 (unspec:SI [(reg:SI VRSAVE_REGNO)] UNSPEC_GET_VRSAVE))]
392 return "mfspr %0,256";
394 return "mfvrsave %0";
396 [(set_attr "type" "*")])
398 (define_insn "*set_vrsave_internal"
399 [(match_parallel 0 "vrsave_operation"
400 [(set (reg:SI VRSAVE_REGNO)
401 (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "r")
402 (reg:SI VRSAVE_REGNO)] UNSPECV_SET_VRSAVE))])]
406 return "mtspr 256,%1";
408 return "mtvrsave %1";
410 [(set_attr "type" "*")])
412 (define_insn "*save_world"
413 [(match_parallel 0 "save_world_operation"
414 [(clobber (reg:SI LR_REGNO))
415 (use (match_operand:SI 1 "call_operand" "s"))])]
416 "TARGET_MACHO && (DEFAULT_ABI == ABI_DARWIN) && TARGET_32BIT"
418 [(set_attr "type" "branch")])
420 (define_insn "*restore_world"
421 [(match_parallel 0 "restore_world_operation"
423 (use (match_operand:SI 1 "call_operand" "s"))
424 (clobber (match_operand:SI 2 "gpc_reg_operand" "=r"))])]
425 "TARGET_MACHO && (DEFAULT_ABI == ABI_DARWIN) && TARGET_32BIT"
428 ;; The save_vregs and restore_vregs patterns don't use memory_operand
429 ;; because (plus (reg) (const_int)) is not a valid vector address.
430 ;; This way is more compact than describing exactly what happens in
431 ;; the out-of-line functions, ie. loading the constant into r11/r12
432 ;; then using indexed addressing, and requires less editing of rtl
433 ;; to describe the operation to dwarf2out_frame_debug_expr.
434 (define_insn "*save_vregs_<mode>_r11"
435 [(match_parallel 0 "any_parallel_operand"
436 [(clobber (reg:P LR_REGNO))
437 (use (match_operand:P 1 "symbol_ref_operand" "s"))
440 (set (mem:V4SI (plus:P (match_operand:P 2 "gpc_reg_operand" "b")
441 (match_operand:P 3 "short_cint_operand" "I")))
442 (match_operand:V4SI 4 "altivec_register_operand" "v"))])]
445 [(set_attr "type" "branch")])
447 (define_insn "*save_vregs_<mode>_r12"
448 [(match_parallel 0 "any_parallel_operand"
449 [(clobber (reg:P LR_REGNO))
450 (use (match_operand:P 1 "symbol_ref_operand" "s"))
453 (set (mem:V4SI (plus:P (match_operand:P 2 "gpc_reg_operand" "b")
454 (match_operand:P 3 "short_cint_operand" "I")))
455 (match_operand:V4SI 4 "altivec_register_operand" "v"))])]
458 [(set_attr "type" "branch")])
460 (define_insn "*restore_vregs_<mode>_r11"
461 [(match_parallel 0 "any_parallel_operand"
462 [(clobber (reg:P LR_REGNO))
463 (use (match_operand:P 1 "symbol_ref_operand" "s"))
466 (set (match_operand:V4SI 2 "altivec_register_operand" "=v")
467 (mem:V4SI (plus:P (match_operand:P 3 "gpc_reg_operand" "b")
468 (match_operand:P 4 "short_cint_operand" "I"))))])]
471 [(set_attr "type" "branch")])
473 (define_insn "*restore_vregs_<mode>_r12"
474 [(match_parallel 0 "any_parallel_operand"
475 [(clobber (reg:P LR_REGNO))
476 (use (match_operand:P 1 "symbol_ref_operand" "s"))
479 (set (match_operand:V4SI 2 "altivec_register_operand" "=v")
480 (mem:V4SI (plus:P (match_operand:P 3 "gpc_reg_operand" "b")
481 (match_operand:P 4 "short_cint_operand" "I"))))])]
484 [(set_attr "type" "branch")])
486 ;; Simple binary operations.
489 (define_insn "add<mode>3"
490 [(set (match_operand:VI2 0 "register_operand" "=v")
491 (plus:VI2 (match_operand:VI2 1 "register_operand" "v")
492 (match_operand:VI2 2 "register_operand" "v")))]
494 "vaddu<VI_char>m %0,%1,%2"
495 [(set_attr "type" "vecsimple")])
497 (define_insn "*altivec_addv4sf3"
498 [(set (match_operand:V4SF 0 "register_operand" "=v")
499 (plus:V4SF (match_operand:V4SF 1 "register_operand" "v")
500 (match_operand:V4SF 2 "register_operand" "v")))]
501 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
503 [(set_attr "type" "vecfloat")])
505 (define_insn "altivec_vaddcuw"
506 [(set (match_operand:V4SI 0 "register_operand" "=v")
507 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
508 (match_operand:V4SI 2 "register_operand" "v")]
510 "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
512 [(set_attr "type" "vecsimple")])
514 (define_insn "altivec_vaddu<VI_char>s"
515 [(set (match_operand:VI 0 "register_operand" "=v")
516 (us_plus:VI (match_operand:VI 1 "register_operand" "v")
517 (match_operand:VI 2 "register_operand" "v")))
518 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
520 "vaddu<VI_char>s %0,%1,%2"
521 [(set_attr "type" "vecsimple")])
;; Vector add signed saturate: vadds{b,h,w}s.
;; Element-wise signed saturating addition (ss_plus).  The parallel SET of
;; VSCR_REGNO via UNSPEC_SET_VSCR models the instruction's side effect on
;; the VSCR SAT bit, so the pattern cannot be treated as a pure addition
;; and dropped or CSE'd with a plain add.
(define_insn "altivec_vadds<VI_char>s"
  [(set (match_operand:VI 0 "register_operand" "=v")
        (ss_plus:VI (match_operand:VI 1 "register_operand" "v")
                    (match_operand:VI 2 "register_operand" "v")))
   (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
  "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
  "vadds<VI_char>s %0,%1,%2"
  [(set_attr "type" "vecsimple")])
533 (define_insn "sub<mode>3"
534 [(set (match_operand:VI2 0 "register_operand" "=v")
535 (minus:VI2 (match_operand:VI2 1 "register_operand" "v")
536 (match_operand:VI2 2 "register_operand" "v")))]
538 "vsubu<VI_char>m %0,%1,%2"
539 [(set_attr "type" "vecsimple")])
541 (define_insn "*altivec_subv4sf3"
542 [(set (match_operand:V4SF 0 "register_operand" "=v")
543 (minus:V4SF (match_operand:V4SF 1 "register_operand" "v")
544 (match_operand:V4SF 2 "register_operand" "v")))]
545 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
547 [(set_attr "type" "vecfloat")])
549 (define_insn "altivec_vsubcuw"
550 [(set (match_operand:V4SI 0 "register_operand" "=v")
551 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
552 (match_operand:V4SI 2 "register_operand" "v")]
554 "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
556 [(set_attr "type" "vecsimple")])
;; Vector subtract unsigned saturate: vsubu{b,h,w}s.
;; Element-wise unsigned saturating subtraction (us_minus); results clamp
;; at zero instead of wrapping.  The parallel SET of VSCR_REGNO via
;; UNSPEC_SET_VSCR models the update of the VSCR SAT bit.
(define_insn "altivec_vsubu<VI_char>s"
  [(set (match_operand:VI 0 "register_operand" "=v")
        (us_minus:VI (match_operand:VI 1 "register_operand" "v")
                     (match_operand:VI 2 "register_operand" "v")))
   (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
  "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
  "vsubu<VI_char>s %0,%1,%2"
  [(set_attr "type" "vecsimple")])
;; Vector subtract signed saturate: vsubs{b,h,w}s.
;; Element-wise signed saturating subtraction (ss_minus).  As with the
;; other saturating patterns, the parallel SET of VSCR_REGNO via
;; UNSPEC_SET_VSCR represents the instruction's VSCR SAT-bit side effect.
(define_insn "altivec_vsubs<VI_char>s"
  [(set (match_operand:VI 0 "register_operand" "=v")
        (ss_minus:VI (match_operand:VI 1 "register_operand" "v")
                     (match_operand:VI 2 "register_operand" "v")))
   (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
  "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
  "vsubs<VI_char>s %0,%1,%2"
  [(set_attr "type" "vecsimple")])
577 (define_insn "uavg<mode>3_ceil"
578 [(set (match_operand:VI 0 "register_operand" "=v")
579 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
580 (match_operand:VI 2 "register_operand" "v")]
583 "vavgu<VI_char> %0,%1,%2"
584 [(set_attr "type" "vecsimple")])
586 (define_insn "avg<mode>3_ceil"
587 [(set (match_operand:VI 0 "register_operand" "=v")
588 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
589 (match_operand:VI 2 "register_operand" "v")]
591 "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
592 "vavgs<VI_char> %0,%1,%2"
593 [(set_attr "type" "vecsimple")])
595 (define_insn "altivec_vcmpbfp"
596 [(set (match_operand:V4SI 0 "register_operand" "=v")
597 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
598 (match_operand:V4SF 2 "register_operand" "v")]
600 "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
602 [(set_attr "type" "veccmp")])
604 (define_insn "altivec_eqv1ti"
605 [(set (match_operand:V1TI 0 "altivec_register_operand" "=v")
606 (eq:V1TI (match_operand:V1TI 1 "altivec_register_operand" "v")
607 (match_operand:V1TI 2 "altivec_register_operand" "v")))]
610 [(set_attr "type" "veccmpfx")])
612 (define_insn "altivec_eq<mode>"
613 [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
614 (eq:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
615 (match_operand:VI2 2 "altivec_register_operand" "v")))]
617 "vcmpequ<VI_char> %0,%1,%2"
618 [(set_attr "type" "veccmpfx")])
620 (define_insn "*altivec_gt<mode>"
621 [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
622 (gt:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
623 (match_operand:VI2 2 "altivec_register_operand" "v")))]
625 "vcmpgts<VI_char> %0,%1,%2"
626 [(set_attr "type" "veccmpfx")])
628 (define_insn "*altivec_gtv1ti"
629 [(set (match_operand:V1TI 0 "altivec_register_operand" "=v")
630 (gt:V1TI (match_operand:V1TI 1 "altivec_register_operand" "v")
631 (match_operand:V1TI 2 "altivec_register_operand" "v")))]
634 [(set_attr "type" "veccmpfx")])
636 (define_insn "*altivec_gtu<mode>"
637 [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
638 (gtu:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
639 (match_operand:VI2 2 "altivec_register_operand" "v")))]
641 "vcmpgtu<VI_char> %0,%1,%2"
642 [(set_attr "type" "veccmpfx")])
644 (define_insn "*altivec_gtuv1ti"
645 [(set (match_operand:V1TI 0 "altivec_register_operand" "=v")
646 (gtu:V1TI (match_operand:V1TI 1 "altivec_register_operand" "v")
647 (match_operand:V1TI 2 "altivec_register_operand" "v")))]
650 [(set_attr "type" "veccmpfx")])
652 (define_insn "*altivec_eqv4sf"
653 [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
654 (eq:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
655 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
656 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
658 [(set_attr "type" "veccmp")])
660 (define_insn "*altivec_gtv4sf"
661 [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
662 (gt:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
663 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
664 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
666 [(set_attr "type" "veccmp")])
668 (define_insn "*altivec_gev4sf"
669 [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
670 (ge:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
671 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
672 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
674 [(set_attr "type" "veccmp")])
676 (define_insn "altivec_vsel<mode>"
677 [(set (match_operand:VM 0 "register_operand" "=wa,v")
680 (not:VM (match_operand:VM 3 "register_operand" "wa,v"))
681 (match_operand:VM 1 "register_operand" "wa,v"))
684 (match_operand:VM 2 "register_operand" "wa,v"))))]
685 "VECTOR_MEM_ALTIVEC_OR_VSX_P (<MODE>mode)"
687 xxsel %x0,%x1,%x2,%x3
689 [(set_attr "type" "vecmove")
690 (set_attr "isa" "<VSisa>")])
692 (define_insn "altivec_vsel<mode>2"
693 [(set (match_operand:VM 0 "register_operand" "=wa,v")
696 (not:VM (match_operand:VM 3 "register_operand" "wa,v"))
697 (match_operand:VM 1 "register_operand" "wa,v"))
699 (match_operand:VM 2 "register_operand" "wa,v")
701 "VECTOR_MEM_ALTIVEC_OR_VSX_P (<MODE>mode)"
703 xxsel %x0,%x1,%x2,%x3
705 [(set_attr "type" "vecmove")
706 (set_attr "isa" "<VSisa>")])
708 (define_insn "altivec_vsel<mode>3"
709 [(set (match_operand:VM 0 "register_operand" "=wa,v")
712 (match_operand:VM 3 "register_operand" "wa,v")
713 (match_operand:VM 1 "register_operand" "wa,v"))
715 (not:VM (match_dup 3))
716 (match_operand:VM 2 "register_operand" "wa,v"))))]
717 "VECTOR_MEM_ALTIVEC_OR_VSX_P (<MODE>mode)"
719 xxsel %x0,%x2,%x1,%x3
721 [(set_attr "type" "vecmove")
722 (set_attr "isa" "<VSisa>")])
724 (define_insn "altivec_vsel<mode>4"
725 [(set (match_operand:VM 0 "register_operand" "=wa,v")
728 (match_operand:VM 1 "register_operand" "wa,v")
729 (match_operand:VM 3 "register_operand" "wa,v"))
731 (not:VM (match_dup 3))
732 (match_operand:VM 2 "register_operand" "wa,v"))))]
733 "VECTOR_MEM_ALTIVEC_OR_VSX_P (<MODE>mode)"
735 xxsel %x0,%x2,%x1,%x3
737 [(set_attr "type" "vecmove")
738 (set_attr "isa" "<VSisa>")])
740 ;; Fused multiply add.
;; Fused multiply-add on V4SF via the AltiVec vmaddfp instruction:
;; operand 0 = operand 1 * operand 2 + operand 3, expressed with the RTL
;; fma code (fused, no intermediate rounding).  Anonymous ("*"-prefixed)
;; pattern: matched by combine/expanders, never generated by name.
(define_insn "*altivec_fmav4sf4"
  [(set (match_operand:V4SF 0 "register_operand" "=v")
        (fma:V4SF (match_operand:V4SF 1 "register_operand" "v")
                  (match_operand:V4SF 2 "register_operand" "v")
                  (match_operand:V4SF 3 "register_operand" "v")))]
  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
  "vmaddfp %0,%1,%2,%3"
  [(set_attr "type" "vecfloat")])
751 ;; We do multiply as a fused multiply-add with an add of a -0.0 vector.
753 (define_expand "altivec_mulv4sf3"
754 [(set (match_operand:V4SF 0 "register_operand")
755 (fma:V4SF (match_operand:V4SF 1 "register_operand")
756 (match_operand:V4SF 2 "register_operand")
758 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
762 /* Generate [-0.0, -0.0, -0.0, -0.0]. */
763 neg0 = gen_reg_rtx (V4SImode);
764 emit_insn (gen_altivec_vspltisw (neg0, constm1_rtx));
765 emit_insn (gen_vashlv4si3 (neg0, neg0, neg0));
767 operands[3] = gen_lowpart (V4SFmode, neg0);
770 ;; 32-bit integer multiplication
771 ;; A_high = Operand_0 & 0xFFFF0000 >> 16
772 ;; A_low = Operand_0 & 0xFFFF
773 ;; B_high = Operand_1 & 0xFFFF0000 >> 16
774 ;; B_low = Operand_1 & 0xFFFF
775 ;; result = A_low * B_low + (A_high * B_low + B_high * A_low) << 16
777 ;; (define_insn "mulv4si3"
778 ;; [(set (match_operand:V4SI 0 "register_operand" "=v")
779 ;; (mult:V4SI (match_operand:V4SI 1 "register_operand" "v")
780 ;; (match_operand:V4SI 2 "register_operand" "v")))]
781 (define_insn "mulv4si3_p8"
782 [(set (match_operand:V4SI 0 "register_operand" "=v")
783 (mult:V4SI (match_operand:V4SI 1 "register_operand" "v")
784 (match_operand:V4SI 2 "register_operand" "v")))]
787 [(set_attr "type" "veccomplex")])
789 (define_expand "mulv4si3"
790 [(use (match_operand:V4SI 0 "register_operand"))
791 (use (match_operand:V4SI 1 "register_operand"))
792 (use (match_operand:V4SI 2 "register_operand"))]
804 if (TARGET_P8_VECTOR)
806 emit_insn (gen_mulv4si3_p8 (operands[0], operands[1], operands[2]));
810 zero = gen_reg_rtx (V4SImode);
811 emit_insn (gen_altivec_vspltisw (zero, const0_rtx));
813 sixteen = gen_reg_rtx (V4SImode);
814 emit_insn (gen_altivec_vspltisw (sixteen, gen_rtx_CONST_INT (V4SImode, -16)));
816 swap = gen_reg_rtx (V4SImode);
817 emit_insn (gen_vrotlv4si3 (swap, operands[2], sixteen));
819 one = gen_reg_rtx (V8HImode);
820 convert_move (one, operands[1], 0);
822 two = gen_reg_rtx (V8HImode);
823 convert_move (two, operands[2], 0);
825 small_swap = gen_reg_rtx (V8HImode);
826 convert_move (small_swap, swap, 0);
828 low_product = gen_reg_rtx (V4SImode);
829 emit_insn (gen_altivec_vmulouh (low_product, one, two));
831 high_product = gen_reg_rtx (V4SImode);
832 emit_insn (gen_altivec_vmsumuhm (high_product, one, small_swap, zero));
834 emit_insn (gen_vashlv4si3 (high_product, high_product, sixteen));
836 emit_insn (gen_addv4si3 (operands[0], high_product, low_product));
841 (define_expand "mulv8hi3"
842 [(use (match_operand:V8HI 0 "register_operand"))
843 (use (match_operand:V8HI 1 "register_operand"))
844 (use (match_operand:V8HI 2 "register_operand"))]
847 rtx zero = gen_reg_rtx (V8HImode);
849 emit_insn (gen_altivec_vspltish (zero, const0_rtx));
850 emit_insn (gen_fmav8hi4 (operands[0], operands[1], operands[2], zero));
855 ;; Map UNSPEC_SLDB to "l" and UNSPEC_SRDB to "r".
856 (define_int_attr SLDB_lr [(UNSPEC_SLDB "l")
859 (define_int_iterator VSHIFT_DBL_LR [UNSPEC_SLDB UNSPEC_SRDB])
861 (define_insn "vs<SLDB_lr>db_<mode>"
862 [(set (match_operand:VI2 0 "register_operand" "=v")
863 (unspec:VI2 [(match_operand:VI2 1 "register_operand" "v")
864 (match_operand:VI2 2 "register_operand" "v")
865 (match_operand:QI 3 "const_0_to_12_operand" "n")]
868 "vs<SLDB_lr>dbi %0,%1,%2,%3"
869 [(set_attr "type" "vecsimple")])
871 (define_expand "vstrir_<mode>"
872 [(set (match_operand:VIshort 0 "altivec_register_operand")
873 (unspec:VIshort [(match_operand:VIshort 1 "altivec_register_operand")]
877 if (BYTES_BIG_ENDIAN)
878 emit_insn (gen_vstrir_code_<mode> (operands[0], operands[1]));
880 emit_insn (gen_vstril_code_<mode> (operands[0], operands[1]));
884 (define_insn "vstrir_code_<mode>"
885 [(set (match_operand:VIshort 0 "altivec_register_operand" "=v")
887 [(match_operand:VIshort 1 "altivec_register_operand" "v")]
891 [(set_attr "type" "vecsimple")])
893 ;; This expands into same code as vstrir_<mode> followed by condition logic
894 ;; so that a single vstribr. or vstrihr. or vstribl. or vstrihl. instruction
895 ;; can, for example, satisfy the needs of a vec_strir () function paired
896 ;; with a vec_strir_p () function if both take the same incoming arguments.
897 (define_expand "vstrir_p_<mode>"
898 [(match_operand:SI 0 "gpc_reg_operand")
899 (match_operand:VIshort 1 "altivec_register_operand")]
902 rtx scratch = gen_reg_rtx (<MODE>mode);
903 if (BYTES_BIG_ENDIAN)
904 emit_insn (gen_vstrir_p_code_<mode> (scratch, operands[1]));
906 emit_insn (gen_vstril_p_code_<mode> (scratch, operands[1]));
907 emit_insn (gen_cr6_test_for_zero (operands[0]));
911 (define_insn "vstrir_p_code_<mode>"
912 [(set (match_operand:VIshort 0 "altivec_register_operand" "=v")
914 [(match_operand:VIshort 1 "altivec_register_operand" "v")]
916 (set (reg:CC CR6_REGNO)
917 (unspec:CC [(match_dup 1)]
921 [(set_attr "type" "vecsimple")])
923 (define_expand "vstril_<mode>"
924 [(set (match_operand:VIshort 0 "altivec_register_operand")
925 (unspec:VIshort [(match_operand:VIshort 1 "altivec_register_operand")]
929 if (BYTES_BIG_ENDIAN)
930 emit_insn (gen_vstril_code_<mode> (operands[0], operands[1]));
932 emit_insn (gen_vstrir_code_<mode> (operands[0], operands[1]));
936 (define_insn "vstril_code_<mode>"
937 [(set (match_operand:VIshort 0 "altivec_register_operand" "=v")
939 [(match_operand:VIshort 1 "altivec_register_operand" "v")]
943 [(set_attr "type" "vecsimple")])
945 ;; This expands into same code as vstril_<mode> followed by condition logic
946 ;; so that a single vstribr. or vstrihr. or vstribl. or vstrihl. instruction
947 ;; can, for example, satisfy the needs of a vec_stril () function paired
948 ;; with a vec_stril_p () function if both take the same incoming arguments.
949 (define_expand "vstril_p_<mode>"
950 [(match_operand:SI 0 "gpc_reg_operand")
951 (match_operand:VIshort 1 "altivec_register_operand")]
954 rtx scratch = gen_reg_rtx (<MODE>mode);
955 if (BYTES_BIG_ENDIAN)
956 emit_insn (gen_vstril_p_code_<mode> (scratch, operands[1]));
958 emit_insn (gen_vstrir_p_code_<mode> (scratch, operands[1]));
959 emit_insn (gen_cr6_test_for_zero (operands[0]));
963 (define_insn "vstril_p_code_<mode>"
964 [(set (match_operand:VIshort 0 "altivec_register_operand" "=v")
966 [(match_operand:VIshort 1 "altivec_register_operand" "v")]
968 (set (reg:CC CR6_REGNO)
969 (unspec:CC [(match_dup 1)]
973 [(set_attr "type" "vecsimple")])
975 ;; Fused multiply subtract
976 (define_insn "*altivec_vnmsubfp"
977 [(set (match_operand:V4SF 0 "register_operand" "=v")
979 (fma:V4SF (match_operand:V4SF 1 "register_operand" "v")
980 (match_operand:V4SF 2 "register_operand" "v")
982 (match_operand:V4SF 3 "register_operand" "v")))))]
983 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
984 "vnmsubfp %0,%1,%2,%3"
985 [(set_attr "type" "vecfloat")])
987 (define_insn "altivec_vmsumu<VI_char>m"
988 [(set (match_operand:V4SI 0 "register_operand" "=v")
989 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
990 (match_operand:VIshort 2 "register_operand" "v")
991 (match_operand:V4SI 3 "register_operand" "v")]
994 "vmsumu<VI_char>m %0,%1,%2,%3"
995 [(set_attr "type" "veccomplex")])
997 (define_insn "altivec_vmsumudm"
998 [(set (match_operand:V1TI 0 "register_operand" "=v")
999 (unspec:V1TI [(match_operand:V2DI 1 "register_operand" "v")
1000 (match_operand:V2DI 2 "register_operand" "v")
1001 (match_operand:V1TI 3 "register_operand" "v")]
1004 "vmsumudm %0,%1,%2,%3"
1005 [(set_attr "type" "veccomplex")])
1007 (define_insn "altivec_vmsumm<VI_char>m"
1008 [(set (match_operand:V4SI 0 "register_operand" "=v")
1009 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
1010 (match_operand:VIshort 2 "register_operand" "v")
1011 (match_operand:V4SI 3 "register_operand" "v")]
1014 "vmsumm<VI_char>m %0,%1,%2,%3"
1015 [(set_attr "type" "veccomplex")])
1017 (define_insn "altivec_vmsumshm"
1018 [(set (match_operand:V4SI 0 "register_operand" "=v")
1019 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1020 (match_operand:V8HI 2 "register_operand" "v")
1021 (match_operand:V4SI 3 "register_operand" "v")]
1024 "vmsumshm %0,%1,%2,%3"
1025 [(set_attr "type" "veccomplex")])
1027 (define_insn "altivec_vmsumuhs"
1028 [(set (match_operand:V4SI 0 "register_operand" "=v")
1029 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1030 (match_operand:V8HI 2 "register_operand" "v")
1031 (match_operand:V4SI 3 "register_operand" "v")]
1033 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1035 "vmsumuhs %0,%1,%2,%3"
1036 [(set_attr "type" "veccomplex")])
1038 (define_insn "altivec_vmsumshs"
1039 [(set (match_operand:V4SI 0 "register_operand" "=v")
1040 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1041 (match_operand:V8HI 2 "register_operand" "v")
1042 (match_operand:V4SI 3 "register_operand" "v")]
1044 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1046 "vmsumshs %0,%1,%2,%3"
1047 [(set_attr "type" "veccomplex")])
1051 (define_insn "umax<mode>3"
1052 [(set (match_operand:VI2 0 "register_operand" "=v")
1053 (umax:VI2 (match_operand:VI2 1 "register_operand" "v")
1054 (match_operand:VI2 2 "register_operand" "v")))]
1056 "vmaxu<VI_char> %0,%1,%2"
1057 [(set_attr "type" "vecsimple")])
1059 (define_insn "smax<mode>3"
1060 [(set (match_operand:VI2 0 "register_operand" "=v")
1061 (smax:VI2 (match_operand:VI2 1 "register_operand" "v")
1062 (match_operand:VI2 2 "register_operand" "v")))]
1064 "vmaxs<VI_char> %0,%1,%2"
1065 [(set_attr "type" "vecsimple")])
1067 (define_insn "*altivec_smaxv4sf3"
1068 [(set (match_operand:V4SF 0 "register_operand" "=v")
1069 (smax:V4SF (match_operand:V4SF 1 "register_operand" "v")
1070 (match_operand:V4SF 2 "register_operand" "v")))]
1071 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
1073 [(set_attr "type" "veccmp")])
1075 (define_insn "umin<mode>3"
1076 [(set (match_operand:VI2 0 "register_operand" "=v")
1077 (umin:VI2 (match_operand:VI2 1 "register_operand" "v")
1078 (match_operand:VI2 2 "register_operand" "v")))]
1080 "vminu<VI_char> %0,%1,%2"
1081 [(set_attr "type" "vecsimple")])
1083 (define_insn "smin<mode>3"
1084 [(set (match_operand:VI2 0 "register_operand" "=v")
1085 (smin:VI2 (match_operand:VI2 1 "register_operand" "v")
1086 (match_operand:VI2 2 "register_operand" "v")))]
1088 "vmins<VI_char> %0,%1,%2"
1089 [(set_attr "type" "vecsimple")])
1091 (define_insn "*altivec_sminv4sf3"
1092 [(set (match_operand:V4SF 0 "register_operand" "=v")
1093 (smin:V4SF (match_operand:V4SF 1 "register_operand" "v")
1094 (match_operand:V4SF 2 "register_operand" "v")))]
1095 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
1097 [(set_attr "type" "veccmp")])
1099 (define_insn "altivec_vmhaddshs"
1100 [(set (match_operand:V8HI 0 "register_operand" "=v")
1101 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
1102 (match_operand:V8HI 2 "register_operand" "v")
1103 (match_operand:V8HI 3 "register_operand" "v")]
1105 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1107 "vmhaddshs %0,%1,%2,%3"
1108 [(set_attr "type" "veccomplex")])
1110 (define_insn "altivec_vmhraddshs"
1111 [(set (match_operand:V8HI 0 "register_operand" "=v")
1112 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
1113 (match_operand:V8HI 2 "register_operand" "v")
1114 (match_operand:V8HI 3 "register_operand" "v")]
1116 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
1118 "vmhraddshs %0,%1,%2,%3"
1119 [(set_attr "type" "veccomplex")])
1121 (define_insn "fmav8hi4"
1122 [(set (match_operand:V8HI 0 "register_operand" "=v")
1123 (plus:V8HI (mult:V8HI (match_operand:V8HI 1 "register_operand" "v")
1124 (match_operand:V8HI 2 "register_operand" "v"))
1125 (match_operand:V8HI 3 "register_operand" "v")))]
1127 "vmladduhm %0,%1,%2,%3"
1128 [(set_attr "type" "veccomplex")])
1130 (define_expand "altivec_vmrghb"
1131 [(use (match_operand:V16QI 0 "register_operand"))
1132 (use (match_operand:V16QI 1 "register_operand"))
1133 (use (match_operand:V16QI 2 "register_operand"))]
1136 rtx (*fun) (rtx, rtx, rtx) = BYTES_BIG_ENDIAN ? gen_altivec_vmrghb_direct
1137 : gen_altivec_vmrglb_direct;
1138 if (!BYTES_BIG_ENDIAN)
1139 std::swap (operands[1], operands[2]);
1140 emit_insn (fun (operands[0], operands[1], operands[2]));
;; vmrghb in the machine's own (big-endian) element numbering: a
;; vec_select over the 32-byte concat of the two inputs, pairing BE
;; elements 0..7 of operand 1 with elements 0..7 of operand 2
;; (indices 16..23 of the concat).  No endian adjustment here.
1144 (define_insn "altivec_vmrghb_direct"
1145 [(set (match_operand:V16QI 0 "register_operand" "=v")
1148 (match_operand:V16QI 1 "register_operand" "v")
1149 (match_operand:V16QI 2 "register_operand" "v"))
1150 (parallel [(const_int 0) (const_int 16)
1151 (const_int 1) (const_int 17)
1152 (const_int 2) (const_int 18)
1153 (const_int 3) (const_int 19)
1154 (const_int 4) (const_int 20)
1155 (const_int 5) (const_int 21)
1156 (const_int 6) (const_int 22)
1157 (const_int 7) (const_int 23)])))]
1160 [(set_attr "type" "vecperm")])
;; Merge high halfword.  "High" is defined in big-endian terms, so on a
;; little-endian target the expander swaps the two inputs and emits the
;; merge-LOW direct pattern instead, preserving the named semantics.
1162 (define_expand "altivec_vmrghh"
1163 [(use (match_operand:V8HI 0 "register_operand"))
1164 (use (match_operand:V8HI 1 "register_operand"))
1165 (use (match_operand:V8HI 2 "register_operand"))]
1168 rtx (*fun) (rtx, rtx, rtx) = BYTES_BIG_ENDIAN ? gen_altivec_vmrghh_direct
1169 : gen_altivec_vmrglh_direct;
1170 if (!BYTES_BIG_ENDIAN)
1171 std::swap (operands[1], operands[2]);
1172 emit_insn (fun (operands[0], operands[1], operands[2]));
;; vmrghh with explicit BE element numbering (see vmrghb_direct above).
1176 (define_insn "altivec_vmrghh_direct"
1177 [(set (match_operand:V8HI 0 "register_operand" "=v")
1180 (match_operand:V8HI 1 "register_operand" "v")
1181 (match_operand:V8HI 2 "register_operand" "v"))
1182 (parallel [(const_int 0) (const_int 8)
1183 (const_int 1) (const_int 9)
1184 (const_int 2) (const_int 10)
1185 (const_int 3) (const_int 11)])))]
1188 [(set_attr "type" "vecperm")])
;; Merge high word; same LE strategy: swap operands and use the
;; merge-low-word direct pattern.
1190 (define_expand "altivec_vmrghw"
1191 [(use (match_operand:V4SI 0 "register_operand"))
1192 (use (match_operand:V4SI 1 "register_operand"))
1193 (use (match_operand:V4SI 2 "register_operand"))]
1194 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1196 rtx (*fun) (rtx, rtx, rtx);
1197 fun = BYTES_BIG_ENDIAN ? gen_altivec_vmrghw_direct_v4si
1198 : gen_altivec_vmrglw_direct_v4si;
1199 if (!BYTES_BIG_ENDIAN)
1200 std::swap (operands[1], operands[2]);
1201 emit_insn (fun (operands[0], operands[1], operands[2]));
;; Word-size merge-high over the VSX_W modes; two register alternatives
;; ("wa" VSX and "v" AltiVec).
1205 (define_insn "altivec_vmrghw_direct_<mode>"
1206 [(set (match_operand:VSX_W 0 "register_operand" "=wa,v")
1208 (vec_concat:<VS_double>
1209 (match_operand:VSX_W 1 "register_operand" "wa,v")
1210 (match_operand:VSX_W 2 "register_operand" "wa,v"))
1211 (parallel [(const_int 0) (const_int 4)
1212 (const_int 1) (const_int 5)])))]
1217 [(set_attr "type" "vecperm")])
;; V4SF merge-high.  Endian fixup done in the output template: on LE,
;; emit vmrglw with the operands swapped, which is equivalent.
1219 (define_insn "*altivec_vmrghsf"
1220 [(set (match_operand:V4SF 0 "register_operand" "=v")
1223 (match_operand:V4SF 1 "register_operand" "v")
1224 (match_operand:V4SF 2 "register_operand" "v"))
1225 (parallel [(const_int 0) (const_int 4)
1226 (const_int 1) (const_int 5)])))]
1227 "VECTOR_MEM_ALTIVEC_P (V4SFmode)"
1229 if (BYTES_BIG_ENDIAN)
1230 return "vmrghw %0,%1,%2";
1232 return "vmrglw %0,%2,%1";
1234 [(set_attr "type" "vecperm")])
;; Merge-low family: mirror images of the merge-high patterns.  Each
;; named expander swaps operands and emits the opposite (merge-high)
;; direct pattern on little-endian targets.
1236 (define_expand "altivec_vmrglb"
1237 [(use (match_operand:V16QI 0 "register_operand"))
1238 (use (match_operand:V16QI 1 "register_operand"))
1239 (use (match_operand:V16QI 2 "register_operand"))]
1242 rtx (*fun) (rtx, rtx, rtx) = BYTES_BIG_ENDIAN ? gen_altivec_vmrglb_direct
1243 : gen_altivec_vmrghb_direct;
1244 if (!BYTES_BIG_ENDIAN)
1245 std::swap (operands[1], operands[2]);
1246 emit_insn (fun (operands[0], operands[1], operands[2]));
;; vmrglb in BE numbering: pairs elements 8..15 of operand 1 with
;; elements 8..15 of operand 2 (concat indices 24..31).
1250 (define_insn "altivec_vmrglb_direct"
1251 [(set (match_operand:V16QI 0 "register_operand" "=v")
1254 (match_operand:V16QI 1 "register_operand" "v")
1255 (match_operand:V16QI 2 "register_operand" "v"))
1256 (parallel [(const_int 8) (const_int 24)
1257 (const_int 9) (const_int 25)
1258 (const_int 10) (const_int 26)
1259 (const_int 11) (const_int 27)
1260 (const_int 12) (const_int 28)
1261 (const_int 13) (const_int 29)
1262 (const_int 14) (const_int 30)
1263 (const_int 15) (const_int 31)])))]
1266 [(set_attr "type" "vecperm")])
;; Merge low halfword (LE maps to swapped vmrghh).
1268 (define_expand "altivec_vmrglh"
1269 [(use (match_operand:V8HI 0 "register_operand"))
1270 (use (match_operand:V8HI 1 "register_operand"))
1271 (use (match_operand:V8HI 2 "register_operand"))]
1274 rtx (*fun) (rtx, rtx, rtx) = BYTES_BIG_ENDIAN ? gen_altivec_vmrglh_direct
1275 : gen_altivec_vmrghh_direct;
1276 if (!BYTES_BIG_ENDIAN)
1277 std::swap (operands[1], operands[2]);
1278 emit_insn (fun (operands[0], operands[1], operands[2]));
;; vmrglh with explicit BE element numbering.
1282 (define_insn "altivec_vmrglh_direct"
1283 [(set (match_operand:V8HI 0 "register_operand" "=v")
1286 (match_operand:V8HI 1 "register_operand" "v")
1287 (match_operand:V8HI 2 "register_operand" "v"))
1288 (parallel [(const_int 4) (const_int 12)
1289 (const_int 5) (const_int 13)
1290 (const_int 6) (const_int 14)
1291 (const_int 7) (const_int 15)])))]
1294 [(set_attr "type" "vecperm")])
;; Merge low word (LE maps to swapped vmrghw).
1296 (define_expand "altivec_vmrglw"
1297 [(use (match_operand:V4SI 0 "register_operand"))
1298 (use (match_operand:V4SI 1 "register_operand"))
1299 (use (match_operand:V4SI 2 "register_operand"))]
1300 "VECTOR_MEM_ALTIVEC_P (V4SImode)"
1302 rtx (*fun) (rtx, rtx, rtx);
1303 fun = BYTES_BIG_ENDIAN ? gen_altivec_vmrglw_direct_v4si
1304 : gen_altivec_vmrghw_direct_v4si;
1305 if (!BYTES_BIG_ENDIAN)
1306 std::swap (operands[1], operands[2]);
1307 emit_insn (fun (operands[0], operands[1], operands[2]));
;; Word-size merge-low over VSX_W modes; VSX ("wa") and AltiVec ("v")
;; register alternatives.
1311 (define_insn "altivec_vmrglw_direct_<mode>"
1312 [(set (match_operand:VSX_W 0 "register_operand" "=wa,v")
1314 (vec_concat:<VS_double>
1315 (match_operand:VSX_W 1 "register_operand" "wa,v")
1316 (match_operand:VSX_W 2 "register_operand" "wa,v"))
1317 (parallel [(const_int 2) (const_int 6)
1318 (const_int 3) (const_int 7)])))]
1323 [(set_attr "type" "vecperm")])
;; V4SF merge-low; LE fixup in the output template (swapped vmrghw).
1325 (define_insn "*altivec_vmrglsf"
1326 [(set (match_operand:V4SF 0 "register_operand" "=v")
1329 (match_operand:V4SF 1 "register_operand" "v")
1330 (match_operand:V4SF 2 "register_operand" "v"))
1331 (parallel [(const_int 2) (const_int 6)
1332 (const_int 3) (const_int 7)])))]
1333 "VECTOR_MEM_ALTIVEC_P (V4SFmode)"
1335 if (BYTES_BIG_ENDIAN)
1336 return "vmrglw %0,%1,%2";
1338 return "vmrghw %0,%2,%1";
1340 [(set_attr "type" "vecperm")])
1342 ;; Power8 vector merge two V2DF/V2DI even words to V2DF
;; For the doubleword modes there is no direct instruction; build the
;; select-of-concat RTL (elements 0 and 2 of the concat) and let the
;; word-mode patterns below match it.
1343 (define_expand "p8_vmrgew_<mode>"
1344 [(use (match_operand:VSX_D 0 "vsx_register_operand"))
1345 (use (match_operand:VSX_D 1 "vsx_register_operand"))
1346 (use (match_operand:VSX_D 2 "vsx_register_operand"))]
1347 "VECTOR_MEM_VSX_P (<MODE>mode)"
1352 v = gen_rtvec (2, GEN_INT (0), GEN_INT (2));
1353 x = gen_rtx_VEC_CONCAT (<VS_double>mode, operands[1], operands[2]);
1355 x = gen_rtx_VEC_SELECT (<MODE>mode, x, gen_rtx_PARALLEL (VOIDmode, v));
1356 emit_insn (gen_rtx_SET (operands[0], x));
1360 ;; Power8 vector merge two V4SF/V4SI even words to V4SF
;; Merge even words.  On LE the even/odd numbering flips, so the output
;; template emits vmrgow with swapped operands instead.
1361 (define_insn "p8_vmrgew_<mode>"
1362 [(set (match_operand:VSX_W 0 "register_operand" "=v")
1364 (vec_concat:<VS_double>
1365 (match_operand:VSX_W 1 "register_operand" "v")
1366 (match_operand:VSX_W 2 "register_operand" "v"))
1367 (parallel [(const_int 0) (const_int 4)
1368 (const_int 2) (const_int 6)])))]
1371 if (BYTES_BIG_ENDIAN)
1372 return "vmrgew %0,%1,%2";
1374 return "vmrgow %0,%2,%1";
1376 [(set_attr "type" "vecperm")])
;; Merge odd words; mirror of p8_vmrgew above (LE emits swapped vmrgew).
1378 (define_insn "p8_vmrgow_<mode>"
1379 [(set (match_operand:VSX_W 0 "register_operand" "=v")
1381 (vec_concat:<VS_double>
1382 (match_operand:VSX_W 1 "register_operand" "v")
1383 (match_operand:VSX_W 2 "register_operand" "v"))
1384 (parallel [(const_int 1) (const_int 5)
1385 (const_int 3) (const_int 7)])))]
1388 if (BYTES_BIG_ENDIAN)
1389 return "vmrgow %0,%1,%2";
1391 return "vmrgew %0,%2,%1";
1393 [(set_attr "type" "vecperm")])
;; Doubleword merge-odd expander; selects elements 1 and 3 of the concat.
1395 (define_expand "p8_vmrgow_<mode>"
1396 [(use (match_operand:VSX_D 0 "vsx_register_operand"))
1397 (use (match_operand:VSX_D 1 "vsx_register_operand"))
1398 (use (match_operand:VSX_D 2 "vsx_register_operand"))]
1399 "VECTOR_MEM_VSX_P (<MODE>mode)"
1404 v = gen_rtvec (2, GEN_INT (1), GEN_INT (3));
1405 x = gen_rtx_VEC_CONCAT (<VS_double>mode, operands[1], operands[2]);
1407 x = gen_rtx_VEC_SELECT (<MODE>mode, x, gen_rtx_PARALLEL (VOIDmode, v));
1408 emit_insn (gen_rtx_SET (operands[0], x));
;; "_direct" forms: opaque unspecs, no endian adjustment, for callers
;; that want the raw machine instruction semantics.
1412 (define_insn "p8_vmrgew_<mode>_direct"
1413 [(set (match_operand:VSX_W 0 "register_operand" "=v")
1414 (unspec:VSX_W [(match_operand:VSX_W 1 "register_operand" "v")
1415 (match_operand:VSX_W 2 "register_operand" "v")]
1416 UNSPEC_VMRGEW_DIRECT))]
1419 [(set_attr "type" "vecperm")])
1421 (define_insn "p8_vmrgow_<mode>_direct"
1422 [(set (match_operand:VSX_W 0 "register_operand" "=v")
1423 (unspec:VSX_W [(match_operand:VSX_W 1 "register_operand" "v")
1424 (match_operand:VSX_W 2 "register_operand" "v")]
1425 UNSPEC_VMRGOW_DIRECT))]
1428 [(set_attr "type" "vecperm")])
;; vec_widen_{u,s}mult_{even,odd} standard-name expanders.  The "even"
;; expanders emit vmule* on big-endian but vmulo* on little-endian (and
;; vice versa for "odd"), because LE reverses the in-register element
;; numbering relative to GCC's memory-order view of even/odd lanes.
1430 (define_expand "vec_widen_umult_even_v16qi"
1431 [(use (match_operand:V8HI 0 "register_operand"))
1432 (use (match_operand:V16QI 1 "register_operand"))
1433 (use (match_operand:V16QI 2 "register_operand"))]
1436 if (BYTES_BIG_ENDIAN)
1437 emit_insn (gen_altivec_vmuleub (operands[0], operands[1], operands[2]));
1439 emit_insn (gen_altivec_vmuloub (operands[0], operands[1], operands[2]));
1443 (define_expand "vec_widen_smult_even_v16qi"
1444 [(use (match_operand:V8HI 0 "register_operand"))
1445 (use (match_operand:V16QI 1 "register_operand"))
1446 (use (match_operand:V16QI 2 "register_operand"))]
1449 if (BYTES_BIG_ENDIAN)
1450 emit_insn (gen_altivec_vmulesb (operands[0], operands[1], operands[2]));
1452 emit_insn (gen_altivec_vmulosb (operands[0], operands[1], operands[2]));
1456 (define_expand "vec_widen_umult_even_v8hi"
1457 [(use (match_operand:V4SI 0 "register_operand"))
1458 (use (match_operand:V8HI 1 "register_operand"))
1459 (use (match_operand:V8HI 2 "register_operand"))]
1462 if (BYTES_BIG_ENDIAN)
1463 emit_insn (gen_altivec_vmuleuh (operands[0], operands[1], operands[2]));
1465 emit_insn (gen_altivec_vmulouh (operands[0], operands[1], operands[2]));
1469 (define_expand "vec_widen_smult_even_v8hi"
1470 [(use (match_operand:V4SI 0 "register_operand"))
1471 (use (match_operand:V8HI 1 "register_operand"))
1472 (use (match_operand:V8HI 2 "register_operand"))]
1475 if (BYTES_BIG_ENDIAN)
1476 emit_insn (gen_altivec_vmulesh (operands[0], operands[1], operands[2]));
1478 emit_insn (gen_altivec_vmulosh (operands[0], operands[1], operands[2]));
1482 (define_expand "vec_widen_umult_even_v4si"
1483 [(use (match_operand:V2DI 0 "register_operand"))
1484 (use (match_operand:V4SI 1 "register_operand"))
1485 (use (match_operand:V4SI 2 "register_operand"))]
1488 if (BYTES_BIG_ENDIAN)
1489 emit_insn (gen_altivec_vmuleuw (operands[0], operands[1], operands[2]));
1491 emit_insn (gen_altivec_vmulouw (operands[0], operands[1], operands[2]));
1495 (define_expand "vec_widen_umult_even_v2di"
1496 [(use (match_operand:V1TI 0 "register_operand"))
1497 (use (match_operand:V2DI 1 "register_operand"))
1498 (use (match_operand:V2DI 2 "register_operand"))]
1501 if (BYTES_BIG_ENDIAN)
1502 emit_insn (gen_altivec_vmuleud (operands[0], operands[1], operands[2]));
1504 emit_insn (gen_altivec_vmuloud (operands[0], operands[1], operands[2]));
1508 (define_expand "vec_widen_smult_even_v4si"
1509 [(use (match_operand:V2DI 0 "register_operand"))
1510 (use (match_operand:V4SI 1 "register_operand"))
1511 (use (match_operand:V4SI 2 "register_operand"))]
1514 if (BYTES_BIG_ENDIAN)
1515 emit_insn (gen_altivec_vmulesw (operands[0], operands[1], operands[2]));
1517 emit_insn (gen_altivec_vmulosw (operands[0], operands[1], operands[2]));
1521 (define_expand "vec_widen_smult_even_v2di"
1522 [(use (match_operand:V1TI 0 "register_operand"))
1523 (use (match_operand:V2DI 1 "register_operand"))
1524 (use (match_operand:V2DI 2 "register_operand"))]
1527 if (BYTES_BIG_ENDIAN)
1528 emit_insn (gen_altivec_vmulesd (operands[0], operands[1], operands[2]));
1530 emit_insn (gen_altivec_vmulosd (operands[0], operands[1], operands[2]));
;; "odd" expanders: the mirror mapping (BE -> vmulo*, LE -> vmule*).
1534 (define_expand "vec_widen_umult_odd_v16qi"
1535 [(use (match_operand:V8HI 0 "register_operand"))
1536 (use (match_operand:V16QI 1 "register_operand"))
1537 (use (match_operand:V16QI 2 "register_operand"))]
1540 if (BYTES_BIG_ENDIAN)
1541 emit_insn (gen_altivec_vmuloub (operands[0], operands[1], operands[2]));
1543 emit_insn (gen_altivec_vmuleub (operands[0], operands[1], operands[2]));
1547 (define_expand "vec_widen_smult_odd_v16qi"
1548 [(use (match_operand:V8HI 0 "register_operand"))
1549 (use (match_operand:V16QI 1 "register_operand"))
1550 (use (match_operand:V16QI 2 "register_operand"))]
1553 if (BYTES_BIG_ENDIAN)
1554 emit_insn (gen_altivec_vmulosb (operands[0], operands[1], operands[2]));
1556 emit_insn (gen_altivec_vmulesb (operands[0], operands[1], operands[2]));
1560 (define_expand "vec_widen_umult_odd_v8hi"
1561 [(use (match_operand:V4SI 0 "register_operand"))
1562 (use (match_operand:V8HI 1 "register_operand"))
1563 (use (match_operand:V8HI 2 "register_operand"))]
1566 if (BYTES_BIG_ENDIAN)
1567 emit_insn (gen_altivec_vmulouh (operands[0], operands[1], operands[2]));
1569 emit_insn (gen_altivec_vmuleuh (operands[0], operands[1], operands[2]));
1573 (define_expand "vec_widen_smult_odd_v8hi"
1574 [(use (match_operand:V4SI 0 "register_operand"))
1575 (use (match_operand:V8HI 1 "register_operand"))
1576 (use (match_operand:V8HI 2 "register_operand"))]
1579 if (BYTES_BIG_ENDIAN)
1580 emit_insn (gen_altivec_vmulosh (operands[0], operands[1], operands[2]));
1582 emit_insn (gen_altivec_vmulesh (operands[0], operands[1], operands[2]));
1586 (define_expand "vec_widen_umult_odd_v4si"
1587 [(use (match_operand:V2DI 0 "register_operand"))
1588 (use (match_operand:V4SI 1 "register_operand"))
1589 (use (match_operand:V4SI 2 "register_operand"))]
1592 if (BYTES_BIG_ENDIAN)
1593 emit_insn (gen_altivec_vmulouw (operands[0], operands[1], operands[2]));
1595 emit_insn (gen_altivec_vmuleuw (operands[0], operands[1], operands[2]));
1599 (define_expand "vec_widen_umult_odd_v2di"
1600 [(use (match_operand:V1TI 0 "register_operand"))
1601 (use (match_operand:V2DI 1 "register_operand"))
1602 (use (match_operand:V2DI 2 "register_operand"))]
1605 if (BYTES_BIG_ENDIAN)
1606 emit_insn (gen_altivec_vmuloud (operands[0], operands[1], operands[2]));
1608 emit_insn (gen_altivec_vmuleud (operands[0], operands[1], operands[2]));
1612 (define_expand "vec_widen_smult_odd_v4si"
1613 [(use (match_operand:V2DI 0 "register_operand"))
1614 (use (match_operand:V4SI 1 "register_operand"))
1615 (use (match_operand:V4SI 2 "register_operand"))]
1618 if (BYTES_BIG_ENDIAN)
1619 emit_insn (gen_altivec_vmulosw (operands[0], operands[1], operands[2]));
1621 emit_insn (gen_altivec_vmulesw (operands[0], operands[1], operands[2]));
1625 (define_expand "vec_widen_smult_odd_v2di"
1626 [(use (match_operand:V1TI 0 "register_operand"))
1627 (use (match_operand:V2DI 1 "register_operand"))
1628 (use (match_operand:V2DI 2 "register_operand"))]
1631 if (BYTES_BIG_ENDIAN)
1632 emit_insn (gen_altivec_vmulosd (operands[0], operands[1], operands[2]));
1634 emit_insn (gen_altivec_vmulesd (operands[0], operands[1], operands[2]));
;; Underlying widening-multiply instructions (vmule*/vmulo*), kept as
;; opaque unspecs in the machine's own element numbering.  Endian
;; selection is done by the vec_widen_* expanders above, not here.
;; Each insn doubles the element width: V16QI->V8HI, V8HI->V4SI,
;; V4SI->V2DI, V2DI->V1TI.
1638 (define_insn "altivec_vmuleub"
1639 [(set (match_operand:V8HI 0 "register_operand" "=v")
1640 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1641 (match_operand:V16QI 2 "register_operand" "v")]
1645 [(set_attr "type" "veccomplex")])
1647 (define_insn "altivec_vmuloub"
1648 [(set (match_operand:V8HI 0 "register_operand" "=v")
1649 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1650 (match_operand:V16QI 2 "register_operand" "v")]
1654 [(set_attr "type" "veccomplex")])
1656 (define_insn "altivec_vmulesb"
1657 [(set (match_operand:V8HI 0 "register_operand" "=v")
1658 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1659 (match_operand:V16QI 2 "register_operand" "v")]
1663 [(set_attr "type" "veccomplex")])
1665 (define_insn "altivec_vmulosb"
1666 [(set (match_operand:V8HI 0 "register_operand" "=v")
1667 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
1668 (match_operand:V16QI 2 "register_operand" "v")]
1672 [(set_attr "type" "veccomplex")])
1674 (define_insn "altivec_vmuleuh"
1675 [(set (match_operand:V4SI 0 "register_operand" "=v")
1676 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1677 (match_operand:V8HI 2 "register_operand" "v")]
1681 [(set_attr "type" "veccomplex")])
1683 (define_insn "altivec_vmulouh"
1684 [(set (match_operand:V4SI 0 "register_operand" "=v")
1685 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1686 (match_operand:V8HI 2 "register_operand" "v")]
1690 [(set_attr "type" "veccomplex")])
1692 (define_insn "altivec_vmulesh"
1693 [(set (match_operand:V4SI 0 "register_operand" "=v")
1694 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1695 (match_operand:V8HI 2 "register_operand" "v")]
1699 [(set_attr "type" "veccomplex")])
1701 (define_insn "altivec_vmulosh"
1702 [(set (match_operand:V4SI 0 "register_operand" "=v")
1703 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
1704 (match_operand:V8HI 2 "register_operand" "v")]
1708 [(set_attr "type" "veccomplex")])
1710 (define_insn "altivec_vmuleuw"
1711 [(set (match_operand:V2DI 0 "register_operand" "=v")
1712 (unspec:V2DI [(match_operand:V4SI 1 "register_operand" "v")
1713 (match_operand:V4SI 2 "register_operand" "v")]
1717 [(set_attr "type" "veccomplex")])
1719 (define_insn "altivec_vmuleud"
1720 [(set (match_operand:V1TI 0 "register_operand" "=v")
1721 (unspec:V1TI [(match_operand:V2DI 1 "register_operand" "v")
1722 (match_operand:V2DI 2 "register_operand" "v")]
1726 [(set_attr "type" "veccomplex")])
1728 (define_insn "altivec_vmulouw"
1729 [(set (match_operand:V2DI 0 "register_operand" "=v")
1730 (unspec:V2DI [(match_operand:V4SI 1 "register_operand" "v")
1731 (match_operand:V4SI 2 "register_operand" "v")]
1735 [(set_attr "type" "veccomplex")])
1737 (define_insn "altivec_vmuloud"
1738 [(set (match_operand:V1TI 0 "register_operand" "=v")
1739 (unspec:V1TI [(match_operand:V2DI 1 "register_operand" "v")
1740 (match_operand:V2DI 2 "register_operand" "v")]
1744 [(set_attr "type" "veccomplex")])
1746 (define_insn "altivec_vmulesw"
1747 [(set (match_operand:V2DI 0 "register_operand" "=v")
1748 (unspec:V2DI [(match_operand:V4SI 1 "register_operand" "v")
1749 (match_operand:V4SI 2 "register_operand" "v")]
1753 [(set_attr "type" "veccomplex")])
1755 (define_insn "altivec_vmulesd"
1756 [(set (match_operand:V1TI 0 "register_operand" "=v")
1757 (unspec:V1TI [(match_operand:V2DI 1 "register_operand" "v")
1758 (match_operand:V2DI 2 "register_operand" "v")]
1762 [(set_attr "type" "veccomplex")])
1764 (define_insn "altivec_vmulosw"
1765 [(set (match_operand:V2DI 0 "register_operand" "=v")
1766 (unspec:V2DI [(match_operand:V4SI 1 "register_operand" "v")
1767 (match_operand:V4SI 2 "register_operand" "v")]
1771 [(set_attr "type" "veccomplex")])
1773 (define_insn "altivec_vmulosd"
1774 [(set (match_operand:V1TI 0 "register_operand" "=v")
1775 (unspec:V1TI [(match_operand:V2DI 1 "register_operand" "v")
1776 (match_operand:V2DI 2 "register_operand" "v")]
1780 [(set_attr "type" "veccomplex")])
1782 ;; Vector pack/unpack
;; Pack instructions narrow <VP> to <VP_small>.  All of them perform the
;; endian fixup in the output template: on little endian the two source
;; operands are swapped in the emitted assembly.
1783 (define_insn "altivec_vpkpx"
1784 [(set (match_operand:V8HI 0 "register_operand" "=v")
1785 (unspec:V8HI [(match_operand:V4SI 1 "register_operand" "v")
1786 (match_operand:V4SI 2 "register_operand" "v")]
1790 if (BYTES_BIG_ENDIAN)
1791 return "vpkpx %0,%1,%2";
1793 return "vpkpx %0,%2,%1";
1795 [(set_attr "type" "vecperm")])
;; Pack signed -> signed with saturation.
1797 (define_insn "altivec_vpks<VI_char>ss"
1798 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1799 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1800 (match_operand:VP 2 "register_operand" "v")]
1801 UNSPEC_VPACK_SIGN_SIGN_SAT))]
1804 if (BYTES_BIG_ENDIAN)
1805 return "vpks<VI_char>ss %0,%1,%2";
1807 return "vpks<VI_char>ss %0,%2,%1";
1809 [(set_attr "type" "vecperm")])
;; Pack signed -> unsigned with saturation.
1811 (define_insn "altivec_vpks<VI_char>us"
1812 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1813 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1814 (match_operand:VP 2 "register_operand" "v")]
1815 UNSPEC_VPACK_SIGN_UNS_SAT))]
1818 if (BYTES_BIG_ENDIAN)
1819 return "vpks<VI_char>us %0,%1,%2";
1821 return "vpks<VI_char>us %0,%2,%1";
1823 [(set_attr "type" "vecperm")])
;; Pack unsigned -> unsigned with saturation.
1825 (define_insn "altivec_vpku<VI_char>us"
1826 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1827 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1828 (match_operand:VP 2 "register_operand" "v")]
1829 UNSPEC_VPACK_UNS_UNS_SAT))]
1832 if (BYTES_BIG_ENDIAN)
1833 return "vpku<VI_char>us %0,%1,%2";
1835 return "vpku<VI_char>us %0,%2,%1";
1837 [(set_attr "type" "vecperm")])
;; Pack unsigned -> unsigned modulo (truncating).
1839 (define_insn "altivec_vpku<VI_char>um"
1840 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1841 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1842 (match_operand:VP 2 "register_operand" "v")]
1843 UNSPEC_VPACK_UNS_UNS_MOD))]
1846 if (BYTES_BIG_ENDIAN)
1847 return "vpku<VI_char>um %0,%1,%2";
1849 return "vpku<VI_char>um %0,%2,%1";
1851 [(set_attr "type" "vecperm")])
;; "_direct" modulo pack, distinguished by its own unspec so it is never
;; combined with the non-direct form.
1853 (define_insn "altivec_vpku<VI_char>um_direct"
1854 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
1855 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
1856 (match_operand:VP 2 "register_operand" "v")]
1857 UNSPEC_VPACK_UNS_UNS_MOD_DIRECT))]
1860 if (BYTES_BIG_ENDIAN)
1861 return "vpku<VI_char>um %0,%1,%2";
1863 return "vpku<VI_char>um %0,%2,%1";
1865 [(set_attr "type" "vecperm")])
;; Element-wise rotate left; expressed with the generic RTL "rotate"
;; code so the vectorizer can match it directly.
1867 (define_insn "*altivec_vrl<VI_char>"
1868 [(set (match_operand:VI2 0 "register_operand" "=v")
1869 (rotate:VI2 (match_operand:VI2 1 "register_operand" "v")
1870 (match_operand:VI2 2 "register_operand" "v")))]
1872 "vrl<VI_char> %0,%1,%2"
1873 [(set_attr "type" "vecsimple")])
;; 128-bit rotate left (V1TI).
1875 (define_insn "altivec_vrlq"
1876 [(set (match_operand:V1TI 0 "vsx_register_operand" "=v")
1877 (rotate:V1TI (match_operand:V1TI 1 "vsx_register_operand" "v")
1878 (match_operand:V1TI 2 "vsx_register_operand" "v")))]
1880 ;; rotate amount in needs to be in bits[57:63] of operand2.
1882 [(set_attr "type" "vecsimple")])
;; Rotate-left-then-mask-insert: operand 2 is constrained "0", i.e. it
;; must be preloaded into the destination register; the emitted insn
;; reads %0, %1 and %3 only.
1884 (define_insn "altivec_vrl<VI_char>mi"
1885 [(set (match_operand:VIlong 0 "register_operand" "=v")
1886 (unspec:VIlong [(match_operand:VIlong 1 "register_operand" "v")
1887 (match_operand:VIlong 2 "register_operand" "0")
1888 (match_operand:VIlong 3 "register_operand" "v")]
1891 "vrl<VI_char>mi %0,%1,%3"
1892 [(set_attr "type" "veclogical")])
;; 128-bit rotate-mask-insert: the control operand's doublewords are
;; swapped (xxswapd) into a temp before handing off to the *_inst form.
1894 (define_expand "altivec_vrlqmi"
1895 [(set (match_operand:V1TI 0 "vsx_register_operand")
1896 (unspec:V1TI [(match_operand:V1TI 1 "vsx_register_operand")
1897 (match_operand:V1TI 2 "vsx_register_operand")
1898 (match_operand:V1TI 3 "vsx_register_operand")]
1902 /* Mask bit begin, end fields need to be in bits [41:55] of 128-bit operand2.
1903 Shift amount in needs to be put in bits[57:63] of 128-bit operand2. */
1904 rtx tmp = gen_reg_rtx (V1TImode);
1906 emit_insn (gen_xxswapd_v1ti (tmp, operands[3]));
1907 emit_insn (gen_altivec_vrlqmi_inst (operands[0], operands[1], operands[2],
;; The actual vrlqmi instruction; operand 2 tied to the destination.
1912 (define_insn "altivec_vrlqmi_inst"
1913 [(set (match_operand:V1TI 0 "vsx_register_operand" "=v")
1914 (unspec:V1TI [(match_operand:V1TI 1 "vsx_register_operand" "v")
1915 (match_operand:V1TI 2 "vsx_register_operand" "0")
1916 (match_operand:V1TI 3 "vsx_register_operand" "v")]
1920 [(set_attr "type" "veclogical")])
;; Rotate-left-then-mask (no insert).
1922 (define_insn "altivec_vrl<VI_char>nm"
1923 [(set (match_operand:VIlong 0 "register_operand" "=v")
1924 (unspec:VIlong [(match_operand:VIlong 1 "register_operand" "v")
1925 (match_operand:VIlong 2 "register_operand" "v")]
1928 "vrl<VI_char>nm %0,%1,%2"
1929 [(set_attr "type" "veclogical")])
;; 128-bit rotate-and-mask expander; same xxswapd preparation of the
;; control operand as vrlqmi above.
1931 (define_expand "altivec_vrlqnm"
1932 [(set (match_operand:V1TI 0 "vsx_register_operand")
1933 (unspec:V1TI [(match_operand:V1TI 1 "vsx_register_operand")
1934 (match_operand:V1TI 2 "vsx_register_operand")]
1938 /* Shift amount in needs to be put in bits[57:63] of 128-bit operand2. */
1939 rtx tmp = gen_reg_rtx (V1TImode);
1941 emit_insn (gen_xxswapd_v1ti (tmp, operands[2]));
1942 emit_insn (gen_altivec_vrlqnm_inst (operands[0], operands[1], tmp));
1946 (define_insn "altivec_vrlqnm_inst"
1947 [(set (match_operand:V1TI 0 "vsx_register_operand" "=v")
1948 (unspec:V1TI [(match_operand:V1TI 1 "vsx_register_operand" "v")
1949 (match_operand:V1TI 2 "vsx_register_operand" "v")]
1952 ;; rotate and mask bits need to be in upper 64-bits of operand2.
1954 [(set_attr "type" "veclogical")])
;; Whole-register shift helpers, kept as unspecs on V4SI.
1956 (define_insn "altivec_vsl"
1957 [(set (match_operand:V4SI 0 "register_operand" "=v")
1958 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1959 (match_operand:V4SI 2 "register_operand" "v")]
1963 [(set_attr "type" "vecperm")])
;; vslo: shift left by octet (byte-granular whole-register shift).
1965 (define_insn "altivec_vslo"
1966 [(set (match_operand:V4SI 0 "register_operand" "=v")
1967 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
1968 (match_operand:V4SI 2 "register_operand" "v")]
1972 [(set_attr "type" "vecperm")])
1975 [(set (match_operand:V16QI 0 "register_operand" "=v")
1976 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
1977 (match_operand:V16QI 2 "register_operand" "v")]
1981 [(set_attr "type" "vecsimple")])
1984 [(set (match_operand:V16QI 0 "register_operand" "=v")
1985 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
1986 (match_operand:V16QI 2 "register_operand" "v")]
1990 [(set_attr "type" "vecsimple")])
;; Element-wise shift left, via the generic RTL "ashift" code.
1992 (define_insn "*altivec_vsl<VI_char>"
1993 [(set (match_operand:VI2 0 "register_operand" "=v")
1994 (ashift:VI2 (match_operand:VI2 1 "register_operand" "v")
1995 (match_operand:VI2 2 "register_operand" "v")))]
1997 "vsl<VI_char> %0,%1,%2"
1998 [(set_attr "type" "vecsimple")])
;; 128-bit shift left over the VEC_TI modes.
2000 (define_insn "altivec_vslq_<mode>"
2001 [(set (match_operand:VEC_TI 0 "vsx_register_operand" "=v")
2002 (ashift:VEC_TI (match_operand:VEC_TI 1 "vsx_register_operand" "v")
2003 (match_operand:VEC_TI 2 "vsx_register_operand" "v")))]
2005 /* Shift amount in needs to be in bits[57:63] of 128-bit operand. */
2007 [(set_attr "type" "vecsimple")])
;; Element-wise logical shift right (RTL "lshiftrt").
2009 (define_insn "*altivec_vsr<VI_char>"
2010 [(set (match_operand:VI2 0 "register_operand" "=v")
2011 (lshiftrt:VI2 (match_operand:VI2 1 "register_operand" "v")
2012 (match_operand:VI2 2 "register_operand" "v")))]
2014 "vsr<VI_char> %0,%1,%2"
2015 [(set_attr "type" "vecsimple")])
;; 128-bit logical shift right.
2017 (define_insn "altivec_vsrq_<mode>"
2018 [(set (match_operand:VEC_TI 0 "vsx_register_operand" "=v")
2019 (lshiftrt:VEC_TI (match_operand:VEC_TI 1 "vsx_register_operand" "v")
2020 (match_operand:VEC_TI 2 "vsx_register_operand" "v")))]
2022 /* Shift amount in needs to be in bits[57:63] of 128-bit operand. */
2024 [(set_attr "type" "vecsimple")])
;; Element-wise arithmetic shift right (RTL "ashiftrt").
2026 (define_insn "*altivec_vsra<VI_char>"
2027 [(set (match_operand:VI2 0 "register_operand" "=v")
2028 (ashiftrt:VI2 (match_operand:VI2 1 "register_operand" "v")
2029 (match_operand:VI2 2 "register_operand" "v")))]
2031 "vsra<VI_char> %0,%1,%2"
2032 [(set_attr "type" "vecsimple")])
;; 128-bit arithmetic shift right (V1TI).
2034 (define_insn "altivec_vsraq"
2035 [(set (match_operand:V1TI 0 "vsx_register_operand" "=v")
2036 (ashiftrt:V1TI (match_operand:V1TI 1 "vsx_register_operand" "v")
2037 (match_operand:V1TI 2 "vsx_register_operand" "v")))]
2039 /* Shift amount in needs to be in bits[57:63] of 128-bit operand. */
2041 [(set_attr "type" "vecsimple")])
;; Whole-register right-shift helpers (counterparts of vsl/vslo above).
2043 (define_insn "altivec_vsr"
2044 [(set (match_operand:V4SI 0 "register_operand" "=v")
2045 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
2046 (match_operand:V4SI 2 "register_operand" "v")]
2050 [(set_attr "type" "vecperm")])
;; vsro: shift right by octet.
2052 (define_insn "altivec_vsro"
2053 [(set (match_operand:V4SI 0 "register_operand" "=v")
2054 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
2055 (match_operand:V4SI 2 "register_operand" "v")]
2059 [(set_attr "type" "vecperm")])
;; Saturating sums.  Each of these also sets VSCR (via UNSPEC_SET_VSCR)
;; to record saturation, so the VSCR write is part of the pattern.
2061 (define_insn "altivec_vsum4ubs"
2062 [(set (match_operand:V4SI 0 "register_operand" "=v")
2063 (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v")
2064 (match_operand:V4SI 2 "register_operand" "v")]
2066 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2069 [(set_attr "type" "veccomplex")])
2071 (define_insn "altivec_vsum4s<VI_char>s"
2072 [(set (match_operand:V4SI 0 "register_operand" "=v")
2073 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
2074 (match_operand:V4SI 2 "register_operand" "v")]
2076 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2078 "vsum4s<VI_char>s %0,%1,%2"
2079 [(set_attr "type" "veccomplex")])
;; vsum2sws.  On little endian the accumulator words sit in the "wrong"
;; lanes, so the expander rotates operand 2 by 12 bytes (vsldoi) before
;; the sum and rotates the result back afterwards.
2081 (define_expand "altivec_vsum2sws"
2082 [(use (match_operand:V4SI 0 "register_operand"))
2083 (use (match_operand:V4SI 1 "register_operand"))
2084 (use (match_operand:V4SI 2 "register_operand"))]
2087 if (BYTES_BIG_ENDIAN)
2088 emit_insn (gen_altivec_vsum2sws_direct (operands[0], operands[1],
2092 rtx tmp1 = gen_reg_rtx (V4SImode);
2093 rtx tmp2 = gen_reg_rtx (V4SImode);
2094 emit_insn (gen_altivec_vsldoi_v4si (tmp1, operands[2],
2095 operands[2], GEN_INT (12)));
2096 emit_insn (gen_altivec_vsum2sws_direct (tmp2, operands[1], tmp1));
2097 emit_insn (gen_altivec_vsldoi_v4si (operands[0], tmp2, tmp2,
2103 ; FIXME: This can probably be expressed without an UNSPEC.
2104 (define_insn "altivec_vsum2sws_direct"
2105 [(set (match_operand:V4SI 0 "register_operand" "=v")
2106 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
2107 (match_operand:V4SI 2 "register_operand" "v")]
2109 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2112 [(set_attr "type" "veccomplex")])
;; vsumsws (sum across).  LE fixup: splat word 0 of the accumulator into
;; every lane first, then rotate the result into place with vsldoi.
2114 (define_expand "altivec_vsumsws"
2115 [(use (match_operand:V4SI 0 "register_operand"))
2116 (use (match_operand:V4SI 1 "register_operand"))
2117 (use (match_operand:V4SI 2 "register_operand"))]
2120 if (BYTES_BIG_ENDIAN)
2121 emit_insn (gen_altivec_vsumsws_direct (operands[0], operands[1],
2125 rtx tmp1 = gen_reg_rtx (V4SImode);
2126 rtx tmp2 = gen_reg_rtx (V4SImode);
2127 emit_insn (gen_altivec_vspltw_direct (tmp1, operands[2], const0_rtx));
2128 emit_insn (gen_altivec_vsumsws_direct (tmp2, operands[1], tmp1));
2129 emit_insn (gen_altivec_vsldoi_v4si (operands[0], tmp2, tmp2,
2135 ; FIXME: This can probably be expressed without an UNSPEC.
2136 (define_insn "altivec_vsumsws_direct"
2137 [(set (match_operand:V4SI 0 "register_operand" "=v")
2138 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
2139 (match_operand:V4SI 2 "register_operand" "v")]
2140 UNSPEC_VSUMSWS_DIRECT))
2141 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2144 [(set_attr "type" "veccomplex")])
2146 (define_expand "altivec_vspltb"
2147 [(use (match_operand:V16QI 0 "register_operand"))
2148 (use (match_operand:V16QI 1 "register_operand"))
2149 (use (match_operand:QI 2 "const_0_to_15_operand"))]
2152 rtvec v = gen_rtvec (1, operands[2]);
2154 x = gen_rtx_VEC_SELECT (QImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
2155 x = gen_rtx_VEC_DUPLICATE (V16QImode, x);
2156 emit_insn (gen_rtx_SET (operands[0], x));
2160 (define_insn "*altivec_vspltb_internal"
2161 [(set (match_operand:V16QI 0 "register_operand" "=v")
2162 (vec_duplicate:V16QI
2163 (vec_select:QI (match_operand:V16QI 1 "register_operand" "v")
2165 [(match_operand:QI 2 "const_0_to_15_operand" "")]))))]
2168 if (!BYTES_BIG_ENDIAN)
2169 operands[2] = GEN_INT (15 - INTVAL (operands[2]));
2171 return "vspltb %0,%1,%2";
2173 [(set_attr "type" "vecperm")])
2175 (define_insn "altivec_vspltb_direct"
2176 [(set (match_operand:V16QI 0 "register_operand" "=v")
2177 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
2178 (match_operand:QI 2 "const_0_to_15_operand" "i")]
2179 UNSPEC_VSPLT_DIRECT))]
2182 [(set_attr "type" "vecperm")])
2184 (define_expand "altivec_vsplth"
2185 [(use (match_operand:V8HI 0 "register_operand"))
2186 (use (match_operand:V8HI 1 "register_operand"))
2187 (use (match_operand:QI 2 "const_0_to_7_operand"))]
2190 rtvec v = gen_rtvec (1, operands[2]);
2192 x = gen_rtx_VEC_SELECT (HImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
2193 x = gen_rtx_VEC_DUPLICATE (V8HImode, x);
2194 emit_insn (gen_rtx_SET (operands[0], x));
2198 (define_insn "*altivec_vsplth_internal"
2199 [(set (match_operand:V8HI 0 "register_operand" "=v")
2201 (vec_select:HI (match_operand:V8HI 1 "register_operand" "v")
2203 [(match_operand:QI 2 "const_0_to_7_operand" "")]))))]
2206 if (!BYTES_BIG_ENDIAN)
2207 operands[2] = GEN_INT (7 - INTVAL (operands[2]));
2209 return "vsplth %0,%1,%2";
2211 [(set_attr "type" "vecperm")])
2213 (define_insn "altivec_vsplth_direct"
2214 [(set (match_operand:V8HI 0 "register_operand" "=v")
2215 (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
2216 (match_operand:QI 2 "const_0_to_7_operand" "i")]
2217 UNSPEC_VSPLT_DIRECT))]
2220 [(set_attr "type" "vecperm")])
2222 (define_expand "altivec_vspltw"
2223 [(use (match_operand:V4SI 0 "register_operand"))
2224 (use (match_operand:V4SI 1 "register_operand"))
2225 (use (match_operand:QI 2 "const_0_to_3_operand"))]
2228 rtvec v = gen_rtvec (1, operands[2]);
2230 x = gen_rtx_VEC_SELECT (SImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
2231 x = gen_rtx_VEC_DUPLICATE (V4SImode, x);
2232 emit_insn (gen_rtx_SET (operands[0], x));
2236 (define_insn "*altivec_vspltw_internal"
2237 [(set (match_operand:V4SI 0 "register_operand" "=v")
2239 (vec_select:SI (match_operand:V4SI 1 "register_operand" "v")
2241 [(match_operand:QI 2 "const_0_to_3_operand" "i")]))))]
2244 if (!BYTES_BIG_ENDIAN)
2245 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
2247 return "vspltw %0,%1,%2";
2249 [(set_attr "type" "vecperm")])
2251 (define_insn "altivec_vspltw_direct"
2252 [(set (match_operand:V4SI 0 "register_operand" "=v")
2253 (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
2254 (match_operand:QI 2 "const_0_to_3_operand" "i")]
2255 UNSPEC_VSPLT_DIRECT))]
2258 [(set_attr "type" "vecperm")])
;; Splat-float expander: same shape as altivec_vspltw but on V4SF —
;; select SFmode element <operand 2> of operand 1 and duplicate it into
;; all four lanes of the destination.
2260 (define_expand "altivec_vspltsf"
2261 [(use (match_operand:V4SF 0 "register_operand"))
2262 (use (match_operand:V4SF 1 "register_operand"))
2263 (use (match_operand:QI 2 "const_0_to_3_operand"))]
2266 rtvec v = gen_rtvec (1, operands[2]);
2268 x = gen_rtx_VEC_SELECT (SFmode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
2269 x = gen_rtx_VEC_DUPLICATE (V4SFmode, x);
2270 emit_insn (gen_rtx_SET (operands[0], x));
2274 (define_insn "*altivec_vspltsf_internal"
2275 [(set (match_operand:V4SF 0 "register_operand" "=v")
2277 (vec_select:SF (match_operand:V4SF 1 "register_operand" "v")
2279 [(match_operand:QI 2 "const_0_to_3_operand" "i")]))))]
2280 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2282 if (!BYTES_BIG_ENDIAN)
2283 operands[2] = GEN_INT (3 - INTVAL (operands[2]));
2285 return "vspltw %0,%1,%2";
2287 [(set_attr "type" "vecperm")])
;; vspltisb/vspltish/vspltisw: splat a 5-bit signed immediate
;; (s5bit_cint_operand, i.e. -16..15) into the vector destination.
2289 (define_insn "altivec_vspltis<VI_char>"
2290 [(set (match_operand:VI 0 "register_operand" "=v")
2292 (match_operand:QI 1 "s5bit_cint_operand" "i")))]
2294 "vspltis<VI_char> %0,%1"
2295 [(set_attr "type" "vecperm")])
2297 (define_insn "*altivec_vrfiz"
2298 [(set (match_operand:V4SF 0 "register_operand" "=v")
2299 (fix:V4SF (match_operand:V4SF 1 "register_operand" "v")))]
2300 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2302 [(set_attr "type" "vecfloat")])
;; vperm expander: on little-endian targets the permute control vector
;; must be rewritten (vperm's element numbering is big-endian), which
;; altivec_expand_vec_perm_le does before the insn is emitted.
2304 (define_expand "altivec_vperm_<mode>"
2305 [(set (match_operand:VM 0 "register_operand")
2306 (unspec:VM [(match_operand:VM 1 "register_operand")
2307 (match_operand:VM 2 "register_operand")
2308 (match_operand:V16QI 3 "register_operand")]
2312 if (!BYTES_BIG_ENDIAN)
2314 altivec_expand_vec_perm_le (operands);
2319 ;; Slightly prefer vperm, since the target does not overlap the source
;; Two alternatives: a VSX form ("wa", gated by isa "p9v", with operand 2
;; tied to the output via "0") and the classic AltiVec form ("v").  The
;; "?" on the first alternative is what implements the vperm preference.
2320 (define_insn "altivec_vperm_<mode>_direct"
2321 [(set (match_operand:VM 0 "register_operand" "=?wa,v")
2322 (unspec:VM [(match_operand:VM 1 "register_operand" "wa,v")
2323 (match_operand:VM 2 "register_operand" "0,v")
2324 (match_operand:V16QI 3 "register_operand" "wa,v")]
2330 [(set_attr "type" "vecperm")
2331 (set_attr "isa" "p9v,*")])
2333 (define_insn "altivec_vperm_v8hiv16qi"
2334 [(set (match_operand:V16QI 0 "register_operand" "=?wa,v")
2335 (unspec:V16QI [(match_operand:V8HI 1 "register_operand" "wa,v")
2336 (match_operand:V8HI 2 "register_operand" "0,v")
2337 (match_operand:V16QI 3 "register_operand" "wa,v")]
2343 [(set_attr "type" "vecperm")
2344 (set_attr "isa" "p9v,*")])
2346 (define_expand "altivec_vperm_<mode>_uns"
2347 [(set (match_operand:VM 0 "register_operand")
2348 (unspec:VM [(match_operand:VM 1 "register_operand")
2349 (match_operand:VM 2 "register_operand")
2350 (match_operand:V16QI 3 "register_operand")]
2354 if (!BYTES_BIG_ENDIAN)
2356 altivec_expand_vec_perm_le (operands);
2361 (define_insn "*altivec_vperm_<mode>_uns_internal"
2362 [(set (match_operand:VM 0 "register_operand" "=?wa,v")
2363 (unspec:VM [(match_operand:VM 1 "register_operand" "wa,v")
2364 (match_operand:VM 2 "register_operand" "0,v")
2365 (match_operand:V16QI 3 "register_operand" "wa,v")]
2371 [(set_attr "type" "vecperm")
2372 (set_attr "isa" "p9v,*")])
2374 (define_expand "vec_permv16qi"
2375 [(set (match_operand:V16QI 0 "register_operand")
2376 (unspec:V16QI [(match_operand:V16QI 1 "register_operand")
2377 (match_operand:V16QI 2 "register_operand")
2378 (match_operand:V16QI 3 "register_operand")]
2382 if (!BYTES_BIG_ENDIAN) {
2383 altivec_expand_vec_perm_le (operands);
2388 (define_insn "*altivec_vpermr_<mode>_internal"
2389 [(set (match_operand:VM 0 "register_operand" "=?wa,v")
2390 (unspec:VM [(match_operand:VM 1 "register_operand" "wa,v")
2391 (match_operand:VM 2 "register_operand" "0,v")
2392 (match_operand:V16QI 3 "register_operand" "wa,v")]
2398 [(set_attr "type" "vecperm")
2399 (set_attr "isa" "p9v,*")])
2401 (define_insn "altivec_vrfip" ; ceil
2402 [(set (match_operand:V4SF 0 "register_operand" "=v")
2403 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2407 [(set_attr "type" "vecfloat")])
2409 (define_insn "altivec_vrfin"
2410 [(set (match_operand:V4SF 0 "register_operand" "=v")
2411 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2415 [(set_attr "type" "vecfloat")])
2417 (define_insn "*altivec_vrfim" ; floor
2418 [(set (match_operand:V4SF 0 "register_operand" "=v")
2419 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2423 [(set_attr "type" "vecfloat")])
2425 (define_insn "altivec_vcfux"
2426 [(set (match_operand:V4SF 0 "register_operand" "=v")
2427 (unspec:V4SF [(match_operand:V4SI 1 "register_operand" "v")
2428 (match_operand:QI 2 "immediate_operand" "i")]
2432 [(set_attr "type" "vecfloat")])
2434 (define_insn "altivec_vcfsx"
2435 [(set (match_operand:V4SF 0 "register_operand" "=v")
2436 (unspec:V4SF [(match_operand:V4SI 1 "register_operand" "v")
2437 (match_operand:QI 2 "immediate_operand" "i")]
2441 [(set_attr "type" "vecfloat")])
2443 (define_insn "altivec_vctuxs"
2444 [(set (match_operand:V4SI 0 "register_operand" "=v")
2445 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
2446 (match_operand:QI 2 "immediate_operand" "i")]
2448 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2451 [(set_attr "type" "vecfloat")])
2453 (define_insn "altivec_vctsxs"
2454 [(set (match_operand:V4SI 0 "register_operand" "=v")
2455 (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
2456 (match_operand:QI 2 "immediate_operand" "i")]
2458 (set (reg:SI VSCR_REGNO) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
2461 [(set_attr "type" "vecfloat")])
2463 (define_insn "altivec_vlogefp"
2464 [(set (match_operand:V4SF 0 "register_operand" "=v")
2465 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2469 [(set_attr "type" "vecfloat")])
2471 (define_insn "altivec_vexptefp"
2472 [(set (match_operand:V4SF 0 "register_operand" "=v")
2473 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2477 [(set_attr "type" "vecfloat")])
2479 (define_insn "*altivec_vrsqrtefp"
2480 [(set (match_operand:V4SF 0 "register_operand" "=v")
2481 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2483 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2485 [(set_attr "type" "vecfloat")])
2487 (define_insn "altivec_vrefp"
2488 [(set (match_operand:V4SF 0 "register_operand" "=v")
2489 (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
2491 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2493 [(set_attr "type" "vecfloat")])
;; copysign for V4SF: build a splat of the sign-bit mask (1 << 31 in each
;; word), then use a vector select so each result lane takes the sign bit
;; from operand 2 and the remaining bits from operand 1.
2495 (define_expand "altivec_copysign_v4sf3"
2496 [(use (match_operand:V4SF 0 "register_operand"))
2497 (use (match_operand:V4SF 1 "register_operand"))
2498 (use (match_operand:V4SF 2 "register_operand"))]
2499 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2501 rtx mask = gen_reg_rtx (V4SImode);
2502 rtx mask_val = gen_int_mode (HOST_WIDE_INT_1U << 31, SImode);
2503 rtvec v = gen_rtvec (4, mask_val, mask_val, mask_val, mask_val);
2505 emit_insn (gen_vec_initv4sisi (mask, gen_rtx_PARALLEL (V4SImode, v)))
2506 emit_insn (gen_vector_select_v4sf (operands[0], operands[1], operands[2],
2507 gen_lowpart (V4SFmode, mask)));
;; vsldoi: shift the 32-byte concatenation of operands 1 and 2 left by
;; the immediate byte count in operand 3, taking the high 16 bytes.
2511 (define_insn "altivec_vsldoi_<mode>"
2512 [(set (match_operand:VM 0 "register_operand" "=v")
2513 (unspec:VM [(match_operand:VM 1 "register_operand" "v")
2514 (match_operand:VM 2 "register_operand" "v")
2515 (match_operand:QI 3 "immediate_operand" "i")]
2518 "vsldoi %0,%1,%2,%3"
2519 [(set_attr "type" "vecperm")])
;; Unpack high signed elements.  Element numbering is reversed on
;; little-endian, so the "high" semantics are obtained there by emitting
;; the low-unpack mnemonic instead.
2521 (define_insn "altivec_vupkhs<VU_char>"
2522 [(set (match_operand:VP 0 "register_operand" "=v")
2523 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2524 UNSPEC_VUNPACK_HI_SIGN))]
2527 if (BYTES_BIG_ENDIAN)
2528 return "vupkhs<VU_char> %0,%1";
2530 return "vupkls<VU_char> %0,%1";
2532 [(set_attr "type" "vecperm")])
2534 (define_insn "*altivec_vupkhs<VU_char>_direct"
2535 [(set (match_operand:VP 0 "register_operand" "=v")
2536 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2537 UNSPEC_VUNPACK_HI_SIGN_DIRECT))]
2539 "vupkhs<VU_char> %0,%1"
2540 [(set_attr "type" "vecperm")])
;; Unpack low signed elements — mirror image of altivec_vupkhs<VU_char>:
;; on little-endian the high-unpack mnemonic is emitted instead.
2542 (define_insn "altivec_vupkls<VU_char>"
2543 [(set (match_operand:VP 0 "register_operand" "=v")
2544 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2545 UNSPEC_VUNPACK_LO_SIGN))]
2548 if (BYTES_BIG_ENDIAN)
2549 return "vupkls<VU_char> %0,%1";
2551 return "vupkhs<VU_char> %0,%1";
2553 [(set_attr "type" "vecperm")])
2555 (define_insn "*altivec_vupkls<VU_char>_direct"
2556 [(set (match_operand:VP 0 "register_operand" "=v")
2557 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
2558 UNSPEC_VUNPACK_LO_SIGN_DIRECT))]
2560 "vupkls<VU_char> %0,%1"
2561 [(set_attr "type" "vecperm")])
2563 (define_insn "altivec_vupkhpx"
2564 [(set (match_operand:V4SI 0 "register_operand" "=v")
2565 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
2569 if (BYTES_BIG_ENDIAN)
2570 return "vupkhpx %0,%1";
2572 return "vupklpx %0,%1";
2574 [(set_attr "type" "vecperm")])
2576 (define_insn "altivec_vupklpx"
2577 [(set (match_operand:V4SI 0 "register_operand" "=v")
2578 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
2582 if (BYTES_BIG_ENDIAN)
2583 return "vupklpx %0,%1";
2585 return "vupkhpx %0,%1";
2587 [(set_attr "type" "vecperm")])
2589 ;; Compare vectors producing a vector result and a predicate, setting CR6 to
2590 ;; indicate a combined status
;; Dot-form equality compare: one insn writes both the per-element
;; all-ones/all-zeros result vector (operand 0) and the CR6 summary bits.
2591 (define_insn "altivec_vcmpequ<VI_char>_p"
2592 [(set (reg:CC CR6_REGNO)
2593 (unspec:CC [(eq:CC (match_operand:VI2 1 "register_operand" "v")
2594 (match_operand:VI2 2 "register_operand" "v"))]
2596 (set (match_operand:VI2 0 "register_operand" "=v")
2597 (eq:VI2 (match_dup 1)
2600 "vcmpequ<VI_char>. %0,%1,%2"
2601 [(set_attr "type" "veccmpfx")])
2603 (define_insn "altivec_vcmpequt_p"
2604 [(set (reg:CC CR6_REGNO)
2605 (unspec:CC [(eq:CC (match_operand:V1TI 1 "altivec_register_operand" "v")
2606 (match_operand:V1TI 2 "altivec_register_operand" "v"))]
2608 (set (match_operand:V1TI 0 "altivec_register_operand" "=v")
2609 (eq:V1TI (match_dup 1)
2612 "vcmpequq. %0,%1,%2"
2613 [(set_attr "type" "veccmpfx")])
2615 (define_insn "*altivec_vcmpgts<VI_char>_p"
2616 [(set (reg:CC CR6_REGNO)
2617 (unspec:CC [(gt:CC (match_operand:VI2 1 "register_operand" "v")
2618 (match_operand:VI2 2 "register_operand" "v"))]
2620 (set (match_operand:VI2 0 "register_operand" "=v")
2621 (gt:VI2 (match_dup 1)
2624 "vcmpgts<VI_char>. %0,%1,%2"
2625 [(set_attr "type" "veccmpfx")])
2627 (define_insn "*altivec_vcmpgtst_p"
2628 [(set (reg:CC CR6_REGNO)
2629 (unspec:CC [(gt:CC (match_operand:V1TI 1 "register_operand" "v")
2630 (match_operand:V1TI 2 "register_operand" "v"))]
2632 (set (match_operand:V1TI 0 "register_operand" "=v")
2633 (gt:V1TI (match_dup 1)
2636 "vcmpgtsq. %0,%1,%2"
2637 [(set_attr "type" "veccmpfx")])
2639 (define_insn "*altivec_vcmpgtu<VI_char>_p"
2640 [(set (reg:CC CR6_REGNO)
2641 (unspec:CC [(gtu:CC (match_operand:VI2 1 "register_operand" "v")
2642 (match_operand:VI2 2 "register_operand" "v"))]
2644 (set (match_operand:VI2 0 "register_operand" "=v")
2645 (gtu:VI2 (match_dup 1)
2648 "vcmpgtu<VI_char>. %0,%1,%2"
2649 [(set_attr "type" "veccmpfx")])
2651 (define_insn "*altivec_vcmpgtut_p"
2652 [(set (reg:CC CR6_REGNO)
2653 (unspec:CC [(gtu:CC (match_operand:V1TI 1 "register_operand" "v")
2654 (match_operand:V1TI 2 "register_operand" "v"))]
2656 (set (match_operand:V1TI 0 "register_operand" "=v")
2657 (gtu:V1TI (match_dup 1)
2660 "vcmpgtuq. %0,%1,%2"
2661 [(set_attr "type" "veccmpfx")])
2663 (define_insn "*altivec_vcmpeqfp_p"
2664 [(set (reg:CC CR6_REGNO)
2665 (unspec:CC [(eq:CC (match_operand:V4SF 1 "register_operand" "v")
2666 (match_operand:V4SF 2 "register_operand" "v"))]
2668 (set (match_operand:V4SF 0 "register_operand" "=v")
2669 (eq:V4SF (match_dup 1)
2671 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2672 "vcmpeqfp. %0,%1,%2"
2673 [(set_attr "type" "veccmp")])
2675 (define_insn "*altivec_vcmpgtfp_p"
2676 [(set (reg:CC CR6_REGNO)
2677 (unspec:CC [(gt:CC (match_operand:V4SF 1 "register_operand" "v")
2678 (match_operand:V4SF 2 "register_operand" "v"))]
2680 (set (match_operand:V4SF 0 "register_operand" "=v")
2681 (gt:V4SF (match_dup 1)
2683 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2684 "vcmpgtfp. %0,%1,%2"
2685 [(set_attr "type" "veccmp")])
2687 (define_insn "*altivec_vcmpgefp_p"
2688 [(set (reg:CC CR6_REGNO)
2689 (unspec:CC [(ge:CC (match_operand:V4SF 1 "register_operand" "v")
2690 (match_operand:V4SF 2 "register_operand" "v"))]
2692 (set (match_operand:V4SF 0 "register_operand" "=v")
2693 (ge:V4SF (match_dup 1)
2695 "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
2696 "vcmpgefp. %0,%1,%2"
2697 [(set_attr "type" "veccmp")])
2699 (define_insn "altivec_vcmpbfp_p"
2700 [(set (reg:CC CR6_REGNO)
2701 (unspec:CC [(match_operand:V4SF 1 "register_operand" "v")
2702 (match_operand:V4SF 2 "register_operand" "v")]
2704 (set (match_operand:V4SF 0 "register_operand" "=v")
2705 (unspec:V4SF [(match_dup 1)
2708 "VECTOR_UNIT_ALTIVEC_OR_VSX_P (V4SFmode)"
2710 [(set_attr "type" "veccmp")])
2712 (define_insn "altivec_mtvscr"
2713 [(set (reg:SI VSCR_REGNO)
2715 [(match_operand:V4SI 0 "register_operand" "v")] UNSPECV_MTVSCR))]
2718 [(set_attr "type" "vecsimple")])
2720 (define_insn "altivec_mfvscr"
2721 [(set (match_operand:V8HI 0 "register_operand" "=v")
2722 (unspec_volatile:V8HI [(reg:SI VSCR_REGNO)] UNSPECV_MFVSCR))]
2725 [(set_attr "type" "vecsimple")])
2727 (define_insn "altivec_dssall"
2728 [(unspec_volatile [(const_int 0)] UNSPECV_DSSALL)]
2731 [(set_attr "type" "vecsimple")])
2733 (define_insn "altivec_dss"
2734 [(unspec_volatile [(match_operand:QI 0 "immediate_operand" "i")]
2738 [(set_attr "type" "vecsimple")])
2740 (define_insn "altivec_dst"
2741 [(unspec [(match_operand 0 "register_operand" "b")
2742 (match_operand:SI 1 "register_operand" "r")
2743 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DST)]
2744 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2746 [(set_attr "type" "vecsimple")])
2748 (define_insn "altivec_dstt"
2749 [(unspec [(match_operand 0 "register_operand" "b")
2750 (match_operand:SI 1 "register_operand" "r")
2751 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTT)]
2752 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2754 [(set_attr "type" "vecsimple")])
2756 (define_insn "altivec_dstst"
2757 [(unspec [(match_operand 0 "register_operand" "b")
2758 (match_operand:SI 1 "register_operand" "r")
2759 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTST)]
2760 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2762 [(set_attr "type" "vecsimple")])
2764 (define_insn "altivec_dststt"
2765 [(unspec [(match_operand 0 "register_operand" "b")
2766 (match_operand:SI 1 "register_operand" "r")
2767 (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTSTT)]
2768 "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
2770 [(set_attr "type" "vecsimple")])
;; lvsl expander.  Big-endian: emit the raw lvsl.  Little-endian: lvsl's
;; result is byte-reversed relative to what callers expect, so load the
;; raw mask and permute it against the 0,1,2,...,15 series to produce the
;; BE-equivalent shift-left mask.  (The unspec code on the elided line is
;; presumably a vperm unspec — confirm against the full file.)
2772 (define_expand "altivec_lvsl"
2773 [(use (match_operand:V16QI 0 "register_operand"))
2774 (use (match_operand:V16QI 1 "memory_operand"))]
2777 if (BYTES_BIG_ENDIAN)
2778 emit_insn (gen_altivec_lvsl_direct (operands[0], operands[1]));
2781 rtx mask, constv, vperm;
2782 mask = gen_reg_rtx (V16QImode);
2783 emit_insn (gen_altivec_lvsl_direct (mask, operands[1]));
2784 constv = gen_const_vec_series (V16QImode, const0_rtx, const1_rtx);
2785 constv = force_reg (V16QImode, constv);
2786 vperm = gen_rtx_UNSPEC (V16QImode, gen_rtvec (3, mask, mask, constv),
2788 emit_insn (gen_rtx_SET (operands[0], vperm));
2793 (define_insn "altivec_lvsl_reg_<mode>"
2794 [(set (match_operand:V16QI 0 "altivec_register_operand" "=v")
2796 [(match_operand:GPR 1 "gpc_reg_operand" "b")]
2800 [(set_attr "type" "vecload")])
2802 (define_insn "altivec_lvsl_direct"
2803 [(set (match_operand:V16QI 0 "register_operand" "=v")
2804 (unspec:V16QI [(match_operand:V16QI 1 "memory_operand" "Z")]
2808 [(set_attr "type" "vecload")])
2810 (define_expand "altivec_lvsr"
2811 [(use (match_operand:V16QI 0 "altivec_register_operand"))
2812 (use (match_operand:V16QI 1 "memory_operand"))]
2815 if (BYTES_BIG_ENDIAN)
2816 emit_insn (gen_altivec_lvsr_direct (operands[0], operands[1]));
2819 rtx mask, constv, vperm;
2820 mask = gen_reg_rtx (V16QImode);
2821 emit_insn (gen_altivec_lvsr_direct (mask, operands[1]));
2822 constv = gen_const_vec_series (V16QImode, const0_rtx, const1_rtx);
2823 constv = force_reg (V16QImode, constv);
2824 vperm = gen_rtx_UNSPEC (V16QImode, gen_rtvec (3, mask, mask, constv),
2826 emit_insn (gen_rtx_SET (operands[0], vperm));
2831 (define_insn "altivec_lvsr_reg_<mode>"
2832 [(set (match_operand:V16QI 0 "altivec_register_operand" "=v")
2834 [(match_operand:GPR 1 "gpc_reg_operand" "b")]
2838 [(set_attr "type" "vecload")])
2840 (define_insn "altivec_lvsr_direct"
2841 [(set (match_operand:V16QI 0 "register_operand" "=v")
2842 (unspec:V16QI [(match_operand:V16QI 1 "memory_operand" "Z")]
2846 [(set_attr "type" "vecload")])
;; Build the realignment mask the vectorizer uses for misaligned loads:
;; negate the load address into a fresh register and feed that through
;; lvsr, which yields a permute mask dependent on addr mod 16.
2848 (define_expand "build_vector_mask_for_load"
2849 [(set (match_operand:V16QI 0 "register_operand")
2850 (unspec:V16QI [(match_operand 1 "memory_operand")] UNSPEC_LVSR))]
2856 gcc_assert (MEM_P (operands[1]));
2858 addr = XEXP (operands[1], 0);
2859 temp = gen_reg_rtx (GET_MODE (addr));
2860 emit_insn (gen_rtx_SET (temp, gen_rtx_NEG (GET_MODE (addr), addr)));
2861 emit_insn (gen_altivec_lvsr (operands[0],
2862 replace_equiv_address (operands[1], temp)));
2866 ;; Parallel some of the LVE* and STV*'s with unspecs because some have
2867 ;; identical rtl but different instructions-- and gcc gets confused.
2869 (define_insn "altivec_lve<VI_char>x"
2871 [(set (match_operand:VI 0 "register_operand" "=v")
2872 (match_operand:VI 1 "memory_operand" "Z"))
2873 (unspec [(const_int 0)] UNSPEC_LVE)])]
2875 "lve<VI_char>x %0,%y1"
2876 [(set_attr "type" "vecload")])
2878 (define_insn "*altivec_lvesfx"
2880 [(set (match_operand:V4SF 0 "register_operand" "=v")
2881 (match_operand:V4SF 1 "memory_operand" "Z"))
2882 (unspec [(const_int 0)] UNSPEC_LVE)])]
2885 [(set_attr "type" "vecload")])
2887 (define_insn "altivec_lvxl_<mode>"
2889 [(set (match_operand:VM2 0 "register_operand" "=v")
2890 (match_operand:VM2 1 "memory_operand" "Z"))
2891 (unspec [(const_int 0)] UNSPEC_SET_VSCR)])]
2894 [(set_attr "type" "vecload")])
2896 ; This version of lvx is used only in cases where we need to force an lvx
2897 ; over any other load, and we don't care about losing CSE opportunities.
2898 ; Its primary use is for prologue register saves.
2899 (define_insn "altivec_lvx_<mode>_internal"
2901 [(set (match_operand:VM2 0 "register_operand" "=v")
2902 (match_operand:VM2 1 "memory_operand" "Z"))
2903 (unspec [(const_int 0)] UNSPEC_LVX)])]
2906 [(set_attr "type" "vecload")])
2908 ; The following patterns embody what lvx should usually look like.
;; Dispatch to the reg+reg (_2op) form when the address is a sum of two
;; registers, otherwise to the single-register (_1op) form.  The _di/_si
;; choice is presumably gated on the pointer mode (TARGET_64BIT) in the
;; elided lines — confirm against the full file.
2909 (define_expand "altivec_lvx_<VM2:mode>"
2910 [(set (match_operand:VM2 0 "register_operand")
2911 (match_operand:VM2 1 "altivec_indexed_or_indirect_operand"))]
2914 rtx addr = XEXP (operand1, 0);
2915 if (rs6000_sum_of_two_registers_p (addr))
2917 rtx op1 = XEXP (addr, 0);
2918 rtx op2 = XEXP (addr, 1);
2920 emit_insn (gen_altivec_lvx_<VM2:mode>_2op_di (operand0, op1, op2));
2922 emit_insn (gen_altivec_lvx_<VM2:mode>_2op_si (operand0, op1, op2));
2927 emit_insn (gen_altivec_lvx_<VM2:mode>_1op_di (operand0, addr));
2929 emit_insn (gen_altivec_lvx_<VM2:mode>_1op_si (operand0, addr));
2934 ; The next two patterns embody what lvx should usually look like.
2935 (define_insn "altivec_lvx_<VM2:mode>_2op_<P:mptrsize>"
2936 [(set (match_operand:VM2 0 "register_operand" "=v")
2937 (mem:VM2 (and:P (plus:P (match_operand:P 1 "register_operand" "b")
2938 (match_operand:P 2 "register_operand" "r"))
2942 [(set_attr "type" "vecload")])
2944 (define_insn "altivec_lvx_<VM2:mode>_1op_<P:mptrsize>"
2945 [(set (match_operand:VM2 0 "register_operand" "=v")
2946 (mem:VM2 (and:P (match_operand:P 1 "register_operand" "r")
2950 [(set_attr "type" "vecload")])
2952 ; This version of stvx is used only in cases where we need to force an stvx
2953 ; over any other store, and we don't care about losing CSE opportunities.
2954 ; Its primary use is for epilogue register restores.
2955 (define_insn "altivec_stvx_<mode>_internal"
2957 [(set (match_operand:VM2 0 "memory_operand" "=Z")
2958 (match_operand:VM2 1 "register_operand" "v"))
2959 (unspec [(const_int 0)] UNSPEC_STVX)])]
2962 [(set_attr "type" "vecstore")])
2964 ; The following patterns embody what stvx should usually look like.
;; Store-side twin of altivec_lvx_<VM2:mode>: pick the reg+reg (_2op) or
;; single-register (_1op) stvx pattern based on the address shape.  Note
;; the operand roles are swapped versus lvx: operand 1 is the memory
;; destination, operand 0 the source register.
2965 (define_expand "altivec_stvx_<VM2:mode>"
2966 [(set (match_operand:VM2 1 "altivec_indexed_or_indirect_operand")
2967 (match_operand:VM2 0 "register_operand"))]
2970 rtx addr = XEXP (operand1, 0);
2971 if (rs6000_sum_of_two_registers_p (addr))
2973 rtx op1 = XEXP (addr, 0);
2974 rtx op2 = XEXP (addr, 1);
2976 emit_insn (gen_altivec_stvx_<VM2:mode>_2op_di (operand0, op1, op2));
2978 emit_insn (gen_altivec_stvx_<VM2:mode>_2op_si (operand0, op1, op2));
2983 emit_insn (gen_altivec_stvx_<VM2:mode>_1op_di (operand0, addr));
2985 emit_insn (gen_altivec_stvx_<VM2:mode>_1op_si (operand0, addr));
2990 ; The next two patterns embody what stvx should usually look like.
2991 (define_insn "altivec_stvx_<VM2:mode>_2op_<P:mptrsize>"
2992 [(set (mem:VM2 (and:P (plus:P (match_operand:P 1 "register_operand" "b")
2993 (match_operand:P 2 "register_operand" "r"))
2995 (match_operand:VM2 0 "register_operand" "v"))]
2998 [(set_attr "type" "vecstore")])
3000 (define_insn "altivec_stvx_<VM2:mode>_1op_<P:mptrsize>"
3001 [(set (mem:VM2 (and:P (match_operand:P 1 "register_operand" "r")
3003 (match_operand:VM2 0 "register_operand" "v"))]
3006 [(set_attr "type" "vecstore")])
3008 (define_insn "altivec_stvxl_<mode>"
3010 [(set (match_operand:VM2 0 "memory_operand" "=Z")
3011 (match_operand:VM2 1 "register_operand" "v"))
3012 (unspec [(const_int 0)] UNSPEC_STVXL)])]
3015 [(set_attr "type" "vecstore")])
3017 (define_insn "altivec_stve<VI_char>x"
3018 [(set (match_operand:<VI_scalar> 0 "memory_operand" "=Z")
3019 (unspec:<VI_scalar> [(match_operand:VI 1 "register_operand" "v")] UNSPEC_STVE))]
3021 "stve<VI_char>x %1,%y0"
3022 [(set_attr "type" "vecstore")])
3024 (define_insn "*altivec_stvesfx"
3025 [(set (match_operand:SF 0 "memory_operand" "=Z")
3026 (unspec:SF [(match_operand:V4SF 1 "register_operand" "v")] UNSPEC_STVE))]
3029 [(set_attr "type" "vecstore")])
3032 ;; signed int/float to double convert words 0 and 2
3033 (define_expand "doublee<mode>2"
3034 [(set (match_operand:V2DF 0 "register_operand" "=v")
3035 (match_operand:VSX_W 1 "register_operand" "v"))]
3038 machine_mode op_mode = GET_MODE (operands[1]);
3040 if (BYTES_BIG_ENDIAN)
3042 /* Big endian word numbering for words in operand is 0 1 2 3.
3043 Input words 0 and 2 are where they need to be. */
3044 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], operands[1]));
3048 /* Little endian word numbering for operand is 3 2 1 0.
3049 take (operand[1] operand[1]) and shift left one word
3050 3 2 1 0 3 2 1 0 => 2 1 0 3
3051 Input words 2 and 0 are now where they need to be for the
3054 rtx rtx_val = GEN_INT (1);
3056 rtx_tmp = gen_reg_rtx (op_mode);
3057 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3058 operands[1], rtx_val));
3059 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
3063 [(set_attr "type" "veccomplex")])
3065 ;; Generate unsdoublee
3066 ;; unsigned int to double convert words 0 and 2
3067 (define_expand "unsdoubleev4si2"
3068 [(set (match_operand:V2DF 0 "register_operand" "=v")
3069 (match_operand:V4SI 1 "register_operand" "v"))]
3072 if (BYTES_BIG_ENDIAN)
3074 /* Big endian word numbering for words in operand is 0 1 2 3.
3075 Input words 0 and 2 are where they need to be. */
3076 emit_insn (gen_vsx_xvcvuxwdp (operands[0], operands[1]));
3080 /* Little endian word numbering for operand is 3 2 1 0.
3081 take (operand[1] operand[1]) and shift left one word
3082 3 2 1 0 3 2 1 0 => 2 1 0 3
3083 Input words 2 and 0 are now where they need to be for the
3086 rtx rtx_val = GEN_INT (1);
3088 rtx_tmp = gen_reg_rtx (V4SImode);
3089 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3090 operands[1], rtx_val));
3091 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
3095 [(set_attr "type" "veccomplex")])
3097 ;; Generate doubleov
3098 ;; signed int/float to double convert words 1 and 3
3099 (define_expand "doubleo<mode>2"
3100 [(set (match_operand:V2DF 0 "register_operand" "=v")
3101 (match_operand:VSX_W 1 "register_operand" "v"))]
3104 machine_mode op_mode = GET_MODE (operands[1]);
3106 if (BYTES_BIG_ENDIAN)
3108 /* Big endian word numbering for words in operand is 0 1 2 3.
3109 take (operand[1] operand[1]) and shift left one word
3110 0 1 2 3 0 1 2 3 => 1 2 3 0
3111 Input words 1 and 3 are now where they need to be for the
3114 rtx rtx_val = GEN_INT (1);
3116 rtx_tmp = gen_reg_rtx (op_mode);
3117 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3118 operands[1], rtx_val));
3119 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
3123 /* Little endian word numbering for operand is 3 2 1 0.
3124 Input words 3 and 1 are where they need to be. */
3125 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], operands[1]));
3129 [(set_attr "type" "veccomplex")])
3131 ;; Generate unsdoubleov
3132 ;; unsigned int to double convert words 1 and 3
3133 (define_expand "unsdoubleov4si2"
3134 [(set (match_operand:V2DF 0 "register_operand" "=v")
3135 (match_operand:V4SI 1 "register_operand" "v"))]
3138 if (BYTES_BIG_ENDIAN)
3140 /* Big endian word numbering for words in operand is 0 1 2 3.
3141 take (operand[1] operand[1]) and shift left one word
3142 0 1 2 3 0 1 2 3 => 1 2 3 0
3143 Input words 1 and 3 are now where they need to be for the
3146 rtx rtx_val = GEN_INT (1);
3148 rtx_tmp = gen_reg_rtx (V4SImode);
3149 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3150 operands[1], rtx_val));
3151 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
3155 /* Want to convert the words 1 and 3.
3156 Little endian word numbering for operand is 3 2 1 0.
3157 Input words 3 and 1 are where they need to be. */
3158 emit_insn (gen_vsx_xvcvuxwdp (operands[0], operands[1]));
3162 [(set_attr "type" "veccomplex")])
3164 ;; Generate doublehv
3165 ;; signed int/float to double convert words 0 and 1
3166 (define_expand "doubleh<mode>2"
3167 [(set (match_operand:V2DF 0 "register_operand" "=v")
3168 (match_operand:VSX_W 1 "register_operand" "v"))]
3174 machine_mode op_mode = GET_MODE (operands[1]);
3175 rtx_tmp = gen_reg_rtx (op_mode);
3177 if (BYTES_BIG_ENDIAN)
3179 /* Big endian word numbering for words in operand is 0 1 2 3.
3180 Shift operand left one word, rtx_tmp word order is now 1 2 3 0.
3181 take (rts_tmp operand[1]) and shift left three words
3182 1 2 3 0 0 1 2 3 => 0 0 1 2
3183 Input words 0 and 1 are now where they need to be for the
3185 rtx_val = GEN_INT (1);
3186 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3187 operands[1], rtx_val));
3189 rtx_val = GEN_INT (3);
3190 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, rtx_tmp,
3191 operands[1], rtx_val));
3192 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
3196 /* Little endian word numbering for operand is 3 2 1 0.
3197 Shift operand left three words, rtx_tmp word order is now 0 3 2 1.
3198 take (operand[1] rts_tmp) and shift left two words
3199 3 2 1 0 0 3 2 1 => 1 0 0 3
3200 Input words 0 and 1 are now where they need to be for the
3202 rtx_val = GEN_INT (3);
3203 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3204 operands[1], rtx_val));
3206 rtx_val = GEN_INT (2);
3207 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3209 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
3213 [(set_attr "type" "veccomplex")])
3215 ;; Generate unsdoublehv
3216 ;; unsigned int to double convert words 0 and 1
3217 (define_expand "unsdoublehv4si2"
3218 [(set (match_operand:V2DF 0 "register_operand" "=v")
3219 (match_operand:V4SI 1 "register_operand" "v"))]
3222 rtx rtx_tmp = gen_reg_rtx (V4SImode);
3223 rtx rtx_val = GEN_INT (12);
3225 if (BYTES_BIG_ENDIAN)
3227 /* Big endian word numbering for words in operand is 0 1 2 3.
3228 Shift operand left one word, rtx_tmp word order is now 1 2 3 0.
3229 take (rts_tmp operand[1]) and shift left three words
3230 1 2 3 0 0 1 2 3 => 0 0 1 2
3231 Input words 0 and 1 are now where they need to be for the
3233 rtx_val = GEN_INT (1);
3234 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3235 operands[1], rtx_val));
3237 rtx_val = GEN_INT (3);
3238 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, rtx_tmp,
3239 operands[1], rtx_val));
3240 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
3244 /* Little endian word numbering for operand is 3 2 1 0.
3245 Shift operand left three words, rtx_tmp word order is now 0 3 2 1.
3246 take (operand[1] rts_tmp) and shift left two words
3247 3 2 1 0 0 3 2 1 => 1 0 0 3
3248 Input words 1 and 0 are now where they need to be for the
3250 rtx_val = GEN_INT (3);
3252 rtx_tmp = gen_reg_rtx (V4SImode);
3253 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3254 operands[1], rtx_val));
3256 rtx_val = GEN_INT (2);
3257 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3259 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
3263 [(set_attr "type" "veccomplex")])
3265 ;; Generate doublelv
3266 ;; signed int/float to double convert words 2 and 3
3267 (define_expand "doublel<mode>2"
3268 [(set (match_operand:V2DF 0 "register_operand" "=v")
3269 (match_operand:VSX_W 1 "register_operand" "v"))]
3273 rtx rtx_val = GEN_INT (3);
3275 machine_mode op_mode = GET_MODE (operands[1]);
3276 rtx_tmp = gen_reg_rtx (op_mode);
3278 if (BYTES_BIG_ENDIAN)
3280 /* Big endian word numbering for operand is 0 1 2 3.
3281 Shift operand left three words, rtx_tmp word order is now 3 0 1 2.
3282 take (operand[1] rtx_tmp) and shift left two words
3283 0 1 2 3 3 0 1 2 => 2 3 3 0
3284 now use convert instruction to convert word 2 and 3 in the
3286 rtx_val = GEN_INT (3);
3287 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3288 operands[1], rtx_val));
3290 rtx_val = GEN_INT (2);
3291 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3293 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
3297 /* Little endian word numbering for operand is 3 2 1 0.
3298 Shift operand left one word, rtx_tmp word order is now 2 1 0 3.
3299 take (rtx_tmp operand[1]) and shift left three words
3300 2 1 0 3 3 2 1 0 => 3 3 2 1
3301 now use convert instruction to convert word 3 and 2 in the
3303 rtx_val = GEN_INT (1);
3304 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, operands[1],
3305 operands[1], rtx_val));
3307 rtx_val = GEN_INT (3);
3308 emit_insn (gen_vsx_xxsldwi_<mode> (rtx_tmp, rtx_tmp,
3309 operands[1], rtx_val));
3310 emit_insn (gen_vsx_xvcv<VS_sxwsp>dp (operands[0], rtx_tmp));
3314 [(set_attr "type" "veccomplex")])
3316 ;; Generate unsdoublelv
3317 ;; unsigned int to double convert convert 2 and 3
3318 (define_expand "unsdoublelv4si2"
3319 [(set (match_operand:V2DF 0 "register_operand" "=v")
3320 (match_operand:V4SI 1 "register_operand" "v"))]
3323 rtx rtx_tmp = gen_reg_rtx (V4SImode);
3324 rtx rtx_val = GEN_INT (12);
3326 if (BYTES_BIG_ENDIAN)
3328 /* Big endian word numbering for operand is 0 1 2 3.
3329 Shift operand left three words, rtx_tmp word order is now 3 0 1 2.
3330 take (operand[1] rtx_tmp) and shift left two words
3331 0 1 2 3 3 0 1 2 => 2 3 3 0
3332 now use convert instruction to convert word 2 and 3 in the
3334 rtx_val = GEN_INT (3);
3335 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3336 operands[1], rtx_val));
3338 rtx_val = GEN_INT (2);
3339 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, operands[1],
3341 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
3345 /* Little endian word numbering for operand is 3 2 1 0.
3346 Shift operand left one word, rtx_tmp word order is now 2 1 0 3.
3347 take (rtx_tmp operand[1]) and shift left three words
3348 2 1 0 3 3 2 1 0 => 3 3 2 1
3349 now use convert instruction to convert word 3 and 2 in the
3351 rtx_val = GEN_INT (1);
3352 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp,
3353 operands[1], operands[1], rtx_val));
3355 rtx_val = GEN_INT (3);
3356 emit_insn (gen_vsx_xxsldwi_v4si (rtx_tmp, rtx_tmp,
3357 operands[1], rtx_val));
3358 emit_insn (gen_vsx_xvcvuxwdp (operands[0], rtx_tmp));
3362 [(set_attr "type" "veccomplex")])
3364 ;; Generate two vector F32 converted to packed vector I16 vector
;; Convert each V4SF input to V4SI with the saturating vctuxs (scale 0),
;; then pack the two word vectors into one V8HI with vpkswss.
3365 (define_expand "convert_4f32_8i16"
3366 [(set (match_operand:V8HI 0 "register_operand" "=v")
3367 (unspec:V8HI [(match_operand:V4SF 1 "register_operand" "v")
3368 (match_operand:V4SF 2 "register_operand" "v")]
3369 UNSPEC_CONVERT_4F32_8I16))]
3372 rtx rtx_tmp_hi = gen_reg_rtx (V4SImode);
3373 rtx rtx_tmp_lo = gen_reg_rtx (V4SImode);
3375 emit_insn (gen_altivec_vctuxs (rtx_tmp_hi, operands[1], const0_rtx));
3376 emit_insn (gen_altivec_vctuxs (rtx_tmp_lo, operands[2], const0_rtx));
3377 emit_insn (gen_altivec_vpkswss (operands[0], rtx_tmp_hi, rtx_tmp_lo));
;; Expander: pack two V4SF vectors into a V8HI holding 16-bit floats.
;; xvcvsphp converts each input; vpkuwum packs the halves, with the
;; operand order swapped on big endian to keep element order correct.
3382 ;; Convert two vector F32 to packed vector F16.
3383 ;; This builtin packs 32-bit floating-point values into a packed
3384 ;; 16-bit floating point values (stored in 16bit integer type).
3385 ;; (vector unsigned short r = vec_pack_to_short_fp32 (a, b);
3386 ;; The expected codegen for this builtin is
3389 ;; if (little endian)
3394 (define_expand "convert_4f32_8f16"
3395 [(set (match_operand:V8HI 0 "register_operand" "=v")
3396 (unspec:V8HI [(match_operand:V4SF 1 "register_operand" "v")
3397 (match_operand:V4SF 2 "register_operand" "v")]
3398 UNSPEC_CONVERT_4F32_8F16))]
3401 rtx rtx_tmp_hi = gen_reg_rtx (V4SImode);
3402 rtx rtx_tmp_lo = gen_reg_rtx (V4SImode);
3404 emit_insn (gen_vsx_xvcvsphp (rtx_tmp_hi, operands[1]));
3405 emit_insn (gen_vsx_xvcvsphp (rtx_tmp_lo, operands[2]));
3406 if (!BYTES_BIG_ENDIAN)
3407 emit_insn (gen_altivec_vpkuwum (operands[0], rtx_tmp_hi, rtx_tmp_lo));
3409 emit_insn (gen_altivec_vpkuwum (operands[0], rtx_tmp_lo, rtx_tmp_hi))
;; Expander: integer vector absolute value as max(x, 0 - x).
;; operands[2]=zero scratch, operands[3]=zero constant, operands[4]=negated
;; input scratch; the RTL pattern emits the subtract and smax.
3415 ;; xxlxor/vxor SCRATCH0,SCRATCH0,SCRATCH0
3416 ;; vsubu?m SCRATCH2,SCRATCH1,%1
3417 ;; vmaxs? %0,%1,SCRATCH2"
3418 (define_expand "abs<mode>2"
3419 [(set (match_dup 2) (match_dup 3))
3421 (minus:VI2 (match_dup 2)
3422 (match_operand:VI2 1 "register_operand" "v")))
3423 (set (match_operand:VI2 0 "register_operand" "=v")
3424 (smax:VI2 (match_dup 1) (match_dup 4)))]
3427 operands[2] = gen_reg_rtx (<MODE>mode);
3428 operands[3] = CONST0_RTX (<MODE>mode);
3429 operands[4] = gen_reg_rtx (<MODE>mode);
;; Expander: integer vector negated absolute value as min(x, 0 - x).
;; Mirrors abs<mode>2 above but uses smin instead of smax.
3433 ;; vspltisw SCRATCH1,0
3434 ;; vsubu?m SCRATCH2,SCRATCH1,%1
3435 ;; vmins? %0,%1,SCRATCH2"
3436 (define_expand "nabs<mode>2"
3437 [(set (match_dup 2) (match_dup 3))
3439 (minus:VI2 (match_dup 2)
3440 (match_operand:VI2 1 "register_operand" "v")))
3441 (set (match_operand:VI2 0 "register_operand" "=v")
3442 (smin:VI2 (match_dup 1) (match_dup 4)))]
3445 operands[2] = gen_reg_rtx (<MODE>mode);
3446 operands[3] = CONST0_RTX (<MODE>mode);
3447 operands[4] = gen_reg_rtx (<MODE>mode);
;; Expander: V4SF absolute value by masking off the sign bits.
;; Builds 0x80000000 in each word (splat -1, then shift each word left
;; by 31 — shifting by itself since -1 masks to 31) and ANDs with the
;; complement, clearing the IEEE sign bit of every element.
3451 ;; vspltisw SCRATCH1,-1
3452 ;; vslw SCRATCH2,SCRATCH1,SCRATCH1
3453 ;; vandc %0,%1,SCRATCH2
3454 (define_expand "altivec_absv4sf2"
3456 (vec_duplicate:V4SI (const_int -1)))
3458 (ashift:V4SI (match_dup 2) (match_dup 2)))
3459 (set (match_operand:V4SF 0 "register_operand" "=v")
3460 (and:V4SF (not:V4SF (subreg:V4SF (match_dup 3) 0))
3461 (match_operand:V4SF 1 "register_operand" "v")))]
3464 operands[2] = gen_reg_rtx (V4SImode);
3465 operands[3] = gen_reg_rtx (V4SImode);
;; Expander: saturating integer vector absolute value, max(x, 0 -ss x).
;; The ss_minus is paired with a set of VSCR because the saturating
;; subtract updates the vector status register.
3469 ;; vspltis? SCRATCH0,0
3470 ;; vsubs?s SCRATCH2,SCRATCH1,%1
3471 ;; vmaxs? %0,%1,SCRATCH2"
3472 (define_expand "altivec_abss_<mode>"
3473 [(set (match_dup 2) (vec_duplicate:VI (const_int 0)))
3474 (parallel [(set (match_dup 3)
3475 (ss_minus:VI (match_dup 2)
3476 (match_operand:VI 1 "register_operand" "v")))
3477 (set (reg:SI VSCR_REGNO)
3478 (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))])
3479 (set (match_operand:VI 0 "register_operand" "=v")
3480 (smax:VI (match_dup 1) (match_dup 3)))]
3483 operands[2] = gen_reg_rtx (GET_MODE (operands[0]));
3484 operands[3] = gen_reg_rtx (GET_MODE (operands[0]));
;; Expander: sum-across reduction of a short-element vector to a scalar.
;; vsum4s?s partially sums into words, vsumsws_direct finishes the
;; sum-across; the live element index depends on endianness.
3487 (define_expand "reduc_plus_scal_<mode>"
3488 [(set (match_operand:<VI_scalar> 0 "register_operand" "=v")
3489 (unspec:VIshort [(match_operand:VIshort 1 "register_operand" "v")]
3490 UNSPEC_REDUC_PLUS))]
3493 rtx vzero = gen_reg_rtx (V4SImode);
3494 rtx vtmp1 = gen_reg_rtx (V4SImode);
3495 rtx vtmp2 = gen_reg_rtx (<MODE>mode);
3496 rtx dest = gen_lowpart (V4SImode, vtmp2);
3497 int elt = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (<MODE>mode) - 1 : 0;
3499 emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));
3500 emit_insn (gen_altivec_vsum4s<VI_char>s (vtmp1, operands[1], vzero));
3501 emit_insn (gen_altivec_vsumsws_direct (dest, vtmp1, vzero));
3502 rs6000_expand_vector_extract (operands[0], vtmp2, GEN_INT (elt));
;; Insn: ISA 3.0 single-instruction vector negate (vnegw/vnegd).
3506 (define_insn "*p9_neg<mode>2"
3507 [(set (match_operand:VNEG 0 "altivec_register_operand" "=v")
3508 (neg:VNEG (match_operand:VNEG 1 "altivec_register_operand" "v")))]
3510 "vneg<VI_char> %0,%1"
3511 [(set_attr "type" "vecsimple")])
;; Expander: vector negate.  Without P9 vneg{w,d} (or for element sizes
;; it does not cover) fall back to subtracting from a zero vector.
3513 (define_expand "neg<mode>2"
3514 [(set (match_operand:VI2 0 "register_operand")
3515 (neg:VI2 (match_operand:VI2 1 "register_operand")))]
3518 if (!TARGET_P9_VECTOR || (<MODE>mode != V4SImode && <MODE>mode != V2DImode))
3522 vzero = gen_reg_rtx (GET_MODE (operands[0]));
3523 emit_move_insn (vzero, CONST0_RTX (<MODE>mode));
3524 emit_insn (gen_sub<mode>3 (operands[0], vzero, operands[1]));
;; Expander: unsigned dot product with accumulate, mapped directly onto
;; vmsumu{b,h}m (multiply-sum of short elements into V4SI plus addend).
3529 (define_expand "udot_prod<mode>"
3530 [(set (match_operand:V4SI 0 "register_operand" "=v")
3531 (plus:V4SI (match_operand:V4SI 3 "register_operand" "v")
3532 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
3533 (match_operand:VIshort 2 "register_operand" "v")]
3537 emit_insn (gen_altivec_vmsumu<VI_char>m (operands[0], operands[1], operands[2], operands[3]));
;; Expander: signed halfword dot product with accumulate via vmsumshm.
3541 (define_expand "sdot_prodv8hi"
3542 [(set (match_operand:V4SI 0 "register_operand" "=v")
3543 (plus:V4SI (match_operand:V4SI 3 "register_operand" "v")
3544 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3545 (match_operand:V8HI 2 "register_operand" "v")]
3549 emit_insn (gen_altivec_vmsumshm (operands[0], operands[1], operands[2], operands[3]));
;; Widening-sum expanders: a multiply-sum against a vector of ones
;; widens and sums adjacent elements into V4SI, adding operands[2].
3553 (define_expand "widen_usum<mode>3"
3554 [(set (match_operand:V4SI 0 "register_operand" "=v")
3555 (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
3556 (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")]
3560 rtx vones = gen_reg_rtx (GET_MODE (operands[1]));
3562 emit_insn (gen_altivec_vspltis<VI_char> (vones, const1_rtx));
3563 emit_insn (gen_altivec_vmsumu<VI_char>m (operands[0], operands[1], vones, operands[2]));
;; Signed byte variant: vmsummbm (mixed signed*unsigned; the ones
;; vector is the unsigned operand, so the result is a signed widen-sum).
3567 (define_expand "widen_ssumv16qi3"
3568 [(set (match_operand:V4SI 0 "register_operand" "=v")
3569 (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
3570 (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v")]
3574 rtx vones = gen_reg_rtx (V16QImode);
3576 emit_insn (gen_altivec_vspltisb (vones, const1_rtx));
3577 emit_insn (gen_altivec_vmsummbm (operands[0], operands[1], vones, operands[2]));
;; Signed halfword variant via vmsumshm.
3581 (define_expand "widen_ssumv8hi3"
3582 [(set (match_operand:V4SI 0 "register_operand" "=v")
3583 (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
3584 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
3588 rtx vones = gen_reg_rtx (V8HImode);
3590 emit_insn (gen_altivec_vspltish (vones, const1_rtx));
3591 emit_insn (gen_altivec_vmsumshm (operands[0], operands[1], vones, operands[2]));
;; Signed unpack high/low expanders; the _DIRECT unspecs map to the
;; endian-corrected vupkhs?/vupkls? patterns defined elsewhere.
3595 (define_expand "vec_unpacks_hi_<VP_small_lc>"
3596 [(set (match_operand:VP 0 "register_operand" "=v")
3597 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
3598 UNSPEC_VUNPACK_HI_SIGN_DIRECT))]
3602 (define_expand "vec_unpacks_lo_<VP_small_lc>"
3603 [(set (match_operand:VP 0 "register_operand" "=v")
3604 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
3605 UNSPEC_VUNPACK_LO_SIGN_DIRECT))]
;; Mixed-mode vperm insns used by the widening-multiply expanders.
;; Two alternatives: VSX xxperm (requires destination = accumulator,
;; P9) and classic AltiVec vperm.
3609 (define_insn "vperm_v8hiv4si"
3610 [(set (match_operand:V4SI 0 "register_operand" "=?wa,v")
3611 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "wa,v")
3612 (match_operand:V4SI 2 "register_operand" "0,v")
3613 (match_operand:V16QI 3 "register_operand" "wa,v")]
3619 [(set_attr "type" "vecperm")
3620 (set_attr "isa" "p9v,*")])
3622 (define_insn "vperm_v16qiv8hi"
3623 [(set (match_operand:V8HI 0 "register_operand" "=?wa,v")
3624 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "wa,v")
3625 (match_operand:V8HI 2 "register_operand" "0,v")
3626 (match_operand:V16QI 3 "register_operand" "wa,v")]
3632 [(set_attr "type" "vecperm")
3633 (set_attr "isa" "p9v,*")])
;; Unsigned unpack high/low: merge the input with a zero vector so each
;; narrow element is zero-extended, then retype via a lowpart move.
;; The merge direction and operand order flip with endianness.
3635 (define_expand "vec_unpacku_hi_<VP_small_lc>"
3636 [(set (match_operand:VP 0 "register_operand" "=v")
3637 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
3641 rtx vzero = gen_reg_rtx (<VP_small>mode);
3642 emit_insn (gen_altivec_vspltis<VU_char> (vzero, const0_rtx));
3644 rtx res = gen_reg_rtx (<VP_small>mode);
3645 rtx op1 = operands[1];
3647 if (BYTES_BIG_ENDIAN)
3648 emit_insn (gen_altivec_vmrgh<VU_char> (res, vzero, op1));
3650 emit_insn (gen_altivec_vmrgl<VU_char> (res, op1, vzero));
3652 emit_insn (gen_move_insn (operands[0], gen_lowpart (<MODE>mode, res)));
;; Low half: same technique with the opposite merge instruction.
3656 (define_expand "vec_unpacku_lo_<VP_small_lc>"
3657 [(set (match_operand:VP 0 "register_operand" "=v")
3658 (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
3662 rtx vzero = gen_reg_rtx (<VP_small>mode);
3663 emit_insn (gen_altivec_vspltis<VU_char> (vzero, const0_rtx));
3665 rtx res = gen_reg_rtx (<VP_small>mode);
3666 rtx op1 = operands[1];
3668 if (BYTES_BIG_ENDIAN)
3669 emit_insn (gen_altivec_vmrgl<VU_char> (res, vzero, op1));
3671 emit_insn (gen_altivec_vmrgh<VU_char> (res, op1, vzero));
3673 emit_insn (gen_move_insn (operands[0], gen_lowpart (<MODE>mode, res)));
;; Widening unsigned byte multiply, high/low halves: multiply even and
;; odd elements separately (vmuleub/vmuloub), then interleave with a
;; merge.  On little endian the even/odd roles swap, so the multiply
;; insns and merge operand order are exchanged.
3677 (define_expand "vec_widen_umult_hi_v16qi"
3678 [(set (match_operand:V8HI 0 "register_operand" "=v")
3679 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
3680 (match_operand:V16QI 2 "register_operand" "v")]
3684 rtx ve = gen_reg_rtx (V8HImode);
3685 rtx vo = gen_reg_rtx (V8HImode);
3687 if (BYTES_BIG_ENDIAN)
3689 emit_insn (gen_altivec_vmuleub (ve, operands[1], operands[2]));
3690 emit_insn (gen_altivec_vmuloub (vo, operands[1], operands[2]));
3691 emit_insn (gen_altivec_vmrghh_direct (operands[0], ve, vo));
3695 emit_insn (gen_altivec_vmuloub (ve, operands[1], operands[2]));
3696 emit_insn (gen_altivec_vmuleub (vo, operands[1], operands[2]));
3697 emit_insn (gen_altivec_vmrghh_direct (operands[0], vo, ve));
;; Low half: identical except the final merge is vmrglh.
3702 (define_expand "vec_widen_umult_lo_v16qi"
3703 [(set (match_operand:V8HI 0 "register_operand" "=v")
3704 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
3705 (match_operand:V16QI 2 "register_operand" "v")]
3709 rtx ve = gen_reg_rtx (V8HImode);
3710 rtx vo = gen_reg_rtx (V8HImode);
3712 if (BYTES_BIG_ENDIAN)
3714 emit_insn (gen_altivec_vmuleub (ve, operands[1], operands[2]));
3715 emit_insn (gen_altivec_vmuloub (vo, operands[1], operands[2]));
3716 emit_insn (gen_altivec_vmrglh_direct (operands[0], ve, vo));
3720 emit_insn (gen_altivec_vmuloub (ve, operands[1], operands[2]));
3721 emit_insn (gen_altivec_vmuleub (vo, operands[1], operands[2]));
3722 emit_insn (gen_altivec_vmrglh_direct (operands[0], vo, ve));
;; Widening signed byte multiply, high/low halves: same even/odd
;; multiply-then-merge scheme as the unsigned versions above, using
;; vmulesb/vmulosb.
3727 (define_expand "vec_widen_smult_hi_v16qi"
3728 [(set (match_operand:V8HI 0 "register_operand" "=v")
3729 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
3730 (match_operand:V16QI 2 "register_operand" "v")]
3734 rtx ve = gen_reg_rtx (V8HImode);
3735 rtx vo = gen_reg_rtx (V8HImode);
3737 if (BYTES_BIG_ENDIAN)
3739 emit_insn (gen_altivec_vmulesb (ve, operands[1], operands[2]));
3740 emit_insn (gen_altivec_vmulosb (vo, operands[1], operands[2]));
3741 emit_insn (gen_altivec_vmrghh_direct (operands[0], ve, vo));
3745 emit_insn (gen_altivec_vmulosb (ve, operands[1], operands[2]));
3746 emit_insn (gen_altivec_vmulesb (vo, operands[1], operands[2]));
3747 emit_insn (gen_altivec_vmrghh_direct (operands[0], vo, ve));
;; Low half.
3752 (define_expand "vec_widen_smult_lo_v16qi"
3753 [(set (match_operand:V8HI 0 "register_operand" "=v")
3754 (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
3755 (match_operand:V16QI 2 "register_operand" "v")]
3759 rtx ve = gen_reg_rtx (V8HImode);
3760 rtx vo = gen_reg_rtx (V8HImode);
3762 if (BYTES_BIG_ENDIAN)
3764 emit_insn (gen_altivec_vmulesb (ve, operands[1], operands[2]));
3765 emit_insn (gen_altivec_vmulosb (vo, operands[1], operands[2]));
3766 emit_insn (gen_altivec_vmrglh_direct (operands[0], ve, vo));
3770 emit_insn (gen_altivec_vmulosb (ve, operands[1], operands[2]));
3771 emit_insn (gen_altivec_vmulesb (vo, operands[1], operands[2]));
3772 emit_insn (gen_altivec_vmrglh_direct (operands[0], vo, ve));
;; Widening unsigned halfword multiply, high/low halves: vmuleuh/
;; vmulouh into V4SI, interleaved with word merges; even/odd roles and
;; merge operand order swap on little endian.
3777 (define_expand "vec_widen_umult_hi_v8hi"
3778 [(set (match_operand:V4SI 0 "register_operand" "=v")
3779 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3780 (match_operand:V8HI 2 "register_operand" "v")]
3784 rtx ve = gen_reg_rtx (V4SImode);
3785 rtx vo = gen_reg_rtx (V4SImode);
3787 if (BYTES_BIG_ENDIAN)
3789 emit_insn (gen_altivec_vmuleuh (ve, operands[1], operands[2]));
3790 emit_insn (gen_altivec_vmulouh (vo, operands[1], operands[2]));
3791 emit_insn (gen_altivec_vmrghw_direct_v4si (operands[0], ve, vo));
3795 emit_insn (gen_altivec_vmulouh (ve, operands[1], operands[2]));
3796 emit_insn (gen_altivec_vmuleuh (vo, operands[1], operands[2]));
3797 emit_insn (gen_altivec_vmrghw_direct_v4si (operands[0], vo, ve));
;; Low half.
3802 (define_expand "vec_widen_umult_lo_v8hi"
3803 [(set (match_operand:V4SI 0 "register_operand" "=v")
3804 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3805 (match_operand:V8HI 2 "register_operand" "v")]
3809 rtx ve = gen_reg_rtx (V4SImode);
3810 rtx vo = gen_reg_rtx (V4SImode);
3812 if (BYTES_BIG_ENDIAN)
3814 emit_insn (gen_altivec_vmuleuh (ve, operands[1], operands[2]));
3815 emit_insn (gen_altivec_vmulouh (vo, operands[1], operands[2]));
3816 emit_insn (gen_altivec_vmrglw_direct_v4si (operands[0], ve, vo));
3820 emit_insn (gen_altivec_vmulouh (ve, operands[1], operands[2]));
3821 emit_insn (gen_altivec_vmuleuh (vo, operands[1], operands[2]));
3822 emit_insn (gen_altivec_vmrglw_direct_v4si (operands[0], vo, ve));
;; Widening signed halfword multiply, high/low halves, via vmulesh/
;; vmulosh with the same endian-aware merge scheme.
3827 (define_expand "vec_widen_smult_hi_v8hi"
3828 [(set (match_operand:V4SI 0 "register_operand" "=v")
3829 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3830 (match_operand:V8HI 2 "register_operand" "v")]
3834 rtx ve = gen_reg_rtx (V4SImode);
3835 rtx vo = gen_reg_rtx (V4SImode);
3837 if (BYTES_BIG_ENDIAN)
3839 emit_insn (gen_altivec_vmulesh (ve, operands[1], operands[2]));
3840 emit_insn (gen_altivec_vmulosh (vo, operands[1], operands[2]));
3841 emit_insn (gen_altivec_vmrghw_direct_v4si (operands[0], ve, vo));
3845 emit_insn (gen_altivec_vmulosh (ve, operands[1], operands[2]));
3846 emit_insn (gen_altivec_vmulesh (vo, operands[1], operands[2]));
3847 emit_insn (gen_altivec_vmrghw_direct_v4si (operands[0], vo, ve));
;; Low half.
3852 (define_expand "vec_widen_smult_lo_v8hi"
3853 [(set (match_operand:V4SI 0 "register_operand" "=v")
3854 (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
3855 (match_operand:V8HI 2 "register_operand" "v")]
3859 rtx ve = gen_reg_rtx (V4SImode);
3860 rtx vo = gen_reg_rtx (V4SImode);
3862 if (BYTES_BIG_ENDIAN)
3864 emit_insn (gen_altivec_vmulesh (ve, operands[1], operands[2]));
3865 emit_insn (gen_altivec_vmulosh (vo, operands[1], operands[2]));
3866 emit_insn (gen_altivec_vmrglw_direct_v4si (operands[0], ve, vo));
3870 emit_insn (gen_altivec_vmulosh (ve, operands[1], operands[2]));
3871 emit_insn (gen_altivec_vmulesh (vo, operands[1], operands[2]));
3872 emit_insn (gen_altivec_vmrglw_direct_v4si (operands[0], vo, ve));
;; Truncating pack of two wide vectors into one narrow vector
;; (modulo semantics, UNSPEC_VPACK_UNS_UNS_MOD).
3877 (define_expand "vec_pack_trunc_<mode>"
3878 [(set (match_operand:<VP_small> 0 "register_operand" "=v")
3879 (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
3880 (match_operand:VP 2 "register_operand" "v")]
3881 UNSPEC_VPACK_UNS_UNS_MOD))]
;; Byte vector multiply: vmulesb/vmulosb produce 16-bit even/odd
;; products; a vperm with a constructed byte mask selects the low byte
;; of each product in the right order.  Mask indices are built to match
;; the target endianness.
3885 (define_expand "mulv16qi3"
3886 [(set (match_operand:V16QI 0 "register_operand" "=v")
3887 (mult:V16QI (match_operand:V16QI 1 "register_operand" "v")
3888 (match_operand:V16QI 2 "register_operand" "v")))]
3891 rtx even = gen_reg_rtx (V8HImode);
3892 rtx odd = gen_reg_rtx (V8HImode);
3893 rtx mask = gen_reg_rtx (V16QImode);
3894 rtvec v = rtvec_alloc (16);
3897 for (i = 0; i < 8; ++i) {
3898 RTVEC_ELT (v, 2 * i)
3899 = gen_rtx_CONST_INT (QImode, BYTES_BIG_ENDIAN ? 2 * i + 1 : 31 - 2 * i);
3900 RTVEC_ELT (v, 2 * i + 1)
3901 = gen_rtx_CONST_INT (QImode, BYTES_BIG_ENDIAN ? 2 * i + 17 : 15 - 2 * i);
3904 emit_insn (gen_vec_initv16qiqi (mask, gen_rtx_PARALLEL (V16QImode, v)));
3905 emit_insn (gen_altivec_vmulesb (even, operands[1], operands[2]));
3906 emit_insn (gen_altivec_vmulosb (odd, operands[1], operands[2]));
3907 emit_insn (gen_altivec_vperm_v8hiv16qi (operands[0], even, odd, mask));
;; Expander for the vpermxor crypto instruction.  The hardware indexes
;; bytes in big-endian order, so for little endian the selector
;; (operands[3]) is complemented first to translate LE indices to BE.
3911 (define_expand "altivec_vpermxor"
3912 [(use (match_operand:V16QI 0 "register_operand"))
3913 (use (match_operand:V16QI 1 "register_operand"))
3914 (use (match_operand:V16QI 2 "register_operand"))
3915 (use (match_operand:V16QI 3 "register_operand"))]
3918 if (!BYTES_BIG_ENDIAN)
3920 /* vpermxor indexes the bytes using Big Endian numbering. If LE,
3921 change indexing in operand[3] to BE index. */
3922 rtx be_index = gen_reg_rtx (V16QImode);
3924 emit_insn (gen_one_cmplv16qi2 (be_index, operands[3]));
3925 emit_insn (gen_crypto_vpermxor_v16qi (operands[0], operands[1],
3926 operands[2], be_index));
3929 emit_insn (gen_crypto_vpermxor_v16qi (operands[0], operands[1],
3930 operands[2], operands[3]));
;; V4SF negate by flipping the sign bits: build 0x80000000 per word
;; (splat -1, shift each word left by 31 via vslw with itself), then
;; XOR with the input.
3934 (define_expand "altivec_negv4sf2"
3935 [(use (match_operand:V4SF 0 "register_operand"))
3936 (use (match_operand:V4SF 1 "register_operand"))]
3941 /* Generate [-0.0, -0.0, -0.0, -0.0]. */
3942 neg0 = gen_reg_rtx (V4SImode);
3943 emit_insn (gen_altivec_vspltisw (neg0, constm1_rtx));
3944 emit_insn (gen_vashlv4si3 (neg0, neg0, neg0));
3947 emit_insn (gen_xorv4sf3 (operands[0],
3948 gen_lowpart (V4SFmode, neg0), operands[1]));
;; Reverse the elements of a TI-mode value by building a byte-reversal
;; permutation mask and applying vperm.  For TImode size == 16 and
;; num_elements == 1, so the mask is a full byte reversal.
3953 ;; Vector reverse elements
3954 (define_expand "altivec_vreveti2"
3955 [(set (match_operand:TI 0 "register_operand" "=v")
3956 (unspec:TI [(match_operand:TI 1 "register_operand" "v")]
3960 int i, j, size, num_elements;
3961 rtvec v = rtvec_alloc (16);
3962 rtx mask = gen_reg_rtx (V16QImode);
3964 size = GET_MODE_UNIT_SIZE (TImode);
3965 num_elements = GET_MODE_NUNITS (TImode);
3967 for (j = 0; j < num_elements; j++)
3968 for (i = 0; i < size; i++)
3969 RTVEC_ELT (v, i + j * size)
3970 = GEN_INT (i + (num_elements - 1 - j) * size);
3972 emit_insn (gen_vec_initv16qiqi (mask, gen_rtx_PARALLEL (V16QImode, v)))
3973 emit_insn (gen_altivec_vperm_ti (operands[0], operands[1],
3974 operands[1], mask));
;; Reverse elements of V16QI/V8HI/V4SI/V4SF.  On P9 use the xxbr*
;; byte-reverse instructions (for V8HI/V4SI/V4SF an xxbrq full-vector
;; reversal through V1TI followed by an element-width reversal restores
;; element-internal byte order).  Otherwise fall back to a vperm with a
;; computed element-reversal mask.
3978 ;; Vector reverse elements for V16QI V8HI V4SI V4SF
3979 (define_expand "altivec_vreve<mode>2"
3980 [(set (match_operand:VEC_K 0 "register_operand" "=v")
3981 (unspec:VEC_K [(match_operand:VEC_K 1 "register_operand" "v")]
3985 if (TARGET_P9_VECTOR)
3987 if (<MODE>mode == V16QImode)
3988 emit_insn (gen_p9_xxbrq_v16qi (operands[0], operands[1]));
3989 else if (<MODE>mode == V8HImode)
3991 rtx subreg1 = simplify_gen_subreg (V1TImode, operands[1],
3993 rtx temp = gen_reg_rtx (V1TImode);
3994 emit_insn (gen_p9_xxbrq_v1ti (temp, subreg1));
3995 rtx subreg2 = simplify_gen_subreg (<MODE>mode, temp,
3997 emit_insn (gen_p9_xxbrh_v8hi (operands[0], subreg2));
3999 else /* V4SI and V4SF. */
4001 rtx subreg1 = simplify_gen_subreg (V1TImode, operands[1],
4003 rtx temp = gen_reg_rtx (V1TImode);
4004 emit_insn (gen_p9_xxbrq_v1ti (temp, subreg1));
4005 rtx subreg2 = simplify_gen_subreg (<MODE>mode, temp,
4007 if (<MODE>mode == V4SImode)
4008 emit_insn (gen_p9_xxbrw_v4si (operands[0], subreg2));
4010 emit_insn (gen_p9_xxbrw_v4sf (operands[0], subreg2));
4015 int i, j, size, num_elements;
4016 rtvec v = rtvec_alloc (16);
4017 rtx mask = gen_reg_rtx (V16QImode);
4019 size = GET_MODE_UNIT_SIZE (<MODE>mode);
4020 num_elements = GET_MODE_NUNITS (<MODE>mode);
4022 for (j = 0; j < num_elements; j++)
4023 for (i = 0; i < size; i++)
4024 RTVEC_ELT (v, i + j * size)
4025 = GEN_INT (i + (num_elements - 1 - j) * size);
4027 emit_insn (gen_vec_initv16qiqi (mask, gen_rtx_PARALLEL (V16QImode, v)))
4028 emit_insn (gen_altivec_vperm_<mode> (operands[0], operands[1],
4029 operands[1], mask));
;; Reverse elements of a two-element vector (V2DI/V2DF): a doubleword
;; swap (xxswapd) is exactly an element reversal.
4033 ;; Vector reverse elements for V2DI V2DF
4034 (define_expand "altivec_vreve<mode>2"
4035 [(set (match_operand:VEC_64 0 "register_operand" "=v")
4036 (unspec:VEC_64 [(match_operand:VEC_64 1 "register_operand" "v")]
4040 emit_insn (gen_xxswapd_<mode> (operands[0], operands[1]));
;; Cell-only unaligned-load helpers: lvlx/lvlxl/lvrx/lvrxl load the
;; left/right portion of an unaligned vector ("l" suffix = mark the
;; cache line least-recently-used).  All are gated on PROCESSOR_CELL.
4044 ;; Vector SIMD PEM v2.06c defines LVLX, LVLXL, LVRX, LVRXL,
4045 ;; STVLX, STVLXL, STVVRX, STVRXL are available only on Cell.
4046 (define_insn "altivec_lvlx"
4047 [(set (match_operand:V16QI 0 "register_operand" "=v")
4048 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
4050 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
4052 [(set_attr "type" "vecload")])
4054 (define_insn "altivec_lvlxl"
4055 [(set (match_operand:V16QI 0 "register_operand" "=v")
4056 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
4058 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
4060 [(set_attr "type" "vecload")])
4062 (define_insn "altivec_lvrx"
4063 [(set (match_operand:V16QI 0 "register_operand" "=v")
4064 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
4066 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
4068 [(set_attr "type" "vecload")])
4070 (define_insn "altivec_lvrxl"
4071 [(set (match_operand:V16QI 0 "register_operand" "=v")
4072 (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
4074 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
4076 [(set_attr "type" "vecload")])
;; Cell-only unaligned-store counterparts: stvlx/stvlxl/stvrx/stvrxl.
;; Each pattern pairs the memory store with an empty unspec to keep the
;; instruction from being treated as an ordinary vector store.
4078 (define_insn "altivec_stvlx"
4080 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
4081 (match_operand:V16QI 1 "register_operand" "v"))
4082 (unspec [(const_int 0)] UNSPEC_STVLX)])]
4083 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
4085 [(set_attr "type" "vecstore")])
4087 (define_insn "altivec_stvlxl"
4089 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
4090 (match_operand:V16QI 1 "register_operand" "v"))
4091 (unspec [(const_int 0)] UNSPEC_STVLXL)])]
4092 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
4094 [(set_attr "type" "vecstore")])
4096 (define_insn "altivec_stvrx"
4098 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
4099 (match_operand:V16QI 1 "register_operand" "v"))
4100 (unspec [(const_int 0)] UNSPEC_STVRX)])]
4101 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
4103 [(set_attr "type" "vecstore")])
4105 (define_insn "altivec_stvrxl"
4107 [(set (match_operand:V16QI 0 "memory_operand" "=Z")
4108 (match_operand:V16QI 1 "register_operand" "v"))
4109 (unspec [(const_int 0)] UNSPEC_STVRXL)])]
4110 "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
4112 [(set_attr "type" "vecstore")])
;; Unpack-and-convert expanders: widen half of a V8HI to V4SI
;; (signed or unsigned unpack), then convert to V4SF with vcfsx/vcfux
;; (scale factor 0).
4114 (define_expand "vec_unpacks_float_hi_v8hi"
4115 [(set (match_operand:V4SF 0 "register_operand")
4116 (unspec:V4SF [(match_operand:V8HI 1 "register_operand")]
4117 UNSPEC_VUPKHS_V4SF))]
4120 rtx tmp = gen_reg_rtx (V4SImode);
4122 emit_insn (gen_vec_unpacks_hi_v8hi (tmp, operands[1]));
4123 emit_insn (gen_altivec_vcfsx (operands[0], tmp, const0_rtx));
4127 (define_expand "vec_unpacks_float_lo_v8hi"
4128 [(set (match_operand:V4SF 0 "register_operand")
4129 (unspec:V4SF [(match_operand:V8HI 1 "register_operand")]
4130 UNSPEC_VUPKLS_V4SF))]
4133 rtx tmp = gen_reg_rtx (V4SImode);
4135 emit_insn (gen_vec_unpacks_lo_v8hi (tmp, operands[1]));
4136 emit_insn (gen_altivec_vcfsx (operands[0], tmp, const0_rtx));
4140 (define_expand "vec_unpacku_float_hi_v8hi"
4141 [(set (match_operand:V4SF 0 "register_operand")
4142 (unspec:V4SF [(match_operand:V8HI 1 "register_operand")]
4143 UNSPEC_VUPKHU_V4SF))]
4146 rtx tmp = gen_reg_rtx (V4SImode);
4148 emit_insn (gen_vec_unpacku_hi_v8hi (tmp, operands[1]));
4149 emit_insn (gen_altivec_vcfux (operands[0], tmp, const0_rtx));
4153 (define_expand "vec_unpacku_float_lo_v8hi"
4154 [(set (match_operand:V4SF 0 "register_operand")
4155 (unspec:V4SF [(match_operand:V8HI 1 "register_operand")]
4156 UNSPEC_VUPKLU_V4SF))]
4159 rtx tmp = gen_reg_rtx (V4SImode);
4161 emit_insn (gen_vec_unpacku_lo_v8hi (tmp, operands[1]));
4162 emit_insn (gen_altivec_vcfux (operands[0], tmp, const0_rtx));
;; Power8/Power9 single-instruction vector bit operations: count
;; leading zeros, absolute difference (vabsdu), count trailing zeros,
;; population count, parity, and gather-bits-by-bytes.
4167 ;; Power8/power9 vector instructions encoded as Altivec instructions
4169 ;; Vector count leading zeros
4170 (define_insn "*p8v_clz<mode>2"
4171 [(set (match_operand:VI2 0 "register_operand" "=v")
4172 (clz:VI2 (match_operand:VI2 1 "register_operand" "v")))]
4175 [(set_attr "type" "vecsimple")])
4177 ;; Vector absolute difference unsigned
4178 (define_expand "vadu<mode>3"
4179 [(set (match_operand:VI 0 "register_operand")
4180 (unspec:VI [(match_operand:VI 1 "register_operand")
4181 (match_operand:VI 2 "register_operand")]
4185 ;; Vector absolute difference unsigned
4186 (define_insn "p9_vadu<mode>3"
4187 [(set (match_operand:VI 0 "register_operand" "=v")
4188 (unspec:VI [(match_operand:VI 1 "register_operand" "v")
4189 (match_operand:VI 2 "register_operand" "v")]
4192 "vabsdu<wd> %0,%1,%2"
4193 [(set_attr "type" "vecsimple")])
4195 ;; Vector count trailing zeros
4196 (define_insn "*p9v_ctz<mode>2"
4197 [(set (match_operand:VI2 0 "register_operand" "=v")
4198 (ctz:VI2 (match_operand:VI2 1 "register_operand" "v")))]
4201 [(set_attr "type" "vecsimple")])
4203 ;; Vector population count
4204 (define_insn "*p8v_popcount<mode>2"
4205 [(set (match_operand:VI2 0 "register_operand" "=v")
4206 (popcount:VI2 (match_operand:VI2 1 "register_operand" "v")))]
4209 [(set_attr "type" "vecsimple")])
4212 (define_insn "*p9v_parity<mode>2"
4213 [(set (match_operand:VParity 0 "register_operand" "=v")
4214 (parity:VParity (match_operand:VParity 1 "register_operand" "v")))]
4217 [(set_attr "type" "vecsimple")])
4219 ;; Vector Gather Bits by Bytes by Doubleword
4220 (define_insn "p8v_vgbbd"
4221 [(set (match_operand:V16QI 0 "register_operand" "=v")
4222 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")]
4226 [(set_attr "type" "vecsimple")])
;; ISA 2.07 128-bit integer arithmetic in the vector registers (V1TI):
;; add/subtract modulo (vadduqm/vsubuqm), carry/borrow generation
;; (vaddcuq/vsubcuq), and the extended forms taking a carry-in operand
;; (vaddeuqm/vaddecuq/vsubeuqm/vsubecuq).
4229 ;; 128-bit binary integer arithmetic
4230 ;; We have a special container type (V1TImode) to allow operations using the
4231 ;; ISA 2.07 128-bit binary support to target the VMX/altivec registers without
4232 ;; having to worry about the register allocator deciding GPRs are better.
4234 (define_insn "altivec_vadduqm"
4235 [(set (match_operand:V1TI 0 "register_operand" "=v")
4236 (plus:V1TI (match_operand:V1TI 1 "register_operand" "v")
4237 (match_operand:V1TI 2 "register_operand" "v")))]
4240 [(set_attr "type" "vecsimple")])
4242 (define_insn "altivec_vaddcuq"
4243 [(set (match_operand:V1TI 0 "register_operand" "=v")
4244 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4245 (match_operand:V1TI 2 "register_operand" "v")]
4249 [(set_attr "type" "vecsimple")])
4251 (define_insn "altivec_vsubuqm"
4252 [(set (match_operand:V1TI 0 "register_operand" "=v")
4253 (minus:V1TI (match_operand:V1TI 1 "register_operand" "v")
4254 (match_operand:V1TI 2 "register_operand" "v")))]
4257 [(set_attr "type" "vecsimple")])
4259 (define_insn "altivec_vsubcuq"
4260 [(set (match_operand:V1TI 0 "register_operand" "=v")
4261 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4262 (match_operand:V1TI 2 "register_operand" "v")]
4266 [(set_attr "type" "vecsimple")])
4268 (define_insn "altivec_vaddeuqm"
4269 [(set (match_operand:V1TI 0 "register_operand" "=v")
4270 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4271 (match_operand:V1TI 2 "register_operand" "v")
4272 (match_operand:V1TI 3 "register_operand" "v")]
4275 "vaddeuqm %0,%1,%2,%3"
4276 [(set_attr "type" "vecsimple")])
4278 (define_insn "altivec_vaddecuq"
4279 [(set (match_operand:V1TI 0 "register_operand" "=v")
4280 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4281 (match_operand:V1TI 2 "register_operand" "v")
4282 (match_operand:V1TI 3 "register_operand" "v")]
4285 "vaddecuq %0,%1,%2,%3"
4286 [(set_attr "type" "vecsimple")])
4288 (define_insn "altivec_vsubeuqm"
4289 [(set (match_operand:V1TI 0 "register_operand" "=v")
4290 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4291 (match_operand:V1TI 2 "register_operand" "v")
4292 (match_operand:V1TI 3 "register_operand" "v")]
4295 "vsubeuqm %0,%1,%2,%3"
4296 [(set_attr "type" "vecsimple")])
4298 (define_insn "altivec_vsubecuq"
4299 [(set (match_operand:V1TI 0 "register_operand" "=v")
4300 (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
4301 (match_operand:V1TI 2 "register_operand" "v")
4302 (match_operand:V1TI 3 "register_operand" "v")]
4305 "vsubecuq %0,%1,%2,%3"
4306 [(set_attr "type" "vecsimple")])
;; Bit-permute instructions.  vbpermq is exposed with two result types
;; (V2DI for easy integer extraction, V16QI for the vector-char API);
;; vbpermd is the doubleword variant.
4308 ;; We use V2DI as the output type to simplify converting the permute
4309 ;; bits into an integer
4310 (define_insn "altivec_vbpermq"
4311 [(set (match_operand:V2DI 0 "register_operand" "=v")
4312 (unspec:V2DI [(match_operand:V16QI 1 "register_operand" "v")
4313 (match_operand:V16QI 2 "register_operand" "v")]
4317 [(set_attr "type" "vecperm")])
4319 ; One of the vector API interfaces requires returning vector unsigned char.
4320 (define_insn "altivec_vbpermq2"
4321 [(set (match_operand:V16QI 0 "register_operand" "=v")
4322 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
4323 (match_operand:V16QI 2 "register_operand" "v")]
4327 [(set_attr "type" "vecperm")])
4329 (define_insn "altivec_vbpermd"
4330 [(set (match_operand:V2DI 0 "register_operand" "=v")
4331 (unspec:V2DI [(match_operand:V2DI 1 "register_operand" "v")
4332 (match_operand:V16QI 2 "register_operand" "v")]
4336 [(set_attr "type" "vecsimple")])
;; Sum of absolute differences over bytes: vabsdub computes per-byte
;; |a-b|, vsum4ubs partially sums into words, then an ordinary vector
;; add accumulates into operands[3] (kept separate because vsum4ubs
;; saturates).
4338 ;; Support for SAD (sum of absolute differences).
4340 ;; Due to saturating semantics, we can't combine the sum-across
4341 ;; with the vector accumulate in vsum4ubs. A vadduwm is needed.
4342 (define_expand "usadv16qi"
4343 [(use (match_operand:V4SI 0 "register_operand"))
4344 (use (match_operand:V16QI 1 "register_operand"))
4345 (use (match_operand:V16QI 2 "register_operand"))
4346 (use (match_operand:V4SI 3 "register_operand"))]
4349 rtx absd = gen_reg_rtx (V16QImode);
4350 rtx zero = gen_reg_rtx (V4SImode);
4351 rtx psum = gen_reg_rtx (V4SImode);
4353 emit_insn (gen_p9_vaduv16qi3 (absd, operands[1], operands[2]));
4354 emit_insn (gen_altivec_vspltisw (zero, const0_rtx));
4355 emit_insn (gen_altivec_vsum4ubs (psum, absd, zero));
4356 emit_insn (gen_addv4si3 (operands[0], psum, operands[3]));
;; Halfword SAD: same structure as usadv16qi, using vabsduh and the
;; signed partial sum vsum4shs, with a separate non-saturating add for
;; the accumulator.
4360 ;; Since vsum4shs is saturating and further performs signed
4361 ;; arithmetic, we can't combine the sum-across with the vector
4362 ;; accumulate in vsum4shs. A vadduwm is needed.
4363 (define_expand "usadv8hi"
4364 [(use (match_operand:V4SI 0 "register_operand"))
4365 (use (match_operand:V8HI 1 "register_operand"))
4366 (use (match_operand:V8HI 2 "register_operand"))
4367 (use (match_operand:V4SI 3 "register_operand"))]
4370 rtx absd = gen_reg_rtx (V8HImode);
4371 rtx zero = gen_reg_rtx (V4SImode);
4372 rtx psum = gen_reg_rtx (V4SImode);
4374 emit_insn (gen_p9_vaduv8hi3 (absd, operands[1], operands[2]));
4375 emit_insn (gen_altivec_vspltisw (zero, const0_rtx));
4376 emit_insn (gen_altivec_vsum4shs (psum, absd, zero));
4377 emit_insn (gen_addv4si3 (operands[0], psum, operands[3]));
;; Decimal (BCD) add/subtract.  The int iterator folds bcdadd/bcdsub
;; into one pattern; operand 3 is the preferred-sign immediate (0/1).
;; The instruction sets CR6, modeled as a CCFP clobber (see comment
;; before the test patterns below).
4381 ;; Decimal Integer operations
4382 (define_int_iterator UNSPEC_BCD_ADD_SUB [UNSPEC_BCDADD UNSPEC_BCDSUB])
4384 (define_int_attr bcd_add_sub [(UNSPEC_BCDADD "add")
4385 (UNSPEC_BCDSUB "sub")])
4387 (define_code_iterator BCD_TEST [eq lt le gt ge unordered])
4388 (define_mode_iterator VBCD [V1TI V16QI])
4390 (define_insn "bcd<bcd_add_sub>_<mode>"
4391 [(set (match_operand:VBCD 0 "register_operand" "=v")
4392 (unspec:VBCD [(match_operand:VBCD 1 "register_operand" "v")
4393 (match_operand:VBCD 2 "register_operand" "v")
4394 (match_operand:QI 3 "const_0_to_1_operand" "n")]
4395 UNSPEC_BCD_ADD_SUB))
4396 (clobber (reg:CCFP CR6_REGNO))]
4398 "bcd<bcd_add_sub>. %0,%1,%2,%3"
4399 [(set_attr "type" "vecsimple")])
;; BCD add/sub where only the CR6 condition result is used (test
;; variant) or where both the value and CR6 are used (test2 variant).
;; V2DF/CCFP is used so the "unordered" outcome (BCD NaN / overflow)
;; is representable.
4401 ;; Use a floating point type (V2DFmode) for the compare to set CR6 so that we
4402 ;; can use the unordered test for BCD nans and add/subtracts that overflow. An
4403 ;; UNORDERED test on an integer type (like V1TImode) is not defined. The type
4404 ;; probably should be one that can go in the VMX (Altivec) registers, so we
4405 ;; can't use DDmode or DFmode.
4406 (define_insn "*bcd<bcd_add_sub>_test_<mode>"
4407 [(set (reg:CCFP CR6_REGNO)
4409 (unspec:V2DF [(match_operand:VBCD 1 "register_operand" "v")
4410 (match_operand:VBCD 2 "register_operand" "v")
4411 (match_operand:QI 3 "const_0_to_1_operand" "i")]
4413 (match_operand:V2DF 4 "zero_constant" "j")))
4414 (clobber (match_scratch:VBCD 0 "=v"))]
4416 "bcd<bcd_add_sub>. %0,%1,%2,%3"
4417 [(set_attr "type" "vecsimple")])
4419 (define_insn "*bcd<bcd_add_sub>_test2_<mode>"
4420 [(set (match_operand:VBCD 0 "register_operand" "=v")
4421 (unspec:VBCD [(match_operand:VBCD 1 "register_operand" "v")
4422 (match_operand:VBCD 2 "register_operand" "v")
4423 (match_operand:QI 3 "const_0_to_1_operand" "i")]
4424 UNSPEC_BCD_ADD_SUB))
4425 (set (reg:CCFP CR6_REGNO)
4427 (unspec:V2DF [(match_dup 1)
4431 (match_operand:V2DF 4 "zero_constant" "j")))]
4433 "bcd<bcd_add_sub>. %0,%1,%2,%3"
4434 [(set_attr "type" "vecsimple")])
4436 (define_insn "vcfuged"
4437 [(set (match_operand:V2DI 0 "altivec_register_operand" "=v")
4438 (unspec:V2DI [(match_operand:V2DI 1 "altivec_register_operand" "v")
4439 (match_operand:V2DI 2 "altivec_register_operand" "v")]
4443 [(set_attr "type" "vecsimple")])
4445 (define_insn "vclzdm"
4446 [(set (match_operand:V2DI 0 "altivec_register_operand" "=v")
4447 (unspec:V2DI [(match_operand:V2DI 1 "altivec_register_operand" "v")
4448 (match_operand:V2DI 2 "altivec_register_operand" "v")]
4452 [(set_attr "type" "vecsimple")])
4454 (define_insn "vctzdm"
4455 [(set (match_operand:V2DI 0 "altivec_register_operand" "=v")
4456 (unspec:V2DI [(match_operand:V2DI 1 "altivec_register_operand" "v")
4457 (match_operand:V2DI 2 "altivec_register_operand" "v")]
4461 [(set_attr "type" "vecsimple")])
4463 (define_insn "vpdepd"
4464 [(set (match_operand:V2DI 0 "altivec_register_operand" "=v")
4465 (unspec:V2DI [(match_operand:V2DI 1 "altivec_register_operand" "v")
4466 (match_operand:V2DI 2 "altivec_register_operand" "v")]
4470 [(set_attr "type" "vecsimple")])
4472 (define_insn "vpextd"
4473 [(set (match_operand:V2DI 0 "altivec_register_operand" "=v")
4474 (unspec:V2DI [(match_operand:V2DI 1 "altivec_register_operand" "v")
4475 (match_operand:V2DI 2 "altivec_register_operand" "v")]
4479 [(set_attr "type" "vecsimple")])
4482 [(set (match_operand:DI 0 "register_operand" "=r")
4483 (unspec:DI [(match_operand:V2DI 1 "altivec_register_operand" "v")
4484 (match_operand:QI 2 "u3bit_cint_operand" "n")]
4488 [(set_attr "type" "vecsimple")])
4490 (define_insn "vclrlb"
4491 [(set (match_operand:V16QI 0 "altivec_register_operand" "=v")
4492 (unspec:V16QI [(match_operand:V16QI 1 "altivec_register_operand" "v")
4493 (match_operand:SI 2 "gpc_reg_operand" "r")]
4497 if (BYTES_BIG_ENDIAN)
4498 return "vclrlb %0,%1,%2";
4500 return "vclrrb %0,%1,%2";
4502 [(set_attr "type" "vecsimple")])
4504 (define_insn "vclrrb"
4505 [(set (match_operand:V16QI 0 "altivec_register_operand" "=v")
4506 (unspec:V16QI [(match_operand:V16QI 1 "altivec_register_operand" "v")
4507 (match_operand:SI 2 "gpc_reg_operand" "r")]
4511 if (BYTES_BIG_ENDIAN)
4512 return "vclrrb %0,%1,%2";
4514 return "vclrlb %0,%1,%2";
4516 [(set_attr "type" "vecsimple")])
4518 (define_expand "bcd<bcd_add_sub>_<code>_<mode>"
4519 [(parallel [(set (reg:CCFP CR6_REGNO)
4521 (unspec:V2DF [(match_operand:VBCD 1 "register_operand")
4522 (match_operand:VBCD 2 "register_operand")
4523 (match_operand:QI 3 "const_0_to_1_operand")]
4526 (clobber (match_scratch:VBCD 5))])
4527 (set (match_operand:SI 0 "register_operand")
4528 (BCD_TEST:SI (reg:CCFP CR6_REGNO)
4532 operands[4] = CONST0_RTX (V2DFmode);
4535 (define_insn "*bcdinvalid_<mode>"
4536 [(set (reg:CCFP CR6_REGNO)
4538 (unspec:V2DF [(match_operand:VBCD 1 "register_operand" "v")]
4540 (match_operand:V2DF 2 "zero_constant" "j")))
4541 (clobber (match_scratch:VBCD 0 "=v"))]
4543 "bcdadd. %0,%1,%1,0"
4544 [(set_attr "type" "vecsimple")])
4546 (define_expand "bcdinvalid_<mode>"
4547 [(parallel [(set (reg:CCFP CR6_REGNO)
4549 (unspec:V2DF [(match_operand:VBCD 1 "register_operand")]
4552 (clobber (match_scratch:VBCD 3))])
4553 (set (match_operand:SI 0 "register_operand")
4554 (unordered:SI (reg:CCFP CR6_REGNO)
4558 operands[2] = CONST0_RTX (V2DFmode);
4561 (define_insn "bcdshift_v16qi"
4562 [(set (match_operand:V16QI 0 "register_operand" "=v")
4563 (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
4564 (match_operand:V16QI 2 "register_operand" "v")
4565 (match_operand:QI 3 "const_0_to_1_operand" "n")]
4567 (clobber (reg:CCFP CR6_REGNO))]
4570 [(set_attr "type" "vecsimple")])
4572 (define_expand "bcdmul10_v16qi"
4573 [(set (match_operand:V16QI 0 "register_operand")
4574 (unspec:V16QI [(match_operand:V16QI 1 "register_operand")]
4576 (clobber (reg:CCFP CR6_REGNO))]
4579 rtx one = gen_reg_rtx (V16QImode);
4581 emit_insn (gen_altivec_vspltisb (one, const1_rtx));
4582 emit_insn (gen_bcdshift_v16qi (operands[0], one, operands[1], const0_rtx));
4587 (define_expand "bcddiv10_v16qi"
4588 [(set (match_operand:V16QI 0 "register_operand")
4589 (unspec:V16QI [(match_operand:V16QI 1 "register_operand")]
4591 (clobber (reg:CCFP CR6_REGNO))]
4594 rtx one = gen_reg_rtx (V16QImode);
4596 emit_insn (gen_altivec_vspltisb (one, constm1_rtx));
4597 emit_insn (gen_bcdshift_v16qi (operands[0], one, operands[1], const0_rtx));
4603 ;; Peephole2 pattern to combine a bcdadd/bcdsub that calculates the value and
4604 ;; the bcdadd/bcdsub that tests the value. The combiner won't work since
4605 ;; CR6 is a hard-coded register. Unfortunately, all of the Altivec predicate
4606 ;; support is hard-coded to use the fixed register CR6 instead of creating
4607 ;; a register class for CR6.
4610 [(parallel [(set (match_operand:V1TI 0 "register_operand")
4611 (unspec:V1TI [(match_operand:V1TI 1 "register_operand")
4612 (match_operand:V1TI 2 "register_operand")
4613 (match_operand:QI 3 "const_0_to_1_operand")]
4614 UNSPEC_BCD_ADD_SUB))
4615 (clobber (reg:CCFP CR6_REGNO))])
4616 (parallel [(set (reg:CCFP CR6_REGNO)
4618 (unspec:V2DF [(match_dup 1)
4622 (match_operand:V2DF 4 "zero_constant")))
4623 (clobber (match_operand:V1TI 5 "register_operand"))])]
4625 [(parallel [(set (match_dup 0)
4626 (unspec:V1TI [(match_dup 1)
4629 UNSPEC_BCD_ADD_SUB))
4630 (set (reg:CCFP CR6_REGNO)
4632 (unspec:V2DF [(match_dup 1)