1 ;;- Machine description for HP PA-RISC architecture for GNU C compiler
2 ;; Copyright (C) 1992, 93-98, 1999 Free Software Foundation, Inc.
3 ;; Contributed by the Center for Software Science at the University
6 ;; This file is part of GNU CC.
8 ;; GNU CC is free software; you can redistribute it and/or modify
9 ;; it under the terms of the GNU General Public License as published by
10 ;; the Free Software Foundation; either version 2, or (at your option)
13 ;; GNU CC is distributed in the hope that it will be useful,
14 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
15 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 ;; GNU General Public License for more details.
18 ;; You should have received a copy of the GNU General Public License
19 ;; along with GNU CC; see the file COPYING. If not, write to
20 ;; the Free Software Foundation, 59 Temple Place - Suite 330,
21 ;; Boston, MA 02111-1307, USA.
23 ;; This gcc Version 2 machine description is inspired by sparc.md and
26 ;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
28 ;; Insn type. Used to default other attribute values.
30 ;; type "unary" insns have one input operand (1) and one output operand (0)
31 ;; type "binary" insns have two input operands (1,2) and one output (0)
34 "move,unary,binary,shift,nullshift,compare,load,store,uncond_branch,branch,cbranch,fbranch,call,dyncall,fpload,fpstore,fpalu,fpcc,fpmulsgl,fpmuldbl,fpdivsgl,fpdivdbl,fpsqrtsgl,fpsqrtdbl,multi,milli,parallel_branch"
35 (const_string "binary"))
;; Classification of insns that can be fused/paired by the PA-specific
;; combine machinery (e.g. an FP multiply with an add/sub).  "none",
;; the default, means the insn does not participate.
;; NOTE(review): presumably consumed by pa_combine_instructions in
;; pa.c -- confirm against the backend sources.
37 (define_attr "pa_combine_type"
38 "fmpy,faddsub,uncond_branch,addmove,none"
39 (const_string "none"))
41 ;; Processor type (for scheduling, not code generation) -- this attribute
42 ;; must exactly match the processor_type enumeration in pa.h.
44 ;; FIXME: Add 800 scheduling for completeness?
46 (define_attr "cpu" "700,7100,7100LC,7200,8000" (const (symbol_ref "pa_cpu_attr")))
48 ;; Length (in # of bytes).
49 (define_attr "length" ""
50 (cond [(eq_attr "type" "load,fpload")
51 (if_then_else (match_operand 1 "symbolic_memory_operand" "")
52 (const_int 8) (const_int 4))
54 (eq_attr "type" "store,fpstore")
55 (if_then_else (match_operand 0 "symbolic_memory_operand" "")
56 (const_int 8) (const_int 4))
58 (eq_attr "type" "binary,shift,nullshift")
59 (if_then_else (match_operand 2 "arith_operand" "")
60 (const_int 4) (const_int 12))
62 (eq_attr "type" "move,unary,shift,nullshift")
63 (if_then_else (match_operand 1 "arith_operand" "")
64 (const_int 4) (const_int 8))]
;; Attribute values assumed for inline asm statements: treat them as a
;; single "multi" insn of length 4.
;; NOTE(review): 4 bytes understates a multi-instruction asm; verify
;; that nothing length-critical (branch shortening) relies on this.
68 (define_asm_attributes
69 [(set_attr "length" "4")
70 (set_attr "type" "multi")])
72 ;; Attributes for instruction and branch scheduling
74 ;; For conditional branches.
75 (define_attr "in_branch_delay" "false,true"
76 (if_then_else (and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,dyncall,multi,milli,parallel_branch")
77 (eq_attr "length" "4"))
79 (const_string "false")))
81 ;; Disallow instructions which use the FPU since they will tie up the FPU
82 ;; even if the instruction is nullified.
83 (define_attr "in_nullified_branch_delay" "false,true"
84 (if_then_else (and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,dyncall,multi,milli,fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpdivdbl,fpsqrtsgl,fpsqrtdbl,parallel_branch")
85 (eq_attr "length" "4"))
87 (const_string "false")))
89 ;; For calls and millicode calls. Allow unconditional branches in the
91 (define_attr "in_call_delay" "false,true"
92 (cond [(and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,dyncall,multi,milli,parallel_branch")
93 (eq_attr "length" "4"))
95 (eq_attr "type" "uncond_branch")
96 (if_then_else (ne (symbol_ref "TARGET_JUMP_IN_DELAY")
99 (const_string "false"))]
100 (const_string "false")))
103 ;; Call delay slot description.
;; Call delay slot description: one slot, fillable by any insn whose
;; "in_call_delay" attribute is true.  The two (nil) entries mean the
;; slot may not be annulled (no nullification on calls).
104 (define_delay (eq_attr "type" "call")
105 [(eq_attr "in_call_delay" "true") (nil) (nil)])
107 ;; millicode call delay slot description. Note it disallows delay slot
108 ;; when TARGET_PORTABLE_RUNTIME is true.
109 (define_delay (eq_attr "type" "milli")
110 [(and (eq_attr "in_call_delay" "true")
111 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME") (const_int 0)))
114 ;; Return and other similar instructions.
;; Return and other similar (unconditional-style) branches: one delay
;; slot, fillable per "in_branch_delay", never annulled.
115 (define_delay (eq_attr "type" "branch,parallel_branch")
116 [(eq_attr "in_branch_delay" "true") (nil) (nil)])
118 ;; Floating point conditional branch delay slot description and
119 (define_delay (eq_attr "type" "fbranch")
120 [(eq_attr "in_branch_delay" "true")
121 (eq_attr "in_nullified_branch_delay" "true")
124 ;; Integer conditional branch delay slot description.
125 ;; Nullification of conditional branches on the PA is dependent on the
126 ;; direction of the branch. Forward branches nullify true and
127 ;; backward branches nullify false. If the direction is unknown
128 ;; then nullification is not allowed.
;; Integer conditional branch delay slot.  The define_delay triple is
;; [eligible, annul-if-true, annul-if-false]: a slot insn may be
;; annulled on a taken branch only when the branch is forward, and
;; annulled on a not-taken branch only when the branch is backward
;; (see the nullification comment above); nullified slots must also
;; keep the FPU free ("in_nullified_branch_delay").
129 (define_delay (eq_attr "type" "cbranch")
130 [(eq_attr "in_branch_delay" "true")
131 (and (eq_attr "in_nullified_branch_delay" "true")
132 (attr_flag "forward"))
133 (and (eq_attr "in_nullified_branch_delay" "true")
134 (attr_flag "backward"))])
136 (define_delay (and (eq_attr "type" "uncond_branch")
137 (eq (symbol_ref "following_call (insn)")
139 [(eq_attr "in_branch_delay" "true") (nil) (nil)])
141 ;; Function units of the HPPA. The following data is for the 700 CPUs
142 ;; (Mustang CPU + Timex FPU aka PA-89) because that's what I have the docs for.
143 ;; Scheduling instructions for PA-83 machines according to the Snake
144 ;; constraints shouldn't hurt.
146 ;; (define_function_unit {name} {num-units} {n-users} {test}
147 ;; {ready-delay} {issue-delay} [{conflict-list}])
150 ;; (Noted only for documentation; units that take one cycle do not need to
153 ;; (define_function_unit "alu" 1 0
154 ;; (and (eq_attr "type" "unary,shift,nullshift,binary,move,address")
155 ;; (eq_attr "cpu" "700"))
159 ;; Memory. Disregarding Cache misses, the Mustang memory times are:
160 ;; load: 2, fpload: 3
161 ;; store, fpstore: 3, no D-cache operations should be scheduled.
;; PA700 memory unit.  Trailing numbers are {ready-delay} {issue-delay}:
;; loads produce their result after 2 cycles and do not block the unit;
;; stores take 3 cycles and keep the unit busy for all 3 (no other
;; D-cache operation may be scheduled meanwhile, per the comment above).
163 (define_function_unit "pa700memory" 1 0
164 (and (eq_attr "type" "load,fpload")
165 (eq_attr "cpu" "700")) 2 0)
166 (define_function_unit "pa700memory" 1 0
167 (and (eq_attr "type" "store,fpstore")
168 (eq_attr "cpu" "700")) 3 3)
170 ;; The Timex (aka 700) has two floating-point units: ALU, and MUL/DIV/SQRT.
172 ;; Instruction Time Unit Minimum Distance (unit contention)
179 ;; fmpyadd 3 ALU,MPY 2
180 ;; fmpysub 3 ALU,MPY 2
181 ;; fmpycfxt 3 ALU,MPY 2
184 ;; fdiv,sgl 10 MPY 10
185 ;; fdiv,dbl 12 MPY 12
186 ;; fsqrt,sgl 14 MPY 14
187 ;; fsqrt,dbl 18 MPY 18
;; PA700 (Timex) floating-point units, following the latency table
;; above.  Each form is {ready-delay} {issue-delay}.
;; FP ALU: compares (fpcc) ready in 4 cycles, arithmetic (fpalu) in 3;
;; both allow a new issue after 2 cycles.
189 (define_function_unit "pa700fp_alu" 1 0
190 (and (eq_attr "type" "fpcc")
191 (eq_attr "cpu" "700")) 4 2)
192 (define_function_unit "pa700fp_alu" 1 0
193 (and (eq_attr "type" "fpalu")
194 (eq_attr "cpu" "700")) 3 2)
;; MPY/DIV/SQRT unit: multiplies are pipelined (3-cycle latency, issue
;; every 2); divides and square roots occupy the unit for their entire
;; latency (issue delay == ready delay), i.e. not pipelined.
195 (define_function_unit "pa700fp_mpy" 1 0
196 (and (eq_attr "type" "fpmulsgl,fpmuldbl")
197 (eq_attr "cpu" "700")) 3 2)
198 (define_function_unit "pa700fp_mpy" 1 0
199 (and (eq_attr "type" "fpdivsgl")
200 (eq_attr "cpu" "700")) 10 10)
201 (define_function_unit "pa700fp_mpy" 1 0
202 (and (eq_attr "type" "fpdivdbl")
203 (eq_attr "cpu" "700")) 12 12)
204 (define_function_unit "pa700fp_mpy" 1 0
205 (and (eq_attr "type" "fpsqrtsgl")
206 (eq_attr "cpu" "700")) 14 14)
207 (define_function_unit "pa700fp_mpy" 1 0
208 (and (eq_attr "type" "fpsqrtdbl")
209 (eq_attr "cpu" "700")) 18 18)
211 ;; Function units for the 7100 and 7150. The 7100/7150 can dual-issue
212 ;; floating point computations with non-floating point computations (fp loads
213 ;; and stores are not fp computations).
216 ;; Memory. Disregarding Cache misses, memory loads take two cycles; stores also
217 ;; take two cycles, during which no Dcache operations should be scheduled.
218 ;; Any special cases are handled in pa_adjust_cost. The 7100, 7150 and 7100LC
219 ;; all have the same memory characteristics if one disregards cache misses.
;; 7100/7150/7100LC memory unit (identical characteristics, per the
;; comment above; cache misses handled in pa_adjust_cost, not here).
;; Loads: 2-cycle result latency, unit not blocked.  Stores: 2 cycles,
;; and the unit is busy for both (no other D-cache op may issue).
220 (define_function_unit "pa7100memory" 1 0
221 (and (eq_attr "type" "load,fpload")
222 (eq_attr "cpu" "7100,7100LC")) 2 0)
223 (define_function_unit "pa7100memory" 1 0
224 (and (eq_attr "type" "store,fpstore")
225 (eq_attr "cpu" "7100,7100LC")) 2 2)
227 ;; The 7100/7150 has three floating-point units: ALU, MUL, and DIV.
229 ;; Instruction Time Unit Minimum Distance (unit contention)
236 ;; fmpyadd 2 ALU,MPY 1
237 ;; fmpysub 2 ALU,MPY 1
238 ;; fmpycfxt 2 ALU,MPY 1
242 ;; fdiv,dbl 15 DIV 15
244 ;; fsqrt,dbl 15 DIV 15
;; 7100/7150 floating-point units (ALU, MPY, DIV), per the table above.
;; ALU and multiplier are pipelined: 2-cycle latency, new issue each
;; cycle after 1.
246 (define_function_unit "pa7100fp_alu" 1 0
247 (and (eq_attr "type" "fpcc,fpalu")
248 (eq_attr "cpu" "7100")) 2 1)
249 (define_function_unit "pa7100fp_mpy" 1 0
250 (and (eq_attr "type" "fpmulsgl,fpmuldbl")
251 (eq_attr "cpu" "7100")) 2 1)
;; Divide/sqrt unit is not pipelined: single-precision occupies it for
;; 8 cycles, double-precision for 15 (issue delay == ready delay).
252 (define_function_unit "pa7100fp_div" 1 0
253 (and (eq_attr "type" "fpdivsgl,fpsqrtsgl")
254 (eq_attr "cpu" "7100")) 8 8)
255 (define_function_unit "pa7100fp_div" 1 0
256 (and (eq_attr "type" "fpdivdbl,fpsqrtdbl")
257 (eq_attr "cpu" "7100")) 15 15)
259 ;; To encourage dual issue we define function units corresponding to
260 ;; the instructions which can be dual issued. This is a rather crude
261 ;; approximation, the "pa7100nonflop" test in particular could be refined.
262 (define_function_unit "pa7100flop" 1 1
264 (eq_attr "type" "fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
265 (eq_attr "cpu" "7100")) 1 1)
267 (define_function_unit "pa7100nonflop" 1 1
269 (eq_attr "type" "!fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
270 (eq_attr "cpu" "7100")) 1 1)
273 ;; Memory subsystem works just like 7100/7150 (except for cache miss times which
274 ;; we don't model here).
276 ;; The 7100LC has three floating-point units: ALU, MUL, and DIV.
277 ;; Note divides and sqrt flops lock the cpu until the flop is
278 ;; finished. fmpy and xmpyu (fmpyi) lock the cpu for one cycle.
279 ;; There's no way to avoid the penalty.
281 ;; Instruction Time Unit Minimum Distance (unit contention)
288 ;; fmpyadd,sgl 2 ALU,MPY 1
289 ;; fmpyadd,dbl 3 ALU,MPY 2
290 ;; fmpysub,sgl 2 ALU,MPY 1
291 ;; fmpysub,dbl 3 ALU,MPY 2
292 ;; fmpycfxt,sgl 2 ALU,MPY 1
293 ;; fmpycfxt,dbl 3 ALU,MPY 2
298 ;; fdiv,dbl 15 DIV 15
300 ;; fsqrt,dbl 15 DIV 15
;; 7100LC floating-point units; the PA7200 is modeled identically (see
;; the 7200 comments below).  Latencies follow the table above.
;; ALU: 2-cycle latency, pipelined.
302 (define_function_unit "pa7100LCfp_alu" 1 0
303 (and (eq_attr "type" "fpcc,fpalu")
304 (eq_attr "cpu" "7100LC,7200")) 2 1)
;; Multiplier: single-precision 2 cycles (issue after 1); double
;; precision 3 cycles (issue after 2).
305 (define_function_unit "pa7100LCfp_mpy" 1 0
306 (and (eq_attr "type" "fpmulsgl")
307 (eq_attr "cpu" "7100LC,7200")) 2 1)
308 (define_function_unit "pa7100LCfp_mpy" 1 0
309 (and (eq_attr "type" "fpmuldbl")
310 (eq_attr "cpu" "7100LC,7200")) 3 2)
;; Divide/sqrt: not pipelined -- per the comment above these lock the
;; CPU until done (sgl: 8 cycles, dbl: 15).
311 (define_function_unit "pa7100LCfp_div" 1 0
312 (and (eq_attr "type" "fpdivsgl,fpsqrtsgl")
313 (eq_attr "cpu" "7100LC,7200")) 8 8)
314 (define_function_unit "pa7100LCfp_div" 1 0
315 (and (eq_attr "type" "fpdivdbl,fpsqrtdbl")
316 (eq_attr "cpu" "7100LC,7200")) 15 15)
318 ;; Define the various functional units for dual-issue.
320 ;; There's only one floating point unit.
321 (define_function_unit "pa7100LCflop" 1 1
323 (eq_attr "type" "fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
324 (eq_attr "cpu" "7100LC,7200")) 1 1)
326 ;; Shifts and memory ops execute in only one of the integer ALUs
327 (define_function_unit "pa7100LCshiftmem" 1 1
329 (eq_attr "type" "shift,nullshift,load,fpload,store,fpstore")
330 (eq_attr "cpu" "7100LC,7200")) 1 1)
332 ;; We have two basic ALUs.
333 (define_function_unit "pa7100LCalu" 2 1
335 (eq_attr "type" "!fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
336 (eq_attr "cpu" "7100LC,7200")) 1 1)
338 ;; I don't have complete information on the PA7200; however, most of
339 ;; what I've heard makes it look like a 7100LC without the store-store
340 ;; penalty. So that's how we'll model it.
342 ;; Memory. Disregarding Cache misses, memory loads and stores take
343 ;; two cycles. Any special cases are handled in pa_adjust_cost.
;; PA7200 memory unit: like the 7100LC but without the store-store
;; penalty (see comment above), so loads AND stores are simply 2-cycle
;; operations that do not block the unit; special cases are handled in
;; pa_adjust_cost.
344 (define_function_unit "pa7200memory" 1 0
345 (and (eq_attr "type" "load,fpload,store,fpstore")
346 (eq_attr "cpu" "7200")) 2 0)
348 ;; I don't have detailed information on the PA7200 FP pipeline, so I
349 ;; treat it just like the 7100LC pipeline.
350 ;; Similarly for the multi-issue fake units.
353 ;; Scheduling for the PA8000 is somewhat different than scheduling for a
354 ;; traditional architecture.
356 ;; The PA8000 has a large (56) entry reorder buffer that is split between
357 ;; memory and non-memory operations.
359 ;; The PA8000 can issue two memory and two non-memory operations per cycle to
360 ;; the function units. Similarly, the PA8000 can retire two memory and two
361 ;; non-memory operations per cycle.
363 ;; Given the large reorder buffer, the processor can hide most latencies.
364 ;; According to HP, they've got the best results by scheduling for retirement
365 ;; bandwidth with limited latency scheduling for floating point operations.
366 ;; Latency for integer operations and memory references is ignored.
368 ;; We claim floating point operations have a 2 cycle latency and are
369 ;; fully pipelined, except for div and sqrt which are not pipelined.
371 ;; It is not necessary to define the shifter and integer alu units.
373 ;; These first two define_function_unit descriptions model retirement from
374 ;; the reorder buffer.
375 (define_function_unit "pa8000lsu" 2 1
377 (eq_attr "type" "load,fpload,store,fpstore")
378 (eq_attr "cpu" "8000")) 1 1)
380 (define_function_unit "pa8000alu" 2 1
382 (eq_attr "type" "!load,fpload,store,fpstore")
383 (eq_attr "cpu" "8000")) 1 1)
385 ;; Claim floating point ops have a 2 cycle latency, excluding div and
386 ;; sqrt, which are not pipelined and issue to different units.
387 (define_function_unit "pa8000fmac" 2 0
389 (eq_attr "type" "fpcc,fpalu,fpmulsgl,fpmuldbl")
390 (eq_attr "cpu" "8000")) 2 1)
392 (define_function_unit "pa8000fdiv" 2 1
394 (eq_attr "type" "fpdivsgl,fpsqrtsgl")
395 (eq_attr "cpu" "8000")) 17 17)
397 (define_function_unit "pa8000fdiv" 2 1
399 (eq_attr "type" "fpdivdbl,fpsqrtdbl")
400 (eq_attr "cpu" "8000")) 31 31)
403 ;; Compare instructions.
404 ;; This controls RTL generation and register allocation.
406 ;; We generate RTL for comparisons and branches by having the cmpxx
407 ;; patterns store away the operands. Then, the scc and bcc patterns
408 ;; emit RTL for both the compare and the branch.
411 (define_expand "cmpsi"
413 (compare:CC (match_operand:SI 0 "reg_or_0_operand" "")
414 (match_operand:SI 1 "arith5_operand" "")))]
418 hppa_compare_op0 = operands[0];
419 hppa_compare_op1 = operands[1];
420 hppa_branch_type = CMP_SI;
424 (define_expand "cmpsf"
426 (compare:CCFP (match_operand:SF 0 "reg_or_0_operand" "")
427 (match_operand:SF 1 "reg_or_0_operand" "")))]
428 "! TARGET_SOFT_FLOAT"
431 hppa_compare_op0 = operands[0];
432 hppa_compare_op1 = operands[1];
433 hppa_branch_type = CMP_SF;
437 (define_expand "cmpdf"
439 (compare:CCFP (match_operand:DF 0 "reg_or_0_operand" "")
440 (match_operand:DF 1 "reg_or_0_operand" "")))]
441 "! TARGET_SOFT_FLOAT"
444 hppa_compare_op0 = operands[0];
445 hppa_compare_op1 = operands[1];
446 hppa_branch_type = CMP_DF;
452 (match_operator:CCFP 2 "comparison_operator"
453 [(match_operand:SF 0 "reg_or_0_operand" "fG")
454 (match_operand:SF 1 "reg_or_0_operand" "fG")]))]
455 "! TARGET_SOFT_FLOAT"
456 "fcmp,sgl,%Y2 %f0,%f1"
457 [(set_attr "length" "4")
458 (set_attr "type" "fpcc")])
462 (match_operator:CCFP 2 "comparison_operator"
463 [(match_operand:DF 0 "reg_or_0_operand" "fG")
464 (match_operand:DF 1 "reg_or_0_operand" "fG")]))]
465 "! TARGET_SOFT_FLOAT"
466 "fcmp,dbl,%Y2 %f0,%f1"
467 [(set_attr "length" "4")
468 (set_attr "type" "fpcc")])
473 [(set (match_operand:SI 0 "register_operand" "")
479 /* fp scc patterns rarely match, and are not a win on the PA. */
480 if (hppa_branch_type != CMP_SI)
482 /* set up operands from compare. */
483 operands[1] = hppa_compare_op0;
484 operands[2] = hppa_compare_op1;
485 /* fall through and generate default code */
489 [(set (match_operand:SI 0 "register_operand" "")
495 /* fp scc patterns rarely match, and are not a win on the PA. */
496 if (hppa_branch_type != CMP_SI)
498 operands[1] = hppa_compare_op0;
499 operands[2] = hppa_compare_op1;
503 [(set (match_operand:SI 0 "register_operand" "")
509 /* fp scc patterns rarely match, and are not a win on the PA. */
510 if (hppa_branch_type != CMP_SI)
512 operands[1] = hppa_compare_op0;
513 operands[2] = hppa_compare_op1;
517 [(set (match_operand:SI 0 "register_operand" "")
523 /* fp scc patterns rarely match, and are not a win on the PA. */
524 if (hppa_branch_type != CMP_SI)
526 operands[1] = hppa_compare_op0;
527 operands[2] = hppa_compare_op1;
531 [(set (match_operand:SI 0 "register_operand" "")
537 /* fp scc patterns rarely match, and are not a win on the PA. */
538 if (hppa_branch_type != CMP_SI)
540 operands[1] = hppa_compare_op0;
541 operands[2] = hppa_compare_op1;
545 [(set (match_operand:SI 0 "register_operand" "")
551 /* fp scc patterns rarely match, and are not a win on the PA. */
552 if (hppa_branch_type != CMP_SI)
554 operands[1] = hppa_compare_op0;
555 operands[2] = hppa_compare_op1;
558 (define_expand "sltu"
559 [(set (match_operand:SI 0 "register_operand" "")
560 (ltu:SI (match_dup 1)
565 if (hppa_branch_type != CMP_SI)
567 operands[1] = hppa_compare_op0;
568 operands[2] = hppa_compare_op1;
571 (define_expand "sgtu"
572 [(set (match_operand:SI 0 "register_operand" "")
573 (gtu:SI (match_dup 1)
578 if (hppa_branch_type != CMP_SI)
580 operands[1] = hppa_compare_op0;
581 operands[2] = hppa_compare_op1;
584 (define_expand "sleu"
585 [(set (match_operand:SI 0 "register_operand" "")
586 (leu:SI (match_dup 1)
591 if (hppa_branch_type != CMP_SI)
593 operands[1] = hppa_compare_op0;
594 operands[2] = hppa_compare_op1;
597 (define_expand "sgeu"
598 [(set (match_operand:SI 0 "register_operand" "")
599 (geu:SI (match_dup 1)
604 if (hppa_branch_type != CMP_SI)
606 operands[1] = hppa_compare_op0;
607 operands[2] = hppa_compare_op1;
610 ;; Instruction canonicalization puts immediate operands second, which
611 ;; is the reverse of what we want.
614 [(set (match_operand:SI 0 "register_operand" "=r")
615 (match_operator:SI 3 "comparison_operator"
616 [(match_operand:SI 1 "register_operand" "r")
617 (match_operand:SI 2 "arith11_operand" "rI")]))]
619 "com%I2clr,%B3 %2,%1,%0\;ldi 1,%0"
620 [(set_attr "type" "binary")
621 (set_attr "length" "8")])
623 (define_insn "iorscc"
624 [(set (match_operand:SI 0 "register_operand" "=r")
625 (ior:SI (match_operator:SI 3 "comparison_operator"
626 [(match_operand:SI 1 "register_operand" "r")
627 (match_operand:SI 2 "arith11_operand" "rI")])
628 (match_operator:SI 6 "comparison_operator"
629 [(match_operand:SI 4 "register_operand" "r")
630 (match_operand:SI 5 "arith11_operand" "rI")])))]
632 "com%I2clr,%S3 %2,%1,%%r0\;com%I5clr,%B6 %5,%4,%0\;ldi 1,%0"
633 [(set_attr "type" "binary")
634 (set_attr "length" "12")])
636 ;; Combiner patterns for common operations performed with the output
637 ;; from an scc insn (negscc and incscc).
638 (define_insn "negscc"
639 [(set (match_operand:SI 0 "register_operand" "=r")
640 (neg:SI (match_operator:SI 3 "comparison_operator"
641 [(match_operand:SI 1 "register_operand" "r")
642 (match_operand:SI 2 "arith11_operand" "rI")])))]
644 "com%I2clr,%B3 %2,%1,%0\;ldi -1,%0"
645 [(set_attr "type" "binary")
646 (set_attr "length" "8")])
648 ;; Patterns for adding/subtracting the result of a boolean expression from
649 ;; a register. First we have special patterns that make use of the carry
650 ;; bit, and output only two instructions. For the cases we can't in
651 ;; general do in two instructions, the incscc pattern at the end outputs
652 ;; two or three instructions.
655 [(set (match_operand:SI 0 "register_operand" "=r")
656 (plus:SI (leu:SI (match_operand:SI 2 "register_operand" "r")
657 (match_operand:SI 3 "arith11_operand" "rI"))
658 (match_operand:SI 1 "register_operand" "r")))]
660 "sub%I3 %3,%2,%%r0\;addc %%r0,%1,%0"
661 [(set_attr "type" "binary")
662 (set_attr "length" "8")])
664 ; This need only accept registers for op3, since canonicalization
665 ; replaces geu with gtu when op3 is an integer.
667 [(set (match_operand:SI 0 "register_operand" "=r")
668 (plus:SI (geu:SI (match_operand:SI 2 "register_operand" "r")
669 (match_operand:SI 3 "register_operand" "r"))
670 (match_operand:SI 1 "register_operand" "r")))]
672 "sub %2,%3,%%r0\;addc %%r0,%1,%0"
673 [(set_attr "type" "binary")
674 (set_attr "length" "8")])
676 ; Match only integers for op3 here. This is used as canonical form of the
677 ; geu pattern when op3 is an integer. Don't match registers since we can't
678 ; make better code than the general incscc pattern.
680 [(set (match_operand:SI 0 "register_operand" "=r")
681 (plus:SI (gtu:SI (match_operand:SI 2 "register_operand" "r")
682 (match_operand:SI 3 "int11_operand" "I"))
683 (match_operand:SI 1 "register_operand" "r")))]
685 "addi %k3,%2,%%r0\;addc %%r0,%1,%0"
686 [(set_attr "type" "binary")
687 (set_attr "length" "8")])
689 (define_insn "incscc"
690 [(set (match_operand:SI 0 "register_operand" "=r,r")
691 (plus:SI (match_operator:SI 4 "comparison_operator"
692 [(match_operand:SI 2 "register_operand" "r,r")
693 (match_operand:SI 3 "arith11_operand" "rI,rI")])
694 (match_operand:SI 1 "register_operand" "0,?r")))]
697 com%I3clr,%B4 %3,%2,%%r0\;addi 1,%0,%0
698 com%I3clr,%B4 %3,%2,%%r0\;addi,tr 1,%1,%0\;copy %1,%0"
699 [(set_attr "type" "binary,binary")
700 (set_attr "length" "8,12")])
703 [(set (match_operand:SI 0 "register_operand" "=r")
704 (minus:SI (match_operand:SI 1 "register_operand" "r")
705 (gtu:SI (match_operand:SI 2 "register_operand" "r")
706 (match_operand:SI 3 "arith11_operand" "rI"))))]
708 "sub%I3 %3,%2,%%r0\;subb %1,0,%0"
709 [(set_attr "type" "binary")
710 (set_attr "length" "8")])
713 [(set (match_operand:SI 0 "register_operand" "=r")
714 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
715 (gtu:SI (match_operand:SI 2 "register_operand" "r")
716 (match_operand:SI 3 "arith11_operand" "rI")))
717 (match_operand:SI 4 "register_operand" "r")))]
719 "sub%I3 %3,%2,%%r0\;subb %1,%4,%0"
720 [(set_attr "type" "binary")
721 (set_attr "length" "8")])
723 ; This need only accept registers for op3, since canonicalization
724 ; replaces ltu with leu when op3 is an integer.
726 [(set (match_operand:SI 0 "register_operand" "=r")
727 (minus:SI (match_operand:SI 1 "register_operand" "r")
728 (ltu:SI (match_operand:SI 2 "register_operand" "r")
729 (match_operand:SI 3 "register_operand" "r"))))]
731 "sub %2,%3,%%r0\;subb %1,0,%0"
732 [(set_attr "type" "binary")
733 (set_attr "length" "8")])
736 [(set (match_operand:SI 0 "register_operand" "=r")
737 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
738 (ltu:SI (match_operand:SI 2 "register_operand" "r")
739 (match_operand:SI 3 "register_operand" "r")))
740 (match_operand:SI 4 "register_operand" "r")))]
742 "sub %2,%3,%%r0\;subb %1,%4,%0"
743 [(set_attr "type" "binary")
744 (set_attr "length" "8")])
746 ; Match only integers for op3 here. This is used as canonical form of the
747 ; ltu pattern when op3 is an integer. Don't match registers since we can't
748 ; make better code than the general incscc pattern.
750 [(set (match_operand:SI 0 "register_operand" "=r")
751 (minus:SI (match_operand:SI 1 "register_operand" "r")
752 (leu:SI (match_operand:SI 2 "register_operand" "r")
753 (match_operand:SI 3 "int11_operand" "I"))))]
755 "addi %k3,%2,%%r0\;subb %1,0,%0"
756 [(set_attr "type" "binary")
757 (set_attr "length" "8")])
760 [(set (match_operand:SI 0 "register_operand" "=r")
761 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
762 (leu:SI (match_operand:SI 2 "register_operand" "r")
763 (match_operand:SI 3 "int11_operand" "I")))
764 (match_operand:SI 4 "register_operand" "r")))]
766 "addi %k3,%2,%%r0\;subb %1,%4,%0"
767 [(set_attr "type" "binary")
768 (set_attr "length" "8")])
770 (define_insn "decscc"
771 [(set (match_operand:SI 0 "register_operand" "=r,r")
772 (minus:SI (match_operand:SI 1 "register_operand" "0,?r")
773 (match_operator:SI 4 "comparison_operator"
774 [(match_operand:SI 2 "register_operand" "r,r")
775 (match_operand:SI 3 "arith11_operand" "rI,rI")])))]
778 com%I3clr,%B4 %3,%2,%%r0\;addi -1,%0,%0
779 com%I3clr,%B4 %3,%2,%%r0\;addi,tr -1,%1,%0\;copy %1,%0"
780 [(set_attr "type" "binary,binary")
781 (set_attr "length" "8,12")])
783 ; Patterns for max and min. (There is no need for an earlyclobber in the
784 ; last alternative since the middle alternative will match if op0 == op1.)
786 (define_insn "sminsi3"
787 [(set (match_operand:SI 0 "register_operand" "=r,r,r")
788 (smin:SI (match_operand:SI 1 "register_operand" "%0,0,r")
789 (match_operand:SI 2 "arith11_operand" "r,I,M")))]
792 comclr,> %2,%0,%%r0\;copy %2,%0
793 comiclr,> %2,%0,%%r0\;ldi %2,%0
794 comclr,> %1,%r2,%0\;copy %1,%0"
795 [(set_attr "type" "multi,multi,multi")
796 (set_attr "length" "8,8,8")])
798 (define_insn "uminsi3"
799 [(set (match_operand:SI 0 "register_operand" "=r,r")
800 (umin:SI (match_operand:SI 1 "register_operand" "%0,0")
801 (match_operand:SI 2 "arith11_operand" "r,I")))]
804 comclr,>> %2,%0,%%r0\;copy %2,%0
805 comiclr,>> %2,%0,%%r0\;ldi %2,%0"
806 [(set_attr "type" "multi,multi")
807 (set_attr "length" "8,8")])
809 (define_insn "smaxsi3"
810 [(set (match_operand:SI 0 "register_operand" "=r,r,r")
811 (smax:SI (match_operand:SI 1 "register_operand" "%0,0,r")
812 (match_operand:SI 2 "arith11_operand" "r,I,M")))]
815 comclr,< %2,%0,%%r0\;copy %2,%0
816 comiclr,< %2,%0,%%r0\;ldi %2,%0
817 comclr,< %1,%r2,%0\;copy %1,%0"
818 [(set_attr "type" "multi,multi,multi")
819 (set_attr "length" "8,8,8")])
821 (define_insn "umaxsi3"
822 [(set (match_operand:SI 0 "register_operand" "=r,r")
823 (umax:SI (match_operand:SI 1 "register_operand" "%0,0")
824 (match_operand:SI 2 "arith11_operand" "r,I")))]
827 comclr,<< %2,%0,%%r0\;copy %2,%0
828 comiclr,<< %2,%0,%%r0\;ldi %2,%0"
829 [(set_attr "type" "multi,multi")
830 (set_attr "length" "8,8")])
832 (define_insn "abssi2"
833 [(set (match_operand:SI 0 "register_operand" "=r")
834 (abs:SI (match_operand:SI 1 "register_operand" "r")))]
836 "or,>= %%r0,%1,%0\;subi 0,%0,%0"
837 [(set_attr "type" "multi")
838 (set_attr "length" "8")])
840 ;;; Experimental conditional move patterns
842 (define_expand "movsicc"
843 [(set (match_operand:SI 0 "register_operand" "")
845 (match_operator 1 "comparison_operator"
848 (match_operand:SI 2 "reg_or_cint_move_operand" "")
849 (match_operand:SI 3 "reg_or_cint_move_operand" "")))]
853 enum rtx_code code = GET_CODE (operands[1]);
855 if (hppa_branch_type != CMP_SI)
858 /* operands[1] is currently the result of compare_from_rtx. We want to
859 emit a compare of the original operands. */
860 operands[1] = gen_rtx_fmt_ee (code, SImode, hppa_compare_op0, hppa_compare_op1);
861 operands[4] = hppa_compare_op0;
862 operands[5] = hppa_compare_op1;
865 ; We need the first constraint alternative in order to avoid
866 ; earlyclobbers on all other alternatives.
868 [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r")
870 (match_operator 5 "comparison_operator"
871 [(match_operand:SI 3 "register_operand" "r,r,r,r,r")
872 (match_operand:SI 4 "arith11_operand" "rI,rI,rI,rI,rI")])
873 (match_operand:SI 1 "reg_or_cint_move_operand" "0,r,J,N,K")
877 com%I4clr,%S5 %4,%3,%%r0\;ldi 0,%0
878 com%I4clr,%B5 %4,%3,%0\;copy %1,%0
879 com%I4clr,%B5 %4,%3,%0\;ldi %1,%0
880 com%I4clr,%B5 %4,%3,%0\;ldil L'%1,%0
881 com%I4clr,%B5 %4,%3,%0\;zdepi %Z1,%0"
882 [(set_attr "type" "multi,multi,multi,multi,nullshift")
883 (set_attr "length" "8,8,8,8,8")])
886 [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r,r,r,r")
888 (match_operator 5 "comparison_operator"
889 [(match_operand:SI 3 "register_operand" "r,r,r,r,r,r,r,r")
890 (match_operand:SI 4 "arith11_operand" "rI,rI,rI,rI,rI,rI,rI,rI")])
891 (match_operand:SI 1 "reg_or_cint_move_operand" "0,0,0,0,r,J,N,K")
892 (match_operand:SI 2 "reg_or_cint_move_operand" "r,J,N,K,0,0,0,0")))]
895 com%I4clr,%S5 %4,%3,%%r0\;copy %2,%0
896 com%I4clr,%S5 %4,%3,%%r0\;ldi %2,%0
897 com%I4clr,%S5 %4,%3,%%r0\;ldil L'%2,%0
898 com%I4clr,%S5 %4,%3,%%r0\;zdepi %Z2,%0
899 com%I4clr,%B5 %4,%3,%%r0\;copy %1,%0
900 com%I4clr,%B5 %4,%3,%%r0\;ldi %1,%0
901 com%I4clr,%B5 %4,%3,%%r0\;ldil L'%1,%0
902 com%I4clr,%B5 %4,%3,%%r0\;zdepi %Z1,%0"
903 [(set_attr "type" "multi,multi,multi,nullshift,multi,multi,multi,nullshift")
904 (set_attr "length" "8,8,8,8,8,8,8,8")])
906 ;; Conditional Branches
910 (if_then_else (eq (match_dup 1) (match_dup 2))
911 (label_ref (match_operand 0 "" ""))
916 if (hppa_branch_type != CMP_SI)
918 emit_insn (gen_cmp_fp (EQ, hppa_compare_op0, hppa_compare_op1));
919 emit_bcond_fp (NE, operands[0]);
922 /* set up operands from compare. */
923 operands[1] = hppa_compare_op0;
924 operands[2] = hppa_compare_op1;
925 /* fall through and generate default code */
930 (if_then_else (ne (match_dup 1) (match_dup 2))
931 (label_ref (match_operand 0 "" ""))
936 if (hppa_branch_type != CMP_SI)
938 emit_insn (gen_cmp_fp (NE, hppa_compare_op0, hppa_compare_op1));
939 emit_bcond_fp (NE, operands[0]);
942 operands[1] = hppa_compare_op0;
943 operands[2] = hppa_compare_op1;
948 (if_then_else (gt (match_dup 1) (match_dup 2))
949 (label_ref (match_operand 0 "" ""))
954 if (hppa_branch_type != CMP_SI)
956 emit_insn (gen_cmp_fp (GT, hppa_compare_op0, hppa_compare_op1));
957 emit_bcond_fp (NE, operands[0]);
960 operands[1] = hppa_compare_op0;
961 operands[2] = hppa_compare_op1;
966 (if_then_else (lt (match_dup 1) (match_dup 2))
967 (label_ref (match_operand 0 "" ""))
972 if (hppa_branch_type != CMP_SI)
974 emit_insn (gen_cmp_fp (LT, hppa_compare_op0, hppa_compare_op1));
975 emit_bcond_fp (NE, operands[0]);
978 operands[1] = hppa_compare_op0;
979 operands[2] = hppa_compare_op1;
984 (if_then_else (ge (match_dup 1) (match_dup 2))
985 (label_ref (match_operand 0 "" ""))
990 if (hppa_branch_type != CMP_SI)
992 emit_insn (gen_cmp_fp (GE, hppa_compare_op0, hppa_compare_op1));
993 emit_bcond_fp (NE, operands[0]);
996 operands[1] = hppa_compare_op0;
997 operands[2] = hppa_compare_op1;
1000 (define_expand "ble"
1002 (if_then_else (le (match_dup 1) (match_dup 2))
1003 (label_ref (match_operand 0 "" ""))
1008 if (hppa_branch_type != CMP_SI)
1010 emit_insn (gen_cmp_fp (LE, hppa_compare_op0, hppa_compare_op1));
1011 emit_bcond_fp (NE, operands[0]);
1014 operands[1] = hppa_compare_op0;
1015 operands[2] = hppa_compare_op1;
1018 (define_expand "bgtu"
1020 (if_then_else (gtu (match_dup 1) (match_dup 2))
1021 (label_ref (match_operand 0 "" ""))
1026 if (hppa_branch_type != CMP_SI)
1028 operands[1] = hppa_compare_op0;
1029 operands[2] = hppa_compare_op1;
1032 (define_expand "bltu"
1034 (if_then_else (ltu (match_dup 1) (match_dup 2))
1035 (label_ref (match_operand 0 "" ""))
1040 if (hppa_branch_type != CMP_SI)
1042 operands[1] = hppa_compare_op0;
1043 operands[2] = hppa_compare_op1;
1046 (define_expand "bgeu"
1048 (if_then_else (geu (match_dup 1) (match_dup 2))
1049 (label_ref (match_operand 0 "" ""))
1054 if (hppa_branch_type != CMP_SI)
1056 operands[1] = hppa_compare_op0;
1057 operands[2] = hppa_compare_op1;
1060 (define_expand "bleu"
1062 (if_then_else (leu (match_dup 1) (match_dup 2))
1063 (label_ref (match_operand 0 "" ""))
1068 if (hppa_branch_type != CMP_SI)
1070 operands[1] = hppa_compare_op0;
1071 operands[2] = hppa_compare_op1;
1074 ;; Match the branch patterns.
1077 ;; Note a long backward conditional branch with an annulled delay slot
1078 ;; has a length of 12.
1082 (match_operator 3 "comparison_operator"
1083 [(match_operand:SI 1 "reg_or_0_operand" "rM")
1084 (match_operand:SI 2 "arith5_operand" "rL")])
1085 (label_ref (match_operand 0 "" ""))
1090 return output_cbranch (operands, INSN_ANNULLED_BRANCH_P (insn),
1091 get_attr_length (insn), 0, insn);
1093 [(set_attr "type" "cbranch")
1094 (set (attr "length")
1095 (cond [(lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1098 (lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1101 (eq (symbol_ref "flag_pic") (const_int 0))
1105 ;; Match the negated branch.
1110 (match_operator 3 "comparison_operator"
1111 [(match_operand:SI 1 "reg_or_0_operand" "rM")
1112 (match_operand:SI 2 "arith5_operand" "rL")])
1114 (label_ref (match_operand 0 "" ""))))]
1118 return output_cbranch (operands, INSN_ANNULLED_BRANCH_P (insn),
1119 get_attr_length (insn), 1, insn);
1121 [(set_attr "type" "cbranch")
1122 (set (attr "length")
1123 (cond [(lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1126 (lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1129 (eq (symbol_ref "flag_pic") (const_int 0))
1133 ;; Branch on Bit patterns.
1137 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1139 (match_operand:SI 1 "uint5_operand" ""))
1141 (label_ref (match_operand 2 "" ""))
1146 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1147 get_attr_length (insn), 0, insn, 0);
1149 [(set_attr "type" "cbranch")
1150 (set (attr "length")
1151 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1159 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1161 (match_operand:SI 1 "uint5_operand" ""))
1164 (label_ref (match_operand 2 "" ""))))]
1168 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1169 get_attr_length (insn), 1, insn, 0);
1171 [(set_attr "type" "cbranch")
1172 (set (attr "length")
1173 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1181 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1183 (match_operand:SI 1 "uint5_operand" ""))
1185 (label_ref (match_operand 2 "" ""))
1190 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1191 get_attr_length (insn), 0, insn, 1);
1193 [(set_attr "type" "cbranch")
1194 (set (attr "length")
1195 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1203 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1205 (match_operand:SI 1 "uint5_operand" ""))
1208 (label_ref (match_operand 2 "" ""))))]
1212 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1213 get_attr_length (insn), 1, insn, 1);
1215 [(set_attr "type" "cbranch")
1216 (set (attr "length")
1217 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1222 ;; Branch on Variable Bit patterns.
1226 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1228 (match_operand:SI 1 "register_operand" "q"))
1230 (label_ref (match_operand 2 "" ""))
1235 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1236 get_attr_length (insn), 0, insn, 0);
1238 [(set_attr "type" "cbranch")
1239 (set (attr "length")
1240 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1248 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1250 (match_operand:SI 1 "register_operand" "q"))
1253 (label_ref (match_operand 2 "" ""))))]
1257 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1258 get_attr_length (insn), 1, insn, 0);
1260 [(set_attr "type" "cbranch")
1261 (set (attr "length")
1262 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1270 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1272 (match_operand:SI 1 "register_operand" "q"))
1274 (label_ref (match_operand 2 "" ""))
1279 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1280 get_attr_length (insn), 0, insn, 1);
1282 [(set_attr "type" "cbranch")
1283 (set (attr "length")
1284 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1292 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1294 (match_operand:SI 1 "register_operand" "q"))
1297 (label_ref (match_operand 2 "" ""))))]
1301 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1302 get_attr_length (insn), 1, insn, 1);
1304 [(set_attr "type" "cbranch")
1305 (set (attr "length")
1306 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1311 ;; Floating point branches
1313 [(set (pc) (if_then_else (ne (reg:CCFP 0) (const_int 0))
1314 (label_ref (match_operand 0 "" ""))
1316 "! TARGET_SOFT_FLOAT"
1319 if (INSN_ANNULLED_BRANCH_P (insn))
1320 return \"ftest\;b,n %0\";
1322 return \"ftest\;b%* %0\";
1324 [(set_attr "type" "fbranch")
1325 (set_attr "length" "8")])
1328 [(set (pc) (if_then_else (ne (reg:CCFP 0) (const_int 0))
1330 (label_ref (match_operand 0 "" ""))))]
1331 "! TARGET_SOFT_FLOAT"
1334 if (INSN_ANNULLED_BRANCH_P (insn))
1335 return \"ftest\;add,tr %%r0,%%r0,%%r0\;b,n %0\";
1337 return \"ftest\;add,tr %%r0,%%r0,%%r0\;b%* %0\";
1339 [(set_attr "type" "fbranch")
1340 (set_attr "length" "12")])
1342 ;; Move instructions
1344 (define_expand "movsi"
1345 [(set (match_operand:SI 0 "general_operand" "")
1346 (match_operand:SI 1 "general_operand" ""))]
1350 if (emit_move_sequence (operands, SImode, 0))
1354 ;; Reloading an SImode or DImode value requires a scratch register if
1355 ;; going into or out of floating point registers.
1357 (define_expand "reload_insi"
1358 [(set (match_operand:SI 0 "register_operand" "=Z")
1359 (match_operand:SI 1 "non_hard_reg_operand" ""))
1360 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
1364 if (emit_move_sequence (operands, SImode, operands[2]))
1367 /* We don't want the clobber emitted, so handle this ourselves. */
1368 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
1372 (define_expand "reload_outsi"
1373 [(set (match_operand:SI 0 "non_hard_reg_operand" "")
1374 (match_operand:SI 1 "register_operand" "Z"))
1375 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
1379 if (emit_move_sequence (operands, SImode, operands[2]))
1382 /* We don't want the clobber emitted, so handle this ourselves. */
1383 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
1387 ;;; pic symbol references
1390 [(set (match_operand:SI 0 "register_operand" "=r")
1391 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
1392 (match_operand:SI 2 "symbolic_operand" ""))))]
1393 "flag_pic && operands[1] == pic_offset_table_rtx"
1395 [(set_attr "type" "load")
1396 (set_attr "length" "4")])
1399 [(set (match_operand:SI 0 "reg_or_nonsymb_mem_operand"
1400 "=r,r,r,r,r,Q,*q,!f,f,*TR")
1401 (match_operand:SI 1 "move_operand"
1402 "r,J,N,K,RQ,rM,rM,!fM,*RT,f"))]
1403 "(register_operand (operands[0], SImode)
1404 || reg_or_0_operand (operands[1], SImode))
1405 && ! TARGET_SOFT_FLOAT"
1417 [(set_attr "type" "move,move,move,shift,load,store,move,fpalu,fpload,fpstore")
1418 (set_attr "pa_combine_type" "addmove")
1419 (set_attr "length" "4,4,4,4,4,4,4,4,4,4")])
1422 [(set (match_operand:SI 0 "reg_or_nonsymb_mem_operand"
1424 (match_operand:SI 1 "move_operand"
1425 "r,J,N,K,RQ,rM,rM"))]
1426 "(register_operand (operands[0], SImode)
1427 || reg_or_0_operand (operands[1], SImode))
1428 && TARGET_SOFT_FLOAT"
1437 [(set_attr "type" "move,move,move,move,load,store,move")
1438 (set_attr "pa_combine_type" "addmove")
1439 (set_attr "length" "4,4,4,4,4,4,4")])
1442 [(set (match_operand:SI 0 "register_operand" "=r")
1443 (mem:SI (plus:SI (match_operand:SI 1 "basereg_operand" "r")
1444 (match_operand:SI 2 "register_operand" "r"))))]
1445 "! TARGET_DISABLE_INDEXING"
1448 /* Reload can create backwards (relative to cse) unscaled index
1449 address modes when eliminating registers and possibly for
1450 pseudos that don't get hard registers. Deal with it. */
1451 if (operands[2] == hard_frame_pointer_rtx
1452 || operands[2] == stack_pointer_rtx)
1453 return \"ldwx %1(%2),%0\";
1455 return \"ldwx %2(%1),%0\";
1457 [(set_attr "type" "load")
1458 (set_attr "length" "4")])
1461 [(set (match_operand:SI 0 "register_operand" "=r")
1462 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
1463 (match_operand:SI 2 "basereg_operand" "r"))))]
1464 "! TARGET_DISABLE_INDEXING"
1467 /* Reload can create backwards (relative to cse) unscaled index
1468 address modes when eliminating registers and possibly for
1469 pseudos that don't get hard registers. Deal with it. */
1470 if (operands[1] == hard_frame_pointer_rtx
1471 || operands[1] == stack_pointer_rtx)
1472 return \"ldwx %2(%1),%0\";
1474 return \"ldwx %1(%2),%0\";
1476 [(set_attr "type" "load")
1477 (set_attr "length" "4")])
1479 ;; Load or store with base-register modification.
1481 (define_expand "pre_load"
1482 [(parallel [(set (match_operand:SI 0 "register_operand" "")
1483 (mem (plus (match_operand 1 "register_operand" "")
1484 (match_operand 2 "pre_cint_operand" ""))))
1486 (plus (match_dup 1) (match_dup 2)))])]
1490 emit_insn (gen_pre_ldw (operands[0], operands[1], operands[2]));
1494 (define_insn "pre_ldw"
1495 [(set (match_operand:SI 0 "register_operand" "=r")
1496 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "+r")
1497 (match_operand:SI 2 "pre_cint_operand" ""))))
1499 (plus:SI (match_dup 1) (match_dup 2)))]
1503 if (INTVAL (operands[2]) < 0)
1504 return \"ldwm %2(%1),%0\";
1505 return \"ldws,mb %2(%1),%0\";
1507 [(set_attr "type" "load")
1508 (set_attr "length" "4")])
1511 [(set (mem:SI (plus:SI (match_operand:SI 0 "register_operand" "+r")
1512 (match_operand:SI 1 "pre_cint_operand" "")))
1513 (match_operand:SI 2 "reg_or_0_operand" "rM"))
1515 (plus:SI (match_dup 0) (match_dup 1)))]
1519 if (INTVAL (operands[1]) < 0)
1520 return \"stwm %r2,%1(%0)\";
1521 return \"stws,mb %r2,%1(%0)\";
1523 [(set_attr "type" "store")
1524 (set_attr "length" "4")])
1527 [(set (match_operand:SI 0 "register_operand" "=r")
1528 (mem:SI (match_operand:SI 1 "register_operand" "+r")))
1530 (plus:SI (match_dup 1)
1531 (match_operand:SI 2 "post_cint_operand" "")))]
1535 if (INTVAL (operands[2]) > 0)
1536 return \"ldwm %2(%1),%0\";
1537 return \"ldws,ma %2(%1),%0\";
1539 [(set_attr "type" "load")
1540 (set_attr "length" "4")])
1542 (define_expand "post_store"
1543 [(parallel [(set (mem (match_operand 0 "register_operand" ""))
1544 (match_operand 1 "reg_or_0_operand" ""))
1547 (match_operand 2 "post_cint_operand" "")))])]
1551 emit_insn (gen_post_stw (operands[0], operands[1], operands[2]));
1555 (define_insn "post_stw"
1556 [(set (mem:SI (match_operand:SI 0 "register_operand" "+r"))
1557 (match_operand:SI 1 "reg_or_0_operand" "rM"))
1559 (plus:SI (match_dup 0)
1560 (match_operand:SI 2 "post_cint_operand" "")))]
1564 if (INTVAL (operands[2]) > 0)
1565 return \"stwm %r1,%2(%0)\";
1566 return \"stws,ma %r1,%2(%0)\";
1568 [(set_attr "type" "store")
1569 (set_attr "length" "4")])
1571 ;; For loading the address of a label while generating PIC code.
1572 ;; Note since this pattern can be created at reload time (via movsi), all
1573 ;; the same rules for movsi apply here. (no new pseudos, no temporaries).
1575 [(set (match_operand 0 "register_operand" "=a")
1576 (match_operand 1 "pic_label_operand" ""))]
1580 rtx label_rtx = gen_label_rtx ();
1582 extern FILE *asm_out_file;
1584 xoperands[0] = operands[0];
1585 xoperands[1] = operands[1];
1586 xoperands[2] = label_rtx;
1587 output_asm_insn (\"bl .+8,%0\", xoperands);
1588 output_asm_insn (\"depi 0,31,2,%0\", xoperands);
1589 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
1590 CODE_LABEL_NUMBER (label_rtx));
1592 /* If we're trying to load the address of a label that happens to be
1593 close, then we can use a shorter sequence. */
1594 if (GET_CODE (operands[1]) == LABEL_REF
1596 && abs (insn_addresses[INSN_UID (XEXP (operands[1], 0))]
1597 - insn_addresses[INSN_UID (insn)]) < 8100)
1599 /* Prefixing with R% here is wrong; it extracts just 11 bits and is
1600 always non-negative. */
1601 output_asm_insn (\"ldo %1-%2(%0),%0\", xoperands);
1605 output_asm_insn (\"addil L%%%1-%2,%0\", xoperands);
1606 output_asm_insn (\"ldo R%%%1-%2(%0),%0\", xoperands);
1610 [(set_attr "type" "multi")
1611 (set_attr "length" "16")]) ; 12 or 16
1614 [(set (match_operand:SI 0 "register_operand" "=a")
1615 (plus:SI (match_operand:SI 1 "register_operand" "r")
1616 (high:SI (match_operand 2 "" ""))))]
1617 "symbolic_operand (operands[2], Pmode)
1618 && ! function_label_operand (operands[2])
1621 [(set_attr "type" "binary")
1622 (set_attr "length" "4")])
1624 ; We need this to make sure CSE doesn't simplify a memory load with a
1625 ; symbolic address, whose content it thinks it knows. For PIC, what CSE
1626 ; thinks is the real value will actually be the address of that value.
1628 [(set (match_operand:SI 0 "register_operand" "=r")
1630 (lo_sum:SI (match_operand:SI 1 "register_operand" "r")
1632 [(match_operand:SI 2 "symbolic_operand" "")] 0))))]
1638 return \"ldw RT'%G2(%1),%0\";
1640 [(set_attr "type" "load")
1641 (set_attr "length" "4")])
1643 ;; Always use addil rather than ldil;add sequences. This allows the
1644 ;; HP linker to eliminate the dp relocation if the symbolic operand
1645 ;; lives in the TEXT space.
1647 [(set (match_operand:SI 0 "register_operand" "=a")
1648 (high:SI (match_operand 1 "" "")))]
1649 "symbolic_operand (operands[1], Pmode)
1650 && ! function_label_operand (operands[1])
1651 && ! read_only_operand (operands[1])
1655 if (TARGET_LONG_LOAD_STORE)
1656 return \"addil NLR'%H1,%%r27\;ldo N'%H1(%%r1),%%r1\";
1658 return \"addil LR'%H1,%%r27\";
1660 [(set_attr "type" "binary")
1661 (set (attr "length")
1662 (if_then_else (eq (symbol_ref "TARGET_LONG_LOAD_STORE") (const_int 0))
1667 ;; This is for use in the prologue/epilogue code. We need it
1668 ;; to add large constants to a stack pointer or frame pointer.
1669 ;; Because of the additional %r1 pressure, we probably do not
1670 ;; want to use this in general code, so make it available
1671 ;; only after reload.
1673 [(set (match_operand:SI 0 "register_operand" "=!a,*r")
1674 (plus:SI (match_operand:SI 1 "register_operand" "r,r")
1675 (high:SI (match_operand 2 "const_int_operand" ""))))]
1679 ldil L'%G2,%0\;addl %0,%1,%0"
1680 [(set_attr "type" "binary,binary")
1681 (set_attr "length" "4,8")])
1684 [(set (match_operand:SI 0 "register_operand" "=r")
1685 (high:SI (match_operand 1 "" "")))]
1686 "(!flag_pic || !symbolic_operand (operands[1]), Pmode)
1687 && !is_function_label_plus_const (operands[1])"
1690 if (symbolic_operand (operands[1], Pmode))
1691 return \"ldil LR'%H1,%0\";
1693 return \"ldil L'%G1,%0\";
1695 [(set_attr "type" "move")
1696 (set_attr "length" "4")])
1699 [(set (match_operand:SI 0 "register_operand" "=r")
1700 (lo_sum:SI (match_operand:SI 1 "register_operand" "r")
1701 (match_operand:SI 2 "immediate_operand" "i")))]
1702 "!is_function_label_plus_const (operands[2])"
1705 if (flag_pic && symbolic_operand (operands[2], Pmode))
1707 else if (symbolic_operand (operands[2], Pmode))
1708 return \"ldo RR'%G2(%1),%0\";
1710 return \"ldo R'%G2(%1),%0\";
1712 [(set_attr "type" "move")
1713 (set_attr "length" "4")])
1715 ;; Now that a symbolic_address plus a constant is broken up early
1716 ;; in the compilation phase (for better CSE) we need a special
1717 ;; combiner pattern to load the symbolic address plus the constant
1718 ;; in only 2 instructions. (For cases where the symbolic address
1719 ;; was not a common subexpression.)
1721 [(set (match_operand:SI 0 "register_operand" "")
1722 (match_operand:SI 1 "symbolic_operand" ""))
1723 (clobber (match_operand:SI 2 "register_operand" ""))]
1724 "! (flag_pic && pic_label_operand (operands[1], SImode))"
1725 [(set (match_dup 2) (high:SI (match_dup 1)))
1726 (set (match_dup 0) (lo_sum:SI (match_dup 2) (match_dup 1)))]
1729 ;; hppa_legitimize_address goes to a great deal of trouble to
1730 ;; create addresses which use indexing. In some cases, this
1731 ; is a loss because there aren't any store instructions which
1732 ;; allow indexed addresses (with integer register source).
1734 ;; These define_splits try to turn a 3 insn store into
1735 ;; a 2 insn store with some creative RTL rewriting.
1737 [(set (mem:SI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
1738 (match_operand:SI 1 "shadd_operand" ""))
1739 (plus:SI (match_operand:SI 2 "register_operand" "")
1740 (match_operand:SI 3 "const_int_operand" ""))))
1741 (match_operand:SI 4 "register_operand" ""))
1742 (clobber (match_operand:SI 5 "register_operand" ""))]
1744 [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
1746 (set (mem:SI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
1750 [(set (mem:HI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
1751 (match_operand:SI 1 "shadd_operand" ""))
1752 (plus:SI (match_operand:SI 2 "register_operand" "")
1753 (match_operand:SI 3 "const_int_operand" ""))))
1754 (match_operand:HI 4 "register_operand" ""))
1755 (clobber (match_operand:SI 5 "register_operand" ""))]
1757 [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
1759 (set (mem:HI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
1763 [(set (mem:QI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
1764 (match_operand:SI 1 "shadd_operand" ""))
1765 (plus:SI (match_operand:SI 2 "register_operand" "")
1766 (match_operand:SI 3 "const_int_operand" ""))))
1767 (match_operand:QI 4 "register_operand" ""))
1768 (clobber (match_operand:SI 5 "register_operand" ""))]
1770 [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
1772 (set (mem:QI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
1775 (define_expand "movhi"
1776 [(set (match_operand:HI 0 "general_operand" "")
1777 (match_operand:HI 1 "general_operand" ""))]
1781 if (emit_move_sequence (operands, HImode, 0))
1786 [(set (match_operand:HI 0 "reg_or_nonsymb_mem_operand" "=r,r,r,r,r,Q,*q,!*f")
1787 (match_operand:HI 1 "move_operand" "r,J,N,K,RQ,rM,rM,!*fM"))]
1788 "register_operand (operands[0], HImode)
1789 || reg_or_0_operand (operands[1], HImode)"
1799 [(set_attr "type" "move,move,move,shift,load,store,move,fpalu")
1800 (set_attr "pa_combine_type" "addmove")
1801 (set_attr "length" "4,4,4,4,4,4,4,4")])
1804 [(set (match_operand:HI 0 "register_operand" "=r")
1805 (mem:HI (plus:SI (match_operand:SI 1 "basereg_operand" "r")
1806 (match_operand:SI 2 "register_operand" "r"))))]
1807 "! TARGET_DISABLE_INDEXING"
1810 /* Reload can create backwards (relative to cse) unscaled index
1811 address modes when eliminating registers and possibly for
1812 pseudos that don't get hard registers. Deal with it. */
1813 if (operands[2] == hard_frame_pointer_rtx
1814 || operands[2] == stack_pointer_rtx)
1815 return \"ldhx %1(%2),%0\";
1817 return \"ldhx %2(%1),%0\";
1819 [(set_attr "type" "load")
1820 (set_attr "length" "4")])
1823 [(set (match_operand:HI 0 "register_operand" "=r")
1824 (mem:HI (plus:SI (match_operand:SI 1 "register_operand" "r")
1825 (match_operand:SI 2 "basereg_operand" "r"))))]
1826 "! TARGET_DISABLE_INDEXING"
1829 /* Reload can create backwards (relative to cse) unscaled index
1830 address modes when eliminating registers and possibly for
1831 pseudos that don't get hard registers. Deal with it. */
1832 if (operands[1] == hard_frame_pointer_rtx
1833 || operands[1] == stack_pointer_rtx)
1834 return \"ldhx %2(%1),%0\";
1836 return \"ldhx %1(%2),%0\";
1838 [(set_attr "type" "load")
1839 (set_attr "length" "4")])
1841 ; Now zero extended variants.
1843 [(set (match_operand:SI 0 "register_operand" "=r")
1844 (zero_extend:SI (mem:HI
1846 (match_operand:SI 1 "basereg_operand" "r")
1847 (match_operand:SI 2 "register_operand" "r")))))]
1848 "! TARGET_DISABLE_INDEXING"
1851 /* Reload can create backwards (relative to cse) unscaled index
1852 address modes when eliminating registers and possibly for
1853 pseudos that don't get hard registers. Deal with it. */
1854 if (operands[2] == hard_frame_pointer_rtx
1855 || operands[2] == stack_pointer_rtx)
1856 return \"ldhx %1(%2),%0\";
1858 return \"ldhx %2(%1),%0\";
1860 [(set_attr "type" "load")
1861 (set_attr "length" "4")])
1864 [(set (match_operand:SI 0 "register_operand" "=r")
1865 (zero_extend:SI (mem:HI
1867 (match_operand:SI 1 "register_operand" "r")
1868 (match_operand:SI 2 "basereg_operand" "r")))))]
1869 "! TARGET_DISABLE_INDEXING"
1872 /* Reload can create backwards (relative to cse) unscaled index
1873 address modes when eliminating registers and possibly for
1874 pseudos that don't get hard registers. Deal with it. */
1875 if (operands[1] == hard_frame_pointer_rtx
1876 || operands[1] == stack_pointer_rtx)
1877 return \"ldhx %2(%1),%0\";
1879 return \"ldhx %1(%2),%0\";
1881 [(set_attr "type" "load")
1882 (set_attr "length" "4")])
1885 [(set (match_operand:HI 0 "register_operand" "=r")
1886 (mem:HI (plus:SI (match_operand:SI 1 "register_operand" "+r")
1887 (match_operand:SI 2 "int5_operand" "L"))))
1889 (plus:SI (match_dup 1) (match_dup 2)))]
1892 [(set_attr "type" "load")
1893 (set_attr "length" "4")])
1895 ; And a zero extended variant.
1897 [(set (match_operand:SI 0 "register_operand" "=r")
1898 (zero_extend:SI (mem:HI
1900 (match_operand:SI 1 "register_operand" "+r")
1901 (match_operand:SI 2 "int5_operand" "L")))))
1903 (plus:SI (match_dup 1) (match_dup 2)))]
1906 [(set_attr "type" "load")
1907 (set_attr "length" "4")])
1910 [(set (mem:HI (plus:SI (match_operand:SI 0 "register_operand" "+r")
1911 (match_operand:SI 1 "int5_operand" "L")))
1912 (match_operand:HI 2 "reg_or_0_operand" "rM"))
1914 (plus:SI (match_dup 0) (match_dup 1)))]
1916 "sths,mb %r2,%1(%0)"
1917 [(set_attr "type" "store")
1918 (set_attr "length" "4")])
1921 [(set (match_operand:HI 0 "register_operand" "=r")
1922 (high:HI (match_operand 1 "const_int_operand" "")))]
1925 [(set_attr "type" "move")
1926 (set_attr "length" "4")])
1929 [(set (match_operand:HI 0 "register_operand" "=r")
1930 (lo_sum:HI (match_operand:HI 1 "register_operand" "r")
1931 (match_operand 2 "const_int_operand" "")))]
1934 [(set_attr "type" "move")
1935 (set_attr "length" "4")])
1937 (define_expand "movqi"
1938 [(set (match_operand:QI 0 "general_operand" "")
1939 (match_operand:QI 1 "general_operand" ""))]
1943 if (emit_move_sequence (operands, QImode, 0))
1948 [(set (match_operand:QI 0 "reg_or_nonsymb_mem_operand" "=r,r,r,r,r,Q,*q,!*f")
1949 (match_operand:QI 1 "move_operand" "r,J,N,K,RQ,rM,rM,!*fM"))]
1950 "register_operand (operands[0], QImode)
1951 || reg_or_0_operand (operands[1], QImode)"
1961 [(set_attr "type" "move,move,move,shift,load,store,move,fpalu")
1962 (set_attr "pa_combine_type" "addmove")
1963 (set_attr "length" "4,4,4,4,4,4,4,4")])
1966 [(set (match_operand:QI 0 "register_operand" "=r")
1967 (mem:QI (plus:SI (match_operand:SI 1 "basereg_operand" "r")
1968 (match_operand:SI 2 "register_operand" "r"))))]
1969 "! TARGET_DISABLE_INDEXING"
1972 /* Reload can create backwards (relative to cse) unscaled index
1973 address modes when eliminating registers and possibly for
1974 pseudos that don't get hard registers. Deal with it. */
1975 if (operands[2] == hard_frame_pointer_rtx
1976 || operands[2] == stack_pointer_rtx)
1977 return \"ldbx %1(%2),%0\";
1979 return \"ldbx %2(%1),%0\";
1981 [(set_attr "type" "load")
1982 (set_attr "length" "4")])
1985 [(set (match_operand:QI 0 "register_operand" "=r")
1986 (mem:QI (plus:SI (match_operand:SI 1 "register_operand" "r")
1987 (match_operand:SI 2 "basereg_operand" "r"))))]
1988 "! TARGET_DISABLE_INDEXING"
1991 /* Reload can create backwards (relative to cse) unscaled index
1992 address modes when eliminating registers and possibly for
1993 pseudos that don't get hard registers. Deal with it. */
1994 if (operands[1] == hard_frame_pointer_rtx
1995 || operands[1] == stack_pointer_rtx)
1996 return \"ldbx %2(%1),%0\";
1998 return \"ldbx %1(%2),%0\";
2000 [(set_attr "type" "load")
2001 (set_attr "length" "4")])
2003 ; Indexed byte load with zero extension to SImode or HImode.
2005 [(set (match_operand:SI 0 "register_operand" "=r")
2006 (zero_extend:SI (mem:QI
2008 (match_operand:SI 1 "basereg_operand" "r")
2009 (match_operand:SI 2 "register_operand" "r")))))]
2010 "! TARGET_DISABLE_INDEXING"
2013 /* Reload can create backwards (relative to cse) unscaled index
2014 address modes when eliminating registers and possibly for
2015 pseudos that don't get hard registers. Deal with it. */
2016 if (operands[2] == hard_frame_pointer_rtx
2017 || operands[2] == stack_pointer_rtx)
2018 return \"ldbx %1(%2),%0\";
2020 return \"ldbx %2(%1),%0\";
2022 [(set_attr "type" "load")
2023 (set_attr "length" "4")])
2026 [(set (match_operand:SI 0 "register_operand" "=r")
2027 (zero_extend:SI (mem:QI
2029 (match_operand:SI 1 "register_operand" "r")
2030 (match_operand:SI 2 "basereg_operand" "r")))))]
2031 "! TARGET_DISABLE_INDEXING"
2034 /* Reload can create backwards (relative to cse) unscaled index
2035 address modes when eliminating registers and possibly for
2036 pseudos that don't get hard registers. Deal with it. */
2037 if (operands[1] == hard_frame_pointer_rtx
2038 || operands[1] == stack_pointer_rtx)
2039 return \"ldbx %2(%1),%0\";
2041 return \"ldbx %1(%2),%0\";
2043 [(set_attr "type" "load")
2044 (set_attr "length" "4")])
2047 [(set (match_operand:HI 0 "register_operand" "=r")
2048 (zero_extend:HI (mem:QI
2050 (match_operand:SI 1 "basereg_operand" "r")
2051 (match_operand:SI 2 "register_operand" "r")))))]
2052 "! TARGET_DISABLE_INDEXING"
2055 /* Reload can create backwards (relative to cse) unscaled index
2056 address modes when eliminating registers and possibly for
2057 pseudos that don't get hard registers. Deal with it. */
2058 if (operands[2] == hard_frame_pointer_rtx
2059 || operands[2] == stack_pointer_rtx)
2060 return \"ldbx %1(%2),%0\";
2062 return \"ldbx %2(%1),%0\";
2064 [(set_attr "type" "load")
2065 (set_attr "length" "4")])
2068 [(set (match_operand:HI 0 "register_operand" "=r")
2069 (zero_extend:HI (mem:QI
2071 (match_operand:SI 1 "register_operand" "r")
2072 (match_operand:SI 2 "basereg_operand" "r")))))]
2073 "! TARGET_DISABLE_INDEXING"
2076 /* Reload can create backwards (relative to cse) unscaled index
2077 address modes when eliminating registers and possibly for
2078 pseudos that don't get hard registers. Deal with it. */
2079 if (operands[1] == hard_frame_pointer_rtx
2080 || operands[1] == stack_pointer_rtx)
2081 return \"ldbx %2(%1),%0\";
2083 return \"ldbx %1(%2),%0\";
2085 [(set_attr "type" "load")
2086 (set_attr "length" "4")])
2089 [(set (match_operand:QI 0 "register_operand" "=r")
2090 (mem:QI (plus:SI (match_operand:SI 1 "register_operand" "+r")
2091 (match_operand:SI 2 "int5_operand" "L"))))
2092 (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))]
2095 [(set_attr "type" "load")
2096 (set_attr "length" "4")])
2098 ; Now the same thing with zero extensions.
2100 [(set (match_operand:SI 0 "register_operand" "=r")
2101 (zero_extend:SI (mem:QI (plus:SI
2102 (match_operand:SI 1 "register_operand" "+r")
2103 (match_operand:SI 2 "int5_operand" "L")))))
2104 (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))]
2107 [(set_attr "type" "load")
2108 (set_attr "length" "4")])
2111 [(set (match_operand:HI 0 "register_operand" "=r")
2112 (zero_extend:HI (mem:QI (plus:SI
2113 (match_operand:SI 1 "register_operand" "+r")
2114 (match_operand:SI 2 "int5_operand" "L")))))
2115 (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))]
2118 [(set_attr "type" "load")
2119 (set_attr "length" "4")])
2122 [(set (mem:QI (plus:SI (match_operand:SI 0 "register_operand" "+r")
2123 (match_operand:SI 1 "int5_operand" "L")))
2124 (match_operand:QI 2 "reg_or_0_operand" "rM"))
2126 (plus:SI (match_dup 0) (match_dup 1)))]
2128 "stbs,mb %r2,%1(%0)"
2129 [(set_attr "type" "store")
2130 (set_attr "length" "4")])
2132 ;; The definition of this insn does not really explain what it does,
2133 ;; but it should suffice
2134 ;; that anything generated as this insn will be recognized as one
2135 ;; and that it will not successfully combine with anything.
2136 (define_expand "movstrsi"
2137 [(parallel [(set (match_operand:BLK 0 "" "")
2138 (match_operand:BLK 1 "" ""))
2139 (clobber (match_dup 7))
2140 (clobber (match_dup 8))
2141 (clobber (match_dup 4))
2142 (clobber (match_dup 5))
2143 (clobber (match_dup 6))
2144 (use (match_operand:SI 2 "arith_operand" ""))
2145 (use (match_operand:SI 3 "const_int_operand" ""))])]
2151 /* HP provides very fast block move library routine for the PA;
2152 this routine includes:
2154 4x4 byte at a time block moves,
2155 1x4 byte at a time with alignment checked at runtime with
2156 attempts to align the source and destination as needed
2159 With that in mind, here's the heuristics to try and guess when
2160 the inlined block move will be better than the library block
2163 If the size isn't constant, then always use the library routines.
2165 If the size is large with respect to the known alignment, then use
2166 the library routines.
2168 If the size is small with respect to the known alignment, then open
2169 code the copy (since that will lead to better scheduling).
2171 Else use the block move pattern. */
2173 /* Undetermined size, use the library routine. */
2174 if (GET_CODE (operands[2]) != CONST_INT)
2177 size = INTVAL (operands[2]);
2178 align = INTVAL (operands[3]);
2179 align = align > 4 ? 4 : align;
2181 /* If size/alignment > 8 (eg size is large in respect to alignment),
2182 then use the library routines. */
2183 if (size / align > 16)
2186 /* This does happen, but not often enough to worry much about. */
2187 if (size / align < MOVE_RATIO)
2190 /* Fall through means we're going to use our block move pattern. */
2192 = change_address (operands[0], VOIDmode,
2193 copy_to_mode_reg (SImode, XEXP (operands[0], 0)));
2195 = change_address (operands[1], VOIDmode,
2196 copy_to_mode_reg (SImode, XEXP (operands[1], 0)));
2197 operands[4] = gen_reg_rtx (SImode);
2198 operands[5] = gen_reg_rtx (SImode);
2199 operands[6] = gen_reg_rtx (SImode);
2200 operands[7] = XEXP (operands[0], 0);
2201 operands[8] = XEXP (operands[1], 0);
2204 ;; The operand constraints are written like this to support both compile-time
2205 ;; and run-time determined byte count. If the count is run-time determined,
2206 ;; the register with the byte count is clobbered by the copying code, and
2207 ;; therefore it is forced to operand 2. If the count is compile-time
2208 ;; determined, we need two scratch registers for the unrolled code.
2209 (define_insn "movstrsi_internal"
2210 [(set (mem:BLK (match_operand:SI 0 "register_operand" "+r,r"))
2211 (mem:BLK (match_operand:SI 1 "register_operand" "+r,r")))
2212 (clobber (match_dup 0))
2213 (clobber (match_dup 1))
2214 (clobber (match_operand:SI 2 "register_operand" "=r,r")) ;loop cnt/tmp
2215 (clobber (match_operand:SI 3 "register_operand" "=&r,&r")) ;item tmp
2216 (clobber (match_operand:SI 6 "register_operand" "=&r,&r")) ;item tmp2
2217 (use (match_operand:SI 4 "arith_operand" "J,2")) ;byte count
2218 (use (match_operand:SI 5 "const_int_operand" "n,n"))] ;alignment
2220 "* return output_block_move (operands, !which_alternative);"
2221 [(set_attr "type" "multi,multi")])
2223 ;; Floating point move insns
2225 ;; This pattern forces (set (reg:DF ...) (const_double ...))
2226 ;; to be reloaded by putting the constant into memory when
2227 ;; reg is a floating point register.
2229 ;; For integer registers we use ldil;ldo to set the appropriate
2232 ;; This must come before the movdf pattern, and it must be present
2233 ;; to handle obscure reloading cases.
2235 [(set (match_operand:DF 0 "register_operand" "=?r,f")
2236 (match_operand:DF 1 "" "?F,m"))]
2237 "GET_CODE (operands[1]) == CONST_DOUBLE
2238 && operands[1] != CONST0_RTX (DFmode)
2239 && ! TARGET_SOFT_FLOAT"
2240 "* return (which_alternative == 0 ? output_move_double (operands)
2241 : \"fldd%F1 %1,%0\");"
2242 [(set_attr "type" "move,fpload")
2243 (set_attr "length" "16,4")])
2245 (define_expand "movdf"
2246 [(set (match_operand:DF 0 "general_operand" "")
2247 (match_operand:DF 1 "general_operand" ""))]
2251 if (emit_move_sequence (operands, DFmode, 0))
2255 ;; Reloading a DFmode value requires a scratch register if
2256 ;; going into or out of floating point registers.
2258 (define_expand "reload_indf"
2259 [(set (match_operand:DF 0 "register_operand" "=Z")
2260 (match_operand:DF 1 "non_hard_reg_operand" ""))
2261 (clobber (match_operand:DF 2 "register_operand" "=&r"))]
2265 if (emit_move_sequence (operands, DFmode, operands[2]))
2268 /* We don't want the clobber emitted, so handle this ourselves. */
2269 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2273 (define_expand "reload_outdf"
2274 [(set (match_operand:DF 0 "non_hard_reg_operand" "")
2275 (match_operand:DF 1 "register_operand" "Z"))
2276 (clobber (match_operand:DF 2 "register_operand" "=&r"))]
2280 if (emit_move_sequence (operands, DFmode, operands[2]))
2283 /* We don't want the clobber emitted, so handle this ourselves. */
2284 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
;; General DFmode move when the FPU is available.  FP registers and the
;; DF zero constant go through output_fp_move_double; everything else
;; through output_move_double.
2289 [(set (match_operand:DF 0 "reg_or_nonsymb_mem_operand"
2290 "=f,*r,RQ,?o,?Q,f,*r,*r")
2291 (match_operand:DF 1 "reg_or_0_or_nonsymb_mem_operand"
2292 "fG,*rG,f,*r,*r,RQ,o,RQ"))]
2293 "(register_operand (operands[0], DFmode)
2294 || reg_or_0_operand (operands[1], DFmode))
2295 && ! (GET_CODE (operands[1]) == CONST_DOUBLE
2296 && GET_CODE (operands[0]) == MEM)
2297 && ! TARGET_SOFT_FLOAT"
2300 if (FP_REG_P (operands[0]) || FP_REG_P (operands[1])
2301 || operands[1] == CONST0_RTX (DFmode))
2302 return output_fp_move_double (operands);
2303 return output_move_double (operands);
2305 [(set_attr "type" "fpalu,move,fpstore,store,store,fpload,load,load")
2306 (set_attr "length" "4,8,4,8,16,4,8,16")])
;; DFmode move for TARGET_SOFT_FLOAT: integer registers only, so
;; output_move_double handles every alternative.
2309 [(set (match_operand:DF 0 "reg_or_nonsymb_mem_operand"
2311 (match_operand:DF 1 "reg_or_0_or_nonsymb_mem_operand"
2313 "(register_operand (operands[0], DFmode)
2314 || reg_or_0_operand (operands[1], DFmode))
2315 && TARGET_SOFT_FLOAT"
2318 return output_move_double (operands);
2320 [(set_attr "type" "move,store,store,load,load")
2321 (set_attr "length" "8,8,16,8,16")])
;; Indexed DFmode load, base in operand 1.  If reload built the address
;; "backwards" (frame/stack pointer showing up as the index), swap the
;; roles of base and index in the emitted flddx.
2324 [(set (match_operand:DF 0 "register_operand" "=fx")
2325 (mem:DF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2326 (match_operand:SI 2 "register_operand" "r"))))]
2327 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2330 /* Reload can create backwards (relative to cse) unscaled index
2331 address modes when eliminating registers and possibly for
2332 pseudos that don't get hard registers. Deal with it. */
2333 if (operands[2] == hard_frame_pointer_rtx
2334 || operands[2] == stack_pointer_rtx)
2335 return \"flddx %1(%2),%0\";
2337 return \"flddx %2(%1),%0\";
2339 [(set_attr "type" "fpload")
2340 (set_attr "length" "4")])
;; Same indexed DFmode load with base/index operands in the opposite
;; positions in the RTL.
2343 [(set (match_operand:DF 0 "register_operand" "=fx")
2344 (mem:DF (plus:SI (match_operand:SI 1 "register_operand" "r")
2345 (match_operand:SI 2 "basereg_operand" "r"))))]
2346 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2349 /* Reload can create backwards (relative to cse) unscaled index
2350 address modes when eliminating registers and possibly for
2351 pseudos that don't get hard registers. Deal with it. */
2352 if (operands[1] == hard_frame_pointer_rtx
2353 || operands[1] == stack_pointer_rtx)
2354 return \"flddx %2(%1),%0\";
2356 return \"flddx %1(%2),%0\";
2358 [(set_attr "type" "fpload")
2359 (set_attr "length" "4")])
;; Indexed DFmode store (fstdx); same backwards-address handling as the
;; loads above.
2362 [(set (mem:DF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2363 (match_operand:SI 2 "register_operand" "r")))
2364 (match_operand:DF 0 "register_operand" "fx"))]
2365 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2368 /* Reload can create backwards (relative to cse) unscaled index
2369 address modes when eliminating registers and possibly for
2370 pseudos that don't get hard registers. Deal with it. */
2371 if (operands[2] == hard_frame_pointer_rtx
2372 || operands[2] == stack_pointer_rtx)
2373 return \"fstdx %0,%1(%2)\";
2375 return \"fstdx %0,%2(%1)\";
2377 [(set_attr "type" "fpstore")
2378 (set_attr "length" "4")])
;; Indexed DFmode store, operands in the opposite positions.
2381 [(set (mem:DF (plus:SI (match_operand:SI 1 "register_operand" "r")
2382 (match_operand:SI 2 "basereg_operand" "r")))
2383 (match_operand:DF 0 "register_operand" "fx"))]
2384 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2387 /* Reload can create backwards (relative to cse) unscaled index
2388 address modes when eliminating registers and possibly for
2389 pseudos that don't get hard registers. Deal with it. */
2390 if (operands[1] == hard_frame_pointer_rtx
2391 || operands[1] == stack_pointer_rtx)
2392 return \"fstdx %0,%2(%1)\";
2394 return \"fstdx %0,%1(%2)\";
2396 [(set_attr "type" "fpstore")
2397 (set_attr "length" "4")])
;; DImode move expander plus its secondary-reload helpers; all defer to
;; emit_move_sequence, which emits the move itself when it returns
;; nonzero.  Operand 2 of the reload patterns is the SImode scratch
;; register supplied by reload.
2399 (define_expand "movdi"
2400 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand" "")
2401 (match_operand:DI 1 "general_operand" ""))]
2405 if (emit_move_sequence (operands, DImode, 0))
2409 (define_expand "reload_indi"
2410 [(set (match_operand:DI 0 "register_operand" "=Z")
2411 (match_operand:DI 1 "non_hard_reg_operand" ""))
2412 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
2416 if (emit_move_sequence (operands, DImode, operands[2]))
2419 /* We don't want the clobber emitted, so handle this ourselves. */
2420 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2424 (define_expand "reload_outdi"
2425 [(set (match_operand:DI 0 "general_operand" "")
2426 (match_operand:DI 1 "register_operand" "Z"))
2427 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
2431 if (emit_move_sequence (operands, DImode, operands[2]))
2434 /* We don't want the clobber emitted, so handle this ourselves. */
2435 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
;; (high:DI ...) of a constant: materialize the high part word-by-word.
;; The low word gets ldil of the constant's upper bits; the high word is
;; sign-filled (ldi -1 / ldi 0) for CONST_INT, or set from
;; CONST_DOUBLE_HIGH for CONST_DOUBLE.
2440 [(set (match_operand:DI 0 "register_operand" "=r")
2441 (high:DI (match_operand 1 "" "")))]
2445 rtx op0 = operands[0];
2446 rtx op1 = operands[1];
2448 if (GET_CODE (op1) == CONST_INT)
2450 operands[0] = operand_subword (op0, 1, 0, DImode);
2451 output_asm_insn (\"ldil L'%1,%0\", operands);
2453 operands[0] = operand_subword (op0, 0, 0, DImode);
2454 if (INTVAL (op1) < 0)
2455 output_asm_insn (\"ldi -1,%0\", operands);
2457 output_asm_insn (\"ldi 0,%0\", operands);
2460 else if (GET_CODE (op1) == CONST_DOUBLE)
2462 operands[0] = operand_subword (op0, 1, 0, DImode);
2463 operands[1] = GEN_INT (CONST_DOUBLE_LOW (op1));
2464 output_asm_insn (\"ldil L'%1,%0\", operands);
2466 operands[0] = operand_subword (op0, 0, 0, DImode);
2467 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (op1));
2468 output_asm_insn (singlemove_string (operands), operands);
2474 [(set_attr "type" "move")
2475 (set_attr "length" "8")])
;; General DImode move with the FPU available; FP registers and the DI
;; zero constant use output_fp_move_double, otherwise output_move_double.
2478 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand"
2479 "=r,o,Q,r,r,r,f,f,*TR")
2480 (match_operand:DI 1 "general_operand"
2481 "rM,r,r,o*R,Q,i,fM,*TR,f"))]
2482 "(register_operand (operands[0], DImode)
2483 || reg_or_0_operand (operands[1], DImode))
2484 && ! TARGET_SOFT_FLOAT"
2487 if (FP_REG_P (operands[0]) || FP_REG_P (operands[1])
2488 || (operands[1] == CONST0_RTX (DImode)))
2489 return output_fp_move_double (operands);
2490 return output_move_double (operands);
2492 [(set_attr "type" "move,store,store,load,load,multi,fpalu,fpload,fpstore")
2493 (set_attr "length" "8,8,16,8,16,16,4,4,4")])
;; DImode move for TARGET_SOFT_FLOAT; integer registers only.
2496 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand"
2498 (match_operand:DI 1 "general_operand"
2500 "(register_operand (operands[0], DImode)
2501 || reg_or_0_operand (operands[1], DImode))
2502 && TARGET_SOFT_FLOAT"
2505 return output_move_double (operands);
2507 [(set_attr "type" "move,store,store,load,load,multi")
2508 (set_attr "length" "8,8,16,8,16,16")])
;; (lo_sum:DI reg const): add the low-half bits with ldo; alternative 1
;; first copies operand 1 into the destination.  A CONST_DOUBLE operand
;; is reduced to its low word before printing (see comment below).
2511 [(set (match_operand:DI 0 "register_operand" "=r,&r")
2512 (lo_sum:DI (match_operand:DI 1 "register_operand" "0,r")
2513 (match_operand:DI 2 "immediate_operand" "i,i")))]
2517 /* Don't output a 64 bit constant, since we can't trust the assembler to
2518 handle it correctly. */
2519 if (GET_CODE (operands[2]) == CONST_DOUBLE)
2520 operands[2] = GEN_INT (CONST_DOUBLE_LOW (operands[2]));
2521 if (which_alternative == 1)
2522 output_asm_insn (\"copy %1,%0\", operands);
2523 return \"ldo R'%G2(%R1),%R0\";
2525 [(set_attr "type" "move,move")
2526 (set_attr "length" "4,8")])
2528 ;; This pattern forces (set (reg:SF ...) (const_double ...))
2529 ;; to be reloaded by putting the constant into memory when
2530 ;; reg is a floating point register.
2532 ;; For integer registers we use ldil;ldo to set the appropriate
2535 ;; This must come before the movsf pattern, and it must be present
2536 ;; to handle obscure reloading cases.
;; SFmode analogue of the DFmode CONST_DOUBLE pattern above:
;; alternative 0 builds the constant in an integer register
;; (singlemove_string); alternative 1 loads it from memory with fldw.
2538 [(set (match_operand:SF 0 "register_operand" "=?r,f")
2539 (match_operand:SF 1 "" "?F,m"))]
2540 "GET_CODE (operands[1]) == CONST_DOUBLE
2541 && operands[1] != CONST0_RTX (SFmode)
2542 && ! TARGET_SOFT_FLOAT"
2543 "* return (which_alternative == 0 ? singlemove_string (operands)
2544 : \" fldw%F1 %1,%0\");"
2545 [(set_attr "type" "move,fpload")
2546 (set_attr "length" "8,4")])
;; SFmode move expander; emit_move_sequence emits the move itself when
;; it returns nonzero.
2548 (define_expand "movsf"
2549 [(set (match_operand:SF 0 "general_operand" "")
2550 (match_operand:SF 1 "general_operand" ""))]
2554 if (emit_move_sequence (operands, SFmode, 0))
2558 ;; Reloading an SImode or DImode value requires a scratch register if
2559 ;; going into or out of floating point registers.
;; Secondary-reload expanders for SFmode; operand 2 is the scratch
;; register supplied by reload, passed through to emit_move_sequence.
2561 (define_expand "reload_insf"
2562 [(set (match_operand:SF 0 "register_operand" "=Z")
2563 (match_operand:SF 1 "non_hard_reg_operand" ""))
2564 (clobber (match_operand:SF 2 "register_operand" "=&r"))]
2568 if (emit_move_sequence (operands, SFmode, operands[2]))
2571 /* We don't want the clobber emitted, so handle this ourselves. */
2572 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
;; Mirror of reload_insf for the store direction.
2576 (define_expand "reload_outsf"
2577 [(set (match_operand:SF 0 "non_hard_reg_operand" "")
2578 (match_operand:SF 1 "register_operand" "Z"))
2579 (clobber (match_operand:SF 2 "register_operand" "=&r"))]
2583 if (emit_move_sequence (operands, SFmode, operands[2]))
2586 /* We don't want the clobber emitted, so handle this ourselves. */
2587 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
;; General SFmode move with the FPU available; single-word moves, every
;; alternative is one instruction.
2592 [(set (match_operand:SF 0 "reg_or_nonsymb_mem_operand"
2594 (match_operand:SF 1 "reg_or_0_or_nonsymb_mem_operand"
2595 "fG,rG,RQ,RQ,f,rG"))]
2596 "(register_operand (operands[0], SFmode)
2597 || reg_or_0_operand (operands[1], SFmode))
2598 && ! TARGET_SOFT_FLOAT"
2606 [(set_attr "type" "fpalu,move,fpload,load,fpstore,store")
2607 (set_attr "pa_combine_type" "addmove")
2608 (set_attr "length" "4,4,4,4,4,4")])
;; SFmode move for TARGET_SOFT_FLOAT: integer registers only.
2611 [(set (match_operand:SF 0 "reg_or_nonsymb_mem_operand"
2613 (match_operand:SF 1 "reg_or_0_or_nonsymb_mem_operand"
2615 "(register_operand (operands[0], SFmode)
2616 || reg_or_0_operand (operands[1], SFmode))
2617 && TARGET_SOFT_FLOAT"
2622 [(set_attr "type" "move,load,store")
2623 (set_attr "pa_combine_type" "addmove")
2624 (set_attr "length" "4,4,4")])
;; Indexed SFmode load (fldwx); same backwards-address fixup as the
;; DFmode flddx patterns: if the frame/stack pointer appears as the
;; index, swap base and index in the emitted instruction.
2627 [(set (match_operand:SF 0 "register_operand" "=fx")
2628 (mem:SF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2629 (match_operand:SI 2 "register_operand" "r"))))]
2630 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2633 /* Reload can create backwards (relative to cse) unscaled index
2634 address modes when eliminating registers and possibly for
2635 pseudos that don't get hard registers. Deal with it. */
2636 if (operands[2] == hard_frame_pointer_rtx
2637 || operands[2] == stack_pointer_rtx)
2638 return \"fldwx %1(%2),%0\";
2640 return \"fldwx %2(%1),%0\";
2642 [(set_attr "type" "fpload")
2643 (set_attr "length" "4")])
;; Same indexed SFmode load with base/index in the opposite positions.
2646 [(set (match_operand:SF 0 "register_operand" "=fx")
2647 (mem:SF (plus:SI (match_operand:SI 1 "register_operand" "r")
2648 (match_operand:SI 2 "basereg_operand" "r"))))]
2649 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2652 /* Reload can create backwards (relative to cse) unscaled index
2653 address modes when eliminating registers and possibly for
2654 pseudos that don't get hard registers. Deal with it. */
2655 if (operands[1] == hard_frame_pointer_rtx
2656 || operands[1] == stack_pointer_rtx)
2657 return \"fldwx %2(%1),%0\";
2659 return \"fldwx %1(%2),%0\";
2661 [(set_attr "type" "fpload")
2662 (set_attr "length" "4")])
;; Indexed SFmode store (fstwx) with the same fixup.
2665 [(set (mem:SF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2666 (match_operand:SI 2 "register_operand" "r")))
2667 (match_operand:SF 0 "register_operand" "fx"))]
2668 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2671 /* Reload can create backwards (relative to cse) unscaled index
2672 address modes when eliminating registers and possibly for
2673 pseudos that don't get hard registers. Deal with it. */
2674 if (operands[2] == hard_frame_pointer_rtx
2675 || operands[2] == stack_pointer_rtx)
2676 return \"fstwx %0,%1(%2)\";
2678 return \"fstwx %0,%2(%1)\";
2680 [(set_attr "type" "fpstore")
2681 (set_attr "length" "4")])
;; Indexed SFmode store, operands in the opposite positions.
2684 [(set (mem:SF (plus:SI (match_operand:SI 1 "register_operand" "r")
2685 (match_operand:SI 2 "basereg_operand" "r")))
2686 (match_operand:SF 0 "register_operand" "fx"))]
2687 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2690 /* Reload can create backwards (relative to cse) unscaled index
2691 address modes when eliminating registers and possibly for
2692 pseudos that don't get hard registers. Deal with it. */
2693 if (operands[1] == hard_frame_pointer_rtx
2694 || operands[1] == stack_pointer_rtx)
2695 return \"fstwx %0,%2(%1)\";
2697 return \"fstwx %0,%1(%2)\";
2699 [(set_attr "type" "fpstore")
2700 (set_attr "length" "4")])
2703 ;;- zero extension instructions
2704 ;; We have define_expand for zero extension patterns to make sure the
2705 ;; operands get loaded into registers. The define_insns accept
2706 ;; memory operands. This gives us better overall code than just
2707 ;; having a pattern that does or does not accept memory operands.
;; Each pair below is: an expander restricted to register operands,
;; followed by the matching insn whose alternatives are register
;; (extract/shift) or memory (load).  CONST_INT sources are excluded
;; from the insns since they have no mode to extend from.
2709 (define_expand "zero_extendhisi2"
2710 [(set (match_operand:SI 0 "register_operand" "")
2712 (match_operand:HI 1 "register_operand" "")))]
2717 [(set (match_operand:SI 0 "register_operand" "=r,r")
2719 (match_operand:HI 1 "move_operand" "r,RQ")))]
2720 "GET_CODE (operands[1]) != CONST_INT"
2724 [(set_attr "type" "shift,load")
2725 (set_attr "length" "4,4")])
2727 (define_expand "zero_extendqihi2"
2728 [(set (match_operand:HI 0 "register_operand" "")
2730 (match_operand:QI 1 "register_operand" "")))]
2735 [(set (match_operand:HI 0 "register_operand" "=r,r")
2737 (match_operand:QI 1 "move_operand" "r,RQ")))]
2738 "GET_CODE (operands[1]) != CONST_INT"
2742 [(set_attr "type" "shift,load")
2743 (set_attr "length" "4,4")])
2745 (define_expand "zero_extendqisi2"
2746 [(set (match_operand:SI 0 "register_operand" "")
2748 (match_operand:QI 1 "register_operand" "")))]
2753 [(set (match_operand:SI 0 "register_operand" "=r,r")
2755 (match_operand:QI 1 "move_operand" "r,RQ")))]
2756 "GET_CODE (operands[1]) != CONST_INT"
2760 [(set_attr "type" "shift,load")
2761 (set_attr "length" "4,4")])
2763 ;;- sign extension instructions
;; Register-to-register sign extensions; each is a single 4-byte
;; shift-class instruction.
2765 (define_insn "extendhisi2"
2766 [(set (match_operand:SI 0 "register_operand" "=r")
2767 (sign_extend:SI (match_operand:HI 1 "register_operand" "r")))]
2770 [(set_attr "type" "shift")
2771 (set_attr "length" "4")])
2773 (define_insn "extendqihi2"
2774 [(set (match_operand:HI 0 "register_operand" "=r")
2775 (sign_extend:HI (match_operand:QI 1 "register_operand" "r")))]
2778 [(set_attr "type" "shift")
2779 (set_attr "length" "4")])
2781 (define_insn "extendqisi2"
2782 [(set (match_operand:SI 0 "register_operand" "=r")
2783 (sign_extend:SI (match_operand:QI 1 "register_operand" "r")))]
2786 [(set_attr "type" "shift")
2787 (set_attr "length" "4")])
2789 ;; Conversions between float and double.
;; fcnvff converts between FP formats: sgl,dbl widens, dbl,sgl narrows.
2791 (define_insn "extendsfdf2"
2792 [(set (match_operand:DF 0 "register_operand" "=f")
2794 (match_operand:SF 1 "register_operand" "f")))]
2795 "! TARGET_SOFT_FLOAT"
2796 "fcnvff,sgl,dbl %1,%0"
2797 [(set_attr "type" "fpalu")
2798 (set_attr "length" "4")])
2800 (define_insn "truncdfsf2"
2801 [(set (match_operand:SF 0 "register_operand" "=f")
2803 (match_operand:DF 1 "register_operand" "f")))]
2804 "! TARGET_SOFT_FLOAT"
2805 "fcnvff,dbl,sgl %1,%0"
2806 [(set_attr "type" "fpalu")
2807 (set_attr "length" "4")])
2809 ;; Conversion between fixed point and floating point.
2810 ;; Note that among the fix-to-float insns
2811 ;; the ones that start with SImode come first.
2812 ;; That is so that an operand that is a CONST_INT
2813 ;; (and therefore lacks a specific machine mode),
2814 ;; will be recognized as SImode (which is always valid)
2815 ;; rather than as QImode or HImode.
2817 ;; This pattern forces (set (reg:SF ...) (float:SF (const_int ...)))
2818 ;; to be reloaded by putting the constant into memory.
2819 ;; It must come before the more general floatsisf2 pattern.
;; Spilled-constant case: load the integer from memory, then convert
;; in place with fcnvxf.
2821 [(set (match_operand:SF 0 "register_operand" "=f")
2822 (float:SF (match_operand:SI 1 "const_int_operand" "m")))]
2823 "! TARGET_SOFT_FLOAT"
2824 "fldw%F1 %1,%0\;fcnvxf,sgl,sgl %0,%0"
2825 [(set_attr "type" "fpalu")
2826 (set_attr "length" "8")])
;; Register case: single fcnvxf (fixed-to-float) conversion.
2828 (define_insn "floatsisf2"
2829 [(set (match_operand:SF 0 "register_operand" "=f")
2830 (float:SF (match_operand:SI 1 "register_operand" "f")))]
2831 "! TARGET_SOFT_FLOAT"
2832 "fcnvxf,sgl,sgl %1,%0"
2833 [(set_attr "type" "fpalu")
2834 (set_attr "length" "4")])
2836 ;; This pattern forces (set (reg:DF ...) (float:DF (const_int ...)))
2837 ;; to be reloaded by putting the constant into memory.
2838 ;; It must come before the more general floatsidf2 pattern.
2840 [(set (match_operand:DF 0 "register_operand" "=f")
2841 (float:DF (match_operand:SI 1 "const_int_operand" "m")))]
2842 "! TARGET_SOFT_FLOAT"
2843 "fldw%F1 %1,%0\;fcnvxf,sgl,dbl %0,%0"
2844 [(set_attr "type" "fpalu")
2845 (set_attr "length" "8")])
2847 (define_insn "floatsidf2"
2848 [(set (match_operand:DF 0 "register_operand" "=f")
2849 (float:DF (match_operand:SI 1 "register_operand" "f")))]
2850 "! TARGET_SOFT_FLOAT"
2851 "fcnvxf,sgl,dbl %1,%0"
2852 [(set_attr "type" "fpalu")
2853 (set_attr "length" "4")])
;; Unsigned SI -> float: zero-extend into a DImode temporary by writing
;; the value into the low word (subword 1) and clearing the high word
;; (subword 0), then use the signed DI -> float conversion.
;; Requires PA 1.1 (fcnvxf on DI operands).
2855 (define_expand "floatunssisf2"
2856 [(set (subreg:SI (match_dup 2) 1)
2857 (match_operand:SI 1 "register_operand" ""))
2858 (set (subreg:SI (match_dup 2) 0)
2860 (set (match_operand:SF 0 "register_operand" "")
2861 (float:SF (match_dup 2)))]
2862 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2863 "operands[2] = gen_reg_rtx (DImode);")
2865 (define_expand "floatunssidf2"
2866 [(set (subreg:SI (match_dup 2) 1)
2867 (match_operand:SI 1 "register_operand" ""))
2868 (set (subreg:SI (match_dup 2) 0)
2870 (set (match_operand:DF 0 "register_operand" "")
2871 (float:DF (match_dup 2)))]
2872 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2873 "operands[2] = gen_reg_rtx (DImode);")
;; Signed DImode -> float conversions; one fcnvxf,dbl instruction each.
2875 (define_insn "floatdisf2"
2876 [(set (match_operand:SF 0 "register_operand" "=f")
2877 (float:SF (match_operand:DI 1 "register_operand" "f")))]
2878 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2879 "fcnvxf,dbl,sgl %1,%0"
2880 [(set_attr "type" "fpalu")
2881 (set_attr "length" "4")])
2883 (define_insn "floatdidf2"
2884 [(set (match_operand:DF 0 "register_operand" "=f")
2885 (float:DF (match_operand:DI 1 "register_operand" "f")))]
2886 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2887 "fcnvxf,dbl,dbl %1,%0"
2888 [(set_attr "type" "fpalu")
2889 (set_attr "length" "4")])
2891 ;; Convert a float to an actual integer.
2892 ;; Truncation is performed as part of the conversion.
;; fcnvfxt converts float to integer with truncation in one
;; instruction; the DImode results require PA 1.1.
2894 (define_insn "fix_truncsfsi2"
2895 [(set (match_operand:SI 0 "register_operand" "=f")
2896 (fix:SI (fix:SF (match_operand:SF 1 "register_operand" "f"))))]
2897 "! TARGET_SOFT_FLOAT"
2898 "fcnvfxt,sgl,sgl %1,%0"
2899 [(set_attr "type" "fpalu")
2900 (set_attr "length" "4")])
2902 (define_insn "fix_truncdfsi2"
2903 [(set (match_operand:SI 0 "register_operand" "=f")
2904 (fix:SI (fix:DF (match_operand:DF 1 "register_operand" "f"))))]
2905 "! TARGET_SOFT_FLOAT"
2906 "fcnvfxt,dbl,sgl %1,%0"
2907 [(set_attr "type" "fpalu")
2908 (set_attr "length" "4")])
2910 (define_insn "fix_truncsfdi2"
2911 [(set (match_operand:DI 0 "register_operand" "=f")
2912 (fix:DI (fix:SF (match_operand:SF 1 "register_operand" "f"))))]
2913 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2914 "fcnvfxt,sgl,dbl %1,%0"
2915 [(set_attr "type" "fpalu")
2916 (set_attr "length" "4")])
2918 (define_insn "fix_truncdfdi2"
2919 [(set (match_operand:DI 0 "register_operand" "=f")
2920 (fix:DI (fix:DF (match_operand:DF 1 "register_operand" "f"))))]
2921 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2922 "fcnvfxt,dbl,dbl %1,%0"
2923 [(set_attr "type" "fpalu")
2924 (set_attr "length" "4")])
2926 ;;- arithmetic instructions
2928 (define_expand "adddi3"
2929 [(set (match_operand:DI 0 "register_operand" "")
2930 (plus:DI (match_operand:DI 1 "register_operand" "")
2931 (match_operand:DI 2 "arith11_operand" "")))]
;; Two-word DImode add: add the low words, then propagate the carry
;; into the high words (addc), or borrow (subb) for a negative
;; immediate addend.
2936 [(set (match_operand:DI 0 "register_operand" "=r")
2937 (plus:DI (match_operand:DI 1 "register_operand" "%r")
2938 (match_operand:DI 2 "arith11_operand" "rI")))]
2942 if (GET_CODE (operands[2]) == CONST_INT)
2944 if (INTVAL (operands[2]) >= 0)
2945 return \"addi %2,%R1,%R0\;addc %1,0,%0\";
2947 return \"addi %2,%R1,%R0\;subb %1,0,%0\";
2950 return \"add %R2,%R1,%R0\;addc %2,%1,%0\";
2952 [(set_attr "type" "binary")
2953 (set_attr "length" "8")])
;; (~a) + b in a single instruction.
2956 [(set (match_operand:SI 0 "register_operand" "=r")
2957 (plus:SI (not:SI (match_operand:SI 1 "register_operand" "r"))
2958 (match_operand:SI 2 "register_operand" "r")))]
2961 [(set_attr "type" "binary")
2962 (set_attr "length" "4")])
2964 ;; define_splits to optimize cases of adding a constant integer
2965 ;; to a register when the constant does not fit in 14 bits.
;; Split 1: when the constant fits in 15 bits, peel off a 14-bit
;; low part (ldo range) and add the remainder first.
2967 [(set (match_operand:SI 0 "register_operand" "")
2968 (plus:SI (match_operand:SI 1 "register_operand" "")
2969 (match_operand:SI 2 "const_int_operand" "")))
2970 (clobber (match_operand:SI 4 "register_operand" ""))]
2971 "! cint_ok_for_move (INTVAL (operands[2]))
2972 && VAL_14_BITS_P (INTVAL (operands[2]) >> 1)"
2973 [(set (match_dup 4) (plus:SI (match_dup 1) (match_dup 2)))
2974 (set (match_dup 0) (plus:SI (match_dup 4) (match_dup 3)))]
2977 int val = INTVAL (operands[2]);
2978 int low = (val < 0) ? -0x2000 : 0x1fff;
2979 int rest = val - low;
2981 operands[2] = GEN_INT (rest);
2982 operands[3] = GEN_INT (low);
;; Split 2: factor the constant as (small constant) * {2,4,8} so the
;; small part can be loaded in one insn and combined with a shift-add,
;; or fall back to loading the negated constant and subtracting.
2986 [(set (match_operand:SI 0 "register_operand" "")
2987 (plus:SI (match_operand:SI 1 "register_operand" "")
2988 (match_operand:SI 2 "const_int_operand" "")))
2989 (clobber (match_operand:SI 4 "register_operand" ""))]
2990 "! cint_ok_for_move (INTVAL (operands[2]))"
2991 [(set (match_dup 4) (match_dup 2))
2992 (set (match_dup 0) (plus:SI (mult:SI (match_dup 4) (match_dup 3))
2996 HOST_WIDE_INT intval = INTVAL (operands[2]);
2998 /* Try dividing the constant by 2, then 4, and finally 8 to see
2999 if we can get a constant which can be loaded into a register
3000 in a single instruction (cint_ok_for_move).
3002 If that fails, try to negate the constant and subtract it
3003 from our input operand. */
3004 if (intval % 2 == 0 && cint_ok_for_move (intval / 2))
3006 operands[2] = GEN_INT (intval / 2);
3007 operands[3] = GEN_INT (2);
3009 else if (intval % 4 == 0 && cint_ok_for_move (intval / 4))
3011 operands[2] = GEN_INT (intval / 4);
3012 operands[3] = GEN_INT (4);
3014 else if (intval % 8 == 0 && cint_ok_for_move (intval / 8))
3016 operands[2] = GEN_INT (intval / 8);
3017 operands[3] = GEN_INT (8);
3019 else if (cint_ok_for_move (-intval))
3021 emit_insn (gen_rtx_SET (VOIDmode, operands[4], GEN_INT (-intval)));
3022 emit_insn (gen_subsi3 (operands[0], operands[1], operands[4]));
;; Basic SImode add: register+register or register+14-bit immediate (J).
3029 (define_insn "addsi3"
3030 [(set (match_operand:SI 0 "register_operand" "=r,r")
3031 (plus:SI (match_operand:SI 1 "register_operand" "%r,r")
3032 (match_operand:SI 2 "arith_operand" "r,J")))]
3037 [(set_attr "type" "binary,binary")
3038 (set_attr "pa_combine_type" "addmove")
3039 (set_attr "length" "4,4")])
3041 ;; Disgusting kludge to work around reload bugs with frame pointer
3042 ;; elimination. Similar to other magic reload patterns in the
3043 ;; indexed memory operations.
;; Three-operand add appearing only during reload; computed in two
;; instructions via the earlyclobber destination.
3045 [(set (match_operand:SI 0 "register_operand" "=&r")
3046 (plus:SI (plus:SI (match_operand:SI 1 "register_operand" "%r")
3047 (match_operand:SI 2 "register_operand" "r"))
3048 (match_operand:SI 3 "const_int_operand" "rL")))]
3049 "reload_in_progress"
3052 if (GET_CODE (operands[3]) == CONST_INT)
3053 return \"ldo %3(%2),%0\;addl %1,%0,%0\";
3055 return \"addl %3,%2,%0\;addl %1,%0,%0\";
3057 [(set_attr "type" "binary")
3058 (set_attr "length" "8")])
3060 (define_expand "subdi3"
3061 [(set (match_operand:DI 0 "register_operand" "")
3062 (minus:DI (match_operand:DI 1 "register_operand" "")
3063 (match_operand:DI 2 "register_operand" "")))]
;; Two-word DImode subtract: low words with sub, then high words with
;; borrow (subb).
3068 [(set (match_operand:DI 0 "register_operand" "=r")
3069 (minus:DI (match_operand:DI 1 "register_operand" "r")
3070 (match_operand:DI 2 "register_operand" "r")))]
3072 "sub %R1,%R2,%R0\;subb %1,%2,%0"
3073 [(set_attr "type" "binary")
3074 (set_attr "length" "8")])
;; SImode subtract; alternative 1 allows an 11-bit immediate minuend.
3076 (define_insn "subsi3"
3077 [(set (match_operand:SI 0 "register_operand" "=r,r")
3078 (minus:SI (match_operand:SI 1 "arith11_operand" "r,I")
3079 (match_operand:SI 2 "register_operand" "r,r")))]
3084 [(set_attr "type" "binary,binary")
3085 (set_attr "length" "4,4")])
3087 ;; Clobbering a "register_operand" instead of a match_scratch
3088 ;; in operand3 of millicode calls avoids spilling %r1 and
3089 ;; produces better code.
3091 ;; The mulsi3 insns set up registers for the millicode call.
;; Arguments are marshalled into %r26/%r25, the result comes back in
;; %r29.  With a PA 1.1 FPU available the multiply is instead done via
;; xmpyu (umulsidi3) and the low word of the DImode result is used.
3092 (define_expand "mulsi3"
3093 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3094 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3095 (parallel [(set (reg:SI 29) (mult:SI (reg:SI 26) (reg:SI 25)))
3096 (clobber (match_dup 3))
3097 (clobber (reg:SI 26))
3098 (clobber (reg:SI 25))
3099 (clobber (reg:SI 31))])
3100 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3104 if (TARGET_PA_11 && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT)
3106 rtx scratch = gen_reg_rtx (DImode);
3107 operands[1] = force_reg (SImode, operands[1]);
3108 operands[2] = force_reg (SImode, operands[2]);
3109 emit_insn (gen_umulsidi3 (scratch, operands[1], operands[2]));
3110 emit_insn (gen_rtx_SET (VOIDmode,
3112 gen_rtx_SUBREG (SImode, scratch, 1)));
3115 operands[3] = gen_reg_rtx (SImode);
;; 32x32 -> 64 unsigned multiply in the FP unit (PA 1.1 xmpyu).
3118 (define_insn "umulsidi3"
3119 [(set (match_operand:DI 0 "nonimmediate_operand" "=f")
3120 (mult:DI (zero_extend:DI (match_operand:SI 1 "nonimmediate_operand" "f"))
3121 (zero_extend:DI (match_operand:SI 2 "nonimmediate_operand" "f"))))]
3122 "TARGET_PA_11 && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT"
3124 [(set_attr "type" "fpmuldbl")
3125 (set_attr "length" "4")])
;; Variant matching a 32-bit constant multiplicand already in an FP reg.
3128 [(set (match_operand:DI 0 "nonimmediate_operand" "=f")
3129 (mult:DI (zero_extend:DI (match_operand:SI 1 "nonimmediate_operand" "f"))
3130 (match_operand:DI 2 "uint32_operand" "f")))]
3131 "TARGET_PA_11 && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT"
3133 [(set_attr "type" "fpmuldbl")
3134 (set_attr "length" "4")])
;; The millicode-call multiply insn proper; output_mul_insn emits the
;; call to $$mulI/$$muloI.  Length depends on how the millicode routine
;; must be reached (direct, space-register-safe, PIC/portable runtime).
3137 [(set (reg:SI 29) (mult:SI (reg:SI 26) (reg:SI 25)))
3138 (clobber (match_operand:SI 0 "register_operand" "=a"))
3139 (clobber (reg:SI 26))
3140 (clobber (reg:SI 25))
3141 (clobber (reg:SI 31))]
3143 "* return output_mul_insn (0, insn);"
3144 [(set_attr "type" "milli")
3145 (set (attr "length")
3147 ;; Target (or stub) within reach
3148 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3150 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3155 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3159 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3160 ;; same as NO_SPACE_REGS code
3161 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3163 (eq (symbol_ref "flag_pic")
3167 ;; Out of range and either PIC or PORTABLE_RUNTIME
3170 ;;; Division and mod.
;; Signed divide via millicode: dividend in %r26, divisor in %r25,
;; quotient returned in %r29.  Division by certain constants is
;; expanded inline by emit_hpdiv_const.
3171 (define_expand "divsi3"
3172 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3173 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3174 (parallel [(set (reg:SI 29) (div:SI (reg:SI 26) (reg:SI 25)))
3175 (clobber (match_dup 3))
3176 (clobber (match_dup 4))
3177 (clobber (reg:SI 26))
3178 (clobber (reg:SI 25))
3179 (clobber (reg:SI 31))])
3180 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3184 operands[3] = gen_reg_rtx (SImode);
3185 operands[4] = gen_reg_rtx (SImode);
3186 if (GET_CODE (operands[2]) == CONST_INT && emit_hpdiv_const (operands, 0))
;; The signed-divide millicode call; length computed as for multiply.
3192 (div:SI (reg:SI 26) (match_operand:SI 0 "div_operand" "")))
3193 (clobber (match_operand:SI 1 "register_operand" "=a"))
3194 (clobber (match_operand:SI 2 "register_operand" "=&r"))
3195 (clobber (reg:SI 26))
3196 (clobber (reg:SI 25))
3197 (clobber (reg:SI 31))]
3200 return output_div_insn (operands, 0, insn);"
3201 [(set_attr "type" "milli")
3202 (set (attr "length")
3204 ;; Target (or stub) within reach
3205 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3207 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3212 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3216 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3217 ;; same as NO_SPACE_REGS code
3218 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3220 (eq (symbol_ref "flag_pic")
3224 ;; Out of range and either PIC or PORTABLE_RUNTIME
;; Unsigned divide via millicode; same register protocol as divsi3.
;; emit_hpdiv_const (second arg 1 = unsigned) handles constant divisors.
3227 (define_expand "udivsi3"
3228 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3229 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3230 (parallel [(set (reg:SI 29) (udiv:SI (reg:SI 26) (reg:SI 25)))
3231 (clobber (match_dup 3))
3232 (clobber (match_dup 4))
3233 (clobber (reg:SI 26))
3234 (clobber (reg:SI 25))
3235 (clobber (reg:SI 31))])
3236 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3240 operands[3] = gen_reg_rtx (SImode);
3241 operands[4] = gen_reg_rtx (SImode);
3242 if (GET_CODE (operands[2]) == CONST_INT && emit_hpdiv_const (operands, 1))
;; The unsigned-divide millicode call.
3248 (udiv:SI (reg:SI 26) (match_operand:SI 0 "div_operand" "")))
3249 (clobber (match_operand:SI 1 "register_operand" "=a"))
3250 (clobber (match_operand:SI 2 "register_operand" "=&r"))
3251 (clobber (reg:SI 26))
3252 (clobber (reg:SI 25))
3253 (clobber (reg:SI 31))]
3256 return output_div_insn (operands, 1, insn);"
3257 [(set_attr "type" "milli")
3258 (set (attr "length")
3260 ;; Target (or stub) within reach
3261 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3263 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3268 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3272 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3273 ;; same as NO_SPACE_REGS code
3274 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3276 (eq (symbol_ref "flag_pic")
3280 ;; Out of range and either PIC or PORTABLE_RUNTIME
;; Signed modulus via millicode; same %r26/%r25 -> %r29 protocol.
3283 (define_expand "modsi3"
3284 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3285 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3286 (parallel [(set (reg:SI 29) (mod:SI (reg:SI 26) (reg:SI 25)))
3287 (clobber (match_dup 3))
3288 (clobber (match_dup 4))
3289 (clobber (reg:SI 26))
3290 (clobber (reg:SI 25))
3291 (clobber (reg:SI 31))])
3292 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3296 operands[4] = gen_reg_rtx (SImode);
3297 operands[3] = gen_reg_rtx (SImode);
;; The signed-remainder millicode call (output_mod_insn, signed variant).
3301 [(set (reg:SI 29) (mod:SI (reg:SI 26) (reg:SI 25)))
3302 (clobber (match_operand:SI 0 "register_operand" "=a"))
3303 (clobber (match_operand:SI 2 "register_operand" "=&r"))
3304 (clobber (reg:SI 26))
3305 (clobber (reg:SI 25))
3306 (clobber (reg:SI 31))]
3309 return output_mod_insn (0, insn);"
3310 [(set_attr "type" "milli")
3311 (set (attr "length")
3313 ;; Target (or stub) within reach
3314 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3316 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3321 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3325 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3326 ;; same as NO_SPACE_REGS code
3327 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3329 (eq (symbol_ref "flag_pic")
3333 ;; Out of range and either PIC or PORTABLE_RUNTIME
;; Unsigned modulus via millicode; same protocol as modsi3.
3336 (define_expand "umodsi3"
3337 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3338 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3339 (parallel [(set (reg:SI 29) (umod:SI (reg:SI 26) (reg:SI 25)))
3340 (clobber (match_dup 3))
3341 (clobber (match_dup 4))
3342 (clobber (reg:SI 26))
3343 (clobber (reg:SI 25))
3344 (clobber (reg:SI 31))])
3345 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3349 operands[4] = gen_reg_rtx (SImode);
3350 operands[3] = gen_reg_rtx (SImode);
;; The unsigned-remainder millicode call (output_mod_insn, unsigned).
3354 [(set (reg:SI 29) (umod:SI (reg:SI 26) (reg:SI 25)))
3355 (clobber (match_operand:SI 0 "register_operand" "=a"))
3356 (clobber (match_operand:SI 2 "register_operand" "=&r"))
3357 (clobber (reg:SI 26))
3358 (clobber (reg:SI 25))
3359 (clobber (reg:SI 31))]
3362 return output_mod_insn (1, insn);"
3363 [(set_attr "type" "milli")
3364 (set (attr "length")
3366 ;; Target (or stub) within reach
3367 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3369 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3374 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3378 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3379 ;; same as NO_SPACE_REGS code
3380 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3382 (eq (symbol_ref "flag_pic")
3386 ;; Out of range and either PIC or PORTABLE_RUNTIME
3389 ;;- and instructions
3390 ;; We define DImode `and` so with DImode `not` we can get
3391 ;; DImode `andn`. Other combinations are possible.
3393 (define_expand "anddi3"
3394 [(set (match_operand:DI 0 "register_operand" "")
3395 (and:DI (match_operand:DI 1 "arith_double_operand" "")
3396 (match_operand:DI 2 "arith_double_operand" "")))]
3400 if (! register_operand (operands[1], DImode)
3401 || ! register_operand (operands[2], DImode))
3402 /* Let GCC break this into word-at-a-time operations. */
3407 [(set (match_operand:DI 0 "register_operand" "=r")
3408 (and:DI (match_operand:DI 1 "register_operand" "%r")
3409 (match_operand:DI 2 "register_operand" "r")))]
3411 "and %1,%2,%0\;and %R1,%R2,%R0"
3412 [(set_attr "type" "binary")
3413 (set_attr "length" "8")])
3415 ; The ? for op1 makes reload prefer zdepi instead of loading a huge
3416 ; constant with ldil;ldo.
3417 (define_insn "andsi3"
3418 [(set (match_operand:SI 0 "register_operand" "=r,r")
3419 (and:SI (match_operand:SI 1 "register_operand" "%?r,0")
3420 (match_operand:SI 2 "and_operand" "rO,P")))]
3422 "* return output_and (operands); "
3423 [(set_attr "type" "binary,shift")
3424 (set_attr "length" "4,4")])
3427 [(set (match_operand:DI 0 "register_operand" "=r")
3428 (and:DI (not:DI (match_operand:DI 1 "register_operand" "r"))
3429 (match_operand:DI 2 "register_operand" "r")))]
3431 "andcm %2,%1,%0\;andcm %R2,%R1,%R0"
3432 [(set_attr "type" "binary")
3433 (set_attr "length" "8")])
3436 [(set (match_operand:SI 0 "register_operand" "=r")
3437 (and:SI (not:SI (match_operand:SI 1 "register_operand" "r"))
3438 (match_operand:SI 2 "register_operand" "r")))]
3441 [(set_attr "type" "binary")
3442 (set_attr "length" "4")])
3444 (define_expand "iordi3"
3445 [(set (match_operand:DI 0 "register_operand" "")
3446 (ior:DI (match_operand:DI 1 "arith_double_operand" "")
3447 (match_operand:DI 2 "arith_double_operand" "")))]
3451 if (! register_operand (operands[1], DImode)
3452 || ! register_operand (operands[2], DImode))
3453 /* Let GCC break this into word-at-a-time operations. */
3458 [(set (match_operand:DI 0 "register_operand" "=r")
3459 (ior:DI (match_operand:DI 1 "register_operand" "%r")
3460 (match_operand:DI 2 "register_operand" "r")))]
3462 "or %1,%2,%0\;or %R1,%R2,%R0"
3463 [(set_attr "type" "binary")
3464 (set_attr "length" "8")])
3466 ;; Need a define_expand because we've run out of CONST_OK... characters.
3467 (define_expand "iorsi3"
3468 [(set (match_operand:SI 0 "register_operand" "")
3469 (ior:SI (match_operand:SI 1 "register_operand" "")
3470 (match_operand:SI 2 "arith32_operand" "")))]
3474 if (! (ior_operand (operands[2], SImode)
3475 || register_operand (operands[2], SImode)))
3476 operands[2] = force_reg (SImode, operands[2]);
3480 [(set (match_operand:SI 0 "register_operand" "=r,r")
3481 (ior:SI (match_operand:SI 1 "register_operand" "0,0")
3482 (match_operand:SI 2 "ior_operand" "M,i")))]
3484 "* return output_ior (operands); "
3485 [(set_attr "type" "binary,shift")
3486 (set_attr "length" "4,4")])
3489 [(set (match_operand:SI 0 "register_operand" "=r")
3490 (ior:SI (match_operand:SI 1 "register_operand" "%r")
3491 (match_operand:SI 2 "register_operand" "r")))]
3494 [(set_attr "type" "binary")
3495 (set_attr "length" "4")])
3497 (define_expand "xordi3"
3498 [(set (match_operand:DI 0 "register_operand" "")
3499 (xor:DI (match_operand:DI 1 "arith_double_operand" "")
3500 (match_operand:DI 2 "arith_double_operand" "")))]
3504 if (! register_operand (operands[1], DImode)
3505 || ! register_operand (operands[2], DImode))
3506 /* Let GCC break this into word-at-a-time operations. */
3511 [(set (match_operand:DI 0 "register_operand" "=r")
3512 (xor:DI (match_operand:DI 1 "register_operand" "%r")
3513 (match_operand:DI 2 "register_operand" "r")))]
3515 "xor %1,%2,%0\;xor %R1,%R2,%R0"
3516 [(set_attr "type" "binary")
3517 (set_attr "length" "8")])
3519 (define_insn "xorsi3"
3520 [(set (match_operand:SI 0 "register_operand" "=r")
3521 (xor:SI (match_operand:SI 1 "register_operand" "%r")
3522 (match_operand:SI 2 "register_operand" "r")))]
3525 [(set_attr "type" "binary")
3526 (set_attr "length" "4")])
3528 (define_insn "negdi2"
3529 [(set (match_operand:DI 0 "register_operand" "=r")
3530 (neg:DI (match_operand:DI 1 "register_operand" "r")))]
3532 "sub %%r0,%R1,%R0\;subb %%r0,%1,%0"
3533 [(set_attr "type" "unary")
3534 (set_attr "length" "8")])
3536 (define_insn "negsi2"
3537 [(set (match_operand:SI 0 "register_operand" "=r")
3538 (neg:SI (match_operand:SI 1 "register_operand" "r")))]
3541 [(set_attr "type" "unary")
3542 (set_attr "length" "4")])
3544 (define_expand "one_cmpldi2"
3545 [(set (match_operand:DI 0 "register_operand" "")
3546 (not:DI (match_operand:DI 1 "arith_double_operand" "")))]
3550 if (! register_operand (operands[1], DImode))
3555 [(set (match_operand:DI 0 "register_operand" "=r")
3556 (not:DI (match_operand:DI 1 "register_operand" "r")))]
3558 "uaddcm %%r0,%1,%0\;uaddcm %%r0,%R1,%R0"
3559 [(set_attr "type" "unary")
3560 (set_attr "length" "8")])
3562 (define_insn "one_cmplsi2"
3563 [(set (match_operand:SI 0 "register_operand" "=r")
3564 (not:SI (match_operand:SI 1 "register_operand" "r")))]
3567 [(set_attr "type" "unary")
3568 (set_attr "length" "4")])
3570 ;; Floating point arithmetic instructions.
3572 (define_insn "adddf3"
3573 [(set (match_operand:DF 0 "register_operand" "=f")
3574 (plus:DF (match_operand:DF 1 "register_operand" "f")
3575 (match_operand:DF 2 "register_operand" "f")))]
3576 "! TARGET_SOFT_FLOAT"
3578 [(set_attr "type" "fpalu")
3579 (set_attr "pa_combine_type" "faddsub")
3580 (set_attr "length" "4")])
3582 (define_insn "addsf3"
3583 [(set (match_operand:SF 0 "register_operand" "=f")
3584 (plus:SF (match_operand:SF 1 "register_operand" "f")
3585 (match_operand:SF 2 "register_operand" "f")))]
3586 "! TARGET_SOFT_FLOAT"
3588 [(set_attr "type" "fpalu")
3589 (set_attr "pa_combine_type" "faddsub")
3590 (set_attr "length" "4")])
3592 (define_insn "subdf3"
3593 [(set (match_operand:DF 0 "register_operand" "=f")
3594 (minus:DF (match_operand:DF 1 "register_operand" "f")
3595 (match_operand:DF 2 "register_operand" "f")))]
3596 "! TARGET_SOFT_FLOAT"
3598 [(set_attr "type" "fpalu")
3599 (set_attr "pa_combine_type" "faddsub")
3600 (set_attr "length" "4")])
3602 (define_insn "subsf3"
3603 [(set (match_operand:SF 0 "register_operand" "=f")
3604 (minus:SF (match_operand:SF 1 "register_operand" "f")
3605 (match_operand:SF 2 "register_operand" "f")))]
3606 "! TARGET_SOFT_FLOAT"
3608 [(set_attr "type" "fpalu")
3609 (set_attr "pa_combine_type" "faddsub")
3610 (set_attr "length" "4")])
3612 (define_insn "muldf3"
3613 [(set (match_operand:DF 0 "register_operand" "=f")
3614 (mult:DF (match_operand:DF 1 "register_operand" "f")
3615 (match_operand:DF 2 "register_operand" "f")))]
3616 "! TARGET_SOFT_FLOAT"
3618 [(set_attr "type" "fpmuldbl")
3619 (set_attr "pa_combine_type" "fmpy")
3620 (set_attr "length" "4")])
3622 (define_insn "mulsf3"
3623 [(set (match_operand:SF 0 "register_operand" "=f")
3624 (mult:SF (match_operand:SF 1 "register_operand" "f")
3625 (match_operand:SF 2 "register_operand" "f")))]
3626 "! TARGET_SOFT_FLOAT"
3628 [(set_attr "type" "fpmulsgl")
3629 (set_attr "pa_combine_type" "fmpy")
3630 (set_attr "length" "4")])
3632 (define_insn "divdf3"
3633 [(set (match_operand:DF 0 "register_operand" "=f")
3634 (div:DF (match_operand:DF 1 "register_operand" "f")
3635 (match_operand:DF 2 "register_operand" "f")))]
3636 "! TARGET_SOFT_FLOAT"
3638 [(set_attr "type" "fpdivdbl")
3639 (set_attr "length" "4")])
3641 (define_insn "divsf3"
3642 [(set (match_operand:SF 0 "register_operand" "=f")
3643 (div:SF (match_operand:SF 1 "register_operand" "f")
3644 (match_operand:SF 2 "register_operand" "f")))]
3645 "! TARGET_SOFT_FLOAT"
3647 [(set_attr "type" "fpdivsgl")
3648 (set_attr "length" "4")])
3650 (define_insn "negdf2"
3651 [(set (match_operand:DF 0 "register_operand" "=f")
3652 (neg:DF (match_operand:DF 1 "register_operand" "f")))]
3653 "! TARGET_SOFT_FLOAT"
3657 return \"fneg,dbl %1,%0\";
3659 return \"fsub,dbl %%fr0,%1,%0\";
3661 [(set_attr "type" "fpalu")
3662 (set_attr "length" "4")])
3664 (define_insn "negsf2"
3665 [(set (match_operand:SF 0 "register_operand" "=f")
3666 (neg:SF (match_operand:SF 1 "register_operand" "f")))]
3667 "! TARGET_SOFT_FLOAT"
3671 return \"fneg,sgl %1,%0\";
3673 return \"fsub,sgl %%fr0,%1,%0\";
3675 [(set_attr "type" "fpalu")
3676 (set_attr "length" "4")])
3678 (define_insn "absdf2"
3679 [(set (match_operand:DF 0 "register_operand" "=f")
3680 (abs:DF (match_operand:DF 1 "register_operand" "f")))]
3681 "! TARGET_SOFT_FLOAT"
3683 [(set_attr "type" "fpalu")
3684 (set_attr "length" "4")])
3686 (define_insn "abssf2"
3687 [(set (match_operand:SF 0 "register_operand" "=f")
3688 (abs:SF (match_operand:SF 1 "register_operand" "f")))]
3689 "! TARGET_SOFT_FLOAT"
3691 [(set_attr "type" "fpalu")
3692 (set_attr "length" "4")])
3694 (define_insn "sqrtdf2"
3695 [(set (match_operand:DF 0 "register_operand" "=f")
3696 (sqrt:DF (match_operand:DF 1 "register_operand" "f")))]
3697 "! TARGET_SOFT_FLOAT"
3699 [(set_attr "type" "fpsqrtdbl")
3700 (set_attr "length" "4")])
3702 (define_insn "sqrtsf2"
3703 [(set (match_operand:SF 0 "register_operand" "=f")
3704 (sqrt:SF (match_operand:SF 1 "register_operand" "f")))]
3705 "! TARGET_SOFT_FLOAT"
3707 [(set_attr "type" "fpsqrtsgl")
3708 (set_attr "length" "4")])
3710 ;; PA 2.0 floating point instructions
3714 [(set (match_operand:DF 0 "register_operand" "=f")
3715 (plus:DF (mult:DF (match_operand:DF 1 "register_operand" "f")
3716 (match_operand:DF 2 "register_operand" "f"))
3717 (match_operand:DF 3 "register_operand" "f")))]
3718 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3719 "fmpyfadd,dbl %1,%2,%3,%0"
3720 [(set_attr "type" "fpmuldbl")
3721 (set_attr "length" "4")])
3724 [(set (match_operand:DF 0 "register_operand" "=f")
3725 (plus:DF (match_operand:DF 1 "register_operand" "f")
3726 (mult:DF (match_operand:DF 2 "register_operand" "f")
3727 (match_operand:DF 3 "register_operand" "f"))))]
3728 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3729 "fmpyfadd,dbl %2,%3,%1,%0"
3730 [(set_attr "type" "fpmuldbl")
3731 (set_attr "length" "4")])
3734 [(set (match_operand:SF 0 "register_operand" "=f")
3735 (plus:SF (mult:SF (match_operand:SF 1 "register_operand" "f")
3736 (match_operand:SF 2 "register_operand" "f"))
3737 (match_operand:SF 3 "register_operand" "f")))]
3738 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3739 "fmpyfadd,sgl %1,%2,%3,%0"
3740 [(set_attr "type" "fpmulsgl")
3741 (set_attr "length" "4")])
3744 [(set (match_operand:SF 0 "register_operand" "=f")
3745 (plus:SF (match_operand:SF 1 "register_operand" "f")
3746 (mult:SF (match_operand:SF 2 "register_operand" "f")
3747 (match_operand:SF 3 "register_operand" "f"))))]
3748 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3749 "fmpyfadd,sgl %2,%3,%1,%0"
3750 [(set_attr "type" "fpmulsgl")
3751 (set_attr "length" "4")])
3753 ; fmpynfadd patterns
3755 [(set (match_operand:DF 0 "register_operand" "=f")
3756 (minus:DF (match_operand:DF 1 "register_operand" "f")
3757 (mult:DF (match_operand:DF 2 "register_operand" "f")
3758 (match_operand:DF 3 "register_operand" "f"))))]
3759 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3760 "fmpynfadd,dbl %2,%3,%1,%0"
3761 [(set_attr "type" "fpmuldbl")
3762 (set_attr "length" "4")])
3765 [(set (match_operand:SF 0 "register_operand" "=f")
3766 (minus:SF (match_operand:SF 1 "register_operand" "f")
3767 (mult:SF (match_operand:SF 2 "register_operand" "f")
3768 (match_operand:SF 3 "register_operand" "f"))))]
3769 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3770 "fmpynfadd,sgl %2,%3,%1,%0"
3771 [(set_attr "type" "fpmulsgl")
3772 (set_attr "length" "4")])
3776 [(set (match_operand:DF 0 "register_operand" "=f")
3777 (neg:DF (abs:DF (match_operand:DF 1 "register_operand" "f"))))]
3778 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3780 [(set_attr "type" "fpalu")
3781 (set_attr "length" "4")])
3784 [(set (match_operand:SF 0 "register_operand" "=f")
3785 (neg:SF (abs:SF (match_operand:SF 1 "register_operand" "f"))))]
3786 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3788 [(set_attr "type" "fpalu")
3789 (set_attr "length" "4")])
3792 ;;- Shift instructions
3794 ;; Optimized special case of shifting.
3797 [(set (match_operand:SI 0 "register_operand" "=r")
3798 (lshiftrt:SI (match_operand:SI 1 "memory_operand" "m")
3802 [(set_attr "type" "load")
3803 (set_attr "length" "4")])
3806 [(set (match_operand:SI 0 "register_operand" "=r")
3807 (lshiftrt:SI (match_operand:SI 1 "memory_operand" "m")
3811 [(set_attr "type" "load")
3812 (set_attr "length" "4")])
3815 [(set (match_operand:SI 0 "register_operand" "=r")
3816 (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r")
3817 (match_operand:SI 3 "shadd_operand" ""))
3818 (match_operand:SI 1 "register_operand" "r")))]
3820 "sh%O3addl %2,%1,%0"
3821 [(set_attr "type" "binary")
3822 (set_attr "length" "4")])
3824 ;; This variant of the above insn can occur if the first operand
3825 ;; is the frame pointer. This is a kludge, but there doesn't
3826 ;; seem to be a way around it. Only recognize it while reloading.
3827 ;; Note how operand 3 uses a predicate of "const_int_operand", but
3828 ;; has constraints allowing a register. I don't know how this works,
3829 ;; but it somehow makes sure that out-of-range constants are placed
3830 ;; in a register which somehow magically is a "const_int_operand".
3831 ;; (this was stolen from alpha.md, I'm not going to try and change it.
3834 [(set (match_operand:SI 0 "register_operand" "=&r,r")
3835 (plus:SI (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r,r")
3836 (match_operand:SI 4 "shadd_operand" ""))
3837 (match_operand:SI 1 "register_operand" "r,r"))
3838 (match_operand:SI 3 "const_int_operand" "r,J")))]
3839 "reload_in_progress"
3841 sh%O4addl %2,%1,%0\;addl %3,%0,%0
3842 sh%O4addl %2,%1,%0\;ldo %3(%0),%0"
3843 [(set_attr "type" "multi")
3844 (set_attr "length" "8")])
3846 ;; This anonymous pattern and splitter wins because it reduces the latency
3847 ;; of the shadd sequence without increasing the latency of the shift.
3849 ;; We want to make sure and split up the operations for the scheduler since
3850 ;; these instructions can (and should) schedule independently.
3852 ;; It would be clearer if combine used the same operator for both expressions,
3853 ;; it's somewhat confusing to have a mult in ine operation and an ashift
3856 ;; If this pattern is not split before register allocation, then we must expose
3857 ;; the fact that operand 4 is set before operands 1, 2 and 3 have been read.
3859 [(set (match_operand:SI 0 "register_operand" "=r")
3860 (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r")
3861 (match_operand:SI 3 "shadd_operand" ""))
3862 (match_operand:SI 1 "register_operand" "r")))
3863 (set (match_operand:SI 4 "register_operand" "=&r")
3864 (ashift:SI (match_dup 2)
3865 (match_operand:SI 5 "const_int_operand" "i")))]
3866 "INTVAL (operands[5]) == exact_log2 (INTVAL (operands[3]))"
3868 [(set_attr "type" "binary")
3869 (set_attr "length" "8")])
3872 [(set (match_operand:SI 0 "register_operand" "=r")
3873 (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r")
3874 (match_operand:SI 3 "shadd_operand" ""))
3875 (match_operand:SI 1 "register_operand" "r")))
3876 (set (match_operand:SI 4 "register_operand" "=&r")
3877 (ashift:SI (match_dup 2)
3878 (match_operand:SI 5 "const_int_operand" "i")))]
3879 "INTVAL (operands[5]) == exact_log2 (INTVAL (operands[3]))"
3880 [(set (match_dup 4) (ashift:SI (match_dup 2) (match_dup 5)))
3881 (set (match_dup 0) (plus:SI (mult:SI (match_dup 2) (match_dup 3))
3885 (define_expand "ashlsi3"
3886 [(set (match_operand:SI 0 "register_operand" "")
3887 (ashift:SI (match_operand:SI 1 "lhs_lshift_operand" "")
3888 (match_operand:SI 2 "arith32_operand" "")))]
3892 if (GET_CODE (operands[2]) != CONST_INT)
3894 rtx temp = gen_reg_rtx (SImode);
3895 emit_insn (gen_subsi3 (temp, GEN_INT (31), operands[2]));
3896 if (GET_CODE (operands[1]) == CONST_INT)
3897 emit_insn (gen_zvdep_imm32 (operands[0], operands[1], temp));
3899 emit_insn (gen_zvdep32 (operands[0], operands[1], temp));
3902 /* Make sure both inputs are not constants,
3903 there are no patterns for that. */
3904 operands[1] = force_reg (SImode, operands[1]);
3908 [(set (match_operand:SI 0 "register_operand" "=r")
3909 (ashift:SI (match_operand:SI 1 "register_operand" "r")
3910 (match_operand:SI 2 "const_int_operand" "n")))]
3912 "zdep %1,%P2,%L2,%0"
3913 [(set_attr "type" "shift")
3914 (set_attr "length" "4")])
3916 ; Match cases of op1 a CONST_INT here that zvdep_imm32 doesn't handle.
3917 ; Doing it like this makes slightly better code since reload can
3918 ; replace a register with a known value in range -16..15 with a
3919 ; constant. Ideally, we would like to merge zvdep32 and zvdep_imm32,
3920 ; but since we have no more CONST_OK... characters, that is not
3922 (define_insn "zvdep32"
3923 [(set (match_operand:SI 0 "register_operand" "=r,r")
3924 (ashift:SI (match_operand:SI 1 "arith5_operand" "r,L")
3925 (minus:SI (const_int 31)
3926 (match_operand:SI 2 "register_operand" "q,q"))))]
3931 [(set_attr "type" "shift,shift")
3932 (set_attr "length" "4,4")])
3934 (define_insn "zvdep_imm32"
3935 [(set (match_operand:SI 0 "register_operand" "=r")
3936 (ashift:SI (match_operand:SI 1 "lhs_lshift_cint_operand" "")
3937 (minus:SI (const_int 31)
3938 (match_operand:SI 2 "register_operand" "q"))))]
3942 int x = INTVAL (operands[1]);
3943 operands[2] = GEN_INT (4 + exact_log2 ((x >> 4) + 1));
3944 operands[1] = GEN_INT ((x & 0xf) - 0x10);
3945 return \"zvdepi %1,%2,%0\";
3947 [(set_attr "type" "shift")
3948 (set_attr "length" "4")])
3950 (define_insn "vdepi_ior"
3951 [(set (match_operand:SI 0 "register_operand" "=r")
3952 (ior:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
3953 (minus:SI (const_int 31)
3954 (match_operand:SI 2 "register_operand" "q")))
3955 (match_operand:SI 3 "register_operand" "0")))]
3956 ; accept ...0001...1, can this be generalized?
3957 "exact_log2 (INTVAL (operands[1]) + 1) >= 0"
3960 int x = INTVAL (operands[1]);
3961 operands[2] = GEN_INT (exact_log2 (x + 1));
3962 return \"vdepi -1,%2,%0\";
3964 [(set_attr "type" "shift")
3965 (set_attr "length" "4")])
3967 (define_insn "vdepi_and"
3968 [(set (match_operand:SI 0 "register_operand" "=r")
3969 (and:SI (rotate:SI (match_operand:SI 1 "const_int_operand" "")
3970 (minus:SI (const_int 31)
3971 (match_operand:SI 2 "register_operand" "q")))
3972 (match_operand:SI 3 "register_operand" "0")))]
3973 ; this can be generalized...!
3974 "INTVAL (operands[1]) == -2"
3977 int x = INTVAL (operands[1]);
3978 operands[2] = GEN_INT (exact_log2 ((~x) + 1));
3979 return \"vdepi 0,%2,%0\";
3981 [(set_attr "type" "shift")
3982 (set_attr "length" "4")])
3984 (define_expand "ashrsi3"
3985 [(set (match_operand:SI 0 "register_operand" "")
3986 (ashiftrt:SI (match_operand:SI 1 "register_operand" "")
3987 (match_operand:SI 2 "arith32_operand" "")))]
3991 if (GET_CODE (operands[2]) != CONST_INT)
3993 rtx temp = gen_reg_rtx (SImode);
3994 emit_insn (gen_subsi3 (temp, GEN_INT (31), operands[2]));
3995 emit_insn (gen_vextrs32 (operands[0], operands[1], temp));
4001 [(set (match_operand:SI 0 "register_operand" "=r")
4002 (ashiftrt:SI (match_operand:SI 1 "register_operand" "r")
4003 (match_operand:SI 2 "const_int_operand" "n")))]
4005 "extrs %1,%P2,%L2,%0"
4006 [(set_attr "type" "shift")
4007 (set_attr "length" "4")])
4009 (define_insn "vextrs32"
4010 [(set (match_operand:SI 0 "register_operand" "=r")
4011 (ashiftrt:SI (match_operand:SI 1 "register_operand" "r")
4012 (minus:SI (const_int 31)
4013 (match_operand:SI 2 "register_operand" "q"))))]
4016 [(set_attr "type" "shift")
4017 (set_attr "length" "4")])
4019 (define_insn "lshrsi3"
4020 [(set (match_operand:SI 0 "register_operand" "=r,r")
4021 (lshiftrt:SI (match_operand:SI 1 "register_operand" "r,r")
4022 (match_operand:SI 2 "arith32_operand" "q,n")))]
4026 extru %1,%P2,%L2,%0"
4027 [(set_attr "type" "shift")
4028 (set_attr "length" "4")])
4030 (define_insn "rotrsi3"
4031 [(set (match_operand:SI 0 "register_operand" "=r,r")
4032 (rotatert:SI (match_operand:SI 1 "register_operand" "r,r")
4033 (match_operand:SI 2 "arith32_operand" "q,n")))]
4037 if (GET_CODE (operands[2]) == CONST_INT)
4039 operands[2] = GEN_INT (INTVAL (operands[2]) & 31);
4040 return \"shd %1,%1,%2,%0\";
4043 return \"vshd %1,%1,%0\";
4045 [(set_attr "type" "shift")
4046 (set_attr "length" "4")])
4048 (define_expand "rotlsi3"
4049 [(set (match_operand:SI 0 "register_operand" "")
4050 (rotate:SI (match_operand:SI 1 "register_operand" "")
4051 (match_operand:SI 2 "arith32_operand" "")))]
4055 if (GET_CODE (operands[2]) != CONST_INT)
4057 rtx temp = gen_reg_rtx (SImode);
4058 emit_insn (gen_subsi3 (temp, GEN_INT (32), operands[2]));
4059 emit_insn (gen_rotrsi3 (operands[0], operands[1], temp));
4062 /* Else expand normally. */
4066 [(set (match_operand:SI 0 "register_operand" "=r")
4067 (rotate:SI (match_operand:SI 1 "register_operand" "r")
4068 (match_operand:SI 2 "const_int_operand" "n")))]
4072 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) & 31);
4073 return \"shd %1,%1,%2,%0\";
4075 [(set_attr "type" "shift")
4076 (set_attr "length" "4")])
4079 [(set (match_operand:SI 0 "register_operand" "=r")
4080 (match_operator:SI 5 "plus_xor_ior_operator"
4081 [(ashift:SI (match_operand:SI 1 "register_operand" "r")
4082 (match_operand:SI 3 "const_int_operand" "n"))
4083 (lshiftrt:SI (match_operand:SI 2 "register_operand" "r")
4084 (match_operand:SI 4 "const_int_operand" "n"))]))]
4085 "INTVAL (operands[3]) + INTVAL (operands[4]) == 32"
4087 [(set_attr "type" "shift")
4088 (set_attr "length" "4")])
4091 [(set (match_operand:SI 0 "register_operand" "=r")
4092 (match_operator:SI 5 "plus_xor_ior_operator"
4093 [(lshiftrt:SI (match_operand:SI 2 "register_operand" "r")
4094 (match_operand:SI 4 "const_int_operand" "n"))
4095 (ashift:SI (match_operand:SI 1 "register_operand" "r")
4096 (match_operand:SI 3 "const_int_operand" "n"))]))]
4097 "INTVAL (operands[3]) + INTVAL (operands[4]) == 32"
4099 [(set_attr "type" "shift")
4100 (set_attr "length" "4")])
4103 [(set (match_operand:SI 0 "register_operand" "=r")
4104 (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "r")
4105 (match_operand:SI 2 "const_int_operand" ""))
4106 (match_operand:SI 3 "const_int_operand" "")))]
4107 "exact_log2 (1 + (INTVAL (operands[3]) >> (INTVAL (operands[2]) & 31))) >= 0"
4110 int cnt = INTVAL (operands[2]) & 31;
4111 operands[3] = GEN_INT (exact_log2 (1 + (INTVAL (operands[3]) >> cnt)));
4112 operands[2] = GEN_INT (31 - cnt);
4113 return \"zdep %1,%2,%3,%0\";
4115 [(set_attr "type" "shift")
4116 (set_attr "length" "4")])
4118 ;; Unconditional and other jump instructions.
4120 (define_insn "return"
4122 "hppa_can_use_return_insn_p ()"
4124 [(set_attr "type" "branch")
4125 (set_attr "length" "4")])
4127 ;; Use a different pattern for functions which have non-trivial
4128 ;; epilogues so as not to confuse jump and reorg.
4129 (define_insn "return_internal"
4134 [(set_attr "type" "branch")
4135 (set_attr "length" "4")])
4137 (define_expand "prologue"
4140 "hppa_expand_prologue ();DONE;")
4142 (define_expand "epilogue"
4147 /* Try to use the trivial return first. Else use the full
4149 if (hppa_can_use_return_insn_p ())
4150 emit_jump_insn (gen_return ());
4153 hppa_expand_epilogue ();
4154 emit_jump_insn (gen_return_internal ());
4159 ;; Special because we use the value placed in %r2 by the bl instruction
4160 ;; from within its delay slot to set the value for the 2nd parameter to
4162 (define_insn "call_profiler"
4163 [(unspec_volatile [(const_int 0)] 0)
4164 (use (match_operand:SI 0 "const_int_operand" ""))]
4166 "bl _mcount,%%r2\;ldo %0(%%r2),%%r25"
4167 [(set_attr "type" "multi")
4168 (set_attr "length" "8")])
4170 (define_insn "blockage"
4171 [(unspec_volatile [(const_int 2)] 0)]
4174 [(set_attr "length" "0")])
4177 [(set (pc) (label_ref (match_operand 0 "" "")))]
4181 extern int optimize;
4183 if (GET_MODE (insn) == SImode)
4186 /* An unconditional branch which can reach its target. */
4187 if (get_attr_length (insn) != 24
4188 && get_attr_length (insn) != 16)
4191 /* An unconditional branch which can not reach its target.
4193 We need to be able to use %r1 as a scratch register; however,
4194 we can never be sure whether or not it's got a live value in
4195 it. Therefore, we must restore its original value after the
4198 To make matters worse, we don't have a stack slot which we
4199 can always clobber. sp-12/sp-16 shouldn't ever have a live
4200 value during a non-optimizing compilation, so we use those
4201 slots for now. We don't support very long branches when
4202 optimizing -- they should be quite rare when optimizing.
4204 Really the way to go long term is a register scavenger; goto
4205 the target of the jump and find a register which we can use
4206 as a scratch to hold the value in %r1. */
4208 /* We don't know how to register scavenge yet. */
4212 /* First store %r1 into the stack. */
4213 output_asm_insn (\"stw %%r1,-16(%%r30)\", operands);
4215 /* Now load the target address into %r1 and do an indirect jump
4216 to the value specified in %r1. Be careful to generate PIC
4221 xoperands[0] = operands[0];
4222 xoperands[1] = gen_label_rtx ();
4224 output_asm_insn (\"bl .+8,%%r1\\n\\taddil L'%l0-%l1,%%r1\", xoperands);
4225 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
4226 CODE_LABEL_NUMBER (xoperands[1]));
4227 output_asm_insn (\"ldo R'%l0-%l1(%%r1),%%r1\\n\\tbv %%r0(%%r1)\",
4231 output_asm_insn (\"ldil L'%l0,%%r1\\n\\tbe R'%l0(%%sr4,%%r1)\", operands);;
4233 /* And restore the value of %r1 in the delay slot. We're not optimizing,
4234 so we know nothing else can be in the delay slot. */
4235 return \"ldw -16(%%r30),%%r1\";
4237 [(set_attr "type" "uncond_branch")
4238 (set_attr "pa_combine_type" "uncond_branch")
4239 (set (attr "length")
4240 (cond [(eq (symbol_ref "jump_in_call_delay (insn)") (const_int 1))
4241 (if_then_else (lt (abs (minus (match_dup 0)
4242 (plus (pc) (const_int 8))))
4246 (ge (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
4248 (if_then_else (eq (symbol_ref "flag_pic") (const_int 0))
4253 ;; Subroutines of "casesi".
4254 ;; operand 0 is index
4255 ;; operand 1 is the minimum bound
4256 ;; operand 2 is the maximum bound - minimum bound + 1
4257 ;; operand 3 is CODE_LABEL for the table;
4258 ;; operand 4 is the CODE_LABEL to go to if index out of range.
4260 (define_expand "casesi"
4261 [(match_operand:SI 0 "general_operand" "")
4262 (match_operand:SI 1 "const_int_operand" "")
4263 (match_operand:SI 2 "const_int_operand" "")
4264 (match_operand 3 "" "")
4265 (match_operand 4 "" "")]
4269 if (GET_CODE (operands[0]) != REG)
4270 operands[0] = force_reg (SImode, operands[0]);
4272 if (operands[1] != const0_rtx)
4274 rtx reg = gen_reg_rtx (SImode);
4276 operands[1] = GEN_INT (-INTVAL (operands[1]));
4277 if (!INT_14_BITS (operands[1]))
4278 operands[1] = force_reg (SImode, operands[1]);
4279 emit_insn (gen_addsi3 (reg, operands[0], operands[1]));
4284 if (!INT_5_BITS (operands[2]))
4285 operands[2] = force_reg (SImode, operands[2]);
4287 emit_insn (gen_cmpsi (operands[0], operands[2]));
4288 emit_jump_insn (gen_bgtu (operands[4]));
4289 if (TARGET_BIG_SWITCH)
4291 rtx temp = gen_reg_rtx (SImode);
4292 emit_move_insn (temp, gen_rtx_PLUS (SImode, operands[0], operands[0]));
4295 emit_jump_insn (gen_casesi0 (operands[0], operands[3]));
4299 (define_insn "casesi0"
4301 (mem:SI (plus:SI (pc)
4302 (match_operand:SI 0 "register_operand" "r")))
4303 (label_ref (match_operand 1 "" ""))))]
4306 [(set_attr "type" "multi")
4307 (set_attr "length" "8")])
4309 ;; Need nops for the calls because execution is supposed to continue
4310 ;; past; we don't want to nullify an instruction that we need.
4311 ;;- jump to subroutine
4313 (define_expand "call"
4314 [(parallel [(call (match_operand:SI 0 "" "")
4315 (match_operand 1 "" ""))
4316 (clobber (reg:SI 2))])]
4323 if (TARGET_PORTABLE_RUNTIME)
4324 op = force_reg (SImode, XEXP (operands[0], 0));
4326 op = XEXP (operands[0], 0);
4328 /* Use two different patterns for calls to explicitly named functions
4329 and calls through function pointers. This is necessary as these two
4330 types of calls use different calling conventions, and CSE might try
4331 to change the named call into an indirect call in some cases (using
4332 two patterns keeps CSE from performing this optimization). */
4333 if (GET_CODE (op) == SYMBOL_REF)
4334 call_insn = emit_call_insn (gen_call_internal_symref (op, operands[1]));
4337 rtx tmpreg = gen_rtx_REG (word_mode, 22);
4338 emit_move_insn (tmpreg, force_reg (word_mode, op));
4339 call_insn = emit_call_insn (gen_call_internal_reg (operands[1]));
4344 use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), pic_offset_table_rtx);
4346 /* After each call we must restore the PIC register, even if it
4347 doesn't appear to be used.
4349 This will set regs_ever_live for the callee saved register we
4350 stored the PIC register in. */
4351 emit_move_insn (pic_offset_table_rtx,
4352 gen_rtx_REG (word_mode, PIC_OFFSET_TABLE_REGNUM_SAVED));
4353 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
4355 /* Gross. We have to keep the scheduler from moving the restore
4356 of the PIC register away from the call. SCHED_GROUP_P is
4357 supposed to do this, but for some reason the compiler will
4358 go into an infinite loop when we use that.
4360 This method (blockage insn) may make worse code (then again
4361 it may not since calls are nearly blockages anyway), but at
4362 least it should work. */
4363 emit_insn (gen_blockage ());
4368 (define_insn "call_internal_symref"
4369 [(call (mem:SI (match_operand:SI 0 "call_operand_address" ""))
4370 (match_operand 1 "" "i"))
4371 (clobber (reg:SI 2))
4372 (use (const_int 0))]
4373 "! TARGET_PORTABLE_RUNTIME"
4376 output_arg_descriptor (insn);
4377 return output_call (insn, operands[0]);
4379 [(set_attr "type" "call")
4380 (set (attr "length")
4381 ;; If we're sure that we can either reach the target or that the
4382 ;; linker can use a long-branch stub, then the length is 4 bytes.
4384 ;; For long-calls the length will be either 52 bytes (non-pic)
4385 ;; or 68 bytes (pic). */
4386 ;; Else we have to use a long-call;
4387 (if_then_else (lt (plus (symbol_ref "total_code_bytes") (pc))
4390 (if_then_else (eq (symbol_ref "flag_pic")
;; Indirect call: the callee address is in %r22 and the call is routed
;; through the $$dyncall millicode helper; %r2 receives the return
;; address.  The (use (const_int 1)) marks this as the indirect-call
;; flavor (the symref flavor uses (const_int 0)).
;; NOTE(review): this excerpt has gaps in the embedded line numbering,
;; so several forms below are visibly incomplete -- confirm against a
;; pristine copy of pa.md before editing.
4395 (define_insn "call_internal_reg"
4396 [(call (mem:SI (reg:SI 22))
4397 (match_operand 0 "" "i"))
4398 (clobber (reg:SI 2))
4399 (use (const_int 1))]
;; Output template: the sequence emitted is keyed off the length
;; attribute computed at the bottom of this pattern.
4405 /* First the special case for kernels, level 0 systems, etc. */
4406 if (TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS)
4407 return \"ble 0(%%sr4,%%r22)\;copy %%r31,%%r2\";
4409 /* Now the normal case -- we can reach $$dyncall directly or
4410 we're sure that we can get there via a long-branch stub.
4412 No need to check target flags as the length uniquely identifies
4413 the remaining cases. */
4414 if (get_attr_length (insn) == 8)
4415 return \".CALL\\tARGW0=GR\;bl $$dyncall,%%r31\;copy %%r31,%%r2\";
4417 /* Long millicode call, but we are not generating PIC or portable runtime */
4419 if (get_attr_length (insn) == 12)
4420 return \".CALL\\tARGW0=GR\;ldil L%%$$dyncall,%%r2\;ble R%%$$dyncall(%%sr4,%%r2)\;copy %%r31,%%r2\";
4422 /* Long millicode call for portable runtime. */
4423 if (get_attr_length (insn) == 20)
4424 return \"ldil L%%$$dyncall,%%r31\;ldo R%%$$dyncall(%%r31),%%r31\;blr %%r0,%%r2\;bv,n %%r0(%%r31)\;nop\";
4426 /* If we're generating PIC code. */
4427 xoperands[0] = operands[0];
4428 xoperands[1] = gen_label_rtx ();
/* Materialize the PC in %r1 with "bl .+8", then form the address of
   $$dyncall relative to the label emitted right after the branch.  */
4429 output_asm_insn (\"bl .+8,%%r1\", xoperands);
4430 output_asm_insn (\"addil L%%$$dyncall-%1,%%r1\", xoperands);
4431 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
4432 CODE_LABEL_NUMBER (xoperands[1]));
4433 output_asm_insn (\"ldo R%%$$dyncall-%1(%%r1),%%r1\", xoperands);
4434 output_asm_insn (\"blr %%r0,%%r2\", xoperands);
4435 output_asm_insn (\"bv,n %%r0(%%r1)\\n\\tnop\", xoperands);
;; Length attribute: one arm per emission case handled in the template
;; above (several arms are truncated in this excerpt).
4438 [(set_attr "type" "dyncall")
4439 (set (attr "length")
4441 ;; First NO_SPACE_REGS
4442 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
4446 ;; Target (or stub) within reach
4447 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
4449 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4453 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
4454 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4456 (eq (symbol_ref "flag_pic")
4460 (ne (symbol_ref "TARGET_PORTABLE_RUNTIME")
4464 ;; Out of range PIC case
;; Expander for value-returning calls.  Dispatches to one of two insn
;; patterns (symref vs. register/indirect) because the two call styles
;; use different calling conventions on the PA, and keeping them as
;; distinct patterns stops CSE from converting one into the other.
4467 (define_expand "call_value"
4468 [(parallel [(set (match_operand 0 "" "")
4469 (call (match_operand:SI 1 "" "")
4470 (match_operand 2 "" "")))
4471 (clobber (reg:SI 2))])]
;; Under the portable runtime every call target is forced into a
;; register first.
4478 if (TARGET_PORTABLE_RUNTIME)
4479 op = force_reg (word_mode, XEXP (operands[1], 0));
4481 op = XEXP (operands[1], 0);
4483 /* Use two different patterns for calls to explicitly named functions
4484 and calls through function pointers. This is necessary as these two
4485 types of calls use different calling conventions, and CSE might try
4486 to change the named call into an indirect call in some cases (using
4487 two patterns keeps CSE from performing this optimization). */
4488 if (GET_CODE (op) == SYMBOL_REF)
4489 call_insn = emit_call_insn (gen_call_value_internal_symref (operands[0],
;; Indirect case: the target address must be in %r22 for $$dyncall.
4494 rtx tmpreg = gen_rtx_REG (word_mode, 22);
4495 emit_move_insn (tmpreg, force_reg (word_mode, op));
4496 call_insn = emit_call_insn (gen_call_value_internal_reg (operands[0],
4501 use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), pic_offset_table_rtx);
4503 /* After each call we must restore the PIC register, even if it
4504 doesn't appear to be used.
4506 This will set regs_ever_live for the callee saved register we
4507 stored the PIC register in. */
4508 emit_move_insn (pic_offset_table_rtx,
4509 gen_rtx_REG (word_mode, PIC_OFFSET_TABLE_REGNUM_SAVED))
4510 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
4512 /* Gross. We have to keep the scheduler from moving the restore
4513 of the PIC register away from the call. SCHED_GROUP_P is
4514 supposed to do this, but for some reason the compiler will
4515 go into an infinite loop when we use that.
4517 This method (blockage insn) may make worse code (then again
4518 it may not since calls are nearly blockages anyway), but at
4519 least it should work. */
4520 emit_insn (gen_blockage ());
;; Value-returning call to an explicitly named function (SYMBOL_REF
;; target).  Disabled under the portable runtime, which always calls
;; through a register.  Result lands in a general or FP register ("=rf").
4525 (define_insn "call_value_internal_symref"
4526 [(set (match_operand 0 "" "=rf")
4527 (call (mem:SI (match_operand:SI 1 "call_operand_address" ""))
4528 (match_operand 2 "" "i")))
4529 (clobber (reg:SI 2))
4530 (use (const_int 0))]
4531 ;;- Don't use operand 1 for most machines.
4532 "! TARGET_PORTABLE_RUNTIME"
;; Emit the parameter-relocation descriptor, then the actual call
;; sequence (output_call picks short vs. long form from the length).
4535 output_arg_descriptor (insn);
4536 return output_call (insn, operands[1]);
4538 [(set_attr "type" "call")
4539 (set (attr "length")
4540 ;; If we're sure that we can either reach the target or that the
4541 ;; linker can use a long-branch stub, then the length is 4 bytes.
4543 ;; For long-calls the length will be either 52 bytes (non-pic)
4544 ;; or 68 bytes (pic).
4545 ;; Else we have to use a long-call;
4546 (if_then_else (lt (plus (symbol_ref "total_code_bytes") (pc))
4549 (if_then_else (eq (symbol_ref "flag_pic")
;; Value-returning indirect call through %r22 via $$dyncall.  Mirrors
;; call_internal_reg above; the only structural difference is the
;; (set (match_operand 0 ...)) capturing the return value, so the
;; immediate operand is operand 1 here.
4554 (define_insn "call_value_internal_reg"
4555 [(set (match_operand 0 "" "=rf")
4556 (call (mem:SI (reg:SI 22))
4557 (match_operand 1 "" "i")))
4558 (clobber (reg:SI 2))
4559 (use (const_int 1))]
4565 /* First the special case for kernels, level 0 systems, etc. */
4566 if (TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS)
4567 return \"ble 0(%%sr4,%%r22)\;copy %%r31,%%r2\";
4569 /* Now the normal case -- we can reach $$dyncall directly or
4570 we're sure that we can get there via a long-branch stub.
4572 No need to check target flags as the length uniquely identifies
4573 the remaining cases. */
4574 if (get_attr_length (insn) == 8)
4575 return \".CALL\\tARGW0=GR\;bl $$dyncall,%%r31\;copy %%r31,%%r2\";
4577 /* Long millicode call, but we are not generating PIC or portable runtime */
4579 if (get_attr_length (insn) == 12)
4580 return \".CALL\\tARGW0=GR\;ldil L%%$$dyncall,%%r2\;ble R%%$$dyncall(%%sr4,%%r2)\;copy %%r31,%%r2\";
4582 /* Long millicode call for portable runtime. */
4583 if (get_attr_length (insn) == 20)
4584 return \"ldil L%%$$dyncall,%%r31\;ldo R%%$$dyncall(%%r31),%%r31\;blr %%r0,%%r2\;bv,n %%r0(%%r31)\;nop\";
4586 /* If we're generating PIC code. */
4587 xoperands[0] = operands[1];
4588 xoperands[1] = gen_label_rtx ();
/* PC-relative materialization of $$dyncall, as in call_internal_reg.  */
4589 output_asm_insn (\"bl .+8,%%r1\", xoperands);
4590 output_asm_insn (\"addil L%%$$dyncall-%1,%%r1\", xoperands);
4591 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
4592 CODE_LABEL_NUMBER (xoperands[1]));
4593 output_asm_insn (\"ldo R%%$$dyncall-%1(%%r1),%%r1\", xoperands);
4594 output_asm_insn (\"blr %%r0,%%r2\", xoperands);
4595 output_asm_insn (\"bv,n %%r0(%%r1)\\n\\tnop\", xoperands);
;; Length attribute: same case analysis as call_internal_reg.
4598 [(set_attr "type" "dyncall")
4599 (set (attr "length")
4601 ;; First NO_SPACE_REGS
4602 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
4606 ;; Target (or stub) within reach
4607 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
4609 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4613 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
4614 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4616 (eq (symbol_ref "flag_pic")
4620 (ne (symbol_ref "TARGET_PORTABLE_RUNTIME")
4624 ;; Out of range PIC case
4627 ;; Call subroutine returning any type.
;; Standard untyped_call expander: operand 0 is the function, operand 1
;; the result block, operand 2 a parallel of (set (reg) (mem)) pairs
;; copying the possible return registers into the result block.
4629 (define_expand "untyped_call"
4630 [(parallel [(call (match_operand 0 "" "")
4632 (match_operand 1 "" "")
4633 (match_operand 2 "" "")])]
4639 emit_call_insn (gen_call (operands[0], const0_rtx));
;; Copy each candidate value register out to the caller's result block.
4641 for (i = 0; i < XVECLEN (operands[2], 0); i++)
4643 rtx set = XVECEXP (operands[2], 0, i);
4644 emit_move_insn (SET_DEST (set), SET_SRC (set));
4647 /* The optimizer does not know that the call sets the function value
4648 registers we stored in the result block. We avoid problems by
4649 claiming that all hard registers are used and clobbered at this
4651 emit_insn (gen_blockage ());
;; NOTE(review): the two attribute lines below are the tail of a
;; different insn whose opening lines are missing from this excerpt.
4659 [(set_attr "type" "move")
4660 (set_attr "length" "4")])
4662 ;; These are just placeholders so we know where branch tables
;; begin_brtab emits the GAS-only ".begin_brtab" pseudo-op marking the
;; start of a branch table; it occupies no code space (length 0).
4664 (define_insn "begin_brtab"
4669 /* Only GAS actually supports this pseudo-op. */
4671 return \".begin_brtab\";
4675 [(set_attr "type" "move")
4676 (set_attr "length" "0")])
;; Companion to begin_brtab: emits ".end_brtab" after a branch table.
;; Zero length -- purely an assembler annotation.
4678 (define_insn "end_brtab"
4683 /* Only GAS actually supports this pseudo-op. */
4685 return \".end_brtab\";
4689 [(set_attr "type" "move")
4690 (set_attr "length" "0")])
4692 ;;; Hope this is only within a function...
;; Intra-space indirect jump through a register.  The word_mode guard
;; rejects wrongly-moded operands.  (The output template line is
;; missing from this excerpt.)
4693 (define_insn "indirect_jump"
4694 [(set (pc) (match_operand 0 "register_operand" "r"))]
4695 "GET_MODE (operands[0]) == word_mode"
4697 [(set_attr "type" "branch")
4698 (set_attr "length" "4")])
4700 ;;; EH does longjmp's from and within the data section. Thus,
4701 ;;; an interspace branch is required for the longjmp implementation.
4702 ;;; Registers r1 and r2 are not saved in the jmpbuf environment.
4703 ;;; Thus, they can be used as scratch registers for the jump.
;; Three instructions: fetch the target's space id into %r2, load it
;; into %sr0, then branch externally through (%sr0, target).
4704 (define_insn "interspace_jump"
4705 [(set (pc) (match_operand:SI 0 "register_operand" "a"))
4706 (clobber (reg:SI 2))]
4708 "ldsid (%%sr0,%0),%%r2\; mtsp %%r2,%%sr0\; be%* 0(%%sr0,%0)"
4709 [(set_attr "type" "branch")
4710 (set_attr "length" "12")])
;; builtin_longjmp: operand 0 points at the jmp_buf.  Buffer layout is
;; frame pointer at offset 0, label at +4, stack pointer at +8.
;; The jump itself must be an interspace branch (see comments above).
4712 (define_expand "builtin_longjmp"
4713 [(unspec_volatile [(match_operand 0 "register_operand" "r")] 3)]
4717 /* The elements of the buffer are, in order: */
4718 rtx fp = gen_rtx_MEM (Pmode, operands[0]);
4719 rtx lab = gen_rtx_MEM (Pmode, plus_constant (operands[0], 4));
4720 rtx stack = gen_rtx_MEM (Pmode, plus_constant (operands[0], 8));
4721 rtx pv = gen_rtx_REG (Pmode, 1);
4723 /* This bit is the same as expand_builtin_longjmp. */
4724 emit_move_insn (hard_frame_pointer_rtx, fp);
4725 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
4726 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
4727 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
4729 /* Load the label we are jumping through into r1 so that we know
4730 where to look for it when we get back to setjmp's function for
4731 restoring the gp. */
4732 emit_move_insn (pv, lab);
4733 emit_jump_insn (gen_interspace_jump (pv));
;; Unsigned bit-field extract with constant width (op 2) and constant
;; starting bit (op 3); maps directly onto the PA "extru" instruction.
;; PA bit positions count from the MSB, hence the %3+%2-1 end-bit form.
4738 (define_insn "extzv"
4739 [(set (match_operand:SI 0 "register_operand" "=r")
4740 (zero_extract:SI (match_operand:SI 1 "register_operand" "r")
4741 (match_operand:SI 2 "uint5_operand" "")
4742 (match_operand:SI 3 "uint5_operand" "")))]
4744 "extru %1,%3+%2-1,%2,%0"
4745 [(set_attr "type" "shift")
4746 (set_attr "length" "4")])
;; Variable-position extract variants.  The "q" constraint puts the
;; position in the shift-amount (SAR) register.  NOTE(review): the
;; (define_insn ...) opening lines and output templates of these
;; patterns are missing from this excerpt.
4749 [(set (match_operand:SI 0 "register_operand" "=r")
4750 (zero_extract:SI (match_operand:SI 1 "register_operand" "r")
4752 (match_operand:SI 3 "register_operand" "q")))]
4755 [(set_attr "type" "shift")
4756 (set_attr "length" "4")])
;; Signed extract ("extv" equivalent) with constant width/position,
;; mapping onto "extrs".
4759 [(set (match_operand:SI 0 "register_operand" "=r")
4760 (sign_extract:SI (match_operand:SI 1 "register_operand" "r")
4761 (match_operand:SI 2 "uint5_operand" "")
4762 (match_operand:SI 3 "uint5_operand" "")))]
4764 "extrs %1,%3+%2-1,%2,%0"
4765 [(set_attr "type" "shift")
4766 (set_attr "length" "4")])
;; Signed extract with variable position in the SAR register.
4769 [(set (match_operand:SI 0 "register_operand" "=r")
4770 (sign_extract:SI (match_operand:SI 1 "register_operand" "r")
4772 (match_operand:SI 3 "register_operand" "q")))]
4775 [(set_attr "type" "shift")
4776 (set_attr "length" "4")])
;; Bit-field insert ("insv"): deposit a register ("dep") or a 5-bit
;; immediate ("depi") into a field of operand 0.  NOTE(review): the
;; (define_insn ...) opening line is missing from this excerpt.
4779 [(set (zero_extract:SI (match_operand:SI 0 "register_operand" "+r,r")
4780 (match_operand:SI 1 "uint5_operand" "")
4781 (match_operand:SI 2 "uint5_operand" ""))
4782 (match_operand:SI 3 "arith5_operand" "r,L"))]
4785 dep %3,%2+%1-1,%1,%0
4786 depi %3,%2+%1-1,%1,%0"
4787 [(set_attr "type" "shift,shift")
4788 (set_attr "length" "4,4")])
4790 ;; Optimize insertion of const_int values of type 1...1xxxx.
;; The condition checks that bit 4 of the constant is set and that all
;; field bits above the low nibble are ones; the value can then be
;; expressed as the sign-extended 5-bit immediate computed below.
4792 [(set (zero_extract:SI (match_operand:SI 0 "register_operand" "+r")
4793 (match_operand:SI 1 "uint5_operand" "")
4794 (match_operand:SI 2 "uint5_operand" ""))
4795 (match_operand:SI 3 "const_int_operand" ""))]
4796 "(INTVAL (operands[3]) & 0x10) != 0 &&
4797 (~INTVAL (operands[3]) & ((1L << INTVAL (operands[1])) - 1) & ~0xf) == 0"
4800 operands[3] = GEN_INT ((INTVAL (operands[3]) & 0xf) - 0x10);
4801 return \"depi %3,%2+%1-1,%1,%0\";
4803 [(set_attr "type" "shift")
4804 (set_attr "length" "4")])
4806 ;; This insn is used for some loop tests, typically loops reversed when
4807 ;; strength reduction is used. It is actually created when the instruction
4808 ;; combination phase combines the special loop test. Since this insn
4809 ;; is both a jump insn and has an output, it must deal with its own
4810 ;; reloads, hence the `m' constraints. The `!' constraints direct reload
4811 ;; to not choose the register alternatives in the event a reload is needed.
4812 (define_insn "decrement_and_branch_until_zero"
4815 (match_operator 2 "comparison_operator"
4816 [(plus:SI (match_operand:SI 0 "register_operand" "+!r,!*f,!*m")
4817 (match_operand:SI 1 "int5_operand" "L,L,L"))
4819 (label_ref (match_operand 3 "" ""))
4822 (plus:SI (match_dup 0) (match_dup 1)))
;; Scratch for the FP-reg and memory alternatives, which need extra
;; insns to get the counter into/out of a general register.
4823 (clobber (match_scratch:SI 4 "=X,r,r"))]
4825 "* return output_dbra (operands, insn, which_alternative); "
4826 ;; Do not expect to understand this the first time through.
4827 [(set_attr "type" "cbranch,multi,multi")
;; Length depends on which alternative reload picked and on the branch
;; displacement (short vs. long branch).
4828 (set (attr "length")
4829 (if_then_else (eq_attr "alternative" "0")
4830 ;; Loop counter in register case
4831 ;; Short branch has length of 4
4832 ;; Long branch has length of 8
4833 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4838 ;; Loop counter in FP reg case.
4839 ;; Extra goo to deal with additional reload insns.
4840 (if_then_else (eq_attr "alternative" "1")
4841 (if_then_else (lt (match_dup 3) (pc))
4843 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 24))))
4848 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4852 ;; Loop counter in memory case.
4853 ;; Extra goo to deal with additional reload insns.
4854 (if_then_else (lt (match_dup 3) (pc))
4856 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 12))))
4861 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4864 (const_int 16))))))])
;; "movb"-style patterns: copy a register and conditionally branch on
;; its value in one insn.  Output is produced by output_movb; the last
;; argument selects the normal (0) or negated (1) sense of the branch.
;; NOTE(review): the (define_insn ...) opening lines are missing from
;; this excerpt.
4869 (match_operator 2 "movb_comparison_operator"
4870 [(match_operand:SI 1 "register_operand" "r,r,r,r") (const_int 0)])
4871 (label_ref (match_operand 3 "" ""))
4873 (set (match_operand:SI 0 "register_operand" "=!r,!*f,!*m,!*q")
4876 "* return output_movb (operands, insn, which_alternative, 0); "
4877 ;; Do not expect to understand this the first time through.
4878 [(set_attr "type" "cbranch,multi,multi,multi")
;; Length computation parallels decrement_and_branch_until_zero:
;; per-alternative, with extra room for reload insns and long branches.
4879 (set (attr "length")
4880 (if_then_else (eq_attr "alternative" "0")
4881 ;; Loop counter in register case
4882 ;; Short branch has length of 4
4883 ;; Long branch has length of 8
4884 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4889 ;; Loop counter in FP reg case.
4890 ;; Extra goo to deal with additional reload insns.
4891 (if_then_else (eq_attr "alternative" "1")
4892 (if_then_else (lt (match_dup 3) (pc))
4894 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 12))))
4899 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4903 ;; Loop counter in memory or sar case.
4904 ;; Extra goo to deal with additional reload insns.
4906 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4909 (const_int 12)))))])
4911 ;; Handle negated branch.
4915 (match_operator 2 "movb_comparison_operator"
4916 [(match_operand:SI 1 "register_operand" "r,r,r,r") (const_int 0)])
4918 (label_ref (match_operand 3 "" ""))))
4919 (set (match_operand:SI 0 "register_operand" "=!r,!*f,!*m,!*q")
4922 "* return output_movb (operands, insn, which_alternative, 1); "
4923 ;; Do not expect to understand this the first time through.
4924 [(set_attr "type" "cbranch,multi,multi,multi")
4925 (set (attr "length")
4926 (if_then_else (eq_attr "alternative" "0")
4927 ;; Loop counter in register case
4928 ;; Short branch has length of 4
4929 ;; Long branch has length of 8
4930 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4935 ;; Loop counter in FP reg case.
4936 ;; Extra goo to deal with additional reload insns.
4937 (if_then_else (eq_attr "alternative" "1")
4938 (if_then_else (lt (match_dup 3) (pc))
4940 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 12))))
4945 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4949 ;; Loop counter in memory or SAR case.
4950 ;; Extra goo to deal with additional reload insns.
4952 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4955 (const_int 12)))))])
4957 ;; The next several patterns (parallel_addb, parallel_movb, fmpyadd and
4958 ;; fmpysub aren't currently used by the FSF sources, but will be soon.
4960 ;; They're in the FSF tree for documentation and to make Cygnus<->FSF
;; Combined add-and-branch ("addb"): only valid after reload when the
;; destination aliases one of the addends.  NOTE(review): the
;; (define_insn ...) opening line is missing from this excerpt.
4963 [(set (pc) (label_ref (match_operand 3 "" "" )))
4964 (set (match_operand:SI 0 "register_operand" "=r")
4965 (plus:SI (match_operand:SI 1 "register_operand" "r")
4966 (match_operand:SI 2 "ireg_or_int5_operand" "rL")))]
4967 "(reload_completed && operands[0] == operands[1]) || operands[0] == operands[2]"
4970 return output_parallel_addb (operands, get_attr_length (insn));
4972 [(set_attr "type" "parallel_branch")
;; Short vs. long form chosen by branch displacement.
4973 (set (attr "length")
4974 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
;; Combined move-and-branch ("movb") patterns, one per mode
;; (SF, SI, HI, QI); all share output_parallel_movb and the same
;; displacement-based length computation.  NOTE(review): the
;; (define_insn ...) opening lines are missing from this excerpt.
4980 [(set (pc) (label_ref (match_operand 2 "" "" )))
4981 (set (match_operand:SF 0 "register_operand" "=r")
4982 (match_operand:SF 1 "ireg_or_int5_operand" "rL"))]
4986 return output_parallel_movb (operands, get_attr_length (insn));
4988 [(set_attr "type" "parallel_branch")
4989 (set (attr "length")
4990 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
;; SImode variant.
4996 [(set (pc) (label_ref (match_operand 2 "" "" )))
4997 (set (match_operand:SI 0 "register_operand" "=r")
4998 (match_operand:SI 1 "ireg_or_int5_operand" "rL"))]
5002 return output_parallel_movb (operands, get_attr_length (insn));
5004 [(set_attr "type" "parallel_branch")
5005 (set (attr "length")
5006 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
;; HImode variant.
5012 [(set (pc) (label_ref (match_operand 2 "" "" )))
5013 (set (match_operand:HI 0 "register_operand" "=r")
5014 (match_operand:HI 1 "ireg_or_int5_operand" "rL"))]
5018 return output_parallel_movb (operands, get_attr_length (insn));
5020 [(set_attr "type" "parallel_branch")
5021 (set (attr "length")
5022 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
;; QImode variant.
5028 [(set (pc) (label_ref (match_operand 2 "" "" )))
5029 (set (match_operand:QI 0 "register_operand" "=r")
5030 (match_operand:QI 1 "ireg_or_int5_operand" "rL"))]
5034 return output_parallel_movb (operands, get_attr_length (insn));
5036 [(set_attr "type" "parallel_branch")
5037 (set (attr "length")
5038 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
;; Fused multiply/add ("fmpyadd") peephole patterns: a PA-1.1 FP
;; multiply and an independent FP add executed as one instruction.
;; Two orderings (mult-then-add and add-then-mult) are covered; both
;; require reload to be complete and fmpyaddoperands() to approve the
;; register pairing.  NOTE(review): the (define_insn ...) opening lines
;; are missing from this excerpt.
5044 [(set (match_operand 0 "register_operand" "=f")
5045 (mult (match_operand 1 "register_operand" "f")
5046 (match_operand 2 "register_operand" "f")))
5047 (set (match_operand 3 "register_operand" "+f")
5048 (plus (match_operand 4 "register_operand" "f")
5049 (match_operand 5 "register_operand" "f")))]
5050 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT
5051 && reload_completed && fmpyaddoperands (operands)"
5054 if (GET_MODE (operands[0]) == DFmode)
/* The add destination may alias either addend; pick the operand
   ordering that names the non-destination addend.  */
5056 if (rtx_equal_p (operands[3], operands[5]))
5057 return \"fmpyadd,dbl %1,%2,%0,%4,%3\";
5059 return \"fmpyadd,dbl %1,%2,%0,%5,%3\";
5063 if (rtx_equal_p (operands[3], operands[5]))
5064 return \"fmpyadd,sgl %1,%2,%0,%4,%3\";
5066 return \"fmpyadd,sgl %1,%2,%0,%5,%3\";
5069 [(set_attr "type" "fpalu")
5070 (set_attr "length" "4")])
;; Same pattern with the add listed first in the parallel.
5073 [(set (match_operand 3 "register_operand" "+f")
5074 (plus (match_operand 4 "register_operand" "f")
5075 (match_operand 5 "register_operand" "f")))
5076 (set (match_operand 0 "register_operand" "=f")
5077 (mult (match_operand 1 "register_operand" "f")
5078 (match_operand 2 "register_operand" "f")))]
5079 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT
5080 && reload_completed && fmpyaddoperands (operands)"
5083 if (GET_MODE (operands[0]) == DFmode)
5085 if (rtx_equal_p (operands[3], operands[5]))
5086 return \"fmpyadd,dbl %1,%2,%0,%4,%3\";
5088 return \"fmpyadd,dbl %1,%2,%0,%5,%3\";
5092 if (rtx_equal_p (operands[3], operands[5]))
5093 return \"fmpyadd,sgl %1,%2,%0,%4,%3\";
5095 return \"fmpyadd,sgl %1,%2,%0,%5,%3\";
5098 [(set_attr "type" "fpalu")
5099 (set_attr "length" "4")])
;; Fused multiply/subtract ("fmpysub") patterns, both orderings, gated
;; by fmpysuboperands().  Unlike fmpyadd there is no operand-4 variant:
;; the subtrahend is always operand 5.  NOTE(review): the
;; (define_insn ...) opening lines are missing from this excerpt.
5102 [(set (match_operand 0 "register_operand" "=f")
5103 (mult (match_operand 1 "register_operand" "f")
5104 (match_operand 2 "register_operand" "f")))
5105 (set (match_operand 3 "register_operand" "+f")
5106 (minus (match_operand 4 "register_operand" "f")
5107 (match_operand 5 "register_operand" "f")))]
5108 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT
5109 && reload_completed && fmpysuboperands (operands)"
5112 if (GET_MODE (operands[0]) == DFmode)
5113 return \"fmpysub,dbl %1,%2,%0,%5,%3\";
5115 return \"fmpysub,sgl %1,%2,%0,%5,%3\";
5117 [(set_attr "type" "fpalu")
5118 (set_attr "length" "4")])
;; Same pattern with the subtract listed first in the parallel.
5121 [(set (match_operand 3 "register_operand" "+f")
5122 (minus (match_operand 4 "register_operand" "f")
5123 (match_operand 5 "register_operand" "f")))
5124 (set (match_operand 0 "register_operand" "=f")
5125 (mult (match_operand 1 "register_operand" "f")
5126 (match_operand 2 "register_operand" "f")))]
5127 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT
5128 && reload_completed && fmpysuboperands (operands)"
5131 if (GET_MODE (operands[0]) == DFmode)
5132 return \"fmpysub,dbl %1,%2,%0,%5,%3\";
5134 return \"fmpysub,sgl %1,%2,%0,%5,%3\";
5136 [(set_attr "type" "fpalu")
5137 (set_attr "length" "4")])
5139 ;; Clean up turds left by reload.
;; Two peepholes for the DFmode store-then-copy and load-then-copy
;; sequences reload can emit: re-emit the memory access, then copy
;; register-to-register only when source and destination differ.
;; NOTE(review): the (define_peephole ...) opening lines are missing
;; from this excerpt.
5141 [(set (match_operand 0 "reg_or_nonsymb_mem_operand" "")
5142 (match_operand 1 "register_operand" "fr"))
5143 (set (match_operand 2 "register_operand" "fr")
;; Guards: a non-volatile DFmode MEM destination, matching modes,
;; REG sources of the same register class, and an address free of
;; side effects (no auto-increment).
5145 "! TARGET_SOFT_FLOAT
5146 && GET_CODE (operands[0]) == MEM
5147 && ! MEM_VOLATILE_P (operands[0])
5148 && GET_MODE (operands[0]) == GET_MODE (operands[1])
5149 && GET_MODE (operands[0]) == GET_MODE (operands[2])
5150 && GET_MODE (operands[0]) == DFmode
5151 && GET_CODE (operands[1]) == REG
5152 && GET_CODE (operands[2]) == REG
5153 && ! side_effects_p (XEXP (operands[0], 0))
5154 && REGNO_REG_CLASS (REGNO (operands[1]))
5155 == REGNO_REG_CLASS (REGNO (operands[2]))"
5160 if (FP_REG_P (operands[1]))
5161 output_asm_insn (output_fp_move_double (operands), operands);
5163 output_asm_insn (output_move_double (operands), operands);
/* If the second set is a self-copy, nothing more to emit.  */
5165 if (rtx_equal_p (operands[1], operands[2]))
5168 xoperands[0] = operands[2];
5169 xoperands[1] = operands[1];
5171 if (FP_REG_P (xoperands[1]))
5172 output_asm_insn (output_fp_move_double (xoperands), xoperands);
5174 output_asm_insn (output_move_double (xoperands), xoperands);
;; Load-then-copy variant: same guard structure with the MEM on the
;; source side.
5180 [(set (match_operand 0 "register_operand" "fr")
5181 (match_operand 1 "reg_or_nonsymb_mem_operand" ""))
5182 (set (match_operand 2 "register_operand" "fr")
5184 "! TARGET_SOFT_FLOAT
5185 && GET_CODE (operands[1]) == MEM
5186 && ! MEM_VOLATILE_P (operands[1])
5187 && GET_MODE (operands[0]) == GET_MODE (operands[1])
5188 && GET_MODE (operands[0]) == GET_MODE (operands[2])
5189 && GET_MODE (operands[0]) == DFmode
5190 && GET_CODE (operands[0]) == REG
5191 && GET_CODE (operands[2]) == REG
5192 && ! side_effects_p (XEXP (operands[1], 0))
5193 && REGNO_REG_CLASS (REGNO (operands[0]))
5194 == REGNO_REG_CLASS (REGNO (operands[2]))"
5199 if (FP_REG_P (operands[0]))
5200 output_asm_insn (output_fp_move_double (operands), operands);
5202 output_asm_insn (output_move_double (operands), operands);
5204 xoperands[0] = operands[2];
5205 xoperands[1] = operands[0];
5207 if (FP_REG_P (xoperands[1]))
5208 output_asm_insn (output_fp_move_double (xoperands), xoperands);
5210 output_asm_insn (output_move_double (xoperands), xoperands);
5215 ;; Flush the I and D cache line found at the address in operand 0.
5216 ;; This is used by the trampoline code for nested functions.
5217 ;; So long as the trampoline itself is less than 32 bytes this
;; Data-cache flush: two "fdc" flushes (start and end address of the
;; trampoline) followed by a "sync".
5220 (define_insn "dcacheflush"
5221 [(unspec_volatile [(const_int 1)] 0)
5222 (use (mem:SI (match_operand 0 "register_operand" "r")))
5223 (use (mem:SI (match_operand 1 "register_operand" "r")))]
5225 "fdc 0(%0)\;fdc 0(%1)\;sync"
5226 [(set_attr "type" "multi")
5227 (set_attr "length" "12")])
;; Instruction-cache flush.  "fic" needs an explicit space register, so
;; the sequence saves %sr0, loads the target's space id, flushes both
;; addresses, syncs, restores %sr0, then pads with nops to keep the
;; just-flushed instructions out of the prefetch window.
5229 (define_insn "icacheflush"
5230 [(unspec_volatile [(const_int 2)] 0)
5231 (use (mem:SI (match_operand 0 "register_operand" "r")))
5232 (use (mem:SI (match_operand 1 "register_operand" "r")))
5233 (use (match_operand 2 "register_operand" "r"))
5234 (clobber (match_operand 3 "register_operand" "=&r"))
5235 (clobber (match_operand 4 "register_operand" "=&r"))]
5237 "mfsp %%sr0,%4\;ldsid (%2),%3\;mtsp %3,%%sr0\;fic 0(%%sr0,%0)\;fic 0(%%sr0,%1)\;sync\;mtsp %4,%%sr0\;nop\;nop\;nop\;nop\;nop\;nop"
5238 [(set_attr "type" "multi")
5239 (set_attr "length" "52")])
5241 ;; An out-of-line prologue.
;; Calls a millicode routine (__outline_prologue or
;; __outline_prologue_fp) that performs the prologue work; the
;; clobber list declares the registers the routine may destroy.
5242 (define_insn "outline_prologue_call"
5243 [(unspec_volatile [(const_int 0)] 0)
5244 (clobber (reg:SI 31))
5245 (clobber (reg:SI 22))
5246 (clobber (reg:SI 21))
5247 (clobber (reg:SI 20))
5248 (clobber (reg:SI 19))
5249 (clobber (reg:SI 1))]
5253 extern int frame_pointer_needed;
5255 /* We need two different versions depending on whether or not we
5256 need a frame pointer. Also note that we return to the instruction
5257 immediately after the branch rather than two instructions after the
5258 break as normally is the case. */
5259 if (frame_pointer_needed)
5261 /* Must import the magic millicode routine(s). */
5262 output_asm_insn (\".IMPORT __outline_prologue_fp,MILLICODE\", NULL);
/* Portable runtime cannot rely on the linker's long-branch stubs,
   so reach the routine with an explicit ldil/ble pair.  */
5264 if (TARGET_PORTABLE_RUNTIME)
5266 output_asm_insn (\"ldil L'__outline_prologue_fp,%%r31\", NULL);
5267 output_asm_insn (\"ble,n R'__outline_prologue_fp(%%sr0,%%r31)\",
5271 output_asm_insn (\"bl,n __outline_prologue_fp,%%r31\", NULL);
5275 /* Must import the magic millicode routine(s). */
5276 output_asm_insn (\".IMPORT __outline_prologue,MILLICODE\", NULL);
5278 if (TARGET_PORTABLE_RUNTIME)
5280 output_asm_insn (\"ldil L'__outline_prologue,%%r31\", NULL);
5281 output_asm_insn (\"ble,n R'__outline_prologue(%%sr0,%%r31)\", NULL);
5284 output_asm_insn (\"bl,n __outline_prologue,%%r31\", NULL);
5288 [(set_attr "type" "multi")
5289 (set_attr "length" "8")])
5291 ;; An out-of-line epilogue.
;; Companion to outline_prologue_call: branches to __outline_epilogue
;; (or the _fp variant).  Additionally clobbers %r2 since the routine
;; handles the function return itself.
5292 (define_insn "outline_epilogue_call"
5293 [(unspec_volatile [(const_int 1)] 0)
5296 (clobber (reg:SI 31))
5297 (clobber (reg:SI 22))
5298 (clobber (reg:SI 21))
5299 (clobber (reg:SI 20))
5300 (clobber (reg:SI 19))
5301 (clobber (reg:SI 2))
5302 (clobber (reg:SI 1))]
5306 extern int frame_pointer_needed;
5308 /* We need two different versions depending on whether or not we
5309 need a frame pointer. Also note that we return to the instruction
5310 immediately after the branch rather than two instructions after the
5311 break as normally is the case. */
5312 if (frame_pointer_needed)
5314 /* Must import the magic millicode routine. */
5315 output_asm_insn (\".IMPORT __outline_epilogue_fp,MILLICODE\", NULL);
5317 /* The out-of-line prologue will make sure we return to the right
5319 if (TARGET_PORTABLE_RUNTIME)
5321 output_asm_insn (\"ldil L'__outline_epilogue_fp,%%r31\", NULL);
5322 output_asm_insn (\"ble,n R'__outline_epilogue_fp(%%sr0,%%r31)\",
5326 output_asm_insn (\"bl,n __outline_epilogue_fp,%%r31\", NULL);
5330 /* Must import the magic millicode routine. */
5331 output_asm_insn (\".IMPORT __outline_epilogue,MILLICODE\", NULL);
5333 /* The out-of-line prologue will make sure we return to the right
5335 if (TARGET_PORTABLE_RUNTIME)
5337 output_asm_insn (\"ldil L'__outline_epilogue,%%r31\", NULL);
5338 output_asm_insn (\"ble,n R'__outline_epilogue(%%sr0,%%r31)\", NULL);
5341 output_asm_insn (\"bl,n __outline_epilogue,%%r31\", NULL);
5345 [(set_attr "type" "multi")
5346 (set_attr "length" "8")])
5348 ;; Given a function pointer, canonicalize it so it can be
5349 ;; reliably compared to another function pointer.
;; Expander: copies the pointer into %r26, runs the $$sh_func_adrs
;; millicode helper (result in %r29, via the unnamed insn that
;; follows), then moves the canonical value into operand 0.
5350 (define_expand "canonicalize_funcptr_for_compare"
5351 [(set (reg:SI 26) (match_operand:SI 1 "register_operand" ""))
5352 (parallel [(set (reg:SI 29) (unspec:SI [(reg:SI 26)] 0))
5353 (clobber (match_dup 2))
5354 (clobber (reg:SI 26))
5355 (clobber (reg:SI 22))
5356 (clobber (reg:SI 31))])
5357 (set (match_operand:SI 0 "register_operand" "")
5359 "! TARGET_PORTABLE_RUNTIME"
5362 operands[2] = gen_reg_rtx (SImode);
;; Force non-REG inputs (e.g. constants or MEMs) into a register.
5363 if (GET_CODE (operands[1]) != REG)
5365 rtx tmp = gen_reg_rtx (Pmode);
5366 emit_move_insn (tmp, operands[1]);
;; The insn behind canonicalize_funcptr_for_compare: in %r26, out %r29.
;; Fast-path checks avoid the $$sh_func_adrs millicode call; the skip
;; distances in the inline comparisons depend on this insn's length,
;; hence the get_attr_length switches.  NOTE(review): the
;; (define_insn ...) opening line is missing from this excerpt.
5372 [(set (reg:SI 29) (unspec:SI [(reg:SI 26)] 0))
5373 (clobber (match_operand:SI 0 "register_operand" "=a"))
5374 (clobber (reg:SI 26))
5375 (clobber (reg:SI 22))
5376 (clobber (reg:SI 31))]
5380 /* Must import the magic millicode routine. */
5381 output_asm_insn (\".IMPORT $$sh_func_adrs,MILLICODE\", NULL);
5383 /* This is absolutely amazing.
5385 First, copy our input parameter into %r29 just in case we don't
5386 need to call $$sh_func_adrs. */
5387 output_asm_insn (\"copy %%r26,%%r29\", NULL);
5389 /* Next, examine the low two bits in %r26, if they aren't 0x2, then
5390 we use %r26 unchanged. */
5391 if (get_attr_length (insn) == 32)
5392 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+24\", NULL);
5393 else if (get_attr_length (insn) == 40)
5394 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+32\", NULL);
5395 else if (get_attr_length (insn) == 44)
5396 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+36\", NULL);
5398 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+20\", NULL);
5400 /* Next, compare %r26 with 4096, if %r26 is less than or equal to
5401 4096, then we use %r26 unchanged. */
5402 if (get_attr_length (insn) == 32)
5403 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+16\", NULL);
5404 else if (get_attr_length (insn) == 40)
5405 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+24\", NULL);
5406 else if (get_attr_length (insn) == 44)
5407 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+28\", NULL);
5409 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+12\", NULL);
5411 /* Else call $$sh_func_adrs to extract the function's real address. */
5412 return output_millicode_call (insn,
5413 gen_rtx_SYMBOL_REF (SImode, \"$$sh_func_adrs\"));
5415 [(set_attr "type" "multi")
;; Length mirrors the millicode-call reachability cases used by the
;; other call patterns in this file.
5416 (set (attr "length")
5418 ;; Target (or stub) within reach
5419 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
5421 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
5426 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
5430 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
5431 ;; same as NO_SPACE_REGS code
5432 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
5434 (eq (symbol_ref "flag_pic")
5439 (ne (symbol_ref "TARGET_PORTABLE_RUNTIME")
5443 ;; Out of range and PIC
5446 ;; On the PA, the PIC register is call clobbered, so it must
5447 ;; be saved & restored around calls by the caller. If the call
5448 ;; doesn't return normally (nonlocal goto, or an exception is
5449 ;; thrown), then the code at the exception handler label must
5450 ;; restore the PIC register.
;; Reloads the PIC register from the caller's frame marker slot at
;; SP-32 and fences with a blockage so the scheduler cannot move code
;; that needs the PIC register above the restore.
5451 (define_expand "exception_receiver"
5453 "!TARGET_PORTABLE_RUNTIME && flag_pic"
5456 /* Load the PIC register from the stack slot (in our caller's
5458 emit_move_insn (pic_offset_table_rtx,
5459 gen_rtx_MEM (SImode, plus_constant (stack_pointer_rtx, -32)));
5460 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
5461 emit_insn (gen_blockage ());