1 ;;- Machine description for HP PA-RISC architecture for GNU C compiler
2 ;; Copyright (C) 1992, 93-98, 1999 Free Software Foundation, Inc.
3 ;; Contributed by the Center for Software Science at the University
4 ;; of Utah.
5
6 ;; This file is part of GNU CC.
7
8 ;; GNU CC is free software; you can redistribute it and/or modify
9 ;; it under the terms of the GNU General Public License as published by
10 ;; the Free Software Foundation; either version 2, or (at your option)
11 ;; any later version.
12
13 ;; GNU CC is distributed in the hope that it will be useful,
14 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
15 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 ;; GNU General Public License for more details.
17
18 ;; You should have received a copy of the GNU General Public License
19 ;; along with GNU CC; see the file COPYING. If not, write to
20 ;; the Free Software Foundation, 59 Temple Place - Suite 330,
21 ;; Boston, MA 02111-1307, USA.
22
23 ;; This gcc Version 2 machine description is inspired by sparc.md and
24 ;; mips.md.
25
26 ;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
27
28 ;; Insn type. Used to default other attribute values.
29
30 ;; type "unary" insns have one input operand (1) and one output operand (0)
31 ;; type "binary" insns have two input operands (1,2) and one output (0)
32
33 (define_attr "type"
34 "move,unary,binary,shift,nullshift,compare,load,store,uncond_branch,branch,cbranch,fbranch,call,dyncall,fpload,fpstore,fpalu,fpcc,fpmulsgl,fpmuldbl,fpdivsgl,fpdivdbl,fpsqrtsgl,fpsqrtdbl,multi,milli,parallel_branch"
35 (const_string "binary"))
36
37 (define_attr "pa_combine_type"
38 "fmpy,faddsub,uncond_branch,addmove,none"
39 (const_string "none"))
40
41 ;; Processor type (for scheduling, not code generation) -- this attribute
42 ;; must exactly match the processor_type enumeration in pa.h.
43 ;;
44 ;; FIXME: Add 800 scheduling for completeness?
45
46 (define_attr "cpu" "700,7100,7100LC,7200,8000" (const (symbol_ref "pa_cpu_attr")))
47
48 ;; Length (in # of bytes).
49 (define_attr "length" ""
50 (cond [(eq_attr "type" "load,fpload")
51 (if_then_else (match_operand 1 "symbolic_memory_operand" "")
52 (const_int 8) (const_int 4))
53
54 (eq_attr "type" "store,fpstore")
55 (if_then_else (match_operand 0 "symbolic_memory_operand" "")
56 (const_int 8) (const_int 4))
57
58 (eq_attr "type" "binary,shift,nullshift")
59 (if_then_else (match_operand 2 "arith_operand" "")
60 (const_int 4) (const_int 12))
61
62 (eq_attr "type" "move,unary,shift,nullshift")
63 (if_then_else (match_operand 1 "arith_operand" "")
64 (const_int 4) (const_int 8))]
65
66 (const_int 4)))
67
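;; As an illustrative sketch (not part of the original comments): a load whose
;; address is symbolic generally expands to a two-instruction sequence, e.g.
;;
;;     addil LR'sym,%r27          ; build the high part of the address in %r1
;;     ldw RR'sym(%r1),%r19       ; load using the low part (symbol and
;;                                ; registers here are hypothetical examples)
;;
;; which is why the "length" attribute above assigns 8 bytes to loads and
;; stores whose address is a symbolic_memory_operand, and 4 bytes otherwise.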
68 (define_asm_attributes
69 [(set_attr "length" "4")
70 (set_attr "type" "multi")])
71
72 ;; Attributes for instruction and branch scheduling
73
74 ;; For conditional branches.
75 (define_attr "in_branch_delay" "false,true"
76 (if_then_else (and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,dyncall,multi,milli,parallel_branch")
77 (eq_attr "length" "4"))
78 (const_string "true")
79 (const_string "false")))
80
81 ;; Disallow instructions which use the FPU since they will tie up the FPU
82 ;; even if the instruction is nullified.
83 (define_attr "in_nullified_branch_delay" "false,true"
84 (if_then_else (and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,dyncall,multi,milli,fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpdivdbl,fpsqrtsgl,fpsqrtdbl,parallel_branch")
85 (eq_attr "length" "4"))
86 (const_string "true")
87 (const_string "false")))
88
89 ;; For calls and millicode calls. Allow unconditional branches in the
90 ;; delay slot.
91 (define_attr "in_call_delay" "false,true"
92 (cond [(and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,dyncall,multi,milli,parallel_branch")
93 (eq_attr "length" "4"))
94 (const_string "true")
95 (eq_attr "type" "uncond_branch")
96 (if_then_else (ne (symbol_ref "TARGET_JUMP_IN_DELAY")
97 (const_int 0))
98 (const_string "true")
99 (const_string "false"))]
100 (const_string "false")))
101
102
103 ;; Call delay slot description.
104 (define_delay (eq_attr "type" "call")
105 [(eq_attr "in_call_delay" "true") (nil) (nil)])
106
107 ;; Millicode call delay slot description.  Note that it disallows the delay
108 ;; slot when TARGET_PORTABLE_RUNTIME is true.
109 (define_delay (eq_attr "type" "milli")
110 [(and (eq_attr "in_call_delay" "true")
111 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME") (const_int 0)))
112 (nil) (nil)])
113
114 ;; Return and other similar instructions.
115 (define_delay (eq_attr "type" "branch,parallel_branch")
116 [(eq_attr "in_branch_delay" "true") (nil) (nil)])
117
118 ;; Floating point conditional branch delay slot description.
119 (define_delay (eq_attr "type" "fbranch")
120 [(eq_attr "in_branch_delay" "true")
121 (eq_attr "in_nullified_branch_delay" "true")
122 (nil)])
123
124 ;; Integer conditional branch delay slot description.
125 ;; Nullification of conditional branches on the PA depends on the branch
126 ;; direction: forward branches nullify the delay slot only when the branch
127 ;; is taken, and backward branches only when it is not taken.  If the
128 ;; direction is unknown, then nullification is not allowed.
129 (define_delay (eq_attr "type" "cbranch")
130 [(eq_attr "in_branch_delay" "true")
131 (and (eq_attr "in_nullified_branch_delay" "true")
132 (attr_flag "forward"))
133 (and (eq_attr "in_nullified_branch_delay" "true")
134 (attr_flag "backward"))])
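;; As an illustrative example (registers and label are hypothetical): for a
;; forward branch written with the nullify completer, e.g.
;;
;;     comb,=,n %r4,%r5,L$forward
;;     ldw 0(%r6),%r7
;;
;; the ldw in the delay slot is nullified when the branch is taken and
;; executed on fall-through; a backward branch with ",n" behaves the other
;; way around.  The attr_flag tests above encode exactly this asymmetry.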
135
136 (define_delay (and (eq_attr "type" "uncond_branch")
137 (eq (symbol_ref "following_call (insn)")
138 (const_int 0)))
139 [(eq_attr "in_branch_delay" "true") (nil) (nil)])
140
141 ;; Function units of the HPPA. The following data is for the 700 CPUs
142 ;; (Mustang CPU + Timex FPU aka PA-89) because that's what I have the docs for.
143 ;; Scheduling instructions for PA-83 machines according to the Snake
144 ;; constraints shouldn't hurt.
145
146 ;; (define_function_unit {name} {num-units} {n-users} {test}
147 ;; {ready-delay} {issue-delay} [{conflict-list}])
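;; As an explanatory note (not from the original comments): {ready-delay} is
;; the number of cycles until the result is available, and {issue-delay} is
;; roughly how long the unit stays busy before another insn may use it.  For
;; example, the "pa700memory" entries below say a 700 load has a 2 cycle
;; latency with the unit free again immediately, while a store occupies the
;; memory unit for 3 cycles.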
148
149 ;; The integer ALU.
150 ;; (Noted only for documentation; units that take one cycle do not need to
151 ;; be specified.)
152
153 ;; (define_function_unit "alu" 1 0
154 ;; (and (eq_attr "type" "unary,shift,nullshift,binary,move,address")
155 ;; (eq_attr "cpu" "700"))
156 ;; 1 0)
157
158
159 ;; Memory. Disregarding Cache misses, the Mustang memory times are:
160 ;; load: 2, fpload: 3
161 ;; store, fpstore: 3, during which no D-cache operations should be scheduled.
162
163 (define_function_unit "pa700memory" 1 0
164 (and (eq_attr "type" "load,fpload")
165 (eq_attr "cpu" "700")) 2 0)
166 (define_function_unit "pa700memory" 1 0
167 (and (eq_attr "type" "store,fpstore")
168 (eq_attr "cpu" "700")) 3 3)
169
170 ;; The Timex (aka 700) has two floating-point units: ALU, and MUL/DIV/SQRT.
171 ;; Timings:
172 ;; Instruction Time Unit Minimum Distance (unit contention)
173 ;; fcpy 3 ALU 2
174 ;; fabs 3 ALU 2
175 ;; fadd 3 ALU 2
176 ;; fsub 3 ALU 2
177 ;; fcmp 3 ALU 2
178 ;; fcnv 3 ALU 2
179 ;; fmpyadd 3 ALU,MPY 2
180 ;; fmpysub 3 ALU,MPY 2
181 ;; fmpycfxt 3 ALU,MPY 2
182 ;; fmpy 3 MPY 2
183 ;; fmpyi 3 MPY 2
184 ;; fdiv,sgl 10 MPY 10
185 ;; fdiv,dbl 12 MPY 12
186 ;; fsqrt,sgl 14 MPY 14
187 ;; fsqrt,dbl 18 MPY 18
188
189 (define_function_unit "pa700fp_alu" 1 0
190 (and (eq_attr "type" "fpcc")
191 (eq_attr "cpu" "700")) 4 2)
192 (define_function_unit "pa700fp_alu" 1 0
193 (and (eq_attr "type" "fpalu")
194 (eq_attr "cpu" "700")) 3 2)
195 (define_function_unit "pa700fp_mpy" 1 0
196 (and (eq_attr "type" "fpmulsgl,fpmuldbl")
197 (eq_attr "cpu" "700")) 3 2)
198 (define_function_unit "pa700fp_mpy" 1 0
199 (and (eq_attr "type" "fpdivsgl")
200 (eq_attr "cpu" "700")) 10 10)
201 (define_function_unit "pa700fp_mpy" 1 0
202 (and (eq_attr "type" "fpdivdbl")
203 (eq_attr "cpu" "700")) 12 12)
204 (define_function_unit "pa700fp_mpy" 1 0
205 (and (eq_attr "type" "fpsqrtsgl")
206 (eq_attr "cpu" "700")) 14 14)
207 (define_function_unit "pa700fp_mpy" 1 0
208 (and (eq_attr "type" "fpsqrtdbl")
209 (eq_attr "cpu" "700")) 18 18)
210
211 ;; Function units for the 7100 and 7150. The 7100/7150 can dual-issue
212 ;; floating point computations with non-floating point computations (fp loads
213 ;; and stores are not fp computations).
214 ;;
215
216 ;; Memory. Disregarding Cache misses, memory loads take two cycles; stores also
217 ;; take two cycles, during which no Dcache operations should be scheduled.
218 ;; Any special cases are handled in pa_adjust_cost. The 7100, 7150 and 7100LC
219 ;; all have the same memory characteristics if one disregards cache misses.
220 (define_function_unit "pa7100memory" 1 0
221 (and (eq_attr "type" "load,fpload")
222 (eq_attr "cpu" "7100,7100LC")) 2 0)
223 (define_function_unit "pa7100memory" 1 0
224 (and (eq_attr "type" "store,fpstore")
225 (eq_attr "cpu" "7100,7100LC")) 2 2)
226
227 ;; The 7100/7150 has three floating-point units: ALU, MUL, and DIV.
228 ;; Timings:
229 ;; Instruction Time Unit Minimum Distance (unit contention)
230 ;; fcpy 2 ALU 1
231 ;; fabs 2 ALU 1
232 ;; fadd 2 ALU 1
233 ;; fsub 2 ALU 1
234 ;; fcmp 2 ALU 1
235 ;; fcnv 2 ALU 1
236 ;; fmpyadd 2 ALU,MPY 1
237 ;; fmpysub 2 ALU,MPY 1
238 ;; fmpycfxt 2 ALU,MPY 1
239 ;; fmpy 2 MPY 1
240 ;; fmpyi 2 MPY 1
241 ;; fdiv,sgl 8 DIV 8
242 ;; fdiv,dbl 15 DIV 15
243 ;; fsqrt,sgl 8 DIV 8
244 ;; fsqrt,dbl 15 DIV 15
245
246 (define_function_unit "pa7100fp_alu" 1 0
247 (and (eq_attr "type" "fpcc,fpalu")
248 (eq_attr "cpu" "7100")) 2 1)
249 (define_function_unit "pa7100fp_mpy" 1 0
250 (and (eq_attr "type" "fpmulsgl,fpmuldbl")
251 (eq_attr "cpu" "7100")) 2 1)
252 (define_function_unit "pa7100fp_div" 1 0
253 (and (eq_attr "type" "fpdivsgl,fpsqrtsgl")
254 (eq_attr "cpu" "7100")) 8 8)
255 (define_function_unit "pa7100fp_div" 1 0
256 (and (eq_attr "type" "fpdivdbl,fpsqrtdbl")
257 (eq_attr "cpu" "7100")) 15 15)
258
259 ;; To encourage dual issue, we define function units corresponding to
260 ;; the instructions which can be dual issued.  This is a rather crude
261 ;; approximation; the "pa7100nonflop" test in particular could be refined.
262 (define_function_unit "pa7100flop" 1 1
263 (and
264 (eq_attr "type" "fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
265 (eq_attr "cpu" "7100")) 1 1)
266
267 (define_function_unit "pa7100nonflop" 1 1
268 (and
269 (eq_attr "type" "!fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
270 (eq_attr "cpu" "7100")) 1 1)
271
272
273 ;; The memory subsystem works just like the 7100/7150 (except for cache
274 ;; miss times, which we don't model here).
275
276 ;; The 7100LC has three floating-point units: ALU, MUL, and DIV.
277 ;; Note divides and sqrt flops lock the cpu until the flop is
278 ;; finished. fmpy and xmpyu (fmpyi) lock the cpu for one cycle.
279 ;; There's no way to avoid the penalty.
280 ;; Timings:
281 ;; Instruction Time Unit Minimum Distance (unit contention)
282 ;; fcpy 2 ALU 1
283 ;; fabs 2 ALU 1
284 ;; fadd 2 ALU 1
285 ;; fsub 2 ALU 1
286 ;; fcmp 2 ALU 1
287 ;; fcnv 2 ALU 1
288 ;; fmpyadd,sgl 2 ALU,MPY 1
289 ;; fmpyadd,dbl 3 ALU,MPY 2
290 ;; fmpysub,sgl 2 ALU,MPY 1
291 ;; fmpysub,dbl 3 ALU,MPY 2
292 ;; fmpycfxt,sgl 2 ALU,MPY 1
293 ;; fmpycfxt,dbl 3 ALU,MPY 2
294 ;; fmpy,sgl 2 MPY 1
295 ;; fmpy,dbl 3 MPY 2
296 ;; fmpyi 3 MPY 2
297 ;; fdiv,sgl 8 DIV 8
298 ;; fdiv,dbl 15 DIV 15
299 ;; fsqrt,sgl 8 DIV 8
300 ;; fsqrt,dbl 15 DIV 15
301
302 (define_function_unit "pa7100LCfp_alu" 1 0
303 (and (eq_attr "type" "fpcc,fpalu")
304 (eq_attr "cpu" "7100LC,7200")) 2 1)
305 (define_function_unit "pa7100LCfp_mpy" 1 0
306 (and (eq_attr "type" "fpmulsgl")
307 (eq_attr "cpu" "7100LC,7200")) 2 1)
308 (define_function_unit "pa7100LCfp_mpy" 1 0
309 (and (eq_attr "type" "fpmuldbl")
310 (eq_attr "cpu" "7100LC,7200")) 3 2)
311 (define_function_unit "pa7100LCfp_div" 1 0
312 (and (eq_attr "type" "fpdivsgl,fpsqrtsgl")
313 (eq_attr "cpu" "7100LC,7200")) 8 8)
314 (define_function_unit "pa7100LCfp_div" 1 0
315 (and (eq_attr "type" "fpdivdbl,fpsqrtdbl")
316 (eq_attr "cpu" "7100LC,7200")) 15 15)
317
318 ;; Define the various functional units for dual-issue.
319
320 ;; There's only one floating point unit.
321 (define_function_unit "pa7100LCflop" 1 1
322 (and
323 (eq_attr "type" "fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
324 (eq_attr "cpu" "7100LC,7200")) 1 1)
325
326 ;; Shifts and memory ops execute in only one of the integer ALUs
327 (define_function_unit "pa7100LCshiftmem" 1 1
328 (and
329 (eq_attr "type" "shift,nullshift,load,fpload,store,fpstore")
330 (eq_attr "cpu" "7100LC,7200")) 1 1)
331
332 ;; We have two basic ALUs.
333 (define_function_unit "pa7100LCalu" 2 1
334 (and
335 (eq_attr "type" "!fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
336 (eq_attr "cpu" "7100LC,7200")) 1 1)
337
338 ;; I don't have complete information on the PA7200; however, most of
339 ;; what I've heard makes it look like a 7100LC without the store-store
340 ;; penalty. So that's how we'll model it.
341
342 ;; Memory. Disregarding Cache misses, memory loads and stores take
343 ;; two cycles. Any special cases are handled in pa_adjust_cost.
344 (define_function_unit "pa7200memory" 1 0
345 (and (eq_attr "type" "load,fpload,store,fpstore")
346 (eq_attr "cpu" "7200")) 2 0)
347
348 ;; I don't have detailed information on the PA7200 FP pipeline, so I
349 ;; treat it just like the 7100LC pipeline.
350 ;; Similarly for the multi-issue fake units.
351
352 ;;
353 ;; Scheduling for the PA8000 is somewhat different than scheduling for a
354 ;; traditional architecture.
355 ;;
356 ;; The PA8000 has a large (56-entry) reorder buffer that is split between
357 ;; memory and non-memory operations.
358 ;;
359 ;; The PA8000 can issue two memory and two non-memory operations per cycle to
360 ;; the function units. Similarly, the PA8000 can retire two memory and two
361 ;; non-memory operations per cycle.
362 ;;
363 ;; Given the large reorder buffer, the processor can hide most latencies.
364 ;; According to HP, the best results were obtained by scheduling for retirement
365 ;; bandwidth, with limited latency scheduling for floating point operations.
366 ;; Latency for integer operations and memory references is ignored.
367 ;;
368 ;; We claim floating point operations have a 2 cycle latency and are
369 ;; fully pipelined, except for div and sqrt which are not pipelined.
370 ;;
371 ;; It is not necessary to define the shifter and integer alu units.
372 ;;
373 ;; These first two define_function_unit descriptions model retirement from
374 ;; the reorder buffer.
375 (define_function_unit "pa8000lsu" 2 1
376 (and
377 (eq_attr "type" "load,fpload,store,fpstore")
378 (eq_attr "cpu" "8000")) 1 1)
379
380 (define_function_unit "pa8000alu" 2 1
381 (and
382 (eq_attr "type" "!load,fpload,store,fpstore")
383 (eq_attr "cpu" "8000")) 1 1)
384
385 ;; Claim floating point ops have a 2 cycle latency, excluding div and
386 ;; sqrt, which are not pipelined and issue to different units.
387 (define_function_unit "pa8000fmac" 2 0
388 (and
389 (eq_attr "type" "fpcc,fpalu,fpmulsgl,fpmuldbl")
390 (eq_attr "cpu" "8000")) 2 1)
391
392 (define_function_unit "pa8000fdiv" 2 1
393 (and
394 (eq_attr "type" "fpdivsgl,fpsqrtsgl")
395 (eq_attr "cpu" "8000")) 17 17)
396
397 (define_function_unit "pa8000fdiv" 2 1
398 (and
399 (eq_attr "type" "fpdivdbl,fpsqrtdbl")
400 (eq_attr "cpu" "8000")) 31 31)
401
402 \f
403 ;; Compare instructions.
404 ;; This controls RTL generation and register allocation.
405
406 ;; We generate RTL for comparisons and branches by having the cmpxx
407 ;; patterns store away the operands. Then, the scc and bcc patterns
408 ;; emit RTL for both the compare and the branch.
409 ;;
410
411 (define_expand "cmpsi"
412 [(set (reg:CC 0)
413 (compare:CC (match_operand:SI 0 "reg_or_0_operand" "")
414 (match_operand:SI 1 "arith5_operand" "")))]
415 ""
416 "
417 {
418 hppa_compare_op0 = operands[0];
419 hppa_compare_op1 = operands[1];
420 hppa_branch_type = CMP_SI;
421 DONE;
422 }")
423
424 (define_expand "cmpsf"
425 [(set (reg:CCFP 0)
426 (compare:CCFP (match_operand:SF 0 "reg_or_0_operand" "")
427 (match_operand:SF 1 "reg_or_0_operand" "")))]
428 "! TARGET_SOFT_FLOAT"
429 "
430 {
431 hppa_compare_op0 = operands[0];
432 hppa_compare_op1 = operands[1];
433 hppa_branch_type = CMP_SF;
434 DONE;
435 }")
436
437 (define_expand "cmpdf"
438 [(set (reg:CCFP 0)
439 (compare:CCFP (match_operand:DF 0 "reg_or_0_operand" "")
440 (match_operand:DF 1 "reg_or_0_operand" "")))]
441 "! TARGET_SOFT_FLOAT"
442 "
443 {
444 hppa_compare_op0 = operands[0];
445 hppa_compare_op1 = operands[1];
446 hppa_branch_type = CMP_DF;
447 DONE;
448 }")
449
450 (define_insn ""
451 [(set (reg:CCFP 0)
452 (match_operator:CCFP 2 "comparison_operator"
453 [(match_operand:SF 0 "reg_or_0_operand" "fG")
454 (match_operand:SF 1 "reg_or_0_operand" "fG")]))]
455 "! TARGET_SOFT_FLOAT"
456 "fcmp,sgl,%Y2 %f0,%f1"
457 [(set_attr "length" "4")
458 (set_attr "type" "fpcc")])
459
460 (define_insn ""
461 [(set (reg:CCFP 0)
462 (match_operator:CCFP 2 "comparison_operator"
463 [(match_operand:DF 0 "reg_or_0_operand" "fG")
464 (match_operand:DF 1 "reg_or_0_operand" "fG")]))]
465 "! TARGET_SOFT_FLOAT"
466 "fcmp,dbl,%Y2 %f0,%f1"
467 [(set_attr "length" "4")
468 (set_attr "type" "fpcc")])
469
470 ;; scc insns.
471
472 (define_expand "seq"
473 [(set (match_operand:SI 0 "register_operand" "")
474 (eq:SI (match_dup 1)
475 (match_dup 2)))]
476 ""
477 "
478 {
479 /* fp scc patterns rarely match, and are not a win on the PA. */
480 if (hppa_branch_type != CMP_SI)
481 FAIL;
482 /* set up operands from compare. */
483 operands[1] = hppa_compare_op0;
484 operands[2] = hppa_compare_op1;
485 /* fall through and generate default code */
486 }")
487
488 (define_expand "sne"
489 [(set (match_operand:SI 0 "register_operand" "")
490 (ne:SI (match_dup 1)
491 (match_dup 2)))]
492 ""
493 "
494 {
495 /* fp scc patterns rarely match, and are not a win on the PA. */
496 if (hppa_branch_type != CMP_SI)
497 FAIL;
498 operands[1] = hppa_compare_op0;
499 operands[2] = hppa_compare_op1;
500 }")
501
502 (define_expand "slt"
503 [(set (match_operand:SI 0 "register_operand" "")
504 (lt:SI (match_dup 1)
505 (match_dup 2)))]
506 ""
507 "
508 {
509 /* fp scc patterns rarely match, and are not a win on the PA. */
510 if (hppa_branch_type != CMP_SI)
511 FAIL;
512 operands[1] = hppa_compare_op0;
513 operands[2] = hppa_compare_op1;
514 }")
515
516 (define_expand "sgt"
517 [(set (match_operand:SI 0 "register_operand" "")
518 (gt:SI (match_dup 1)
519 (match_dup 2)))]
520 ""
521 "
522 {
523 /* fp scc patterns rarely match, and are not a win on the PA. */
524 if (hppa_branch_type != CMP_SI)
525 FAIL;
526 operands[1] = hppa_compare_op0;
527 operands[2] = hppa_compare_op1;
528 }")
529
530 (define_expand "sle"
531 [(set (match_operand:SI 0 "register_operand" "")
532 (le:SI (match_dup 1)
533 (match_dup 2)))]
534 ""
535 "
536 {
537 /* fp scc patterns rarely match, and are not a win on the PA. */
538 if (hppa_branch_type != CMP_SI)
539 FAIL;
540 operands[1] = hppa_compare_op0;
541 operands[2] = hppa_compare_op1;
542 }")
543
544 (define_expand "sge"
545 [(set (match_operand:SI 0 "register_operand" "")
546 (ge:SI (match_dup 1)
547 (match_dup 2)))]
548 ""
549 "
550 {
551 /* fp scc patterns rarely match, and are not a win on the PA. */
552 if (hppa_branch_type != CMP_SI)
553 FAIL;
554 operands[1] = hppa_compare_op0;
555 operands[2] = hppa_compare_op1;
556 }")
557
558 (define_expand "sltu"
559 [(set (match_operand:SI 0 "register_operand" "")
560 (ltu:SI (match_dup 1)
561 (match_dup 2)))]
562 ""
563 "
564 {
565 if (hppa_branch_type != CMP_SI)
566 FAIL;
567 operands[1] = hppa_compare_op0;
568 operands[2] = hppa_compare_op1;
569 }")
570
571 (define_expand "sgtu"
572 [(set (match_operand:SI 0 "register_operand" "")
573 (gtu:SI (match_dup 1)
574 (match_dup 2)))]
575 ""
576 "
577 {
578 if (hppa_branch_type != CMP_SI)
579 FAIL;
580 operands[1] = hppa_compare_op0;
581 operands[2] = hppa_compare_op1;
582 }")
583
584 (define_expand "sleu"
585 [(set (match_operand:SI 0 "register_operand" "")
586 (leu:SI (match_dup 1)
587 (match_dup 2)))]
588 ""
589 "
590 {
591 if (hppa_branch_type != CMP_SI)
592 FAIL;
593 operands[1] = hppa_compare_op0;
594 operands[2] = hppa_compare_op1;
595 }")
596
597 (define_expand "sgeu"
598 [(set (match_operand:SI 0 "register_operand" "")
599 (geu:SI (match_dup 1)
600 (match_dup 2)))]
601 ""
602 "
603 {
604 if (hppa_branch_type != CMP_SI)
605 FAIL;
606 operands[1] = hppa_compare_op0;
607 operands[2] = hppa_compare_op1;
608 }")
609
610 ;; Instruction canonicalization puts immediate operands second, which
611 ;; is the reverse of what we want.
612
613 (define_insn "scc"
614 [(set (match_operand:SI 0 "register_operand" "=r")
615 (match_operator:SI 3 "comparison_operator"
616 [(match_operand:SI 1 "register_operand" "r")
617 (match_operand:SI 2 "arith11_operand" "rI")]))]
618 ""
619 "com%I2clr,%B3 %2,%1,%0\;ldi 1,%0"
620 [(set_attr "type" "binary")
621 (set_attr "length" "8")])
622
623 (define_insn "iorscc"
624 [(set (match_operand:SI 0 "register_operand" "=r")
625 (ior:SI (match_operator:SI 3 "comparison_operator"
626 [(match_operand:SI 1 "register_operand" "r")
627 (match_operand:SI 2 "arith11_operand" "rI")])
628 (match_operator:SI 6 "comparison_operator"
629 [(match_operand:SI 4 "register_operand" "r")
630 (match_operand:SI 5 "arith11_operand" "rI")])))]
631 ""
632 "com%I2clr,%S3 %2,%1,%%r0\;com%I5clr,%B6 %5,%4,%0\;ldi 1,%0"
633 [(set_attr "type" "binary")
634 (set_attr "length" "12")])
635
636 ;; Combiner patterns for common operations performed with the output
637 ;; from an scc insn (negscc and incscc).
638 (define_insn "negscc"
639 [(set (match_operand:SI 0 "register_operand" "=r")
640 (neg:SI (match_operator:SI 3 "comparison_operator"
641 [(match_operand:SI 1 "register_operand" "r")
642 (match_operand:SI 2 "arith11_operand" "rI")])))]
643 ""
644 "com%I2clr,%B3 %2,%1,%0\;ldi -1,%0"
645 [(set_attr "type" "binary")
646 (set_attr "length" "8")])
647
648 ;; Patterns for adding the result of a boolean expression to, or subtracting
649 ;; it from, a register.  First we have special patterns that make use of the
650 ;; carry bit and output only two instructions.  For the cases that cannot in
651 ;; general be done in two instructions, the incscc pattern at the end outputs
652 ;; two or three instructions.
653
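;; As an illustration (the C source is hypothetical, not from the sources):
;;
;;     unsigned int f (unsigned int a, unsigned int b, unsigned int c)
;;     {
;;       return c + (a <= b);
;;     }
;;
;; can match the first pattern below, yielding a two-instruction sub/addc
;; sequence that folds the comparison result in through the carry bit.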
654 (define_insn ""
655 [(set (match_operand:SI 0 "register_operand" "=r")
656 (plus:SI (leu:SI (match_operand:SI 2 "register_operand" "r")
657 (match_operand:SI 3 "arith11_operand" "rI"))
658 (match_operand:SI 1 "register_operand" "r")))]
659 ""
660 "sub%I3 %3,%2,%%r0\;addc %%r0,%1,%0"
661 [(set_attr "type" "binary")
662 (set_attr "length" "8")])
663
664 ; This need only accept registers for op3, since canonicalization
665 ; replaces geu with gtu when op3 is an integer.
666 (define_insn ""
667 [(set (match_operand:SI 0 "register_operand" "=r")
668 (plus:SI (geu:SI (match_operand:SI 2 "register_operand" "r")
669 (match_operand:SI 3 "register_operand" "r"))
670 (match_operand:SI 1 "register_operand" "r")))]
671 ""
672 "sub %2,%3,%%r0\;addc %%r0,%1,%0"
673 [(set_attr "type" "binary")
674 (set_attr "length" "8")])
675
676 ; Match only integers for op3 here.  This is used as the canonical form of the
677 ; geu pattern when op3 is an integer. Don't match registers since we can't
678 ; make better code than the general incscc pattern.
679 (define_insn ""
680 [(set (match_operand:SI 0 "register_operand" "=r")
681 (plus:SI (gtu:SI (match_operand:SI 2 "register_operand" "r")
682 (match_operand:SI 3 "int11_operand" "I"))
683 (match_operand:SI 1 "register_operand" "r")))]
684 ""
685 "addi %k3,%2,%%r0\;addc %%r0,%1,%0"
686 [(set_attr "type" "binary")
687 (set_attr "length" "8")])
688
689 (define_insn "incscc"
690 [(set (match_operand:SI 0 "register_operand" "=r,r")
691 (plus:SI (match_operator:SI 4 "comparison_operator"
692 [(match_operand:SI 2 "register_operand" "r,r")
693 (match_operand:SI 3 "arith11_operand" "rI,rI")])
694 (match_operand:SI 1 "register_operand" "0,?r")))]
695 ""
696 "@
697 com%I3clr,%B4 %3,%2,%%r0\;addi 1,%0,%0
698 com%I3clr,%B4 %3,%2,%%r0\;addi,tr 1,%1,%0\;copy %1,%0"
699 [(set_attr "type" "binary,binary")
700 (set_attr "length" "8,12")])
701
702 (define_insn ""
703 [(set (match_operand:SI 0 "register_operand" "=r")
704 (minus:SI (match_operand:SI 1 "register_operand" "r")
705 (gtu:SI (match_operand:SI 2 "register_operand" "r")
706 (match_operand:SI 3 "arith11_operand" "rI"))))]
707 ""
708 "sub%I3 %3,%2,%%r0\;subb %1,0,%0"
709 [(set_attr "type" "binary")
710 (set_attr "length" "8")])
711
712 (define_insn ""
713 [(set (match_operand:SI 0 "register_operand" "=r")
714 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
715 (gtu:SI (match_operand:SI 2 "register_operand" "r")
716 (match_operand:SI 3 "arith11_operand" "rI")))
717 (match_operand:SI 4 "register_operand" "r")))]
718 ""
719 "sub%I3 %3,%2,%%r0\;subb %1,%4,%0"
720 [(set_attr "type" "binary")
721 (set_attr "length" "8")])
722
723 ; This need only accept registers for op3, since canonicalization
724 ; replaces ltu with leu when op3 is an integer.
725 (define_insn ""
726 [(set (match_operand:SI 0 "register_operand" "=r")
727 (minus:SI (match_operand:SI 1 "register_operand" "r")
728 (ltu:SI (match_operand:SI 2 "register_operand" "r")
729 (match_operand:SI 3 "register_operand" "r"))))]
730 ""
731 "sub %2,%3,%%r0\;subb %1,0,%0"
732 [(set_attr "type" "binary")
733 (set_attr "length" "8")])
734
735 (define_insn ""
736 [(set (match_operand:SI 0 "register_operand" "=r")
737 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
738 (ltu:SI (match_operand:SI 2 "register_operand" "r")
739 (match_operand:SI 3 "register_operand" "r")))
740 (match_operand:SI 4 "register_operand" "r")))]
741 ""
742 "sub %2,%3,%%r0\;subb %1,%4,%0"
743 [(set_attr "type" "binary")
744 (set_attr "length" "8")])
745
746 ; Match only integers for op3 here.  This is used as the canonical form of the
747 ; ltu pattern when op3 is an integer. Don't match registers since we can't
748 ; make better code than the general incscc pattern.
749 (define_insn ""
750 [(set (match_operand:SI 0 "register_operand" "=r")
751 (minus:SI (match_operand:SI 1 "register_operand" "r")
752 (leu:SI (match_operand:SI 2 "register_operand" "r")
753 (match_operand:SI 3 "int11_operand" "I"))))]
754 ""
755 "addi %k3,%2,%%r0\;subb %1,0,%0"
756 [(set_attr "type" "binary")
757 (set_attr "length" "8")])
758
759 (define_insn ""
760 [(set (match_operand:SI 0 "register_operand" "=r")
761 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
762 (leu:SI (match_operand:SI 2 "register_operand" "r")
763 (match_operand:SI 3 "int11_operand" "I")))
764 (match_operand:SI 4 "register_operand" "r")))]
765 ""
766 "addi %k3,%2,%%r0\;subb %1,%4,%0"
767 [(set_attr "type" "binary")
768 (set_attr "length" "8")])
769
770 (define_insn "decscc"
771 [(set (match_operand:SI 0 "register_operand" "=r,r")
772 (minus:SI (match_operand:SI 1 "register_operand" "0,?r")
773 (match_operator:SI 4 "comparison_operator"
774 [(match_operand:SI 2 "register_operand" "r,r")
775 (match_operand:SI 3 "arith11_operand" "rI,rI")])))]
776 ""
777 "@
778 com%I3clr,%B4 %3,%2,%%r0\;addi -1,%0,%0
779 com%I3clr,%B4 %3,%2,%%r0\;addi,tr -1,%1,%0\;copy %1,%0"
780 [(set_attr "type" "binary,binary")
781 (set_attr "length" "8,12")])
782
783 ; Patterns for max and min. (There is no need for an earlyclobber in the
784 ; last alternative since the middle alternative will match if op0 == op1.)
785
786 (define_insn "sminsi3"
787 [(set (match_operand:SI 0 "register_operand" "=r,r,r")
788 (smin:SI (match_operand:SI 1 "register_operand" "%0,0,r")
789 (match_operand:SI 2 "arith11_operand" "r,I,M")))]
790 ""
791 "@
792 comclr,> %2,%0,%%r0\;copy %2,%0
793 comiclr,> %2,%0,%%r0\;ldi %2,%0
794 comclr,> %1,%r2,%0\;copy %1,%0"
795 [(set_attr "type" "multi,multi,multi")
796 (set_attr "length" "8,8,8")])
797
798 (define_insn "uminsi3"
799 [(set (match_operand:SI 0 "register_operand" "=r,r")
800 (umin:SI (match_operand:SI 1 "register_operand" "%0,0")
801 (match_operand:SI 2 "arith11_operand" "r,I")))]
802 ""
803 "@
804 comclr,>> %2,%0,%%r0\;copy %2,%0
805 comiclr,>> %2,%0,%%r0\;ldi %2,%0"
806 [(set_attr "type" "multi,multi")
807 (set_attr "length" "8,8")])
808
809 (define_insn "smaxsi3"
810 [(set (match_operand:SI 0 "register_operand" "=r,r,r")
811 (smax:SI (match_operand:SI 1 "register_operand" "%0,0,r")
812 (match_operand:SI 2 "arith11_operand" "r,I,M")))]
813 ""
814 "@
815 comclr,< %2,%0,%%r0\;copy %2,%0
816 comiclr,< %2,%0,%%r0\;ldi %2,%0
817 comclr,< %1,%r2,%0\;copy %1,%0"
818 [(set_attr "type" "multi,multi,multi")
819 (set_attr "length" "8,8,8")])
820
821 (define_insn "umaxsi3"
822 [(set (match_operand:SI 0 "register_operand" "=r,r")
823 (umax:SI (match_operand:SI 1 "register_operand" "%0,0")
824 (match_operand:SI 2 "arith11_operand" "r,I")))]
825 ""
826 "@
827 comclr,<< %2,%0,%%r0\;copy %2,%0
828 comiclr,<< %2,%0,%%r0\;ldi %2,%0"
829 [(set_attr "type" "multi,multi")
830 (set_attr "length" "8,8")])
831
832 (define_insn "abssi2"
833 [(set (match_operand:SI 0 "register_operand" "=r")
834 (abs:SI (match_operand:SI 1 "register_operand" "r")))]
835 ""
836 "or,>= %%r0,%1,%0\;subi 0,%0,%0"
837 [(set_attr "type" "multi")
838 (set_attr "length" "8")])
839
840 ;;; Experimental conditional move patterns
841
842 (define_expand "movsicc"
843 [(set (match_operand:SI 0 "register_operand" "")
844 (if_then_else:SI
845 (match_operator 1 "comparison_operator"
846 [(match_dup 4)
847 (match_dup 5)])
848 (match_operand:SI 2 "reg_or_cint_move_operand" "")
849 (match_operand:SI 3 "reg_or_cint_move_operand" "")))]
850 ""
851 "
852 {
853 enum rtx_code code = GET_CODE (operands[1]);
854
855 if (hppa_branch_type != CMP_SI)
856 FAIL;
857
858 /* operands[1] is currently the result of compare_from_rtx. We want to
859 emit a compare of the original operands. */
860 operands[1] = gen_rtx_fmt_ee (code, SImode, hppa_compare_op0, hppa_compare_op1);
861 operands[4] = hppa_compare_op0;
862 operands[5] = hppa_compare_op1;
863 }")
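;; A hedged illustration (the C statement is hypothetical): something like
;;
;;     r = (a < b) ? x : 0;
;;
;; goes through the "movsicc" expander above and can match the first
;; conditional-move insn below, giving a comclr/copy (or comclr/ldi) pair
;; instead of a compare and branch.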
864
865 ; We need the first constraint alternative in order to avoid
866 ; earlyclobbers on all other alternatives.
867 (define_insn ""
868 [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r")
869 (if_then_else:SI
870 (match_operator 5 "comparison_operator"
871 [(match_operand:SI 3 "register_operand" "r,r,r,r,r")
872 (match_operand:SI 4 "arith11_operand" "rI,rI,rI,rI,rI")])
873 (match_operand:SI 1 "reg_or_cint_move_operand" "0,r,J,N,K")
874 (const_int 0)))]
875 ""
876 "@
877 com%I4clr,%S5 %4,%3,%%r0\;ldi 0,%0
878 com%I4clr,%B5 %4,%3,%0\;copy %1,%0
879 com%I4clr,%B5 %4,%3,%0\;ldi %1,%0
880 com%I4clr,%B5 %4,%3,%0\;ldil L'%1,%0
881 com%I4clr,%B5 %4,%3,%0\;zdepi %Z1,%0"
882 [(set_attr "type" "multi,multi,multi,multi,nullshift")
883 (set_attr "length" "8,8,8,8,8")])
884
885 (define_insn ""
886 [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r,r,r,r")
887 (if_then_else:SI
888 (match_operator 5 "comparison_operator"
889 [(match_operand:SI 3 "register_operand" "r,r,r,r,r,r,r,r")
890 (match_operand:SI 4 "arith11_operand" "rI,rI,rI,rI,rI,rI,rI,rI")])
891 (match_operand:SI 1 "reg_or_cint_move_operand" "0,0,0,0,r,J,N,K")
892 (match_operand:SI 2 "reg_or_cint_move_operand" "r,J,N,K,0,0,0,0")))]
893 ""
894 "@
895 com%I4clr,%S5 %4,%3,%%r0\;copy %2,%0
896 com%I4clr,%S5 %4,%3,%%r0\;ldi %2,%0
897 com%I4clr,%S5 %4,%3,%%r0\;ldil L'%2,%0
898 com%I4clr,%S5 %4,%3,%%r0\;zdepi %Z2,%0
899 com%I4clr,%B5 %4,%3,%%r0\;copy %1,%0
900 com%I4clr,%B5 %4,%3,%%r0\;ldi %1,%0
901 com%I4clr,%B5 %4,%3,%%r0\;ldil L'%1,%0
902 com%I4clr,%B5 %4,%3,%%r0\;zdepi %Z1,%0"
903 [(set_attr "type" "multi,multi,multi,nullshift,multi,multi,multi,nullshift")
904 (set_attr "length" "8,8,8,8,8,8,8,8")])
905
906 ;; Conditional Branches
907
908 (define_expand "beq"
909 [(set (pc)
910 (if_then_else (eq (match_dup 1) (match_dup 2))
911 (label_ref (match_operand 0 "" ""))
912 (pc)))]
913 ""
914 "
915 {
916 if (hppa_branch_type != CMP_SI)
917 {
918 emit_insn (gen_cmp_fp (EQ, hppa_compare_op0, hppa_compare_op1));
919 emit_bcond_fp (NE, operands[0]);
920 DONE;
921 }
922 /* set up operands from compare. */
923 operands[1] = hppa_compare_op0;
924 operands[2] = hppa_compare_op1;
925 /* fall through and generate default code */
926 }")
927
928 (define_expand "bne"
929 [(set (pc)
930 (if_then_else (ne (match_dup 1) (match_dup 2))
931 (label_ref (match_operand 0 "" ""))
932 (pc)))]
933 ""
934 "
935 {
936 if (hppa_branch_type != CMP_SI)
937 {
938 emit_insn (gen_cmp_fp (NE, hppa_compare_op0, hppa_compare_op1));
939 emit_bcond_fp (NE, operands[0]);
940 DONE;
941 }
942 operands[1] = hppa_compare_op0;
943 operands[2] = hppa_compare_op1;
944 }")
945
946 (define_expand "bgt"
947 [(set (pc)
948 (if_then_else (gt (match_dup 1) (match_dup 2))
949 (label_ref (match_operand 0 "" ""))
950 (pc)))]
951 ""
952 "
953 {
954 if (hppa_branch_type != CMP_SI)
955 {
956 emit_insn (gen_cmp_fp (GT, hppa_compare_op0, hppa_compare_op1));
957 emit_bcond_fp (NE, operands[0]);
958 DONE;
959 }
960 operands[1] = hppa_compare_op0;
961 operands[2] = hppa_compare_op1;
962 }")
963
964 (define_expand "blt"
965 [(set (pc)
966 (if_then_else (lt (match_dup 1) (match_dup 2))
967 (label_ref (match_operand 0 "" ""))
968 (pc)))]
969 ""
970 "
971 {
972 if (hppa_branch_type != CMP_SI)
973 {
974 emit_insn (gen_cmp_fp (LT, hppa_compare_op0, hppa_compare_op1));
975 emit_bcond_fp (NE, operands[0]);
976 DONE;
977 }
978 operands[1] = hppa_compare_op0;
979 operands[2] = hppa_compare_op1;
980 }")
981
982 (define_expand "bge"
983 [(set (pc)
984 (if_then_else (ge (match_dup 1) (match_dup 2))
985 (label_ref (match_operand 0 "" ""))
986 (pc)))]
987 ""
988 "
989 {
990 if (hppa_branch_type != CMP_SI)
991 {
992 emit_insn (gen_cmp_fp (GE, hppa_compare_op0, hppa_compare_op1));
993 emit_bcond_fp (NE, operands[0]);
994 DONE;
995 }
996 operands[1] = hppa_compare_op0;
997 operands[2] = hppa_compare_op1;
998 }")
999
1000 (define_expand "ble"
1001 [(set (pc)
1002 (if_then_else (le (match_dup 1) (match_dup 2))
1003 (label_ref (match_operand 0 "" ""))
1004 (pc)))]
1005 ""
1006 "
1007 {
1008 if (hppa_branch_type != CMP_SI)
1009 {
1010 emit_insn (gen_cmp_fp (LE, hppa_compare_op0, hppa_compare_op1));
1011 emit_bcond_fp (NE, operands[0]);
1012 DONE;
1013 }
1014 operands[1] = hppa_compare_op0;
1015 operands[2] = hppa_compare_op1;
1016 }")
1017
1018 (define_expand "bgtu"
1019 [(set (pc)
1020 (if_then_else (gtu (match_dup 1) (match_dup 2))
1021 (label_ref (match_operand 0 "" ""))
1022 (pc)))]
1023 ""
1024 "
1025 {
1026 if (hppa_branch_type != CMP_SI)
1027 FAIL;
1028 operands[1] = hppa_compare_op0;
1029 operands[2] = hppa_compare_op1;
1030 }")
1031
1032 (define_expand "bltu"
1033 [(set (pc)
1034 (if_then_else (ltu (match_dup 1) (match_dup 2))
1035 (label_ref (match_operand 0 "" ""))
1036 (pc)))]
1037 ""
1038 "
1039 {
1040 if (hppa_branch_type != CMP_SI)
1041 FAIL;
1042 operands[1] = hppa_compare_op0;
1043 operands[2] = hppa_compare_op1;
1044 }")
1045
1046 (define_expand "bgeu"
1047 [(set (pc)
1048 (if_then_else (geu (match_dup 1) (match_dup 2))
1049 (label_ref (match_operand 0 "" ""))
1050 (pc)))]
1051 ""
1052 "
1053 {
1054 if (hppa_branch_type != CMP_SI)
1055 FAIL;
1056 operands[1] = hppa_compare_op0;
1057 operands[2] = hppa_compare_op1;
1058 }")
1059
1060 (define_expand "bleu"
1061 [(set (pc)
1062 (if_then_else (leu (match_dup 1) (match_dup 2))
1063 (label_ref (match_operand 0 "" ""))
1064 (pc)))]
1065 ""
1066 "
1067 {
1068 if (hppa_branch_type != CMP_SI)
1069 FAIL;
1070 operands[1] = hppa_compare_op0;
1071 operands[2] = hppa_compare_op1;
1072 }")
1073
1074 ;; Match the branch patterns.
1075
1076
1077 ;; Note a long backward conditional branch with an annulled delay slot
1078 ;; has a length of 12.
1079 (define_insn ""
1080 [(set (pc)
1081 (if_then_else
1082 (match_operator 3 "comparison_operator"
1083 [(match_operand:SI 1 "reg_or_0_operand" "rM")
1084 (match_operand:SI 2 "arith5_operand" "rL")])
1085 (label_ref (match_operand 0 "" ""))
1086 (pc)))]
1087 ""
1088 "*
1089 {
1090 return output_cbranch (operands, INSN_ANNULLED_BRANCH_P (insn),
1091 get_attr_length (insn), 0, insn);
1092 }"
1093 [(set_attr "type" "cbranch")
1094 (set (attr "length")
1095 (cond [(lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1096 (const_int 8184))
1097 (const_int 4)
1098 (lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1099 (const_int 262100))
1100 (const_int 8)
1101 (eq (symbol_ref "flag_pic") (const_int 0))
1102 (const_int 20)]
1103 (const_int 28)))])
1104
1105 ;; Match the negated branch.
1106
1107 (define_insn ""
1108 [(set (pc)
1109 (if_then_else
1110 (match_operator 3 "comparison_operator"
1111 [(match_operand:SI 1 "reg_or_0_operand" "rM")
1112 (match_operand:SI 2 "arith5_operand" "rL")])
1113 (pc)
1114 (label_ref (match_operand 0 "" ""))))]
1115 ""
1116 "*
1117 {
1118 return output_cbranch (operands, INSN_ANNULLED_BRANCH_P (insn),
1119 get_attr_length (insn), 1, insn);
1120 }"
1121 [(set_attr "type" "cbranch")
1122 (set (attr "length")
1123 (cond [(lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1124 (const_int 8184))
1125 (const_int 4)
1126 (lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1127 (const_int 262100))
1128 (const_int 8)
1129 (eq (symbol_ref "flag_pic") (const_int 0))
1130 (const_int 20)]
1131 (const_int 28)))])
1132
1133 ;; Branch on Bit patterns.
1134 (define_insn ""
1135 [(set (pc)
1136 (if_then_else
1137 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1138 (const_int 1)
1139 (match_operand:SI 1 "uint5_operand" ""))
1140 (const_int 0))
1141 (label_ref (match_operand 2 "" ""))
1142 (pc)))]
1143 ""
1144 "*
1145 {
1146 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1147 get_attr_length (insn), 0, insn, 0);
1148 }"
1149 [(set_attr "type" "cbranch")
1150 (set (attr "length")
1151 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1152 (const_int 8184))
1153 (const_int 4)
1154 (const_int 8)))])
1155
1156 (define_insn ""
1157 [(set (pc)
1158 (if_then_else
1159 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1160 (const_int 1)
1161 (match_operand:SI 1 "uint5_operand" ""))
1162 (const_int 0))
1163 (pc)
1164 (label_ref (match_operand 2 "" ""))))]
1165 ""
1166 "*
1167 {
1168 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1169 get_attr_length (insn), 1, insn, 0);
1170 }"
1171 [(set_attr "type" "cbranch")
1172 (set (attr "length")
1173 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1174 (const_int 8184))
1175 (const_int 4)
1176 (const_int 8)))])
1177
1178 (define_insn ""
1179 [(set (pc)
1180 (if_then_else
1181 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1182 (const_int 1)
1183 (match_operand:SI 1 "uint5_operand" ""))
1184 (const_int 0))
1185 (label_ref (match_operand 2 "" ""))
1186 (pc)))]
1187 ""
1188 "*
1189 {
1190 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1191 get_attr_length (insn), 0, insn, 1);
1192 }"
1193 [(set_attr "type" "cbranch")
1194 (set (attr "length")
1195 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1196 (const_int 8184))
1197 (const_int 4)
1198 (const_int 8)))])
1199
1200 (define_insn ""
1201 [(set (pc)
1202 (if_then_else
1203 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1204 (const_int 1)
1205 (match_operand:SI 1 "uint5_operand" ""))
1206 (const_int 0))
1207 (pc)
1208 (label_ref (match_operand 2 "" ""))))]
1209 ""
1210 "*
1211 {
1212 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1213 get_attr_length (insn), 1, insn, 1);
1214 }"
1215 [(set_attr "type" "cbranch")
1216 (set (attr "length")
1217 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1218 (const_int 8184))
1219 (const_int 4)
1220 (const_int 8)))])
1221
1222 ;; Branch on Variable Bit patterns.
1223 (define_insn ""
1224 [(set (pc)
1225 (if_then_else
1226 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1227 (const_int 1)
1228 (match_operand:SI 1 "register_operand" "q"))
1229 (const_int 0))
1230 (label_ref (match_operand 2 "" ""))
1231 (pc)))]
1232 ""
1233 "*
1234 {
1235 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1236 get_attr_length (insn), 0, insn, 0);
1237 }"
1238 [(set_attr "type" "cbranch")
1239 (set (attr "length")
1240 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1241 (const_int 8184))
1242 (const_int 4)
1243 (const_int 8)))])
1244
1245 (define_insn ""
1246 [(set (pc)
1247 (if_then_else
1248 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1249 (const_int 1)
1250 (match_operand:SI 1 "register_operand" "q"))
1251 (const_int 0))
1252 (pc)
1253 (label_ref (match_operand 2 "" ""))))]
1254 ""
1255 "*
1256 {
1257 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1258 get_attr_length (insn), 1, insn, 0);
1259 }"
1260 [(set_attr "type" "cbranch")
1261 (set (attr "length")
1262 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1263 (const_int 8184))
1264 (const_int 4)
1265 (const_int 8)))])
1266
1267 (define_insn ""
1268 [(set (pc)
1269 (if_then_else
1270 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1271 (const_int 1)
1272 (match_operand:SI 1 "register_operand" "q"))
1273 (const_int 0))
1274 (label_ref (match_operand 2 "" ""))
1275 (pc)))]
1276 ""
1277 "*
1278 {
1279 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1280 get_attr_length (insn), 0, insn, 1);
1281 }"
1282 [(set_attr "type" "cbranch")
1283 (set (attr "length")
1284 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1285 (const_int 8184))
1286 (const_int 4)
1287 (const_int 8)))])
1288
1289 (define_insn ""
1290 [(set (pc)
1291 (if_then_else
1292 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1293 (const_int 1)
1294 (match_operand:SI 1 "register_operand" "q"))
1295 (const_int 0))
1296 (pc)
1297 (label_ref (match_operand 2 "" ""))))]
1298 ""
1299 "*
1300 {
1301 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1302 get_attr_length (insn), 1, insn, 1);
1303 }"
1304 [(set_attr "type" "cbranch")
1305 (set (attr "length")
1306 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1307 (const_int 8184))
1308 (const_int 4)
1309 (const_int 8)))])
1310
1311 ;; Floating point branches
1312 (define_insn ""
1313 [(set (pc) (if_then_else (ne (reg:CCFP 0) (const_int 0))
1314 (label_ref (match_operand 0 "" ""))
1315 (pc)))]
1316 "! TARGET_SOFT_FLOAT"
1317 "*
1318 {
1319 if (INSN_ANNULLED_BRANCH_P (insn))
1320 return \"ftest\;b,n %0\";
1321 else
1322 return \"ftest\;b%* %0\";
1323 }"
1324 [(set_attr "type" "fbranch")
1325 (set_attr "length" "8")])
1326
1327 (define_insn ""
1328 [(set (pc) (if_then_else (ne (reg:CCFP 0) (const_int 0))
1329 (pc)
1330 (label_ref (match_operand 0 "" ""))))]
1331 "! TARGET_SOFT_FLOAT"
1332 "*
1333 {
1334 if (INSN_ANNULLED_BRANCH_P (insn))
1335 return \"ftest\;add,tr %%r0,%%r0,%%r0\;b,n %0\";
1336 else
1337 return \"ftest\;add,tr %%r0,%%r0,%%r0\;b%* %0\";
1338 }"
1339 [(set_attr "type" "fbranch")
1340 (set_attr "length" "12")])
1341
1342 ;; Move instructions
1343
1344 (define_expand "movsi"
1345 [(set (match_operand:SI 0 "general_operand" "")
1346 (match_operand:SI 1 "general_operand" ""))]
1347 ""
1348 "
1349 {
1350 if (emit_move_sequence (operands, SImode, 0))
1351 DONE;
1352 }")
1353
1354 ;; Reloading an SImode or DImode value requires a scratch register if
1355 ;; going into or out of floating point registers.
1356
1357 (define_expand "reload_insi"
1358 [(set (match_operand:SI 0 "register_operand" "=Z")
1359 (match_operand:SI 1 "non_hard_reg_operand" ""))
1360 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
1361 ""
1362 "
1363 {
1364 if (emit_move_sequence (operands, SImode, operands[2]))
1365 DONE;
1366
1367 /* We don't want the clobber emitted, so handle this ourselves. */
1368 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
1369 DONE;
1370 }")
1371
1372 (define_expand "reload_outsi"
1373 [(set (match_operand:SI 0 "non_hard_reg_operand" "")
1374 (match_operand:SI 1 "register_operand" "Z"))
1375 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
1376 ""
1377 "
1378 {
1379 if (emit_move_sequence (operands, SImode, operands[2]))
1380 DONE;
1381
1382 /* We don't want the clobber emitted, so handle this ourselves. */
1383 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
1384 DONE;
1385 }")
1386
1387 ;;; pic symbol references
1388
1389 (define_insn ""
1390 [(set (match_operand:SI 0 "register_operand" "=r")
1391 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
1392 (match_operand:SI 2 "symbolic_operand" ""))))]
1393 "flag_pic && operands[1] == pic_offset_table_rtx"
1394 "ldw T'%2(%1),%0"
1395 [(set_attr "type" "load")
1396 (set_attr "length" "4")])
1397
1398 (define_insn ""
1399 [(set (match_operand:SI 0 "reg_or_nonsymb_mem_operand"
1400 "=r,r,r,r,r,Q,*q,!f,f,*TR")
1401 (match_operand:SI 1 "move_operand"
1402 "r,J,N,K,RQ,rM,rM,!fM,*RT,f"))]
1403 "(register_operand (operands[0], SImode)
1404 || reg_or_0_operand (operands[1], SImode))
1405 && ! TARGET_SOFT_FLOAT"
1406 "@
1407 copy %1,%0
1408 ldi %1,%0
1409 ldil L'%1,%0
1410 zdepi %Z1,%0
1411 ldw%M1 %1,%0
1412 stw%M0 %r1,%0
1413 mtsar %r1
1414 fcpy,sgl %f1,%0
1415 fldw%F1 %1,%0
1416 fstw%F0 %1,%0"
1417 [(set_attr "type" "move,move,move,shift,load,store,move,fpalu,fpload,fpstore")
1418 (set_attr "pa_combine_type" "addmove")
1419 (set_attr "length" "4,4,4,4,4,4,4,4,4,4")])
1420
1421 (define_insn ""
1422 [(set (match_operand:SI 0 "reg_or_nonsymb_mem_operand"
1423 "=r,r,r,r,r,Q,*q")
1424 (match_operand:SI 1 "move_operand"
1425 "r,J,N,K,RQ,rM,rM"))]
1426 "(register_operand (operands[0], SImode)
1427 || reg_or_0_operand (operands[1], SImode))
1428 && TARGET_SOFT_FLOAT"
1429 "@
1430 copy %1,%0
1431 ldi %1,%0
1432 ldil L'%1,%0
1433 zdepi %Z1,%0
1434 ldw%M1 %1,%0
1435 stw%M0 %r1,%0
1436 mtsar %r1"
1437 [(set_attr "type" "move,move,move,move,load,store,move")
1438 (set_attr "pa_combine_type" "addmove")
1439 (set_attr "length" "4,4,4,4,4,4,4")])
1440
1441 (define_insn ""
1442 [(set (match_operand:SI 0 "register_operand" "=r")
1443 (mem:SI (plus:SI (match_operand:SI 1 "basereg_operand" "r")
1444 (match_operand:SI 2 "register_operand" "r"))))]
1445 "! TARGET_DISABLE_INDEXING"
1446 "*
1447 {
1448 /* Reload can create backwards (relative to cse) unscaled index
1449 address modes when eliminating registers and possibly for
1450 pseudos that don't get hard registers. Deal with it. */
1451 if (operands[2] == hard_frame_pointer_rtx
1452 || operands[2] == stack_pointer_rtx)
1453 return \"ldwx %1(%2),%0\";
1454 else
1455 return \"ldwx %2(%1),%0\";
1456 }"
1457 [(set_attr "type" "load")
1458 (set_attr "length" "4")])
1459
1460 (define_insn ""
1461 [(set (match_operand:SI 0 "register_operand" "=r")
1462 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
1463 (match_operand:SI 2 "basereg_operand" "r"))))]
1464 "! TARGET_DISABLE_INDEXING"
1465 "*
1466 {
1467 /* Reload can create backwards (relative to cse) unscaled index
1468 address modes when eliminating registers and possibly for
1469 pseudos that don't get hard registers. Deal with it. */
1470 if (operands[1] == hard_frame_pointer_rtx
1471 || operands[1] == stack_pointer_rtx)
1472 return \"ldwx %2(%1),%0\";
1473 else
1474 return \"ldwx %1(%2),%0\";
1475 }"
1476 [(set_attr "type" "load")
1477 (set_attr "length" "4")])
1478
1479 ;; Load or store with base-register modification.
1480
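;; As an explanatory note (the C idioms shown are hypothetical): these
;; parallel patterns combine the memory access with the update of the base
;; register, as generated for code like "*--p = x" or "x = *p++", so a single
;; ldws/stws (or ldwm/stwm) with a modify completer replaces a separate add.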
1481 (define_expand "pre_load"
1482 [(parallel [(set (match_operand:SI 0 "register_operand" "")
1483 (mem (plus (match_operand 1 "register_operand" "")
1484 (match_operand 2 "pre_cint_operand" ""))))
1485 (set (match_dup 1)
1486 (plus (match_dup 1) (match_dup 2)))])]
1487 ""
1488 "
1489 {
1490 emit_insn (gen_pre_ldw (operands[0], operands[1], operands[2]));
1491 DONE;
1492 }")
1493
1494 (define_insn "pre_ldw"
1495 [(set (match_operand:SI 0 "register_operand" "=r")
1496 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "+r")
1497 (match_operand:SI 2 "pre_cint_operand" ""))))
1498 (set (match_dup 1)
1499 (plus:SI (match_dup 1) (match_dup 2)))]
1500 ""
1501 "*
1502 {
1503 if (INTVAL (operands[2]) < 0)
1504 return \"ldwm %2(%1),%0\";
1505 return \"ldws,mb %2(%1),%0\";
1506 }"
1507 [(set_attr "type" "load")
1508 (set_attr "length" "4")])
1509
1510 (define_insn ""
1511 [(set (mem:SI (plus:SI (match_operand:SI 0 "register_operand" "+r")
1512 (match_operand:SI 1 "pre_cint_operand" "")))
1513 (match_operand:SI 2 "reg_or_0_operand" "rM"))
1514 (set (match_dup 0)
1515 (plus:SI (match_dup 0) (match_dup 1)))]
1516 ""
1517 "*
1518 {
1519 if (INTVAL (operands[1]) < 0)
1520 return \"stwm %r2,%1(%0)\";
1521 return \"stws,mb %r2,%1(%0)\";
1522 }"
1523 [(set_attr "type" "store")
1524 (set_attr "length" "4")])
1525
1526 (define_insn ""
1527 [(set (match_operand:SI 0 "register_operand" "=r")
1528 (mem:SI (match_operand:SI 1 "register_operand" "+r")))
1529 (set (match_dup 1)
1530 (plus:SI (match_dup 1)
1531 (match_operand:SI 2 "post_cint_operand" "")))]
1532 ""
1533 "*
1534 {
1535 if (INTVAL (operands[2]) > 0)
1536 return \"ldwm %2(%1),%0\";
1537 return \"ldws,ma %2(%1),%0\";
1538 }"
1539 [(set_attr "type" "load")
1540 (set_attr "length" "4")])
1541
1542 (define_expand "post_store"
1543 [(parallel [(set (mem (match_operand 0 "register_operand" ""))
1544 (match_operand 1 "reg_or_0_operand" ""))
1545 (set (match_dup 0)
1546 (plus (match_dup 0)
1547 (match_operand 2 "post_cint_operand" "")))])]
1548 ""
1549 "
1550 {
1551 emit_insn (gen_post_stw (operands[0], operands[1], operands[2]));
1552 DONE;
1553 }")
1554
1555 (define_insn "post_stw"
1556 [(set (mem:SI (match_operand:SI 0 "register_operand" "+r"))
1557 (match_operand:SI 1 "reg_or_0_operand" "rM"))
1558 (set (match_dup 0)
1559 (plus:SI (match_dup 0)
1560 (match_operand:SI 2 "post_cint_operand" "")))]
1561 ""
1562 "*
1563 {
1564 if (INTVAL (operands[2]) > 0)
1565 return \"stwm %r1,%2(%0)\";
1566 return \"stws,ma %r1,%2(%0)\";
1567 }"
1568 [(set_attr "type" "store")
1569 (set_attr "length" "4")])
1570
1571 ;; For loading the address of a label while generating PIC code.
1572 ;; Note that since this pattern can be created at reload time (via movsi),
1573 ;; all the same rules for movsi apply here (no new pseudos, no temporaries).
1574 (define_insn ""
1575 [(set (match_operand 0 "register_operand" "=a")
1576 (match_operand 1 "pic_label_operand" ""))]
1577 ""
1578 "*
1579 {
1580 rtx label_rtx = gen_label_rtx ();
1581 rtx xoperands[3];
1582 extern FILE *asm_out_file;
1583
1584 xoperands[0] = operands[0];
1585 xoperands[1] = operands[1];
1586 xoperands[2] = label_rtx;
1587 output_asm_insn (\"bl .+8,%0\", xoperands);
1588 output_asm_insn (\"depi 0,31,2,%0\", xoperands);
1589 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
1590 CODE_LABEL_NUMBER (label_rtx));
1591
1592 /* If we're trying to load the address of a label that happens to be
1593 close, then we can use a shorter sequence. */
1594 if (GET_CODE (operands[1]) == LABEL_REF
1595 && insn_addresses
1596 && abs (insn_addresses[INSN_UID (XEXP (operands[1], 0))]
1597 - insn_addresses[INSN_UID (insn)]) < 8100)
1598 {
1599 /* Prefixing with R% here is wrong; it extracts just 11 bits and is
1600 always non-negative. */
1601 output_asm_insn (\"ldo %1-%2(%0),%0\", xoperands);
1602 }
1603 else
1604 {
1605 output_asm_insn (\"addil L%%%1-%2,%0\", xoperands);
1606 output_asm_insn (\"ldo R%%%1-%2(%0),%0\", xoperands);
1607 }
1608 return \"\";
1609 }"
1610 [(set_attr "type" "multi")
1611 (set_attr "length" "16")]) ; 12 or 16
1612
1613 (define_insn ""
1614 [(set (match_operand:SI 0 "register_operand" "=a")
1615 (plus:SI (match_operand:SI 1 "register_operand" "r")
1616 (high:SI (match_operand 2 "" ""))))]
1617 "symbolic_operand (operands[2], Pmode)
1618 && ! function_label_operand (operands[2])
1619 && flag_pic == 2"
1620 "addil LT'%G2,%1"
1621 [(set_attr "type" "binary")
1622 (set_attr "length" "4")])
1623
1624 ; We need this to make sure CSE doesn't simplify a memory load with a
1625 ; symbolic address, whose content it thinks it knows.  For PIC, what CSE
1626 ; thinks is the real value will be the address of that value.
1627 (define_insn ""
1628 [(set (match_operand:SI 0 "register_operand" "=r")
1629 (mem:SI
1630 (lo_sum:SI (match_operand:SI 1 "register_operand" "r")
1631 (unspec:SI
1632 [(match_operand:SI 2 "symbolic_operand" "")] 0))))]
1633 ""
1634 "*
1635 {
1636 if (flag_pic != 2)
1637 abort ();
1638 return \"ldw RT'%G2(%1),%0\";
1639 }"
1640 [(set_attr "type" "load")
1641 (set_attr "length" "4")])
1642
1643 ;; Always use addil rather than ldil;add sequences. This allows the
1644 ;; HP linker to eliminate the dp relocation if the symbolic operand
1645 ;; lives in the TEXT space.
1646 (define_insn ""
1647 [(set (match_operand:SI 0 "register_operand" "=a")
1648 (high:SI (match_operand 1 "" "")))]
1649 "symbolic_operand (operands[1], Pmode)
1650 && ! function_label_operand (operands[1])
1651 && ! read_only_operand (operands[1])
1652 && ! flag_pic"
1653 "*
1654 {
1655 if (TARGET_LONG_LOAD_STORE)
1656 return \"addil NLR'%H1,%%r27\;ldo N'%H1(%%r1),%%r1\";
1657 else
1658 return \"addil LR'%H1,%%r27\";
1659 }"
1660 [(set_attr "type" "binary")
1661 (set (attr "length")
1662 (if_then_else (eq (symbol_ref "TARGET_LONG_LOAD_STORE") (const_int 0))
1663 (const_int 4)
1664 (const_int 8)))])
1665
1666
1667 ;; This is for use in the prologue/epilogue code. We need it
1668 ;; to add large constants to a stack pointer or frame pointer.
1669 ;; Because of the additional %r1 pressure, we probably do not
1670 ;; want to use this in general code, so make it available
1671 ;; only after reload.
1672 (define_insn ""
1673 [(set (match_operand:SI 0 "register_operand" "=!a,*r")
1674 (plus:SI (match_operand:SI 1 "register_operand" "r,r")
1675 (high:SI (match_operand 2 "const_int_operand" ""))))]
1676 "reload_completed"
1677 "@
1678 addil L'%G2,%1
1679 ldil L'%G2,%0\;addl %0,%1,%0"
1680 [(set_attr "type" "binary,binary")
1681 (set_attr "length" "4,8")])
1682
1683 (define_insn ""
1684 [(set (match_operand:SI 0 "register_operand" "=r")
1685 (high:SI (match_operand 1 "" "")))]
1686   "(!flag_pic || !symbolic_operand (operands[1], Pmode))
1687 && !is_function_label_plus_const (operands[1])"
1688 "*
1689 {
1690 if (symbolic_operand (operands[1], Pmode))
1691 return \"ldil LR'%H1,%0\";
1692 else
1693 return \"ldil L'%G1,%0\";
1694 }"
1695 [(set_attr "type" "move")
1696 (set_attr "length" "4")])
1697
1698 (define_insn ""
1699 [(set (match_operand:SI 0 "register_operand" "=r")
1700 (lo_sum:SI (match_operand:SI 1 "register_operand" "r")
1701 (match_operand:SI 2 "immediate_operand" "i")))]
1702 "!is_function_label_plus_const (operands[2])"
1703 "*
1704 {
1705 if (flag_pic && symbolic_operand (operands[2], Pmode))
1706 abort ();
1707 else if (symbolic_operand (operands[2], Pmode))
1708 return \"ldo RR'%G2(%1),%0\";
1709 else
1710 return \"ldo R'%G2(%1),%0\";
1711 }"
1712 [(set_attr "type" "move")
1713 (set_attr "length" "4")])
1714
1715 ;; Now that a symbolic_address plus a constant is broken up early
1716 ;; in the compilation phase (for better CSE) we need a special
1717 ;; combiner pattern to load the symbolic address plus the constant
1718 ;; in only 2 instructions. (For cases where the symbolic address
1719 ;; was not a common subexpression.)
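;; As an illustrative sketch (symbol name and registers are hypothetical):
;; an address such as "sym+8" is then materialized by the two insns created
;; from the split, e.g.
;;
;;     addil LR'sym+8,%r27
;;     ldo RR'sym+8(%r1),%r19
;;
;; (or an equivalent ldil/ldo pair, depending on where the symbol lives).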
1720 (define_split
1721 [(set (match_operand:SI 0 "register_operand" "")
1722 (match_operand:SI 1 "symbolic_operand" ""))
1723 (clobber (match_operand:SI 2 "register_operand" ""))]
1724 "! (flag_pic && pic_label_operand (operands[1], SImode))"
1725 [(set (match_dup 2) (high:SI (match_dup 1)))
1726 (set (match_dup 0) (lo_sum:SI (match_dup 2) (match_dup 1)))]
1727 "")
1728
1729 ;; hppa_legitimize_address goes to a great deal of trouble to
1730 ;; create addresses which use indexing. In some cases, this
1731 ;; is a loss because there are no store instructions which
1732 ;; allow indexed addresses (with an integer register source).
1733 ;;
1734 ;; These define_splits try to turn a 3 insn store into
1735 ;; a 2 insn store with some creative RTL rewriting.
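;; As an illustrative sketch (register numbers are hypothetical), a store to
;; x[i], with x at a constant offset from %r25, would otherwise need
;;
;;     sh2addl %r24,%r25,%r1
;;     ldo 64(%r1),%r1
;;     stw %r26,0(%r1)
;;
;; whereas the split form recombines the constant with the store:
;;
;;     sh2addl %r24,%r25,%r1
;;     stw %r26,64(%r1)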
1736 (define_split
1737 [(set (mem:SI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
1738 (match_operand:SI 1 "shadd_operand" ""))
1739 (plus:SI (match_operand:SI 2 "register_operand" "")
1740 (match_operand:SI 3 "const_int_operand" ""))))
1741 (match_operand:SI 4 "register_operand" ""))
1742 (clobber (match_operand:SI 5 "register_operand" ""))]
1743 ""
1744 [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
1745 (match_dup 2)))
1746 (set (mem:SI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
1747 "")
1748
1749 (define_split
1750 [(set (mem:HI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
1751 (match_operand:SI 1 "shadd_operand" ""))
1752 (plus:SI (match_operand:SI 2 "register_operand" "")
1753 (match_operand:SI 3 "const_int_operand" ""))))
1754 (match_operand:HI 4 "register_operand" ""))
1755 (clobber (match_operand:SI 5 "register_operand" ""))]
1756 ""
1757 [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
1758 (match_dup 2)))
1759 (set (mem:HI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
1760 "")
1761
1762 (define_split
1763 [(set (mem:QI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
1764 (match_operand:SI 1 "shadd_operand" ""))
1765 (plus:SI (match_operand:SI 2 "register_operand" "")
1766 (match_operand:SI 3 "const_int_operand" ""))))
1767 (match_operand:QI 4 "register_operand" ""))
1768 (clobber (match_operand:SI 5 "register_operand" ""))]
1769 ""
1770 [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
1771 (match_dup 2)))
1772 (set (mem:QI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
1773 "")
1774
1775 (define_expand "movhi"
1776 [(set (match_operand:HI 0 "general_operand" "")
1777 (match_operand:HI 1 "general_operand" ""))]
1778 ""
1779 "
1780 {
1781 if (emit_move_sequence (operands, HImode, 0))
1782 DONE;
1783 }")
1784
1785 (define_insn ""
1786 [(set (match_operand:HI 0 "reg_or_nonsymb_mem_operand" "=r,r,r,r,r,Q,*q,!*f")
1787 (match_operand:HI 1 "move_operand" "r,J,N,K,RQ,rM,rM,!*fM"))]
1788 "register_operand (operands[0], HImode)
1789 || reg_or_0_operand (operands[1], HImode)"
1790 "@
1791 copy %1,%0
1792 ldi %1,%0
1793 ldil L'%1,%0
1794 zdepi %Z1,%0
1795 ldh%M1 %1,%0
1796 sth%M0 %r1,%0
1797 mtsar %r1
1798 fcpy,sgl %f1,%0"
1799 [(set_attr "type" "move,move,move,shift,load,store,move,fpalu")
1800 (set_attr "pa_combine_type" "addmove")
1801 (set_attr "length" "4,4,4,4,4,4,4,4")])
1802
1803 (define_insn ""
1804 [(set (match_operand:HI 0 "register_operand" "=r")
1805 (mem:HI (plus:SI (match_operand:SI 1 "basereg_operand" "r")
1806 (match_operand:SI 2 "register_operand" "r"))))]
1807 "! TARGET_DISABLE_INDEXING"
1808 "*
1809 {
1810 /* Reload can create backwards (relative to cse) unscaled index
1811 address modes when eliminating registers and possibly for
1812 pseudos that don't get hard registers. Deal with it. */
1813 if (operands[2] == hard_frame_pointer_rtx
1814 || operands[2] == stack_pointer_rtx)
1815 return \"ldhx %1(%2),%0\";
1816 else
1817 return \"ldhx %2(%1),%0\";
1818 }"
1819 [(set_attr "type" "load")
1820 (set_attr "length" "4")])
1821
1822 (define_insn ""
1823 [(set (match_operand:HI 0 "register_operand" "=r")
1824 (mem:HI (plus:SI (match_operand:SI 1 "register_operand" "r")
1825 (match_operand:SI 2 "basereg_operand" "r"))))]
1826 "! TARGET_DISABLE_INDEXING"
1827 "*
1828 {
1829 /* Reload can create backwards (relative to cse) unscaled index
1830 address modes when eliminating registers and possibly for
1831 pseudos that don't get hard registers. Deal with it. */
1832 if (operands[1] == hard_frame_pointer_rtx
1833 || operands[1] == stack_pointer_rtx)
1834 return \"ldhx %2(%1),%0\";
1835 else
1836 return \"ldhx %1(%2),%0\";
1837 }"
1838 [(set_attr "type" "load")
1839 (set_attr "length" "4")])
1840
1841 ; Now zero extended variants.
1842 (define_insn ""
1843 [(set (match_operand:SI 0 "register_operand" "=r")
1844 (zero_extend:SI (mem:HI
1845 (plus:SI
1846 (match_operand:SI 1 "basereg_operand" "r")
1847 (match_operand:SI 2 "register_operand" "r")))))]
1848 "! TARGET_DISABLE_INDEXING"
1849 "*
1850 {
1851 /* Reload can create backwards (relative to cse) unscaled index
1852 address modes when eliminating registers and possibly for
1853 pseudos that don't get hard registers. Deal with it. */
1854 if (operands[2] == hard_frame_pointer_rtx
1855 || operands[2] == stack_pointer_rtx)
1856 return \"ldhx %1(%2),%0\";
1857 else
1858 return \"ldhx %2(%1),%0\";
1859 }"
1860 [(set_attr "type" "load")
1861 (set_attr "length" "4")])
1862
1863 (define_insn ""
1864 [(set (match_operand:SI 0 "register_operand" "=r")
1865 (zero_extend:SI (mem:HI
1866 (plus:SI
1867 (match_operand:SI 1 "register_operand" "r")
1868 (match_operand:SI 2 "basereg_operand" "r")))))]
1869 "! TARGET_DISABLE_INDEXING"
1870 "*
1871 {
1872 /* Reload can create backwards (relative to cse) unscaled index
1873 address modes when eliminating registers and possibly for
1874 pseudos that don't get hard registers. Deal with it. */
1875 if (operands[1] == hard_frame_pointer_rtx
1876 || operands[1] == stack_pointer_rtx)
1877 return \"ldhx %2(%1),%0\";
1878 else
1879 return \"ldhx %1(%2),%0\";
1880 }"
1881 [(set_attr "type" "load")
1882 (set_attr "length" "4")])
1883
1884 (define_insn ""
1885 [(set (match_operand:HI 0 "register_operand" "=r")
1886 (mem:HI (plus:SI (match_operand:SI 1 "register_operand" "+r")
1887 (match_operand:SI 2 "int5_operand" "L"))))
1888 (set (match_dup 1)
1889 (plus:SI (match_dup 1) (match_dup 2)))]
1890 ""
1891 "ldhs,mb %2(%1),%0"
1892 [(set_attr "type" "load")
1893 (set_attr "length" "4")])
1894
1895 ; And a zero extended variant.
1896 (define_insn ""
1897 [(set (match_operand:SI 0 "register_operand" "=r")
1898 (zero_extend:SI (mem:HI
1899 (plus:SI
1900 (match_operand:SI 1 "register_operand" "+r")
1901 (match_operand:SI 2 "int5_operand" "L")))))
1902 (set (match_dup 1)
1903 (plus:SI (match_dup 1) (match_dup 2)))]
1904 ""
1905 "ldhs,mb %2(%1),%0"
1906 [(set_attr "type" "load")
1907 (set_attr "length" "4")])
1908
1909 (define_insn ""
1910 [(set (mem:HI (plus:SI (match_operand:SI 0 "register_operand" "+r")
1911 (match_operand:SI 1 "int5_operand" "L")))
1912 (match_operand:HI 2 "reg_or_0_operand" "rM"))
1913 (set (match_dup 0)
1914 (plus:SI (match_dup 0) (match_dup 1)))]
1915 ""
1916 "sths,mb %r2,%1(%0)"
1917 [(set_attr "type" "store")
1918 (set_attr "length" "4")])
1919
1920 (define_insn ""
1921 [(set (match_operand:HI 0 "register_operand" "=r")
1922 (high:HI (match_operand 1 "const_int_operand" "")))]
1923 ""
1924 "ldil L'%G1,%0"
1925 [(set_attr "type" "move")
1926 (set_attr "length" "4")])
1927
1928 (define_insn ""
1929 [(set (match_operand:HI 0 "register_operand" "=r")
1930 (lo_sum:HI (match_operand:HI 1 "register_operand" "r")
1931 (match_operand 2 "const_int_operand" "")))]
1932 ""
1933 "ldo R'%G2(%1),%0"
1934 [(set_attr "type" "move")
1935 (set_attr "length" "4")])
1936
1937 (define_expand "movqi"
1938 [(set (match_operand:QI 0 "general_operand" "")
1939 (match_operand:QI 1 "general_operand" ""))]
1940 ""
1941 "
1942 {
1943 if (emit_move_sequence (operands, QImode, 0))
1944 DONE;
1945 }")
1946
1947 (define_insn ""
1948 [(set (match_operand:QI 0 "reg_or_nonsymb_mem_operand" "=r,r,r,r,r,Q,*q,!*f")
1949 (match_operand:QI 1 "move_operand" "r,J,N,K,RQ,rM,rM,!*fM"))]
1950 "register_operand (operands[0], QImode)
1951 || reg_or_0_operand (operands[1], QImode)"
1952 "@
1953 copy %1,%0
1954 ldi %1,%0
1955 ldil L'%1,%0
1956 zdepi %Z1,%0
1957 ldb%M1 %1,%0
1958 stb%M0 %r1,%0
1959 mtsar %r1
1960 fcpy,sgl %f1,%0"
1961 [(set_attr "type" "move,move,move,shift,load,store,move,fpalu")
1962 (set_attr "pa_combine_type" "addmove")
1963 (set_attr "length" "4,4,4,4,4,4,4,4")])
1964
1965 (define_insn ""
1966 [(set (match_operand:QI 0 "register_operand" "=r")
1967 (mem:QI (plus:SI (match_operand:SI 1 "basereg_operand" "r")
1968 (match_operand:SI 2 "register_operand" "r"))))]
1969 "! TARGET_DISABLE_INDEXING"
1970 "*
1971 {
1972 /* Reload can create backwards (relative to cse) unscaled index
1973 address modes when eliminating registers and possibly for
1974 pseudos that don't get hard registers. Deal with it. */
1975 if (operands[2] == hard_frame_pointer_rtx
1976 || operands[2] == stack_pointer_rtx)
1977 return \"ldbx %1(%2),%0\";
1978 else
1979 return \"ldbx %2(%1),%0\";
1980 }"
1981 [(set_attr "type" "load")
1982 (set_attr "length" "4")])
1983
1984 (define_insn ""
1985 [(set (match_operand:QI 0 "register_operand" "=r")
1986 (mem:QI (plus:SI (match_operand:SI 1 "register_operand" "r")
1987 (match_operand:SI 2 "basereg_operand" "r"))))]
1988 "! TARGET_DISABLE_INDEXING"
1989 "*
1990 {
1991 /* Reload can create backwards (relative to cse) unscaled index
1992 address modes when eliminating registers and possibly for
1993 pseudos that don't get hard registers. Deal with it. */
1994 if (operands[1] == hard_frame_pointer_rtx
1995 || operands[1] == stack_pointer_rtx)
1996 return \"ldbx %2(%1),%0\";
1997 else
1998 return \"ldbx %1(%2),%0\";
1999 }"
2000 [(set_attr "type" "load")
2001 (set_attr "length" "4")])
2002
2003 ; Indexed byte load with zero extension to SImode or HImode.
2004 (define_insn ""
2005 [(set (match_operand:SI 0 "register_operand" "=r")
2006 (zero_extend:SI (mem:QI
2007 (plus:SI
2008 (match_operand:SI 1 "basereg_operand" "r")
2009 (match_operand:SI 2 "register_operand" "r")))))]
2010 "! TARGET_DISABLE_INDEXING"
2011 "*
2012 {
2013 /* Reload can create backwards (relative to cse) unscaled index
2014 address modes when eliminating registers and possibly for
2015 pseudos that don't get hard registers. Deal with it. */
2016 if (operands[2] == hard_frame_pointer_rtx
2017 || operands[2] == stack_pointer_rtx)
2018 return \"ldbx %1(%2),%0\";
2019 else
2020 return \"ldbx %2(%1),%0\";
2021 }"
2022 [(set_attr "type" "load")
2023 (set_attr "length" "4")])
2024
2025 (define_insn ""
2026 [(set (match_operand:SI 0 "register_operand" "=r")
2027 (zero_extend:SI (mem:QI
2028 (plus:SI
2029 (match_operand:SI 1 "register_operand" "r")
2030 (match_operand:SI 2 "basereg_operand" "r")))))]
2031 "! TARGET_DISABLE_INDEXING"
2032 "*
2033 {
2034 /* Reload can create backwards (relative to cse) unscaled index
2035 address modes when eliminating registers and possibly for
2036 pseudos that don't get hard registers. Deal with it. */
2037 if (operands[1] == hard_frame_pointer_rtx
2038 || operands[1] == stack_pointer_rtx)
2039 return \"ldbx %2(%1),%0\";
2040 else
2041 return \"ldbx %1(%2),%0\";
2042 }"
2043 [(set_attr "type" "load")
2044 (set_attr "length" "4")])
2045
2046 (define_insn ""
2047 [(set (match_operand:HI 0 "register_operand" "=r")
2048 (zero_extend:HI (mem:QI
2049 (plus:SI
2050 (match_operand:SI 1 "basereg_operand" "r")
2051 (match_operand:SI 2 "register_operand" "r")))))]
2052 "! TARGET_DISABLE_INDEXING"
2053 "*
2054 {
2055 /* Reload can create backwards (relative to cse) unscaled index
2056 address modes when eliminating registers and possibly for
2057 pseudos that don't get hard registers. Deal with it. */
2058 if (operands[2] == hard_frame_pointer_rtx
2059 || operands[2] == stack_pointer_rtx)
2060 return \"ldbx %1(%2),%0\";
2061 else
2062 return \"ldbx %2(%1),%0\";
2063 }"
2064 [(set_attr "type" "load")
2065 (set_attr "length" "4")])
2066
2067 (define_insn ""
2068 [(set (match_operand:HI 0 "register_operand" "=r")
2069 (zero_extend:HI (mem:QI
2070 (plus:SI
2071 (match_operand:SI 1 "register_operand" "r")
2072 (match_operand:SI 2 "basereg_operand" "r")))))]
2073 "! TARGET_DISABLE_INDEXING"
2074 "*
2075 {
2076 /* Reload can create backwards (relative to cse) unscaled index
2077 address modes when eliminating registers and possibly for
2078 pseudos that don't get hard registers. Deal with it. */
2079 if (operands[1] == hard_frame_pointer_rtx
2080 || operands[1] == stack_pointer_rtx)
2081 return \"ldbx %2(%1),%0\";
2082 else
2083 return \"ldbx %1(%2),%0\";
2084 }"
2085 [(set_attr "type" "load")
2086 (set_attr "length" "4")])
2087
2088 (define_insn ""
2089 [(set (match_operand:QI 0 "register_operand" "=r")
2090 (mem:QI (plus:SI (match_operand:SI 1 "register_operand" "+r")
2091 (match_operand:SI 2 "int5_operand" "L"))))
2092 (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))]
2093 ""
2094 "ldbs,mb %2(%1),%0"
2095 [(set_attr "type" "load")
2096 (set_attr "length" "4")])
2097
2098 ; Now the same thing with zero extensions.
2099 (define_insn ""
2100 [(set (match_operand:SI 0 "register_operand" "=r")
2101 (zero_extend:SI (mem:QI (plus:SI
2102 (match_operand:SI 1 "register_operand" "+r")
2103 (match_operand:SI 2 "int5_operand" "L")))))
2104 (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))]
2105 ""
2106 "ldbs,mb %2(%1),%0"
2107 [(set_attr "type" "load")
2108 (set_attr "length" "4")])
2109
2110 (define_insn ""
2111 [(set (match_operand:HI 0 "register_operand" "=r")
2112 (zero_extend:HI (mem:QI (plus:SI
2113 (match_operand:SI 1 "register_operand" "+r")
2114 (match_operand:SI 2 "int5_operand" "L")))))
2115 (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))]
2116 ""
2117 "ldbs,mb %2(%1),%0"
2118 [(set_attr "type" "load")
2119 (set_attr "length" "4")])
2120
2121 (define_insn ""
2122 [(set (mem:QI (plus:SI (match_operand:SI 0 "register_operand" "+r")
2123 (match_operand:SI 1 "int5_operand" "L")))
2124 (match_operand:QI 2 "reg_or_0_operand" "rM"))
2125 (set (match_dup 0)
2126 (plus:SI (match_dup 0) (match_dup 1)))]
2127 ""
2128 "stbs,mb %r2,%1(%0)"
2129 [(set_attr "type" "store")
2130 (set_attr "length" "4")])
2131
2132 ;; The definition of this insn does not really explain what it does,
2133 ;; but it should suffice that anything generated as this insn will be
2134 ;; recognized as one, and that it will not successfully combine with
2135 ;; anything.
2136 (define_expand "movstrsi"
2137 [(parallel [(set (match_operand:BLK 0 "" "")
2138 (match_operand:BLK 1 "" ""))
2139 (clobber (match_dup 7))
2140 (clobber (match_dup 8))
2141 (clobber (match_dup 4))
2142 (clobber (match_dup 5))
2143 (clobber (match_dup 6))
2144 (use (match_operand:SI 2 "arith_operand" ""))
2145 (use (match_operand:SI 3 "const_int_operand" ""))])]
2146 ""
2147 "
2148 {
2149 int size, align;
2150
2151 /* HP provides a very fast block move library routine for the PA;
2152 this routine includes:
2153
2154 4x4 byte at a time block moves,
2155 1x4 byte at a time with alignment checked at runtime with
2156 attempts to align the source and destination as needed
2157 1x1 byte loop
2158
2159 With that in mind, here are the heuristics used to guess when
2160 the inlined block move will be better than the library block
2161 move:
2162
2163 If the size isn't constant, then always use the library routines.
2164
2165 If the size is large with respect to the known alignment, then use
2166 the library routines.
2167
2168 If the size is small with respect to the known alignment, then open
2169 code the copy (since that will lead to better scheduling).
2170
2171 Else use the block move pattern. */
2172
2173 /* Undetermined size, use the library routine. */
2174 if (GET_CODE (operands[2]) != CONST_INT)
2175 FAIL;
2176
2177 size = INTVAL (operands[2]);
2178 align = INTVAL (operands[3]);
2179 align = align > 4 ? 4 : align;
2180
2181 /* If size/alignment > 16 (i.e., the size is large with respect to the
2182 alignment), then use the library routines. */
2183 if (size / align > 16)
2184 FAIL;
2185
2186 /* This does happen, but not often enough to worry much about. */
2187 if (size / align < MOVE_RATIO)
2188 FAIL;
2189
2190 /* Fall through means we're going to use our block move pattern. */
2191 operands[0]
2192 = change_address (operands[0], VOIDmode,
2193 copy_to_mode_reg (SImode, XEXP (operands[0], 0)));
2194 operands[1]
2195 = change_address (operands[1], VOIDmode,
2196 copy_to_mode_reg (SImode, XEXP (operands[1], 0)));
2197 operands[4] = gen_reg_rtx (SImode);
2198 operands[5] = gen_reg_rtx (SImode);
2199 operands[6] = gen_reg_rtx (SImode);
2200 operands[7] = XEXP (operands[0], 0);
2201 operands[8] = XEXP (operands[1], 0);
2202 }")
2203
2204 ;; The operand constraints are written like this to support both compile-time
2205 ;; and run-time determined byte count. If the count is run-time determined,
2206 ;; the register with the byte count is clobbered by the copying code, and
2207 ;; therefore it is forced to operand 2. If the count is compile-time
2208 ;; determined, we need two scratch registers for the unrolled code.
2209 (define_insn "movstrsi_internal"
2210 [(set (mem:BLK (match_operand:SI 0 "register_operand" "+r,r"))
2211 (mem:BLK (match_operand:SI 1 "register_operand" "+r,r")))
2212 (clobber (match_dup 0))
2213 (clobber (match_dup 1))
2214 (clobber (match_operand:SI 2 "register_operand" "=r,r")) ;loop cnt/tmp
2215 (clobber (match_operand:SI 3 "register_operand" "=&r,&r")) ;item tmp
2216 (clobber (match_operand:SI 6 "register_operand" "=&r,&r")) ;item tmp2
2217 (use (match_operand:SI 4 "arith_operand" "J,2")) ;byte count
2218 (use (match_operand:SI 5 "const_int_operand" "n,n"))] ;alignment
2219 ""
2220 "* return output_block_move (operands, !which_alternative);"
2221 [(set_attr "type" "multi,multi")])
2222 \f
2223 ;; Floating point move insns
2224
2225 ;; This pattern forces (set (reg:DF ...) (const_double ...))
2226 ;; to be reloaded by putting the constant into memory when
2227 ;; reg is a floating point register.
2228 ;;
2229 ;; For integer registers we use ldil;ldo to set the appropriate
2230 ;; value.
2231 ;;
2232 ;; This must come before the movdf pattern, and it must be present
2233 ;; to handle obscure reloading cases.
2234 (define_insn ""
2235 [(set (match_operand:DF 0 "register_operand" "=?r,f")
2236 (match_operand:DF 1 "" "?F,m"))]
2237 "GET_CODE (operands[1]) == CONST_DOUBLE
2238 && operands[1] != CONST0_RTX (DFmode)
2239 && ! TARGET_SOFT_FLOAT"
2240 "* return (which_alternative == 0 ? output_move_double (operands)
2241 : \"fldd%F1 %1,%0\");"
2242 [(set_attr "type" "move,fpload")
2243 (set_attr "length" "16,4")])
2244
2245 (define_expand "movdf"
2246 [(set (match_operand:DF 0 "general_operand" "")
2247 (match_operand:DF 1 "general_operand" ""))]
2248 ""
2249 "
2250 {
2251 if (emit_move_sequence (operands, DFmode, 0))
2252 DONE;
2253 }")
2254
2255 ;; Reloading an SImode or DImode value requires a scratch register if
2256 ;; going into or out of floating point registers.
2257
2258 (define_expand "reload_indf"
2259 [(set (match_operand:DF 0 "register_operand" "=Z")
2260 (match_operand:DF 1 "non_hard_reg_operand" ""))
2261 (clobber (match_operand:DF 2 "register_operand" "=&r"))]
2262 ""
2263 "
2264 {
2265 if (emit_move_sequence (operands, DFmode, operands[2]))
2266 DONE;
2267
2268 /* We don't want the clobber emitted, so handle this ourselves. */
2269 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2270 DONE;
2271 }")
2272
2273 (define_expand "reload_outdf"
2274 [(set (match_operand:DF 0 "non_hard_reg_operand" "")
2275 (match_operand:DF 1 "register_operand" "Z"))
2276 (clobber (match_operand:DF 2 "register_operand" "=&r"))]
2277 ""
2278 "
2279 {
2280 if (emit_move_sequence (operands, DFmode, operands[2]))
2281 DONE;
2282
2283 /* We don't want the clobber emitted, so handle this ourselves. */
2284 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2285 DONE;
2286 }")
2287
2288 (define_insn ""
2289 [(set (match_operand:DF 0 "reg_or_nonsymb_mem_operand"
2290 "=f,*r,RQ,?o,?Q,f,*r,*r")
2291 (match_operand:DF 1 "reg_or_0_or_nonsymb_mem_operand"
2292 "fG,*rG,f,*r,*r,RQ,o,RQ"))]
2293 "(register_operand (operands[0], DFmode)
2294 || reg_or_0_operand (operands[1], DFmode))
2295 && ! (GET_CODE (operands[1]) == CONST_DOUBLE
2296 && GET_CODE (operands[0]) == MEM)
2297 && ! TARGET_SOFT_FLOAT"
2298 "*
2299 {
2300 if (FP_REG_P (operands[0]) || FP_REG_P (operands[1])
2301 || operands[1] == CONST0_RTX (DFmode))
2302 return output_fp_move_double (operands);
2303 return output_move_double (operands);
2304 }"
2305 [(set_attr "type" "fpalu,move,fpstore,store,store,fpload,load,load")
2306 (set_attr "length" "4,8,4,8,16,4,8,16")])
2307
2308 (define_insn ""
2309 [(set (match_operand:DF 0 "reg_or_nonsymb_mem_operand"
2310 "=r,?o,?Q,r,r")
2311 (match_operand:DF 1 "reg_or_0_or_nonsymb_mem_operand"
2312 "rG,r,r,o,Q"))]
2313 "(register_operand (operands[0], DFmode)
2314 || reg_or_0_operand (operands[1], DFmode))
2315 && TARGET_SOFT_FLOAT"
2316 "*
2317 {
2318 return output_move_double (operands);
2319 }"
2320 [(set_attr "type" "move,store,store,load,load")
2321 (set_attr "length" "8,8,16,8,16")])
2322
2323 (define_insn ""
2324 [(set (match_operand:DF 0 "register_operand" "=fx")
2325 (mem:DF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2326 (match_operand:SI 2 "register_operand" "r"))))]
2327 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2328 "*
2329 {
2330 /* Reload can create backwards (relative to cse) unscaled index
2331 address modes when eliminating registers and possibly for
2332 pseudos that don't get hard registers. Deal with it. */
2333 if (operands[2] == hard_frame_pointer_rtx
2334 || operands[2] == stack_pointer_rtx)
2335 return \"flddx %1(%2),%0\";
2336 else
2337 return \"flddx %2(%1),%0\";
2338 }"
2339 [(set_attr "type" "fpload")
2340 (set_attr "length" "4")])
2341
2342 (define_insn ""
2343 [(set (match_operand:DF 0 "register_operand" "=fx")
2344 (mem:DF (plus:SI (match_operand:SI 1 "register_operand" "r")
2345 (match_operand:SI 2 "basereg_operand" "r"))))]
2346 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2347 "*
2348 {
2349 /* Reload can create backwards (relative to cse) unscaled index
2350 address modes when eliminating registers and possibly for
2351 pseudos that don't get hard registers. Deal with it. */
2352 if (operands[1] == hard_frame_pointer_rtx
2353 || operands[1] == stack_pointer_rtx)
2354 return \"flddx %2(%1),%0\";
2355 else
2356 return \"flddx %1(%2),%0\";
2357 }"
2358 [(set_attr "type" "fpload")
2359 (set_attr "length" "4")])
2360
2361 (define_insn ""
2362 [(set (mem:DF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2363 (match_operand:SI 2 "register_operand" "r")))
2364 (match_operand:DF 0 "register_operand" "fx"))]
2365 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2366 "*
2367 {
2368 /* Reload can create backwards (relative to cse) unscaled index
2369 address modes when eliminating registers and possibly for
2370 pseudos that don't get hard registers. Deal with it. */
2371 if (operands[2] == hard_frame_pointer_rtx
2372 || operands[2] == stack_pointer_rtx)
2373 return \"fstdx %0,%1(%2)\";
2374 else
2375 return \"fstdx %0,%2(%1)\";
2376 }"
2377 [(set_attr "type" "fpstore")
2378 (set_attr "length" "4")])
2379
2380 (define_insn ""
2381 [(set (mem:DF (plus:SI (match_operand:SI 1 "register_operand" "r")
2382 (match_operand:SI 2 "basereg_operand" "r")))
2383 (match_operand:DF 0 "register_operand" "fx"))]
2384 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2385 "*
2386 {
2387 /* Reload can create backwards (relative to cse) unscaled index
2388 address modes when eliminating registers and possibly for
2389 pseudos that don't get hard registers. Deal with it. */
2390 if (operands[1] == hard_frame_pointer_rtx
2391 || operands[1] == stack_pointer_rtx)
2392 return \"fstdx %0,%2(%1)\";
2393 else
2394 return \"fstdx %0,%1(%2)\";
2395 }"
2396 [(set_attr "type" "fpstore")
2397 (set_attr "length" "4")])
2398
2399 (define_expand "movdi"
2400 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand" "")
2401 (match_operand:DI 1 "general_operand" ""))]
2402 ""
2403 "
2404 {
2405 if (emit_move_sequence (operands, DImode, 0))
2406 DONE;
2407 }")
2408
2409 (define_expand "reload_indi"
2410 [(set (match_operand:DI 0 "register_operand" "=Z")
2411 (match_operand:DI 1 "non_hard_reg_operand" ""))
2412 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
2413 ""
2414 "
2415 {
2416 if (emit_move_sequence (operands, DImode, operands[2]))
2417 DONE;
2418
2419 /* We don't want the clobber emitted, so handle this ourselves. */
2420 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2421 DONE;
2422 }")
2423
2424 (define_expand "reload_outdi"
2425 [(set (match_operand:DI 0 "general_operand" "")
2426 (match_operand:DI 1 "register_operand" "Z"))
2427 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
2428 ""
2429 "
2430 {
2431 if (emit_move_sequence (operands, DImode, operands[2]))
2432 DONE;
2433
2434 /* We don't want the clobber emitted, so handle this ourselves. */
2435 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2436 DONE;
2437 }")
2438
2439 (define_insn ""
2440 [(set (match_operand:DI 0 "register_operand" "=r")
2441 (high:DI (match_operand 1 "" "")))]
2442 ""
2443 "*
2444 {
2445 rtx op0 = operands[0];
2446 rtx op1 = operands[1];
2447
2448 if (GET_CODE (op1) == CONST_INT)
2449 {
2450 operands[0] = operand_subword (op0, 1, 0, DImode);
2451 output_asm_insn (\"ldil L'%1,%0\", operands);
2452
2453 operands[0] = operand_subword (op0, 0, 0, DImode);
2454 if (INTVAL (op1) < 0)
2455 output_asm_insn (\"ldi -1,%0\", operands);
2456 else
2457 output_asm_insn (\"ldi 0,%0\", operands);
2458 return \"\";
2459 }
2460 else if (GET_CODE (op1) == CONST_DOUBLE)
2461 {
2462 operands[0] = operand_subword (op0, 1, 0, DImode);
2463 operands[1] = GEN_INT (CONST_DOUBLE_LOW (op1));
2464 output_asm_insn (\"ldil L'%1,%0\", operands);
2465
2466 operands[0] = operand_subword (op0, 0, 0, DImode);
2467 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (op1));
2468 output_asm_insn (singlemove_string (operands), operands);
2469 return \"\";
2470 }
2471 else
2472 abort ();
2473 }"
2474 [(set_attr "type" "move")
2475 (set_attr "length" "8")])
2476
2477 (define_insn ""
2478 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand"
2479 "=r,o,Q,r,r,r,f,f,*TR")
2480 (match_operand:DI 1 "general_operand"
2481 "rM,r,r,o*R,Q,i,fM,*TR,f"))]
2482 "(register_operand (operands[0], DImode)
2483 || reg_or_0_operand (operands[1], DImode))
2484 && ! TARGET_SOFT_FLOAT"
2485 "*
2486 {
2487 if (FP_REG_P (operands[0]) || FP_REG_P (operands[1])
2488 || (operands[1] == CONST0_RTX (DImode)))
2489 return output_fp_move_double (operands);
2490 return output_move_double (operands);
2491 }"
2492 [(set_attr "type" "move,store,store,load,load,multi,fpalu,fpload,fpstore")
2493 (set_attr "length" "8,8,16,8,16,16,4,4,4")])
2494
2495 (define_insn ""
2496 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand"
2497 "=r,o,Q,r,r,r")
2498 (match_operand:DI 1 "general_operand"
2499 "rM,r,r,o,Q,i"))]
2500 "(register_operand (operands[0], DImode)
2501 || reg_or_0_operand (operands[1], DImode))
2502 && TARGET_SOFT_FLOAT"
2503 "*
2504 {
2505 return output_move_double (operands);
2506 }"
2507 [(set_attr "type" "move,store,store,load,load,multi")
2508 (set_attr "length" "8,8,16,8,16,16")])
2509
2510 (define_insn ""
2511 [(set (match_operand:DI 0 "register_operand" "=r,&r")
2512 (lo_sum:DI (match_operand:DI 1 "register_operand" "0,r")
2513 (match_operand:DI 2 "immediate_operand" "i,i")))]
2514 ""
2515 "*
2516 {
2517 /* Don't output a 64 bit constant, since we can't trust the assembler to
2518 handle it correctly. */
2519 if (GET_CODE (operands[2]) == CONST_DOUBLE)
2520 operands[2] = GEN_INT (CONST_DOUBLE_LOW (operands[2]));
2521 if (which_alternative == 1)
2522 output_asm_insn (\"copy %1,%0\", operands);
2523 return \"ldo R'%G2(%R1),%R0\";
2524 }"
2525 [(set_attr "type" "move,move")
2526 (set_attr "length" "4,8")])
2527
2528 ;; This pattern forces (set (reg:SF ...) (const_double ...))
2529 ;; to be reloaded by putting the constant into memory when
2530 ;; reg is a floating point register.
2531 ;;
2532 ;; For integer registers we use ldil;ldo to set the appropriate
2533 ;; value.
2534 ;;
2535 ;; This must come before the movsf pattern, and it must be present
2536 ;; to handle obscure reloading cases.
2537 (define_insn ""
2538 [(set (match_operand:SF 0 "register_operand" "=?r,f")
2539 (match_operand:SF 1 "" "?F,m"))]
2540 "GET_CODE (operands[1]) == CONST_DOUBLE
2541 && operands[1] != CONST0_RTX (SFmode)
2542 && ! TARGET_SOFT_FLOAT"
2543 "* return (which_alternative == 0 ? singlemove_string (operands)
2544 : \" fldw%F1 %1,%0\");"
2545 [(set_attr "type" "move,fpload")
2546 (set_attr "length" "8,4")])
2547
2548 (define_expand "movsf"
2549 [(set (match_operand:SF 0 "general_operand" "")
2550 (match_operand:SF 1 "general_operand" ""))]
2551 ""
2552 "
2553 {
2554 if (emit_move_sequence (operands, SFmode, 0))
2555 DONE;
2556 }")
2557
2558 ;; Reloading an SImode or DImode value requires a scratch register if
2559 ;; going into or out of floating point registers.
2560
2561 (define_expand "reload_insf"
2562 [(set (match_operand:SF 0 "register_operand" "=Z")
2563 (match_operand:SF 1 "non_hard_reg_operand" ""))
2564 (clobber (match_operand:SF 2 "register_operand" "=&r"))]
2565 ""
2566 "
2567 {
2568 if (emit_move_sequence (operands, SFmode, operands[2]))
2569 DONE;
2570
2571 /* We don't want the clobber emitted, so handle this ourselves. */
2572 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2573 DONE;
2574 }")
2575
2576 (define_expand "reload_outsf"
2577 [(set (match_operand:SF 0 "non_hard_reg_operand" "")
2578 (match_operand:SF 1 "register_operand" "Z"))
2579 (clobber (match_operand:SF 2 "register_operand" "=&r"))]
2580 ""
2581 "
2582 {
2583 if (emit_move_sequence (operands, SFmode, operands[2]))
2584 DONE;
2585
2586 /* We don't want the clobber emitted, so handle this ourselves. */
2587 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2588 DONE;
2589 }")
2590
2591 (define_insn ""
2592 [(set (match_operand:SF 0 "reg_or_nonsymb_mem_operand"
2593 "=f,r,f,r,RQ,Q")
2594 (match_operand:SF 1 "reg_or_0_or_nonsymb_mem_operand"
2595 "fG,rG,RQ,RQ,f,rG"))]
2596 "(register_operand (operands[0], SFmode)
2597 || reg_or_0_operand (operands[1], SFmode))
2598 && ! TARGET_SOFT_FLOAT"
2599 "@
2600 fcpy,sgl %f1,%0
2601 copy %r1,%0
2602 fldw%F1 %1,%0
2603 ldw%M1 %1,%0
2604 fstw%F0 %r1,%0
2605 stw%M0 %r1,%0"
2606 [(set_attr "type" "fpalu,move,fpload,load,fpstore,store")
2607 (set_attr "pa_combine_type" "addmove")
2608 (set_attr "length" "4,4,4,4,4,4")])
2609
2610 (define_insn ""
2611 [(set (match_operand:SF 0 "reg_or_nonsymb_mem_operand"
2612 "=r,r,Q")
2613 (match_operand:SF 1 "reg_or_0_or_nonsymb_mem_operand"
2614 "rG,RQ,rG"))]
2615 "(register_operand (operands[0], SFmode)
2616 || reg_or_0_operand (operands[1], SFmode))
2617 && TARGET_SOFT_FLOAT"
2618 "@
2619 copy %r1,%0
2620 ldw%M1 %1,%0
2621 stw%M0 %r1,%0"
2622 [(set_attr "type" "move,load,store")
2623 (set_attr "pa_combine_type" "addmove")
2624 (set_attr "length" "4,4,4")])
2625
2626 (define_insn ""
2627 [(set (match_operand:SF 0 "register_operand" "=fx")
2628 (mem:SF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2629 (match_operand:SI 2 "register_operand" "r"))))]
2630 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2631 "*
2632 {
2633 /* Reload can create backwards (relative to cse) unscaled index
2634 address modes when eliminating registers and possibly for
2635 pseudos that don't get hard registers. Deal with it. */
2636 if (operands[2] == hard_frame_pointer_rtx
2637 || operands[2] == stack_pointer_rtx)
2638 return \"fldwx %1(%2),%0\";
2639 else
2640 return \"fldwx %2(%1),%0\";
2641 }"
2642 [(set_attr "type" "fpload")
2643 (set_attr "length" "4")])
2644
2645 (define_insn ""
2646 [(set (match_operand:SF 0 "register_operand" "=fx")
2647 (mem:SF (plus:SI (match_operand:SI 1 "register_operand" "r")
2648 (match_operand:SI 2 "basereg_operand" "r"))))]
2649 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2650 "*
2651 {
2652 /* Reload can create backwards (relative to cse) unscaled index
2653 address modes when eliminating registers and possibly for
2654 pseudos that don't get hard registers. Deal with it. */
2655 if (operands[1] == hard_frame_pointer_rtx
2656 || operands[1] == stack_pointer_rtx)
2657 return \"fldwx %2(%1),%0\";
2658 else
2659 return \"fldwx %1(%2),%0\";
2660 }"
2661 [(set_attr "type" "fpload")
2662 (set_attr "length" "4")])
2663
2664 (define_insn ""
2665 [(set (mem:SF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2666 (match_operand:SI 2 "register_operand" "r")))
2667 (match_operand:SF 0 "register_operand" "fx"))]
2668 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2669 "*
2670 {
2671 /* Reload can create backwards (relative to cse) unscaled index
2672 address modes when eliminating registers and possibly for
2673 pseudos that don't get hard registers. Deal with it. */
2674 if (operands[2] == hard_frame_pointer_rtx
2675 || operands[2] == stack_pointer_rtx)
2676 return \"fstwx %0,%1(%2)\";
2677 else
2678 return \"fstwx %0,%2(%1)\";
2679 }"
2680 [(set_attr "type" "fpstore")
2681 (set_attr "length" "4")])
2682 \f
2683 (define_insn ""
2684 [(set (mem:SF (plus:SI (match_operand:SI 1 "register_operand" "r")
2685 (match_operand:SI 2 "basereg_operand" "r")))
2686 (match_operand:SF 0 "register_operand" "fx"))]
2687 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2688 "*
2689 {
2690 /* Reload can create backwards (relative to cse) unscaled index
2691 address modes when eliminating registers and possibly for
2692 pseudos that don't get hard registers. Deal with it. */
2693 if (operands[1] == hard_frame_pointer_rtx
2694 || operands[1] == stack_pointer_rtx)
2695 return \"fstwx %0,%2(%1)\";
2696 else
2697 return \"fstwx %0,%1(%2)\";
2698 }"
2699 [(set_attr "type" "fpstore")
2700 (set_attr "length" "4")])
2701 \f
2702
2703 ;;- zero extension instructions
2704 ;; We use define_expands for the zero extension patterns to make sure
2705 ;; the operands get loaded into registers. The define_insns accept
2706 ;; memory operands. This gives better overall code than a single
2707 ;; pattern that either always or never accepts memory operands.
2708
2709 (define_expand "zero_extendhisi2"
2710 [(set (match_operand:SI 0 "register_operand" "")
2711 (zero_extend:SI
2712 (match_operand:HI 1 "register_operand" "")))]
2713 ""
2714 "")
2715
2716 (define_insn ""
2717 [(set (match_operand:SI 0 "register_operand" "=r,r")
2718 (zero_extend:SI
2719 (match_operand:HI 1 "move_operand" "r,RQ")))]
2720 "GET_CODE (operands[1]) != CONST_INT"
2721 "@
2722 extru %1,31,16,%0
2723 ldh%M1 %1,%0"
2724 [(set_attr "type" "shift,load")
2725 (set_attr "length" "4,4")])
2726
2727 (define_expand "zero_extendqihi2"
2728 [(set (match_operand:HI 0 "register_operand" "")
2729 (zero_extend:HI
2730 (match_operand:QI 1 "register_operand" "")))]
2731 ""
2732 "")
2733
2734 (define_insn ""
2735 [(set (match_operand:HI 0 "register_operand" "=r,r")
2736 (zero_extend:HI
2737 (match_operand:QI 1 "move_operand" "r,RQ")))]
2738 "GET_CODE (operands[1]) != CONST_INT"
2739 "@
2740 extru %1,31,8,%0
2741 ldb%M1 %1,%0"
2742 [(set_attr "type" "shift,load")
2743 (set_attr "length" "4,4")])
2744
2745 (define_expand "zero_extendqisi2"
2746 [(set (match_operand:SI 0 "register_operand" "")
2747 (zero_extend:SI
2748 (match_operand:QI 1 "register_operand" "")))]
2749 ""
2750 "")
2751
2752 (define_insn ""
2753 [(set (match_operand:SI 0 "register_operand" "=r,r")
2754 (zero_extend:SI
2755 (match_operand:QI 1 "move_operand" "r,RQ")))]
2756 "GET_CODE (operands[1]) != CONST_INT"
2757 "@
2758 extru %1,31,8,%0
2759 ldb%M1 %1,%0"
2760 [(set_attr "type" "shift,load")
2761 (set_attr "length" "4,4")])
2762
2763 ;;- sign extension instructions
2764
2765 (define_insn "extendhisi2"
2766 [(set (match_operand:SI 0 "register_operand" "=r")
2767 (sign_extend:SI (match_operand:HI 1 "register_operand" "r")))]
2768 ""
2769 "extrs %1,31,16,%0"
2770 [(set_attr "type" "shift")
2771 (set_attr "length" "4")])
2772
2773 (define_insn "extendqihi2"
2774 [(set (match_operand:HI 0 "register_operand" "=r")
2775 (sign_extend:HI (match_operand:QI 1 "register_operand" "r")))]
2776 ""
2777 "extrs %1,31,8,%0"
2778 [(set_attr "type" "shift")
2779 (set_attr "length" "4")])
2780
2781 (define_insn "extendqisi2"
2782 [(set (match_operand:SI 0 "register_operand" "=r")
2783 (sign_extend:SI (match_operand:QI 1 "register_operand" "r")))]
2784 ""
2785 "extrs %1,31,8,%0"
2786 [(set_attr "type" "shift")
2787 (set_attr "length" "4")])
2788 \f
2789 ;; Conversions between float and double.
2790
2791 (define_insn "extendsfdf2"
2792 [(set (match_operand:DF 0 "register_operand" "=f")
2793 (float_extend:DF
2794 (match_operand:SF 1 "register_operand" "f")))]
2795 "! TARGET_SOFT_FLOAT"
2796 "fcnvff,sgl,dbl %1,%0"
2797 [(set_attr "type" "fpalu")
2798 (set_attr "length" "4")])
2799
2800 (define_insn "truncdfsf2"
2801 [(set (match_operand:SF 0 "register_operand" "=f")
2802 (float_truncate:SF
2803 (match_operand:DF 1 "register_operand" "f")))]
2804 "! TARGET_SOFT_FLOAT"
2805 "fcnvff,dbl,sgl %1,%0"
2806 [(set_attr "type" "fpalu")
2807 (set_attr "length" "4")])
2808
2809 ;; Conversion between fixed point and floating point.
2810 ;; Note that among the fix-to-float insns
2811 ;; the ones that start with SImode come first.
2812 ;; That is so that an operand that is a CONST_INT
2813 ;; (and therefore lacks a specific machine mode)
2814 ;; will be recognized as SImode (which is always valid)
2815 ;; rather than as QImode or HImode.
2816
2817 ;; This pattern forces (set (reg:SF ...) (float:SF (const_int ...)))
2818 ;; to be reloaded by putting the constant into memory.
2819 ;; It must come before the more general floatsisf2 pattern.
2820 (define_insn ""
2821 [(set (match_operand:SF 0 "register_operand" "=f")
2822 (float:SF (match_operand:SI 1 "const_int_operand" "m")))]
2823 "! TARGET_SOFT_FLOAT"
2824 "fldw%F1 %1,%0\;fcnvxf,sgl,sgl %0,%0"
2825 [(set_attr "type" "fpalu")
2826 (set_attr "length" "8")])
2827
2828 (define_insn "floatsisf2"
2829 [(set (match_operand:SF 0 "register_operand" "=f")
2830 (float:SF (match_operand:SI 1 "register_operand" "f")))]
2831 "! TARGET_SOFT_FLOAT"
2832 "fcnvxf,sgl,sgl %1,%0"
2833 [(set_attr "type" "fpalu")
2834 (set_attr "length" "4")])
2835
2836 ;; This pattern forces (set (reg:DF ...) (float:DF (const_int ...)))
2837 ;; to be reloaded by putting the constant into memory.
2838 ;; It must come before the more general floatsidf2 pattern.
2839 (define_insn ""
2840 [(set (match_operand:DF 0 "register_operand" "=f")
2841 (float:DF (match_operand:SI 1 "const_int_operand" "m")))]
2842 "! TARGET_SOFT_FLOAT"
2843 "fldw%F1 %1,%0\;fcnvxf,sgl,dbl %0,%0"
2844 [(set_attr "type" "fpalu")
2845 (set_attr "length" "8")])
2846
2847 (define_insn "floatsidf2"
2848 [(set (match_operand:DF 0 "register_operand" "=f")
2849 (float:DF (match_operand:SI 1 "register_operand" "f")))]
2850 "! TARGET_SOFT_FLOAT"
2851 "fcnvxf,sgl,dbl %1,%0"
2852 [(set_attr "type" "fpalu")
2853 (set_attr "length" "4")])
2854
2855 (define_expand "floatunssisf2"
2856 [(set (subreg:SI (match_dup 2) 1)
2857 (match_operand:SI 1 "register_operand" ""))
2858 (set (subreg:SI (match_dup 2) 0)
2859 (const_int 0))
2860 (set (match_operand:SF 0 "register_operand" "")
2861 (float:SF (match_dup 2)))]
2862 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2863 "operands[2] = gen_reg_rtx (DImode);")
2864
2865 (define_expand "floatunssidf2"
2866 [(set (subreg:SI (match_dup 2) 1)
2867 (match_operand:SI 1 "register_operand" ""))
2868 (set (subreg:SI (match_dup 2) 0)
2869 (const_int 0))
2870 (set (match_operand:DF 0 "register_operand" "")
2871 (float:DF (match_dup 2)))]
2872 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2873 "operands[2] = gen_reg_rtx (DImode);")
2874
2875 (define_insn "floatdisf2"
2876 [(set (match_operand:SF 0 "register_operand" "=f")
2877 (float:SF (match_operand:DI 1 "register_operand" "f")))]
2878 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2879 "fcnvxf,dbl,sgl %1,%0"
2880 [(set_attr "type" "fpalu")
2881 (set_attr "length" "4")])
2882
2883 (define_insn "floatdidf2"
2884 [(set (match_operand:DF 0 "register_operand" "=f")
2885 (float:DF (match_operand:DI 1 "register_operand" "f")))]
2886 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2887 "fcnvxf,dbl,dbl %1,%0"
2888 [(set_attr "type" "fpalu")
2889 (set_attr "length" "4")])
2890
2891 ;; Convert a float to an actual integer.
2892 ;; Truncation is performed as part of the conversion.
2893
2894 (define_insn "fix_truncsfsi2"
2895 [(set (match_operand:SI 0 "register_operand" "=f")
2896 (fix:SI (fix:SF (match_operand:SF 1 "register_operand" "f"))))]
2897 "! TARGET_SOFT_FLOAT"
2898 "fcnvfxt,sgl,sgl %1,%0"
2899 [(set_attr "type" "fpalu")
2900 (set_attr "length" "4")])
2901
2902 (define_insn "fix_truncdfsi2"
2903 [(set (match_operand:SI 0 "register_operand" "=f")
2904 (fix:SI (fix:DF (match_operand:DF 1 "register_operand" "f"))))]
2905 "! TARGET_SOFT_FLOAT"
2906 "fcnvfxt,dbl,sgl %1,%0"
2907 [(set_attr "type" "fpalu")
2908 (set_attr "length" "4")])
2909
2910 (define_insn "fix_truncsfdi2"
2911 [(set (match_operand:DI 0 "register_operand" "=f")
2912 (fix:DI (fix:SF (match_operand:SF 1 "register_operand" "f"))))]
2913 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2914 "fcnvfxt,sgl,dbl %1,%0"
2915 [(set_attr "type" "fpalu")
2916 (set_attr "length" "4")])
2917
2918 (define_insn "fix_truncdfdi2"
2919 [(set (match_operand:DI 0 "register_operand" "=f")
2920 (fix:DI (fix:DF (match_operand:DF 1 "register_operand" "f"))))]
2921 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2922 "fcnvfxt,dbl,dbl %1,%0"
2923 [(set_attr "type" "fpalu")
2924 (set_attr "length" "4")])
2925 \f
2926 ;;- arithmetic instructions
2927
2928 (define_expand "adddi3"
2929 [(set (match_operand:DI 0 "register_operand" "")
2930 (plus:DI (match_operand:DI 1 "register_operand" "")
2931 (match_operand:DI 2 "arith11_operand" "")))]
2932 ""
2933 "")
2934
2935 (define_insn ""
2936 [(set (match_operand:DI 0 "register_operand" "=r")
2937 (plus:DI (match_operand:DI 1 "register_operand" "%r")
2938 (match_operand:DI 2 "arith11_operand" "rI")))]
2939 ""
2940 "*
2941 {
2942 if (GET_CODE (operands[2]) == CONST_INT)
2943 {
2944 if (INTVAL (operands[2]) >= 0)
2945 return \"addi %2,%R1,%R0\;addc %1,0,%0\";
2946 else
2947 return \"addi %2,%R1,%R0\;subb %1,0,%0\";
2948 }
2949 else
2950 return \"add %R2,%R1,%R0\;addc %2,%1,%0\";
2951 }"
2952 [(set_attr "type" "binary")
2953 (set_attr "length" "8")])
2954
2955 (define_insn ""
2956 [(set (match_operand:SI 0 "register_operand" "=r")
2957 (plus:SI (not:SI (match_operand:SI 1 "register_operand" "r"))
2958 (match_operand:SI 2 "register_operand" "r")))]
2959 ""
2960 "uaddcm %2,%1,%0"
2961 [(set_attr "type" "binary")
2962 (set_attr "length" "4")])
2963
2964 ;; define_splits to optimize cases of adding a constant integer
2965 ;; to a register when the constant does not fit in 14 bits.
2966 (define_split
2967 [(set (match_operand:SI 0 "register_operand" "")
2968 (plus:SI (match_operand:SI 1 "register_operand" "")
2969 (match_operand:SI 2 "const_int_operand" "")))
2970 (clobber (match_operand:SI 4 "register_operand" ""))]
2971 "! cint_ok_for_move (INTVAL (operands[2]))
2972 && VAL_14_BITS_P (INTVAL (operands[2]) >> 1)"
2973 [(set (match_dup 4) (plus:SI (match_dup 1) (match_dup 2)))
2974 (set (match_dup 0) (plus:SI (match_dup 4) (match_dup 3)))]
2975 "
2976 {
2977 int val = INTVAL (operands[2]);
2978 int low = (val < 0) ? -0x2000 : 0x1fff;
2979 int rest = val - low;
2980
2981 operands[2] = GEN_INT (rest);
2982 operands[3] = GEN_INT (low);
2983 }")
2984
2985 (define_split
2986 [(set (match_operand:SI 0 "register_operand" "")
2987 (plus:SI (match_operand:SI 1 "register_operand" "")
2988 (match_operand:SI 2 "const_int_operand" "")))
2989 (clobber (match_operand:SI 4 "register_operand" ""))]
2990 "! cint_ok_for_move (INTVAL (operands[2]))"
2991 [(set (match_dup 4) (match_dup 2))
2992 (set (match_dup 0) (plus:SI (mult:SI (match_dup 4) (match_dup 3))
2993 (match_dup 1)))]
2994 "
2995 {
2996 HOST_WIDE_INT intval = INTVAL (operands[2]);
2997
2998 /* Try dividing the constant by 2, then 4, and finally 8 to see
2999 if we can get a constant which can be loaded into a register
3000 in a single instruction (cint_ok_for_move).
3001
3002 If that fails, try to negate the constant and subtract it
3003 from our input operand. */
3004 if (intval % 2 == 0 && cint_ok_for_move (intval / 2))
3005 {
3006 operands[2] = GEN_INT (intval / 2);
3007 operands[3] = GEN_INT (2);
3008 }
3009 else if (intval % 4 == 0 && cint_ok_for_move (intval / 4))
3010 {
3011 operands[2] = GEN_INT (intval / 4);
3012 operands[3] = GEN_INT (4);
3013 }
3014 else if (intval % 8 == 0 && cint_ok_for_move (intval / 8))
3015 {
3016 operands[2] = GEN_INT (intval / 8);
3017 operands[3] = GEN_INT (8);
3018 }
3019 else if (cint_ok_for_move (-intval))
3020 {
3021 emit_insn (gen_rtx_SET (VOIDmode, operands[4], GEN_INT (-intval)));
3022 emit_insn (gen_subsi3 (operands[0], operands[1], operands[4]));
3023 DONE;
3024 }
3025 else
3026 FAIL;
3027 }")
3028
3029 (define_insn "addsi3"
3030 [(set (match_operand:SI 0 "register_operand" "=r,r")
3031 (plus:SI (match_operand:SI 1 "register_operand" "%r,r")
3032 (match_operand:SI 2 "arith_operand" "r,J")))]
3033 ""
3034 "@
3035 addl %1,%2,%0
3036 ldo %2(%1),%0"
3037 [(set_attr "type" "binary,binary")
3038 (set_attr "pa_combine_type" "addmove")
3039 (set_attr "length" "4,4")])
3040
3041 ;; Disgusting kludge to work around reload bugs with frame pointer
3042 ;; elimination. Similar to other magic reload patterns in the
3043 ;; indexed memory operations.
3044 (define_insn ""
3045 [(set (match_operand:SI 0 "register_operand" "=&r")
3046 (plus:SI (plus:SI (match_operand:SI 1 "register_operand" "%r")
3047 (match_operand:SI 2 "register_operand" "r"))
3048 (match_operand:SI 3 "const_int_operand" "rL")))]
3049 "reload_in_progress"
3050 "*
3051 {
3052 if (GET_CODE (operands[3]) == CONST_INT)
3053 return \"ldo %3(%2),%0\;addl %1,%0,%0\";
3054 else
3055 return \"addl %3,%2,%0\;addl %1,%0,%0\";
3056 }"
3057 [(set_attr "type" "binary")
3058 (set_attr "length" "8")])
3059
3060 (define_expand "subdi3"
3061 [(set (match_operand:DI 0 "register_operand" "")
3062 (minus:DI (match_operand:DI 1 "register_operand" "")
3063 (match_operand:DI 2 "register_operand" "")))]
3064 ""
3065 "")
3066
3067 (define_insn ""
3068 [(set (match_operand:DI 0 "register_operand" "=r")
3069 (minus:DI (match_operand:DI 1 "register_operand" "r")
3070 (match_operand:DI 2 "register_operand" "r")))]
3071 ""
3072 "sub %R1,%R2,%R0\;subb %1,%2,%0"
3073 [(set_attr "type" "binary")
3074 (set_attr "length" "8")])
3075
3076 (define_insn "subsi3"
3077 [(set (match_operand:SI 0 "register_operand" "=r,r")
3078 (minus:SI (match_operand:SI 1 "arith11_operand" "r,I")
3079 (match_operand:SI 2 "register_operand" "r,r")))]
3080 ""
3081 "@
3082 sub %1,%2,%0
3083 subi %1,%2,%0"
3084 [(set_attr "type" "binary,binary")
3085 (set_attr "length" "4,4")])
3086
3087 ;; Clobbering a "register_operand" instead of a match_scratch
3088 ;; in operand3 of millicode calls avoids spilling %r1 and
3089 ;; produces better code.
3090
3091 ;; The mulsi3 insns set up registers for the millicode call.
3092 (define_expand "mulsi3"
3093 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3094 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3095 (parallel [(set (reg:SI 29) (mult:SI (reg:SI 26) (reg:SI 25)))
3096 (clobber (match_dup 3))
3097 (clobber (reg:SI 26))
3098 (clobber (reg:SI 25))
3099 (clobber (reg:SI 31))])
3100 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3101 ""
3102 "
3103 {
3104 if (TARGET_PA_11 && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT)
3105 {
3106 rtx scratch = gen_reg_rtx (DImode);
3107 operands[1] = force_reg (SImode, operands[1]);
3108 operands[2] = force_reg (SImode, operands[2]);
3109 emit_insn (gen_umulsidi3 (scratch, operands[1], operands[2]));
3110 emit_insn (gen_rtx_SET (VOIDmode,
3111 operands[0],
3112 gen_rtx_SUBREG (SImode, scratch, 1)));
3113 DONE;
3114 }
3115 operands[3] = gen_reg_rtx (SImode);
3116 }")
3117
3118 (define_insn "umulsidi3"
3119 [(set (match_operand:DI 0 "nonimmediate_operand" "=f")
3120 (mult:DI (zero_extend:DI (match_operand:SI 1 "nonimmediate_operand" "f"))
3121 (zero_extend:DI (match_operand:SI 2 "nonimmediate_operand" "f"))))]
3122 "TARGET_PA_11 && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT"
3123 "xmpyu %1,%2,%0"
3124 [(set_attr "type" "fpmuldbl")
3125 (set_attr "length" "4")])
3126
3127 (define_insn ""
3128 [(set (match_operand:DI 0 "nonimmediate_operand" "=f")
3129 (mult:DI (zero_extend:DI (match_operand:SI 1 "nonimmediate_operand" "f"))
3130 (match_operand:DI 2 "uint32_operand" "f")))]
3131 "TARGET_PA_11 && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT"
3132 "xmpyu %1,%R2,%0"
3133 [(set_attr "type" "fpmuldbl")
3134 (set_attr "length" "4")])
3135
3136 (define_insn ""
3137 [(set (reg:SI 29) (mult:SI (reg:SI 26) (reg:SI 25)))
3138 (clobber (match_operand:SI 0 "register_operand" "=a"))
3139 (clobber (reg:SI 26))
3140 (clobber (reg:SI 25))
3141 (clobber (reg:SI 31))]
3142 ""
3143 "* return output_mul_insn (0, insn);"
3144 [(set_attr "type" "milli")
3145 (set (attr "length")
3146 (cond [
3147 ;; Target (or stub) within reach
3148 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3149 (const_int 240000))
3150 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3151 (const_int 0)))
3152 (const_int 4)
3153
3154 ;; NO_SPACE_REGS
3155 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3156 (const_int 0))
3157 (const_int 8)
3158
3159 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3160 ;; same as NO_SPACE_REGS code
3161 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3162 (const_int 0))
3163 (eq (symbol_ref "flag_pic")
3164 (const_int 0)))
3165 (const_int 8)]
3166
3167 ;; Out of range and either PIC or PORTABLE_RUNTIME
3168 (const_int 24)))])
3169
3170 ;;; Division and mod.
3171 (define_expand "divsi3"
3172 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3173 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3174 (parallel [(set (reg:SI 29) (div:SI (reg:SI 26) (reg:SI 25)))
3175 (clobber (match_dup 3))
3176 (clobber (match_dup 4))
3177 (clobber (reg:SI 26))
3178 (clobber (reg:SI 25))
3179 (clobber (reg:SI 31))])
3180 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3181 ""
3182 "
3183 {
3184 operands[3] = gen_reg_rtx (SImode);
3185 operands[4] = gen_reg_rtx (SImode);
3186 if (GET_CODE (operands[2]) == CONST_INT && emit_hpdiv_const (operands, 0))
3187 DONE;
3188 }")
3189
3190 (define_insn ""
3191 [(set (reg:SI 29)
3192 (div:SI (reg:SI 26) (match_operand:SI 0 "div_operand" "")))
3193 (clobber (match_operand:SI 1 "register_operand" "=a"))
3194 (clobber (match_operand:SI 2 "register_operand" "=&r"))
3195 (clobber (reg:SI 26))
3196 (clobber (reg:SI 25))
3197 (clobber (reg:SI 31))]
3198 ""
3199 "*
3200 return output_div_insn (operands, 0, insn);"
3201 [(set_attr "type" "milli")
3202 (set (attr "length")
3203 (cond [
3204 ;; Target (or stub) within reach
3205 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3206 (const_int 240000))
3207 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3208 (const_int 0)))
3209 (const_int 4)
3210
3211 ;; NO_SPACE_REGS
3212 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3213 (const_int 0))
3214 (const_int 8)
3215
3216 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3217 ;; same as NO_SPACE_REGS code
3218 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3219 (const_int 0))
3220 (eq (symbol_ref "flag_pic")
3221 (const_int 0)))
3222 (const_int 8)]
3223
3224 ;; Out of range and either PIC or PORTABLE_RUNTIME
3225 (const_int 24)))])
3226
3227 (define_expand "udivsi3"
3228 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3229 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3230 (parallel [(set (reg:SI 29) (udiv:SI (reg:SI 26) (reg:SI 25)))
3231 (clobber (match_dup 3))
3232 (clobber (match_dup 4))
3233 (clobber (reg:SI 26))
3234 (clobber (reg:SI 25))
3235 (clobber (reg:SI 31))])
3236 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3237 ""
3238 "
3239 {
3240 operands[3] = gen_reg_rtx (SImode);
3241 operands[4] = gen_reg_rtx (SImode);
3242 if (GET_CODE (operands[2]) == CONST_INT && emit_hpdiv_const (operands, 1))
3243 DONE;
3244 }")
3245
3246 (define_insn ""
3247 [(set (reg:SI 29)
3248 (udiv:SI (reg:SI 26) (match_operand:SI 0 "div_operand" "")))
3249 (clobber (match_operand:SI 1 "register_operand" "=a"))
3250 (clobber (match_operand:SI 2 "register_operand" "=&r"))
3251 (clobber (reg:SI 26))
3252 (clobber (reg:SI 25))
3253 (clobber (reg:SI 31))]
3254 ""
3255 "*
3256 return output_div_insn (operands, 1, insn);"
3257 [(set_attr "type" "milli")
3258 (set (attr "length")
3259 (cond [
3260 ;; Target (or stub) within reach
3261 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3262 (const_int 240000))
3263 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3264 (const_int 0)))
3265 (const_int 4)
3266
3267 ;; NO_SPACE_REGS
3268 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3269 (const_int 0))
3270 (const_int 8)
3271
3272 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3273 ;; same as NO_SPACE_REGS code
3274 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3275 (const_int 0))
3276 (eq (symbol_ref "flag_pic")
3277 (const_int 0)))
3278 (const_int 8)]
3279
3280 ;; Out of range and either PIC or PORTABLE_RUNTIME
3281 (const_int 24)))])
3282
3283 (define_expand "modsi3"
3284 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3285 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3286 (parallel [(set (reg:SI 29) (mod:SI (reg:SI 26) (reg:SI 25)))
3287 (clobber (match_dup 3))
3288 (clobber (match_dup 4))
3289 (clobber (reg:SI 26))
3290 (clobber (reg:SI 25))
3291 (clobber (reg:SI 31))])
3292 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3293 ""
3294 "
3295 {
3296 operands[4] = gen_reg_rtx (SImode);
3297 operands[3] = gen_reg_rtx (SImode);
3298 }")
3299
3300 (define_insn ""
3301 [(set (reg:SI 29) (mod:SI (reg:SI 26) (reg:SI 25)))
3302 (clobber (match_operand:SI 0 "register_operand" "=a"))
3303 (clobber (match_operand:SI 2 "register_operand" "=&r"))
3304 (clobber (reg:SI 26))
3305 (clobber (reg:SI 25))
3306 (clobber (reg:SI 31))]
3307 ""
3308 "*
3309 return output_mod_insn (0, insn);"
3310 [(set_attr "type" "milli")
3311 (set (attr "length")
3312 (cond [
3313 ;; Target (or stub) within reach
3314 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3315 (const_int 240000))
3316 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3317 (const_int 0)))
3318 (const_int 4)
3319
3320 ;; NO_SPACE_REGS
3321 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3322 (const_int 0))
3323 (const_int 8)
3324
3325 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3326 ;; same as NO_SPACE_REGS code
3327 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3328 (const_int 0))
3329 (eq (symbol_ref "flag_pic")
3330 (const_int 0)))
3331 (const_int 8)]
3332
3333 ;; Out of range and either PIC or PORTABLE_RUNTIME
3334 (const_int 24)))])
3335
3336 (define_expand "umodsi3"
3337 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3338 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3339 (parallel [(set (reg:SI 29) (umod:SI (reg:SI 26) (reg:SI 25)))
3340 (clobber (match_dup 3))
3341 (clobber (match_dup 4))
3342 (clobber (reg:SI 26))
3343 (clobber (reg:SI 25))
3344 (clobber (reg:SI 31))])
3345 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3346 ""
3347 "
3348 {
3349 operands[4] = gen_reg_rtx (SImode);
3350 operands[3] = gen_reg_rtx (SImode);
3351 }")
3352
3353 (define_insn ""
3354 [(set (reg:SI 29) (umod:SI (reg:SI 26) (reg:SI 25)))
3355 (clobber (match_operand:SI 0 "register_operand" "=a"))
3356 (clobber (match_operand:SI 2 "register_operand" "=&r"))
3357 (clobber (reg:SI 26))
3358 (clobber (reg:SI 25))
3359 (clobber (reg:SI 31))]
3360 ""
3361 "*
3362 return output_mod_insn (1, insn);"
3363 [(set_attr "type" "milli")
3364 (set (attr "length")
3365 (cond [
3366 ;; Target (or stub) within reach
3367 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3368 (const_int 240000))
3369 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3370 (const_int 0)))
3371 (const_int 4)
3372
3373 ;; NO_SPACE_REGS
3374 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3375 (const_int 0))
3376 (const_int 8)
3377
3378 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3379 ;; same as NO_SPACE_REGS code
3380 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3381 (const_int 0))
3382 (eq (symbol_ref "flag_pic")
3383 (const_int 0)))
3384 (const_int 8)]
3385
3386 ;; Out of range and either PIC or PORTABLE_RUNTIME
3387 (const_int 24)))])
3388
3389 ;;- and instructions
3390 ;; We define DImode `and` so that, together with DImode `not`, we can
3391 ;; get DImode `andn`. Other combinations are possible.
3392
3393 (define_expand "anddi3"
3394 [(set (match_operand:DI 0 "register_operand" "")
3395 (and:DI (match_operand:DI 1 "arith_double_operand" "")
3396 (match_operand:DI 2 "arith_double_operand" "")))]
3397 ""
3398 "
3399 {
3400 if (! register_operand (operands[1], DImode)
3401 || ! register_operand (operands[2], DImode))
3402 /* Let GCC break this into word-at-a-time operations. */
3403 FAIL;
3404 }")
3405
3406 (define_insn ""
3407 [(set (match_operand:DI 0 "register_operand" "=r")
3408 (and:DI (match_operand:DI 1 "register_operand" "%r")
3409 (match_operand:DI 2 "register_operand" "r")))]
3410 ""
3411 "and %1,%2,%0\;and %R1,%R2,%R0"
3412 [(set_attr "type" "binary")
3413 (set_attr "length" "8")])
3414
3415 ; The ? for op1 makes reload prefer zdepi instead of loading a huge
3416 ; constant with ldil;ldo.
3417 (define_insn "andsi3"
3418 [(set (match_operand:SI 0 "register_operand" "=r,r")
3419 (and:SI (match_operand:SI 1 "register_operand" "%?r,0")
3420 (match_operand:SI 2 "and_operand" "rO,P")))]
3421 ""
3422 "* return output_and (operands); "
3423 [(set_attr "type" "binary,shift")
3424 (set_attr "length" "4,4")])
3425
3426 (define_insn ""
3427 [(set (match_operand:DI 0 "register_operand" "=r")
3428 (and:DI (not:DI (match_operand:DI 1 "register_operand" "r"))
3429 (match_operand:DI 2 "register_operand" "r")))]
3430 ""
3431 "andcm %2,%1,%0\;andcm %R2,%R1,%R0"
3432 [(set_attr "type" "binary")
3433 (set_attr "length" "8")])
3434
3435 (define_insn ""
3436 [(set (match_operand:SI 0 "register_operand" "=r")
3437 (and:SI (not:SI (match_operand:SI 1 "register_operand" "r"))
3438 (match_operand:SI 2 "register_operand" "r")))]
3439 ""
3440 "andcm %2,%1,%0"
3441 [(set_attr "type" "binary")
3442 (set_attr "length" "4")])
3443
3444 (define_expand "iordi3"
3445 [(set (match_operand:DI 0 "register_operand" "")
3446 (ior:DI (match_operand:DI 1 "arith_double_operand" "")
3447 (match_operand:DI 2 "arith_double_operand" "")))]
3448 ""
3449 "
3450 {
3451 if (! register_operand (operands[1], DImode)
3452 || ! register_operand (operands[2], DImode))
3453 /* Let GCC break this into word-at-a-time operations. */
3454 FAIL;
3455 }")
3456
3457 (define_insn ""
3458 [(set (match_operand:DI 0 "register_operand" "=r")
3459 (ior:DI (match_operand:DI 1 "register_operand" "%r")
3460 (match_operand:DI 2 "register_operand" "r")))]
3461 ""
3462 "or %1,%2,%0\;or %R1,%R2,%R0"
3463 [(set_attr "type" "binary")
3464 (set_attr "length" "8")])
3465
3466 ;; Need a define_expand because we've run out of CONST_OK... characters.
3467 (define_expand "iorsi3"
3468 [(set (match_operand:SI 0 "register_operand" "")
3469 (ior:SI (match_operand:SI 1 "register_operand" "")
3470 (match_operand:SI 2 "arith32_operand" "")))]
3471 ""
3472 "
3473 {
3474 if (! (ior_operand (operands[2], SImode)
3475 || register_operand (operands[2], SImode)))
3476 operands[2] = force_reg (SImode, operands[2]);
3477 }")
3478
3479 (define_insn ""
3480 [(set (match_operand:SI 0 "register_operand" "=r,r")
3481 (ior:SI (match_operand:SI 1 "register_operand" "0,0")
3482 (match_operand:SI 2 "ior_operand" "M,i")))]
3483 ""
3484 "* return output_ior (operands); "
3485 [(set_attr "type" "binary,shift")
3486 (set_attr "length" "4,4")])
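;; Worked example (a sketch, not a claim about every case): for x | 0xff00 the
;; ones form one contiguous run, so output_ior rewrites the operands to the PA
;; bit position (23) and length (8) of that run and emits
;;   depi -1,23,8,%0
;; i.e. depositing eight 1 bits ending at bit 23, which is equivalent to the
;; inclusive-or because the deposited bits are all ones.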
3487
3488 (define_insn ""
3489 [(set (match_operand:SI 0 "register_operand" "=r")
3490 (ior:SI (match_operand:SI 1 "register_operand" "%r")
3491 (match_operand:SI 2 "register_operand" "r")))]
3492 ""
3493 "or %1,%2,%0"
3494 [(set_attr "type" "binary")
3495 (set_attr "length" "4")])
3496
3497 (define_expand "xordi3"
3498 [(set (match_operand:DI 0 "register_operand" "")
3499 (xor:DI (match_operand:DI 1 "arith_double_operand" "")
3500 (match_operand:DI 2 "arith_double_operand" "")))]
3501 ""
3502 "
3503 {
3504 if (! register_operand (operands[1], DImode)
3505 || ! register_operand (operands[2], DImode))
3506 /* Let GCC break this into word-at-a-time operations. */
3507 FAIL;
3508 }")
3509
3510 (define_insn ""
3511 [(set (match_operand:DI 0 "register_operand" "=r")
3512 (xor:DI (match_operand:DI 1 "register_operand" "%r")
3513 (match_operand:DI 2 "register_operand" "r")))]
3514 ""
3515 "xor %1,%2,%0\;xor %R1,%R2,%R0"
3516 [(set_attr "type" "binary")
3517 (set_attr "length" "8")])
3518
3519 (define_insn "xorsi3"
3520 [(set (match_operand:SI 0 "register_operand" "=r")
3521 (xor:SI (match_operand:SI 1 "register_operand" "%r")
3522 (match_operand:SI 2 "register_operand" "r")))]
3523 ""
3524 "xor %1,%2,%0"
3525 [(set_attr "type" "binary")
3526 (set_attr "length" "4")])
3527
3528 (define_insn "negdi2"
3529 [(set (match_operand:DI 0 "register_operand" "=r")
3530 (neg:DI (match_operand:DI 1 "register_operand" "r")))]
3531 ""
3532 "sub %%r0,%R1,%R0\;subb %%r0,%1,%0"
3533 [(set_attr "type" "unary")
3534 (set_attr "length" "8")])
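;; Worked example for the double-word negation above (a sketch): %R1/%R0 name
;; the second (low-order) register of each pair. Negating the 64-bit value 1
;; gives low = 0 - 1 = 0xffffffff with a borrow, then
;; high = 0 - 0 - borrow = 0xffffffff, i.e. -1; this is why the low-word "sub"
;; must run first so that "subb" can consume the borrow it generates.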
3535
3536 (define_insn "negsi2"
3537 [(set (match_operand:SI 0 "register_operand" "=r")
3538 (neg:SI (match_operand:SI 1 "register_operand" "r")))]
3539 ""
3540 "sub %%r0,%1,%0"
3541 [(set_attr "type" "unary")
3542 (set_attr "length" "4")])
3543
3544 (define_expand "one_cmpldi2"
3545 [(set (match_operand:DI 0 "register_operand" "")
3546 (not:DI (match_operand:DI 1 "arith_double_operand" "")))]
3547 ""
3548 "
3549 {
3550 if (! register_operand (operands[1], DImode))
3551 FAIL;
3552 }")
3553
3554 (define_insn ""
3555 [(set (match_operand:DI 0 "register_operand" "=r")
3556 (not:DI (match_operand:DI 1 "register_operand" "r")))]
3557 ""
3558 "uaddcm %%r0,%1,%0\;uaddcm %%r0,%R1,%R0"
3559 [(set_attr "type" "unary")
3560 (set_attr "length" "8")])
3561
3562 (define_insn "one_cmplsi2"
3563 [(set (match_operand:SI 0 "register_operand" "=r")
3564 (not:SI (match_operand:SI 1 "register_operand" "r")))]
3565 ""
3566 "uaddcm %%r0,%1,%0"
3567 [(set_attr "type" "unary")
3568 (set_attr "length" "4")])
3569 \f
3570 ;; Floating point arithmetic instructions.
3571
3572 (define_insn "adddf3"
3573 [(set (match_operand:DF 0 "register_operand" "=f")
3574 (plus:DF (match_operand:DF 1 "register_operand" "f")
3575 (match_operand:DF 2 "register_operand" "f")))]
3576 "! TARGET_SOFT_FLOAT"
3577 "fadd,dbl %1,%2,%0"
3578 [(set_attr "type" "fpalu")
3579 (set_attr "pa_combine_type" "faddsub")
3580 (set_attr "length" "4")])
3581
3582 (define_insn "addsf3"
3583 [(set (match_operand:SF 0 "register_operand" "=f")
3584 (plus:SF (match_operand:SF 1 "register_operand" "f")
3585 (match_operand:SF 2 "register_operand" "f")))]
3586 "! TARGET_SOFT_FLOAT"
3587 "fadd,sgl %1,%2,%0"
3588 [(set_attr "type" "fpalu")
3589 (set_attr "pa_combine_type" "faddsub")
3590 (set_attr "length" "4")])
3591
3592 (define_insn "subdf3"
3593 [(set (match_operand:DF 0 "register_operand" "=f")
3594 (minus:DF (match_operand:DF 1 "register_operand" "f")
3595 (match_operand:DF 2 "register_operand" "f")))]
3596 "! TARGET_SOFT_FLOAT"
3597 "fsub,dbl %1,%2,%0"
3598 [(set_attr "type" "fpalu")
3599 (set_attr "pa_combine_type" "faddsub")
3600 (set_attr "length" "4")])
3601
3602 (define_insn "subsf3"
3603 [(set (match_operand:SF 0 "register_operand" "=f")
3604 (minus:SF (match_operand:SF 1 "register_operand" "f")
3605 (match_operand:SF 2 "register_operand" "f")))]
3606 "! TARGET_SOFT_FLOAT"
3607 "fsub,sgl %1,%2,%0"
3608 [(set_attr "type" "fpalu")
3609 (set_attr "pa_combine_type" "faddsub")
3610 (set_attr "length" "4")])
3611
3612 (define_insn "muldf3"
3613 [(set (match_operand:DF 0 "register_operand" "=f")
3614 (mult:DF (match_operand:DF 1 "register_operand" "f")
3615 (match_operand:DF 2 "register_operand" "f")))]
3616 "! TARGET_SOFT_FLOAT"
3617 "fmpy,dbl %1,%2,%0"
3618 [(set_attr "type" "fpmuldbl")
3619 (set_attr "pa_combine_type" "fmpy")
3620 (set_attr "length" "4")])
3621
3622 (define_insn "mulsf3"
3623 [(set (match_operand:SF 0 "register_operand" "=f")
3624 (mult:SF (match_operand:SF 1 "register_operand" "f")
3625 (match_operand:SF 2 "register_operand" "f")))]
3626 "! TARGET_SOFT_FLOAT"
3627 "fmpy,sgl %1,%2,%0"
3628 [(set_attr "type" "fpmulsgl")
3629 (set_attr "pa_combine_type" "fmpy")
3630 (set_attr "length" "4")])
3631
3632 (define_insn "divdf3"
3633 [(set (match_operand:DF 0 "register_operand" "=f")
3634 (div:DF (match_operand:DF 1 "register_operand" "f")
3635 (match_operand:DF 2 "register_operand" "f")))]
3636 "! TARGET_SOFT_FLOAT"
3637 "fdiv,dbl %1,%2,%0"
3638 [(set_attr "type" "fpdivdbl")
3639 (set_attr "length" "4")])
3640
3641 (define_insn "divsf3"
3642 [(set (match_operand:SF 0 "register_operand" "=f")
3643 (div:SF (match_operand:SF 1 "register_operand" "f")
3644 (match_operand:SF 2 "register_operand" "f")))]
3645 "! TARGET_SOFT_FLOAT"
3646 "fdiv,sgl %1,%2,%0"
3647 [(set_attr "type" "fpdivsgl")
3648 (set_attr "length" "4")])
3649
3650 (define_insn "negdf2"
3651 [(set (match_operand:DF 0 "register_operand" "=f")
3652 (neg:DF (match_operand:DF 1 "register_operand" "f")))]
3653 "! TARGET_SOFT_FLOAT"
3654 "*
3655 {
3656 if (TARGET_PA_20)
3657 return \"fneg,dbl %1,%0\";
3658 else
3659 return \"fsub,dbl %%fr0,%1,%0\";
3660 }"
3661 [(set_attr "type" "fpalu")
3662 (set_attr "length" "4")])
3663
3664 (define_insn "negsf2"
3665 [(set (match_operand:SF 0 "register_operand" "=f")
3666 (neg:SF (match_operand:SF 1 "register_operand" "f")))]
3667 "! TARGET_SOFT_FLOAT"
3668 "*
3669 {
3670 if (TARGET_PA_20)
3671 return \"fneg,sgl %1,%0\";
3672 else
3673 return \"fsub,sgl %%fr0,%1,%0\";
3674 }"
3675 [(set_attr "type" "fpalu")
3676 (set_attr "length" "4")])
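;; Hedged note on the two negate patterns above: pre-2.0 PA has no fneg, so
;; the fallback computes 0 - x, relying on %fr0 reading as zero when used as a
;; source of an FP computation; e.g. negating a double on a PA 1.1 part becomes
;;   fsub,dbl %fr0,%1,%0
;; while PA 2.0 uses the genuine fneg instruction.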
3677
3678 (define_insn "absdf2"
3679 [(set (match_operand:DF 0 "register_operand" "=f")
3680 (abs:DF (match_operand:DF 1 "register_operand" "f")))]
3681 "! TARGET_SOFT_FLOAT"
3682 "fabs,dbl %1,%0"
3683 [(set_attr "type" "fpalu")
3684 (set_attr "length" "4")])
3685
3686 (define_insn "abssf2"
3687 [(set (match_operand:SF 0 "register_operand" "=f")
3688 (abs:SF (match_operand:SF 1 "register_operand" "f")))]
3689 "! TARGET_SOFT_FLOAT"
3690 "fabs,sgl %1,%0"
3691 [(set_attr "type" "fpalu")
3692 (set_attr "length" "4")])
3693
3694 (define_insn "sqrtdf2"
3695 [(set (match_operand:DF 0 "register_operand" "=f")
3696 (sqrt:DF (match_operand:DF 1 "register_operand" "f")))]
3697 "! TARGET_SOFT_FLOAT"
3698 "fsqrt,dbl %1,%0"
3699 [(set_attr "type" "fpsqrtdbl")
3700 (set_attr "length" "4")])
3701
3702 (define_insn "sqrtsf2"
3703 [(set (match_operand:SF 0 "register_operand" "=f")
3704 (sqrt:SF (match_operand:SF 1 "register_operand" "f")))]
3705 "! TARGET_SOFT_FLOAT"
3706 "fsqrt,sgl %1,%0"
3707 [(set_attr "type" "fpsqrtsgl")
3708 (set_attr "length" "4")])
3709
3710 ;; PA 2.0 floating point instructions
3711
3712 ; fmpyfadd patterns
3713 (define_insn ""
3714 [(set (match_operand:DF 0 "register_operand" "=f")
3715 (plus:DF (mult:DF (match_operand:DF 1 "register_operand" "f")
3716 (match_operand:DF 2 "register_operand" "f"))
3717 (match_operand:DF 3 "register_operand" "f")))]
3718 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3719 "fmpyfadd,dbl %1,%2,%3,%0"
3720 [(set_attr "type" "fpmuldbl")
3721 (set_attr "length" "4")])
3722
3723 (define_insn ""
3724 [(set (match_operand:DF 0 "register_operand" "=f")
3725 (plus:DF (match_operand:DF 1 "register_operand" "f")
3726 (mult:DF (match_operand:DF 2 "register_operand" "f")
3727 (match_operand:DF 3 "register_operand" "f"))))]
3728 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3729 "fmpyfadd,dbl %2,%3,%1,%0"
3730 [(set_attr "type" "fpmuldbl")
3731 (set_attr "length" "4")])
3732
3733 (define_insn ""
3734 [(set (match_operand:SF 0 "register_operand" "=f")
3735 (plus:SF (mult:SF (match_operand:SF 1 "register_operand" "f")
3736 (match_operand:SF 2 "register_operand" "f"))
3737 (match_operand:SF 3 "register_operand" "f")))]
3738 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3739 "fmpyfadd,sgl %1,%2,%3,%0"
3740 [(set_attr "type" "fpmulsgl")
3741 (set_attr "length" "4")])
3742
3743 (define_insn ""
3744 [(set (match_operand:SF 0 "register_operand" "=f")
3745 (plus:SF (match_operand:SF 1 "register_operand" "f")
3746 (mult:SF (match_operand:SF 2 "register_operand" "f")
3747 (match_operand:SF 3 "register_operand" "f"))))]
3748 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3749 "fmpyfadd,sgl %2,%3,%1,%0"
3750 [(set_attr "type" "fpmulsgl")
3751 (set_attr "length" "4")])
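;; Illustrative example (an assumption about typical combine output, not a
;; claim about this file): for source like "d = a * b + c" in double
;; precision, where the product feeds the add, one of the fmpyfadd patterns
;; above can match and emit a single
;;   fmpyfadd,dbl %a,%b,%c,%d
;; instead of separate fmpy and fadd instructions.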
3752
3753 ; fmpynfadd patterns
3754 (define_insn ""
3755 [(set (match_operand:DF 0 "register_operand" "=f")
3756 (minus:DF (match_operand:DF 1 "register_operand" "f")
3757 (mult:DF (match_operand:DF 2 "register_operand" "f")
3758 (match_operand:DF 3 "register_operand" "f"))))]
3759 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3760 "fmpynfadd,dbl %2,%3,%1,%0"
3761 [(set_attr "type" "fpmuldbl")
3762 (set_attr "length" "4")])
3763
3764 (define_insn ""
3765 [(set (match_operand:SF 0 "register_operand" "=f")
3766 (minus:SF (match_operand:SF 1 "register_operand" "f")
3767 (mult:SF (match_operand:SF 2 "register_operand" "f")
3768 (match_operand:SF 3 "register_operand" "f"))))]
3769 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3770 "fmpynfadd,sgl %2,%3,%1,%0"
3771 [(set_attr "type" "fpmulsgl")
3772 (set_attr "length" "4")])
3773
3774 ; fnegabs patterns
3775 (define_insn ""
3776 [(set (match_operand:DF 0 "register_operand" "=f")
3777 (neg:DF (abs:DF (match_operand:DF 1 "register_operand" "f"))))]
3778 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3779 "fnegabs,dbl %1,%0"
3780 [(set_attr "type" "fpalu")
3781 (set_attr "length" "4")])
3782
3783 (define_insn ""
3784 [(set (match_operand:SF 0 "register_operand" "=f")
3785 (neg:SF (abs:SF (match_operand:SF 1 "register_operand" "f"))))]
3786 "TARGET_PA_20 && ! TARGET_SOFT_FLOAT"
3787 "fnegabs,sgl %1,%0"
3788 [(set_attr "type" "fpalu")
3789 (set_attr "length" "4")])
3790
3791 \f
3792 ;;- Shift instructions
3793
3794 ;; Optimized special case of shifting.
3795
3796 (define_insn ""
3797 [(set (match_operand:SI 0 "register_operand" "=r")
3798 (lshiftrt:SI (match_operand:SI 1 "memory_operand" "m")
3799 (const_int 24)))]
3800 ""
3801 "ldb%M1 %1,%0"
3802 [(set_attr "type" "load")
3803 (set_attr "length" "4")])
3804
3805 (define_insn ""
3806 [(set (match_operand:SI 0 "register_operand" "=r")
3807 (lshiftrt:SI (match_operand:SI 1 "memory_operand" "m")
3808 (const_int 16)))]
3809 ""
3810 "ldh%M1 %1,%0"
3811 [(set_attr "type" "load")
3812 (set_attr "length" "4")])
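;; Why the two patterns above work (a hedged sketch): PA is big-endian, so the
;; most significant byte (or halfword) of a word is the one at the lowest
;; address. Loading a word and shifting it right by 24 (or 16) is therefore
;; just a zero-extending byte (or halfword) load from the same address, e.g.
;; roughly:   r = *p >> 24;   ->   ldb 0(%p),%r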
3813
3814 (define_insn ""
3815 [(set (match_operand:SI 0 "register_operand" "=r")
3816 (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r")
3817 (match_operand:SI 3 "shadd_operand" ""))
3818 (match_operand:SI 1 "register_operand" "r")))]
3819 ""
3820 "sh%O3addl %2,%1,%0"
3821 [(set_attr "type" "binary")
3822 (set_attr "length" "4")])
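;; Hedged example for the shNadd pattern above: shadd_operand accepts the
;; scale factors 2, 4 and 8, and the %O3 modifier is assumed to print the
;; log2 of the scale, so indexing a word array (base + i*4) comes out as
;;   sh2addl %i,%base,%t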
3823
3824 ;; This variant of the above insn can occur if the first operand
3825 ;; is the frame pointer. This is a kludge, but there doesn't
3826 ;; seem to be a way around it. Only recognize it while reloading.
3827 ;; Note how operand 3 uses a predicate of "const_int_operand", but
3828 ;; has constraints allowing a register. I don't know how this works,
3829 ;; but it somehow makes sure that out-of-range constants are placed
3830 ;; in a register which magically ends up being a "const_int_operand".
3831 ;; (This was stolen from alpha.md; I'm not going to try to change it.)
3832
3833 (define_insn ""
3834 [(set (match_operand:SI 0 "register_operand" "=&r,r")
3835 (plus:SI (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r,r")
3836 (match_operand:SI 4 "shadd_operand" ""))
3837 (match_operand:SI 1 "register_operand" "r,r"))
3838 (match_operand:SI 3 "const_int_operand" "r,J")))]
3839 "reload_in_progress"
3840 "@
3841 sh%O4addl %2,%1,%0\;addl %3,%0,%0
3842 sh%O4addl %2,%1,%0\;ldo %3(%0),%0"
3843 [(set_attr "type" "multi")
3844 (set_attr "length" "8")])
3845
3846 ;; This anonymous pattern and its splitter win because they reduce the
3847 ;; latency of the shadd sequence without increasing the latency of the shift.
3848 ;;
3849 ;; We want to make sure and split up the operations for the scheduler since
3850 ;; these instructions can (and should) schedule independently.
3851 ;;
3852 ;; It would be clearer if combine used the same operator for both expressions;
3853 ;; it's somewhat confusing to have a mult in one operation and an ashift
3854 ;; in the other.
3855 ;;
3856 ;; If this pattern is not split before register allocation, then we must expose
3857 ;; the fact that operand 4 is set before operands 1, 2 and 3 have been read.
3858 (define_insn ""
3859 [(set (match_operand:SI 0 "register_operand" "=r")
3860 (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r")
3861 (match_operand:SI 3 "shadd_operand" ""))
3862 (match_operand:SI 1 "register_operand" "r")))
3863 (set (match_operand:SI 4 "register_operand" "=&r")
3864 (ashift:SI (match_dup 2)
3865 (match_operand:SI 5 "const_int_operand" "i")))]
3866 "INTVAL (operands[5]) == exact_log2 (INTVAL (operands[3]))"
3867 "#"
3868 [(set_attr "type" "binary")
3869 (set_attr "length" "8")])
3870
3871 (define_split
3872 [(set (match_operand:SI 0 "register_operand" "=r")
3873 (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r")
3874 (match_operand:SI 3 "shadd_operand" ""))
3875 (match_operand:SI 1 "register_operand" "r")))
3876 (set (match_operand:SI 4 "register_operand" "=&r")
3877 (ashift:SI (match_dup 2)
3878 (match_operand:SI 5 "const_int_operand" "i")))]
3879 "INTVAL (operands[5]) == exact_log2 (INTVAL (operands[3]))"
3880 [(set (match_dup 4) (ashift:SI (match_dup 2) (match_dup 5)))
3881 (set (match_dup 0) (plus:SI (mult:SI (match_dup 2) (match_dup 3))
3882 (match_dup 1)))]
3883 "")
3884
3885 (define_expand "ashlsi3"
3886 [(set (match_operand:SI 0 "register_operand" "")
3887 (ashift:SI (match_operand:SI 1 "lhs_lshift_operand" "")
3888 (match_operand:SI 2 "arith32_operand" "")))]
3889 ""
3890 "
3891 {
3892 if (GET_CODE (operands[2]) != CONST_INT)
3893 {
3894 rtx temp = gen_reg_rtx (SImode);
3895 emit_insn (gen_subsi3 (temp, GEN_INT (31), operands[2]));
3896 if (GET_CODE (operands[1]) == CONST_INT)
3897 emit_insn (gen_zvdep_imm32 (operands[0], operands[1], temp));
3898 else
3899 emit_insn (gen_zvdep32 (operands[0], operands[1], temp));
3900 DONE;
3901 }
3902 /* Make sure both inputs are not constants;
3903 there are no patterns for that case. */
3904 operands[1] = force_reg (SImode, operands[1]);
3905 }")
3906
3907 (define_insn ""
3908 [(set (match_operand:SI 0 "register_operand" "=r")
3909 (ashift:SI (match_operand:SI 1 "register_operand" "r")
3910 (match_operand:SI 2 "const_int_operand" "n")))]
3911 ""
3912 "zdep %1,%P2,%L2,%0"
3913 [(set_attr "type" "shift")
3914 (set_attr "length" "4")])
3915
3916 ; Match cases of op1 a CONST_INT here that zvdep_imm32 doesn't handle.
3917 ; Doing it like this makes slightly better code since reload can
3918 ; replace a register with a known value in range -16..15 with a
3919 ; constant. Ideally, we would like to merge zvdep32 and zvdep_imm32,
3920 ; but since we have no more CONST_OK... characters, that is not
3921 ; possible.
3922 (define_insn "zvdep32"
3923 [(set (match_operand:SI 0 "register_operand" "=r,r")
3924 (ashift:SI (match_operand:SI 1 "arith5_operand" "r,L")
3925 (minus:SI (const_int 31)
3926 (match_operand:SI 2 "register_operand" "q,q"))))]
3927 ""
3928 "@
3929 zvdep %1,32,%0
3930 zvdepi %1,32,%0"
3931 [(set_attr "type" "shift,shift")
3932 (set_attr "length" "4,4")])
3933
3934 (define_insn "zvdep_imm32"
3935 [(set (match_operand:SI 0 "register_operand" "=r")
3936 (ashift:SI (match_operand:SI 1 "lhs_lshift_cint_operand" "")
3937 (minus:SI (const_int 31)
3938 (match_operand:SI 2 "register_operand" "q"))))]
3939 ""
3940 "*
3941 {
3942 int x = INTVAL (operands[1]);
3943 operands[2] = GEN_INT (4 + exact_log2 ((x >> 4) + 1));
3944 operands[1] = GEN_INT ((x & 0xf) - 0x10);
3945 return \"zvdepi %1,%2,%0\";
3946 }"
3947 [(set_attr "type" "shift")
3948 (set_attr "length" "4")])
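;; Worked example of the operand rewriting above (a sketch): for the constant
;; 31 (0b11111), operands[2] becomes 4 + exact_log2((31 >> 4) + 1) = 5 and
;; operands[1] becomes (31 & 0xf) - 0x10 = -1, giving
;;   zvdepi -1,5,%0
;; zvdepi sign-extends its 5-bit immediate, so the five low bits of -1 are
;; exactly the bits of 31, deposited at the variable position held in SAR.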
3949
3950 (define_insn "vdepi_ior"
3951 [(set (match_operand:SI 0 "register_operand" "=r")
3952 (ior:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
3953 (minus:SI (const_int 31)
3954 (match_operand:SI 2 "register_operand" "q")))
3955 (match_operand:SI 3 "register_operand" "0")))]
3956 ; accept ...0001...1, can this be generalized?
3957 "exact_log2 (INTVAL (operands[1]) + 1) >= 0"
3958 "*
3959 {
3960 int x = INTVAL (operands[1]);
3961 operands[2] = GEN_INT (exact_log2 (x + 1));
3962 return \"vdepi -1,%2,%0\";
3963 }"
3964 [(set_attr "type" "shift")
3965 (set_attr "length" "4")])
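;; Worked example (sketch): the condition only admits constants of the form
;; 0...01...1. For operand 1 == 7, exact_log2(7 + 1) = 3, so the insn emits
;;   vdepi -1,3,%0
;; and depositing three 1 bits is the same as or-ing them in.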
3966
3967 (define_insn "vdepi_and"
3968 [(set (match_operand:SI 0 "register_operand" "=r")
3969 (and:SI (rotate:SI (match_operand:SI 1 "const_int_operand" "")
3970 (minus:SI (const_int 31)
3971 (match_operand:SI 2 "register_operand" "q")))
3972 (match_operand:SI 3 "register_operand" "0")))]
3973 ; this can be generalized...!
3974 "INTVAL (operands[1]) == -2"
3975 "*
3976 {
3977 int x = INTVAL (operands[1]);
3978 operands[2] = GEN_INT (exact_log2 ((~x) + 1));
3979 return \"vdepi 0,%2,%0\";
3980 }"
3981 [(set_attr "type" "shift")
3982 (set_attr "length" "4")])
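;; Worked example (sketch): with operand 1 restricted to -2, we get
;; exact_log2(~(-2) + 1) = 1 and the insn emits
;;   vdepi 0,1,%0
;; clearing a single bit at the variable position, which matches and-ing
;; with a rotated -2 (a mask with exactly one zero bit).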
3983
3984 (define_expand "ashrsi3"
3985 [(set (match_operand:SI 0 "register_operand" "")
3986 (ashiftrt:SI (match_operand:SI 1 "register_operand" "")
3987 (match_operand:SI 2 "arith32_operand" "")))]
3988 ""
3989 "
3990 {
3991 if (GET_CODE (operands[2]) != CONST_INT)
3992 {
3993 rtx temp = gen_reg_rtx (SImode);
3994 emit_insn (gen_subsi3 (temp, GEN_INT (31), operands[2]));
3995 emit_insn (gen_vextrs32 (operands[0], operands[1], temp));
3996 DONE;
3997 }
3998 }")
3999
4000 (define_insn ""
4001 [(set (match_operand:SI 0 "register_operand" "=r")
4002 (ashiftrt:SI (match_operand:SI 1 "register_operand" "r")
4003 (match_operand:SI 2 "const_int_operand" "n")))]
4004 ""
4005 "extrs %1,%P2,%L2,%0"
4006 [(set_attr "type" "shift")
4007 (set_attr "length" "4")])
4008
4009 (define_insn "vextrs32"
4010 [(set (match_operand:SI 0 "register_operand" "=r")
4011 (ashiftrt:SI (match_operand:SI 1 "register_operand" "r")
4012 (minus:SI (const_int 31)
4013 (match_operand:SI 2 "register_operand" "q"))))]
4014 ""
4015 "vextrs %1,32,%0"
4016 [(set_attr "type" "shift")
4017 (set_attr "length" "4")])
4018
4019 (define_insn "lshrsi3"
4020 [(set (match_operand:SI 0 "register_operand" "=r,r")
4021 (lshiftrt:SI (match_operand:SI 1 "register_operand" "r,r")
4022 (match_operand:SI 2 "arith32_operand" "q,n")))]
4023 ""
4024 "@
4025 vshd %%r0,%1,%0
4026 extru %1,%P2,%L2,%0"
4027 [(set_attr "type" "shift")
4028 (set_attr "length" "4")])
4029
4030 (define_insn "rotrsi3"
4031 [(set (match_operand:SI 0 "register_operand" "=r,r")
4032 (rotatert:SI (match_operand:SI 1 "register_operand" "r,r")
4033 (match_operand:SI 2 "arith32_operand" "q,n")))]
4034 ""
4035 "*
4036 {
4037 if (GET_CODE (operands[2]) == CONST_INT)
4038 {
4039 operands[2] = GEN_INT (INTVAL (operands[2]) & 31);
4040 return \"shd %1,%1,%2,%0\";
4041 }
4042 else
4043 return \"vshd %1,%1,%0\";
4044 }"
4045 [(set_attr "type" "shift")
4046 (set_attr "length" "4")])
4047
4048 (define_expand "rotlsi3"
4049 [(set (match_operand:SI 0 "register_operand" "")
4050 (rotate:SI (match_operand:SI 1 "register_operand" "")
4051 (match_operand:SI 2 "arith32_operand" "")))]
4052 ""
4053 "
4054 {
4055 if (GET_CODE (operands[2]) != CONST_INT)
4056 {
4057 rtx temp = gen_reg_rtx (SImode);
4058 emit_insn (gen_subsi3 (temp, GEN_INT (32), operands[2]));
4059 emit_insn (gen_rotrsi3 (operands[0], operands[1], temp));
4060 DONE;
4061 }
4062 /* Else expand normally. */
4063 }")
4064
4065 (define_insn ""
4066 [(set (match_operand:SI 0 "register_operand" "=r")
4067 (rotate:SI (match_operand:SI 1 "register_operand" "r")
4068 (match_operand:SI 2 "const_int_operand" "n")))]
4069 ""
4070 "*
4071 {
4072 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) & 31);
4073 return \"shd %1,%1,%2,%0\";
4074 }"
4075 [(set_attr "type" "shift")
4076 (set_attr "length" "4")])
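;; Worked example (sketch): a rotate left by 8 becomes a rotate right by
;; (32 - 8) & 31 = 24, emitted as
;;   shd %1,%1,24,%0
;; i.e. extracting a 32-bit field from the doubled operand shifted by 24.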
4077
4078 (define_insn ""
4079 [(set (match_operand:SI 0 "register_operand" "=r")
4080 (match_operator:SI 5 "plus_xor_ior_operator"
4081 [(ashift:SI (match_operand:SI 1 "register_operand" "r")
4082 (match_operand:SI 3 "const_int_operand" "n"))
4083 (lshiftrt:SI (match_operand:SI 2 "register_operand" "r")
4084 (match_operand:SI 4 "const_int_operand" "n"))]))]
4085 "INTVAL (operands[3]) + INTVAL (operands[4]) == 32"
4086 "shd %1,%2,%4,%0"
4087 [(set_attr "type" "shift")
4088 (set_attr "length" "4")])
4089
4090 (define_insn ""
4091 [(set (match_operand:SI 0 "register_operand" "=r")
4092 (match_operator:SI 5 "plus_xor_ior_operator"
4093 [(lshiftrt:SI (match_operand:SI 2 "register_operand" "r")
4094 (match_operand:SI 4 "const_int_operand" "n"))
4095 (ashift:SI (match_operand:SI 1 "register_operand" "r")
4096 (match_operand:SI 3 "const_int_operand" "n"))]))]
4097 "INTVAL (operands[3]) + INTVAL (operands[4]) == 32"
4098 "shd %1,%2,%4,%0"
4099 [(set_attr "type" "shift")
4100 (set_attr "length" "4")])
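;; Hedged example for the two double-shift patterns above: with the shift
;; counts summing to 32, e.g. (x << 8) | (y >> 24), the pair of shifts is a
;; single extract from the 64-bit concatenation of x and y:
;;   shd %1,%2,24,%0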
4101
4102 (define_insn ""
4103 [(set (match_operand:SI 0 "register_operand" "=r")
4104 (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "r")
4105 (match_operand:SI 2 "const_int_operand" ""))
4106 (match_operand:SI 3 "const_int_operand" "")))]
4107 "exact_log2 (1 + (INTVAL (operands[3]) >> (INTVAL (operands[2]) & 31))) >= 0"
4108 "*
4109 {
4110 int cnt = INTVAL (operands[2]) & 31;
4111 operands[3] = GEN_INT (exact_log2 (1 + (INTVAL (operands[3]) >> cnt)));
4112 operands[2] = GEN_INT (31 - cnt);
4113 return \"zdep %1,%2,%3,%0\";
4114 }"
4115 [(set_attr "type" "shift")
4116 (set_attr "length" "4")])
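;; Worked example (sketch): for (x << 4) & 0xff0 the condition holds because
;; 1 + (0xff0 >> 4) = 0x100 is a power of two. The template then rewrites
;; operands[3] to 8 and operands[2] to 31 - 4 = 27 and emits
;;   zdep %1,27,8,%0
;; depositing the low 8 bits of x into the field ending at bit 27, which is
;; exactly the shift-and-mask.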
4117 \f
4118 ;; Unconditional and other jump instructions.
4119
4120 (define_insn "return"
4121 [(return)]
4122 "hppa_can_use_return_insn_p ()"
4123 "bv%* %%r0(%%r2)"
4124 [(set_attr "type" "branch")
4125 (set_attr "length" "4")])
4126
4127 ;; Use a different pattern for functions which have non-trivial
4128 ;; epilogues so as not to confuse jump and reorg.
4129 (define_insn "return_internal"
4130 [(use (reg:SI 2))
4131 (return)]
4132 ""
4133 "bv%* %%r0(%%r2)"
4134 [(set_attr "type" "branch")
4135 (set_attr "length" "4")])
4136
4137 (define_expand "prologue"
4138 [(const_int 0)]
4139 ""
4140 "hppa_expand_prologue ();DONE;")
4141
4142 (define_expand "epilogue"
4143 [(return)]
4144 ""
4145 "
4146 {
4147 /* Try to use the trivial return first. Else use the full
4148 epilogue. */
4149 if (hppa_can_use_return_insn_p ())
4150 emit_jump_insn (gen_return ());
4151 else
4152 {
4153 hppa_expand_epilogue ();
4154 emit_jump_insn (gen_return_internal ());
4155 }
4156 DONE;
4157 }")
4158
4159 ;; Special because we use the value placed in %r2 by the bl instruction
4160 ;; from within its delay slot to set the value for the 2nd parameter to
4161 ;; the call.
4162 (define_insn "call_profiler"
4163 [(unspec_volatile [(const_int 0)] 0)
4164 (use (match_operand:SI 0 "const_int_operand" ""))]
4165 ""
4166 "bl _mcount,%%r2\;ldo %0(%%r2),%%r25"
4167 [(set_attr "type" "multi")
4168 (set_attr "length" "8")])
4169
4170 (define_insn "blockage"
4171 [(unspec_volatile [(const_int 2)] 0)]
4172 ""
4173 ""
4174 [(set_attr "length" "0")])
4175
4176 (define_insn "jump"
4177 [(set (pc) (label_ref (match_operand 0 "" "")))]
4178 ""
4179 "*
4180 {
4181 extern int optimize;
4182
4183 if (GET_MODE (insn) == SImode)
4184 return \"b %l0%#\";
4185
4186 /* An unconditional branch which can reach its target. */
4187 if (get_attr_length (insn) != 24
4188 && get_attr_length (insn) != 16)
4189 return \"b%* %l0\";
4190
4191 /* An unconditional branch which cannot reach its target.
4192
4193 We need to be able to use %r1 as a scratch register; however,
4194 we can never be sure whether or not it's got a live value in
4195 it. Therefore, we must restore its original value after the
4196 jump.
4197
4198 To make matters worse, we don't have a stack slot which we
4199 can always clobber. sp-12/sp-16 shouldn't ever have a live
4200 value during a non-optimizing compilation, so we use those
4201 slots for now. We don't support very long branches when
4202 optimizing -- they should be quite rare when optimizing.
4203
4204 Really the way to go long term is a register scavenger; go to
4205 the target of the jump and find a register which we can use
4206 as a scratch to hold the value in %r1. */
4207
4208 /* We don't know how to register scavenge yet. */
4209 if (optimize)
4210 abort ();
4211
4212 /* First store %r1 into the stack. */
4213 output_asm_insn (\"stw %%r1,-16(%%r30)\", operands);
4214
4215 /* Now load the target address into %r1 and do an indirect jump
4216 to the value specified in %r1. Be careful to generate PIC
4217 code as needed. */
4218 if (flag_pic)
4219 {
4220 rtx xoperands[2];
4221 xoperands[0] = operands[0];
4222 xoperands[1] = gen_label_rtx ();
4223
4224 output_asm_insn (\"bl .+8,%%r1\\n\\taddil L'%l0-%l1,%%r1\", xoperands);
4225 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
4226 CODE_LABEL_NUMBER (xoperands[1]));
4227 output_asm_insn (\"ldo R'%l0-%l1(%%r1),%%r1\\n\\tbv %%r0(%%r1)\",
4228 xoperands);
4229 }
4230 else
4231 output_asm_insn (\"ldil L'%l0,%%r1\\n\\tbe R'%l0(%%sr4,%%r1)\", operands);;
4232
4233 /* And restore the value of %r1 in the delay slot. We're not optimizing,
4234 so we know nothing else can be in the delay slot. */
4235 return \"ldw -16(%%r30),%%r1\";
4236 }"
4237 [(set_attr "type" "uncond_branch")
4238 (set_attr "pa_combine_type" "uncond_branch")
4239 (set (attr "length")
4240 (cond [(eq (symbol_ref "jump_in_call_delay (insn)") (const_int 1))
4241 (if_then_else (lt (abs (minus (match_dup 0)
4242 (plus (pc) (const_int 8))))
4243 (const_int 8184))
4244 (const_int 4)
4245 (const_int 8))
4246 (ge (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
4247 (const_int 262100))
4248 (if_then_else (eq (symbol_ref "flag_pic") (const_int 0))
4249 (const_int 16)
4250 (const_int 24))]
4251 (const_int 4)))])
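;; Hedged illustration of the out-of-range, non-PIC case handled above
;; (assembled from the strings in the template; the target name is
;; illustrative):
;;   stw %r1,-16(%r30)
;;   ldil L'target,%r1
;;   be R'target(%sr4,%r1)
;;   ldw -16(%r30),%r1     ; delay slot restores %r1
;; which accounts for the 16-byte length; the PIC variant adds the bl/addil
;; label arithmetic and comes to 24 bytes.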
4252
4253 ;; Subroutines of "casesi".
4254 ;; operand 0 is index
4255 ;; operand 1 is the minimum bound
4256 ;; operand 2 is the maximum bound - minimum bound + 1
4257 ;; operand 3 is CODE_LABEL for the table;
4258 ;; operand 4 is the CODE_LABEL to go to if index out of range.
4259
4260 (define_expand "casesi"
4261 [(match_operand:SI 0 "general_operand" "")
4262 (match_operand:SI 1 "const_int_operand" "")
4263 (match_operand:SI 2 "const_int_operand" "")
4264 (match_operand 3 "" "")
4265 (match_operand 4 "" "")]
4266 ""
4267 "
4268 {
4269 if (GET_CODE (operands[0]) != REG)
4270 operands[0] = force_reg (SImode, operands[0]);
4271
4272 if (operands[1] != const0_rtx)
4273 {
4274 rtx reg = gen_reg_rtx (SImode);
4275
4276 operands[1] = GEN_INT (-INTVAL (operands[1]));
4277 if (!INT_14_BITS (operands[1]))
4278 operands[1] = force_reg (SImode, operands[1]);
4279 emit_insn (gen_addsi3 (reg, operands[0], operands[1]));
4280
4281 operands[0] = reg;
4282 }
4283
4284 if (!INT_5_BITS (operands[2]))
4285 operands[2] = force_reg (SImode, operands[2]);
4286
4287 emit_insn (gen_cmpsi (operands[0], operands[2]));
4288 emit_jump_insn (gen_bgtu (operands[4]));
4289 if (TARGET_BIG_SWITCH)
4290 {
4291 rtx temp = gen_reg_rtx (SImode);
4292 emit_move_insn (temp, gen_rtx_PLUS (SImode, operands[0], operands[0]));
4293 operands[0] = temp;
4294 }
4295 emit_jump_insn (gen_casesi0 (operands[0], operands[3]));
4296 DONE;
4297 }")
4298
4299 (define_insn "casesi0"
4300 [(set (pc) (plus:SI
4301 (mem:SI (plus:SI (pc)
4302 (match_operand:SI 0 "register_operand" "r")))
4303 (label_ref (match_operand 1 "" ""))))]
4304 ""
4305 "blr %0,%%r0\;nop"
4306 [(set_attr "type" "multi")
4307 (set_attr "length" "8")])
4308
4309 ;; Need nops for the calls because execution is supposed to continue
4310 ;; past; we don't want to nullify an instruction that we need.
4311 ;;- jump to subroutine
4312
4313 (define_expand "call"
4314 [(parallel [(call (match_operand:SI 0 "" "")
4315 (match_operand 1 "" ""))
4316 (clobber (reg:SI 2))])]
4317 ""
4318 "
4319 {
4320 rtx op;
4321 rtx call_insn;
4322
4323 if (TARGET_PORTABLE_RUNTIME)
4324 op = force_reg (SImode, XEXP (operands[0], 0));
4325 else
4326 op = XEXP (operands[0], 0);
4327
4328 /* Use two different patterns for calls to explicitly named functions
4329 and calls through function pointers. This is necessary as these two
4330 types of calls use different calling conventions, and CSE might try
4331 to change the named call into an indirect call in some cases (using
4332 two patterns keeps CSE from performing this optimization). */
4333 if (GET_CODE (op) == SYMBOL_REF)
4334 call_insn = emit_call_insn (gen_call_internal_symref (op, operands[1]));
4335 else
4336 {
4337 rtx tmpreg = gen_rtx_REG (word_mode, 22);
4338 emit_move_insn (tmpreg, force_reg (word_mode, op));
4339 call_insn = emit_call_insn (gen_call_internal_reg (operands[1]));
4340 }
4341
4342 if (flag_pic)
4343 {
4344 use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), pic_offset_table_rtx);
4345
4346 /* After each call we must restore the PIC register, even if it
4347 doesn't appear to be used.
4348
4349 This will set regs_ever_live for the callee saved register we
4350 stored the PIC register in. */
4351 emit_move_insn (pic_offset_table_rtx,
4352 gen_rtx_REG (word_mode, PIC_OFFSET_TABLE_REGNUM_SAVED));
4353 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
4354
4355 /* Gross. We have to keep the scheduler from moving the restore
4356 of the PIC register away from the call. SCHED_GROUP_P is
4357 supposed to do this, but for some reason the compiler will
4358 go into an infinite loop when we use that.
4359
4360 This method (blockage insn) may make worse code (then again
4361 it may not since calls are nearly blockages anyway), but at
4362 least it should work. */
4363 emit_insn (gen_blockage ());
4364 }
4365 DONE;
4366 }")
4367
4368 (define_insn "call_internal_symref"
4369 [(call (mem:SI (match_operand:SI 0 "call_operand_address" ""))
4370 (match_operand 1 "" "i"))
4371 (clobber (reg:SI 2))
4372 (use (const_int 0))]
4373 "! TARGET_PORTABLE_RUNTIME"
4374 "*
4375 {
4376 output_arg_descriptor (insn);
4377 return output_call (insn, operands[0]);
4378 }"
4379 [(set_attr "type" "call")
4380 (set (attr "length")
4381 ;; If we're sure that we can either reach the target or that the
4382 ;; linker can use a long-branch stub, then the length is 4 bytes.
4383 ;;
4384 ;; Otherwise we have to use a long call;
4385 ;; in that case the length will be either 52 bytes (non-pic)
4386 ;; or 68 bytes (pic).
4387 (if_then_else (lt (plus (symbol_ref "total_code_bytes") (pc))
4388 (const_int 240000))
4389 (const_int 4)
4390 (if_then_else (eq (symbol_ref "flag_pic")
4391 (const_int 0))
4392 (const_int 52)
4393 (const_int 68))))])
4394
4395 (define_insn "call_internal_reg"
4396 [(call (mem:SI (reg:SI 22))
4397 (match_operand 0 "" "i"))
4398 (clobber (reg:SI 2))
4399 (use (const_int 1))]
4400 ""
4401 "*
4402 {
4403 rtx xoperands[2];
4404
4405 /* First the special case for kernels, level 0 systems, etc. */
4406 if (TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS)
4407 return \"ble 0(%%sr4,%%r22)\;copy %%r31,%%r2\";
4408
4409 /* Now the normal case -- we can reach $$dyncall directly or
4410 we're sure that we can get there via a long-branch stub.
4411
4412 No need to check target flags as the length uniquely identifies
4413 the remaining cases. */
4414 if (get_attr_length (insn) == 8)
4415 return \".CALL\\tARGW0=GR\;bl $$dyncall,%%r31\;copy %%r31,%%r2\";
4416
4417 /* Long millicode call, but we are not generating PIC or portable runtime
4418 code. */
4419 if (get_attr_length (insn) == 12)
4420 return \".CALL\\tARGW0=GR\;ldil L%%$$dyncall,%%r2\;ble R%%$$dyncall(%%sr4,%%r2)\;copy %%r31,%%r2\";
4421
4422 /* Long millicode call for portable runtime. */
4423 if (get_attr_length (insn) == 20)
4424 return \"ldil L%%$$dyncall,%%r31\;ldo R%%$$dyncall(%%r31),%%r31\;blr %%r0,%%r2\;bv,n %%r0(%%r31)\;nop\";
4425
4426 /* Otherwise we're generating PIC code. */
4427 xoperands[0] = operands[0];
4428 xoperands[1] = gen_label_rtx ();
4429 output_asm_insn (\"bl .+8,%%r1\", xoperands);
4430 output_asm_insn (\"addil L%%$$dyncall-%1,%%r1\", xoperands);
4431 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
4432 CODE_LABEL_NUMBER (xoperands[1]));
4433 output_asm_insn (\"ldo R%%$$dyncall-%1(%%r1),%%r1\", xoperands);
4434 output_asm_insn (\"blr %%r0,%%r2\", xoperands);
4435 output_asm_insn (\"bv,n %%r0(%%r1)\\n\\tnop\", xoperands);
4436 return \"\";
4437 }"
4438 [(set_attr "type" "dyncall")
4439 (set (attr "length")
4440 (cond [
4441 ;; First NO_SPACE_REGS
4442 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
4443 (const_int 0))
4444 (const_int 8)
4445
4446 ;; Target (or stub) within reach
4447 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
4448 (const_int 240000))
4449 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4450 (const_int 0)))
4451 (const_int 8)
4452
4453 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
4454 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4455 (const_int 0))
4456 (eq (symbol_ref "flag_pic")
4457 (const_int 0)))
4458 (const_int 12)
4459
4460 (ne (symbol_ref "TARGET_PORTABLE_RUNTIME")
4461 (const_int 0))
4462 (const_int 20)]
4463
4464 ;; Out of range PIC case
4465 (const_int 24)))])
4466
4467 (define_expand "call_value"
4468 [(parallel [(set (match_operand 0 "" "")
4469 (call (match_operand:SI 1 "" "")
4470 (match_operand 2 "" "")))
4471 (clobber (reg:SI 2))])]
4472 ""
4473 "
4474 {
4475 rtx op;
4476 rtx call_insn;
4477
4478 if (TARGET_PORTABLE_RUNTIME)
4479 op = force_reg (word_mode, XEXP (operands[1], 0));
4480 else
4481 op = XEXP (operands[1], 0);
4482
4483 /* Use two different patterns for calls to explicitly named functions
4484 and calls through function pointers. This is necessary as these two
4485 types of calls use different calling conventions, and CSE might try
4486 to change the named call into an indirect call in some cases (using
4487 two patterns keeps CSE from performing this optimization). */
4488 if (GET_CODE (op) == SYMBOL_REF)
4489 call_insn = emit_call_insn (gen_call_value_internal_symref (operands[0],
4490 op,
4491 operands[2]));
4492 else
4493 {
4494 rtx tmpreg = gen_rtx_REG (word_mode, 22);
4495 emit_move_insn (tmpreg, force_reg (word_mode, op));
4496 call_insn = emit_call_insn (gen_call_value_internal_reg (operands[0],
4497 operands[2]));
4498 }
4499 if (flag_pic)
4500 {
4501 use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), pic_offset_table_rtx);
4502
4503 /* After each call we must restore the PIC register, even if it
4504 doesn't appear to be used.
4505
4506 This will set regs_ever_live for the callee saved register we
4507 stored the PIC register in. */
4508 emit_move_insn (pic_offset_table_rtx,
4509 gen_rtx_REG (word_mode, PIC_OFFSET_TABLE_REGNUM_SAVED));
4510 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
4511
4512 /* Gross. We have to keep the scheduler from moving the restore
4513 of the PIC register away from the call. SCHED_GROUP_P is
4514 supposed to do this, but for some reason the compiler will
4515 go into an infinite loop when we use that.
4516
4517 This method (blockage insn) may make worse code (then again
4518 it may not since calls are nearly blockages anyway), but at
4519 least it should work. */
4520 emit_insn (gen_blockage ());
4521 }
4522 DONE;
4523 }")
4524
4525 (define_insn "call_value_internal_symref"
4526 [(set (match_operand 0 "" "=rf")
4527 (call (mem:SI (match_operand:SI 1 "call_operand_address" ""))
4528 (match_operand 2 "" "i")))
4529 (clobber (reg:SI 2))
4530 (use (const_int 0))]
4531 ;;- Don't use operand 1 for most machines.
4532 "! TARGET_PORTABLE_RUNTIME"
4533 "*
4534 {
4535 output_arg_descriptor (insn);
4536 return output_call (insn, operands[1]);
4537 }"
4538 [(set_attr "type" "call")
4539 (set (attr "length")
4540 ;; If we're sure that we can either reach the target or that the
4541 ;; linker can use a long-branch stub, then the length is 4 bytes.
4542 ;;
4543 ;; Otherwise we have to use a long call;
4544 ;; in that case the length will be either 52 bytes (non-pic)
4545 ;; or 68 bytes (pic).
4546 (if_then_else (lt (plus (symbol_ref "total_code_bytes") (pc))
4547 (const_int 240000))
4548 (const_int 4)
4549 (if_then_else (eq (symbol_ref "flag_pic")
4550 (const_int 0))
4551 (const_int 52)
4552 (const_int 68))))])
4553
4554 (define_insn "call_value_internal_reg"
4555 [(set (match_operand 0 "" "=rf")
4556 (call (mem:SI (reg:SI 22))
4557 (match_operand 1 "" "i")))
4558 (clobber (reg:SI 2))
4559 (use (const_int 1))]
4560 ""
4561 "*
4562 {
4563 rtx xoperands[2];
4564
4565 /* First the special case for kernels, level 0 systems, etc. */
4566 if (TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS)
4567 return \"ble 0(%%sr4,%%r22)\;copy %%r31,%%r2\";
4568
4569 /* Now the normal case -- we can reach $$dyncall directly or
4570 we're sure that we can get there via a long-branch stub.
4571
4572 No need to check target flags as the length uniquely identifies
4573 the remaining cases. */
4574 if (get_attr_length (insn) == 8)
4575 return \".CALL\\tARGW0=GR\;bl $$dyncall,%%r31\;copy %%r31,%%r2\";
4576
4577 /* Long millicode call, but we are not generating PIC or portable runtime
4578 code. */
4579 if (get_attr_length (insn) == 12)
4580 return \".CALL\\tARGW0=GR\;ldil L%%$$dyncall,%%r2\;ble R%%$$dyncall(%%sr4,%%r2)\;copy %%r31,%%r2\";
4581
4582 /* Long millicode call for portable runtime. */
4583 if (get_attr_length (insn) == 20)
4584 return \"ldil L%%$$dyncall,%%r31\;ldo R%%$$dyncall(%%r31),%%r31\;blr %%r0,%%r2\;bv,n %%r0(%%r31)\;nop\";
4585
4586 /* Otherwise we're generating PIC code. */
4587 xoperands[0] = operands[1];
4588 xoperands[1] = gen_label_rtx ();
4589 output_asm_insn (\"bl .+8,%%r1\", xoperands);
4590 output_asm_insn (\"addil L%%$$dyncall-%1,%%r1\", xoperands);
4591 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
4592 CODE_LABEL_NUMBER (xoperands[1]));
4593 output_asm_insn (\"ldo R%%$$dyncall-%1(%%r1),%%r1\", xoperands);
4594 output_asm_insn (\"blr %%r0,%%r2\", xoperands);
4595 output_asm_insn (\"bv,n %%r0(%%r1)\\n\\tnop\", xoperands);
4596 return \"\";
4597 }"
4598 [(set_attr "type" "dyncall")
4599 (set (attr "length")
4600 (cond [
4601 ;; First NO_SPACE_REGS
4602 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
4603 (const_int 0))
4604 (const_int 8)
4605
4606 ;; Target (or stub) within reach
4607 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
4608 (const_int 240000))
4609 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4610 (const_int 0)))
4611 (const_int 8)
4612
4613 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
4614 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4615 (const_int 0))
4616 (eq (symbol_ref "flag_pic")
4617 (const_int 0)))
4618 (const_int 12)
4619
4620 (ne (symbol_ref "TARGET_PORTABLE_RUNTIME")
4621 (const_int 0))
4622 (const_int 20)]
4623
4624 ;; Out of range PIC case
4625 (const_int 24)))])
4626
4627 ;; Call subroutine returning any type.
4628
4629 (define_expand "untyped_call"
4630 [(parallel [(call (match_operand 0 "" "")
4631 (const_int 0))
4632 (match_operand 1 "" "")
4633 (match_operand 2 "" "")])]
4634 ""
4635 "
4636 {
4637 int i;
4638
4639 emit_call_insn (gen_call (operands[0], const0_rtx));
4640
4641 for (i = 0; i < XVECLEN (operands[2], 0); i++)
4642 {
4643 rtx set = XVECEXP (operands[2], 0, i);
4644 emit_move_insn (SET_DEST (set), SET_SRC (set));
4645 }
4646
4647 /* The optimizer does not know that the call sets the function value
4648 registers we stored in the result block. We avoid problems by
4649 claiming that all hard registers are used and clobbered at this
4650 point. */
4651 emit_insn (gen_blockage ());
4652
4653 DONE;
4654 }")
4655 (define_insn "nop"
4656 [(const_int 0)]
4657 ""
4658 "nop"
4659 [(set_attr "type" "move")
4660 (set_attr "length" "4")])
4661
4662 ;; These are just placeholders so we know where branch tables
4663 ;; begin and end.
4664 (define_insn "begin_brtab"
4665 [(const_int 1)]
4666 ""
4667 "*
4668 {
4669 /* Only GAS actually supports this pseudo-op. */
4670 if (TARGET_GAS)
4671 return \".begin_brtab\";
4672 else
4673 return \"\";
4674 }"
4675 [(set_attr "type" "move")
4676 (set_attr "length" "0")])
4677
4678 (define_insn "end_brtab"
4679 [(const_int 2)]
4680 ""
4681 "*
4682 {
4683 /* Only GAS actually supports this pseudo-op. */
4684 if (TARGET_GAS)
4685 return \".end_brtab\";
4686 else
4687 return \"\";
4688 }"
4689 [(set_attr "type" "move")
4690 (set_attr "length" "0")])
4691
4692 ;;; Hope this is only within a function...
4693 (define_insn "indirect_jump"
4694 [(set (pc) (match_operand 0 "register_operand" "r"))]
4695 "GET_MODE (operands[0]) == word_mode"
4696 "bv%* %%r0(%0)"
4697 [(set_attr "type" "branch")
4698 (set_attr "length" "4")])
4699
4700 ;;; EH does longjmp's from and within the data section. Thus,
4701 ;;; an interspace branch is required for the longjmp implementation.
4702 ;;; Registers r1 and r2 are not saved in the jmpbuf environment.
4703 ;;; Thus, they can be used as scratch registers for the jump.
4704 (define_insn "interspace_jump"
4705 [(set (pc) (match_operand:SI 0 "register_operand" "a"))
4706 (clobber (reg:SI 2))]
4707 ""
4708 "ldsid (%%sr0,%0),%%r2\; mtsp %%r2,%%sr0\; be%* 0(%%sr0,%0)"
4709 [(set_attr "type" "branch")
4710 (set_attr "length" "12")])
4711
4712 (define_expand "builtin_longjmp"
4713 [(unspec_volatile [(match_operand 0 "register_operand" "r")] 3)]
4714 ""
4715 "
4716 {
4717 /* The elements of the buffer are, in order: */
4718 rtx fp = gen_rtx_MEM (Pmode, operands[0]);
4719 rtx lab = gen_rtx_MEM (Pmode, plus_constant (operands[0], 4));
4720 rtx stack = gen_rtx_MEM (Pmode, plus_constant (operands[0], 8));
4721 rtx pv = gen_rtx_REG (Pmode, 1);
4722
4723 /* This bit is the same as expand_builtin_longjmp. */
4724 emit_move_insn (hard_frame_pointer_rtx, fp);
4725 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
4726 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
4727 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
4728
4729 /* Load the label we are jumping through into r1 so that we know
4730 where to look for it when we get back to setjmp's function for
4731 restoring the gp. */
4732 emit_move_insn (pv, lab);
4733 emit_jump_insn (gen_interspace_jump (pv));
4734 emit_barrier ();
4735 DONE;
4736 }")
4737
4738 (define_insn "extzv"
4739 [(set (match_operand:SI 0 "register_operand" "=r")
4740 (zero_extract:SI (match_operand:SI 1 "register_operand" "r")
4741 (match_operand:SI 2 "uint5_operand" "")
4742 (match_operand:SI 3 "uint5_operand" "")))]
4743 ""
4744 "extru %1,%3+%2-1,%2,%0"
4745 [(set_attr "type" "shift")
4746 (set_attr "length" "4")])
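;; Worked example (sketch), using the convention that bit 0 is the most
;; significant bit: extracting an 8-bit field at bit position 16 emits
;;   extru %1,23,8,%0
;; since 16 + 8 - 1 = 23 is the bit that ends the field; the result is the
;; field right-justified and zero-extended, i.e. (x >> 8) & 0xff.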
4747
4748 (define_insn ""
4749 [(set (match_operand:SI 0 "register_operand" "=r")
4750 (zero_extract:SI (match_operand:SI 1 "register_operand" "r")
4751 (const_int 1)
4752 (match_operand:SI 3 "register_operand" "q")))]
4753 ""
4754 "vextru %1,1,%0"
4755 [(set_attr "type" "shift")
4756 (set_attr "length" "4")])
4757
4758 (define_insn "extv"
4759 [(set (match_operand:SI 0 "register_operand" "=r")
4760 (sign_extract:SI (match_operand:SI 1 "register_operand" "r")
4761 (match_operand:SI 2 "uint5_operand" "")
4762 (match_operand:SI 3 "uint5_operand" "")))]
4763 ""
4764 "extrs %1,%3+%2-1,%2,%0"
4765 [(set_attr "type" "shift")
4766 (set_attr "length" "4")])
4767
4768 (define_insn ""
4769 [(set (match_operand:SI 0 "register_operand" "=r")
4770 (sign_extract:SI (match_operand:SI 1 "register_operand" "r")
4771 (const_int 1)
4772 (match_operand:SI 3 "register_operand" "q")))]
4773 ""
4774 "vextrs %1,1,%0"
4775 [(set_attr "type" "shift")
4776 (set_attr "length" "4")])
4777
4778 (define_insn "insv"
4779 [(set (zero_extract:SI (match_operand:SI 0 "register_operand" "+r,r")
4780 (match_operand:SI 1 "uint5_operand" "")
4781 (match_operand:SI 2 "uint5_operand" ""))
4782 (match_operand:SI 3 "arith5_operand" "r,L"))]
4783 ""
4784 "@
4785 dep %3,%2+%1-1,%1,%0
4786 depi %3,%2+%1-1,%1,%0"
4787 [(set_attr "type" "shift,shift")
4788 (set_attr "length" "4,4")])
4789
4790 ;; Optimize insertion of const_int values of type 1...1xxxx.
4791 (define_insn ""
4792 [(set (zero_extract:SI (match_operand:SI 0 "register_operand" "+r")
4793 (match_operand:SI 1 "uint5_operand" "")
4794 (match_operand:SI 2 "uint5_operand" ""))
4795 (match_operand:SI 3 "const_int_operand" ""))]
4796 "(INTVAL (operands[3]) & 0x10) != 0 &&
4797 (~INTVAL (operands[3]) & ((1L << INTVAL (operands[1])) - 1) & ~0xf) == 0"
4798 "*
4799 {
4800 operands[3] = GEN_INT ((INTVAL (operands[3]) & 0xf) - 0x10);
4801 return \"depi %3,%2+%1-1,%1,%0\";
4802 }"
4803 [(set_attr "type" "shift")
4804 (set_attr "length" "4")])
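;; Worked example (sketch): inserting the constant 26 (binary 11010) into a
;; 5-bit field satisfies the condition, and (26 & 0xf) - 0x10 = -6, so the
;; insn emits depi -6,...,5,... ; the five low bits of -6 are 11010 again,
;; and depi's sign-extended immediate supplies the leading ones required by
;; the 1...1xxxx form for wider fields.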
4805
4806 ;; This insn is used for some loop tests, typically loops reversed when
4807 ;; strength reduction is used. It is actually created when the instruction
4808 ;; combination phase combines the special loop test. Since this insn
4809 ;; is both a jump insn and has an output, it must deal with its own
4810 ;; reloads, hence the `m' constraints. The `!' constraints direct reload
4811 ;; to not choose the register alternatives in the event a reload is needed.
4812 (define_insn "decrement_and_branch_until_zero"
4813 [(set (pc)
4814 (if_then_else
4815 (match_operator 2 "comparison_operator"
4816 [(plus:SI (match_operand:SI 0 "register_operand" "+!r,!*f,!*m")
4817 (match_operand:SI 1 "int5_operand" "L,L,L"))
4818 (const_int 0)])
4819 (label_ref (match_operand 3 "" ""))
4820 (pc)))
4821 (set (match_dup 0)
4822 (plus:SI (match_dup 0) (match_dup 1)))
4823 (clobber (match_scratch:SI 4 "=X,r,r"))]
4824 ""
4825 "* return output_dbra (operands, insn, which_alternative); "
4826 ;; Do not expect to understand this the first time through.
4827 [(set_attr "type" "cbranch,multi,multi")
4828 (set (attr "length")
4829 (if_then_else (eq_attr "alternative" "0")
4830 ;; Loop counter in register case
4831 ;; Short branch has length of 4
4832 ;; Long branch has length of 8
4833 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4834 (const_int 8184))
4835 (const_int 4)
4836 (const_int 8))
4837
4838 ;; Loop counter in FP reg case.
4839 ;; Extra goo to deal with additional reload insns.
4840 (if_then_else (eq_attr "alternative" "1")
4841 (if_then_else (lt (match_dup 3) (pc))
4842 (if_then_else
4843 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 24))))
4844 (const_int 8184))
4845 (const_int 24)
4846 (const_int 28))
4847 (if_then_else
4848 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4849 (const_int 8184))
4850 (const_int 24)
4851 (const_int 28)))
4852 ;; Loop counter in memory case.
4853 ;; Extra goo to deal with additional reload insns.
4854 (if_then_else (lt (match_dup 3) (pc))
4855 (if_then_else
4856 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 12))))
4857 (const_int 8184))
4858 (const_int 12)
4859 (const_int 16))
4860 (if_then_else
4861 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4862 (const_int 8184))
4863 (const_int 12)
4864 (const_int 16))))))])
4865
4866 (define_insn ""
4867 [(set (pc)
4868 (if_then_else
4869 (match_operator 2 "movb_comparison_operator"
4870 [(match_operand:SI 1 "register_operand" "r,r,r,r") (const_int 0)])
4871 (label_ref (match_operand 3 "" ""))
4872 (pc)))
4873 (set (match_operand:SI 0 "register_operand" "=!r,!*f,!*m,!*q")
4874 (match_dup 1))]
4875 ""
4876 "* return output_movb (operands, insn, which_alternative, 0); "
4877 ;; Do not expect to understand this the first time through.
4878 [(set_attr "type" "cbranch,multi,multi,multi")
4879 (set (attr "length")
4880 (if_then_else (eq_attr "alternative" "0")
4881 ;; Loop counter in register case
4882 ;; Short branch has length of 4
4883 ;; Long branch has length of 8
4884 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4885 (const_int 8184))
4886 (const_int 4)
4887 (const_int 8))
4888
4889 ;; Loop counter in FP reg case.
4890 ;; Extra goo to deal with additional reload insns.
4891 (if_then_else (eq_attr "alternative" "1")
4892 (if_then_else (lt (match_dup 3) (pc))
4893 (if_then_else
4894 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 12))))
4895 (const_int 8184))
4896 (const_int 12)
4897 (const_int 16))
4898 (if_then_else
4899 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4900 (const_int 8184))
4901 (const_int 12)
4902 (const_int 16)))
4903 ;; Loop counter in memory or sar case.
4904 ;; Extra goo to deal with additional reload insns.
4905 (if_then_else
4906 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4907 (const_int 8184))
4908 (const_int 8)
4909 (const_int 12)))))])
4910
4911 ;; Handle negated branch.
4912 (define_insn ""
4913 [(set (pc)
4914 (if_then_else
4915 (match_operator 2 "movb_comparison_operator"
4916 [(match_operand:SI 1 "register_operand" "r,r,r,r") (const_int 0)])
4917 (pc)
4918 (label_ref (match_operand 3 "" ""))))
4919 (set (match_operand:SI 0 "register_operand" "=!r,!*f,!*m,!*q")
4920 (match_dup 1))]
4921 ""
4922 "* return output_movb (operands, insn, which_alternative, 1); "
4923 ;; Do not expect to understand this the first time through.
4924 [(set_attr "type" "cbranch,multi,multi,multi")
4925 (set (attr "length")
4926 (if_then_else (eq_attr "alternative" "0")
4927 ;; Loop counter in register case
4928 ;; Short branch has length of 4
4929 ;; Long branch has length of 8
4930 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4931 (const_int 8184))
4932 (const_int 4)
4933 (const_int 8))
4934
4935 ;; Loop counter in FP reg case.
4936 ;; Extra goo to deal with additional reload insns.
4937 (if_then_else (eq_attr "alternative" "1")
4938 (if_then_else (lt (match_dup 3) (pc))
4939 (if_then_else
4940 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 12))))
4941 (const_int 8184))
4942 (const_int 12)
4943 (const_int 16))
4944 (if_then_else
4945 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4946 (const_int 8184))
4947 (const_int 12)
4948 (const_int 16)))
4949 ;; Loop counter in memory or SAR case.
4950 ;; Extra goo to deal with additional reload insns.
4951 (if_then_else
4952 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4953 (const_int 8184))
4954 (const_int 8)
4955 (const_int 12)))))])
4956
4957 ;; The next several patterns (parallel_addb, parallel_movb, fmpyadd and
4958 ;; fmpysub) aren't currently used by the FSF sources, but will be soon.
4959 ;;
4960 ;; They're in the FSF tree for documentation and to make Cygnus<->FSF
4961 ;; merging easier.
4962 (define_insn ""
4963 [(set (pc) (label_ref (match_operand 3 "" "" )))
4964 (set (match_operand:SI 0 "register_operand" "=r")
4965 (plus:SI (match_operand:SI 1 "register_operand" "r")
4966 (match_operand:SI 2 "ireg_or_int5_operand" "rL")))]
4967 "(reload_completed && operands[0] == operands[1]) || operands[0] == operands[2]"
4968 "*
4969 {
4970 return output_parallel_addb (operands, get_attr_length (insn));
4971 }"
4972 [(set_attr "type" "parallel_branch")
4973 (set (attr "length")
4974 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4975 (const_int 8184))
4976 (const_int 4)
4977 (const_int 8)))])
4978
4979 (define_insn ""
4980 [(set (pc) (label_ref (match_operand 2 "" "" )))
4981 (set (match_operand:SF 0 "register_operand" "=r")
4982 (match_operand:SF 1 "ireg_or_int5_operand" "rL"))]
4983 "reload_completed"
4984 "*
4985 {
4986 return output_parallel_movb (operands, get_attr_length (insn));
4987 }"
4988 [(set_attr "type" "parallel_branch")
4989 (set (attr "length")
4990 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
4991 (const_int 8184))
4992 (const_int 4)
4993 (const_int 8)))])
4994
4995 (define_insn ""
4996 [(set (pc) (label_ref (match_operand 2 "" "" )))
4997 (set (match_operand:SI 0 "register_operand" "=r")
4998 (match_operand:SI 1 "ireg_or_int5_operand" "rL"))]
4999 "reload_completed"
5000 "*
5001 {
5002 return output_parallel_movb (operands, get_attr_length (insn));
5003 }"
5004 [(set_attr "type" "parallel_branch")
5005 (set (attr "length")
5006 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
5007 (const_int 8184))
5008 (const_int 4)
5009 (const_int 8)))])
5010
5011 (define_insn ""
5012 [(set (pc) (label_ref (match_operand 2 "" "" )))
5013 (set (match_operand:HI 0 "register_operand" "=r")
5014 (match_operand:HI 1 "ireg_or_int5_operand" "rL"))]
5015 "reload_completed"
5016 "*
5017 {
5018 return output_parallel_movb (operands, get_attr_length (insn));
5019 }"
5020 [(set_attr "type" "parallel_branch")
5021 (set (attr "length")
5022 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
5023 (const_int 8184))
5024 (const_int 4)
5025 (const_int 8)))])
5026
5027 (define_insn ""
5028 [(set (pc) (label_ref (match_operand 2 "" "" )))
5029 (set (match_operand:QI 0 "register_operand" "=r")
5030 (match_operand:QI 1 "ireg_or_int5_operand" "rL"))]
5031 "reload_completed"
5032 "*
5033 {
5034 return output_parallel_movb (operands, get_attr_length (insn));
5035 }"
5036 [(set_attr "type" "parallel_branch")
5037 (set (attr "length")
5038 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
5039 (const_int 8184))
5040 (const_int 4)
5041 (const_int 8)))])
5042
5043 (define_insn ""
5044 [(set (match_operand 0 "register_operand" "=f")
5045 (mult (match_operand 1 "register_operand" "f")
5046 (match_operand 2 "register_operand" "f")))
5047 (set (match_operand 3 "register_operand" "+f")
5048 (plus (match_operand 4 "register_operand" "f")
5049 (match_operand 5 "register_operand" "f")))]
5050 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT
5051 && reload_completed && fmpyaddoperands (operands)"
5052 "*
5053 {
5054 if (GET_MODE (operands[0]) == DFmode)
5055 {
5056 if (rtx_equal_p (operands[3], operands[5]))
5057 return \"fmpyadd,dbl %1,%2,%0,%4,%3\";
5058 else
5059 return \"fmpyadd,dbl %1,%2,%0,%5,%3\";
5060 }
5061 else
5062 {
5063 if (rtx_equal_p (operands[3], operands[5]))
5064 return \"fmpyadd,sgl %1,%2,%0,%4,%3\";
5065 else
5066 return \"fmpyadd,sgl %1,%2,%0,%5,%3\";
5067 }
5068 }"
5069 [(set_attr "type" "fpalu")
5070 (set_attr "length" "4")])
5071
5072 (define_insn ""
5073 [(set (match_operand 3 "register_operand" "+f")
5074 (plus (match_operand 4 "register_operand" "f")
5075 (match_operand 5 "register_operand" "f")))
5076 (set (match_operand 0 "register_operand" "=f")
5077 (mult (match_operand 1 "register_operand" "f")
5078 (match_operand 2 "register_operand" "f")))]
5079 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT
5080 && reload_completed && fmpyaddoperands (operands)"
5081 "*
5082 {
5083 if (GET_MODE (operands[0]) == DFmode)
5084 {
5085 if (rtx_equal_p (operands[3], operands[5]))
5086 return \"fmpyadd,dbl %1,%2,%0,%4,%3\";
5087 else
5088 return \"fmpyadd,dbl %1,%2,%0,%5,%3\";
5089 }
5090 else
5091 {
5092 if (rtx_equal_p (operands[3], operands[5]))
5093 return \"fmpyadd,sgl %1,%2,%0,%4,%3\";
5094 else
5095 return \"fmpyadd,sgl %1,%2,%0,%5,%3\";
5096 }
5097 }"
5098 [(set_attr "type" "fpalu")
5099 (set_attr "length" "4")])
5100
5101 (define_insn ""
5102 [(set (match_operand 0 "register_operand" "=f")
5103 (mult (match_operand 1 "register_operand" "f")
5104 (match_operand 2 "register_operand" "f")))
5105 (set (match_operand 3 "register_operand" "+f")
5106 (minus (match_operand 4 "register_operand" "f")
5107 (match_operand 5 "register_operand" "f")))]
5108 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT
5109 && reload_completed && fmpysuboperands (operands)"
5110 "*
5111 {
5112 if (GET_MODE (operands[0]) == DFmode)
5113 return \"fmpysub,dbl %1,%2,%0,%5,%3\";
5114 else
5115 return \"fmpysub,sgl %1,%2,%0,%5,%3\";
5116 }"
5117 [(set_attr "type" "fpalu")
5118 (set_attr "length" "4")])
5119
5120 (define_insn ""
5121 [(set (match_operand 3 "register_operand" "+f")
5122 (minus (match_operand 4 "register_operand" "f")
5123 (match_operand 5 "register_operand" "f")))
5124 (set (match_operand 0 "register_operand" "=f")
5125 (mult (match_operand 1 "register_operand" "f")
5126 (match_operand 2 "register_operand" "f")))]
5127 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT
5128 && reload_completed && fmpysuboperands (operands)"
5129 "*
5130 {
5131 if (GET_MODE (operands[0]) == DFmode)
5132 return \"fmpysub,dbl %1,%2,%0,%5,%3\";
5133 else
5134 return \"fmpysub,sgl %1,%2,%0,%5,%3\";
5135 }"
5136 [(set_attr "type" "fpalu")
5137 (set_attr "length" "4")])
5138
5139 ;; Clean up redundant memory accesses left behind by reload.
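;; The peephole below matches a store to a non-volatile DFmode memory slot
;; that is immediately reloaded into another register of the same class.
;; The store is kept, but the reload becomes a plain register-to-register
;; copy.  Roughly (the register numbers and offset are illustrative only):
;;
;;     fstds %fr4,-16(%r30)          fstds %fr4,-16(%r30)
;;     fldds -16(%r30),%fr5   ==>    fcpy,dbl %fr4,%fr5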
5140 (define_peephole
5141 [(set (match_operand 0 "reg_or_nonsymb_mem_operand" "")
5142 (match_operand 1 "register_operand" "fr"))
5143 (set (match_operand 2 "register_operand" "fr")
5144 (match_dup 0))]
5145 "! TARGET_SOFT_FLOAT
5146 && GET_CODE (operands[0]) == MEM
5147 && ! MEM_VOLATILE_P (operands[0])
5148 && GET_MODE (operands[0]) == GET_MODE (operands[1])
5149 && GET_MODE (operands[0]) == GET_MODE (operands[2])
5150 && GET_MODE (operands[0]) == DFmode
5151 && GET_CODE (operands[1]) == REG
5152 && GET_CODE (operands[2]) == REG
5153 && ! side_effects_p (XEXP (operands[0], 0))
5154 && REGNO_REG_CLASS (REGNO (operands[1]))
5155 == REGNO_REG_CLASS (REGNO (operands[2]))"
5156 "*
5157 {
5158 rtx xoperands[2];
5159
5160 if (FP_REG_P (operands[1]))
5161 output_asm_insn (output_fp_move_double (operands), operands);
5162 else
5163 output_asm_insn (output_move_double (operands), operands);
5164
5165 if (rtx_equal_p (operands[1], operands[2]))
5166 return \"\";
5167
5168 xoperands[0] = operands[2];
5169 xoperands[1] = operands[1];
5170
5171 if (FP_REG_P (xoperands[1]))
5172 output_asm_insn (output_fp_move_double (xoperands), xoperands);
5173 else
5174 output_asm_insn (output_move_double (xoperands), xoperands);
5175
5176 return \"\";
5177 }")
5178
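;; Similarly, when the same non-volatile DFmode memory location is loaded
;; into two registers of the same class, keep the first load and turn the
;; second one into a register-to-register copy.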
5179 (define_peephole
5180 [(set (match_operand 0 "register_operand" "fr")
5181 (match_operand 1 "reg_or_nonsymb_mem_operand" ""))
5182 (set (match_operand 2 "register_operand" "fr")
5183 (match_dup 1))]
5184 "! TARGET_SOFT_FLOAT
5185 && GET_CODE (operands[1]) == MEM
5186 && ! MEM_VOLATILE_P (operands[1])
5187 && GET_MODE (operands[0]) == GET_MODE (operands[1])
5188 && GET_MODE (operands[0]) == GET_MODE (operands[2])
5189 && GET_MODE (operands[0]) == DFmode
5190 && GET_CODE (operands[0]) == REG
5191 && GET_CODE (operands[2]) == REG
5192 && ! side_effects_p (XEXP (operands[1], 0))
5193 && REGNO_REG_CLASS (REGNO (operands[0]))
5194 == REGNO_REG_CLASS (REGNO (operands[2]))"
5195 "*
5196 {
5197 rtx xoperands[2];
5198
5199 if (FP_REG_P (operands[0]))
5200 output_asm_insn (output_fp_move_double (operands), operands);
5201 else
5202 output_asm_insn (output_move_double (operands), operands);
5203
5204 xoperands[0] = operands[2];
5205 xoperands[1] = operands[0];
5206
5207 if (FP_REG_P (xoperands[1]))
5208 output_asm_insn (output_fp_move_double (xoperands), xoperands);
5209 else
5210 output_asm_insn (output_move_double (xoperands), xoperands);
5211
5212 return \"\";
5213 }")
5214
5215 ;; Flush the I and D cache lines containing the addresses in operands
5216 ;; 0 and 1.  This is used by the trampoline code for nested functions;
5217 ;; so long as the trampoline itself is less than 32 bytes, flushing a
5218 ;; line at each end is sufficient.
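;; A rough sketch of how trampoline initialization might drive these two
;; named patterns (the real code lives in the backend's trampoline support;
;; the variable names and the +28 end offset are illustrative only):
;;
;;     rtx start = force_reg (Pmode, tramp_addr);
;;     rtx end = force_reg (Pmode, plus_constant (tramp_addr, 28));
;;     /* Write back the D-cache lines covering the trampoline ...  */
;;     emit_insn (gen_dcacheflush (start, end));
;;     /* ... then flush the matching I-cache lines.  Operand 2 feeds the
;;        ldsid that picks up the code's space register; the last two
;;        operands are scratch registers.  */
;;     emit_insn (gen_icacheflush (start, end, start,
;;                                 gen_reg_rtx (SImode),
;;                                 gen_reg_rtx (SImode)));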
5219
5220 (define_insn "dcacheflush"
5221 [(unspec_volatile [(const_int 1)] 0)
5222 (use (mem:SI (match_operand 0 "register_operand" "r")))
5223 (use (mem:SI (match_operand 1 "register_operand" "r")))]
5224 ""
5225 "fdc 0(%0)\;fdc 0(%1)\;sync"
5226 [(set_attr "type" "multi")
5227 (set_attr "length" "12")])
5228
5229 (define_insn "icacheflush"
5230 [(unspec_volatile [(const_int 2)] 0)
5231 (use (mem:SI (match_operand 0 "register_operand" "r")))
5232 (use (mem:SI (match_operand 1 "register_operand" "r")))
5233 (use (match_operand 2 "register_operand" "r"))
5234 (clobber (match_operand 3 "register_operand" "=&r"))
5235 (clobber (match_operand 4 "register_operand" "=&r"))]
5236 ""
5237 "mfsp %%sr0,%4\;ldsid (%2),%3\;mtsp %3,%%sr0\;fic 0(%%sr0,%0)\;fic 0(%%sr0,%1)\;sync\;mtsp %4,%%sr0\;nop\;nop\;nop\;nop\;nop\;nop"
5238 [(set_attr "type" "multi")
5239 (set_attr "length" "52")])
5240
5241 ;; An out-of-line prologue.
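;; The body is a single millicode-style call to __outline_prologue (or
;; __outline_prologue_fp when a frame pointer is needed), with the return
;; point left in %r31.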
5242 (define_insn "outline_prologue_call"
5243 [(unspec_volatile [(const_int 0)] 0)
5244 (clobber (reg:SI 31))
5245 (clobber (reg:SI 22))
5246 (clobber (reg:SI 21))
5247 (clobber (reg:SI 20))
5248 (clobber (reg:SI 19))
5249 (clobber (reg:SI 1))]
5250 ""
5251 "*
5252 {
5253 extern int frame_pointer_needed;
5254
5255 /* We need two different versions depending on whether or not we
5256 need a frame pointer.  Also note that we return to the instruction
5257 immediately following the branch, rather than two instructions past
5258 it as is normally the case. */
5259 if (frame_pointer_needed)
5260 {
5261 /* Must import the magic millicode routine(s). */
5262 output_asm_insn (\".IMPORT __outline_prologue_fp,MILLICODE\", NULL);
5263
5264 if (TARGET_PORTABLE_RUNTIME)
5265 {
5266 output_asm_insn (\"ldil L'__outline_prologue_fp,%%r31\", NULL);
5267 output_asm_insn (\"ble,n R'__outline_prologue_fp(%%sr0,%%r31)\",
5268 NULL);
5269 }
5270 else
5271 output_asm_insn (\"bl,n __outline_prologue_fp,%%r31\", NULL);
5272 }
5273 else
5274 {
5275 /* Must import the magic millicode routine(s). */
5276 output_asm_insn (\".IMPORT __outline_prologue,MILLICODE\", NULL);
5277
5278 if (TARGET_PORTABLE_RUNTIME)
5279 {
5280 output_asm_insn (\"ldil L'__outline_prologue,%%r31\", NULL);
5281 output_asm_insn (\"ble,n R'__outline_prologue(%%sr0,%%r31)\", NULL);
5282 }
5283 else
5284 output_asm_insn (\"bl,n __outline_prologue,%%r31\", NULL);
5285 }
5286 return \"\";
5287 }"
5288 [(set_attr "type" "multi")
5289 (set_attr "length" "8")])
5290
5291 ;; An out-of-line epilogue.
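;; As with the out-of-line prologue, this is a single millicode-style
;; call, here to __outline_epilogue or __outline_epilogue_fp.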
5292 (define_insn "outline_epilogue_call"
5293 [(unspec_volatile [(const_int 1)] 0)
5294 (use (reg:SI 29))
5295 (use (reg:SI 28))
5296 (clobber (reg:SI 31))
5297 (clobber (reg:SI 22))
5298 (clobber (reg:SI 21))
5299 (clobber (reg:SI 20))
5300 (clobber (reg:SI 19))
5301 (clobber (reg:SI 2))
5302 (clobber (reg:SI 1))]
5303 ""
5304 "*
5305 {
5306 extern int frame_pointer_needed;
5307
5308 /* We need two different versions depending on whether or not we
5309 need a frame pointer.  Also note that we return to the instruction
5310 immediately following the branch, rather than two instructions past
5311 it as is normally the case. */
5312 if (frame_pointer_needed)
5313 {
5314 /* Must import the magic millicode routine. */
5315 output_asm_insn (\".IMPORT __outline_epilogue_fp,MILLICODE\", NULL);
5316
5317 /* The out-of-line prologue will make sure we return to the right
5318 instruction. */
5319 if (TARGET_PORTABLE_RUNTIME)
5320 {
5321 output_asm_insn (\"ldil L'__outline_epilogue_fp,%%r31\", NULL);
5322 output_asm_insn (\"ble,n R'__outline_epilogue_fp(%%sr0,%%r31)\",
5323 NULL);
5324 }
5325 else
5326 output_asm_insn (\"bl,n __outline_epilogue_fp,%%r31\", NULL);
5327 }
5328 else
5329 {
5330 /* Must import the magic millicode routine. */
5331 output_asm_insn (\".IMPORT __outline_epilogue,MILLICODE\", NULL);
5332
5333 /* The out-of-line prologue will make sure we return to the right
5334 instruction. */
5335 if (TARGET_PORTABLE_RUNTIME)
5336 {
5337 output_asm_insn (\"ldil L'__outline_epilogue,%%r31\", NULL);
5338 output_asm_insn (\"ble,n R'__outline_epilogue(%%sr0,%%r31)\", NULL);
5339 }
5340 else
5341 output_asm_insn (\"bl,n __outline_epilogue,%%r31\", NULL);
5342 }
5343 return \"\";
5344 }"
5345 [(set_attr "type" "multi")
5346 (set_attr "length" "8")])
5347
5348 ;; Given a function pointer, canonicalize it so it can be
5349 ;; reliably compared to another function pointer.
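;; The expander below copies the pointer into %r26 and uses the unnamed
;; insn that follows to do the work: if the low two bits of the pointer
;; are not 2, or the value is below 4096, the pointer is already a plain
;; code address and is passed through unchanged in %r29; otherwise the
;; $$sh_func_adrs millicode routine is called to extract the underlying
;; function address (presumably the pointer refers to a function
;; descriptor in that case).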
5350 (define_expand "canonicalize_funcptr_for_compare"
5351 [(set (reg:SI 26) (match_operand:SI 1 "register_operand" ""))
5352 (parallel [(set (reg:SI 29) (unspec:SI [(reg:SI 26)] 0))
5353 (clobber (match_dup 2))
5354 (clobber (reg:SI 26))
5355 (clobber (reg:SI 22))
5356 (clobber (reg:SI 31))])
5357 (set (match_operand:SI 0 "register_operand" "")
5358 (reg:SI 29))]
5359 "! TARGET_PORTABLE_RUNTIME"
5360 "
5361 {
5362 operands[2] = gen_reg_rtx (SImode);
5363 if (GET_CODE (operands[1]) != REG)
5364 {
5365 rtx tmp = gen_reg_rtx (Pmode);
5366 emit_move_insn (tmp, operands[1]);
5367 operands[1] = tmp;
5368 }
5369 }")
5370
5371 (define_insn ""
5372 [(set (reg:SI 29) (unspec:SI [(reg:SI 26)] 0))
5373 (clobber (match_operand:SI 0 "register_operand" "=a"))
5374 (clobber (reg:SI 26))
5375 (clobber (reg:SI 22))
5376 (clobber (reg:SI 31))]
5377 ""
5378 "*
5379 {
5380 /* Must import the magic millicode routine. */
5381 output_asm_insn (\".IMPORT $$sh_func_adrs,MILLICODE\", NULL);
5382
5383 /* This is absolutely amazing.
5384
5385 First, copy our input parameter into %r29 just in case we don't
5386 need to call $$sh_func_adrs. */
5387 output_asm_insn (\"copy %%r26,%%r29\", NULL);
5388
5389 /* Next, examine the low two bits of %r26; if they aren't 0x2, then
5390 we use %r26 unchanged. */
5391 if (get_attr_length (insn) == 32)
5392 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+24\", NULL);
5393 else if (get_attr_length (insn) == 40)
5394 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+32\", NULL);
5395 else if (get_attr_length (insn) == 44)
5396 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+36\", NULL);
5397 else
5398 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+20\", NULL);
5399
5400 /* Next, compare %r26 with 4096; if %r26 is less than 4096, then we
5401 use %r26 unchanged. */
5402 if (get_attr_length (insn) == 32)
5403 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+16\", NULL);
5404 else if (get_attr_length (insn) == 40)
5405 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+24\", NULL);
5406 else if (get_attr_length (insn) == 44)
5407 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+28\", NULL);
5408 else
5409 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+12\", NULL);
5410
5411 /* Else call $$sh_func_adrs to extract the function's real address. */
5412 return output_millicode_call (insn,
5413 gen_rtx_SYMBOL_REF (SImode, \"$$sh_func_adrs\"));
5414 }"
5415 [(set_attr "type" "multi")
5416 (set (attr "length")
5417 (cond [
5418 ;; Target (or stub) within reach
5419 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
5420 (const_int 240000))
5421 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
5422 (const_int 0)))
5423 (const_int 28)
5424
5425 ;; NO_SPACE_REGS
5426 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
5427 (const_int 0))
5428 (const_int 32)
5429
5430 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
5431 ;; same as NO_SPACE_REGS code
5432 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
5433 (const_int 0))
5434 (eq (symbol_ref "flag_pic")
5435 (const_int 0)))
5436 (const_int 32)
5437
5438 ;; PORTABLE_RUNTIME
5439 (ne (symbol_ref "TARGET_PORTABLE_RUNTIME")
5440 (const_int 0))
5441 (const_int 40)]
5442
5443 ;; Out of range and PIC
5444 (const_int 44)))])
5445
5446 ;; On the PA, the PIC register is call clobbered, so it must
5447 ;; be saved & restored around calls by the caller. If the call
5448 ;; doesn't return normally (nonlocal goto, or an exception is
5449 ;; thrown), then the code at the exception handler label must
5450 ;; restore the PIC register.
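;; The expander below reloads the PIC register from a fixed slot at
;; -32(%r30); the USE and the blockage insn keep the restore from being
;; deleted as dead code or scheduled away from the receiver point.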
5451 (define_expand "exception_receiver"
5452 [(const_int 4)]
5453 "!TARGET_PORTABLE_RUNTIME && flag_pic"
5454 "
5455 {
5456 /* Load the PIC register from the stack slot (in our caller's
5457 frame). */
5458 emit_move_insn (pic_offset_table_rtx,
5459 gen_rtx_MEM (SImode, plus_constant (stack_pointer_rtx, -32)));
5460 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
5461 emit_insn (gen_blockage ());
5462 DONE;
5463 }")
5464
5465