87ad11b0 1/* Subroutines for insn-output.c for HPPA.
3ef80983 2 Copyright (C) 1992, 1993, 1994, 1995 Free Software Foundation, Inc.
87ad11b0 3 Contributed by Tim Moore (moore@cs.utah.edu), based on sparc.c
4
5This file is part of GNU CC.
6
7GNU CC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 2, or (at your option)
10any later version.
11
12GNU CC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GNU CC; see the file COPYING. If not, write to
19the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
20
21#include <stdio.h>
22#include "config.h"
23#include "rtl.h"
24#include "regs.h"
25#include "hard-reg-set.h"
26#include "real.h"
27#include "insn-config.h"
28#include "conditions.h"
29#include "insn-flags.h"
30#include "output.h"
31#include "insn-attr.h"
32#include "flags.h"
33#include "tree.h"
34#include "c-tree.h"
35#include "expr.h"
d6f01525 36#include "obstack.h"
87ad11b0 37
38/* Save the operands last given to a compare for use when we
39 generate a scc or bcc insn. */
40
41rtx hppa_compare_op0, hppa_compare_op1;
42enum cmp_type hppa_branch_type;
43
134b4858 44/* Which cpu we are scheduling for. */
45enum processor_type pa_cpu;
46
47/* String to hold which cpu we are scheduling for. */
48char *pa_cpu_string;
49
eff812f1 50rtx hppa_save_pic_table_rtx;
51
87ad11b0 52/* Set by the FUNCTION_PROFILER macro. */
53int hp_profile_labelno;
54
a9960cdc 55/* Counts for the number of callee-saved general and floating point
56 registers which were saved by the current function's prologue. */
57static int gr_saved, fr_saved;
58
87ad11b0 59static rtx find_addr_reg ();
60
06ddb6f8 61/* Keep track of the number of bytes we have output in the CODE subspaces
62 during this compilation so we'll know when to emit inline long-calls. */
63
64unsigned int total_code_bytes;
65
134b4858 66void
67override_options ()
68{
 69   /* Default to 700 scheduling which is reasonable for older 800 processors,
70 correct for the 700s, and not too bad for the 7100s and 7100LCs. */
71 if (pa_cpu_string == NULL
72 || ! strcmp (pa_cpu_string, "700"))
73 {
74 pa_cpu_string = "700";
75 pa_cpu = PROCESSOR_700;
76 }
77 else if (! strcmp (pa_cpu_string, "7100"))
78 {
79 pa_cpu_string = "7100";
80 pa_cpu = PROCESSOR_7100;
81 }
 82   else if (! strcmp (pa_cpu_string, "7100LC"))
83 {
84 pa_cpu_string = "7100LC";
85 pa_cpu = PROCESSOR_7100LC;
86 }
87 else
88 {
89 warning ("Unknown -mschedule= option (%s).\nValid options are 700, 7100 and 7100LC\n", pa_cpu_string);
90 }
91}
92
93
87ad11b0 94/* Return non-zero only if OP is a register of mode MODE,
891b55b4 95 or CONST0_RTX. */
87ad11b0 96int
97reg_or_0_operand (op, mode)
98 rtx op;
99 enum machine_mode mode;
100{
891b55b4 101 return (op == CONST0_RTX (mode) || register_operand (op, mode));
87ad11b0 102}
103
575e0eb4 104/* Return non-zero if OP is suitable for use in a call to a named
105 function.
106
6d36483b 107 (???) For 2.5 try to eliminate either call_operand_address or
575e0eb4 108   function_label_operand; they perform very similar functions.  */
87ad11b0 109int
110call_operand_address (op, mode)
111 rtx op;
112 enum machine_mode mode;
113{
06ddb6f8 114 return (CONSTANT_P (op) && ! TARGET_PORTABLE_RUNTIME);
87ad11b0 115}
116
6d36483b 117/* Return 1 if X contains a symbolic expression. We know these
118 expressions will have one of a few well defined forms, so
347b5848 119 we need only check those forms. */
120int
121symbolic_expression_p (x)
122 register rtx x;
123{
124
6d36483b 125 /* Strip off any HIGH. */
347b5848 126 if (GET_CODE (x) == HIGH)
127 x = XEXP (x, 0);
128
129 return (symbolic_operand (x, VOIDmode));
130}
131
87ad11b0 132int
133symbolic_operand (op, mode)
134 register rtx op;
135 enum machine_mode mode;
136{
137 switch (GET_CODE (op))
138 {
139 case SYMBOL_REF:
140 case LABEL_REF:
141 return 1;
142 case CONST:
143 op = XEXP (op, 0);
144 return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
145 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
146 && GET_CODE (XEXP (op, 1)) == CONST_INT);
147 default:
148 return 0;
149 }
150}
151
152/* Return truth value of statement that OP is a symbolic memory
153 operand of mode MODE. */
154
155int
156symbolic_memory_operand (op, mode)
157 rtx op;
158 enum machine_mode mode;
159{
160 if (GET_CODE (op) == SUBREG)
161 op = SUBREG_REG (op);
162 if (GET_CODE (op) != MEM)
163 return 0;
164 op = XEXP (op, 0);
165 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST
166 || GET_CODE (op) == HIGH || GET_CODE (op) == LABEL_REF);
167}
168
169/* Return 1 if the operand is either a register or a memory operand that is
170 not symbolic. */
171
172int
173reg_or_nonsymb_mem_operand (op, mode)
174 register rtx op;
175 enum machine_mode mode;
176{
177 if (register_operand (op, mode))
178 return 1;
179
180 if (memory_operand (op, mode) && ! symbolic_memory_operand (op, mode))
181 return 1;
182
183 return 0;
184}
185
6d36483b 186/* Return 1 if the operand is either a register, zero, or a memory operand
891b55b4 187 that is not symbolic. */
188
189int
190reg_or_0_or_nonsymb_mem_operand (op, mode)
191 register rtx op;
192 enum machine_mode mode;
193{
194 if (register_operand (op, mode))
195 return 1;
196
197 if (op == CONST0_RTX (mode))
198 return 1;
199
200 if (memory_operand (op, mode) && ! symbolic_memory_operand (op, mode))
201 return 1;
202
203 return 0;
204}
205
6d36483b 206/* Accept any constant that can be moved in one instruction into a
d9d7c968 207 general register. */
6d36483b 208int
d9d7c968 209cint_ok_for_move (intval)
6d36483b 210 HOST_WIDE_INT intval;
d9d7c968 211{
212 /* OK if ldo, ldil, or zdepi, can be used. */
213 return (VAL_14_BITS_P (intval) || (intval & 0x7ff) == 0
214 || zdepi_cint_p (intval));
215}
216
6ecdbaa1 217/* Accept anything that can be moved in one instruction into a general
218 register. */
87ad11b0 219int
220move_operand (op, mode)
221 rtx op;
222 enum machine_mode mode;
223{
224 if (register_operand (op, mode))
225 return 1;
226
42faba01 227 if (GET_CODE (op) == CONST_INT)
d9d7c968 228 return cint_ok_for_move (INTVAL (op));
87ad11b0 229
230 if (GET_MODE (op) != mode)
231 return 0;
232 if (GET_CODE (op) == SUBREG)
233 op = SUBREG_REG (op);
234 if (GET_CODE (op) != MEM)
235 return 0;
236
237 op = XEXP (op, 0);
238 if (GET_CODE (op) == LO_SUM)
239 return (register_operand (XEXP (op, 0), Pmode)
240 && CONSTANT_P (XEXP (op, 1)));
241 return memory_address_p (mode, op);
242}
243
6ecdbaa1 244/* Accept REG and any CONST_INT that can be moved in one instruction into a
245 general register. */
246int
247reg_or_cint_move_operand (op, mode)
248 rtx op;
249 enum machine_mode mode;
250{
251 if (register_operand (op, mode))
252 return 1;
253
254 if (GET_CODE (op) == CONST_INT)
686b848d 255 return cint_ok_for_move (INTVAL (op));
256
6ecdbaa1 257 return 0;
258}
259
87ad11b0 260int
b4a7bf10 261pic_label_operand (op, mode)
87ad11b0 262 rtx op;
263 enum machine_mode mode;
264{
b4a7bf10 265 if (!flag_pic)
266 return 0;
267
268 switch (GET_CODE (op))
269 {
270 case LABEL_REF:
271 return 1;
b4a7bf10 272 case CONST:
273 op = XEXP (op, 0);
3c69dc97 274 return (GET_CODE (XEXP (op, 0)) == LABEL_REF
b4a7bf10 275 && GET_CODE (XEXP (op, 1)) == CONST_INT);
276 default:
277 return 0;
278 }
87ad11b0 279}
280
87ad11b0 281int
282fp_reg_operand (op, mode)
283 rtx op;
284 enum machine_mode mode;
285{
286 return reg_renumber && FP_REG_P (op);
287}
d6f01525 288
87ad11b0 289\f
87ad11b0 290
87ad11b0 291/* Return truth value of whether OP can be used as an operand in a
292 three operand arithmetic insn that accepts registers of mode MODE
293 or 14-bit signed integers. */
294int
295arith_operand (op, mode)
296 rtx op;
297 enum machine_mode mode;
298{
299 return (register_operand (op, mode)
300 || (GET_CODE (op) == CONST_INT && INT_14_BITS (op)));
301}
302
303/* Return truth value of whether OP can be used as an operand in a
304 three operand arithmetic insn that accepts registers of mode MODE
305 or 11-bit signed integers. */
306int
307arith11_operand (op, mode)
308 rtx op;
309 enum machine_mode mode;
310{
311 return (register_operand (op, mode)
312 || (GET_CODE (op) == CONST_INT && INT_11_BITS (op)));
313}
314
6d36483b 315/* A constant integer suitable for use in a PRE_MODIFY memory
757d4970 316 reference. */
42faba01 317int
318pre_cint_operand (op, mode)
319 rtx op;
320 enum machine_mode mode;
321{
322 return (GET_CODE (op) == CONST_INT
323 && INTVAL (op) >= -0x2000 && INTVAL (op) < 0x10);
324}
325
6d36483b 326/* A constant integer suitable for use in a POST_MODIFY memory
757d4970 327 reference. */
328int
329post_cint_operand (op, mode)
330 rtx op;
331 enum machine_mode mode;
332{
333 return (GET_CODE (op) == CONST_INT
334 && INTVAL (op) < 0x2000 && INTVAL (op) >= -0x10);
335}
336
87ad11b0 337int
338arith_double_operand (op, mode)
339 rtx op;
340 enum machine_mode mode;
341{
342 return (register_operand (op, mode)
343 || (GET_CODE (op) == CONST_DOUBLE
344 && GET_MODE (op) == mode
345 && VAL_14_BITS_P (CONST_DOUBLE_LOW (op))
346 && (CONST_DOUBLE_HIGH (op) >= 0
347 == ((CONST_DOUBLE_LOW (op) & 0x1000) == 0))));
348}
349
 350/* Return truth value of whether OP is an integer which fits the
351 range constraining immediate operands in three-address insns. */
352
353int
354int5_operand (op, mode)
355 rtx op;
356 enum machine_mode mode;
357{
358 return (GET_CODE (op) == CONST_INT && INT_5_BITS (op));
359}
360
361int
362uint5_operand (op, mode)
363 rtx op;
364 enum machine_mode mode;
365{
366 return (GET_CODE (op) == CONST_INT && INT_U5_BITS (op));
367}
368
87ad11b0 369int
370int11_operand (op, mode)
371 rtx op;
372 enum machine_mode mode;
373{
6d36483b 374 return (GET_CODE (op) == CONST_INT && INT_11_BITS (op));
375}
376
377int
378uint32_operand (op, mode)
379 rtx op;
380 enum machine_mode mode;
381{
382#if HOST_BITS_PER_WIDE_INT > 32
383 /* All allowed constants will fit a CONST_INT. */
384 return (GET_CODE (op) == CONST_INT
385 && (INTVAL (op) >= 0 && INTVAL (op) < 0x100000000L));
386#else
387 return (GET_CODE (op) == CONST_INT
388 || (GET_CODE (op) == CONST_DOUBLE
389 && CONST_DOUBLE_HIGH (op) == 0));
390#endif
87ad11b0 391}
392
393int
394arith5_operand (op, mode)
395 rtx op;
396 enum machine_mode mode;
397{
398 return register_operand (op, mode) || int5_operand (op, mode);
399}
400
fad0b60f 401/* True iff zdepi can be used to generate this CONST_INT. */
e057641f 402int
42faba01 403zdepi_cint_p (x)
6d36483b 404 unsigned HOST_WIDE_INT x;
fad0b60f 405{
42faba01 406 unsigned lsb_mask, t;
fad0b60f 407
408 /* This might not be obvious, but it's at least fast.
 409   This function is critical; we don't have the time loops would take. */
42faba01 410 lsb_mask = x & -x;
411 t = ((x >> 4) + lsb_mask) & ~(lsb_mask - 1);
412 /* Return true iff t is a power of two. */
fad0b60f 413 return ((t & (t - 1)) == 0);
414}
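
/* A worked example of the test above: for x = 0x1e0 (a 4-bit field shifted
   left by five) we get lsb_mask = 0x20 and
   t = ((0x1e0 >> 4) + 0x20) & ~0x1f = 0x20, a power of two, so the constant
   is accepted.  For x = 0x1234, t = (0x123 + 0x4) & ~0x3 = 0x124, which is
   not a power of two, so it is rejected.  */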
415
6d36483b 416/* True iff depi or extru can be used to compute (reg & mask).
417 Accept bit pattern like these:
418 0....01....1
419 1....10....0
420 1..10..01..1 */
e057641f 421int
42faba01 422and_mask_p (mask)
6d36483b 423 unsigned HOST_WIDE_INT mask;
e057641f 424{
425 mask = ~mask;
426 mask += mask & -mask;
427 return (mask & (mask - 1)) == 0;
428}
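
/* For example, 0xfffffc00 (1...10...0) and 0x000003ff (0...01...1) are
   accepted, while 0x00ffff00 is rejected because its complement,
   0xff0000ff, is not one contiguous block of set bits.  */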
429
430/* True iff depi or extru can be used to compute (reg & OP). */
431int
432and_operand (op, mode)
433 rtx op;
434 enum machine_mode mode;
435{
436 return (register_operand (op, mode)
42faba01 437 || (GET_CODE (op) == CONST_INT && and_mask_p (INTVAL (op))));
e057641f 438}
439
440/* True iff depi can be used to compute (reg | MASK). */
441int
442ior_mask_p (mask)
6d36483b 443 unsigned HOST_WIDE_INT mask;
e057641f 444{
445 mask += mask & -mask;
446 return (mask & (mask - 1)) == 0;
447}
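
/* ior_mask_p thus accepts exactly the masks forming one contiguous block of
   set bits, e.g. 0x00ffff00 or 0x0000007e, which depi can deposit as an
   all-ones field in a single instruction; 0x00ff00ff is rejected.  */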
448
449/* True iff depi can be used to compute (reg | OP). */
450int
451ior_operand (op, mode)
452 rtx op;
453 enum machine_mode mode;
454{
b744c8cb 455 return (GET_CODE (op) == CONST_INT && ior_mask_p (INTVAL (op)));
e057641f 456}
457
e5965947 458int
459lhs_lshift_operand (op, mode)
460 rtx op;
461 enum machine_mode mode;
462{
463 return register_operand (op, mode) || lhs_lshift_cint_operand (op, mode);
464}
465
466/* True iff OP is a CONST_INT of the forms 0...0xxxx or 0...01...1xxxx.
467 Such values can be the left hand side x in (x << r), using the zvdepi
468 instruction. */
469int
470lhs_lshift_cint_operand (op, mode)
471 rtx op;
472 enum machine_mode mode;
473{
474 unsigned x;
475 if (GET_CODE (op) != CONST_INT)
476 return 0;
477 x = INTVAL (op) >> 4;
478 return (x & (x + 1)) == 0;
479}
480
9c6d4825 481int
482arith32_operand (op, mode)
483 rtx op;
484 enum machine_mode mode;
485{
486 return register_operand (op, mode) || GET_CODE (op) == CONST_INT;
487}
ead9285f 488
489int
490pc_or_label_operand (op, mode)
491 rtx op;
492 enum machine_mode mode;
493{
494 return (GET_CODE (op) == PC || GET_CODE (op) == LABEL_REF);
495}
87ad11b0 496\f
497/* Legitimize PIC addresses. If the address is already
498 position-independent, we return ORIG. Newly generated
499 position-independent addresses go to REG. If we need more
500 than one register, we lose. */
501
502rtx
503legitimize_pic_address (orig, mode, reg)
504 rtx orig, reg;
505 enum machine_mode mode;
506{
507 rtx pic_ref = orig;
508
3c69dc97 509  /* Labels need special handling.  */
b4a7bf10 510  if (pic_label_operand (orig, mode))
511 {
512 emit_insn (gen_pic_load_label (reg, orig));
513 current_function_uses_pic_offset_table = 1;
514 return reg;
515 }
87ad11b0 516 if (GET_CODE (orig) == SYMBOL_REF)
517 {
518 if (reg == 0)
519 abort ();
520
521 if (flag_pic == 2)
522 {
e8df7698 523 emit_insn (gen_pic2_highpart (reg, pic_offset_table_rtx, orig));
524 pic_ref = gen_rtx (MEM, Pmode,
525 gen_rtx (LO_SUM, Pmode, reg,
526 gen_rtx (UNSPEC, SImode, gen_rtvec (1, orig), 0)));
87ad11b0 527 }
b4a7bf10 528 else
529 pic_ref = gen_rtx (MEM, Pmode,
530 gen_rtx (PLUS, Pmode, pic_offset_table_rtx, orig));
87ad11b0 531 current_function_uses_pic_offset_table = 1;
532 RTX_UNCHANGING_P (pic_ref) = 1;
533 emit_move_insn (reg, pic_ref);
534 return reg;
535 }
536 else if (GET_CODE (orig) == CONST)
537 {
57ed30e5 538 rtx base;
87ad11b0 539
540 if (GET_CODE (XEXP (orig, 0)) == PLUS
541 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
542 return orig;
543
544 if (reg == 0)
545 abort ();
546
547 if (GET_CODE (XEXP (orig, 0)) == PLUS)
548 {
549 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
550 orig = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
551 base == reg ? 0 : reg);
552 }
553 else abort ();
554 if (GET_CODE (orig) == CONST_INT)
555 {
42faba01 556 if (INT_14_BITS (orig))
87ad11b0 557 return plus_constant_for_output (base, INTVAL (orig));
558 orig = force_reg (Pmode, orig);
559 }
560 pic_ref = gen_rtx (PLUS, Pmode, base, orig);
561 /* Likewise, should we set special REG_NOTEs here? */
562 }
563 return pic_ref;
564}
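
/* As an illustration of the SYMBOL_REF case with flag_pic == 2, a reference
   to an external symbol `foo' expands roughly into

	(set reg (high part of foo, relative to the PIC register))
	(set reg (mem (lo_sum reg (unspec [foo] 0))))

   i.e. the pic2_highpart pattern followed by a PIC-register-relative load
   of the symbol's address.  */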
565
347b5848 566/* Try machine-dependent ways of modifying an illegitimate address
567 to be legitimate. If we find one, return the new, valid address.
568 This macro is used in only one place: `memory_address' in explow.c.
569
570 OLDX is the address as it was before break_out_memory_refs was called.
571 In some cases it is useful to look at this to decide what needs to be done.
572
573 MODE and WIN are passed so that this macro can use
574 GO_IF_LEGITIMATE_ADDRESS.
575
576 It is always safe for this macro to do nothing. It exists to recognize
6d36483b 577 opportunities to optimize the output.
347b5848 578
579 For the PA, transform:
580
581 memory(X + <large int>)
582
583 into:
584
585 if (<large int> & mask) >= 16
586 Y = (<large int> & ~mask) + mask + 1 Round up.
587 else
588 Y = (<large int> & ~mask) Round down.
589 Z = X + Y
590 memory (Z + (<large int> - Y));
591
6d36483b 592 This is for CSE to find several similar references, and only use one Z.
347b5848 593
594 X can either be a SYMBOL_REF or REG, but because combine can not
595 perform a 4->2 combination we do nothing for SYMBOL_REF + D where
596 D will not fit in 14 bits.
597
598 MODE_FLOAT references allow displacements which fit in 5 bits, so use
6d36483b 599 0x1f as the mask.
347b5848 600
601 MODE_INT references allow displacements which fit in 14 bits, so use
6d36483b 602 0x3fff as the mask.
347b5848 603
604 This relies on the fact that most mode MODE_FLOAT references will use FP
605 registers and most mode MODE_INT references will use integer registers.
606 (In the rare case of an FP register used in an integer MODE, we depend
607 on secondary reloads to clean things up.)
608
609
610 It is also beneficial to handle (plus (mult (X) (Y)) (Z)) in a special
611 manner if Y is 2, 4, or 8. (allows more shadd insns and shifted indexed
 612   addressing modes to be used).
613
614 Put X and Z into registers. Then put the entire expression into
615 a register. */
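
/* A worked example of the rounding above: for a MODE_INT reference to
   X + 70000 the mask is 0x3fff; 70000 & 0x3fff = 4464, which is less than
   half the boundary (8192), so Y = 70000 & ~0x3fff = 65536.  Z = X + Y goes
   into a register and the final reference becomes memory (Z + 4464), a
   displacement which fits in 14 bits.  */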
616
617rtx
618hppa_legitimize_address (x, oldx, mode)
619 rtx x, oldx;
620 enum machine_mode mode;
621{
347b5848 622 rtx orig = x;
623
b4a7bf10 624 if (flag_pic)
625 return legitimize_pic_address (x, mode, gen_reg_rtx (Pmode));
626
347b5848 627 /* Strip off CONST. */
628 if (GET_CODE (x) == CONST)
629 x = XEXP (x, 0);
630
166bf021 631 /* Note we must reject symbols which represent function addresses
632 since the assembler/linker can't handle arithmetic on plabels. */
347b5848 633 if (GET_CODE (x) == PLUS
634 && GET_CODE (XEXP (x, 1)) == CONST_INT
166bf021 635 && ((GET_CODE (XEXP (x, 0)) == SYMBOL_REF
636 && !FUNCTION_NAME_P (XSTR (XEXP (x, 0), 0)))
347b5848 637 || GET_CODE (XEXP (x, 0)) == REG))
638 {
639 rtx int_part, ptr_reg;
640 int newoffset;
641 int offset = INTVAL (XEXP (x, 1));
642 int mask = GET_MODE_CLASS (mode) == MODE_FLOAT ? 0x1f : 0x3fff;
643
6d36483b 644 /* Choose which way to round the offset. Round up if we
347b5848 645 are >= halfway to the next boundary. */
646 if ((offset & mask) >= ((mask + 1) / 2))
647 newoffset = (offset & ~ mask) + mask + 1;
648 else
649 newoffset = (offset & ~ mask);
650
651 /* If the newoffset will not fit in 14 bits (ldo), then
652 handling this would take 4 or 5 instructions (2 to load
653 the SYMBOL_REF + 1 or 2 to load the newoffset + 1 to
654 add the new offset and the SYMBOL_REF.) Combine can
655 not handle 4->2 or 5->2 combinations, so do not create
656 them. */
657 if (! VAL_14_BITS_P (newoffset)
658 && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
659 {
660 rtx const_part = gen_rtx (CONST, VOIDmode,
339613b4 661 gen_rtx (PLUS, Pmode,
347b5848 662 XEXP (x, 0),
663 GEN_INT (newoffset)));
664 rtx tmp_reg
339613b4 665 = force_reg (Pmode,
666 gen_rtx (HIGH, Pmode, const_part));
347b5848 667 ptr_reg
339613b4 668 = force_reg (Pmode,
669 gen_rtx (LO_SUM, Pmode,
347b5848 670 tmp_reg, const_part));
671 }
672 else
673 {
674 if (! VAL_14_BITS_P (newoffset))
339613b4 675 int_part = force_reg (Pmode, GEN_INT (newoffset));
347b5848 676 else
677 int_part = GEN_INT (newoffset);
678
339613b4 679 ptr_reg = force_reg (Pmode,
680 gen_rtx (PLUS, Pmode,
681 force_reg (Pmode, XEXP (x, 0)),
347b5848 682 int_part));
683 }
684 return plus_constant (ptr_reg, offset - newoffset);
685 }
45f1285a 686
687 /* Try to arrange things so that indexing modes can be used, but
6d36483b 688 only do so if indexing is safe.
45f1285a 689
690 Indexing is safe when the second operand for the outer PLUS
6d36483b 691 is a REG, SUBREG, SYMBOL_REF or the like.
45f1285a 692
6d36483b 693 For 2.5, indexing is also safe for (plus (symbol_ref) (const_int))
45f1285a 694 if the integer is > 0. */
347b5848 695 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == MULT
696 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
45f1285a 697 && shadd_constant_p (INTVAL (XEXP (XEXP (x, 0), 1)))
698 && (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == 'o'
699 || GET_CODE (XEXP (x, 1)) == SUBREG)
700 && GET_CODE (XEXP (x, 1)) != CONST)
347b5848 701 {
702 int val = INTVAL (XEXP (XEXP (x, 0), 1));
703 rtx reg1, reg2;
339613b4 704 reg1 = force_reg (Pmode, force_operand (XEXP (x, 1), 0));
705 reg2 = force_reg (Pmode,
347b5848 706 force_operand (XEXP (XEXP (x, 0), 0), 0));
339613b4 707 return force_reg (Pmode,
708 gen_rtx (PLUS, Pmode,
709 gen_rtx (MULT, Pmode, reg2,
347b5848 710 GEN_INT (val)),
711 reg1));
712 }
45f1285a 713
6d36483b 714 /* Uh-oh. We might have an address for x[n-100000]. This needs
45f1285a 715 special handling. */
716
717 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == MULT
718 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
719 && shadd_constant_p (INTVAL (XEXP (XEXP (x, 0), 1))))
720 {
721 /* Ugly. We modify things here so that the address offset specified
722 by the index expression is computed first, then added to x to form
723 the entire address.
724
725 For 2.5, it might be profitable to set things up so that we
726 compute the raw (unscaled) index first, then use scaled indexing
727 to access memory, or better yet have the MI parts of the compiler
728 handle this. */
729
730 rtx regx1, regy1, regy2, y;
731
732 /* Strip off any CONST. */
733 y = XEXP (x, 1);
734 if (GET_CODE (y) == CONST)
735 y = XEXP (y, 0);
736
7ee96d6e 737 if (GET_CODE (y) == PLUS || GET_CODE (y) == MINUS)
738 {
739 regx1 = force_reg (Pmode, force_operand (XEXP (x, 0), 0));
740 regy1 = force_reg (Pmode, force_operand (XEXP (y, 0), 0));
741 regy2 = force_reg (Pmode, force_operand (XEXP (y, 1), 0));
742 regx1 = force_reg (Pmode, gen_rtx (GET_CODE (y), Pmode, regx1, regy2));
743 return force_reg (Pmode, gen_rtx (PLUS, Pmode, regx1, regy1));
744 }
45f1285a 745 }
746
347b5848 747 return orig;
748}
749
87ad11b0 750/* For the HPPA, REG and REG+CONST is cost 0
751 and addresses involving symbolic constants are cost 2.
752
753 PIC addresses are very expensive.
754
755 It is no coincidence that this has the same structure
756 as GO_IF_LEGITIMATE_ADDRESS. */
757int
758hppa_address_cost (X)
759 rtx X;
760{
761 if (GET_CODE (X) == PLUS)
762 return 1;
763 else if (GET_CODE (X) == LO_SUM)
764 return 1;
765 else if (GET_CODE (X) == HIGH)
766 return 2;
767 return 4;
768}
769
770/* Emit insns to move operands[1] into operands[0].
771
772 Return 1 if we have written out everything that needs to be done to
773 do the move. Otherwise, return 0 and the caller will emit the move
774 normally. */
775
776int
d6f01525 777emit_move_sequence (operands, mode, scratch_reg)
87ad11b0 778 rtx *operands;
779 enum machine_mode mode;
d6f01525 780 rtx scratch_reg;
87ad11b0 781{
782 register rtx operand0 = operands[0];
783 register rtx operand1 = operands[1];
784
e8fdbafa 785 /* Handle secondary reloads for loads/stores of FP registers from
6b1c36c2 786 REG+D addresses where D does not fit in 5 bits, including
 787   (subreg (mem (addr))) cases.  */
d6f01525 788 if (fp_reg_operand (operand0, mode)
6b1c36c2 789 && ((GET_CODE (operand1) == MEM
790 && ! memory_address_p (DFmode, XEXP (operand1, 0)))
791 || ((GET_CODE (operand1) == SUBREG
792 && GET_CODE (XEXP (operand1, 0)) == MEM
793 && !memory_address_p (DFmode, XEXP (XEXP (operand1, 0), 0)))))
d6f01525 794 && scratch_reg)
795 {
6b1c36c2 796 if (GET_CODE (operand1) == SUBREG)
797 operand1 = XEXP (operand1, 0);
798
799 scratch_reg = gen_rtx (REG, SImode, REGNO (scratch_reg));
6d36483b 800 emit_move_insn (scratch_reg, XEXP (operand1, 0));
d6f01525 801 emit_insn (gen_rtx (SET, VOIDmode, operand0, gen_rtx (MEM, mode,
802 scratch_reg)));
803 return 1;
804 }
805 else if (fp_reg_operand (operand1, mode)
6b1c36c2 806 && ((GET_CODE (operand0) == MEM
807 && ! memory_address_p (DFmode, XEXP (operand0, 0)))
808 || ((GET_CODE (operand0) == SUBREG)
809 && GET_CODE (XEXP (operand0, 0)) == MEM
810 && !memory_address_p (DFmode, XEXP (XEXP (operand0, 0), 0))))
d6f01525 811 && scratch_reg)
812 {
6b1c36c2 813 if (GET_CODE (operand0) == SUBREG)
814 operand0 = XEXP (operand0, 0);
815
816 scratch_reg = gen_rtx (REG, SImode, REGNO (scratch_reg));
6d36483b 817 emit_move_insn (scratch_reg, XEXP (operand0, 0));
818 emit_insn (gen_rtx (SET, VOIDmode, gen_rtx (MEM, mode, scratch_reg),
d6f01525 819 operand1));
820 return 1;
821 }
753bd06a 822 /* Handle secondary reloads for loads of FP registers from constant
823 expressions by forcing the constant into memory.
824
6d36483b 825   Use scratch_reg to hold the address of the memory location.
753bd06a 826
6d36483b 827 ??? The proper fix is to change PREFERRED_RELOAD_CLASS to return
 828   NO_REGS when presented with a const_int and a register class
753bd06a 829 containing only FP registers. Doing so unfortunately creates
830 more problems than it solves. Fix this for 2.5. */
831 else if (fp_reg_operand (operand0, mode)
832 && CONSTANT_P (operand1)
833 && scratch_reg)
834 {
835 rtx xoperands[2];
836
837 /* Force the constant into memory and put the address of the
838 memory location into scratch_reg. */
839 xoperands[0] = scratch_reg;
840 xoperands[1] = XEXP (force_const_mem (mode, operand1), 0);
8f258b49 841 emit_move_sequence (xoperands, Pmode, 0);
753bd06a 842
843 /* Now load the destination register. */
844 emit_insn (gen_rtx (SET, mode, operand0,
845 gen_rtx (MEM, mode, scratch_reg)));
846 return 1;
847 }
e8fdbafa 848 /* Handle secondary reloads for SAR. These occur when trying to load
7d43e0f7 849   the SAR from memory, an FP register, or with a constant.  */
e8fdbafa 850 else if (GET_CODE (operand0) == REG
851 && REGNO_REG_CLASS (REGNO (operand0)) == SHIFT_REGS
852 && (GET_CODE (operand1) == MEM
7d43e0f7 853 || GET_CODE (operand1) == CONST_INT
e8fdbafa 854 || (GET_CODE (operand1) == REG
855 && FP_REG_CLASS_P (REGNO_REG_CLASS (REGNO (operand1)))))
856 && scratch_reg)
857 {
858 emit_move_insn (scratch_reg, operand1);
859 emit_move_insn (operand0, scratch_reg);
860 return 1;
861 }
d6f01525 862 /* Handle most common case: storing into a register. */
863 else if (register_operand (operand0, mode))
87ad11b0 864 {
865 if (register_operand (operand1, mode)
42faba01 866 || (GET_CODE (operand1) == CONST_INT && INT_14_BITS (operand1))
891b55b4 867 || (operand1 == CONST0_RTX (mode))
87ad11b0 868 || (GET_CODE (operand1) == HIGH
df0651dc 869 && !symbolic_operand (XEXP (operand1, 0), VOIDmode))
87ad11b0 870 /* Only `general_operands' can come here, so MEM is ok. */
871 || GET_CODE (operand1) == MEM)
872 {
873 /* Run this case quickly. */
874 emit_insn (gen_rtx (SET, VOIDmode, operand0, operand1));
875 return 1;
876 }
877 }
878 else if (GET_CODE (operand0) == MEM)
879 {
891b55b4 880 if (register_operand (operand1, mode) || operand1 == CONST0_RTX (mode))
87ad11b0 881 {
882 /* Run this case quickly. */
883 emit_insn (gen_rtx (SET, VOIDmode, operand0, operand1));
884 return 1;
885 }
2ff4bf8d 886 if (! (reload_in_progress || reload_completed))
87ad11b0 887 {
888 operands[0] = validize_mem (operand0);
889 operands[1] = operand1 = force_reg (mode, operand1);
890 }
891 }
892
893 /* Simplify the source if we need to. */
57ed30e5 894 if ((GET_CODE (operand1) != HIGH && immediate_operand (operand1, mode))
2ee034bc 895 || (GET_CODE (operand1) == HIGH
63882853 896 && symbolic_operand (XEXP (operand1, 0), mode)))
87ad11b0 897 {
2ee034bc 898 int ishighonly = 0;
899
900 if (GET_CODE (operand1) == HIGH)
901 {
902 ishighonly = 1;
903 operand1 = XEXP (operand1, 0);
904 }
87ad11b0 905 if (symbolic_operand (operand1, mode))
906 {
907 if (flag_pic)
908 {
2ff4bf8d 909 rtx temp;
910
911 if (reload_in_progress || reload_completed)
b4a7bf10 912 temp = scratch_reg ? scratch_reg : operand0;
2ff4bf8d 913 else
914 temp = gen_reg_rtx (Pmode);
6d36483b 915
87ad11b0 916 operands[1] = legitimize_pic_address (operand1, mode, temp);
b4a7bf10 917 emit_insn (gen_rtx (SET, VOIDmode, operand0, operands[1]));
87ad11b0 918 }
b4a7bf10 919 /* On the HPPA, references to data space are supposed to use dp,
920 register 27, but showing it in the RTL inhibits various cse
921 and loop optimizations. */
6d36483b 922 else
87ad11b0 923 {
166bf021 924 rtx temp, set, const_part = NULL;
2ee034bc 925
6d36483b 926 if (reload_in_progress || reload_completed)
2ee034bc 927 temp = scratch_reg ? scratch_reg : operand0;
928 else
929 temp = gen_reg_rtx (mode);
930
166bf021 931 /* Argh. The assembler and linker can't handle arithmetic
932 involving plabels. We'll have to split up operand1 here
933 if it's a function label involved in an arithmetic
934 expression. Luckily, this only happens with addition
935 of constants to plabels, which simplifies the test. */
936 if (GET_CODE (operand1) == CONST
937 && GET_CODE (XEXP (operand1, 0)) == PLUS
938 && function_label_operand (XEXP (XEXP (operand1, 0), 0),
939 Pmode))
940 {
941 /* Save away the constant part of the expression. */
942 const_part = XEXP (XEXP (operand1, 0), 1);
943 if (GET_CODE (const_part) != CONST_INT)
944 abort ();
945
946 /* Set operand1 to just the SYMBOL_REF. */
947 operand1 = XEXP (XEXP (operand1, 0), 0);
948 }
949
2ee034bc 950 if (ishighonly)
951 set = gen_rtx (SET, mode, operand0, temp);
952 else
953 set = gen_rtx (SET, VOIDmode,
954 operand0,
955 gen_rtx (LO_SUM, mode, temp, operand1));
6d36483b 956
87ad11b0 957 emit_insn (gen_rtx (SET, VOIDmode,
2ee034bc 958 temp,
d6f01525 959 gen_rtx (HIGH, mode, operand1)));
d2498717 960 emit_insn (set);
166bf021 961
962 /* Add back in the constant part if needed. */
963 if (const_part != NULL)
964 emit_insn (gen_rtx (SET, mode, operand0,
965 plus_constant (operand0,
966 XEXP (const_part, 0))));
87ad11b0 967 return 1;
968 }
2ee034bc 969 return 1;
87ad11b0 970 }
42faba01 971 else if (GET_CODE (operand1) != CONST_INT
8c8ec4de 972 || ! cint_ok_for_move (INTVAL (operand1)))
87ad11b0 973 {
2ff4bf8d 974 rtx temp;
975
976 if (reload_in_progress || reload_completed)
977 temp = operand0;
978 else
979 temp = gen_reg_rtx (mode);
980
87ad11b0 981 emit_insn (gen_rtx (SET, VOIDmode, temp,
982 gen_rtx (HIGH, mode, operand1)));
983 operands[1] = gen_rtx (LO_SUM, mode, temp, operand1);
984 }
985 }
986 /* Now have insn-emit do whatever it normally does. */
987 return 0;
988}
989
990/* Does operand (which is a symbolic_operand) live in text space? If
201f01e9 991 so SYMBOL_REF_FLAG, which is set by ENCODE_SECTION_INFO, will be true. */
87ad11b0 992
993int
994read_only_operand (operand)
995 rtx operand;
996{
997 if (GET_CODE (operand) == CONST)
998 operand = XEXP (XEXP (operand, 0), 0);
b4a7bf10 999 if (flag_pic)
1000 {
1001 if (GET_CODE (operand) == SYMBOL_REF)
1002 return SYMBOL_REF_FLAG (operand) && !CONSTANT_POOL_ADDRESS_P (operand);
1003 }
1004 else
1005 {
1006 if (GET_CODE (operand) == SYMBOL_REF)
1007 return SYMBOL_REF_FLAG (operand) || CONSTANT_POOL_ADDRESS_P (operand);
1008 }
87ad11b0 1009 return 1;
1010}
6d36483b 1011
87ad11b0 1012\f
1013/* Return the best assembler insn template
f54b1341 1014 for moving operands[1] into operands[0] as a fullword. */
5c683f13 1015char *
87ad11b0 1016singlemove_string (operands)
1017 rtx *operands;
1018{
1019 if (GET_CODE (operands[0]) == MEM)
1020 return "stw %r1,%0";
9d5108ea 1021 else if (GET_CODE (operands[1]) == MEM)
87ad11b0 1022 return "ldw %1,%0";
9d5108ea 1023 else if (GET_CODE (operands[1]) == CONST_DOUBLE
1024 && GET_MODE (operands[1]) == SFmode)
1025 {
1026 int i;
1027 union real_extract u;
1028 union float_extract { float f; int i; } v;
1029
1030 bcopy (&CONST_DOUBLE_LOW (operands[1]), &u, sizeof u);
1031 v.f = REAL_VALUE_TRUNCATE (SFmode, u.d);
1032 i = v.i;
1033
1034 operands[1] = gen_rtx (CONST_INT, VOIDmode, i);
1035
f54b1341 1036 /* See if we can handle this constant in a single instruction. */
1037 if (cint_ok_for_move (INTVAL (operands[1])))
1038 {
6d36483b 1039 HOST_WIDE_INT intval = INTVAL (operands[1]);
f54b1341 1040
1041 if (intval == 0)
1042 return "copy 0,%0";
1043 else if (VAL_14_BITS_P (intval))
1044 return "ldi %1,%0";
1045 else if ((intval & 0x7ff) == 0)
1046 return "ldil L'%1,%0";
1047 else if (zdepi_cint_p (intval))
1048 return "zdepi %Z1,%0";
1049 }
9d5108ea 1050 else
1051 return "ldil L'%1,%0\n\tldo R'%1(%0),%0";
1052 }
1053
1054 else if (GET_CODE (operands[1]) == CONST_INT)
1055 {
f54b1341 1056 /* See if we can handle this in a single instruction. */
1057 if (cint_ok_for_move (INTVAL (operands[1])))
1058 {
1059 int intval = INTVAL (operands[1]);
1060
1061 if (intval == 0)
1062 return "copy 0,%0";
1063 else if (VAL_14_BITS_P (intval))
1064 return "ldi %1,%0";
1065 else if ((intval & 0x7ff) == 0)
1066 return "ldil L'%1,%0";
1067 else if (zdepi_cint_p (intval))
1068 return "zdepi %Z1,%0";
1069 }
9d5108ea 1070 else
1071 return "ldil L'%1,%0\n\tldo R'%1(%0),%0";
1072 }
87ad11b0 1073 return "copy %1,%0";
1074}
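
/* For a few CONST_INT sources the templates chosen above are, for
   illustration:

	0		"copy 0,%0"
	-42		"ldi %1,%0"
	0x12340000	"ldil L'%1,%0"
	0x3c00		"zdepi %Z1,%0"
	0x12345678	"ldil L'%1,%0\n\tldo R'%1(%0),%0"  */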
1075\f
1076
201f01e9 1077/* Compute position (in OP[1]) and width (in OP[2])
1078 useful for copying IMM to a register using the zdepi
1079 instructions. Store the immediate value to insert in OP[0]. */
e057641f 1080void
42faba01 1081compute_zdepi_operands (imm, op)
6d36483b 1082 unsigned HOST_WIDE_INT imm;
42faba01 1083 unsigned *op;
7e10ba53 1084{
e057641f 1085 int lsb, len;
7e10ba53 1086
e057641f 1087 /* Find the least significant set bit in IMM. */
1088 for (lsb = 0; lsb < 32; lsb++)
7e10ba53 1089 {
e057641f 1090 if ((imm & 1) != 0)
7e10ba53 1091 break;
e057641f 1092 imm >>= 1;
7e10ba53 1093 }
1094
e057641f 1095 /* Choose variants based on *sign* of the 5-bit field. */
1096 if ((imm & 0x10) == 0)
1097 len = (lsb <= 28) ? 4 : 32 - lsb;
7e10ba53 1098 else
1099 {
e057641f 1100 /* Find the width of the bitstring in IMM. */
1101 for (len = 5; len < 32; len++)
7e10ba53 1102 {
e057641f 1103 if ((imm & (1 << len)) == 0)
7e10ba53 1104 break;
7e10ba53 1105 }
1106
e057641f 1107 /* Sign extend IMM as a 5-bit value. */
1108 imm = (imm & 0xf) - 0x10;
7e10ba53 1109 }
1110
42faba01 1111 op[0] = imm;
1112 op[1] = 31 - lsb;
1113 op[2] = len;
7e10ba53 1114}
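
/* Example: for imm = 0x1e0 the loop above finds lsb = 5, and the remaining
   bits 0xf have bit 4 clear, so len = 4.  The result is op[0] = 15,
   op[1] = 26, op[2] = 4, i.e. "zdepi 15,26,4" deposits the 4-bit field 1111
   with its rightmost bit at PA bit position 26, reconstructing 0x1e0.  */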
1115
87ad11b0 1116/* Output assembler code to perform a doubleword move insn
1117 with operands OPERANDS. */
1118
1119char *
1120output_move_double (operands)
1121 rtx *operands;
1122{
1123 enum { REGOP, OFFSOP, MEMOP, CNSTOP, RNDOP } optype0, optype1;
1124 rtx latehalf[2];
1125 rtx addreg0 = 0, addreg1 = 0;
1126
1127 /* First classify both operands. */
1128
1129 if (REG_P (operands[0]))
1130 optype0 = REGOP;
1131 else if (offsettable_memref_p (operands[0]))
1132 optype0 = OFFSOP;
1133 else if (GET_CODE (operands[0]) == MEM)
1134 optype0 = MEMOP;
1135 else
1136 optype0 = RNDOP;
1137
1138 if (REG_P (operands[1]))
1139 optype1 = REGOP;
1140 else if (CONSTANT_P (operands[1]))
1141 optype1 = CNSTOP;
1142 else if (offsettable_memref_p (operands[1]))
1143 optype1 = OFFSOP;
1144 else if (GET_CODE (operands[1]) == MEM)
1145 optype1 = MEMOP;
1146 else
1147 optype1 = RNDOP;
1148
1149 /* Check for the cases that the operand constraints are not
1150 supposed to allow to happen. Abort if we get one,
1151 because generating code for these cases is painful. */
1152
1153 if (optype0 != REGOP && optype1 != REGOP)
1154 abort ();
1155
1156 /* Handle auto decrementing and incrementing loads and stores
1157 specifically, since the structure of the function doesn't work
 1158     for them without major modification.  Do it better when we teach
1159 this port about the general inc/dec addressing of PA.
1160 (This was written by tege. Chide him if it doesn't work.) */
1161
1162 if (optype0 == MEMOP)
1163 {
1df0058a 1164 /* We have to output the address syntax ourselves, since print_operand
1165 doesn't deal with the addresses we want to use. Fix this later. */
1166
87ad11b0 1167 rtx addr = XEXP (operands[0], 0);
1df0058a 1168 if (GET_CODE (addr) == POST_INC || GET_CODE (addr) == POST_DEC)
87ad11b0 1169 {
1df0058a 1170 rtx high_reg = gen_rtx (SUBREG, SImode, operands[1], 0);
1171
1172 operands[0] = XEXP (addr, 0);
1173 if (GET_CODE (operands[1]) != REG || GET_CODE (operands[0]) != REG)
1174 abort ();
1175
1176 if (!reg_overlap_mentioned_p (high_reg, addr))
1177 {
1178 /* No overlap between high target register and address
1179 register. (We do this in a non-obvious way to
1180 save a register file writeback) */
1181 if (GET_CODE (addr) == POST_INC)
1182 return "stws,ma %1,8(0,%0)\n\tstw %R1,-4(0,%0)";
1183 return "stws,ma %1,-8(0,%0)\n\tstw %R1,12(0,%0)";
1184 }
1185 else
1186 abort();
a3217f65 1187 }
1df0058a 1188 else if (GET_CODE (addr) == PRE_INC || GET_CODE (addr) == PRE_DEC)
a3217f65 1189 {
1df0058a 1190 rtx high_reg = gen_rtx (SUBREG, SImode, operands[1], 0);
1191
1192 operands[0] = XEXP (addr, 0);
1193 if (GET_CODE (operands[1]) != REG || GET_CODE (operands[0]) != REG)
1194 abort ();
1195
1196 if (!reg_overlap_mentioned_p (high_reg, addr))
1197 {
1198 /* No overlap between high target register and address
1199 register. (We do this in a non-obvious way to
1200 save a register file writeback) */
1201 if (GET_CODE (addr) == PRE_INC)
1202 return "stws,mb %1,8(0,%0)\n\tstw %R1,4(0,%0)";
1203 return "stws,mb %1,-8(0,%0)\n\tstw %R1,4(0,%0)";
1204 }
1205 else
1206 abort();
87ad11b0 1207 }
1208 }
1209 if (optype1 == MEMOP)
1210 {
1211 /* We have to output the address syntax ourselves, since print_operand
1212 doesn't deal with the addresses we want to use. Fix this later. */
1213
1214 rtx addr = XEXP (operands[1], 0);
1215 if (GET_CODE (addr) == POST_INC || GET_CODE (addr) == POST_DEC)
1216 {
1217 rtx high_reg = gen_rtx (SUBREG, SImode, operands[0], 0);
1218
1219 operands[1] = XEXP (addr, 0);
1220 if (GET_CODE (operands[0]) != REG || GET_CODE (operands[1]) != REG)
1221 abort ();
1222
1223 if (!reg_overlap_mentioned_p (high_reg, addr))
1224 {
1225 /* No overlap between high target register and address
3857fa62 1226 register. (We do this in a non-obvious way to
87ad11b0 1227 save a register file writeback) */
1228 if (GET_CODE (addr) == POST_INC)
1229 return "ldws,ma 8(0,%1),%0\n\tldw -4(0,%1),%R0";
1230 return "ldws,ma -8(0,%1),%0\n\tldw 12(0,%1),%R0";
1231 }
1232 else
1233 {
1234 /* This is an undefined situation. We should load into the
1235 address register *and* update that register. Probably
1236 we don't need to handle this at all. */
1237 if (GET_CODE (addr) == POST_INC)
1238 return "ldw 4(0,%1),%R0\n\tldws,ma 8(0,%1),%0";
1239 return "ldw 4(0,%1),%R0\n\tldws,ma -8(0,%1),%0";
1240 }
1241 }
1242 else if (GET_CODE (addr) == PRE_INC || GET_CODE (addr) == PRE_DEC)
1243 {
1244 rtx high_reg = gen_rtx (SUBREG, SImode, operands[0], 0);
1245
1246 operands[1] = XEXP (addr, 0);
1247 if (GET_CODE (operands[0]) != REG || GET_CODE (operands[1]) != REG)
1248 abort ();
1249
1250 if (!reg_overlap_mentioned_p (high_reg, addr))
1251 {
1252 /* No overlap between high target register and address
3857fa62 1253 register. (We do this in a non-obvious way to
87ad11b0 1254 save a register file writeback) */
1255 if (GET_CODE (addr) == PRE_INC)
1256 return "ldws,mb 8(0,%1),%0\n\tldw 4(0,%1),%R0";
1257 return "ldws,mb -8(0,%1),%0\n\tldw 4(0,%1),%R0";
1258 }
1259 else
1260 {
1261 /* This is an undefined situation. We should load into the
1262 address register *and* update that register. Probably
1263 we don't need to handle this at all. */
1264 if (GET_CODE (addr) == PRE_INC)
1265 return "ldw 12(0,%1),%R0\n\tldws,mb 8(0,%1),%0";
1266 return "ldw -4(0,%1),%R0\n\tldws,mb -8(0,%1),%0";
1267 }
1268 }
1269 }
1270
1271 /* If an operand is an unoffsettable memory ref, find a register
1272 we can increment temporarily to make it refer to the second word. */
1273
1274 if (optype0 == MEMOP)
1275 addreg0 = find_addr_reg (XEXP (operands[0], 0));
1276
1277 if (optype1 == MEMOP)
1278 addreg1 = find_addr_reg (XEXP (operands[1], 0));
1279
1280 /* Ok, we can do one word at a time.
1281 Normally we do the low-numbered word first.
1282
1283 In either case, set up in LATEHALF the operands to use
1284 for the high-numbered word and in some cases alter the
1285 operands in OPERANDS to be suitable for the low-numbered word. */
1286
1287 if (optype0 == REGOP)
1288 latehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
1289 else if (optype0 == OFFSOP)
1290 latehalf[0] = adj_offsettable_operand (operands[0], 4);
1291 else
1292 latehalf[0] = operands[0];
1293
1294 if (optype1 == REGOP)
1295 latehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 1);
1296 else if (optype1 == OFFSOP)
1297 latehalf[1] = adj_offsettable_operand (operands[1], 4);
1298 else if (optype1 == CNSTOP)
1299 split_double (operands[1], &operands[1], &latehalf[1]);
1300 else
1301 latehalf[1] = operands[1];
1302
1303 /* If the first move would clobber the source of the second one,
1304 do them in the other order.
1305
1306 RMS says "This happens only for registers;
1307 such overlap can't happen in memory unless the user explicitly
1308 sets it up, and that is an undefined circumstance."
1309
1310 but it happens on the HP-PA when loading parameter registers,
1311 so I am going to define that circumstance, and make it work
1312 as expected. */
1313
1314 if (optype0 == REGOP && (optype1 == MEMOP || optype1 == OFFSOP)
1315 && reg_overlap_mentioned_p (operands[0], XEXP (operands[1], 0)))
1316 {
1317 /* XXX THIS PROBABLY DOESN'T WORK. */
1318 /* Do the late half first. */
1319 if (addreg1)
6a5d085a 1320 output_asm_insn ("ldo 4(%0),%0", &addreg1);
87ad11b0 1321 output_asm_insn (singlemove_string (latehalf), latehalf);
1322 if (addreg1)
6a5d085a 1323 output_asm_insn ("ldo -4(%0),%0", &addreg1);
87ad11b0 1324 /* Then clobber. */
1325 return singlemove_string (operands);
1326 }
1327
c4fa5937 1328 if (optype0 == REGOP && optype1 == REGOP
1329 && REGNO (operands[0]) == REGNO (operands[1]) + 1)
1330 {
1331 output_asm_insn (singlemove_string (latehalf), latehalf);
1332 return singlemove_string (operands);
1333 }
1334
87ad11b0 1335 /* Normal case: do the two words, low-numbered first. */
1336
1337 output_asm_insn (singlemove_string (operands), operands);
1338
1339 /* Make any unoffsettable addresses point at high-numbered word. */
1340 if (addreg0)
6a5d085a 1341 output_asm_insn ("ldo 4(%0),%0", &addreg0);
87ad11b0 1342 if (addreg1)
6a5d085a 1343 output_asm_insn ("ldo 4(%0),%0", &addreg1);
87ad11b0 1344
1345 /* Do that word. */
1346 output_asm_insn (singlemove_string (latehalf), latehalf);
1347
1348 /* Undo the adds we just did. */
1349 if (addreg0)
6a5d085a 1350 output_asm_insn ("ldo -4(%0),%0", &addreg0);
87ad11b0 1351 if (addreg1)
6a5d085a 1352 output_asm_insn ("ldo -4(%0),%0", &addreg1);
87ad11b0 1353
1354 return "";
1355}
1356\f
1357char *
1358output_fp_move_double (operands)
1359 rtx *operands;
1360{
1361 if (FP_REG_P (operands[0]))
1362 {
6d36483b 1363 if (FP_REG_P (operands[1])
891b55b4 1364 || operands[1] == CONST0_RTX (GET_MODE (operands[0])))
1365 output_asm_insn ("fcpy,dbl %r1,%0", operands);
6d36483b 1366 else
87ad11b0 1367 output_asm_insn ("fldds%F1 %1,%0", operands);
1368 }
1369 else if (FP_REG_P (operands[1]))
1370 {
23b9e2b3 1371 output_asm_insn ("fstds%F0 %1,%0", operands);
87ad11b0 1372 }
891b55b4 1373 else if (operands[1] == CONST0_RTX (GET_MODE (operands[0])))
1374 {
1375 if (GET_CODE (operands[0]) == REG)
1376 {
1377 rtx xoperands[2];
1378 xoperands[1] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
1379 xoperands[0] = operands[0];
1380 output_asm_insn ("copy %%r0,%0\n\tcopy %%r0,%1", xoperands);
1381 }
6d36483b 1382 /* This is a pain. You have to be prepared to deal with an
891b55b4 1383      arbitrary address here, including pre/post increment/decrement.
 1384
 1385      So avoid this in the MD.  */
1386 else
1387 abort ();
1388 }
87ad11b0 1389 else abort ();
1390 return "";
1391}
1392\f
1393/* Return a REG that occurs in ADDR with coefficient 1.
1394 ADDR can be effectively incremented by incrementing REG. */
1395
1396static rtx
1397find_addr_reg (addr)
1398 rtx addr;
1399{
1400 while (GET_CODE (addr) == PLUS)
1401 {
1402 if (GET_CODE (XEXP (addr, 0)) == REG)
1403 addr = XEXP (addr, 0);
1404 else if (GET_CODE (XEXP (addr, 1)) == REG)
1405 addr = XEXP (addr, 1);
1406 else if (CONSTANT_P (XEXP (addr, 0)))
1407 addr = XEXP (addr, 1);
1408 else if (CONSTANT_P (XEXP (addr, 1)))
1409 addr = XEXP (addr, 0);
1410 else
1411 abort ();
1412 }
1413 if (GET_CODE (addr) == REG)
1414 return addr;
1415 abort ();
1416}
1417
87ad11b0 1418/* Emit code to perform a block move.
1419
1420 Restriction: If the length argument is non-constant, alignment
1421 must be 4.
1422
1423 OPERANDS[0] is the destination pointer as a REG, clobbered.
1424 OPERANDS[1] is the source pointer as a REG, clobbered.
1425 if SIZE_IS_CONSTANT
1426 OPERANDS[2] is a register for temporary storage.
1427 OPERANDS[4] is the size as a CONST_INT
1428 else
1429 OPERANDS[2] is a REG which will contain the size, clobbered.
1430 OPERANDS[3] is a register for temporary storage.
1431 OPERANDS[5] is the alignment safe to use, as a CONST_INT. */
1432
1433char *
1434output_block_move (operands, size_is_constant)
1435 rtx *operands;
1436 int size_is_constant;
1437{
1438 int align = INTVAL (operands[5]);
1439 unsigned long n_bytes;
1440
 1441  /* We can't move more than four bytes at a time because the PA
 1442     has no integer move insns longer than a word.  (Could use fp mem ops?)  */
1443 if (align > 4)
1444 align = 4;
1445
1446 if (size_is_constant)
1447 {
87ad11b0 1448 unsigned long offset;
1449 rtx temp;
1450
1451 n_bytes = INTVAL (operands[4]);
1452 if (n_bytes == 0)
1453 return "";
1454
1455 if (align >= 4)
1456 {
1457 /* Don't unroll too large blocks. */
6d36483b 1458 if (n_bytes > 32)
87ad11b0 1459 goto copy_with_loop;
1460
1461 /* Read and store using two registers, and hide latency
4bbea254 1462 by deferring the stores until three instructions after
87ad11b0 1463 the corresponding load. The last load insn will read
 1464	 the entire word where the last bytes are, possibly past
1465 the end of the source block, but since loads are aligned,
1466 this is harmless. */
1467
1468 output_asm_insn ("ldws,ma 4(0,%1),%2", operands);
1469
1470 for (offset = 4; offset < n_bytes; offset += 4)
1471 {
1472 output_asm_insn ("ldws,ma 4(0,%1),%3", operands);
1473 output_asm_insn ("stws,ma %2,4(0,%0)", operands);
1474
1475 temp = operands[2];
1476 operands[2] = operands[3];
1477 operands[3] = temp;
1478 }
1479 if (n_bytes % 4 == 0)
1480 /* Store the last word. */
1481 output_asm_insn ("stw %2,0(0,%0)", operands);
1482 else
1483 {
1484 /* Store the last, partial word. */
1485 operands[4] = gen_rtx (CONST_INT, VOIDmode, n_bytes % 4);
1486 output_asm_insn ("stbys,e %2,%4(0,%0)", operands);
1487 }
1488 return "";
1489 }
1490
1491 if (align >= 2 && n_bytes >= 2)
1492 {
1493 output_asm_insn ("ldhs,ma 2(0,%1),%2", operands);
1494
1495 for (offset = 2; offset + 2 <= n_bytes; offset += 2)
1496 {
1497 output_asm_insn ("ldhs,ma 2(0,%1),%3", operands);
1498 output_asm_insn ("sths,ma %2,2(0,%0)", operands);
1499
1500 temp = operands[2];
1501 operands[2] = operands[3];
1502 operands[3] = temp;
1503 }
1504 if (n_bytes % 2 != 0)
1505 output_asm_insn ("ldb 0(0,%1),%3", operands);
1506
1507 output_asm_insn ("sths,ma %2,2(0,%0)", operands);
1508
1509 if (n_bytes % 2 != 0)
1510 output_asm_insn ("stb %3,0(0,%0)", operands);
1511
1512 return "";
1513 }
1514
1515 output_asm_insn ("ldbs,ma 1(0,%1),%2", operands);
1516
1517 for (offset = 1; offset + 1 <= n_bytes; offset += 1)
1518 {
1519 output_asm_insn ("ldbs,ma 1(0,%1),%3", operands);
1520 output_asm_insn ("stbs,ma %2,1(0,%0)", operands);
1521
1522 temp = operands[2];
1523 operands[2] = operands[3];
1524 operands[3] = temp;
1525 }
1526 output_asm_insn ("stb %2,0(0,%0)", operands);
1527
1528 return "";
1529 }
1530
1531 if (align != 4)
1532 abort();
6d36483b 1533
87ad11b0 1534 copy_with_loop:
1535
1536 if (size_is_constant)
1537 {
a3217f65 1538 /* Size is compile-time determined, and also not
87ad11b0 1539 very small (such small cases are handled above). */
1540 operands[4] = gen_rtx (CONST_INT, VOIDmode, n_bytes - 4);
1541 output_asm_insn ("ldo %4(0),%2", operands);
1542 }
1543 else
1544 {
1545 /* Decrement counter by 4, and if it becomes negative, jump past the
1546 word copying loop. */
1547 output_asm_insn ("addib,<,n -4,%2,.+16", operands);
1548 }
1549
4bbea254 1550 /* Copying loop. Note that the first load is in the annulled delay slot
87ad11b0 1551 of addib. Is it OK on PA to have a load in a delay slot, i.e. is a
1552 possible page fault stopped in time? */
1553 output_asm_insn ("ldws,ma 4(0,%1),%3", operands);
1554 output_asm_insn ("addib,>= -4,%2,.-4", operands);
1555 output_asm_insn ("stws,ma %3,4(0,%0)", operands);
1556
1557 /* The counter is negative, >= -4. The remaining number of bytes are
1558 determined by the two least significant bits. */
1559
1560 if (size_is_constant)
1561 {
1562 if (n_bytes % 4 != 0)
1563 {
1564 /* Read the entire word of the source block tail. */
1565 output_asm_insn ("ldw 0(0,%1),%3", operands);
1566 operands[4] = gen_rtx (CONST_INT, VOIDmode, n_bytes % 4);
1567 output_asm_insn ("stbys,e %3,%4(0,%0)", operands);
1568 }
1569 }
1570 else
1571 {
1572 /* Add 4 to counter. If it becomes zero, we're done. */
1573 output_asm_insn ("addib,=,n 4,%2,.+16", operands);
1574
1575 /* Read the entire word of the source block tail. (Also this
4bbea254 1576 load is in an annulled delay slot.) */
87ad11b0 1577 output_asm_insn ("ldw 0(0,%1),%3", operands);
1578
1579 /* Make %0 point at the first byte after the destination block. */
df0651dc 1580 output_asm_insn ("addl %2,%0,%0", operands);
87ad11b0 1581 /* Store the leftmost bytes, up to, but not including, the address
1582 in %0. */
1583 output_asm_insn ("stbys,e %3,0(0,%0)", operands);
1584 }
1585 return "";
1586}
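
/* As a concrete example, a constant-size copy of 8 bytes with 4-byte
   alignment takes the unrolled path above and emits four insns:

	ldws,ma 4(0,%1),%2
	ldws,ma 4(0,%1),%3
	stws,ma %2,4(0,%0)
	stw %3,0(0,%0)

   (the final stw uses the other scratch register because of the operand
   swap in the loop), so each store issues after the following load,
   hiding load latency.  */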
58e17b0b 1587
1588/* Count the number of insns necessary to handle this block move.
1589
1590 Basic structure is the same as emit_block_move, except that we
1591 count insns rather than emit them. */
1592
1593int
1594compute_movstrsi_length (insn)
1595 rtx insn;
1596{
1597 rtx pat = PATTERN (insn);
1598 int size_is_constant;
1599 int align = INTVAL (XEXP (XVECEXP (pat, 0, 6), 0));
1600 unsigned long n_bytes;
1601 int insn_count = 0;
1602
1603 if (GET_CODE (XEXP (XVECEXP (pat, 0, 5), 0)) == CONST_INT)
1604 {
1605 size_is_constant = 1;
1606 n_bytes = INTVAL (XEXP (XVECEXP (pat, 0, 5), 0));
1607 }
1608 else
1609 {
1610 size_is_constant = 0;
1611 n_bytes = 0;
1612 }
1613
 1614  /* We can't move more than four bytes at a time because the PA
 1615     has no integer move insns longer than a word.  (Could use fp mem ops?)  */
1616 if (align > 4)
1617 align = 4;
1618
1619 if (size_is_constant)
1620 {
58e17b0b 1621 unsigned long offset;
58e17b0b 1622
1623 if (n_bytes == 0)
1624 return 0;
1625
1626 if (align >= 4)
1627 {
1628 /* Don't unroll too large blocks. */
6d36483b 1629 if (n_bytes > 32)
58e17b0b 1630 goto copy_with_loop;
1631
1632 /* first load */
1633 insn_count = 1;
1634
1635 /* Count the unrolled insns. */
1636 for (offset = 4; offset < n_bytes; offset += 4)
1637 insn_count += 2;
1638
1639 /* Count last store or partial store. */
1640 insn_count += 1;
5a1231ef 1641 return insn_count * 4;
58e17b0b 1642 }
1643
1644 if (align >= 2 && n_bytes >= 2)
1645 {
1646 /* initial load. */
1647 insn_count = 1;
1648
1649 /* Unrolled loop. */
1650 for (offset = 2; offset + 2 <= n_bytes; offset += 2)
1651 insn_count += 2;
1652
1653 /* ??? odd load/store */
1654 if (n_bytes % 2 != 0)
1655 insn_count += 2;
1656
1657 /* ??? final store from loop. */
1658 insn_count += 1;
1659
5a1231ef 1660 return insn_count * 4;
58e17b0b 1661 }
1662
1663 /* First load. */
1664 insn_count = 1;
1665
1666 /* The unrolled loop. */
1667 for (offset = 1; offset + 1 <= n_bytes; offset += 1)
1668 insn_count += 2;
1669
1670 /* Final store. */
1671 insn_count += 1;
1672
5a1231ef 1673 return insn_count * 4;
58e17b0b 1674 }
1675
1676 if (align != 4)
1677 abort();
6d36483b 1678
58e17b0b 1679 copy_with_loop:
1680
1681 /* setup for constant and non-constant case. */
1682 insn_count = 1;
1683
1684 /* The copying loop. */
1685 insn_count += 3;
1686
1687 /* The counter is negative, >= -4. The remaining number of bytes are
1688 determined by the two least significant bits. */
1689
1690 if (size_is_constant)
1691 {
1692 if (n_bytes % 4 != 0)
1693 insn_count += 2;
1694 }
1695 else
1696 insn_count += 4;
5a1231ef 1697 return insn_count * 4;
58e17b0b 1698}
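
/* For the 8-byte, 4-byte-aligned example shown after output_block_move,
   this computes 1 (first load) + 2 (one unrolled iteration) + 1 (final
   store) = 4 insns, i.e. a length of 16 bytes, matching what
   output_block_move actually emits.  */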
87ad11b0 1699\f
1700
e057641f 1701char *
1702output_and (operands)
1703 rtx *operands;
1704{
d6f01525 1705 if (GET_CODE (operands[2]) == CONST_INT && INTVAL (operands[2]) != 0)
e057641f 1706 {
1707 unsigned mask = INTVAL (operands[2]);
1708 int ls0, ls1, ms0, p, len;
1709
1710 for (ls0 = 0; ls0 < 32; ls0++)
1711 if ((mask & (1 << ls0)) == 0)
1712 break;
1713
1714 for (ls1 = ls0; ls1 < 32; ls1++)
1715 if ((mask & (1 << ls1)) != 0)
1716 break;
1717
1718 for (ms0 = ls1; ms0 < 32; ms0++)
1719 if ((mask & (1 << ms0)) == 0)
1720 break;
1721
1722 if (ms0 != 32)
1723 abort();
1724
1725 if (ls1 == 32)
1726 {
1727 len = ls0;
1728
1729 if (len == 0)
1730 abort ();
1731
1732 operands[2] = gen_rtx (CONST_INT, VOIDmode, len);
1733 return "extru %1,31,%2,%0";
1734 }
1735 else
1736 {
1737 /* We could use this `depi' for the case above as well, but `depi'
1738 requires one more register file access than an `extru'. */
1739
1740 p = 31 - ls0;
1741 len = ls1 - ls0;
1742
1743 operands[2] = gen_rtx (CONST_INT, VOIDmode, p);
1744 operands[3] = gen_rtx (CONST_INT, VOIDmode, len);
1745 return "depi 0,%2,%3,%0";
1746 }
1747 }
1748 else
1749 return "and %1,%2,%0";
1750}
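
/* Examples: for (reg & 0x000000ff) ls1 reaches 32, so we emit
   "extru %1,31,8,%0"; for (reg & 0xffffff00) we instead clear the low byte
   with "depi 0,31,8,%0".  Only masks accepted by and_mask_p reach this
   code.  */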
1751
1752char *
1753output_ior (operands)
1754 rtx *operands;
1755{
c9da5f4d 1756 unsigned mask = INTVAL (operands[2]);
57ed30e5 1757 int bs0, bs1, p, len;
6d36483b 1758
c9da5f4d 1759 if (INTVAL (operands[2]) == 0)
1760 return "copy %1,%0";
e057641f 1761
c9da5f4d 1762 for (bs0 = 0; bs0 < 32; bs0++)
1763 if ((mask & (1 << bs0)) != 0)
1764 break;
e057641f 1765
c9da5f4d 1766 for (bs1 = bs0; bs1 < 32; bs1++)
1767 if ((mask & (1 << bs1)) == 0)
1768 break;
e057641f 1769
c9da5f4d 1770 if (bs1 != 32 && ((unsigned) 1 << bs1) <= mask)
1771 abort();
e057641f 1772
c9da5f4d 1773 p = 31 - bs0;
1774 len = bs1 - bs0;
e057641f 1775
c9da5f4d 1776 operands[2] = gen_rtx (CONST_INT, VOIDmode, p);
1777 operands[3] = gen_rtx (CONST_INT, VOIDmode, len);
1778 return "depi -1,%2,%3,%0";
e057641f 1779}
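
/* Example: for (reg | 0x0000ff00) we get bs0 = 8 and bs1 = 16, so the
   template expands to "depi -1,23,8,%0", setting the eight-bit field whose
   rightmost bit is at PA bit position 23.  */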
1780\f
87ad11b0 1781/* Output an ascii string. */
57ed30e5 1782void
87ad11b0 1783output_ascii (file, p, size)
1784 FILE *file;
1785 unsigned char *p;
1786 int size;
1787{
1788 int i;
1789 int chars_output;
1790 unsigned char partial_output[16]; /* Max space 4 chars can occupy. */
1791
1792 /* The HP assembler can only take strings of 256 characters at one
1793 time. This is a limitation on input line length, *not* the
1794 length of the string. Sigh. Even worse, it seems that the
1795 restriction is in number of input characters (see \xnn &
1796 \whatever). So we have to do this very carefully. */
1797
1798 fprintf (file, "\t.STRING \"");
1799
1800 chars_output = 0;
1801 for (i = 0; i < size; i += 4)
1802 {
1803 int co = 0;
1804 int io = 0;
1805 for (io = 0, co = 0; io < MIN (4, size - i); io++)
1806 {
1807 register unsigned int c = p[i + io];
1808
1809 if (c == '\"' || c == '\\')
1810 partial_output[co++] = '\\';
1811 if (c >= ' ' && c < 0177)
1812 partial_output[co++] = c;
1813 else
1814 {
1815 unsigned int hexd;
1816 partial_output[co++] = '\\';
1817 partial_output[co++] = 'x';
1818 hexd = c / 16 - 0 + '0';
1819 if (hexd > '9')
1820 hexd -= '9' - 'a' + 1;
1821 partial_output[co++] = hexd;
1822 hexd = c % 16 - 0 + '0';
1823 if (hexd > '9')
1824 hexd -= '9' - 'a' + 1;
1825 partial_output[co++] = hexd;
1826 }
1827 }
1828 if (chars_output + co > 243)
1829 {
1830 fprintf (file, "\"\n\t.STRING \"");
1831 chars_output = 0;
1832 }
1833 fwrite (partial_output, 1, co, file);
1834 chars_output += co;
1835 co = 0;
1836 }
1837 fprintf (file, "\"\n");
1838}
1839\f
201f01e9 1840/* You may have trouble believing this, but this is the HP-PA stack
87ad11b0 1841 layout. Wow.
1842
1843 Offset Contents
1844
1845 Variable arguments (optional; any number may be allocated)
1846
1847 SP-(4*(N+9)) arg word N
1848 : :
1849 SP-56 arg word 5
1850 SP-52 arg word 4
1851
1852 Fixed arguments (must be allocated; may remain unused)
1853
1854 SP-48 arg word 3
1855 SP-44 arg word 2
1856 SP-40 arg word 1
1857 SP-36 arg word 0
1858
1859 Frame Marker
1860
1861 SP-32 External Data Pointer (DP)
1862 SP-28 External sr4
1863 SP-24 External/stub RP (RP')
1864 SP-20 Current RP
1865 SP-16 Static Link
1866 SP-12 Clean up
1867 SP-8 Calling Stub RP (RP'')
1868 SP-4 Previous SP
1869
1870 Top of Frame
1871
1872 SP-0 Stack Pointer (points to next available address)
1873
1874*/
1875
1876/* This function saves registers as follows. Registers marked with ' are
1877 this function's registers (as opposed to the previous function's).
1878 If a frame_pointer isn't needed, r4 is saved as a general register;
1879 the space for the frame pointer is still allocated, though, to keep
1880 things simple.
1881
1882
1883 Top of Frame
1884
1885 SP (FP') Previous FP
1886 SP + 4 Alignment filler (sigh)
1887 SP + 8 Space for locals reserved here.
1888 .
1889 .
1890 .
 1891       SP + n    All call saved registers used.
1892 .
1893 .
1894 .
1895 SP + o All call saved fp registers used.
1896 .
1897 .
1898 .
1899 SP + p (SP') points to next available address.
6d36483b 1900
87ad11b0 1901*/
1902
daee63dd 1903/* Emit RTL to store REG at the memory location specified by BASE+DISP.
1904 Handle case where DISP > 8k by using the add_high_const pattern.
1905
1906 Note in DISP > 8k case, we will leave the high part of the address
1907 in %r1. There is code in expand_hppa_{prologue,epilogue} that knows this.*/
1908static void
1909store_reg (reg, disp, base)
1910 int reg, disp, base;
87ad11b0 1911{
1912 if (VAL_14_BITS_P (disp))
daee63dd 1913 {
6d36483b 1914 emit_move_insn (gen_rtx (MEM, SImode,
1915 gen_rtx (PLUS, SImode,
daee63dd 1916 gen_rtx (REG, SImode, base),
1917 GEN_INT (disp))),
1918 gen_rtx (REG, SImode, reg));
1919 }
87ad11b0 1920 else
daee63dd 1921 {
6d36483b 1922 emit_insn (gen_add_high_const (gen_rtx (REG, SImode, 1),
1923 gen_rtx (REG, SImode, base),
daee63dd 1924 GEN_INT (disp)));
1925 emit_move_insn (gen_rtx (MEM, SImode,
6d36483b 1926 gen_rtx (LO_SUM, SImode,
daee63dd 1927 gen_rtx (REG, SImode, 1),
1928 GEN_INT (disp))),
1929 gen_rtx (REG, SImode, reg));
1930 }
87ad11b0 1931}
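/* Roughly speaking (the exact mnemonics come from the matching patterns
   in pa.md), a 14-bit displacement turns into a single store,

	stw %reg,disp(0,%base)

   while a larger displacement is split into a high/low pair,

	addil L'disp,%base	; high part, result lands in %r1
	stw %reg,R'disp(0,%r1)

   which is why the comment above warns that %r1 is left live.  */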
1932
daee63dd 1933/* Emit RTL to load REG from the memory location specified by BASE+DISP.
1934 Handle case where DISP > 8k by using the add_high_const pattern.
1935
1936 Note in DISP > 8k case, we will leave the high part of the address
 1937	 in %r1.  There is code in hppa_expand_{prologue,epilogue} that knows this. */
1938static void
1939load_reg (reg, disp, base)
1940 int reg, disp, base;
1941{
1942 if (VAL_14_BITS_P (disp))
1943 {
1944 emit_move_insn (gen_rtx (REG, SImode, reg),
6d36483b 1945 gen_rtx (MEM, SImode,
1946 gen_rtx (PLUS, SImode,
daee63dd 1947 gen_rtx (REG, SImode, base),
1948 GEN_INT (disp))));
daee63dd 1949 }
1950 else
1951 {
6d36483b 1952 emit_insn (gen_add_high_const (gen_rtx (REG, SImode, 1),
daee63dd 1953 gen_rtx (REG, SImode, base),
1954 GEN_INT (disp)));
1955 emit_move_insn (gen_rtx (REG, SImode, reg),
1956 gen_rtx (MEM, SImode,
6d36483b 1957 gen_rtx (LO_SUM, SImode,
1958 gen_rtx (REG, SImode, 1),
daee63dd 1959 GEN_INT (disp))));
1960 }
1961}
1962
1963/* Emit RTL to set REG to the value specified by BASE+DISP.
1964 Handle case where DISP > 8k by using the add_high_const pattern.
1965
1966 Note in DISP > 8k case, we will leave the high part of the address
 1967	 in %r1.  There is code in hppa_expand_{prologue,epilogue} that knows this. */
1968static void
1969set_reg_plus_d(reg, base, disp)
1970 int reg, base, disp;
87ad11b0 1971{
1972 if (VAL_14_BITS_P (disp))
daee63dd 1973 {
1974 emit_move_insn (gen_rtx (REG, SImode, reg),
6d36483b 1975 gen_rtx (PLUS, SImode,
daee63dd 1976 gen_rtx (REG, SImode, base),
1977 GEN_INT (disp)));
daee63dd 1978 }
87ad11b0 1979 else
daee63dd 1980 {
6d36483b 1981 emit_insn (gen_add_high_const (gen_rtx (REG, SImode, 1),
daee63dd 1982 gen_rtx (REG, SImode, base),
1983 GEN_INT (disp)));
1984 emit_move_insn (gen_rtx (REG, SImode, reg),
6d36483b 1985 gen_rtx (LO_SUM, SImode,
daee63dd 1986 gen_rtx (REG, SImode, 1),
1987 GEN_INT (disp)));
1988 }
87ad11b0 1989}
1990
3ddcbb9d 1991/* Global variables set by FUNCTION_PROLOGUE. */
1992/* Size of frame. Need to know this to emit return insns from
1993 leaf procedures. */
a1ab4fa3 1994static int actual_fsize;
1995static int local_fsize, save_fregs;
3ddcbb9d 1996
87ad11b0 1997int
a1ab4fa3 1998compute_frame_size (size, fregs_live)
87ad11b0 1999 int size;
3ddcbb9d 2000 int *fregs_live;
87ad11b0 2001{
2002 extern int current_function_outgoing_args_size;
a1ab4fa3 2003 int i, fsize;
87ad11b0 2004
6d36483b 2005 /* 8 is space for frame pointer + filler. If any frame is allocated
a1ab4fa3 2006 we need to add this in because of STARTING_FRAME_OFFSET. */
2007 fsize = size + (size || frame_pointer_needed ? 8 : 0);
87ad11b0 2008
df0651dc 2009 for (i = 18; i >= 4; i--)
daee63dd 2010 {
df0651dc 2011 if (regs_ever_live[i])
daee63dd 2012 fsize += 4;
2013 }
df0651dc 2014 /* If we don't have a frame pointer, the register normally used for that
2015 purpose is saved just like other registers, not in the "frame marker". */
2016 if (! frame_pointer_needed)
87ad11b0 2017 {
df0651dc 2018 if (regs_ever_live[FRAME_POINTER_REGNUM])
2019 fsize += 4;
87ad11b0 2020 }
df0651dc 2021 fsize = (fsize + 7) & ~7;
2022
2023 for (i = 66; i >= 48; i -= 2)
2024 if (regs_ever_live[i] || regs_ever_live[i + 1])
2025 {
2026 fsize += 8;
2027 if (fregs_live)
2028 *fregs_live = 1;
2029 }
2030
a1ab4fa3 2031 fsize += current_function_outgoing_args_size;
2032 if (! leaf_function_p () || fsize)
2033 fsize += 32;
57ed30e5 2034 return (fsize + 63) & ~63;
87ad11b0 2035}
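/* A worked example: 40 bytes of locals, three callee-saved GRs live, no
   FP registers live, 16 bytes of outgoing arguments and a frame pointer
   gives

	fsize = 40 + 8           = 48    locals plus fp/filler
	fsize += 3 * 4           = 60    general register saves
	fsize = (60 + 7) & ~7    = 64    doubleword alignment
	fsize += 16              = 80    outgoing arguments
	fsize += 32              = 112   frame marker
	return (112 + 63) & ~63  = 128   final 64-byte alignment.  */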
6d36483b 2036
daee63dd 2037rtx hp_profile_label_rtx;
2038static char hp_profile_label_name[8];
87ad11b0 2039void
a1ab4fa3 2040output_function_prologue (file, size)
87ad11b0 2041 FILE *file;
2042 int size;
87ad11b0 2043{
d151162a 2044 /* The function's label and associated .PROC must never be
2045 separated and must be output *after* any profiling declarations
2046 to avoid changing spaces/subspaces within a procedure. */
2047 ASM_OUTPUT_LABEL (file, XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0));
2048 fputs ("\t.PROC\n", file);
2049
daee63dd 2050 /* hppa_expand_prologue does the dirty work now. We just need
2051 to output the assembler directives which denote the start
2052 of a function. */
2acd4f33 2053 fprintf (file, "\t.CALLINFO FRAME=%d", actual_fsize);
daee63dd 2054 if (regs_ever_live[2] || profile_flag)
f3ba7709 2055 fprintf (file, ",CALLS,SAVE_RP");
daee63dd 2056 else
f3ba7709 2057 fprintf (file, ",NO_CALLS");
2058
2059 if (frame_pointer_needed)
2060 fprintf (file, ",SAVE_SP");
2061
a9960cdc 2062 /* Pass on information about the number of callee register saves
9b0c95be 2063 performed in the prologue.
2064
2065 The compiler is supposed to pass the highest register number
6d36483b 2066 saved, the assembler then has to adjust that number before
9b0c95be 2067 entering it into the unwind descriptor (to account for any
6d36483b 2068 caller saved registers with lower register numbers than the
9b0c95be 2069 first callee saved register). */
2070 if (gr_saved)
2071 fprintf (file, ",ENTRY_GR=%d", gr_saved + 2);
2072
2073 if (fr_saved)
2074 fprintf (file, ",ENTRY_FR=%d", fr_saved + 11);
a9960cdc 2075
f3ba7709 2076 fprintf (file, "\n\t.ENTRY\n");
daee63dd 2077
2078 /* Horrid hack. emit_function_prologue will modify this RTL in
2079 place to get the expected results. */
2080 if (profile_flag)
07b209fc 2081 ASM_GENERATE_INTERNAL_LABEL (hp_profile_label_name, "LP",
2082 hp_profile_labelno);
06ddb6f8 2083
2084 if (insn_addresses)
2085 {
2086 unsigned int old_total = total_code_bytes;
2087
2088 total_code_bytes += insn_addresses[INSN_UID (get_last_insn())];
 2089	      total_code_bytes += FUNCTION_BOUNDARY / BITS_PER_UNIT;
2090
2091 /* Be prepared to handle overflows. */
2092 total_code_bytes = old_total > total_code_bytes ? -1 : total_code_bytes;
2093 }
2094 else
2095 total_code_bytes = -1;
daee63dd 2096}
2097
57ed30e5 2098void
daee63dd 2099hppa_expand_prologue()
2100{
87ad11b0 2101 extern char call_used_regs[];
daee63dd 2102 int size = get_frame_size ();
afd7b680 2103 int merge_sp_adjust_with_store = 0;
daee63dd 2104 int i, offset;
2105 rtx tmpreg, size_rtx;
2106
a9960cdc 2107 gr_saved = 0;
2108 fr_saved = 0;
3ddcbb9d 2109 save_fregs = 0;
a1ab4fa3 2110 local_fsize = size + (size || frame_pointer_needed ? 8 : 0);
2111 actual_fsize = compute_frame_size (size, &save_fregs);
87ad11b0 2112
daee63dd 2113 /* Compute a few things we will use often. */
2114 tmpreg = gen_rtx (REG, SImode, 1);
2115 size_rtx = GEN_INT (actual_fsize);
87ad11b0 2116
6d36483b 2117 /* Save RP first. The calling conventions manual states RP will
daee63dd 2118 always be stored into the caller's frame at sp-20. */
372ef038 2119 if (regs_ever_live[2] || profile_flag)
6d36483b 2120 store_reg (2, -20, STACK_POINTER_REGNUM);
2121
daee63dd 2122 /* Allocate the local frame and set up the frame pointer if needed. */
a1ab4fa3 2123 if (actual_fsize)
2124 if (frame_pointer_needed)
2125 {
daee63dd 2126 /* Copy the old frame pointer temporarily into %r1. Set up the
2127 new stack pointer, then store away the saved old frame pointer
2128 into the stack at sp+actual_fsize and at the same time update
2129 the stack pointer by actual_fsize bytes. Two versions, first
2130 handles small (<8k) frames. The second handles large (>8k)
2131 frames. */
2132 emit_move_insn (tmpreg, frame_pointer_rtx);
2133 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
a1ab4fa3 2134 if (VAL_14_BITS_P (actual_fsize))
daee63dd 2135 emit_insn (gen_post_stwm (stack_pointer_rtx,
2136 stack_pointer_rtx,
2137 size_rtx, tmpreg));
a1ab4fa3 2138 else
2139 {
b75ad75e 2140 /* It is incorrect to store the saved frame pointer at *sp,
2141 then increment sp (writes beyond the current stack boundary).
2142
2143 So instead use stwm to store at *sp and post-increment the
2144 stack pointer as an atomic operation. Then increment sp to
2145 finish allocating the new frame. */
2146 emit_insn (gen_post_stwm (stack_pointer_rtx,
2147 stack_pointer_rtx,
2148 GEN_INT (64), tmpreg));
daee63dd 2149 set_reg_plus_d (STACK_POINTER_REGNUM,
2150 STACK_POINTER_REGNUM,
b75ad75e 2151 actual_fsize - 64);
a1ab4fa3 2152 }
2153 }
daee63dd 2154 /* no frame pointer needed. */
a1ab4fa3 2155 else
a1ab4fa3 2156 {
daee63dd 2157 /* In some cases we can perform the first callee register save
 2158	 and allocate the stack frame at the same time.   If so, just
2159 make a note of it and defer allocating the frame until saving
2160 the callee registers. */
6d36483b 2161 if (VAL_14_BITS_P (-actual_fsize)
2162 && local_fsize == 0
daee63dd 2163 && ! profile_flag
2164 && ! flag_pic)
afd7b680 2165 merge_sp_adjust_with_store = 1;
daee63dd 2166 /* Can not optimize. Adjust the stack frame by actual_fsize bytes. */
2167 else if (actual_fsize != 0)
2168 set_reg_plus_d (STACK_POINTER_REGNUM,
2169 STACK_POINTER_REGNUM,
2170 actual_fsize);
a1ab4fa3 2171 }
201f01e9 2172	  /* The hppa calling conventions say that %r19, the pic offset
daee63dd 2173 register, is saved at sp - 32 (in this function's frame) when
2174 generating PIC code. */
d6f01525 2175 if (flag_pic)
6d36483b 2176 store_reg (PIC_OFFSET_TABLE_REGNUM, -32, STACK_POINTER_REGNUM);
daee63dd 2177
2178 /* Profiling code.
372ef038 2179
daee63dd 2180 Instead of taking one argument, the counter label, as most normal
2181 mcounts do, _mcount appears to behave differently on the HPPA. It
6d36483b 2182 takes the return address of the caller, the address of this routine,
2183 and the address of the label. Also, it isn't magic, so
daee63dd 2184	 argument registers have to be preserved.  */
372ef038 2185 if (profile_flag)
2186 {
daee63dd 2187 int pc_offset, i, arg_offset, basereg, offsetadj;
2188
2189 pc_offset = 4 + (frame_pointer_needed
2190 ? (VAL_14_BITS_P (actual_fsize) ? 12 : 20)
2191 : (VAL_14_BITS_P (actual_fsize) ? 4 : 8));
2192
2193 /* When the function has a frame pointer, use it as the base
 2194	 register for saving/restoring registers.  Else use the stack
2195 pointer. Adjust the offset according to the frame size if
2196 this function does not have a frame pointer. */
d2daf090 2197
2198 basereg = frame_pointer_needed ? FRAME_POINTER_REGNUM
2199 : STACK_POINTER_REGNUM;
2200 offsetadj = frame_pointer_needed ? 0 : actual_fsize;
2201
daee63dd 2202 /* Horrid hack. emit_function_prologue will modify this RTL in
2203 place to get the expected results. sprintf here is just to
2204 put something in the name. */
2205 sprintf(hp_profile_label_name, "LP$%04d", -1);
2206 hp_profile_label_rtx = gen_rtx (SYMBOL_REF, SImode,
2207 hp_profile_label_name);
d6f01525 2208 if (current_function_returns_struct)
daee63dd 2209 store_reg (STRUCT_VALUE_REGNUM, - 12 - offsetadj, basereg);
2210
d2daf090 2211 for (i = 26, arg_offset = -36 - offsetadj; i >= 23; i--, arg_offset -= 4)
daee63dd 2212 if (regs_ever_live [i])
372ef038 2213 {
daee63dd 2214 store_reg (i, arg_offset, basereg);
2215 /* Deal with arg_offset not fitting in 14 bits. */
d2daf090 2216 pc_offset += VAL_14_BITS_P (arg_offset) ? 4 : 8;
372ef038 2217 }
daee63dd 2218
2219 emit_move_insn (gen_rtx (REG, SImode, 26), gen_rtx (REG, SImode, 2));
2220 emit_move_insn (tmpreg, gen_rtx (HIGH, SImode, hp_profile_label_rtx));
2221 emit_move_insn (gen_rtx (REG, SImode, 24),
2222 gen_rtx (LO_SUM, SImode, tmpreg, hp_profile_label_rtx));
2223 /* %r25 is set from within the output pattern. */
2224 emit_insn (gen_call_profiler (GEN_INT (- pc_offset - 20)));
2225
2226 /* Restore argument registers. */
d2daf090 2227 for (i = 26, arg_offset = -36 - offsetadj; i >= 23; i--, arg_offset -= 4)
daee63dd 2228 if (regs_ever_live [i])
2229 load_reg (i, arg_offset, basereg);
2230
d6f01525 2231 if (current_function_returns_struct)
daee63dd 2232 load_reg (STRUCT_VALUE_REGNUM, -12 - offsetadj, basereg);
2233
372ef038 2234 }
2235
6d36483b 2236 /* Normal register save.
daee63dd 2237
2238 Do not save the frame pointer in the frame_pointer_needed case. It
2239 was done earlier. */
87ad11b0 2240 if (frame_pointer_needed)
2241 {
df0651dc 2242 for (i = 18, offset = local_fsize; i >= 4; i--)
2243 if (regs_ever_live[i] && ! call_used_regs[i])
87ad11b0 2244 {
6d36483b 2245 store_reg (i, offset, FRAME_POINTER_REGNUM);
daee63dd 2246 offset += 4;
a9960cdc 2247 gr_saved++;
87ad11b0 2248 }
9b0c95be 2249 /* Account for %r4 which is saved in a special place. */
2250 gr_saved++;
87ad11b0 2251 }
daee63dd 2252 /* No frame pointer needed. */
87ad11b0 2253 else
2254 {
daee63dd 2255 for (i = 18, offset = local_fsize - actual_fsize; i >= 3; i--)
87ad11b0 2256 if (regs_ever_live[i] && ! call_used_regs[i])
2257 {
6d36483b 2258 /* If merge_sp_adjust_with_store is nonzero, then we can
afd7b680 2259 optimize the first GR save. */
201f01e9 2260 if (merge_sp_adjust_with_store)
afd7b680 2261 {
2262 merge_sp_adjust_with_store = 0;
daee63dd 2263 emit_insn (gen_post_stwm (stack_pointer_rtx,
2264 stack_pointer_rtx,
2265 GEN_INT (-offset),
2266 gen_rtx (REG, SImode, i)));
afd7b680 2267 }
2268 else
daee63dd 2269 store_reg (i, offset, STACK_POINTER_REGNUM);
2270 offset += 4;
a9960cdc 2271 gr_saved++;
87ad11b0 2272 }
daee63dd 2273
afd7b680 2274 /* If we wanted to merge the SP adjustment with a GR save, but we never
daee63dd 2275 did any GR saves, then just emit the adjustment here. */
201f01e9 2276 if (merge_sp_adjust_with_store)
daee63dd 2277 set_reg_plus_d (STACK_POINTER_REGNUM,
2278 STACK_POINTER_REGNUM,
2279 actual_fsize);
87ad11b0 2280 }
6d36483b 2281
87ad11b0 2282 /* Align pointer properly (doubleword boundary). */
2283 offset = (offset + 7) & ~7;
2284
2285 /* Floating point register store. */
2286 if (save_fregs)
87ad11b0 2287 {
daee63dd 2288
2289 /* First get the frame or stack pointer to the start of the FP register
2290 save area. */
a1ab4fa3 2291 if (frame_pointer_needed)
daee63dd 2292 set_reg_plus_d (1, FRAME_POINTER_REGNUM, offset);
a1ab4fa3 2293 else
daee63dd 2294 set_reg_plus_d (1, STACK_POINTER_REGNUM, offset);
2295
2296 /* Now actually save the FP registers. */
df0651dc 2297 for (i = 66; i >= 48; i -= 2)
2298 if (regs_ever_live[i] || regs_ever_live[i + 1])
2299 {
2300 emit_move_insn (gen_rtx (MEM, DFmode,
2301 gen_rtx (POST_INC, DFmode, tmpreg)),
2302 gen_rtx (REG, DFmode, i));
2303 fr_saved++;
2304 }
87ad11b0 2305 }
2306}
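/* For a small (<8k) frame with a frame pointer, the RTL generated above
   corresponds to something like (illustrative; the precise output comes
   from the md patterns):

	stw %r2,-20(0,%r30)	; save RP in the caller's frame marker
	copy %r4,%r1		; stash the old frame pointer
	copy %r30,%r4		; new frame pointer = old stack pointer
	stwm %r1,fsize(0,%r30)	; save old FP and advance SP in one insn

   followed by the individual callee GR and FR saves.  */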
2307
daee63dd 2308
87ad11b0 2309void
a1ab4fa3 2310output_function_epilogue (file, size)
87ad11b0 2311 FILE *file;
2312 int size;
87ad11b0 2313{
3695c664 2314
2315 rtx insn = get_last_insn ();
2316
daee63dd 2317 /* hppa_expand_epilogue does the dirty work now. We just need
2318 to output the assembler directives which denote the end
3695c664 2319 of a function.
2320
2321 To make debuggers happy, emit a nop if the epilogue was completely
2322 eliminated due to a volatile call as the last insn in the
6d36483b 2323 current function. That way the return address (in %r2) will
3695c664 2324 always point to a valid instruction in the current function. */
2325
2326 /* Get the last real insn. */
2327 if (GET_CODE (insn) == NOTE)
2328 insn = prev_real_insn (insn);
2329
2330 /* If it is a sequence, then look inside. */
2331 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
2332 insn = XVECEXP (PATTERN (insn), 0, 0);
2333
6d36483b 2334 /* If insn is a CALL_INSN, then it must be a call to a volatile
3695c664 2335 function (otherwise there would be epilogue insns). */
2336 if (insn && GET_CODE (insn) == CALL_INSN)
2337 fprintf (file, "\tnop\n");
6d36483b 2338
daee63dd 2339 fprintf (file, "\t.EXIT\n\t.PROCEND\n");
2340}
afd7b680 2341
daee63dd 2342void
3695c664 2343hppa_expand_epilogue ()
daee63dd 2344{
6d36483b 2345 rtx tmpreg;
daee63dd 2346 int offset,i;
2347 int merge_sp_adjust_with_load = 0;
2348
2349 /* We will use this often. */
2350 tmpreg = gen_rtx (REG, SImode, 1);
2351
2352 /* Try to restore RP early to avoid load/use interlocks when
2353 RP gets used in the return (bv) instruction. This appears to still
2354 be necessary even when we schedule the prologue and epilogue. */
afd7b680 2355 if (frame_pointer_needed
2356 && (regs_ever_live [2] || profile_flag))
daee63dd 2357 load_reg (2, -20, FRAME_POINTER_REGNUM);
87ad11b0 2358
daee63dd 2359 /* No frame pointer, and stack is smaller than 8k. */
2360 else if (! frame_pointer_needed
2361 && VAL_14_BITS_P (actual_fsize + 20)
2362 && (regs_ever_live[2] || profile_flag))
2363 load_reg (2, - (actual_fsize + 20), STACK_POINTER_REGNUM);
2364
2365 /* General register restores. */
87ad11b0 2366 if (frame_pointer_needed)
2367 {
df0651dc 2368 for (i = 18, offset = local_fsize; i >= 4; i--)
2369 if (regs_ever_live[i] && ! call_used_regs[i])
87ad11b0 2370 {
daee63dd 2371 load_reg (i, offset, FRAME_POINTER_REGNUM);
2372 offset += 4;
87ad11b0 2373 }
87ad11b0 2374 }
2375 else
2376 {
daee63dd 2377 for (i = 18, offset = local_fsize - actual_fsize; i >= 3; i--)
2378 if (regs_ever_live[i] && ! call_used_regs[i])
87ad11b0 2379 {
daee63dd 2380 /* Only for the first load.
2381 merge_sp_adjust_with_load holds the register load
2382 with which we will merge the sp adjustment. */
afd7b680 2383 if (VAL_14_BITS_P (actual_fsize + 20)
2384 && local_fsize == 0
afd7b680 2385 && ! merge_sp_adjust_with_load)
2386 merge_sp_adjust_with_load = i;
daee63dd 2387 else
2388 load_reg (i, offset, STACK_POINTER_REGNUM);
2389 offset += 4;
87ad11b0 2390 }
87ad11b0 2391 }
daee63dd 2392
87ad11b0 2393 /* Align pointer properly (doubleword boundary). */
2394 offset = (offset + 7) & ~7;
2395
daee63dd 2396 /* FP register restores. */
87ad11b0 2397 if (save_fregs)
87ad11b0 2398 {
daee63dd 2399 /* Adjust the register to index off of. */
a1ab4fa3 2400 if (frame_pointer_needed)
daee63dd 2401 set_reg_plus_d (1, FRAME_POINTER_REGNUM, offset);
a1ab4fa3 2402 else
daee63dd 2403 set_reg_plus_d (1, STACK_POINTER_REGNUM, offset);
2404
2405 /* Actually do the restores now. */
df0651dc 2406 for (i = 66; i >= 48; i -= 2)
2407 if (regs_ever_live[i] || regs_ever_live[i + 1])
2408 emit_move_insn (gen_rtx (REG, DFmode, i),
2409 gen_rtx (MEM, DFmode,
2410 gen_rtx (POST_INC, DFmode, tmpreg)));
87ad11b0 2411 }
daee63dd 2412
2413 /* No frame pointer, but we have a stack greater than 8k. We restore
1921d762 2414 %r2 very late in this case. (All other cases are restored as early
2415 as possible.) */
daee63dd 2416 if (! frame_pointer_needed
2417 && ! VAL_14_BITS_P (actual_fsize + 20)
2418 && (regs_ever_live[2] || profile_flag))
87ad11b0 2419 {
daee63dd 2420 set_reg_plus_d (STACK_POINTER_REGNUM,
2421 STACK_POINTER_REGNUM,
2422 - actual_fsize);
2423 /* Uses value left over in %r1 by set_reg_plus_d. */
2424 load_reg (2, - (actual_fsize + 20 + ((- actual_fsize) & ~0x7ff)), 1);
87ad11b0 2425 }
daee63dd 2426
2427 /* Reset stack pointer (and possibly frame pointer). The stack */
2428 /* pointer is initially set to fp + 64 to avoid a race condition.
2429 ??? What race condition?!? */
2430 else if (frame_pointer_needed)
87ad11b0 2431 {
e03010ce 2432 /* Emit a blockage insn here to keep these insns from being moved
2433 to the beginning of the prologue or into the main instruction
 2434	 stream; doing so avoids some very obscure problems.  */
2435 emit_insn (gen_blockage ());
daee63dd 2436 set_reg_plus_d (STACK_POINTER_REGNUM, FRAME_POINTER_REGNUM, 64);
2437 emit_insn (gen_pre_ldwm (stack_pointer_rtx, stack_pointer_rtx,
2438 GEN_INT (-64), frame_pointer_rtx));
87ad11b0 2439 }
daee63dd 2440 /* If we were deferring a callee register restore, do it now. */
2441 else if (! frame_pointer_needed && merge_sp_adjust_with_load)
2442 emit_insn (gen_pre_ldwm (stack_pointer_rtx,
2443 stack_pointer_rtx,
2444 GEN_INT (- actual_fsize),
6d36483b 2445 gen_rtx (REG, SImode,
daee63dd 2446 merge_sp_adjust_with_load)));
2447 else if (actual_fsize != 0)
2448 set_reg_plus_d (STACK_POINTER_REGNUM,
2449 STACK_POINTER_REGNUM,
2450 - actual_fsize);
87ad11b0 2451}
2452
757d4970 2453/* This is only valid once reload has completed because it depends on
2454 knowing exactly how much (if any) frame there is and...
2455
2456 It's only valid if there is no frame marker to de-allocate and...
2457
2458 It's only valid if %r2 hasn't been saved into the caller's frame
2459 (we're not profiling and %r2 isn't live anywhere). */
2460int
2461hppa_can_use_return_insn_p ()
2462{
2463 return (reload_completed
2464 && (compute_frame_size (get_frame_size (), 0) ? 0 : 1)
2465 && ! profile_flag
2466 && ! regs_ever_live[2]
2467 && ! frame_pointer_needed);
2468}
2469
87ad11b0 2470void
2471emit_bcond_fp (code, operand0)
2472 enum rtx_code code;
2473 rtx operand0;
2474{
2475 emit_jump_insn (gen_rtx (SET, VOIDmode, pc_rtx,
2476 gen_rtx (IF_THEN_ELSE, VOIDmode,
6d36483b 2477 gen_rtx (code, VOIDmode,
87ad11b0 2478 gen_rtx (REG, CCFPmode, 0),
2479 const0_rtx),
2480 gen_rtx (LABEL_REF, VOIDmode, operand0),
2481 pc_rtx)));
2482
2483}
2484
2485rtx
2486gen_cmp_fp (code, operand0, operand1)
2487 enum rtx_code code;
2488 rtx operand0, operand1;
2489{
2490 return gen_rtx (SET, VOIDmode, gen_rtx (REG, CCFPmode, 0),
2491 gen_rtx (code, CCFPmode, operand0, operand1));
2492}
2493
8b49b3c7 2494/* Adjust the cost of a scheduling dependency. Return the new cost of
2495 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
2496
2497int
2498pa_adjust_cost (insn, link, dep_insn, cost)
2499 rtx insn;
2500 rtx link;
2501 rtx dep_insn;
2502 int cost;
2503{
d402da4b 2504 if (! recog_memoized (insn))
2505 return 0;
8b49b3c7 2506
2507 if (REG_NOTE_KIND (link) == 0)
2508 {
2509 /* Data dependency; DEP_INSN writes a register that INSN reads some
2510 cycles later. */
2511
2512 if (get_attr_type (insn) == TYPE_FPSTORE)
2513 {
d402da4b 2514 rtx pat = PATTERN (insn);
2515 rtx dep_pat = PATTERN (dep_insn);
2516 if (GET_CODE (pat) == PARALLEL)
2517 {
2518 /* This happens for the fstXs,mb patterns. */
2519 pat = XVECEXP (pat, 0, 0);
2520 }
2521 if (GET_CODE (pat) != SET || GET_CODE (dep_pat) != SET)
8b49b3c7 2522 /* If this happens, we have to extend this to schedule
d402da4b 2523 optimally. Return 0 for now. */
2524 return 0;
8b49b3c7 2525
d402da4b 2526 if (rtx_equal_p (SET_DEST (dep_pat), SET_SRC (pat)))
8b49b3c7 2527 {
d402da4b 2528 if (! recog_memoized (dep_insn))
2529 return 0;
2530 /* DEP_INSN is writing its result to the register
2531 being stored in the fpstore INSN. */
8b49b3c7 2532 switch (get_attr_type (dep_insn))
2533 {
2534 case TYPE_FPLOAD:
134b4858 2535	      /* This costs 3 cycles, not 2 as the md says for the
 2536	         700 and 7100.  Note scaling of cost for 7100. */
 2537	      return cost + ((pa_cpu_attr == PROCESSOR_700) ? 1 : 2);
8b49b3c7 2538
2539 case TYPE_FPALU:
134b4858 2540 case TYPE_FPMULSGL:
2541 case TYPE_FPMULDBL:
8b49b3c7 2542 case TYPE_FPDIVSGL:
2543 case TYPE_FPDIVDBL:
2544 case TYPE_FPSQRTSGL:
2545 case TYPE_FPSQRTDBL:
 2546	      /* In these important cases, we save one cycle compared to
 2547	         when flop instructions feed each other. */
134b4858 2548	      return cost - ((pa_cpu_attr == PROCESSOR_700) ? 1 : 2);
8b49b3c7 2549
2550 default:
2551 return cost;
2552 }
2553 }
2554 }
2555
2556 /* For other data dependencies, the default cost specified in the
2557 md is correct. */
2558 return cost;
2559 }
2560 else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
2561 {
2562 /* Anti dependency; DEP_INSN reads a register that INSN writes some
2563 cycles later. */
2564
2565 if (get_attr_type (insn) == TYPE_FPLOAD)
2566 {
d402da4b 2567 rtx pat = PATTERN (insn);
2568 rtx dep_pat = PATTERN (dep_insn);
2569 if (GET_CODE (pat) == PARALLEL)
2570 {
2571 /* This happens for the fldXs,mb patterns. */
2572 pat = XVECEXP (pat, 0, 0);
2573 }
2574 if (GET_CODE (pat) != SET || GET_CODE (dep_pat) != SET)
8b49b3c7 2575 /* If this happens, we have to extend this to schedule
d402da4b 2576 optimally. Return 0 for now. */
2577 return 0;
8b49b3c7 2578
d402da4b 2579 if (reg_mentioned_p (SET_DEST (pat), SET_SRC (dep_pat)))
8b49b3c7 2580 {
d402da4b 2581 if (! recog_memoized (dep_insn))
2582 return 0;
8b49b3c7 2583 switch (get_attr_type (dep_insn))
2584 {
2585 case TYPE_FPALU:
134b4858 2586 case TYPE_FPMULSGL:
2587 case TYPE_FPMULDBL:
8b49b3c7 2588 case TYPE_FPDIVSGL:
2589 case TYPE_FPDIVDBL:
2590 case TYPE_FPSQRTSGL:
2591 case TYPE_FPSQRTDBL:
d402da4b 2592	      /* An fpload can't be issued until one cycle before a
134b4858 2593	         preceding arithmetic operation has finished if
d402da4b 2594	         the target of the fpload is any of the sources
 2595	         (or destination) of the arithmetic operation. */
134b4858 2596	      return cost - ((pa_cpu_attr == PROCESSOR_700) ? 1 : 2);
2597
2598 default:
2599 return 0;
2600 }
2601 }
2602 }
2603 else if (get_attr_type (insn) == TYPE_FPALU)
2604 {
2605 rtx pat = PATTERN (insn);
2606 rtx dep_pat = PATTERN (dep_insn);
2607 if (GET_CODE (pat) == PARALLEL)
2608 {
2609 /* This happens for the fldXs,mb patterns. */
2610 pat = XVECEXP (pat, 0, 0);
2611 }
2612 if (GET_CODE (pat) != SET || GET_CODE (dep_pat) != SET)
2613 /* If this happens, we have to extend this to schedule
2614 optimally. Return 0 for now. */
2615 return 0;
2616
2617 if (reg_mentioned_p (SET_DEST (pat), SET_SRC (dep_pat)))
2618 {
2619 if (! recog_memoized (dep_insn))
2620 return 0;
2621 switch (get_attr_type (dep_insn))
2622 {
2623 case TYPE_FPDIVSGL:
2624 case TYPE_FPDIVDBL:
2625 case TYPE_FPSQRTSGL:
2626 case TYPE_FPSQRTDBL:
2627 /* An ALU flop can't be issued until two cycles before a
 2628	         preceding divide or sqrt operation has finished if
 2629	         the target of the ALU flop is any of the sources
 2630	         (or destination) of the divide or sqrt operation. */
 2631	      return cost - ((pa_cpu_attr == PROCESSOR_700) ? 2 : 4);
8b49b3c7 2632
2633 default:
2634 return 0;
2635 }
2636 }
2637 }
2638
2639 /* For other anti dependencies, the cost is 0. */
2640 return 0;
2641 }
134b4858 2642 else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
2643 {
2644 /* Output dependency; DEP_INSN writes a register that INSN writes some
2645 cycles later. */
2646 if (get_attr_type (insn) == TYPE_FPLOAD)
2647 {
2648 rtx pat = PATTERN (insn);
2649 rtx dep_pat = PATTERN (dep_insn);
2650 if (GET_CODE (pat) == PARALLEL)
2651 {
2652 /* This happens for the fldXs,mb patterns. */
2653 pat = XVECEXP (pat, 0, 0);
2654 }
2655 if (GET_CODE (pat) != SET || GET_CODE (dep_pat) != SET)
2656 /* If this happens, we have to extend this to schedule
2657 optimally. Return 0 for now. */
2658 return 0;
2659
2660 if (reg_mentioned_p (SET_DEST (pat), SET_DEST (dep_pat)))
2661 {
2662 if (! recog_memoized (dep_insn))
2663 return 0;
2664 switch (get_attr_type (dep_insn))
2665 {
2666 case TYPE_FPALU:
2667 case TYPE_FPMULSGL:
2668 case TYPE_FPMULDBL:
2669 case TYPE_FPDIVSGL:
2670 case TYPE_FPDIVDBL:
2671 case TYPE_FPSQRTSGL:
2672 case TYPE_FPSQRTDBL:
 2673	      /* An fpload can't be issued until one cycle before a
 2674	         preceding arithmetic operation has finished if
 2675	         the target of the fpload is the destination of the
 2676	         arithmetic operation. */
 2677	      return cost - ((pa_cpu_attr == PROCESSOR_700) ? 1 : 2);
8b49b3c7 2678
134b4858 2679 default:
2680 return 0;
2681 }
2682 }
2683 }
2684 else if (get_attr_type (insn) == TYPE_FPALU)
2685 {
2686 rtx pat = PATTERN (insn);
2687 rtx dep_pat = PATTERN (dep_insn);
2688 if (GET_CODE (pat) == PARALLEL)
2689 {
2690 /* This happens for the fldXs,mb patterns. */
2691 pat = XVECEXP (pat, 0, 0);
2692 }
2693 if (GET_CODE (pat) != SET || GET_CODE (dep_pat) != SET)
2694 /* If this happens, we have to extend this to schedule
2695 optimally. Return 0 for now. */
2696 return 0;
2697
2698 if (reg_mentioned_p (SET_DEST (pat), SET_DEST (dep_pat)))
2699 {
2700 if (! recog_memoized (dep_insn))
2701 return 0;
2702 switch (get_attr_type (dep_insn))
2703 {
2704 case TYPE_FPDIVSGL:
2705 case TYPE_FPDIVDBL:
2706 case TYPE_FPSQRTSGL:
2707 case TYPE_FPSQRTDBL:
2708 /* An ALU flop can't be issued until two cycles before a
 2709	         preceding divide or sqrt operation has finished if
 2710	         the target of the ALU flop is also the target
 2711	         of the divide or sqrt operation. */
 2712	      return cost - ((pa_cpu_attr == PROCESSOR_700) ? 2 : 4);
2713
2714 default:
2715 return 0;
2716 }
2717 }
2718 }
2719
2720 /* For other output dependencies, the cost is 0. */
2721 return 0;
2722 }
2723 else
2724 abort ();
8b49b3c7 2725}
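/* Concretely: on the 700 an fpload feeding the register stored by an
   fpstore costs one cycle more than the md claims (two more on the
   7100), while a flop feeding that same fpstore is one (or two) cycles
   cheaper.  */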
87ad11b0 2726
58e17b0b 2727/* Return any length adjustment needed by INSN which already has its length
6d36483b 2728 computed as LENGTH. Return zero if no adjustment is necessary.
58e17b0b 2729
5fbd5940 2730 For the PA: function calls, millicode calls, and backwards short
6d36483b 2731 conditional branches with unfilled delay slots need an adjustment by +1
5fbd5940 2732 (to account for the NOP which will be inserted into the instruction stream).
58e17b0b 2733
2734 Also compute the length of an inline block move here as it is too
5fbd5940 2735 complicated to express as a length attribute in pa.md. */
58e17b0b 2736int
2737pa_adjust_insn_length (insn, length)
2738 rtx insn;
2739 int length;
2740{
2741 rtx pat = PATTERN (insn);
2742
5fbd5940 2743 /* Call insns which are *not* indirect and have unfilled delay slots. */
58e17b0b 2744 if (GET_CODE (insn) == CALL_INSN)
5fbd5940 2745 {
2746
2747 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL
2748 && GET_CODE (XEXP (XEXP (XVECEXP (pat, 0, 0), 0), 0)) == SYMBOL_REF)
5a1231ef 2749 return 4;
5fbd5940 2750 else if (GET_CODE (XVECEXP (pat, 0, 0)) == SET
2751 && GET_CODE (XEXP (XEXP (XEXP (XVECEXP (pat, 0, 0), 1), 0), 0))
2752 == SYMBOL_REF)
5a1231ef 2753 return 4;
5fbd5940 2754 else
2755 return 0;
2756 }
58e17b0b 2757 /* Millicode insn with an unfilled delay slot. */
2758 else if (GET_CODE (insn) == INSN
2759 && GET_CODE (pat) != SEQUENCE
2760 && GET_CODE (pat) != USE
2761 && GET_CODE (pat) != CLOBBER
2762 && get_attr_type (insn) == TYPE_MILLI)
5a1231ef 2763 return 4;
58e17b0b 2764 /* Block move pattern. */
2765 else if (GET_CODE (insn) == INSN
2766 && GET_CODE (pat) == PARALLEL
2767 && GET_CODE (XEXP (XVECEXP (pat, 0, 0), 0)) == MEM
2768 && GET_CODE (XEXP (XVECEXP (pat, 0, 0), 1)) == MEM
2769 && GET_MODE (XEXP (XVECEXP (pat, 0, 0), 0)) == BLKmode
2770 && GET_MODE (XEXP (XVECEXP (pat, 0, 0), 1)) == BLKmode)
5a1231ef 2771 return compute_movstrsi_length (insn) - 4;
58e17b0b 2772 /* Conditional branch with an unfilled delay slot. */
5fbd5940 2773 else if (GET_CODE (insn) == JUMP_INSN && ! simplejump_p (insn))
2774 {
2775 /* Adjust a short backwards conditional with an unfilled delay slot. */
2776 if (GET_CODE (pat) == SET
5a1231ef 2777 && length == 4
5fbd5940 2778 && ! forward_branch_p (insn))
5a1231ef 2779 return 4;
5fbd5940 2780 /* Adjust dbra insn with short backwards conditional branch with
6d36483b 2781	 unfilled delay slot -- only for the case where the counter is in a
29a4502c 2782	 general register. */
5fbd5940 2783 else if (GET_CODE (pat) == PARALLEL
2784 && GET_CODE (XVECEXP (pat, 0, 1)) == SET
2785 && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == REG
6d36483b 2786 && ! FP_REG_P (XEXP (XVECEXP (pat, 0, 1), 0))
5a1231ef 2787 && length == 4
5fbd5940 2788 && ! forward_branch_p (insn))
5a1231ef 2789 return 4;
5fbd5940 2790 else
2791 return 0;
2792 }
58e17b0b 2793 else
2794 return 0;
2795}
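/* For example, a short backwards conditional branch whose delay slot
   could not be filled is reported as 4 bytes longer here, to account
   for the nop that will be emitted into its delay slot; an unfilled
   millicode call likewise grows by 4.  */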
2796
87ad11b0 2797/* Print operand X (an rtx) in assembler syntax to file FILE.
2798 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
2799 For `%' followed by punctuation, CODE is the punctuation and X is null. */
2800
2801void
2802print_operand (file, x, code)
2803 FILE *file;
2804 rtx x;
2805 int code;
2806{
2807 switch (code)
2808 {
2809 case '#':
2810 /* Output a 'nop' if there's nothing for the delay slot. */
2811 if (dbr_sequence_length () == 0)
2812 fputs ("\n\tnop", file);
2813 return;
2814 case '*':
 2815	    /* Output a nullification completer if there's nothing for the
6d36483b 2816	       delay slot or nullification is requested. */
87ad11b0 2817 if (dbr_sequence_length () == 0 ||
2818 (final_sequence &&
2819 INSN_ANNULLED_BRANCH_P (XVECEXP (final_sequence, 0, 0))))
2820 fputs (",n", file);
2821 return;
2822 case 'R':
2823 /* Print out the second register name of a register pair.
2824 I.e., R (6) => 7. */
2825 fputs (reg_names[REGNO (x)+1], file);
2826 return;
2827 case 'r':
2828 /* A register or zero. */
891b55b4 2829 if (x == const0_rtx
2830 || (x == CONST0_RTX (DFmode))
2831 || (x == CONST0_RTX (SFmode)))
87ad11b0 2832 {
2833 fputs ("0", file);
2834 return;
2835 }
2836 else
2837 break;
c8975385 2838 case 'C': /* Plain (C)ondition */
87ad11b0 2839 case 'X':
2840 switch (GET_CODE (x))
6d36483b 2841 {
87ad11b0 2842 case EQ:
2843 fprintf (file, "="); break;
2844 case NE:
9d887bf4 2845 fprintf (file, "<>"); break;
87ad11b0 2846 case GT:
2847 fprintf (file, ">"); break;
2848 case GE:
2849 fprintf (file, ">="); break;
2850 case GEU:
2851 fprintf (file, ">>="); break;
2852 case GTU:
2853 fprintf (file, ">>"); break;
2854 case LT:
2855 fprintf (file, "<"); break;
2856 case LE:
2857 fprintf (file, "<="); break;
2858 case LEU:
2859 fprintf (file, "<<="); break;
2860 case LTU:
2861 fprintf (file, "<<"); break;
2862 default:
87ad11b0 2863 abort ();
2864 }
2865 return;
c8975385 2866 case 'N': /* Condition, (N)egated */
87ad11b0 2867 switch (GET_CODE (x))
2868 {
2869 case EQ:
9d887bf4 2870 fprintf (file, "<>"); break;
87ad11b0 2871 case NE:
2872 fprintf (file, "="); break;
2873 case GT:
2874 fprintf (file, "<="); break;
2875 case GE:
2876 fprintf (file, "<"); break;
2877 case GEU:
2878 fprintf (file, "<<"); break;
2879 case GTU:
2880 fprintf (file, "<<="); break;
2881 case LT:
2882 fprintf (file, ">="); break;
2883 case LE:
2884 fprintf (file, ">"); break;
2885 case LEU:
2886 fprintf (file, ">>"); break;
2887 case LTU:
2888 fprintf (file, ">>="); break;
2889 default:
87ad11b0 2890 abort ();
2891 }
2892 return;
61230bc9 2893 /* For floating point comparisons. Need special conditions to deal
2894 with NaNs properly. */
2895 case 'Y':
2896 switch (GET_CODE (x))
2897 {
2898 case EQ:
2899 fprintf (file, "!="); break;
2900 case NE:
2901 fprintf (file, "="); break;
2902 case GT:
2903 fprintf (file, "!>"); break;
2904 case GE:
2905 fprintf (file, "!>="); break;
2906 case LT:
2907 fprintf (file, "!<"); break;
2908 case LE:
2909 fprintf (file, "!<="); break;
2910 default:
61230bc9 2911 abort ();
2912 }
2913 return;
c8975385 2914 case 'S': /* Condition, operands are (S)wapped. */
2915 switch (GET_CODE (x))
2916 {
2917 case EQ:
2918 fprintf (file, "="); break;
2919 case NE:
2920 fprintf (file, "<>"); break;
2921 case GT:
2922 fprintf (file, "<"); break;
2923 case GE:
2924 fprintf (file, "<="); break;
2925 case GEU:
2926 fprintf (file, "<<="); break;
2927 case GTU:
2928 fprintf (file, "<<"); break;
2929 case LT:
2930 fprintf (file, ">"); break;
2931 case LE:
2932 fprintf (file, ">="); break;
2933 case LEU:
2934 fprintf (file, ">>="); break;
2935 case LTU:
2936 fprintf (file, ">>"); break;
2937 default:
c8975385 2938 abort ();
6d36483b 2939 }
c8975385 2940 return;
 2941	    case 'B':   /* Condition, (B)oth swapped and negated.  */
2942 switch (GET_CODE (x))
2943 {
2944 case EQ:
2945 fprintf (file, "<>"); break;
2946 case NE:
2947 fprintf (file, "="); break;
2948 case GT:
2949 fprintf (file, ">="); break;
2950 case GE:
2951 fprintf (file, ">"); break;
2952 case GEU:
2953 fprintf (file, ">>"); break;
2954 case GTU:
2955 fprintf (file, ">>="); break;
2956 case LT:
2957 fprintf (file, "<="); break;
2958 case LE:
2959 fprintf (file, "<"); break;
2960 case LEU:
2961 fprintf (file, "<<"); break;
2962 case LTU:
2963 fprintf (file, "<<="); break;
2964 default:
c8975385 2965 abort ();
6d36483b 2966 }
c8975385 2967 return;
2968 case 'k':
2969 if (GET_CODE (x) == CONST_INT)
2970 {
2971 fprintf (file, "%d", ~INTVAL (x));
2972 return;
2973 }
2974 abort();
e5965947 2975 case 'L':
2976 if (GET_CODE (x) == CONST_INT)
2977 {
2978 fprintf (file, "%d", 32 - (INTVAL (x) & 31));
2979 return;
2980 }
2981 abort();
3a16146d 2982 case 'O':
2983 if (GET_CODE (x) == CONST_INT && exact_log2 (INTVAL (x)) >= 0)
2984 {
2985 fprintf (file, "%d", exact_log2 (INTVAL (x)));
2986 return;
2987 }
2988 abort();
e5965947 2989 case 'P':
2990 if (GET_CODE (x) == CONST_INT)
2991 {
2992 fprintf (file, "%d", 31 - (INTVAL (x) & 31));
2993 return;
2994 }
2995 abort();
c8975385 2996 case 'I':
2997 if (GET_CODE (x) == CONST_INT)
2998 fputs ("i", file);
2999 return;
87ad11b0 3000 case 'M':
3001 switch (GET_CODE (XEXP (x, 0)))
3002 {
3003 case PRE_DEC:
3004 case PRE_INC:
3005 fprintf (file, "s,mb");
3006 break;
3007 case POST_DEC:
3008 case POST_INC:
3009 fprintf (file, "s,ma");
3010 break;
3011 default:
3012 break;
3013 }
3014 return;
3015 case 'F':
3016 switch (GET_CODE (XEXP (x, 0)))
3017 {
3018 case PRE_DEC:
3019 case PRE_INC:
3020 fprintf (file, ",mb");
3021 break;
3022 case POST_DEC:
3023 case POST_INC:
3024 fprintf (file, ",ma");
3025 break;
3026 default:
3027 break;
3028 }
3029 return;
3030 case 'G':
3031 output_global_address (file, x);
3032 return;
3033 case 0: /* Don't do anything special */
3034 break;
42faba01 3035 case 'Z':
3036 {
3037 unsigned op[3];
3038 compute_zdepi_operands (INTVAL (x), op);
3039 fprintf (file, "%d,%d,%d", op[0], op[1], op[2]);
3040 return;
3041 }
87ad11b0 3042 default:
3043 abort ();
3044 }
3045 if (GET_CODE (x) == REG)
df0651dc 3046 {
3047 if (FP_REG_P (x) && GET_MODE_SIZE (GET_MODE (x)) <= 4 && (REGNO (x) & 1) == 0)
3048 fprintf (file, "%sL", reg_names [REGNO (x)]);
3049 else
3050 fprintf (file, "%s", reg_names [REGNO (x)]);
3051 }
87ad11b0 3052 else if (GET_CODE (x) == MEM)
3053 {
3054 int size = GET_MODE_SIZE (GET_MODE (x));
3055 rtx base = XEXP (XEXP (x, 0), 0);
3056 switch (GET_CODE (XEXP (x, 0)))
3057 {
3058 case PRE_DEC:
3059 case POST_DEC:
3060 fprintf (file, "-%d(0,%s)", size, reg_names [REGNO (base)]);
3061 break;
3062 case PRE_INC:
3063 case POST_INC:
3064 fprintf (file, "%d(0,%s)", size, reg_names [REGNO (base)]);
3065 break;
3066 default:
3067 output_address (XEXP (x, 0));
3068 break;
3069 }
3070 }
3071 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
3072 {
3073 union { double d; int i[2]; } u;
3074 union { float f; int i; } u1;
3075 u.i[0] = XINT (x, 0); u.i[1] = XINT (x, 1);
3076 u1.f = u.d;
3077 if (code == 'f')
3078 fprintf (file, "0r%.9g", u1.f);
3079 else
3080 fprintf (file, "0x%x", u1.i);
3081 }
6c640a06 3082 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) != VOIDmode)
87ad11b0 3083 {
3084 union { double d; int i[2]; } u;
3085 u.i[0] = XINT (x, 0); u.i[1] = XINT (x, 1);
3086 fprintf (file, "0r%.20g", u.d);
3087 }
3088 else
3089 output_addr_const (file, x);
3090}
3091
3092/* output a SYMBOL_REF or a CONST expression involving a SYMBOL_REF. */
3093
3094void
3095output_global_address (file, x)
3096 FILE *file;
3097 rtx x;
3098{
2ee034bc 3099
3100 /* Imagine (high (const (plus ...))). */
3101 if (GET_CODE (x) == HIGH)
3102 x = XEXP (x, 0);
3103
87ad11b0 3104 if (GET_CODE (x) == SYMBOL_REF && read_only_operand (x))
3105 assemble_name (file, XSTR (x, 0));
b4a7bf10 3106 else if (GET_CODE (x) == SYMBOL_REF && !flag_pic)
87ad11b0 3107 {
3108 assemble_name (file, XSTR (x, 0));
3109 fprintf (file, "-$global$");
3110 }
3111 else if (GET_CODE (x) == CONST)
3112 {
3113 char *sep = "";
3114 int offset = 0; /* assembler wants -$global$ at end */
3115 rtx base;
6d36483b 3116
87ad11b0 3117 if (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)
3118 {
3119 base = XEXP (XEXP (x, 0), 0);
3120 output_addr_const (file, base);
3121 }
3122 else if (GET_CODE (XEXP (XEXP (x, 0), 0)) == CONST_INT)
3123 offset = INTVAL (XEXP (XEXP (x, 0), 0));
3124 else abort ();
3125
3126 if (GET_CODE (XEXP (XEXP (x, 0), 1)) == SYMBOL_REF)
3127 {
3128 base = XEXP (XEXP (x, 0), 1);
3129 output_addr_const (file, base);
3130 }
3131 else if (GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
3132 offset = INTVAL (XEXP (XEXP (x, 0),1));
3133 else abort ();
3134
3135 if (GET_CODE (XEXP (x, 0)) == PLUS)
3136 {
3137 if (offset < 0)
3138 {
3139 offset = -offset;
3140 sep = "-";
3141 }
3142 else
3143 sep = "+";
3144 }
3145 else if (GET_CODE (XEXP (x, 0)) == MINUS
3146 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF))
3147 sep = "-";
3148 else abort ();
3149
b4a7bf10 3150 if (!read_only_operand (base) && !flag_pic)
87ad11b0 3151 fprintf (file, "-$global$");
3152 fprintf (file, "%s", sep);
3153 if (offset) fprintf (file,"%d", offset);
3154 }
3155 else
3156 output_addr_const (file, x);
3157}
3158
87ad11b0 3159/* HP's millicode routines mean something special to the assembler.
3160 Keep track of which ones we have used. */
3161
3162enum millicodes { remI, remU, divI, divU, mulI, mulU, end1000 };
3163static char imported[(int)end1000];
3164static char *milli_names[] = {"remI", "remU", "divI", "divU", "mulI", "mulU"};
3165static char import_string[] = ".IMPORT $$....,MILLICODE";
3166#define MILLI_START 10
3167
57ed30e5 3168static void
87ad11b0 3169import_milli (code)
3170 enum millicodes code;
3171{
3172 char str[sizeof (import_string)];
6d36483b 3173
87ad11b0 3174 if (!imported[(int)code])
3175 {
3176 imported[(int)code] = 1;
3177 strcpy (str, import_string);
3178 strncpy (str + MILLI_START, milli_names[(int)code], 4);
3179 output_asm_insn (str, 0);
3180 }
3181}
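/* The first use of, say, the signed multiply millicode therefore emits

	.IMPORT $$mulI,MILLICODE

   and the static `imported' array guarantees this happens only once per
   compilation unit.  */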
3182
6d36483b 3183/* The register constraints have put the operands and return value in
87ad11b0 3184 the proper registers. */
3185
3186char *
d6686e21 3187output_mul_insn (unsignedp, insn)
87ad11b0 3188 int unsignedp;
d6686e21 3189 rtx insn;
87ad11b0 3190{
d6686e21 3191
87ad11b0 3192 if (unsignedp)
3193 {
3194 import_milli (mulU);
d6686e21 3195 return output_call (insn, gen_rtx (SYMBOL_REF, SImode, "$$mulU"),
3196 gen_rtx (REG, SImode, 31));
87ad11b0 3197 }
3198 else
3199 {
3200 import_milli (mulI);
d6686e21 3201 return output_call (insn, gen_rtx (SYMBOL_REF, SImode, "$$mulI"),
3202 gen_rtx (REG, SImode, 31));
87ad11b0 3203 }
3204}
3205
3206/* If operands isn't NULL, then it's a CONST_INT with which we can do
3207 something */
3208
3209
3210/* Emit the rtl for doing a division by a constant. */
3211
3212 /* Do magic division millicodes exist for this value? */
3213
3214static int magic_milli[]= {0, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 0,
3215 1, 1};
3216
6d36483b 3217/* We'll use an array to keep track of the magic millicodes and
87ad11b0 3218 whether or not we've used them already. [n][0] is signed, [n][1] is
3219 unsigned. */
3220
87ad11b0 3221static int div_milli[16][2];
3222
3223int
3224div_operand (op, mode)
3225 rtx op;
3226 enum machine_mode mode;
3227{
3228 return (mode == SImode
3229 && ((GET_CODE (op) == REG && REGNO (op) == 25)
3230 || (GET_CODE (op) == CONST_INT && INTVAL (op) > 0
3231 && INTVAL (op) < 16 && magic_milli[INTVAL (op)])));
3232}
3233
3234int
3235emit_hpdiv_const (operands, unsignedp)
3236 rtx *operands;
3237 int unsignedp;
3238{
3239 if (GET_CODE (operands[2]) == CONST_INT
3240 && INTVAL (operands[2]) > 0
3241 && INTVAL (operands[2]) < 16
3242 && magic_milli[INTVAL (operands[2])])
3243 {
3244 emit_move_insn ( gen_rtx (REG, SImode, 26), operands[1]);
3245 emit
3246 (gen_rtx
3247 (PARALLEL, VOIDmode,
3248 gen_rtvec (5, gen_rtx (SET, VOIDmode, gen_rtx (REG, SImode, 29),
3249 gen_rtx (unsignedp ? UDIV : DIV, SImode,
3250 gen_rtx (REG, SImode, 26),
3251 operands[2])),
33bd7237 3252 gen_rtx (CLOBBER, VOIDmode, operands[3]),
87ad11b0 3253 gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, SImode, 26)),
3254 gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, SImode, 25)),
3255 gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, SImode, 31)))));
3256 emit_move_insn (operands[0], gen_rtx (REG, SImode, 29));
3257 return 1;
3258 }
3259 return 0;
3260}
3261
3262char *
d6686e21 3263output_div_insn (operands, unsignedp, insn)
87ad11b0 3264 rtx *operands;
3265 int unsignedp;
d6686e21 3266 rtx insn;
87ad11b0 3267{
3268 int divisor;
6d36483b 3269
3270 /* If the divisor is a constant, try to use one of the special
87ad11b0 3271	     opcodes. */
3272 if (GET_CODE (operands[0]) == CONST_INT)
3273 {
d6686e21 3274 static char buf[100];
87ad11b0 3275 divisor = INTVAL (operands[0]);
3276 if (!div_milli[divisor][unsignedp])
3277 {
d6686e21 3278 div_milli[divisor][unsignedp] = 1;
87ad11b0 3279 if (unsignedp)
3280 output_asm_insn (".IMPORT $$divU_%0,MILLICODE", operands);
3281 else
3282 output_asm_insn (".IMPORT $$divI_%0,MILLICODE", operands);
87ad11b0 3283 }
3284 if (unsignedp)
d6686e21 3285 {
3286 sprintf (buf, "$$divU_%d", INTVAL (operands[0]));
3287 return output_call (insn, gen_rtx (SYMBOL_REF, SImode, buf),
3288 gen_rtx (REG, SImode, 31));
3289 }
3290 else
3291 {
3292 sprintf (buf, "$$divI_%d", INTVAL (operands[0]));
3293 return output_call (insn, gen_rtx (SYMBOL_REF, SImode, buf),
3294 gen_rtx (REG, SImode, 31));
3295 }
87ad11b0 3296 }
3297 /* Divisor isn't a special constant. */
3298 else
3299 {
3300 if (unsignedp)
3301 {
3302 import_milli (divU);
d6686e21 3303 return output_call (insn, gen_rtx (SYMBOL_REF, SImode, "$$divU"),
3304 gen_rtx (REG, SImode, 31));
87ad11b0 3305 }
3306 else
3307 {
3308 import_milli (divI);
d6686e21 3309 return output_call (insn, gen_rtx (SYMBOL_REF, SImode, "$$divI"),
3310 gen_rtx (REG, SImode, 31));
87ad11b0 3311 }
3312 }
3313}
3314
3315/* Output a $$rem millicode to do mod. */
3316
3317char *
d6686e21 3318output_mod_insn (unsignedp, insn)
87ad11b0 3319 int unsignedp;
d6686e21 3320 rtx insn;
87ad11b0 3321{
3322 if (unsignedp)
3323 {
3324 import_milli (remU);
d6686e21 3325 return output_call (insn, gen_rtx (SYMBOL_REF, SImode, "$$remU"),
3326 gen_rtx (REG, SImode, 31));
87ad11b0 3327 }
3328 else
3329 {
3330 import_milli (remI);
d6686e21 3331 return output_call (insn, gen_rtx (SYMBOL_REF, SImode, "$$remI"),
3332 gen_rtx (REG, SImode, 31));
87ad11b0 3333 }
3334}
3335
3336void
df0651dc 3337output_arg_descriptor (call_insn)
3338 rtx call_insn;
87ad11b0 3339{
3340 char *arg_regs[4];
3341 enum machine_mode arg_mode;
df0651dc 3342 rtx link;
87ad11b0 3343 int i, output_flag = 0;
3344 int regno;
6d36483b 3345
87ad11b0 3346 for (i = 0; i < 4; i++)
3347 arg_regs[i] = 0;
3348
738176ab 3349 /* Specify explicitly that no argument relocations should take place
3350 if using the portable runtime calling conventions. */
3351 if (TARGET_PORTABLE_RUNTIME)
3352 {
3353 fprintf (asm_out_file,
3354 "\t.CALL ARGW0=NO,ARGW1=NO,ARGW2=NO,ARGW3=NO,RETVAL=NO\n");
3355 return;
3356 }
3357
df0651dc 3358 if (GET_CODE (call_insn) != CALL_INSN)
3359 abort ();
3360 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); link; link = XEXP (link, 1))
87ad11b0 3361 {
df0651dc 3362 rtx use = XEXP (link, 0);
c12afafd 3363
df0651dc 3364 if (! (GET_CODE (use) == USE
3365 && GET_CODE (XEXP (use, 0)) == REG
3366 && FUNCTION_ARG_REGNO_P (REGNO (XEXP (use, 0)))))
c12afafd 3367 continue;
3368
df0651dc 3369 arg_mode = GET_MODE (XEXP (use, 0));
3370 regno = REGNO (XEXP (use, 0));
87ad11b0 3371 if (regno >= 23 && regno <= 26)
372ef038 3372 {
3373 arg_regs[26 - regno] = "GR";
3374 if (arg_mode == DImode)
3375 arg_regs[25 - regno] = "GR";
3376 }
df0651dc 3377 else if (regno >= 32 && regno <= 39)
87ad11b0 3378 {
3379 if (arg_mode == SFmode)
df0651dc 3380 arg_regs[(regno - 32) / 2] = "FR";
e6ba640e 3381 else
87ad11b0 3382 {
eeec72c0 3383#ifndef HP_FP_ARG_DESCRIPTOR_REVERSED
df0651dc 3384 arg_regs[(regno - 34) / 2] = "FR";
3385 arg_regs[(regno - 34) / 2 + 1] = "FU";
87ad11b0 3386#else
df0651dc 3387 arg_regs[(regno - 34) / 2] = "FU";
3388 arg_regs[(regno - 34) / 2 + 1] = "FR";
87ad11b0 3389#endif
3390 }
87ad11b0 3391 }
3392 }
3393 fputs ("\t.CALL ", asm_out_file);
3394 for (i = 0; i < 4; i++)
3395 {
3396 if (arg_regs[i])
3397 {
3398 if (output_flag++)
3399 fputc (',', asm_out_file);
3400 fprintf (asm_out_file, "ARGW%d=%s", i, arg_regs[i]);
3401 }
3402 }
3403 fputc ('\n', asm_out_file);
3404}
3405\f
d2c1d63d 3406 /* Memory loads/stores to/from the shift register need to go through
 3407    the general registers. */
87ad11b0 3408
3409enum reg_class
3410secondary_reload_class (class, mode, in)
3411 enum reg_class class;
3412 enum machine_mode mode;
3413 rtx in;
3414{
3415 int regno = true_regnum (in);
3416
b4a7bf10 3417 /* Trying to load a constant into a FP register during PIC code
3418 generation will require %r1 as a scratch register. */
3419 if (flag_pic == 2
3420 && GET_MODE_CLASS (mode) == MODE_INT
3421 && FP_REG_CLASS_P (class)
3422 && (GET_CODE (in) == CONST_INT || GET_CODE (in) == CONST_DOUBLE))
3423 return R1_REGS;
3424
d2498717 3425 if (((regno >= FIRST_PSEUDO_REGISTER || regno == -1)
6d36483b 3426 && GET_MODE_CLASS (mode) == MODE_INT
3427 && FP_REG_CLASS_P (class))
d6f01525 3428 || (class == SHIFT_REGS && (regno <= 0 || regno >= 32)))
9c6d4825 3429 return GENERAL_REGS;
d2c1d63d 3430
2ee034bc 3431 if (GET_CODE (in) == HIGH)
3432 in = XEXP (in, 0);
3433
b4a7bf10 3434 if (!flag_pic
3435 && symbolic_operand (in, VOIDmode)
3436 && read_only_operand (in))
3437 return NO_REGS;
3438
63882853 3439 if (class != R1_REGS && symbolic_operand (in, VOIDmode))
2ee034bc 3440 return R1_REGS;
3441
6b1c36c2 3442 if (GET_CODE (in) == SUBREG)
3443 in = SUBREG_REG (in);
3444
3445 if (FP_REG_CLASS_P (class)
3446 && GET_CODE (in) == MEM
3447 && !memory_address_p (DFmode, XEXP (in, 0))
3448 && memory_address_p (SImode, XEXP (in, 0)))
3449 return GENERAL_REGS;
3450
d2c1d63d 3451 return NO_REGS;
87ad11b0 3452}
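/* Roughly: an integer constant headed for a floating point register
   under -fPIC reloads through %r1, integer values moving between memory
   and the FP (or shift) register classes reload through a general
   register, and most symbolic addresses reload through %r1 so the
   high/low address split can be formed there.  */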
3453
3454enum direction
3455function_arg_padding (mode, type)
3456 enum machine_mode mode;
3457 tree type;
3458{
3459 int size;
3460
3461 if (mode == BLKmode)
3462 {
3463 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
3464 size = int_size_in_bytes (type) * BITS_PER_UNIT;
3465 else
 3466	return upward;		/* Don't know if this is right, but
 3467				   same as old definition. */
3468 }
3469 else
3470 size = GET_MODE_BITSIZE (mode);
3471 if (size < PARM_BOUNDARY)
3472 return downward;
3473 else if (size % PARM_BOUNDARY)
3474 return upward;
3475 else
3476 return none;
3477}
3478
87ad11b0 3479\f
3480/* Do what is necessary for `va_start'. The argument is ignored;
 3481	 we look at the current function to determine if stdargs or varargs
3482 is used and fill in an initial va_list. A pointer to this constructor
3483 is returned. */
3484
3485struct rtx_def *
3486hppa_builtin_saveregs (arglist)
3487 tree arglist;
3488{
57ed30e5 3489 rtx offset;
87ad11b0 3490 tree fntype = TREE_TYPE (current_function_decl);
3491 int argadj = ((!(TYPE_ARG_TYPES (fntype) != 0
3492 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3493 != void_type_node)))
3494 ? UNITS_PER_WORD : 0);
3495
3496 if (argadj)
3497 offset = plus_constant (current_function_arg_offset_rtx, argadj);
3498 else
3499 offset = current_function_arg_offset_rtx;
9c6d4825 3500
87ad11b0 3501 /* Store general registers on the stack. */
3502 move_block_from_reg (23,
3503 gen_rtx (MEM, BLKmode,
3504 plus_constant
3505 (current_function_internal_arg_pointer, -16)),
6d36483b 3506 4, 4 * UNITS_PER_WORD);
9c6d4825 3507 return copy_to_reg (expand_binop (Pmode, add_optab,
3508 current_function_internal_arg_pointer,
3509 offset, 0, 0, OPTAB_LIB_WIDEN));
87ad11b0 3510}
d6f01525 3511
6d36483b 3512/* This routine handles all the normal conditional branch sequences we
3513 might need to generate. It handles compare immediate vs compare
3514 register, nullification of delay slots, varying length branches,
0d986529 3515 negated branches, and all combinations of the above. It returns the
6d36483b 3516 output appropriate to emit the branch corresponding to all given
0d986529 3517 parameters. */
3518
3519char *
3520output_cbranch (operands, nullify, length, negated, insn)
3521 rtx *operands;
3522 int nullify, length, negated;
3523 rtx insn;
29a4502c 3524{
0d986529 3525 static char buf[100];
3526 int useskip = 0;
3527
29a4502c 3528 /* A conditional branch to the following instruction (eg the delay slot) is
3529 asking for a disaster. This can happen when not optimizing.
3530
3531 In such cases it is safe to emit nothing. */
3532
3533 if (JUMP_LABEL (insn) == next_nonnote_insn (insn))
3534 return "";
6d36483b 3535
5fbd5940 3536 /* If this is a long branch with its delay slot unfilled, set `nullify'
3537 as it can nullify the delay slot and save a nop. */
5a1231ef 3538 if (length == 8 && dbr_sequence_length () == 0)
5fbd5940 3539 nullify = 1;
3540
3541 /* If this is a short forward conditional branch which did not get
3542 its delay slot filled, the delay slot can still be nullified. */
5a1231ef 3543 if (! nullify && length == 4 && dbr_sequence_length () == 0)
5fbd5940 3544 nullify = forward_branch_p (insn);
3545
6d36483b 3546 /* A forward branch over a single nullified insn can be done with a
0d986529 3547 comclr instruction. This avoids a single cycle penalty due to
3548 mis-predicted branch if we fall through (branch not taken). */
5a1231ef 3549 if (length == 4
5fbd5940 3550 && next_real_insn (insn) != 0
5a1231ef 3551 && get_attr_length (next_real_insn (insn)) == 4
5fbd5940 3552 && JUMP_LABEL (insn) == next_nonnote_insn (next_real_insn (insn))
0d986529 3553 && nullify)
3554 useskip = 1;
3555
3556 switch (length)
3557 {
5fbd5940 3558 /* All short conditional branches except backwards with an unfilled
3559 delay slot. */
5a1231ef 3560 case 4:
0d986529 3561 if (useskip)
3562 strcpy (buf, "com%I2clr,");
3563 else
3564 strcpy (buf, "com%I2b,");
3565 if (negated)
3566 strcat (buf, "%B3");
3567 else
3568 strcat (buf, "%S3");
3569 if (useskip)
3570 strcat (buf, " %2,%1,0");
3571 else if (nullify)
3572 strcat (buf, ",n %2,%1,%0");
6d36483b 3573 else
5fbd5940 3574 strcat (buf, " %2,%1,%0");
0d986529 3575 break;
3576
6d36483b 3577	 /* All long conditionals.  Note a short backward branch with an
5fbd5940 3578 unfilled delay slot is treated just like a long backward branch
3579 with an unfilled delay slot. */
5a1231ef 3580 case 8:
5fbd5940 3581 /* Handle weird backwards branch with a filled delay slot
 3582	 which is nullified. */
3583 if (dbr_sequence_length () != 0
3584 && ! forward_branch_p (insn)
3585 && nullify)
3586 {
3587 strcpy (buf, "com%I2b,");
3588 if (negated)
3589 strcat (buf, "%S3");
3590 else
3591 strcat (buf, "%B3");
3592 strcat (buf, ",n %2,%1,.+12\n\tbl %0,0");
3593 }
43f0c1f2 3594 /* Handle short backwards branch with an unfilled delay slot.
3595 Using a comb;nop rather than comiclr;bl saves 1 cycle for both
3596 taken and untaken branches. */
3597 else if (dbr_sequence_length () == 0
3598 && ! forward_branch_p (insn)
3599 && insn_addresses
3600 && VAL_14_BITS_P (insn_addresses[INSN_UID (JUMP_LABEL (insn))]
3601 - insn_addresses[INSN_UID (insn)]))
3602 {
3603 strcpy (buf, "com%I2b,");
3604 if (negated)
3605 strcat (buf, "%B3 %2,%1,%0%#");
3606 else
3607 strcat (buf, "%S3 %2,%1,%0%#");
3608 }
0d986529 3609 else
5fbd5940 3610 {
3611 strcpy (buf, "com%I2clr,");
3612 if (negated)
3613 strcat (buf, "%S3");
3614 else
3615 strcat (buf, "%B3");
3616 if (nullify)
3617 strcat (buf, " %2,%1,0\n\tbl,n %0,0");
3618 else
3619 strcat (buf, " %2,%1,0\n\tbl %0,0");
3620 }
0d986529 3621 break;
3622
3623 default:
3624 abort();
5fbd5940 3625 }
0d986529 3626 return buf;
3627}
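/* Typical templates returned here (register numbers and label invented
   for illustration): the common short form is

	comb,= %r5,%r4,L$0010

   ("comib,..." when operand 2 is an immediate), and the 8-byte form is
   usually a comclr whose condition nullifies an unconditional "bl" to
   the distant target.  */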
3628
6d36483b 3629/* This routine handles all the branch-on-bit conditional branch sequences we
0d986529 3630 might need to generate. It handles nullification of delay slots,
3631 varying length branches, negated branches and all combinations of the
 3632	 above.  It returns the appropriate output template to emit the branch. */
3633
3634char *
3635output_bb (operands, nullify, length, negated, insn, which)
3636 rtx *operands;
3637 int nullify, length, negated;
3638 rtx insn;
3639 int which;
29a4502c 3640{
0d986529 3641 static char buf[100];
3642 int useskip = 0;
3643
29a4502c 3644 /* A conditional branch to the following instruction (eg the delay slot) is
3645 asking for a disaster. I do not think this can happen as this pattern
6d36483b 3646 is only used when optimizing; jump optimization should eliminate the
29a4502c 3647 jump. But be prepared just in case. */
6d36483b 3648
29a4502c 3649 if (JUMP_LABEL (insn) == next_nonnote_insn (insn))
3650 return "";
6d36483b 3651
5fbd5940 3652 /* If this is a long branch with its delay slot unfilled, set `nullify'
3653 as it can nullify the delay slot and save a nop. */
5a1231ef 3654 if (length == 8 && dbr_sequence_length () == 0)
5fbd5940 3655 nullify = 1;
3656
3657 /* If this is a short forward conditional branch which did not get
3658 its delay slot filled, the delay slot can still be nullified. */
5a1231ef 3659 if (! nullify && length == 4 && dbr_sequence_length () == 0)
5fbd5940 3660 nullify = forward_branch_p (insn);
3661
6d36483b 3662	 /* A forward branch over a single nullified insn can be done with an
0d986529 3663 extrs instruction. This avoids a single cycle penalty due to
3664 mis-predicted branch if we fall through (branch not taken). */
3665
5a1231ef 3666 if (length == 4
5fbd5940 3667 && next_real_insn (insn) != 0
5a1231ef 3668 && get_attr_length (next_real_insn (insn)) == 4
5fbd5940 3669 && JUMP_LABEL (insn) == next_nonnote_insn (next_real_insn (insn))
0d986529 3670 && nullify)
3671 useskip = 1;
3672
3673 switch (length)
3674 {
3675
5fbd5940 3676 /* All short conditional branches except backwards with an unfilled
3677 delay slot. */
5a1231ef 3678 case 4:
0d986529 3679 if (useskip)
3680 strcpy (buf, "extrs,");
6d36483b 3681 else
0d986529 3682 strcpy (buf, "bb,");
3683 if ((which == 0 && negated)
3684 || (which == 1 && ! negated))
3685 strcat (buf, ">=");
3686 else
3687 strcat (buf, "<");
3688 if (useskip)
3689 strcat (buf, " %0,%1,1,0");
3690 else if (nullify && negated)
3691 strcat (buf, ",n %0,%1,%3");
3692 else if (nullify && ! negated)
3693 strcat (buf, ",n %0,%1,%2");
3694 else if (! nullify && negated)
5fbd5940 3695 strcat (buf, " %0,%1,%3");
0d986529 3696 else if (! nullify && ! negated)
5fbd5940 3697 strcat (buf, " %0,%1,%2");
0d986529 3698 break;
3699
6d36483b 3700 /* All long conditionals. Note a short backward branch with an
5fbd5940 3701 unfilled delay slot is treated just like a long backward branch
3702 with an unfilled delay slot. */
5a1231ef 3703 case 8:
5fbd5940 3704 /* Handle weird backwards branch with a filled delay slot
3705 which is nullified. */
3706 if (dbr_sequence_length () != 0
3707 && ! forward_branch_p (insn)
3708 && nullify)
3709 {
3710 strcpy (buf, "bb,");
3711 if ((which == 0 && negated)
3712 || (which == 1 && ! negated))
3713 strcat (buf, "<");
3714 else
3715 strcat (buf, ">=");
3716 if (negated)
3717 strcat (buf, " %0,%1,.+12\n\tbl %3,0");
3718 else
3719 strcat (buf, " %0,%1,.+12\n\tbl %2,0");
3720 }
43f0c1f2 3721 /* Handle short backwards branch with an unfilled delay slot.
3722 Using a bb;nop rather than extrs;bl saves 1 cycle for both
3723 taken and untaken branches. */
3724 else if (dbr_sequence_length () == 0
3725 && ! forward_branch_p (insn)
3726 && insn_addresses
3727 && VAL_14_BITS_P (insn_addresses[INSN_UID (JUMP_LABEL (insn))]
3728 - insn_addresses[INSN_UID (insn)]))
3729 {
3730 strcpy (buf, "bb,");
3731 if ((which == 0 && negated)
3732 || (which == 1 && ! negated))
3733 strcat (buf, ">=");
3734 else
3735 strcat (buf, "<");
3736 if (negated)
3737 strcat (buf, " %0,%1,%3%#");
3738 else
3739 strcat (buf, " %0,%1,%2%#");
3740 }
0d986529 3741 else
5fbd5940 3742 {
3743 strcpy (buf, "extrs,");
3744 if ((which == 0 && negated)
3745 || (which == 1 && ! negated))
3746 strcat (buf, "<");
3747 else
3748 strcat (buf, ">=");
3749 if (nullify && negated)
3750 strcat (buf, " %0,%1,1,0\n\tbl,n %3,0");
3751 else if (nullify && ! negated)
3752 strcat (buf, " %0,%1,1,0\n\tbl,n %2,0");
3753 else if (negated)
3754 strcat (buf, " %0,%1,1,0\n\tbl %3,0");
6d36483b 3755 else
5fbd5940 3756 strcat (buf, " %0,%1,1,0\n\tbl %2,0");
3757 }
0d986529 3758 break;
3759
3760 default:
3761 abort();
5fbd5940 3762 }
0d986529 3763 return buf;
3764}
3765
29a4502c 3766/* Return the output template for emitting a dbra type insn.
3767
3768 Note it may perform some output operations on its own before
3769 returning the final output string. */
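/* Briefly: alternative 0 is the usual case with the loop counter in a
   general register (addib, or addi plus a separate branch); alternative 1
   has the counter in a floating point register and bounces it through a
   scratch memory slot to increment it; alternative 2 keeps the counter in
   memory and updates it from the branch's delay slot.  */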
3770char *
3771output_dbra (operands, insn, which_alternative)
3772 rtx *operands;
3773 rtx insn;
3774 int which_alternative;
3775{
3776
3777 /* A conditional branch to the following instruction (e.g. the delay slot) is
3778 asking for a disaster. Be prepared! */
3779
3780 if (JUMP_LABEL (insn) == next_nonnote_insn (insn))
3781 {
3782 if (which_alternative == 0)
3783 return "ldo %1(%0),%0";
3784 else if (which_alternative == 1)
3785 {
3786 output_asm_insn ("fstws %0,-16(0,%%r30)",operands);
3787 output_asm_insn ("ldw -16(0,%%r30),%4",operands);
3788 output_asm_insn ("ldo %1(%4),%4\n\tstw %4,-16(0,%%r30)", operands);
3789 return "fldws -16(0,%%r30),%0";
3790 }
3791 else
3792 {
3793 output_asm_insn ("ldw %0,%4", operands);
3794 return "ldo %1(%4),%4\n\tstw %4,%0";
3795 }
3796 }
3797
3798 if (which_alternative == 0)
3799 {
3800 int nullify = INSN_ANNULLED_BRANCH_P (insn);
3801 int length = get_attr_length (insn);
3802
3803 /* If this is a long branch with its delay slot unfilled, set `nullify'
3804 as it can nullify the delay slot and save a nop. */
5a1231ef 3805 if (length == 8 && dbr_sequence_length () == 0)
29a4502c 3806 nullify = 1;
3807
3808 /* If this is a short forward conditional branch which did not get
3809 its delay slot filled, the delay slot can still be nullified. */
5a1231ef 3810 if (! nullify && length == 4 && dbr_sequence_length () == 0)
29a4502c 3811 nullify = forward_branch_p (insn);
3812
3813 /* Handle short versions first. */
5a1231ef 3814 if (length == 4 && nullify)
29a4502c 3815 return "addib,%C2,n %1,%0,%3";
5a1231ef 3816 else if (length == 4 && ! nullify)
29a4502c 3817 return "addib,%C2 %1,%0,%3";
5a1231ef 3818 else if (length == 8)
29a4502c 3819 {
6d36483b 3820 /* Handle weird backwards branch with a filled delay slot
29a4502c 3821 which is nullified. */
3822 if (dbr_sequence_length () != 0
3823 && ! forward_branch_p (insn)
3824 && nullify)
3825 return "addib,%N2,n %1,%0,.+12\n\tbl %3,0";
43f0c1f2 3826 /* Handle short backwards branch with an unfilled delay slot.
3827 Using an addb;nop rather than addi;bl saves 1 cycle for both
3828 taken and untaken branches. */
3829 else if (dbr_sequence_length () == 0
3830 && ! forward_branch_p (insn)
3831 && insn_addresses
3832 && VAL_14_BITS_P (insn_addresses[INSN_UID (JUMP_LABEL (insn))]
3833 - insn_addresses[INSN_UID (insn)]))
3834 return "addib,%C2 %1,%0,%3%#";
6d36483b 3835
3836 /* Handle normal cases. */
29a4502c 3837 if (nullify)
3838 return "addi,%N2 %1,%0,%0\n\tbl,n %3,0";
3839 else
3840 return "addi,%N2 %1,%0,%0\n\tbl %3,0";
3841 }
3842 else
3843 abort();
3844 }
3845 /* Deal with gross reload from FP register case. */
3846 else if (which_alternative == 1)
3847 {
3848 /* Move loop counter from FP register to MEM then into a GR,
3849 increment the GR, store the GR into MEM, and finally reload
6d36483b 3850 the FP register from MEM from within the branch's delay slot. */
29a4502c 3851 output_asm_insn ("fstws %0,-16(0,%%r30)\n\tldw -16(0,%%r30),%4",operands);
3852 output_asm_insn ("ldo %1(%4),%4\n\tstw %4,-16(0,%%r30)", operands);
5a1231ef 3853 if (get_attr_length (insn) == 24)
29a4502c 3854 return "comb,%S2 0,%4,%3\n\tfldws -16(0,%%r30),%0";
3855 else
3856 return "comclr,%B2 0,%4,0\n\tbl %3,0\n\tfldws -16(0,%%r30),%0";
3857 }
3858 /* Deal with gross reload from memory case. */
3859 else
3860 {
3861 /* Reload the loop counter from memory; the store back to memory
3862 happens in the branch's delay slot. */
3863 output_asm_insn ("ldw %0,%4", operands);
5a1231ef 3864 if (get_attr_length (insn) == 12)
29a4502c 3865 return "addib,%C2 %1,%4,%3\n\tstw %4,%0";
3866 else
b42d4c50 3867 return "addi,%N2 %1,%4,%4\n\tbl %3,0\n\tstw %4,%0";
29a4502c 3868 }
3869}
3870
3871/* Return the output template for emitting a dbra type insn.
3872
3873 Note it may perform some output operations on its own before
3874 returning the final output string. */
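/* Analogous to output_dbra above: alternative 0 keeps everything in general
   registers (movb, or an "or" plus a separate branch); alternative 1 moves
   the value into a floating point register through a scratch memory slot;
   alternative 2 stores it straight to memory from the branch's delay slot.  */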
3875char *
3876output_movb (operands, insn, which_alternative, reverse_comparison)
3877 rtx *operands;
3878 rtx insn;
3879 int which_alternative;
3880 int reverse_comparison;
3881{
3882
3883 /* A conditional branch to the following instruction (e.g. the delay slot) is
3884 asking for a disaster. Be prepared! */
3885
3886 if (JUMP_LABEL (insn) == next_nonnote_insn (insn))
3887 {
3888 if (which_alternative == 0)
3889 return "copy %1,%0";
3890 else if (which_alternative == 1)
3891 {
b4437664 3892 output_asm_insn ("stw %1,-16(0,%%r30)",operands);
29a4502c 3893 return "fldws -16(0,%%r30),%0";
3894 }
3895 else
3896 return "stw %1,%0";
3897 }
3898
3899 /* Support the second variant. */
3900 if (reverse_comparison)
3901 PUT_CODE (operands[2], reverse_condition (GET_CODE (operands[2])));
3902
3903 if (which_alternative == 0)
3904 {
3905 int nullify = INSN_ANNULLED_BRANCH_P (insn);
3906 int length = get_attr_length (insn);
3907
3908 /* If this is a long branch with its delay slot unfilled, set `nullify'
3909 as it can nullify the delay slot and save a nop. */
5a1231ef 3910 if (length == 8 && dbr_sequence_length () == 0)
29a4502c 3911 nullify = 1;
3912
3913 /* If this is a short forward conditional branch which did not get
3914 its delay slot filled, the delay slot can still be nullified. */
5a1231ef 3915 if (! nullify && length == 4 && dbr_sequence_length () == 0)
29a4502c 3916 nullify = forward_branch_p (insn);
3917
3918 /* Handle short versions first. */
5a1231ef 3919 if (length == 4 && nullify)
29a4502c 3920 return "movb,%C2,n %1,%0,%3";
5a1231ef 3921 else if (length == 4 && ! nullify)
29a4502c 3922 return "movb,%C2 %1,%0,%3";
5a1231ef 3923 else if (length == 8)
29a4502c 3924 {
6d36483b 3925 /* Handle weird backwards branch with a filled delay slot
29a4502c 3926 which is nullified. */
3927 if (dbr_sequence_length () != 0
3928 && ! forward_branch_p (insn)
3929 && nullify)
eb4a3ec3 3930 return "movb,%N2,n %1,%0,.+12\n\tbl %3,0";
6d36483b 3931
43f0c1f2 3932 /* Handle short backwards branch with an unfilled delay slot.
3933 Using a movb;nop rather than or;bl saves 1 cycle for both
3934 taken and untaken branches. */
3935 else if (dbr_sequence_length () == 0
3936 && ! forward_branch_p (insn)
3937 && insn_addresses
3938 && VAL_14_BITS_P (insn_addresses[INSN_UID (JUMP_LABEL (insn))]
3939 - insn_addresses[INSN_UID (insn)]))
3940 return "movb,%C2 %1,%0,%3%#";
6d36483b 3941 /* Handle normal cases. */
29a4502c 3942 if (nullify)
3943 return "or,%N2 %1,%%r0,%0\n\tbl,n %3,0";
3944 else
3945 return "or,%N2 %1,%%r0,%0\n\tbl %3,0";
3946 }
3947 else
3948 abort();
3949 }
3950 /* Deal with gross reload from FP register case. */
3951 else if (which_alternative == 1)
3952 {
3953 /* Store the source value into MEM, and then reload the FP
3954 register destination from MEM from within the branch's
6d36483b 3955 delay slot. */
b4437664 3956 output_asm_insn ("stw %1,-16(0,%%r30)",operands);
5a1231ef 3957 if (get_attr_length (insn) == 12)
29a4502c 3958 return "comb,%S2 0,%1,%3\n\tfldws -16(0,%%r30),%0";
3959 else
3960 return "comclr,%B2 0,%1,0\n\tbl %3,0\n\tfldws -16(0,%%r30),%0";
3961 }
3962 /* Deal with gross reload from memory case. */
3963 else
3964 {
3965 /* The store of the source register to memory happens in the
3966 branch's delay slot. */
5a1231ef 3967 if (get_attr_length (insn) == 8)
29a4502c 3968 return "comb,%S2 0,%1,%3\n\tstw %1,%0";
3969 else
3970 return "comclr,%B2 0,%1,0\n\tbl %3,0\n\tstw %1,%0";
3971 }
3972}
3973
3974
d6686e21 3975/* INSN is either a function call or a millicode call. It may have an
6d36483b 3976 unconditional jump in its delay slot.
d6686e21 3977
3978 CALL_DEST is the routine we are calling.
3979
3980 RETURN_POINTER is the register which will hold the return address,
3683f840 3981 %r2 for most calls, %r31 for millicode calls.
3982
06ddb6f8 3983 When TARGET_MILLICODE_LONG_CALLS is true, then we have to assume
3984 that two-instruction sequences must be used to reach the millicode
3985 routines (including dyncall!). */
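/* In outline: long millicode calls load the target address into %r29 and
   branch through it; a call known to reach its target is emitted as a single
   "bl"; a call that may not reach the target's $CODE$ subspace is rewritten
   as an indirect call through $$dyncall, after copying any FP arguments into
   the matching general registers; and a call with an unconditional jump in
   its delay slot either branches to the jump's target after the call or,
   when that target is close enough, adjusts the return pointer from the
   delay slot with an "ldo" so the call returns straight to the jump's
   target.  */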
3683f840 3986
d6686e21 3987char *
3988output_call (insn, call_dest, return_pointer)
3989 rtx insn;
3990 rtx call_dest;
3991 rtx return_pointer;
3992
3993{
3994 int distance;
3995 rtx xoperands[4];
3996 rtx seq_insn;
3997
06ddb6f8 3998 /* Handle long millicode calls for mod, div, and mul. */
3999 if (TARGET_PORTABLE_RUNTIME
4000 || (TARGET_MILLICODE_LONG_CALLS && REGNO (return_pointer) == 31))
4001 {
4002 xoperands[0] = call_dest;
4003 xoperands[1] = return_pointer;
4004 output_asm_insn ("ldil L%%%0,%%r29", xoperands);
4005 output_asm_insn ("ldo R%%%0(%%r29),%%r29", xoperands);
4006 output_asm_insn ("blr 0,%r1\n\tbv,n 0(%%r29)\n\tnop", xoperands);
4007 return "";
4008 }
4009
4010 /* Handle common case -- empty delay slot or no jump in the delay slot,
4011 and we're sure that the branch will reach the beginning of the $CODE$
4012 subspace. */
4013 if ((dbr_sequence_length () == 0
4014 && get_attr_length (insn) == 8)
6d36483b 4015 || (dbr_sequence_length () != 0
06ddb6f8 4016 && GET_CODE (NEXT_INSN (insn)) != JUMP_INSN
4017 && get_attr_length (insn) == 4))
d6686e21 4018 {
4019 xoperands[0] = call_dest;
4020 xoperands[1] = return_pointer;
06ddb6f8 4021 output_asm_insn ("bl %0,%r1%#", xoperands);
4022 return "";
4023 }
4024
4025 /* This call may not reach the beginning of the $CODE$ subspace. */
4026 if (get_attr_length (insn) > 8)
4027 {
4028 int delay_insn_deleted = 0;
4029 rtx xoperands[2];
4030 rtx link;
4031
4032 /* We need to emit an inline long-call branch. Furthermore,
4033 because we're changing a named function call into an indirect
4034 function call well after the parameters have been set up, we
4035 need to make sure any FP args appear in both the integer
4036 and FP registers. Also, we need to move any delay slot insn
4037 out of the delay slot -- Yuk! */
4038 if (dbr_sequence_length () != 0
4039 && GET_CODE (NEXT_INSN (insn)) != JUMP_INSN)
3683f840 4040 {
06ddb6f8 4041 /* A non-jump insn in the delay slot. By definition we can
4042 emit this insn before the call (and in fact before argument
4043 relocating). */
4044 final_scan_insn (NEXT_INSN (insn), asm_out_file, optimize, 0, 0);
4045
4046 /* Now delete the delay insn. */
4047 PUT_CODE (NEXT_INSN (insn), NOTE);
4048 NOTE_LINE_NUMBER (NEXT_INSN (insn)) = NOTE_INSN_DELETED;
4049 NOTE_SOURCE_FILE (NEXT_INSN (insn)) = 0;
4050 delay_insn_deleted = 1;
4051 }
4052
4053 /* Now copy any FP arguments into integer registers. */
4054 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
4055 {
4056 int arg_mode, regno;
4057 rtx use = XEXP (link, 0);
4058 if (! (GET_CODE (use) == USE
4059 && GET_CODE (XEXP (use, 0)) == REG
4060 && FUNCTION_ARG_REGNO_P (REGNO (XEXP (use, 0)))))
4061 continue;
4062
4063 arg_mode = GET_MODE (XEXP (use, 0));
4064 regno = REGNO (XEXP (use, 0));
4065 /* Is it a floating point register? */
4066 if (regno >= 32 && regno <= 39)
4067 {
4068 /* Copy from the FP register into an integer register
4069 (via memory). */
4070 if (arg_mode == SFmode)
4071 {
4072 xoperands[0] = XEXP (use, 0);
4073 xoperands[1] = gen_rtx (REG, SImode, 26 - (regno - 32) / 2);
4074 output_asm_insn ("fstws %0,-16(%%sr0,%%r30)", xoperands);
4075 output_asm_insn ("ldw -16(%%sr0,%%r30),%1", xoperands);
4076 }
4077 else
4078 {
4079 xoperands[0] = XEXP (use, 0);
4080 xoperands[1] = gen_rtx (REG, DImode, 25 - (regno - 34) / 2);
4081 output_asm_insn ("fstds %0,-16(%%sr0,%%r30)", xoperands);
4082 output_asm_insn ("ldw -12(%%sr0,%%r30),%R1", xoperands);
4083 output_asm_insn ("ldw -16(%%sr0,%%r30),%1", xoperands);
4084 }
4085
4086 }
4087 }
4088
4089 /* Now emit the inline long-call. */
4090 xoperands[0] = call_dest;
7b42e7f8 4091 output_asm_insn ("ldil LP%%%0,%%r22\n\tldo RP%%%0(%%r22),%%r22",
4092 xoperands);
06ddb6f8 4093
4094 /* If TARGET_MILLICODE_LONG_CALLS, then we must use a long-call sequence
4095 to call dyncall! */
4096 if (TARGET_MILLICODE_LONG_CALLS)
4097 {
4098 output_asm_insn ("ldil L%%$$dyncall,%%r31", xoperands);
4099 output_asm_insn ("ldo R%%$$dyncall(%%r31),%%r31", xoperands);
4100 output_asm_insn ("blr 0,%%r2\n\tbv,n 0(%%r31)\n\tnop", xoperands);
3683f840 4101 }
4102 else
06ddb6f8 4103 output_asm_insn ("bl $$dyncall,%%r31\n\tcopy %%r31,%%r2", xoperands);
4104
4105 /* If we had a jump in the call's delay slot, output it now. */
4106 if (dbr_sequence_length () != 0
4107 && !delay_insn_deleted)
4108 {
4109 xoperands[0] = XEXP (PATTERN (NEXT_INSN (insn)), 1);
4110 output_asm_insn ("b,n %0", xoperands);
4111
4112 /* Now delete the delay insn. */
4113 PUT_CODE (NEXT_INSN (insn), NOTE);
4114 NOTE_LINE_NUMBER (NEXT_INSN (insn)) = NOTE_INSN_DELETED;
4115 NOTE_SOURCE_FILE (NEXT_INSN (insn)) = 0;
4116 }
d6686e21 4117 return "";
4118 }
6d36483b 4119
d6686e21 4120 /* This call has an unconditional jump in its delay slot. */
4121
4122 /* Use the containing sequence insn's address. */
4123 seq_insn = NEXT_INSN (PREV_INSN (XVECEXP (final_sequence, 0, 0)));
4124
6d36483b 4125 distance = insn_addresses[INSN_UID (JUMP_LABEL (NEXT_INSN (insn)))]
d6686e21 4126 - insn_addresses[INSN_UID (seq_insn)] - 8;
4127
4128 /* If the branch was too far away, emit a normal call followed
4129 by a nop, followed by the unconditional branch.
4130
6d36483b 4131 If the branch is close, then adjust %r2 from within the
d6686e21 4132 call's delay slot. */
4133
4134 xoperands[0] = call_dest;
4135 xoperands[1] = XEXP (PATTERN (NEXT_INSN (insn)), 1);
4136 xoperands[2] = return_pointer;
4137 if (! VAL_14_BITS_P (distance))
4138 output_asm_insn ("bl %0,%r2\n\tnop\n\tbl,n %1,%%r0", xoperands);
4139 else
4140 {
4141 xoperands[3] = gen_label_rtx ();
2a033928 4142 output_asm_insn ("\n\tbl %0,%r2\n\tldo %1-%3(%r2),%r2", xoperands);
6d36483b 4143 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
0410a74e 4144 CODE_LABEL_NUMBER (xoperands[3]));
d6686e21 4145 }
4146
4147 /* Delete the jump. */
4148 PUT_CODE (NEXT_INSN (insn), NOTE);
4149 NOTE_LINE_NUMBER (NEXT_INSN (insn)) = NOTE_INSN_DELETED;
4150 NOTE_SOURCE_FILE (NEXT_INSN (insn)) = 0;
4151 return "";
4152}
4153
d6f01525 4154extern struct obstack *saveable_obstack;
4155
4156/* In HPUX 8.0's shared library scheme, special relocations are needed
6d36483b 4157 for function labels if they might be passed to a function
d6f01525 4158 in a shared library (because shared libraries don't live in code
4159 space), and special magic is needed to construct their address. */
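/* Concretely, the encoding below just prepends an '@' to the symbol name
   (handling a leading '*' specially), so a symbol "foo", for example, would
   come out as "@foo"; function_label_operand below then uses FUNCTION_NAME_P
   to pick such labels out again.  */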
4160
4161void
4162hppa_encode_label (sym)
4163 rtx sym;
4164{
4165 char *str = XSTR (sym, 0);
4166 int len = strlen (str);
4167 char *newstr = obstack_alloc (saveable_obstack, len + 2) ;
4168
c1b3411e 4169 if (str[0] == '*')
4170 *newstr++ = *str++;
d6f01525 4171 strcpy (newstr + 1, str);
c1b3411e 4172 *newstr = '@';
d6f01525 4173 XSTR (sym,0) = newstr;
4174}
6d36483b 4175
d6f01525 4176int
166bf021 4177function_label_operand (op, mode)
d6f01525 4178 rtx op;
4179 enum machine_mode mode;
4180{
c1b3411e 4181 return GET_CODE (op) == SYMBOL_REF && FUNCTION_NAME_P (XSTR (op, 0));
d6f01525 4182}
f33e3942 4183
166bf021 4184/* Returns 1 if OP is a function label involved in a simple addition
4185 with a constant. Used to keep certain patterns from matching
4186 during instruction combination. */
4187int
4188is_function_label_plus_const (op)
4189 rtx op;
4190{
4191 /* Strip off any CONST. */
4192 if (GET_CODE (op) == CONST)
4193 op = XEXP (op, 0);
4194
4195 return (GET_CODE (op) == PLUS
4196 && function_label_operand (XEXP (op, 0), Pmode)
4197 && GET_CODE (XEXP (op, 1)) == CONST_INT);
4198}
4199
37580c80 4200/* Returns 1 if the 6 operands specified in OPERANDS are suitable for
4201 use in fmpyadd instructions. */
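/* For reference: operands[0] is the destination of the multiply and
   operands[1] and operands[2] its sources, while operands[3] is the
   destination of the addition and operands[4] and operands[5] its sources,
   as the conflict checks below assume.  */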
4ed6ee50 4202int
df0651dc 4203fmpyaddoperands (operands)
4ed6ee50 4204 rtx *operands;
4205{
201f01e9 4206 enum machine_mode mode = GET_MODE (operands[0]);
4ed6ee50 4207
4208 /* All modes must be the same. */
201f01e9 4209 if (! (mode == GET_MODE (operands[1])
4210 && mode == GET_MODE (operands[2])
4211 && mode == GET_MODE (operands[3])
4212 && mode == GET_MODE (operands[4])
4213 && mode == GET_MODE (operands[5])))
4ed6ee50 4214 return 0;
4215
4216 /* Both DFmode and SFmode should work. But using SFmode makes the
4217 assembler complain. Just turn it off for now. */
201f01e9 4218 if (mode != DFmode)
4ed6ee50 4219 return 0;
4220
37580c80 4221 /* Only 2 real operands to the addition. One of the input operands must
4222 be the same as the output operand. */
4ed6ee50 4223 if (! rtx_equal_p (operands[3], operands[4])
4224 && ! rtx_equal_p (operands[3], operands[5]))
4225 return 0;
4226
4227 /* Inout operand of add can not conflict with any operands from multiply. */
4228 if (rtx_equal_p (operands[3], operands[0])
4229 || rtx_equal_p (operands[3], operands[1])
4230 || rtx_equal_p (operands[3], operands[2]))
4231 return 0;
4232
4233 /* Multiply can not feed into addition operands. */
4234 if (rtx_equal_p (operands[4], operands[0])
4235 || rtx_equal_p (operands[5], operands[0]))
4236 return 0;
4237
4ed6ee50 4238 /* Passed. Operands are suitable for fmpyadd. */
4239 return 1;
4240}
4241
37580c80 4242/* Returns 1 if the 6 operands specified in OPERANDS are suitable for
4243 use in fmpysub instructions. */
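/* The operand layout matches fmpyaddoperands above, except that the second
   operation is a (non-commutative) subtraction.  */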
4ed6ee50 4244int
df0651dc 4245fmpysuboperands (operands)
4ed6ee50 4246 rtx *operands;
4247{
201f01e9 4248 enum machine_mode mode = GET_MODE (operands[0]);
4ed6ee50 4249
4250 /* All modes must be the same. */
201f01e9 4251 if (! (mode == GET_MODE (operands[1])
4252 && mode == GET_MODE (operands[2])
4253 && mode == GET_MODE (operands[3])
4254 && mode == GET_MODE (operands[4])
4255 && mode == GET_MODE (operands[5])))
4ed6ee50 4256 return 0;
4257
4258 /* Both DFmode and SFmode should work. But using SFmode makes the
4259 assembler complain. Just turn it off for now. */
201f01e9 4260 if (mode != DFmode)
4ed6ee50 4261 return 0;
4262
37580c80 4263 /* Only 2 real operands to the subtraction. Subtraction is not a commutative
4264 operation, so operands[4] must be the same as operands[3]. */
4ed6ee50 4265 if (! rtx_equal_p (operands[3], operands[4]))
4266 return 0;
4267
4268 /* Multiply can not feed into subtraction. */
37580c80 4269 if (rtx_equal_p (operands[5], operands[0]))
4ed6ee50 4270 return 0;
4271
37580c80 4272 /* Inout operand of sub can not conflict with any operands from multiply. */
4ed6ee50 4273 if (rtx_equal_p (operands[3], operands[0])
4274 || rtx_equal_p (operands[3], operands[1])
4275 || rtx_equal_p (operands[3], operands[2]))
4276 return 0;
4277
4278 /* Passed. Operands are suitable for fmpysub. */
4279 return 1;
4280}
4281
89f29d73 4282int
4283plus_xor_ior_operator (op, mode)
4284 rtx op;
4285 enum machine_mode mode;
4286{
4287 return (GET_CODE (op) == PLUS || GET_CODE (op) == XOR
4288 || GET_CODE (op) == IOR);
4289}
6720f95e 4290
4291/* Return 1 if the given constant is 2, 4, or 8. These are the valid
4292 constants for shadd instructions. */
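/* (The shadd instructions -- sh1add, sh2add and sh3add -- shift one addend
   left by 1, 2 or 3 bits before adding, which is why only the scale factors
   2, 4 and 8 are usable.)  */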
4293int
4294shadd_constant_p (val)
4295 int val;
4296{
4297 if (val == 2 || val == 4 || val == 8)
4298 return 1;
4299 else
4300 return 0;
4301}
3a16146d 4302
4303/* Return 1 if OP is a CONST_INT with the value 2, 4, or 8. These are
4304 the valid constants for shadd instructions. */
4305int
4306shadd_operand (op, mode)
4307 rtx op;
4308 enum machine_mode mode;
4309{
4310 return (GET_CODE (op) == CONST_INT && shadd_constant_p (INTVAL (op)));
4311}
5fbd5940 4312
51987f90 4313/* Return 1 if this operand is anything other than a hard register. */
4314
4315int
4316non_hard_reg_operand (op, mode)
4317 rtx op;
4318 enum machine_mode mode;
4319{
4320 return ! (GET_CODE (op) == REG && REGNO (op) < FIRST_PSEUDO_REGISTER);
4321}
4322
5fbd5940 4323/* Return 1 if INSN branches forward. Should be using insn_addresses
4324 to avoid walking through all the insns... */
4325int
4326forward_branch_p (insn)
4327 rtx insn;
4328{
4329 rtx label = JUMP_LABEL (insn);
4330
4331 while (insn)
4332 {
4333 if (insn == label)
4334 break;
4335 else
4336 insn = NEXT_INSN (insn);
4337 }
4338
4339 return (insn == label);
4340}
4341
29a4502c 4342/* Return 1 if OP is an equality comparison, else return 0. */
4343int
4344eq_neq_comparison_operator (op, mode)
4345 rtx op;
4346 enum machine_mode mode;
4347{
4348 return (GET_CODE (op) == EQ || GET_CODE (op) == NE);
4349}
4350
4351/* Return 1 if OP is an operator suitable for use in a movb instruction. */
4352int
4353movb_comparison_operator (op, mode)
4354 rtx op;
4355 enum machine_mode mode;
4356{
4357 return (GET_CODE (op) == EQ || GET_CODE (op) == NE
4358 || GET_CODE (op) == LT || GET_CODE (op) == GE);
4359}
4360
d6686e21 4361/* Return 1 if INSN is in the delay slot of a call instruction. */
4362int
4363jump_in_call_delay (insn)
4364 rtx insn;
4365{
4366
4367 if (GET_CODE (insn) != JUMP_INSN)
4368 return 0;
4369
4370 if (PREV_INSN (insn)
4371 && PREV_INSN (PREV_INSN (insn))
4372 && GET_CODE (next_active_insn (PREV_INSN (PREV_INSN (insn)))) == INSN)
4373 {
4374 rtx test_insn = next_active_insn (PREV_INSN (PREV_INSN (insn)));
4375
4376 return (GET_CODE (PATTERN (test_insn)) == SEQUENCE
4377 && XVECEXP (PATTERN (test_insn), 0, 1) == insn);
4378
4379 }
4380 else
4381 return 0;
4382}