/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "defaults.h"
#include "toplev.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* Don't check memory usage, since code is being emitted to check memory
   usage.  Used when flag_check_memory_usage is true, to avoid infinite
   recursion.  */
static int in_check_memory_usage;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;			/* Destination block (a MEM).  */
  rtx to_addr;			/* Address of the destination.  */
  int autinc_to;		/* Nonzero if TO_ADDR autoincrements.  */
  int explicit_inc_to;		/* > 0 to explicitly increment TO_ADDR after
				   each move, < 0 to decrement it before.  */
  int to_struct;		/* MEM_IN_STRUCT_P of the destination.  */
  rtx from;			/* Source block (a MEM).  */
  rtx from_addr;		/* Address of the source.  */
  int autinc_from;		/* Nonzero if FROM_ADDR autoincrements.  */
  int explicit_inc_from;	/* Analogous to EXPLICIT_INC_TO.  */
  int from_struct;		/* MEM_IN_STRUCT_P of the source.  */
  int len;			/* Number of bytes left to move.  */
  int offset;			/* Current offset into the blocks.  */
  int reverse;			/* Nonzero to move from high addresses down.  */
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  Its fields parallel those of struct move_by_pieces.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;

static rtx get_push_address PROTO ((int));

static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
				    struct move_by_pieces *));
static void clear_by_pieces PROTO((rtx, int, int));
static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
				     struct clear_by_pieces *));
static int is_zeros_p PROTO((tree));
static int mostly_zeros_p PROTO((tree));
static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
					    tree, tree, int));
static void store_constructor PROTO((tree, rtx, int));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PROTO((enum expand_modifier));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree, int));
static int fixed_type_p PROTO((tree));
static rtx var_rtx PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
				 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int, int));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
extern tree truthvalue_conversion PROTO((tree));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}

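/* Illustrative note (a sketch, not part of the machinery above): once
   init_expr_once has run, a test such as

       if (direct_load[(int) SImode]) ...

   asks whether some hard register can be loaded from SImode memory by a
   single recognized insn.  convert_move and convert_modes below consult
   these arrays before referring to memory in a narrower mode.  */
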
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var),
				  var, NULL_RTX, NULL_RTX, body,
				  pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

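/* Rough sketch of the protocol (illustrative only): code expanding a
   postincrement might do

       rtx q = enqueue_insn (var, gen_add2_insn (var, const1_rtx));
       ... protect_from_queue (q, 0) ...

   Before emit_queue runs, protect_from_queue yields VAR itself; once the
   queued increment has been emitted, it yields a temporary holding VAR's
   pre-increment value.  */
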
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

/* Sanity check at the start of a function: the increment queue
   must be empty.  */

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */		/* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

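/* Minimal usage sketch (illustrative): to sign-extend a HImode pseudo
   into a fresh SImode pseudo, a caller might write

       rtx narrow = gen_reg_rtx (HImode);
       rtx wide = gen_reg_rtx (SImode);
       convert_move (wide, narrow, 0);

   convert_move then picks a direct extend insn, a conversion via an
   intermediate mode, or (for floating modes) a library call.  */
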
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
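
/* Worked example (illustrative): widening (const_int -1), known to be a
   QImode value, to HImode depends on UNSIGNEDP:

       convert_modes (HImode, QImode, GEN_INT (-1), 1)  => (const_int 255)
       convert_modes (HImode, QImode, GEN_INT (-1), 0)  => (const_int -1)

   because the CONST_INT is first zero-extended from the 8-bit width of
   OLDMODE and then sign-extended only when UNSIGNEDP is zero.  */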
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}

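/* Worked example (illustrative): on a hypothetical target with 4-byte
   words where SImode, HImode and QImode all have move insns, a fully
   aligned copy of L = 7 bytes counts 7/4 = 1 SImode move, then 3/2 = 1
   HImode move, then 1 QImode move: 3 insns in total.  emit_block_move
   below compares this count against MOVE_RATIO to choose between
   move_by_pieces and a block-move insn or library call.  */
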
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
	= (data->autinc_from
	   ? gen_rtx_MEM (mode, data->from_addr)
	   : copy_rtx (change_address (data->from, mode,
				       plus_constant (data->from_addr,
						      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      retval
	= emit_library_call_value (memcpy_libfunc, NULL_RTX, 0,
				   ptr_mode, 3, XEXP (x, 0), Pmode,
				   XEXP (y, 0), Pmode,
				   convert_to_mode (TYPE_MODE (sizetype), size,
						    TREE_UNSIGNED (sizetype)),
				   TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
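
/* Minimal usage sketch (illustrative): copying an 8-byte, word-aligned
   BLKmode object might look like

       emit_block_move (dest_mem, src_mem, GEN_INT (8), 4);

   With MOVE_RATIO large enough this expands into a few scalar moves via
   move_by_pieces; otherwise a movstr insn or a memcpy/bcopy library
   call is emitted.  */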
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

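/* Illustrative example: on a 32-bit target, copying a DImode value into
   the two consecutive hard registers starting at regno 3 could be done
   with

       move_block_to_reg (3, x, 2, DImode);

   which uses a load-multiple insn when available and falls back to one
   word move per register.  */
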
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
		      gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

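/* Worked example (illustrative): with 4-byte words on a BYTES_BIG_ENDIAN
   target, storing a SIZE == 3 value takes the left-alignment path above:
   the register is shifted left by (4 - 3) * 8 = 8 bits so that the three
   significant bytes land at the low addresses of the destination word.  */
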
fffa9c1d
JW
1808/* Emit code to move a block Y to a block X, where X is non-consecutive
1809 registers represented by a PARALLEL. */
1810
1811void
1812emit_group_load (x, y)
1813 rtx x, y;
1814{
1815 rtx target_reg, source;
1816 int i;
1817
1818 if (GET_CODE (x) != PARALLEL)
1819 abort ();
1820
1821 /* Check for a NULL entry, used to indicate that the parameter goes
1822 both on the stack and in registers. */
1823 if (XEXP (XVECEXP (x, 0, 0), 0))
1824 i = 0;
1825 else
1826 i = 1;
1827
1828 for (; i < XVECLEN (x, 0); i++)
1829 {
1830 rtx element = XVECEXP (x, 0, i);
1831
1832 target_reg = XEXP (element, 0);
1833
1834 if (GET_CODE (y) == MEM)
1835 source = change_address (y, GET_MODE (target_reg),
1836 plus_constant (XEXP (y, 0),
1837 INTVAL (XEXP (element, 1))));
1838 else if (XEXP (element, 1) == const0_rtx)
1839 {
1840 if (GET_MODE (target_reg) == GET_MODE (y))
1841 source = y;
eaa9b4d9
MM
 1842 /* Allow the target_reg to be smaller than the input register, as
 1843 happens on AIX with 4 DF arguments after a single SI arg. The
 1844 last DF argument will load only 1 word into the integer registers,
 1845 but a full DF value into the float registers. */
aff4d29b
JW
1846 else if ((GET_MODE_SIZE (GET_MODE (target_reg))
1847 <= GET_MODE_SIZE (GET_MODE (y)))
1848 && GET_MODE (target_reg) == word_mode)
1849 /* This might be a const_double, so we can't just use SUBREG. */
1850 source = operand_subword (y, 0, 0, VOIDmode);
d7d775a0
JW
1851 else if (GET_MODE_SIZE (GET_MODE (target_reg))
1852 == GET_MODE_SIZE (GET_MODE (y)))
1853 source = gen_lowpart (GET_MODE (target_reg), y);
fffa9c1d
JW
1854 else
1855 abort ();
1856 }
1857 else
1858 abort ();
1859
1860 emit_move_insn (target_reg, source);
1861 }
1862}
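/* A usage sketch (illustrative): building the PARALLEL that describes a
   value split across two floating registers at byte offsets 0 and 8,
   then loading it from memory Y.  The register numbers R0/R1 and the use
   of DFmode are hypothetical; the (reg, const_int offset) EXPR_LIST
   layout matches what the XEXP accesses above read back.  Not compiled.  */
#if 0
static void
example_group_load (y, r0, r1)
     rtx y;
     int r0, r1;
{
  rtx x = gen_rtx_PARALLEL
    (VOIDmode,
     gen_rtvec (2,
                gen_rtx_EXPR_LIST (VOIDmode,
                                   gen_rtx_REG (DFmode, r0), const0_rtx),
                gen_rtx_EXPR_LIST (VOIDmode,
                                   gen_rtx_REG (DFmode, r1), GEN_INT (8))));

  emit_group_load (x, y);
}
#endif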
1863
1864/* Emit code to move a block Y to a block X, where Y is non-consecutive
1865 registers represented by a PARALLEL. */
1866
1867void
1868emit_group_store (x, y)
1869 rtx x, y;
1870{
1871 rtx source_reg, target;
1872 int i;
1873
1874 if (GET_CODE (y) != PARALLEL)
1875 abort ();
1876
1877 /* Check for a NULL entry, used to indicate that the parameter goes
1878 both on the stack and in registers. */
1879 if (XEXP (XVECEXP (y, 0, 0), 0))
1880 i = 0;
1881 else
1882 i = 1;
1883
1884 for (; i < XVECLEN (y, 0); i++)
1885 {
1886 rtx element = XVECEXP (y, 0, i);
1887
1888 source_reg = XEXP (element, 0);
1889
1890 if (GET_CODE (x) == MEM)
1891 target = change_address (x, GET_MODE (source_reg),
1892 plus_constant (XEXP (x, 0),
1893 INTVAL (XEXP (element, 1))));
1894 else if (XEXP (element, 1) == const0_rtx)
71bc0330
JW
1895 {
1896 target = x;
1897 if (GET_MODE (target) != GET_MODE (source_reg))
1898 target = gen_lowpart (GET_MODE (source_reg), target);
1899 }
fffa9c1d
JW
1900 else
1901 abort ();
1902
1903 emit_move_insn (target, source_reg);
1904 }
1905}
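/* The converse sketch: spilling a PARALLEL built as in the previous
   example back to memory.  X may be a MEM, in which case the per-element
   offsets are applied, or a single register, in which case only the
   zero-offset case is accepted (the aborts above enforce this).
   Illustrative only; not compiled.  */
#if 0
static void
example_group_store (addr, regs)
     rtx addr, regs;
{
  emit_group_store (gen_rtx_MEM (BLKmode, addr), regs);
}
#endif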
1906
94b25f81
RK
1907/* Add a USE expression for REG to the (possibly empty) list pointed
1908 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
1909
1910void
b3f8cf4a
RK
1911use_reg (call_fusage, reg)
1912 rtx *call_fusage, reg;
1913{
0304dfbb
DE
1914 if (GET_CODE (reg) != REG
1915 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
b3f8cf4a
RK
 1916 abort ();
1917
1918 *call_fusage
38a448ca
RH
1919 = gen_rtx_EXPR_LIST (VOIDmode,
1920 gen_rtx_USE (VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
1921}
1922
94b25f81
RK
1923/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1924 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
1925
1926void
0304dfbb
DE
1927use_regs (call_fusage, regno, nregs)
1928 rtx *call_fusage;
bbf6f052
RK
1929 int regno;
1930 int nregs;
1931{
0304dfbb 1932 int i;
bbf6f052 1933
0304dfbb
DE
1934 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1935 abort ();
1936
1937 for (i = 0; i < nregs; i++)
38a448ca 1938 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
bbf6f052 1939}
fffa9c1d
JW
1940
1941/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1942 PARALLEL REGS. This is for calls that pass values in multiple
1943 non-contiguous locations. The Irix 6 ABI has examples of this. */
1944
1945void
1946use_group_regs (call_fusage, regs)
1947 rtx *call_fusage;
1948 rtx regs;
1949{
1950 int i;
1951
6bd35f86
DE
1952 for (i = 0; i < XVECLEN (regs, 0); i++)
1953 {
1954 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 1955
6bd35f86
DE
1956 /* A NULL entry means the parameter goes both on the stack and in
1957 registers. This can also be a MEM for targets that pass values
1958 partially on the stack and partially in registers. */
e9a25f70 1959 if (reg != 0 && GET_CODE (reg) == REG)
6bd35f86
DE
1960 use_reg (call_fusage, reg);
1961 }
fffa9c1d 1962}
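/* A sketch of how the use_* helpers combine when emitting a call:
   CALL_FUSAGE starts empty and accumulates one USE per hard register
   that carries an argument, ready to be attached to the CALL_INSN.
   ARG_REG and GROUP are hypothetical; not compiled.  */
#if 0
static void
example_call_fusage (arg_reg, group)
     rtx arg_reg, group;
{
  rtx call_fusage = 0;

  use_reg (&call_fusage, arg_reg);       /* one hard register */
  use_group_regs (&call_fusage, group);  /* a PARALLEL of registers */
}
#endif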
bbf6f052 1963\f
9de08200
RK
1964/* Generate several move instructions to clear LEN bytes of block TO.
1965 (A MEM rtx with BLKmode). The caller must pass TO through
 1966 protect_from_queue before calling. ALIGN (in bytes) is the maximum alignment
1967 we can assume. */
1968
1969static void
1970clear_by_pieces (to, len, align)
1971 rtx to;
1972 int len, align;
1973{
1974 struct clear_by_pieces data;
1975 rtx to_addr = XEXP (to, 0);
1976 int max_size = MOVE_MAX + 1;
1977
1978 data.offset = 0;
1979 data.to_addr = to_addr;
1980 data.to = to;
1981 data.autinc_to
1982 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1983 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1984
1985 data.explicit_inc_to = 0;
1986 data.reverse
1987 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1988 if (data.reverse) data.offset = len;
1989 data.len = len;
1990
1991 data.to_struct = MEM_IN_STRUCT_P (to);
1992
1993 /* If copying requires more than two move insns,
1994 copy addresses to registers (to make displacements shorter)
1995 and use post-increment if available. */
1996 if (!data.autinc_to
1997 && move_by_pieces_ninsns (len, align) > 2)
1998 {
1999#ifdef HAVE_PRE_DECREMENT
2000 if (data.reverse && ! data.autinc_to)
2001 {
2002 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2003 data.autinc_to = 1;
2004 data.explicit_inc_to = -1;
2005 }
2006#endif
2007#ifdef HAVE_POST_INCREMENT
2008 if (! data.reverse && ! data.autinc_to)
2009 {
2010 data.to_addr = copy_addr_to_reg (to_addr);
2011 data.autinc_to = 1;
2012 data.explicit_inc_to = 1;
2013 }
2014#endif
2015 if (!data.autinc_to && CONSTANT_P (to_addr))
2016 data.to_addr = copy_addr_to_reg (to_addr);
2017 }
2018
2019 if (! SLOW_UNALIGNED_ACCESS
2020 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2021 align = MOVE_MAX;
2022
2023 /* First move what we can in the largest integer mode, then go to
2024 successively smaller modes. */
2025
2026 while (max_size > 1)
2027 {
2028 enum machine_mode mode = VOIDmode, tmode;
2029 enum insn_code icode;
2030
2031 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2032 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2033 if (GET_MODE_SIZE (tmode) < max_size)
2034 mode = tmode;
2035
2036 if (mode == VOIDmode)
2037 break;
2038
2039 icode = mov_optab->handlers[(int) mode].insn_code;
2040 if (icode != CODE_FOR_nothing
2041 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2042 GET_MODE_SIZE (mode)))
2043 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2044
2045 max_size = GET_MODE_SIZE (mode);
2046 }
2047
2048 /* The code above should have handled everything. */
2049 if (data.len != 0)
2050 abort ();
2051}
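/* A worked trace of the loop above, assuming MOVE_MAX == 4 and full
   alignment: clearing LEN == 7 first picks SImode and emits one 4-byte
   store of const0_rtx, then HImode for a 2-byte store, then QImode for
   the final byte, leaving data.len == 0.  On a SLOW_UNALIGNED_ACCESS
   machine a smaller ALIGN caps the widest mode considered.  */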
2052
2053/* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2054 with move instructions for mode MODE. GENFUN is the gen_... function
2055 to make a move insn for that mode. DATA has all the other info. */
2056
2057static void
2058clear_by_pieces_1 (genfun, mode, data)
eae4b970 2059 rtx (*genfun) PROTO ((rtx, ...));
9de08200
RK
2060 enum machine_mode mode;
2061 struct clear_by_pieces *data;
2062{
2063 register int size = GET_MODE_SIZE (mode);
2064 register rtx to1;
2065
2066 while (data->len >= size)
2067 {
2068 if (data->reverse) data->offset -= size;
2069
2070 to1 = (data->autinc_to
38a448ca 2071 ? gen_rtx_MEM (mode, data->to_addr)
effbcc6a
RK
2072 : copy_rtx (change_address (data->to, mode,
2073 plus_constant (data->to_addr,
2074 data->offset))));
9de08200
RK
2075 MEM_IN_STRUCT_P (to1) = data->to_struct;
2076
2077#ifdef HAVE_PRE_DECREMENT
2078 if (data->explicit_inc_to < 0)
2079 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2080#endif
2081
2082 emit_insn ((*genfun) (to1, const0_rtx));
2083#ifdef HAVE_POST_INCREMENT
2084 if (data->explicit_inc_to > 0)
2085 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2086#endif
2087
2088 if (! data->reverse) data->offset += size;
2089
2090 data->len -= size;
2091 }
2092}
2093\f
bbf6f052 2094/* Write zeros through the storage of OBJECT.
9de08200 2095 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
e9a25f70 2096 the maximum alignment we can assume, measured in bytes.
bbf6f052 2097
e9a25f70
JL
2098 If we call a function that returns the length of the block, return it. */
2099
2100rtx
9de08200 2101clear_storage (object, size, align)
bbf6f052 2102 rtx object;
4c08eef0 2103 rtx size;
9de08200 2104 int align;
bbf6f052 2105{
e9a25f70
JL
2106 rtx retval = 0;
2107
bbf6f052
RK
2108 if (GET_MODE (object) == BLKmode)
2109 {
9de08200
RK
2110 object = protect_from_queue (object, 1);
2111 size = protect_from_queue (size, 0);
2112
2113 if (GET_CODE (size) == CONST_INT
2114 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2115 clear_by_pieces (object, INTVAL (size), align);
2116
2117 else
2118 {
2119 /* Try the most limited insn first, because there's no point
2120 including more than one in the machine description unless
2121 the more limited one has some advantage. */
2122
2123 rtx opalign = GEN_INT (align);
2124 enum machine_mode mode;
2125
2126 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2127 mode = GET_MODE_WIDER_MODE (mode))
2128 {
2129 enum insn_code code = clrstr_optab[(int) mode];
2130
2131 if (code != CODE_FOR_nothing
2132 /* We don't need MODE to be narrower than
2133 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2134 the mode mask, as it is returned by the macro, it will
2135 definitely be less than the actual mode mask. */
2136 && ((GET_CODE (size) == CONST_INT
2137 && ((unsigned HOST_WIDE_INT) INTVAL (size)
e5e809f4 2138 <= (GET_MODE_MASK (mode) >> 1)))
9de08200
RK
2139 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2140 && (insn_operand_predicate[(int) code][0] == 0
2141 || (*insn_operand_predicate[(int) code][0]) (object,
2142 BLKmode))
2143 && (insn_operand_predicate[(int) code][2] == 0
2144 || (*insn_operand_predicate[(int) code][2]) (opalign,
2145 VOIDmode)))
2146 {
2147 rtx op1;
2148 rtx last = get_last_insn ();
2149 rtx pat;
2150
2151 op1 = convert_to_mode (mode, size, 1);
2152 if (insn_operand_predicate[(int) code][1] != 0
2153 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2154 mode))
2155 op1 = copy_to_mode_reg (mode, op1);
2156
2157 pat = GEN_FCN ((int) code) (object, op1, opalign);
2158 if (pat)
2159 {
2160 emit_insn (pat);
e9a25f70 2161 return 0;
9de08200
RK
2162 }
2163 else
2164 delete_insns_since (last);
2165 }
2166 }
2167
2168
bbf6f052 2169#ifdef TARGET_MEM_FUNCTIONS
e9a25f70
JL
2170 retval
2171 = emit_library_call_value (memset_libfunc, NULL_RTX, 0,
2172 ptr_mode, 3,
2173 XEXP (object, 0), Pmode,
2174 const0_rtx,
2175 TYPE_MODE (integer_type_node),
2176 convert_to_mode
2177 (TYPE_MODE (sizetype), size,
2178 TREE_UNSIGNED (sizetype)),
2179 TYPE_MODE (sizetype));
bbf6f052 2180#else
9de08200
RK
2181 emit_library_call (bzero_libfunc, 0,
2182 VOIDmode, 2,
2183 XEXP (object, 0), Pmode,
e9a25f70
JL
2184 convert_to_mode
2185 (TYPE_MODE (integer_type_node), size,
2186 TREE_UNSIGNED (integer_type_node)),
9de08200 2187 TYPE_MODE (integer_type_node));
bbf6f052 2188#endif
9de08200 2189 }
bbf6f052
RK
2190 }
2191 else
66ed0683 2192 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
e9a25f70
JL
2193
2194 return retval;
bbf6f052
RK
2195}
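/* Usage sketch (illustrative): zeroing a 32-byte, word-aligned BLKmode
   object.  A small constant SIZE like this one takes the clear_by_pieces
   path; larger or variable sizes fall through to a clrstr pattern or to
   the memset/bzero library call.  Not compiled.  */
#if 0
static void
example_clear (object)
     rtx object;
{
  clear_storage (object, GEN_INT (32), UNITS_PER_WORD);
}
#endif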
2196
2197/* Generate code to copy Y into X.
2198 Both Y and X must have the same mode, except that
2199 Y can be a constant with VOIDmode.
2200 This mode cannot be BLKmode; use emit_block_move for that.
2201
2202 Return the last instruction emitted. */
2203
2204rtx
2205emit_move_insn (x, y)
2206 rtx x, y;
2207{
2208 enum machine_mode mode = GET_MODE (x);
bbf6f052
RK
2209
2210 x = protect_from_queue (x, 1);
2211 y = protect_from_queue (y, 0);
2212
2213 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2214 abort ();
2215
2216 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2217 y = force_const_mem (mode, y);
2218
2219 /* If X or Y are memory references, verify that their addresses are valid
2220 for the machine. */
2221 if (GET_CODE (x) == MEM
2222 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2223 && ! push_operand (x, GET_MODE (x)))
2224 || (flag_force_addr
2225 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2226 x = change_address (x, VOIDmode, XEXP (x, 0));
2227
2228 if (GET_CODE (y) == MEM
2229 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2230 || (flag_force_addr
2231 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2232 y = change_address (y, VOIDmode, XEXP (y, 0));
2233
2234 if (mode == BLKmode)
2235 abort ();
2236
261c4230
RS
2237 return emit_move_insn_1 (x, y);
2238}
2239
2240/* Low level part of emit_move_insn.
2241 Called just like emit_move_insn, but assumes X and Y
2242 are basically valid. */
2243
2244rtx
2245emit_move_insn_1 (x, y)
2246 rtx x, y;
2247{
2248 enum machine_mode mode = GET_MODE (x);
2249 enum machine_mode submode;
2250 enum mode_class class = GET_MODE_CLASS (mode);
2251 int i;
2252
bbf6f052
RK
2253 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2254 return
2255 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2256
89742723 2257 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 2258 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
d0c76654
RK
2259 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2260 * BITS_PER_UNIT),
2261 (class == MODE_COMPLEX_INT
2262 ? MODE_INT : MODE_FLOAT),
2263 0))
7308a047
RS
2264 && (mov_optab->handlers[(int) submode].insn_code
2265 != CODE_FOR_nothing))
2266 {
2267 /* Don't split destination if it is a stack push. */
2268 int stack = push_operand (x, GET_MODE (x));
7308a047 2269
7308a047
RS
 2270 /* If this is a stack push, push the highpart first, so it
2271 will be in the argument order.
2272
2273 In that case, change_address is used only to convert
2274 the mode, not to change the address. */
c937357e
RS
2275 if (stack)
2276 {
e33c0d66
RS
2277 /* Note that the real part always precedes the imag part in memory
2278 regardless of machine's endianness. */
c937357e
RS
2279#ifdef STACK_GROWS_DOWNWARD
2280 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2281 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2282 gen_imagpart (submode, y)));
c937357e 2283 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2284 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2285 gen_realpart (submode, y)));
c937357e
RS
2286#else
2287 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2288 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2289 gen_realpart (submode, y)));
c937357e 2290 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2291 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2292 gen_imagpart (submode, y)));
c937357e
RS
2293#endif
2294 }
2295 else
2296 {
2297 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
976ff203 2298 (gen_realpart (submode, x), gen_realpart (submode, y)));
c937357e 2299 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
976ff203 2300 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
c937357e 2301 }
7308a047 2302
7a1ab50a 2303 return get_last_insn ();
7308a047
RS
2304 }
2305
bbf6f052
RK
2306 /* This will handle any multi-word mode that lacks a move_insn pattern.
2307 However, you will get better code if you define such patterns,
2308 even if they must turn into multiple assembler instructions. */
a4320483 2309 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
bbf6f052
RK
2310 {
2311 rtx last_insn = 0;
6551fa4d 2312
a98c9f1a
RK
2313#ifdef PUSH_ROUNDING
2314
2315 /* If X is a push on the stack, do the push now and replace
2316 X with a reference to the stack pointer. */
2317 if (push_operand (x, GET_MODE (x)))
2318 {
2319 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2320 x = change_address (x, VOIDmode, stack_pointer_rtx);
2321 }
2322#endif
2323
15a7a8ec 2324 /* Show the output dies here. */
43e046cb 2325 if (x != y)
38a448ca 2326 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
15a7a8ec 2327
bbf6f052
RK
2328 for (i = 0;
2329 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2330 i++)
2331 {
2332 rtx xpart = operand_subword (x, i, 1, mode);
2333 rtx ypart = operand_subword (y, i, 1, mode);
2334
2335 /* If we can't get a part of Y, put Y into memory if it is a
2336 constant. Otherwise, force it into a register. If we still
2337 can't get a part of Y, abort. */
2338 if (ypart == 0 && CONSTANT_P (y))
2339 {
2340 y = force_const_mem (mode, y);
2341 ypart = operand_subword (y, i, 1, mode);
2342 }
2343 else if (ypart == 0)
2344 ypart = operand_subword_force (y, i, mode);
2345
2346 if (xpart == 0 || ypart == 0)
2347 abort ();
2348
2349 last_insn = emit_move_insn (xpart, ypart);
2350 }
6551fa4d 2351
bbf6f052
RK
2352 return last_insn;
2353 }
2354 else
2355 abort ();
2356}
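/* A caller-level sketch: emit_move_insn validates the operands and
   delegates here.  A word-sized copy goes straight through the mov_optab
   pattern; a DImode move on a 32-bit target without a movdi pattern
   lands in the multi-word loop above and becomes two word_mode moves.
   DST_ADDR/SRC_ADDR are hypothetical; not compiled.  */
#if 0
static void
example_move (dst_addr, src_addr)
     rtx dst_addr, src_addr;
{
  rtx tmp = gen_reg_rtx (SImode);

  emit_move_insn (tmp, gen_rtx_MEM (SImode, src_addr));
  emit_move_insn (gen_rtx_MEM (SImode, dst_addr), tmp);
}
#endif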
2357\f
2358/* Pushing data onto the stack. */
2359
2360/* Push a block of length SIZE (perhaps variable)
2361 and return an rtx to address the beginning of the block.
2362 Note that it is not possible for the value returned to be a QUEUED.
2363 The value may be virtual_outgoing_args_rtx.
2364
2365 EXTRA is the number of bytes of padding to push in addition to SIZE.
2366 BELOW nonzero means this padding comes at low addresses;
2367 otherwise, the padding comes at high addresses. */
2368
2369rtx
2370push_block (size, extra, below)
2371 rtx size;
2372 int extra, below;
2373{
2374 register rtx temp;
88f63c77
RK
2375
2376 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
2377 if (CONSTANT_P (size))
2378 anti_adjust_stack (plus_constant (size, extra));
2379 else if (GET_CODE (size) == REG && extra == 0)
2380 anti_adjust_stack (size);
2381 else
2382 {
2383 rtx temp = copy_to_mode_reg (Pmode, size);
2384 if (extra != 0)
906c4e36 2385 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
2386 temp, 0, OPTAB_LIB_WIDEN);
2387 anti_adjust_stack (temp);
2388 }
2389
2390#ifdef STACK_GROWS_DOWNWARD
2391 temp = virtual_outgoing_args_rtx;
2392 if (extra != 0 && below)
2393 temp = plus_constant (temp, extra);
2394#else
2395 if (GET_CODE (size) == CONST_INT)
2396 temp = plus_constant (virtual_outgoing_args_rtx,
2397 - INTVAL (size) - (below ? 0 : extra));
2398 else if (extra != 0 && !below)
38a448ca 2399 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
bbf6f052
RK
2400 negate_rtx (Pmode, plus_constant (size, extra)));
2401 else
38a448ca 2402 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
bbf6f052
RK
2403 negate_rtx (Pmode, size));
2404#endif
2405
2406 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2407}
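/* Usage sketch: reserving 16 bytes of outgoing-argument space and
   getting an address for the bottom of the block.  With EXTRA == 0 the
   BELOW argument is irrelevant.  Illustrative only; not compiled.  */
#if 0
static rtx
example_push_block ()
{
  return push_block (GEN_INT (16), 0, 0);
}
#endif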
2408
87e38d84 2409rtx
bbf6f052
RK
2410gen_push_operand ()
2411{
38a448ca 2412 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
bbf6f052
RK
2413}
2414
921b3427
RK
 2415/* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2416 block of SIZE bytes. */
2417
2418static rtx
2419get_push_address (size)
2420 int size;
2421{
2422 register rtx temp;
2423
2424 if (STACK_PUSH_CODE == POST_DEC)
38a448ca 2425 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
921b3427 2426 else if (STACK_PUSH_CODE == POST_INC)
38a448ca 2427 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
921b3427
RK
2428 else
2429 temp = stack_pointer_rtx;
2430
c85f7c16 2431 return copy_to_reg (temp);
921b3427
RK
2432}
2433
bbf6f052
RK
2434/* Generate code to push X onto the stack, assuming it has mode MODE and
2435 type TYPE.
2436 MODE is redundant except when X is a CONST_INT (since they don't
2437 carry mode info).
2438 SIZE is an rtx for the size of data to be copied (in bytes),
2439 needed only if X is BLKmode.
2440
 2441 ALIGN (in bytes) is the maximum alignment we can assume.
2442
cd048831
RK
2443 If PARTIAL and REG are both nonzero, then copy that many of the first
2444 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
2445 The amount of space pushed is decreased by PARTIAL words,
2446 rounded *down* to a multiple of PARM_BOUNDARY.
2447 REG must be a hard register in this case.
cd048831
RK
 2448 If REG is zero but PARTIAL is not, take all other actions for an
2449 argument partially in registers, but do not actually load any
2450 registers.
bbf6f052
RK
2451
2452 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 2453 This is ignored if an argument block has already been allocated.
bbf6f052
RK
2454
2455 On a machine that lacks real push insns, ARGS_ADDR is the address of
2456 the bottom of the argument block for this call. We use indexing off there
 2457 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2458 argument block has not been preallocated.
2459
e5e809f4
JL
2460 ARGS_SO_FAR is the size of args previously pushed for this call.
2461
2462 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2463 for arguments passed in registers. If nonzero, it will be the number
2464 of bytes required. */
bbf6f052
RK
2465
2466void
2467emit_push_insn (x, mode, type, size, align, partial, reg, extra,
e5e809f4 2468 args_addr, args_so_far, reg_parm_stack_space)
bbf6f052
RK
2469 register rtx x;
2470 enum machine_mode mode;
2471 tree type;
2472 rtx size;
2473 int align;
2474 int partial;
2475 rtx reg;
2476 int extra;
2477 rtx args_addr;
2478 rtx args_so_far;
e5e809f4 2479 int reg_parm_stack_space;
bbf6f052
RK
2480{
2481 rtx xinner;
2482 enum direction stack_direction
2483#ifdef STACK_GROWS_DOWNWARD
2484 = downward;
2485#else
2486 = upward;
2487#endif
2488
2489 /* Decide where to pad the argument: `downward' for below,
2490 `upward' for above, or `none' for don't pad it.
2491 Default is below for small data on big-endian machines; else above. */
2492 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2493
2494 /* Invert direction if stack is post-update. */
2495 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2496 if (where_pad != none)
2497 where_pad = (where_pad == downward ? upward : downward);
2498
2499 xinner = x = protect_from_queue (x, 0);
2500
2501 if (mode == BLKmode)
2502 {
2503 /* Copy a block into the stack, entirely or partially. */
2504
2505 register rtx temp;
2506 int used = partial * UNITS_PER_WORD;
2507 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2508 int skip;
2509
2510 if (size == 0)
2511 abort ();
2512
2513 used -= offset;
2514
2515 /* USED is now the # of bytes we need not copy to the stack
2516 because registers will take care of them. */
2517
2518 if (partial != 0)
2519 xinner = change_address (xinner, BLKmode,
2520 plus_constant (XEXP (xinner, 0), used));
2521
2522 /* If the partial register-part of the arg counts in its stack size,
2523 skip the part of stack space corresponding to the registers.
2524 Otherwise, start copying to the beginning of the stack space,
2525 by setting SKIP to 0. */
e5e809f4 2526 skip = (reg_parm_stack_space == 0) ? 0 : used;
bbf6f052
RK
2527
2528#ifdef PUSH_ROUNDING
2529 /* Do it with several push insns if that doesn't take lots of insns
2530 and if there is no difficulty with push insns that skip bytes
2531 on the stack for alignment purposes. */
2532 if (args_addr == 0
2533 && GET_CODE (size) == CONST_INT
2534 && skip == 0
2535 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2536 < MOVE_RATIO)
bbf6f052
RK
2537 /* Here we avoid the case of a structure whose weak alignment
2538 forces many pushes of a small amount of data,
2539 and such small pushes do rounding that causes trouble. */
c7a7ac46 2540 && ((! SLOW_UNALIGNED_ACCESS)
e87b4f3f 2541 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
bbf6f052 2542 || PUSH_ROUNDING (align) == align)
bbf6f052
RK
2543 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2544 {
2545 /* Push padding now if padding above and stack grows down,
2546 or if padding below and stack grows up.
2547 But if space already allocated, this has already been done. */
2548 if (extra && args_addr == 0
2549 && where_pad != none && where_pad != stack_direction)
906c4e36 2550 anti_adjust_stack (GEN_INT (extra));
bbf6f052 2551
38a448ca 2552 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
bbf6f052 2553 INTVAL (size) - used, align);
921b3427 2554
956d6950 2555 if (flag_check_memory_usage && ! in_check_memory_usage)
921b3427
RK
2556 {
2557 rtx temp;
2558
956d6950 2559 in_check_memory_usage = 1;
921b3427 2560 temp = get_push_address (INTVAL (size) - used);
c85f7c16 2561 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427
RK
2562 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2563 temp, ptr_mode,
2564 XEXP (xinner, 0), ptr_mode,
 2565 GEN_INT (INTVAL (size) - used),
2566 TYPE_MODE (sizetype));
2567 else
2568 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2569 temp, ptr_mode,
 2570 GEN_INT (INTVAL (size) - used),
2571 TYPE_MODE (sizetype),
956d6950
JL
2572 GEN_INT (MEMORY_USE_RW),
2573 TYPE_MODE (integer_type_node));
2574 in_check_memory_usage = 0;
921b3427 2575 }
bbf6f052
RK
2576 }
2577 else
2578#endif /* PUSH_ROUNDING */
2579 {
2580 /* Otherwise make space on the stack and copy the data
2581 to the address of that space. */
2582
2583 /* Deduct words put into registers from the size we must copy. */
2584 if (partial != 0)
2585 {
2586 if (GET_CODE (size) == CONST_INT)
906c4e36 2587 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
2588 else
2589 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
2590 GEN_INT (used), NULL_RTX, 0,
2591 OPTAB_LIB_WIDEN);
bbf6f052
RK
2592 }
2593
2594 /* Get the address of the stack space.
2595 In this case, we do not deal with EXTRA separately.
2596 A single stack adjust will do. */
2597 if (! args_addr)
2598 {
2599 temp = push_block (size, extra, where_pad == downward);
2600 extra = 0;
2601 }
2602 else if (GET_CODE (args_so_far) == CONST_INT)
2603 temp = memory_address (BLKmode,
2604 plus_constant (args_addr,
2605 skip + INTVAL (args_so_far)));
2606 else
2607 temp = memory_address (BLKmode,
38a448ca
RH
2608 plus_constant (gen_rtx_PLUS (Pmode,
2609 args_addr,
2610 args_so_far),
bbf6f052 2611 skip));
956d6950 2612 if (flag_check_memory_usage && ! in_check_memory_usage)
921b3427
RK
2613 {
2614 rtx target;
2615
956d6950 2616 in_check_memory_usage = 1;
921b3427 2617 target = copy_to_reg (temp);
c85f7c16 2618 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427
RK
2619 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2620 target, ptr_mode,
2621 XEXP (xinner, 0), ptr_mode,
2622 size, TYPE_MODE (sizetype));
2623 else
2624 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2625 target, ptr_mode,
2626 size, TYPE_MODE (sizetype),
956d6950
JL
2627 GEN_INT (MEMORY_USE_RW),
2628 TYPE_MODE (integer_type_node));
2629 in_check_memory_usage = 0;
921b3427 2630 }
bbf6f052
RK
2631
2632 /* TEMP is the address of the block. Copy the data there. */
2633 if (GET_CODE (size) == CONST_INT
2634 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2635 < MOVE_RATIO))
2636 {
38a448ca 2637 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
bbf6f052
RK
2638 INTVAL (size), align);
2639 goto ret;
2640 }
e5e809f4 2641 else
bbf6f052 2642 {
e5e809f4
JL
2643 rtx opalign = GEN_INT (align);
2644 enum machine_mode mode;
2645 rtx target = gen_rtx (MEM, BLKmode, temp);
2646
2647 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2648 mode != VOIDmode;
2649 mode = GET_MODE_WIDER_MODE (mode))
c841050e 2650 {
e5e809f4
JL
2651 enum insn_code code = movstr_optab[(int) mode];
2652
2653 if (code != CODE_FOR_nothing
2654 && ((GET_CODE (size) == CONST_INT
2655 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2656 <= (GET_MODE_MASK (mode) >> 1)))
2657 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2658 && (insn_operand_predicate[(int) code][0] == 0
2659 || ((*insn_operand_predicate[(int) code][0])
2660 (target, BLKmode)))
2661 && (insn_operand_predicate[(int) code][1] == 0
2662 || ((*insn_operand_predicate[(int) code][1])
2663 (xinner, BLKmode)))
2664 && (insn_operand_predicate[(int) code][3] == 0
2665 || ((*insn_operand_predicate[(int) code][3])
2666 (opalign, VOIDmode))))
2667 {
2668 rtx op2 = convert_to_mode (mode, size, 1);
2669 rtx last = get_last_insn ();
2670 rtx pat;
2671
2672 if (insn_operand_predicate[(int) code][2] != 0
2673 && ! ((*insn_operand_predicate[(int) code][2])
2674 (op2, mode)))
2675 op2 = copy_to_mode_reg (mode, op2);
2676
2677 pat = GEN_FCN ((int) code) (target, xinner,
2678 op2, opalign);
2679 if (pat)
2680 {
2681 emit_insn (pat);
2682 goto ret;
2683 }
2684 else
2685 delete_insns_since (last);
2686 }
c841050e 2687 }
bbf6f052 2688 }
bbf6f052
RK
2689
2690#ifndef ACCUMULATE_OUTGOING_ARGS
2691 /* If the source is referenced relative to the stack pointer,
2692 copy it to another register to stabilize it. We do not need
2693 to do this if we know that we won't be changing sp. */
2694
2695 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2696 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2697 temp = copy_to_reg (temp);
2698#endif
2699
2700 /* Make inhibit_defer_pop nonzero around the library call
2701 to force it to pop the bcopy-arguments right away. */
2702 NO_DEFER_POP;
2703#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2704 emit_library_call (memcpy_libfunc, 0,
bbf6f052 2705 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
0fa83258
RK
2706 convert_to_mode (TYPE_MODE (sizetype),
2707 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2708 TYPE_MODE (sizetype));
bbf6f052 2709#else
d562e42e 2710 emit_library_call (bcopy_libfunc, 0,
bbf6f052 2711 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3b6f75e2
JW
2712 convert_to_mode (TYPE_MODE (integer_type_node),
2713 size,
2714 TREE_UNSIGNED (integer_type_node)),
2715 TYPE_MODE (integer_type_node));
bbf6f052
RK
2716#endif
2717 OK_DEFER_POP;
2718 }
2719 }
2720 else if (partial > 0)
2721 {
2722 /* Scalar partly in registers. */
2723
2724 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2725 int i;
2726 int not_stack;
2727 /* # words of start of argument
2728 that we must make space for but need not store. */
2729 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2730 int args_offset = INTVAL (args_so_far);
2731 int skip;
2732
2733 /* Push padding now if padding above and stack grows down,
2734 or if padding below and stack grows up.
2735 But if space already allocated, this has already been done. */
2736 if (extra && args_addr == 0
2737 && where_pad != none && where_pad != stack_direction)
906c4e36 2738 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2739
2740 /* If we make space by pushing it, we might as well push
2741 the real data. Otherwise, we can leave OFFSET nonzero
2742 and leave the space uninitialized. */
2743 if (args_addr == 0)
2744 offset = 0;
2745
2746 /* Now NOT_STACK gets the number of words that we don't need to
2747 allocate on the stack. */
2748 not_stack = partial - offset;
2749
2750 /* If the partial register-part of the arg counts in its stack size,
2751 skip the part of stack space corresponding to the registers.
2752 Otherwise, start copying to the beginning of the stack space,
2753 by setting SKIP to 0. */
e5e809f4 2754 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
bbf6f052
RK
2755
2756 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2757 x = validize_mem (force_const_mem (mode, x));
2758
2759 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2760 SUBREGs of such registers are not allowed. */
2761 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2762 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2763 x = copy_to_reg (x);
2764
2765 /* Loop over all the words allocated on the stack for this arg. */
2766 /* We can do it by words, because any scalar bigger than a word
2767 has a size a multiple of a word. */
2768#ifndef PUSH_ARGS_REVERSED
2769 for (i = not_stack; i < size; i++)
2770#else
2771 for (i = size - 1; i >= not_stack; i--)
2772#endif
2773 if (i >= not_stack + offset)
2774 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
2775 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2776 0, args_addr,
2777 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4
JL
2778 * UNITS_PER_WORD)),
2779 reg_parm_stack_space);
bbf6f052
RK
2780 }
2781 else
2782 {
2783 rtx addr;
921b3427 2784 rtx target = NULL_RTX;
bbf6f052
RK
2785
2786 /* Push padding now if padding above and stack grows down,
2787 or if padding below and stack grows up.
2788 But if space already allocated, this has already been done. */
2789 if (extra && args_addr == 0
2790 && where_pad != none && where_pad != stack_direction)
906c4e36 2791 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2792
2793#ifdef PUSH_ROUNDING
2794 if (args_addr == 0)
2795 addr = gen_push_operand ();
2796 else
2797#endif
921b3427
RK
2798 {
2799 if (GET_CODE (args_so_far) == CONST_INT)
2800 addr
2801 = memory_address (mode,
2802 plus_constant (args_addr,
2803 INTVAL (args_so_far)));
2804 else
38a448ca
RH
2805 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
2806 args_so_far));
921b3427
RK
2807 target = addr;
2808 }
bbf6f052 2809
38a448ca 2810 emit_move_insn (gen_rtx_MEM (mode, addr), x);
921b3427 2811
956d6950 2812 if (flag_check_memory_usage && ! in_check_memory_usage)
921b3427 2813 {
956d6950 2814 in_check_memory_usage = 1;
921b3427
RK
2815 if (target == 0)
2816 target = get_push_address (GET_MODE_SIZE (mode));
2817
c85f7c16 2818 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427
RK
2819 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2820 target, ptr_mode,
2821 XEXP (x, 0), ptr_mode,
2822 GEN_INT (GET_MODE_SIZE (mode)),
2823 TYPE_MODE (sizetype));
2824 else
2825 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2826 target, ptr_mode,
2827 GEN_INT (GET_MODE_SIZE (mode)),
2828 TYPE_MODE (sizetype),
956d6950
JL
2829 GEN_INT (MEMORY_USE_RW),
2830 TYPE_MODE (integer_type_node));
2831 in_check_memory_usage = 0;
921b3427 2832 }
bbf6f052
RK
2833 }
2834
2835 ret:
2836 /* If part should go in registers, copy that part
2837 into the appropriate registers. Do this now, at the end,
2838 since mem-to-mem copies above may do function calls. */
cd048831 2839 if (partial > 0 && reg != 0)
fffa9c1d
JW
2840 {
2841 /* Handle calls that pass values in multiple non-contiguous locations.
2842 The Irix 6 ABI has examples of this. */
2843 if (GET_CODE (reg) == PARALLEL)
2844 emit_group_load (reg, x);
2845 else
2846 move_block_to_reg (REGNO (reg), x, partial, mode);
2847 }
bbf6f052
RK
2848
2849 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 2850 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2851}
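/* Usage sketch: pushing one word-sized scalar argument with no partial
   registers and no preallocated argument block, mirroring the recursive
   call in the PARTIAL > 0 case above.  VAL is hypothetical; not
   compiled.  */
#if 0
static void
example_push (val)
     rtx val;
{
  emit_push_insn (val, word_mode, NULL_TREE, NULL_RTX,
                  UNITS_PER_WORD, 0, NULL_RTX, 0,
                  NULL_RTX, const0_rtx, 0);
}
#endif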
2852\f
bbf6f052
RK
2853/* Expand an assignment that stores the value of FROM into TO.
2854 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
2855 (This may contain a QUEUED rtx;
2856 if the value is constant, this rtx is a constant.)
2857 Otherwise, the returned value is NULL_RTX.
bbf6f052
RK
2858
2859 SUGGEST_REG is no longer actually used.
2860 It used to mean, copy the value through a register
2861 and return that register, if that is possible.
709f5be1 2862 We now use WANT_VALUE to decide whether to do this. */
bbf6f052
RK
2863
2864rtx
2865expand_assignment (to, from, want_value, suggest_reg)
2866 tree to, from;
2867 int want_value;
2868 int suggest_reg;
2869{
2870 register rtx to_rtx = 0;
2871 rtx result;
2872
2873 /* Don't crash if the lhs of the assignment was erroneous. */
2874
2875 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
2876 {
2877 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2878 return want_value ? result : NULL_RTX;
2879 }
bbf6f052
RK
2880
2881 /* Assignment of a structure component needs special treatment
2882 if the structure component's rtx is not simply a MEM.
6be58303
JW
2883 Assignment of an array element at a constant index, and assignment of
 2884 an array element in an unaligned packed structure field, have the same
2885 problem. */
bbf6f052 2886
08293add
RK
2887 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
2888 || TREE_CODE (to) == ARRAY_REF)
bbf6f052
RK
2889 {
2890 enum machine_mode mode1;
2891 int bitsize;
2892 int bitpos;
7bb0943f 2893 tree offset;
bbf6f052
RK
2894 int unsignedp;
2895 int volatilep = 0;
0088fcb1 2896 tree tem;
d78d243c 2897 int alignment;
0088fcb1
RK
2898
2899 push_temp_slots ();
839c4796
RK
2900 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2901 &unsignedp, &volatilep, &alignment);
bbf6f052
RK
2902
2903 /* If we are going to use store_bit_field and extract_bit_field,
2904 make sure to_rtx will be safe for multiple use. */
2905
2906 if (mode1 == VOIDmode && want_value)
2907 tem = stabilize_reference (tem);
2908
921b3427 2909 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
7bb0943f
RS
2910 if (offset != 0)
2911 {
906c4e36 2912 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
2913
2914 if (GET_CODE (to_rtx) != MEM)
2915 abort ();
2916 to_rtx = change_address (to_rtx, VOIDmode,
38a448ca
RH
2917 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
2918 force_reg (ptr_mode, offset_rtx)));
7bb0943f 2919 }
bbf6f052
RK
2920 if (volatilep)
2921 {
2922 if (GET_CODE (to_rtx) == MEM)
01188446
JW
2923 {
2924 /* When the offset is zero, to_rtx is the address of the
2925 structure we are storing into, and hence may be shared.
2926 We must make a new MEM before setting the volatile bit. */
2927 if (offset == 0)
effbcc6a
RK
2928 to_rtx = copy_rtx (to_rtx);
2929
01188446
JW
2930 MEM_VOLATILE_P (to_rtx) = 1;
2931 }
bbf6f052
RK
2932#if 0 /* This was turned off because, when a field is volatile
2933 in an object which is not volatile, the object may be in a register,
2934 and then we would abort over here. */
2935 else
2936 abort ();
2937#endif
2938 }
2939
956d6950
JL
2940 if (TREE_CODE (to) == COMPONENT_REF
2941 && TREE_READONLY (TREE_OPERAND (to, 1)))
2942 {
8bd6ecc2 2943 if (offset == 0)
956d6950
JL
2944 to_rtx = copy_rtx (to_rtx);
2945
2946 RTX_UNCHANGING_P (to_rtx) = 1;
2947 }
2948
921b3427
RK
2949 /* Check the access. */
2950 if (flag_check_memory_usage && GET_CODE (to_rtx) == MEM)
2951 {
2952 rtx to_addr;
2953 int size;
2954 int best_mode_size;
2955 enum machine_mode best_mode;
2956
2957 best_mode = get_best_mode (bitsize, bitpos,
2958 TYPE_ALIGN (TREE_TYPE (tem)),
2959 mode1, volatilep);
2960 if (best_mode == VOIDmode)
2961 best_mode = QImode;
2962
2963 best_mode_size = GET_MODE_BITSIZE (best_mode);
2964 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
2965 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
2966 size *= GET_MODE_SIZE (best_mode);
2967
2968 /* Check the access right of the pointer. */
e9a25f70
JL
2969 if (size)
2970 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
2971 to_addr, ptr_mode,
2972 GEN_INT (size), TYPE_MODE (sizetype),
956d6950
JL
2973 GEN_INT (MEMORY_USE_WO),
2974 TYPE_MODE (integer_type_node));
921b3427
RK
2975 }
2976
bbf6f052
RK
2977 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2978 (want_value
2979 /* Spurious cast makes HPUX compiler happy. */
2980 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2981 : VOIDmode),
2982 unsignedp,
2983 /* Required alignment of containing datum. */
d78d243c 2984 alignment,
bbf6f052
RK
2985 int_size_in_bytes (TREE_TYPE (tem)));
2986 preserve_temp_slots (result);
2987 free_temp_slots ();
0088fcb1 2988 pop_temp_slots ();
bbf6f052 2989
709f5be1
RS
2990 /* If the value is meaningful, convert RESULT to the proper mode.
2991 Otherwise, return nothing. */
5ffe63ed
RS
2992 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2993 TYPE_MODE (TREE_TYPE (from)),
2994 result,
2995 TREE_UNSIGNED (TREE_TYPE (to)))
709f5be1 2996 : NULL_RTX);
bbf6f052
RK
2997 }
2998
cd1db108
RS
2999 /* If the rhs is a function call and its value is not an aggregate,
3000 call the function before we start to compute the lhs.
3001 This is needed for correct code for cases such as
3002 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
3003 requires loading up part of an address in a separate insn.
3004
3005 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3006 a promoted variable where the zero- or sign- extension needs to be done.
3007 Handling this in the normal way is safe because no computation is done
3008 before the call. */
3009 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
b35cd3c1 3010 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1ad87b63 3011 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 3012 {
0088fcb1
RK
3013 rtx value;
3014
3015 push_temp_slots ();
3016 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 3017 if (to_rtx == 0)
921b3427 3018 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
aaf87c45 3019
fffa9c1d
JW
3020 /* Handle calls that return values in multiple non-contiguous locations.
3021 The Irix 6 ABI has examples of this. */
3022 if (GET_CODE (to_rtx) == PARALLEL)
3023 emit_group_load (to_rtx, value);
3024 else if (GET_MODE (to_rtx) == BLKmode)
db3ec607 3025 emit_block_move (to_rtx, value, expr_size (from),
ff9b5bd8 3026 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
aaf87c45
JL
3027 else
3028 emit_move_insn (to_rtx, value);
cd1db108
RS
3029 preserve_temp_slots (to_rtx);
3030 free_temp_slots ();
0088fcb1 3031 pop_temp_slots ();
709f5be1 3032 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
3033 }
3034
bbf6f052
RK
3035 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3036 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3037
3038 if (to_rtx == 0)
921b3427 3039 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
bbf6f052 3040
86d38d25
RS
3041 /* Don't move directly into a return register. */
3042 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3043 {
0088fcb1
RK
3044 rtx temp;
3045
3046 push_temp_slots ();
3047 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
86d38d25
RS
3048 emit_move_insn (to_rtx, temp);
3049 preserve_temp_slots (to_rtx);
3050 free_temp_slots ();
0088fcb1 3051 pop_temp_slots ();
709f5be1 3052 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
3053 }
3054
bbf6f052
RK
3055 /* In case we are returning the contents of an object which overlaps
3056 the place the value is being stored, use a safe function when copying
3057 a value through a pointer into a structure value return block. */
3058 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3059 && current_function_returns_struct
3060 && !current_function_returns_pcc_struct)
3061 {
0088fcb1
RK
3062 rtx from_rtx, size;
3063
3064 push_temp_slots ();
33a20d10 3065 size = expr_size (from);
921b3427
RK
3066 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3067 EXPAND_MEMORY_USE_DONT);
3068
3069 /* Copy the rights of the bitmap. */
3070 if (flag_check_memory_usage)
3071 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3072 XEXP (to_rtx, 0), ptr_mode,
3073 XEXP (from_rtx, 0), ptr_mode,
3074 convert_to_mode (TYPE_MODE (sizetype),
3075 size, TREE_UNSIGNED (sizetype)),
3076 TYPE_MODE (sizetype));
bbf6f052
RK
3077
3078#ifdef TARGET_MEM_FUNCTIONS
d562e42e 3079 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
3080 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3081 XEXP (from_rtx, 0), Pmode,
0fa83258
RK
3082 convert_to_mode (TYPE_MODE (sizetype),
3083 size, TREE_UNSIGNED (sizetype)),
26ba80fc 3084 TYPE_MODE (sizetype));
bbf6f052 3085#else
d562e42e 3086 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
3087 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3088 XEXP (to_rtx, 0), Pmode,
3b6f75e2
JW
3089 convert_to_mode (TYPE_MODE (integer_type_node),
3090 size, TREE_UNSIGNED (integer_type_node)),
3091 TYPE_MODE (integer_type_node));
bbf6f052
RK
3092#endif
3093
3094 preserve_temp_slots (to_rtx);
3095 free_temp_slots ();
0088fcb1 3096 pop_temp_slots ();
709f5be1 3097 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
3098 }
3099
3100 /* Compute FROM and store the value in the rtx we got. */
3101
0088fcb1 3102 push_temp_slots ();
bbf6f052
RK
3103 result = store_expr (from, to_rtx, want_value);
3104 preserve_temp_slots (result);
3105 free_temp_slots ();
0088fcb1 3106 pop_temp_slots ();
709f5be1 3107 return want_value ? result : NULL_RTX;
bbf6f052
RK
3108}
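/* Usage sketch, as a front end might expand the statement `x = y':
   WANT_VALUE is 0 when the assignment's value is discarded and nonzero
   when the assignment is itself a subexpression; SUGGEST_REG is dead,
   as noted above.  LHS/RHS are hypothetical trees; not compiled.  */
#if 0
static void
example_assign (lhs, rhs)
     tree lhs, rhs;
{
  expand_assignment (lhs, rhs, 0, 0);
}
#endif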
3109
3110/* Generate code for computing expression EXP,
3111 and storing the value into TARGET.
bbf6f052
RK
3112 TARGET may contain a QUEUED rtx.
3113
709f5be1
RS
3114 If WANT_VALUE is nonzero, return a copy of the value
3115 not in TARGET, so that we can be sure to use the proper
3116 value in a containing expression even if TARGET has something
3117 else stored in it. If possible, we copy the value through a pseudo
3118 and return that pseudo. Or, if the value is constant, we try to
3119 return the constant. In some cases, we return a pseudo
3120 copied *from* TARGET.
3121
3122 If the mode is BLKmode then we may return TARGET itself.
 3123 It turns out that in BLKmode it doesn't cause a problem,
3124 because C has no operators that could combine two different
3125 assignments into the same BLKmode object with different values
3126 with no sequence point. Will other languages need this to
3127 be more thorough?
3128
3129 If WANT_VALUE is 0, we return NULL, to make sure
3130 to catch quickly any cases where the caller uses the value
3131 and fails to set WANT_VALUE. */
bbf6f052
RK
3132
3133rtx
709f5be1 3134store_expr (exp, target, want_value)
bbf6f052
RK
3135 register tree exp;
3136 register rtx target;
709f5be1 3137 int want_value;
bbf6f052
RK
3138{
3139 register rtx temp;
3140 int dont_return_target = 0;
3141
3142 if (TREE_CODE (exp) == COMPOUND_EXPR)
3143 {
3144 /* Perform first part of compound expression, then assign from second
3145 part. */
3146 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3147 emit_queue ();
709f5be1 3148 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
3149 }
3150 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3151 {
3152 /* For conditional expression, get safe form of the target. Then
3153 test the condition, doing the appropriate assignment on either
3154 side. This avoids the creation of unnecessary temporaries.
3155 For non-BLKmode, it is more efficient not to do this. */
3156
3157 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3158
3159 emit_queue ();
3160 target = protect_from_queue (target, 1);
3161
dabf8373 3162 do_pending_stack_adjust ();
bbf6f052
RK
3163 NO_DEFER_POP;
3164 jumpifnot (TREE_OPERAND (exp, 0), lab1);
956d6950 3165 start_cleanup_deferral ();
709f5be1 3166 store_expr (TREE_OPERAND (exp, 1), target, 0);
956d6950 3167 end_cleanup_deferral ();
bbf6f052
RK
3168 emit_queue ();
3169 emit_jump_insn (gen_jump (lab2));
3170 emit_barrier ();
3171 emit_label (lab1);
956d6950 3172 start_cleanup_deferral ();
709f5be1 3173 store_expr (TREE_OPERAND (exp, 2), target, 0);
956d6950 3174 end_cleanup_deferral ();
bbf6f052
RK
3175 emit_queue ();
3176 emit_label (lab2);
3177 OK_DEFER_POP;
a3a58acc 3178
709f5be1 3179 return want_value ? target : NULL_RTX;
bbf6f052 3180 }
709f5be1 3181 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
bbf6f052
RK
3182 && GET_MODE (target) != BLKmode)
3183 /* If target is in memory and caller wants value in a register instead,
3184 arrange that. Pass TARGET as target for expand_expr so that,
709f5be1 3185 if EXP is another assignment, WANT_VALUE will be nonzero for it.
c2e6aff6
RS
3186 We know expand_expr will not use the target in that case.
3187 Don't do this if TARGET is volatile because we are supposed
3188 to write it and then read it. */
bbf6f052 3189 {
906c4e36 3190 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
bbf6f052
RK
3191 GET_MODE (target), 0);
3192 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3193 temp = copy_to_reg (temp);
3194 dont_return_target = 1;
3195 }
3196 else if (queued_subexp_p (target))
709f5be1
RS
3197 /* If target contains a postincrement, let's not risk
3198 using it as the place to generate the rhs. */
bbf6f052
RK
3199 {
3200 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3201 {
3202 /* Expand EXP into a new pseudo. */
3203 temp = gen_reg_rtx (GET_MODE (target));
3204 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3205 }
3206 else
906c4e36 3207 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
709f5be1
RS
3208
3209 /* If target is volatile, ANSI requires accessing the value
3210 *from* the target, if it is accessed. So make that happen.
3211 In no case return the target itself. */
3212 if (! MEM_VOLATILE_P (target) && want_value)
3213 dont_return_target = 1;
bbf6f052 3214 }
1499e0a8
RK
3215 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
 3216 /* If this is a scalar in a register that is stored in a wider mode
3217 than the declared mode, compute the result into its declared mode
3218 and then convert to the wider mode. Our value is the computed
3219 expression. */
3220 {
5a32d038 3221 /* If we don't want a value, we can do the conversion inside EXP,
f635a84d
RK
3222 which will often result in some optimizations. Do the conversion
3223 in two steps: first change the signedness, if needed, then
ab6c58f1
RK
 3224 the extension. But don't do this if the type of EXP is a subtype
3225 of something else since then the conversion might involve
3226 more than just converting modes. */
3227 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3228 && TREE_TYPE (TREE_TYPE (exp)) == 0)
f635a84d
RK
3229 {
3230 if (TREE_UNSIGNED (TREE_TYPE (exp))
3231 != SUBREG_PROMOTED_UNSIGNED_P (target))
3232 exp
3233 = convert
3234 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3235 TREE_TYPE (exp)),
3236 exp);
3237
3238 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3239 SUBREG_PROMOTED_UNSIGNED_P (target)),
3240 exp);
3241 }
5a32d038 3242
1499e0a8 3243 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
b258707c 3244
766f36c7 3245 /* If TEMP is a volatile MEM and we want a result value, make
f29369b9
RK
3246 the access now so it gets done only once. Likewise if
3247 it contains TARGET. */
3248 if (GET_CODE (temp) == MEM && want_value
3249 && (MEM_VOLATILE_P (temp)
3250 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
766f36c7
RK
3251 temp = copy_to_reg (temp);
3252
b258707c
RS
3253 /* If TEMP is a VOIDmode constant, use convert_modes to make
3254 sure that we properly convert it. */
3255 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3256 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3257 TYPE_MODE (TREE_TYPE (exp)), temp,
3258 SUBREG_PROMOTED_UNSIGNED_P (target));
3259
1499e0a8
RK
3260 convert_move (SUBREG_REG (target), temp,
3261 SUBREG_PROMOTED_UNSIGNED_P (target));
709f5be1 3262 return want_value ? temp : NULL_RTX;
1499e0a8 3263 }
bbf6f052
RK
3264 else
3265 {
3266 temp = expand_expr (exp, target, GET_MODE (target), 0);
766f36c7 3267 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
3268 If TARGET is a volatile mem ref, either return TARGET
3269 or return a reg copied *from* TARGET; ANSI requires this.
3270
3271 Otherwise, if TEMP is not TARGET, return TEMP
3272 if it is constant (for efficiency),
3273 or if we really want the correct value. */
bbf6f052
RK
3274 if (!(target && GET_CODE (target) == REG
3275 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1 3276 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
effbcc6a 3277 && ! rtx_equal_p (temp, target)
709f5be1 3278 && (CONSTANT_P (temp) || want_value))
bbf6f052
RK
3279 dont_return_target = 1;
3280 }
3281
b258707c
RS
3282 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3283 the same as that of TARGET, adjust the constant. This is needed, for
3284 example, in case it is a CONST_DOUBLE and we want only a word-sized
3285 value. */
3286 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 3287 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
3288 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3289 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3290 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3291
921b3427
RK
3292 if (flag_check_memory_usage
3293 && GET_CODE (target) == MEM
3294 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3295 {
3296 if (GET_CODE (temp) == MEM)
3297 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3298 XEXP (target, 0), ptr_mode,
3299 XEXP (temp, 0), ptr_mode,
3300 expr_size (exp), TYPE_MODE (sizetype));
3301 else
3302 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3303 XEXP (target, 0), ptr_mode,
3304 expr_size (exp), TYPE_MODE (sizetype),
956d6950
JL
3305 GEN_INT (MEMORY_USE_WO),
3306 TYPE_MODE (integer_type_node));
921b3427
RK
3307 }
3308
bbf6f052
RK
3309 /* If value was not generated in the target, store it there.
 3310 Convert the value to TARGET's type first if necessary. */
3311
effbcc6a 3312 if (! rtx_equal_p (temp, target) && TREE_CODE (exp) != ERROR_MARK)
bbf6f052
RK
3313 {
3314 target = protect_from_queue (target, 1);
3315 if (GET_MODE (temp) != GET_MODE (target)
3316 && GET_MODE (temp) != VOIDmode)
3317 {
3318 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3319 if (dont_return_target)
3320 {
3321 /* In this case, we will return TEMP,
3322 so make sure it has the proper mode.
3323 But don't forget to store the value into TARGET. */
3324 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3325 emit_move_insn (target, temp);
3326 }
3327 else
3328 convert_move (target, temp, unsignedp);
3329 }
3330
3331 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3332 {
3333 /* Handle copying a string constant into an array.
3334 The string constant may be shorter than the array.
3335 So copy just the string's actual length, and clear the rest. */
3336 rtx size;
22619c3f 3337 rtx addr;
bbf6f052 3338
e87b4f3f
RS
3339 /* Get the size of the data type of the string,
3340 which is actually the size of the target. */
3341 size = expr_size (exp);
3342 if (GET_CODE (size) == CONST_INT
3343 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3344 emit_block_move (target, temp, size,
3345 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3346 else
bbf6f052 3347 {
e87b4f3f
RS
3348 /* Compute the size of the data to copy from the string. */
3349 tree copy_size
c03b7665 3350 = size_binop (MIN_EXPR,
b50d17a1 3351 make_tree (sizetype, size),
c03b7665
RK
3352 convert (sizetype,
3353 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
906c4e36
RK
3354 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3355 VOIDmode, 0);
e87b4f3f
RS
3356 rtx label = 0;
3357
3358 /* Copy that much. */
3359 emit_block_move (target, temp, copy_size_rtx,
3360 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3361
88f63c77
RK
3362 /* Figure out how much is left in TARGET that we have to clear.
3363 Do all calculations in ptr_mode. */
3364
3365 addr = XEXP (target, 0);
3366 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3367
e87b4f3f
RS
3368 if (GET_CODE (copy_size_rtx) == CONST_INT)
3369 {
88f63c77 3370 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
22619c3f 3371 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
e87b4f3f
RS
3372 }
3373 else
3374 {
88f63c77
RK
3375 addr = force_reg (ptr_mode, addr);
3376 addr = expand_binop (ptr_mode, add_optab, addr,
906c4e36
RK
3377 copy_size_rtx, NULL_RTX, 0,
3378 OPTAB_LIB_WIDEN);
e87b4f3f 3379
88f63c77 3380 size = expand_binop (ptr_mode, sub_optab, size,
906c4e36
RK
3381 copy_size_rtx, NULL_RTX, 0,
3382 OPTAB_LIB_WIDEN);
e87b4f3f 3383
906c4e36 3384 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
e87b4f3f
RS
3385 GET_MODE (size), 0, 0);
3386 label = gen_label_rtx ();
3387 emit_jump_insn (gen_blt (label));
3388 }
3389
3390 if (size != const0_rtx)
3391 {
921b3427
RK
3392 /* Be sure we can write on ADDR. */
3393 if (flag_check_memory_usage)
3394 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3395 addr, ptr_mode,
3396 size, TYPE_MODE (sizetype),
956d6950
JL
3397 GEN_INT (MEMORY_USE_WO),
3398 TYPE_MODE (integer_type_node));
bbf6f052 3399#ifdef TARGET_MEM_FUNCTIONS
3b6f75e2 3400 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
86242483 3401 addr, ptr_mode,
3b6f75e2
JW
3402 const0_rtx, TYPE_MODE (integer_type_node),
3403 convert_to_mode (TYPE_MODE (sizetype),
3404 size,
3405 TREE_UNSIGNED (sizetype)),
3406 TYPE_MODE (sizetype));
bbf6f052 3407#else
d562e42e 3408 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
86242483 3409 addr, ptr_mode,
3b6f75e2
JW
3410 convert_to_mode (TYPE_MODE (integer_type_node),
3411 size,
3412 TREE_UNSIGNED (integer_type_node)),
3413 TYPE_MODE (integer_type_node));
bbf6f052 3414#endif
e87b4f3f 3415 }
22619c3f 3416
e87b4f3f
RS
3417 if (label)
3418 emit_label (label);
bbf6f052
RK
3419 }
3420 }
fffa9c1d
JW
3421 /* Handle calls that return values in multiple non-contiguous locations.
3422 The Irix 6 ABI has examples of this. */
3423 else if (GET_CODE (target) == PARALLEL)
3424 emit_group_load (target, temp);
bbf6f052
RK
3425 else if (GET_MODE (temp) == BLKmode)
3426 emit_block_move (target, temp, expr_size (exp),
3427 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3428 else
3429 emit_move_insn (target, temp);
3430 }
709f5be1 3431
766f36c7
RK
3432 /* If we don't want a value, return NULL_RTX. */
3433 if (! want_value)
3434 return NULL_RTX;
3435
3436 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3437 ??? The latter test doesn't seem to make sense. */
3438 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 3439 return temp;
766f36c7
RK
3440
3441 /* Return TARGET itself if it is a hard register. */
3442 else if (want_value && GET_MODE (target) != BLKmode
3443 && ! (GET_CODE (target) == REG
3444 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 3445 return copy_to_reg (target);
766f36c7
RK
3446
3447 else
709f5be1 3448 return target;
bbf6f052
RK
3449}
3450\f
9de08200
RK
3451/* Return 1 if EXP just contains zeros. */
3452
3453static int
3454is_zeros_p (exp)
3455 tree exp;
3456{
3457 tree elt;
3458
3459 switch (TREE_CODE (exp))
3460 {
3461 case CONVERT_EXPR:
3462 case NOP_EXPR:
3463 case NON_LVALUE_EXPR:
3464 return is_zeros_p (TREE_OPERAND (exp, 0));
3465
3466 case INTEGER_CST:
3467 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3468
3469 case COMPLEX_CST:
3470 return
3471 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3472
3473 case REAL_CST:
41c9120b 3474 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
9de08200
RK
3475
3476 case CONSTRUCTOR:
e1a43f73
PB
3477 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3478 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
9de08200
RK
3479 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3480 if (! is_zeros_p (TREE_VALUE (elt)))
3481 return 0;
3482
3483 return 1;
e9a25f70
JL
3484
3485 default:
3486 return 0;
9de08200 3487 }
9de08200
RK
3488}
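/* For illustration (editorial sketch, not in the original source):
   at the C level, is_zeros_p holds for initializers such as `0',
   `(char) 0', `0.0' and `{ 0, { 0, 0 } }', but not for `{ 0, 1 }',
   nor (on IEEE targets) for `-0.0', since REAL_VALUES_IDENTICAL
   distinguishes minus zero from dconst0.  */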
3489
3490/* Return 1 if EXP contains mostly (3/4) zeros. */
3491
3492static int
3493mostly_zeros_p (exp)
3494 tree exp;
3495{
9de08200
RK
3496 if (TREE_CODE (exp) == CONSTRUCTOR)
3497 {
e1a43f73
PB
3498 int elts = 0, zeros = 0;
3499 tree elt = CONSTRUCTOR_ELTS (exp);
3500 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3501 {
3502 /* If there are no ranges of true bits, it is all zero. */
3503 return elt == NULL_TREE;
3504 }
3505 for (; elt; elt = TREE_CHAIN (elt))
3506 {
3507 /* We do not handle the case where the index is a RANGE_EXPR,
3508 so the statistic will be somewhat inaccurate.
3509 We do make a more accurate count in store_constructor itself,
 3510 so, since this function is used only for nested array elements,
0f41302f 3511 this should be close enough. */
e1a43f73
PB
3512 if (mostly_zeros_p (TREE_VALUE (elt)))
3513 zeros++;
3514 elts++;
3515 }
9de08200
RK
3516
3517 return 4 * zeros >= 3 * elts;
3518 }
3519
3520 return is_zeros_p (exp);
3521}
3522\f
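/* A sketch of the 3/4 test above, restated over a plain array of
   ints.  The real code walks CONSTRUCTOR_ELTS trees; this fragment
   is editorial, only meant to make the arithmetic concrete, and is
   not used anywhere.  */
#if 0
static int
mostly_zeros_example (vals, n)
     int *vals;
     int n;
{
  int i, zeros = 0;

  for (i = 0; i < n; i++)
    if (vals[i] == 0)
      zeros++;

  /* With n == 8 and zeros == 6: 4*6 == 24 >= 3*8 == 24,
     so the array counts as mostly zero.  */
  return 4 * zeros >= 3 * n;
}
#endif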
e1a43f73
PB
3523/* Helper function for store_constructor.
3524 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3525 TYPE is the type of the CONSTRUCTOR, not the element type.
23ccec44
JW
3526 CLEARED is as for store_constructor.
3527
3528 This provides a recursive shortcut back to store_constructor when it isn't
3529 necessary to go through store_field. This is so that we can pass through
3530 the cleared field to let store_constructor know that we may not have to
3531 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
3532
3533static void
3534store_constructor_field (target, bitsize, bitpos,
3535 mode, exp, type, cleared)
3536 rtx target;
3537 int bitsize, bitpos;
3538 enum machine_mode mode;
3539 tree exp, type;
3540 int cleared;
3541{
3542 if (TREE_CODE (exp) == CONSTRUCTOR
23ccec44
JW
3543 && bitpos % BITS_PER_UNIT == 0
3544 /* If we have a non-zero bitpos for a register target, then we just
3545 let store_field do the bitfield handling. This is unlikely to
 3546 generate unnecessary clear instructions anyway. */
3547 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 3548 {
126e5b0d
JW
3549 if (bitpos != 0)
3550 target = change_address (target, VOIDmode,
3551 plus_constant (XEXP (target, 0),
3552 bitpos / BITS_PER_UNIT));
3553 store_constructor (exp, target, cleared);
e1a43f73
PB
3554 }
3555 else
3556 store_field (target, bitsize, bitpos, mode, exp,
3557 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3558 int_size_in_bytes (type));
3559}
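/* For illustration (editorial sketch, not in the original source):
   given `struct { struct { int a, b; } in; int c; } v = { { 0, 0 }, 5 };'
   the outer constructor may clear all of `v' once; the shortcut
   above then passes CLEARED down, so the nested constructor for
   `v.in' need not store its all-zero fields again.  */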
3560
bbf6f052 3561/* Store the value of constructor EXP into the rtx TARGET.
e1a43f73 3562 TARGET is either a REG or a MEM.
0f41302f 3563 CLEARED is true if TARGET is known to have been zeroed. */
bbf6f052
RK
3564
3565static void
e1a43f73 3566store_constructor (exp, target, cleared)
bbf6f052
RK
3567 tree exp;
3568 rtx target;
e1a43f73 3569 int cleared;
bbf6f052 3570{
4af3895e
JVA
3571 tree type = TREE_TYPE (exp);
3572
bbf6f052
RK
3573 /* We know our target cannot conflict, since safe_from_p has been called. */
3574#if 0
3575 /* Don't try copying piece by piece into a hard register
3576 since that is vulnerable to being clobbered by EXP.
3577 Instead, construct in a pseudo register and then copy it all. */
3578 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3579 {
3580 rtx temp = gen_reg_rtx (GET_MODE (target));
e1a43f73 3581 store_constructor (exp, temp, 0);
bbf6f052
RK
3582 emit_move_insn (target, temp);
3583 return;
3584 }
3585#endif
3586
e44842fe
RK
3587 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3588 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052
RK
3589 {
3590 register tree elt;
3591
4af3895e 3592 /* Inform later passes that the whole union value is dead. */
e44842fe
RK
3593 if (TREE_CODE (type) == UNION_TYPE
3594 || TREE_CODE (type) == QUAL_UNION_TYPE)
38a448ca 3595 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4af3895e
JVA
3596
3597 /* If we are building a static constructor into a register,
3598 set the initial value as zero so we can fold the value into
67225c15
RK
3599 a constant. But if more than one register is involved,
3600 this probably loses. */
3601 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3602 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200
RK
3603 {
3604 if (! cleared)
e9a25f70 3605 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4af3895e 3606
9de08200
RK
3607 cleared = 1;
3608 }
3609
3610 /* If the constructor has fewer fields than the structure
3611 or if we are initializing the structure to mostly zeros,
bbf6f052 3612 clear the whole structure first. */
9de08200
RK
3613 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3614 != list_length (TYPE_FIELDS (type)))
3615 || mostly_zeros_p (exp))
3616 {
3617 if (! cleared)
3618 clear_storage (target, expr_size (exp),
3619 TYPE_ALIGN (type) / BITS_PER_UNIT);
3620
3621 cleared = 1;
3622 }
bbf6f052
RK
3623 else
3624 /* Inform later passes that the old value is dead. */
38a448ca 3625 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
3626
3627 /* Store each element of the constructor into
3628 the corresponding field of TARGET. */
3629
3630 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3631 {
3632 register tree field = TREE_PURPOSE (elt);
3633 register enum machine_mode mode;
3634 int bitsize;
b50d17a1 3635 int bitpos = 0;
bbf6f052 3636 int unsignedp;
b50d17a1
RK
3637 tree pos, constant = 0, offset = 0;
3638 rtx to_rtx = target;
bbf6f052 3639
f32fd778
RS
3640 /* Just ignore missing fields.
3641 We cleared the whole structure, above,
3642 if any fields are missing. */
3643 if (field == 0)
3644 continue;
3645
e1a43f73
PB
3646 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3647 continue;
9de08200 3648
bbf6f052
RK
3649 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3650 unsignedp = TREE_UNSIGNED (field);
3651 mode = DECL_MODE (field);
3652 if (DECL_BIT_FIELD (field))
3653 mode = VOIDmode;
3654
b50d17a1
RK
3655 pos = DECL_FIELD_BITPOS (field);
3656 if (TREE_CODE (pos) == INTEGER_CST)
3657 constant = pos;
3658 else if (TREE_CODE (pos) == PLUS_EXPR
3659 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3660 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3661 else
3662 offset = pos;
3663
3664 if (constant)
cd11b87e 3665 bitpos = TREE_INT_CST_LOW (constant);
b50d17a1
RK
3666
3667 if (offset)
3668 {
3669 rtx offset_rtx;
3670
3671 if (contains_placeholder_p (offset))
3672 offset = build (WITH_RECORD_EXPR, sizetype,
956d6950 3673 offset, make_tree (TREE_TYPE (exp), target));
bbf6f052 3674
b50d17a1
RK
3675 offset = size_binop (FLOOR_DIV_EXPR, offset,
3676 size_int (BITS_PER_UNIT));
bbf6f052 3677
b50d17a1
RK
3678 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3679 if (GET_CODE (to_rtx) != MEM)
3680 abort ();
3681
3682 to_rtx
3683 = change_address (to_rtx, VOIDmode,
38a448ca 3684 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
88f63c77 3685 force_reg (ptr_mode, offset_rtx)));
b50d17a1 3686 }
cf04eb80
RK
3687 if (TREE_READONLY (field))
3688 {
9151b3bf 3689 if (GET_CODE (to_rtx) == MEM)
effbcc6a
RK
3690 to_rtx = copy_rtx (to_rtx);
3691
cf04eb80
RK
3692 RTX_UNCHANGING_P (to_rtx) = 1;
3693 }
3694
e1a43f73
PB
3695 store_constructor_field (to_rtx, bitsize, bitpos,
3696 mode, TREE_VALUE (elt), type, cleared);
bbf6f052
RK
3697 }
3698 }
4af3895e 3699 else if (TREE_CODE (type) == ARRAY_TYPE)
bbf6f052
RK
3700 {
3701 register tree elt;
3702 register int i;
e1a43f73 3703 int need_to_clear;
4af3895e 3704 tree domain = TYPE_DOMAIN (type);
906c4e36
RK
3705 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3706 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4af3895e 3707 tree elttype = TREE_TYPE (type);
bbf6f052 3708
e1a43f73
PB
3709 /* If the constructor has fewer elements than the array,
 3710 clear the whole array first. Similarly if this is a
 3711 static constructor of a non-BLKmode object. */
3712 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3713 need_to_clear = 1;
3714 else
3715 {
3716 HOST_WIDE_INT count = 0, zero_count = 0;
3717 need_to_clear = 0;
3718 /* This loop is a more accurate version of the loop in
3719 mostly_zeros_p (it handles RANGE_EXPR in an index).
3720 It is also needed to check for missing elements. */
3721 for (elt = CONSTRUCTOR_ELTS (exp);
3722 elt != NULL_TREE;
df0faff1 3723 elt = TREE_CHAIN (elt))
e1a43f73
PB
3724 {
3725 tree index = TREE_PURPOSE (elt);
3726 HOST_WIDE_INT this_node_count;
3727 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3728 {
3729 tree lo_index = TREE_OPERAND (index, 0);
3730 tree hi_index = TREE_OPERAND (index, 1);
3731 if (TREE_CODE (lo_index) != INTEGER_CST
3732 || TREE_CODE (hi_index) != INTEGER_CST)
3733 {
3734 need_to_clear = 1;
3735 break;
3736 }
3737 this_node_count = TREE_INT_CST_LOW (hi_index)
3738 - TREE_INT_CST_LOW (lo_index) + 1;
3739 }
3740 else
3741 this_node_count = 1;
3742 count += this_node_count;
3743 if (mostly_zeros_p (TREE_VALUE (elt)))
3744 zero_count += this_node_count;
3745 }
8e958f70 3746 /* Clear the entire array first if there are any missing elements,
0f41302f 3747 or if the incidence of zero elements is >= 75%. */
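	 /* For illustration (editorial note): with count == 10 and
	    zero_count == 8, 4*8 == 32 >= 3*10 == 30, so the whole
	    array is cleared first and only the nonzero elements are
	    stored individually.  */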
8e958f70
PB
3748 if (count < maxelt - minelt + 1
3749 || 4 * zero_count >= 3 * count)
e1a43f73
PB
3750 need_to_clear = 1;
3751 }
3752 if (need_to_clear)
9de08200
RK
3753 {
3754 if (! cleared)
3755 clear_storage (target, expr_size (exp),
3756 TYPE_ALIGN (type) / BITS_PER_UNIT);
9de08200
RK
3757 cleared = 1;
3758 }
bbf6f052
RK
3759 else
3760 /* Inform later passes that the old value is dead. */
38a448ca 3761 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
3762
3763 /* Store each element of the constructor into
3764 the corresponding element of TARGET, determined
3765 by counting the elements. */
3766 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3767 elt;
3768 elt = TREE_CHAIN (elt), i++)
3769 {
3770 register enum machine_mode mode;
3771 int bitsize;
3772 int bitpos;
3773 int unsignedp;
e1a43f73 3774 tree value = TREE_VALUE (elt);
03dc44a6
RS
3775 tree index = TREE_PURPOSE (elt);
3776 rtx xtarget = target;
bbf6f052 3777
e1a43f73
PB
3778 if (cleared && is_zeros_p (value))
3779 continue;
9de08200 3780
bbf6f052
RK
3781 mode = TYPE_MODE (elttype);
3782 bitsize = GET_MODE_BITSIZE (mode);
3783 unsignedp = TREE_UNSIGNED (elttype);
3784
e1a43f73
PB
3785 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3786 {
3787 tree lo_index = TREE_OPERAND (index, 0);
3788 tree hi_index = TREE_OPERAND (index, 1);
3789 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3790 struct nesting *loop;
05c0b405
PB
3791 HOST_WIDE_INT lo, hi, count;
3792 tree position;
e1a43f73 3793
0f41302f 3794 /* If the range is constant and "small", unroll the loop. */
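	      /* For illustration (editorial note): the test below
		 accepts a non-memory target, a range of at most 2
		 elements, or at most 40 bytes (40 * 8 bits) in
		 total -- e.g. a range spanning ten 4-byte ints.  */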
e1a43f73 3795 if (TREE_CODE (lo_index) == INTEGER_CST
05c0b405
PB
3796 && TREE_CODE (hi_index) == INTEGER_CST
3797 && (lo = TREE_INT_CST_LOW (lo_index),
3798 hi = TREE_INT_CST_LOW (hi_index),
3799 count = hi - lo + 1,
3800 (GET_CODE (target) != MEM
3801 || count <= 2
3802 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3803 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3804 <= 40 * 8))))
e1a43f73 3805 {
05c0b405
PB
3806 lo -= minelt; hi -= minelt;
3807 for (; lo <= hi; lo++)
e1a43f73 3808 {
05c0b405
PB
3809 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3810 store_constructor_field (target, bitsize, bitpos,
3811 mode, value, type, cleared);
e1a43f73
PB
3812 }
3813 }
3814 else
3815 {
3816 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3817 loop_top = gen_label_rtx ();
3818 loop_end = gen_label_rtx ();
3819
3820 unsignedp = TREE_UNSIGNED (domain);
3821
3822 index = build_decl (VAR_DECL, NULL_TREE, domain);
3823
3824 DECL_RTL (index) = index_r
3825 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3826 &unsignedp, 0));
3827
3828 if (TREE_CODE (value) == SAVE_EXPR
3829 && SAVE_EXPR_RTL (value) == 0)
3830 {
0f41302f
MS
3831 /* Make sure value gets expanded once before the
3832 loop. */
e1a43f73
PB
3833 expand_expr (value, const0_rtx, VOIDmode, 0);
3834 emit_queue ();
3835 }
3836 store_expr (lo_index, index_r, 0);
3837 loop = expand_start_loop (0);
3838
0f41302f 3839 /* Assign value to element index. */
e1a43f73
PB
3840 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3841 size_int (BITS_PER_UNIT));
3842 position = size_binop (MULT_EXPR,
3843 size_binop (MINUS_EXPR, index,
3844 TYPE_MIN_VALUE (domain)),
3845 position);
3846 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 3847 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
e1a43f73
PB
3848 xtarget = change_address (target, mode, addr);
3849 if (TREE_CODE (value) == CONSTRUCTOR)
05c0b405 3850 store_constructor (value, xtarget, cleared);
e1a43f73
PB
3851 else
3852 store_expr (value, xtarget, 0);
3853
3854 expand_exit_loop_if_false (loop,
3855 build (LT_EXPR, integer_type_node,
3856 index, hi_index));
3857
3858 expand_increment (build (PREINCREMENT_EXPR,
3859 TREE_TYPE (index),
7b8b9722 3860 index, integer_one_node), 0, 0);
e1a43f73
PB
3861 expand_end_loop ();
3862 emit_label (loop_end);
3863
 3864 /* Needed by stupid register allocation, to extend the
3865 lifetime of pseudo-regs used by target past the end
3866 of the loop. */
38a448ca 3867 emit_insn (gen_rtx_USE (GET_MODE (target), target));
e1a43f73
PB
3868 }
3869 }
3870 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
5b6c44ff 3871 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
03dc44a6 3872 {
e1a43f73 3873 rtx pos_rtx, addr;
03dc44a6
RS
3874 tree position;
3875
5b6c44ff
RK
3876 if (index == 0)
3877 index = size_int (i);
3878
e1a43f73
PB
3879 if (minelt)
3880 index = size_binop (MINUS_EXPR, index,
3881 TYPE_MIN_VALUE (domain));
5b6c44ff
RK
3882 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3883 size_int (BITS_PER_UNIT));
3884 position = size_binop (MULT_EXPR, index, position);
03dc44a6 3885 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 3886 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
03dc44a6 3887 xtarget = change_address (target, mode, addr);
e1a43f73 3888 store_expr (value, xtarget, 0);
03dc44a6
RS
3889 }
3890 else
3891 {
3892 if (index != 0)
7c314719 3893 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
03dc44a6
RS
3894 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3895 else
3896 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
e1a43f73
PB
3897 store_constructor_field (target, bitsize, bitpos,
3898 mode, value, type, cleared);
03dc44a6 3899 }
bbf6f052
RK
3900 }
3901 }
071a6595
PB
 3902 /* Set constructor assignments. */
3903 else if (TREE_CODE (type) == SET_TYPE)
3904 {
e1a43f73 3905 tree elt = CONSTRUCTOR_ELTS (exp);
e1a43f73 3906 int nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
3907 tree domain = TYPE_DOMAIN (type);
3908 tree domain_min, domain_max, bitlength;
3909
9faa82d8 3910 /* The default implementation strategy is to extract the constant
071a6595
PB
3911 parts of the constructor, use that to initialize the target,
3912 and then "or" in whatever non-constant ranges we need in addition.
3913
3914 If a large set is all zero or all ones, it is
3915 probably better to set it using memset (if available) or bzero.
 3916 Also, if a large set has just a single range, it may be
 3917 better to first clear the whole set (using
0f41302f 3918 bzero/memset), and then set the bits we want. */
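      /* For illustration (editorial note): for a Pascal-style
	 constructor such as `[0..7, lo..hi]', the constant range
	 0..7 is packed into host words below, while the variable
	 range lo..hi is set at run time through the __setbits
	 library call further down.  */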
071a6595 3919
0f41302f 3920 /* Check for all zeros. */
e1a43f73 3921 if (elt == NULL_TREE)
071a6595 3922 {
e1a43f73
PB
3923 if (!cleared)
3924 clear_storage (target, expr_size (exp),
3925 TYPE_ALIGN (type) / BITS_PER_UNIT);
071a6595
PB
3926 return;
3927 }
3928
071a6595
PB
3929 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3930 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3931 bitlength = size_binop (PLUS_EXPR,
3932 size_binop (MINUS_EXPR, domain_max, domain_min),
3933 size_one_node);
3934
e1a43f73
PB
3935 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3936 abort ();
3937 nbits = TREE_INT_CST_LOW (bitlength);
3938
3939 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3940 are "complicated" (more than one range), initialize (the
3941 constant parts) by copying from a constant. */
3942 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3943 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 3944 {
b4ee5a72
PB
3945 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3946 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
0f41302f 3947 char *bit_buffer = (char *) alloca (nbits);
b4ee5a72
PB
3948 HOST_WIDE_INT word = 0;
3949 int bit_pos = 0;
3950 int ibit = 0;
0f41302f 3951 int offset = 0; /* In bytes from beginning of set. */
e1a43f73 3952 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 3953 for (;;)
071a6595 3954 {
b4ee5a72
PB
3955 if (bit_buffer[ibit])
3956 {
b09f3348 3957 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
3958 word |= (1 << (set_word_size - 1 - bit_pos));
3959 else
3960 word |= 1 << bit_pos;
3961 }
3962 bit_pos++; ibit++;
3963 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 3964 {
e1a43f73
PB
3965 if (word != 0 || ! cleared)
3966 {
3967 rtx datum = GEN_INT (word);
3968 rtx to_rtx;
0f41302f
MS
3969 /* The assumption here is that it is safe to use
3970 XEXP if the set is multi-word, but not if
3971 it's single-word. */
e1a43f73
PB
3972 if (GET_CODE (target) == MEM)
3973 {
3974 to_rtx = plus_constant (XEXP (target, 0), offset);
3975 to_rtx = change_address (target, mode, to_rtx);
3976 }
3977 else if (offset == 0)
3978 to_rtx = target;
3979 else
3980 abort ();
3981 emit_move_insn (to_rtx, datum);
3982 }
b4ee5a72
PB
3983 if (ibit == nbits)
3984 break;
3985 word = 0;
3986 bit_pos = 0;
3987 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
3988 }
3989 }
071a6595 3990 }
e1a43f73
PB
3991 else if (!cleared)
3992 {
0f41302f 3993 /* Don't bother clearing storage if the set is all ones. */
e1a43f73
PB
3994 if (TREE_CHAIN (elt) != NULL_TREE
3995 || (TREE_PURPOSE (elt) == NULL_TREE
3996 ? nbits != 1
3997 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3998 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3999 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4000 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4001 != nbits))))
4002 clear_storage (target, expr_size (exp),
4003 TYPE_ALIGN (type) / BITS_PER_UNIT);
4004 }
4005
4006 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595
PB
4007 {
 4008 /* Start of range of element, or NULL. */
4009 tree startbit = TREE_PURPOSE (elt);
 4010 /* End of range of element, or element value. */
4011 tree endbit = TREE_VALUE (elt);
381127e8 4012#ifdef TARGET_MEM_FUNCTIONS
071a6595 4013 HOST_WIDE_INT startb, endb;
381127e8 4014#endif
071a6595
PB
4015 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4016
4017 bitlength_rtx = expand_expr (bitlength,
4018 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4019
 4020 /* Handle a non-range tuple element like [ expr ]. */
4021 if (startbit == NULL_TREE)
4022 {
4023 startbit = save_expr (endbit);
4024 endbit = startbit;
4025 }
4026 startbit = convert (sizetype, startbit);
4027 endbit = convert (sizetype, endbit);
4028 if (! integer_zerop (domain_min))
4029 {
4030 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4031 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4032 }
4033 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4034 EXPAND_CONST_ADDRESS);
4035 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4036 EXPAND_CONST_ADDRESS);
4037
4038 if (REG_P (target))
4039 {
4040 targetx = assign_stack_temp (GET_MODE (target),
4041 GET_MODE_SIZE (GET_MODE (target)),
4042 0);
4043 emit_move_insn (targetx, target);
4044 }
4045 else if (GET_CODE (target) == MEM)
4046 targetx = target;
4047 else
4048 abort ();
4049
4050#ifdef TARGET_MEM_FUNCTIONS
4051 /* Optimization: If startbit and endbit are
9faa82d8 4052 constants divisible by BITS_PER_UNIT,
0f41302f 4053 call memset instead. */
071a6595
PB
4054 if (TREE_CODE (startbit) == INTEGER_CST
4055 && TREE_CODE (endbit) == INTEGER_CST
4056 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 4057 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 4058 {
071a6595
PB
4059 emit_library_call (memset_libfunc, 0,
4060 VOIDmode, 3,
e1a43f73
PB
4061 plus_constant (XEXP (targetx, 0),
4062 startb / BITS_PER_UNIT),
071a6595 4063 Pmode,
3b6f75e2 4064 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 4065 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 4066 TYPE_MODE (sizetype));
071a6595
PB
4067 }
4068 else
4069#endif
4070 {
38a448ca 4071 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
071a6595
PB
4072 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4073 bitlength_rtx, TYPE_MODE (sizetype),
4074 startbit_rtx, TYPE_MODE (sizetype),
4075 endbit_rtx, TYPE_MODE (sizetype));
4076 }
4077 if (REG_P (target))
4078 emit_move_insn (target, targetx);
4079 }
4080 }
bbf6f052
RK
4081
4082 else
4083 abort ();
4084}
4085
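/* A sketch of the net effect of the clear-first strategy above,
   written as plain C for a mostly-zero aggregate.  The type and
   function names here are hypothetical, memset (as in <string.h>)
   stands in for clear_storage, and the fragment is not used
   anywhere.  */
#if 0
struct big { int a[16]; int flag; };

static void
init_big_example (p)
     struct big *p;
{
  /* First clear the whole object, as clear_storage would...  */
  memset ((char *) p, 0, sizeof (struct big));
  /* ...then store only the elements that are not zero.  */
  p->flag = 1;
}
#endif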
4086/* Store the value of EXP (an expression tree)
4087 into a subfield of TARGET which has mode MODE and occupies
4088 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4089 If MODE is VOIDmode, it means that we are storing into a bit-field.
4090
4091 If VALUE_MODE is VOIDmode, return nothing in particular.
4092 UNSIGNEDP is not used in this case.
4093
4094 Otherwise, return an rtx for the value stored. This rtx
4095 has mode VALUE_MODE if that is convenient to do.
4096 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4097
4098 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4099 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
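
   For illustration (editorial note): for `struct { int x : 5; } s;'
   held in memory, a store to `s.x' reaches store_field with
   BITSIZE == 5, BITPOS == 0 and MODE == VOIDmode (a bit-field),
   so the store_bit_field path below is taken. */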
4100
4101static rtx
4102store_field (target, bitsize, bitpos, mode, exp, value_mode,
4103 unsignedp, align, total_size)
4104 rtx target;
4105 int bitsize, bitpos;
4106 enum machine_mode mode;
4107 tree exp;
4108 enum machine_mode value_mode;
4109 int unsignedp;
4110 int align;
4111 int total_size;
4112{
906c4e36 4113 HOST_WIDE_INT width_mask = 0;
bbf6f052 4114
e9a25f70
JL
4115 if (TREE_CODE (exp) == ERROR_MARK)
4116 return const0_rtx;
4117
906c4e36
RK
4118 if (bitsize < HOST_BITS_PER_WIDE_INT)
4119 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
4120
4121 /* If we are storing into an unaligned field of an aligned union that is
4122 in a register, we may have the mode of TARGET being an integer mode but
4123 MODE == BLKmode. In that case, get an aligned object whose size and
4124 alignment are the same as TARGET and store TARGET into it (we can avoid
4125 the store if the field being stored is the entire width of TARGET). Then
4126 call ourselves recursively to store the field into a BLKmode version of
4127 that object. Finally, load from the object into TARGET. This is not
4128 very efficient in general, but should only be slightly more expensive
4129 than the otherwise-required unaligned accesses. Perhaps this can be
4130 cleaned up later. */
4131
4132 if (mode == BLKmode
4133 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4134 {
4135 rtx object = assign_stack_temp (GET_MODE (target),
4136 GET_MODE_SIZE (GET_MODE (target)), 0);
4137 rtx blk_object = copy_rtx (object);
4138
24a13950
JW
4139 MEM_IN_STRUCT_P (object) = 1;
4140 MEM_IN_STRUCT_P (blk_object) = 1;
bbf6f052
RK
4141 PUT_MODE (blk_object, BLKmode);
4142
4143 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4144 emit_move_insn (object, target);
4145
4146 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4147 align, total_size);
4148
46093b97
RS
4149 /* Even though we aren't returning target, we need to
4150 give it the updated value. */
bbf6f052
RK
4151 emit_move_insn (target, object);
4152
46093b97 4153 return blk_object;
bbf6f052
RK
4154 }
4155
4156 /* If the structure is in a register or if the component
4157 is a bit field, we cannot use addressing to access it.
4158 Use bit-field techniques or SUBREG to store in it. */
4159
4fa52007
RK
4160 if (mode == VOIDmode
4161 || (mode != BLKmode && ! direct_store[(int) mode])
4162 || GET_CODE (target) == REG
c980ac49 4163 || GET_CODE (target) == SUBREG
ccc98036
RS
4164 /* If the field isn't aligned enough to store as an ordinary memref,
4165 store it as a bit field. */
c7a7ac46 4166 || (SLOW_UNALIGNED_ACCESS
ccc98036 4167 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
c7a7ac46 4168 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
bbf6f052 4169 {
906c4e36 4170 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 4171
ef19912d
RK
4172 /* If BITSIZE is narrower than the size of the type of EXP
4173 we will be narrowing TEMP. Normally, what's wanted are the
 4174 low-order bits. However, if EXP's type is a record and this is a
 4175 big-endian machine, we want the upper BITSIZE bits. */
4176 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4177 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4178 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4179 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4180 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4181 - bitsize),
4182 temp, 1);
4183
bbd6cf73
RK
4184 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4185 MODE. */
4186 if (mode != VOIDmode && mode != BLKmode
4187 && mode != TYPE_MODE (TREE_TYPE (exp)))
4188 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4189
a281e72d
RK
4190 /* If the modes of TARGET and TEMP are both BLKmode, both
4191 must be in memory and BITPOS must be aligned on a byte
4192 boundary. If so, we simply do a block copy. */
4193 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4194 {
4195 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4196 || bitpos % BITS_PER_UNIT != 0)
4197 abort ();
4198
0086427c
RK
4199 target = change_address (target, VOIDmode,
4200 plus_constant (XEXP (target, 0),
a281e72d
RK
4201 bitpos / BITS_PER_UNIT));
4202
4203 emit_block_move (target, temp,
4204 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4205 / BITS_PER_UNIT),
4206 1);
4207
4208 return value_mode == VOIDmode ? const0_rtx : target;
4209 }
4210
bbf6f052
RK
4211 /* Store the value in the bitfield. */
4212 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4213 if (value_mode != VOIDmode)
4214 {
4215 /* The caller wants an rtx for the value. */
4216 /* If possible, avoid refetching from the bitfield itself. */
4217 if (width_mask != 0
4218 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 4219 {
9074de27 4220 tree count;
5c4d7cfb 4221 enum machine_mode tmode;
86a2c12a 4222
5c4d7cfb
RS
4223 if (unsignedp)
4224 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4225 tmode = GET_MODE (temp);
86a2c12a
RS
4226 if (tmode == VOIDmode)
4227 tmode = value_mode;
5c4d7cfb
RS
4228 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4229 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4230 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4231 }
bbf6f052 4232 return extract_bit_field (target, bitsize, bitpos, unsignedp,
906c4e36
RK
4233 NULL_RTX, value_mode, 0, align,
4234 total_size);
bbf6f052
RK
4235 }
4236 return const0_rtx;
4237 }
4238 else
4239 {
4240 rtx addr = XEXP (target, 0);
4241 rtx to_rtx;
4242
4243 /* If a value is wanted, it must be the lhs;
4244 so make the address stable for multiple use. */
4245
4246 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4247 && ! CONSTANT_ADDRESS_P (addr)
4248 /* A frame-pointer reference is already stable. */
4249 && ! (GET_CODE (addr) == PLUS
4250 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4251 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4252 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4253 addr = copy_to_reg (addr);
4254
4255 /* Now build a reference to just the desired component. */
4256
effbcc6a
RK
4257 to_rtx = copy_rtx (change_address (target, mode,
4258 plus_constant (addr,
4259 (bitpos
4260 / BITS_PER_UNIT))));
bbf6f052
RK
4261 MEM_IN_STRUCT_P (to_rtx) = 1;
4262
4263 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4264 }
4265}
4266\f
4267/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4268 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
742920c7 4269 ARRAY_REFs and find the ultimate containing object, which we return.
bbf6f052
RK
4270
4271 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4272 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
4273 If the position of the field is variable, we store a tree
4274 giving the variable offset (in units) in *POFFSET.
4275 This offset is in addition to the bit position.
4276 If the position is not variable, we store 0 in *POFFSET.
839c4796
RK
4277 We set *PALIGNMENT to the alignment in bytes of the address that will be
4278 computed. This is the alignment of the thing we return if *POFFSET
 4279 is zero, but can be less strictly aligned if *POFFSET is nonzero.
bbf6f052
RK
4280
4281 If any of the extraction expressions is volatile,
4282 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4283
4284 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4285 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
4286 is redundant.
4287
4288 If the field describes a variable-sized object, *PMODE is set to
4289 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
839c4796 4290 this case, but the address of the object can be found. */
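/* For illustration (editorial note): for an lvalue such as
   `s.a[i].b', this function peels off the COMPONENT_REFs and the
   ARRAY_REF, returns the innermost object `s', accumulates the
   constant part of the displacement of `b' into *PBITPOS, and
   leaves the i-dependent part of the displacement in *POFFSET.  */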
bbf6f052
RK
4291
4292tree
4969d05d 4293get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
839c4796 4294 punsignedp, pvolatilep, palignment)
bbf6f052
RK
4295 tree exp;
4296 int *pbitsize;
4297 int *pbitpos;
7bb0943f 4298 tree *poffset;
bbf6f052
RK
4299 enum machine_mode *pmode;
4300 int *punsignedp;
4301 int *pvolatilep;
839c4796 4302 int *palignment;
bbf6f052 4303{
b50d17a1 4304 tree orig_exp = exp;
bbf6f052
RK
4305 tree size_tree = 0;
4306 enum machine_mode mode = VOIDmode;
742920c7 4307 tree offset = integer_zero_node;
839c4796 4308 int alignment = BIGGEST_ALIGNMENT;
bbf6f052
RK
4309
4310 if (TREE_CODE (exp) == COMPONENT_REF)
4311 {
4312 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4313 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4314 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4315 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4316 }
4317 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4318 {
4319 size_tree = TREE_OPERAND (exp, 1);
4320 *punsignedp = TREE_UNSIGNED (exp);
4321 }
4322 else
4323 {
4324 mode = TYPE_MODE (TREE_TYPE (exp));
4325 *pbitsize = GET_MODE_BITSIZE (mode);
4326 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4327 }
4328
4329 if (size_tree)
4330 {
4331 if (TREE_CODE (size_tree) != INTEGER_CST)
e7c33f54
RK
4332 mode = BLKmode, *pbitsize = -1;
4333 else
4334 *pbitsize = TREE_INT_CST_LOW (size_tree);
bbf6f052
RK
4335 }
4336
4337 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4338 and find the ultimate containing object. */
4339
4340 *pbitpos = 0;
4341
4342 while (1)
4343 {
7bb0943f 4344 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
bbf6f052 4345 {
7bb0943f
RS
4346 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4347 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4348 : TREE_OPERAND (exp, 2));
e6d8c385 4349 tree constant = integer_zero_node, var = pos;
bbf6f052 4350
e7f3c83f
RK
4351 /* If this field hasn't been filled in yet, don't go
4352 past it. This should only happen when folding expressions
4353 made during type construction. */
4354 if (pos == 0)
4355 break;
4356
e6d8c385
RK
4357 /* Assume here that the offset is a multiple of a unit.
4358 If not, there should be an explicitly added constant. */
4359 if (TREE_CODE (pos) == PLUS_EXPR
4360 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4361 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
7bb0943f 4362 else if (TREE_CODE (pos) == INTEGER_CST)
e6d8c385
RK
4363 constant = pos, var = integer_zero_node;
4364
4365 *pbitpos += TREE_INT_CST_LOW (constant);
8d8c9ba9
RK
4366 offset = size_binop (PLUS_EXPR, offset,
4367 size_binop (EXACT_DIV_EXPR, var,
4368 size_int (BITS_PER_UNIT)));
bbf6f052 4369 }
bbf6f052 4370
742920c7 4371 else if (TREE_CODE (exp) == ARRAY_REF)
bbf6f052 4372 {
742920c7
RK
4373 /* This code is based on the code in case ARRAY_REF in expand_expr
4374 below. We assume here that the size of an array element is
4375 always an integral multiple of BITS_PER_UNIT. */
4376
4377 tree index = TREE_OPERAND (exp, 1);
4378 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4379 tree low_bound
4380 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4381 tree index_type = TREE_TYPE (index);
4382
4c08eef0 4383 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
742920c7 4384 {
4c08eef0
RK
4385 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4386 index);
742920c7
RK
4387 index_type = TREE_TYPE (index);
4388 }
4389
ca0f2220
RH
4390 if (! integer_zerop (low_bound))
4391 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4392
f8dac6eb
R
4393 if (TREE_CODE (index) == INTEGER_CST)
4394 {
4395 index = convert (sbitsizetype, index);
4396 index_type = TREE_TYPE (index);
4397 }
4398
4399 index = fold (build (MULT_EXPR, sbitsizetype, index,
4400 convert (sbitsizetype,
0d15e60c 4401 TYPE_SIZE (TREE_TYPE (exp)))));
742920c7
RK
4402
4403 if (TREE_CODE (index) == INTEGER_CST
4404 && TREE_INT_CST_HIGH (index) == 0)
4405 *pbitpos += TREE_INT_CST_LOW (index);
4406 else
956d6950 4407 {
e5e809f4
JL
4408 if (contains_placeholder_p (index))
4409 index = build (WITH_RECORD_EXPR, sizetype, index, exp);
4410
956d6950 4411 offset = size_binop (PLUS_EXPR, offset,
e5e809f4
JL
4412 size_binop (FLOOR_DIV_EXPR, index,
4413 size_int (BITS_PER_UNIT)));
956d6950 4414 }
bbf6f052
RK
4415 }
4416 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4417 && ! ((TREE_CODE (exp) == NOP_EXPR
4418 || TREE_CODE (exp) == CONVERT_EXPR)
7f62854a
RK
4419 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4420 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4421 != UNION_TYPE))
bbf6f052
RK
4422 && (TYPE_MODE (TREE_TYPE (exp))
4423 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4424 break;
7bb0943f
RS
4425
4426 /* If any reference in the chain is volatile, the effect is volatile. */
4427 if (TREE_THIS_VOLATILE (exp))
4428 *pvolatilep = 1;
839c4796
RK
4429
4430 /* If the offset is non-constant already, then we can't assume any
4431 alignment more than the alignment here. */
4432 if (! integer_zerop (offset))
4433 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4434
bbf6f052
RK
4435 exp = TREE_OPERAND (exp, 0);
4436 }
4437
839c4796
RK
4438 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4439 alignment = MIN (alignment, DECL_ALIGN (exp));
9293498f 4440 else if (TREE_TYPE (exp) != 0)
839c4796
RK
4441 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4442
742920c7
RK
4443 if (integer_zerop (offset))
4444 offset = 0;
4445
b50d17a1
RK
4446 if (offset != 0 && contains_placeholder_p (offset))
4447 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4448
bbf6f052 4449 *pmode = mode;
7bb0943f 4450 *poffset = offset;
839c4796 4451 *palignment = alignment / BITS_PER_UNIT;
bbf6f052
RK
4452 return exp;
4453}
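/* A sketch of the typical call pattern for get_inner_reference,
   modeled on its uses elsewhere in this file; the wrapper function
   itself is hypothetical and not used anywhere.  */
#if 0
static rtx
get_inner_reference_example (exp)
     tree exp;
{
  int bitsize, bitpos, unsignedp, volatilep = 0, alignment;
  tree offset;
  enum machine_mode mode1;
  tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				  &mode1, &unsignedp, &volatilep,
				  &alignment);

  /* TEM is the ultimate containing object; expand it once, then
     address the field using BITSIZE and BITPOS, adding OFFSET
     (in units) when it is nonzero.  */
  return expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
}
#endif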
921b3427
RK
4454
 4455/* Subroutine of expand_expr: compute memory_usage from modifier. */
4456static enum memory_use_mode
4457get_memory_usage_from_modifier (modifier)
4458 enum expand_modifier modifier;
4459{
4460 switch (modifier)
4461 {
4462 case EXPAND_NORMAL:
e5e809f4 4463 case EXPAND_SUM:
921b3427
RK
4464 return MEMORY_USE_RO;
4465 break;
4466 case EXPAND_MEMORY_USE_WO:
4467 return MEMORY_USE_WO;
4468 break;
4469 case EXPAND_MEMORY_USE_RW:
4470 return MEMORY_USE_RW;
4471 break;
921b3427 4472 case EXPAND_MEMORY_USE_DONT:
e5e809f4
JL
4473 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4474 MEMORY_USE_DONT, because they are modifiers to a call of
4475 expand_expr in the ADDR_EXPR case of expand_expr. */
921b3427 4476 case EXPAND_CONST_ADDRESS:
e5e809f4 4477 case EXPAND_INITIALIZER:
921b3427
RK
4478 return MEMORY_USE_DONT;
4479 case EXPAND_MEMORY_USE_BAD:
4480 default:
4481 abort ();
4482 }
4483}
bbf6f052
RK
4484\f
4485/* Given an rtx VALUE that may contain additions and multiplications,
4486 return an equivalent value that just refers to a register or memory.
4487 This is done by generating instructions to perform the arithmetic
c45a13a6
RK
4488 and returning a pseudo-register containing the value.
4489
4490 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
4491
4492rtx
4493force_operand (value, target)
4494 rtx value, target;
4495{
4496 register optab binoptab = 0;
4497 /* Use a temporary to force order of execution of calls to
4498 `force_operand'. */
4499 rtx tmp;
4500 register rtx op2;
4501 /* Use subtarget as the target for operand 0 of a binary operation. */
4502 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4503
4504 if (GET_CODE (value) == PLUS)
4505 binoptab = add_optab;
4506 else if (GET_CODE (value) == MINUS)
4507 binoptab = sub_optab;
4508 else if (GET_CODE (value) == MULT)
4509 {
4510 op2 = XEXP (value, 1);
4511 if (!CONSTANT_P (op2)
4512 && !(GET_CODE (op2) == REG && op2 != subtarget))
4513 subtarget = 0;
4514 tmp = force_operand (XEXP (value, 0), subtarget);
4515 return expand_mult (GET_MODE (value), tmp,
906c4e36 4516 force_operand (op2, NULL_RTX),
bbf6f052
RK
4517 target, 0);
4518 }
4519
4520 if (binoptab)
4521 {
4522 op2 = XEXP (value, 1);
4523 if (!CONSTANT_P (op2)
4524 && !(GET_CODE (op2) == REG && op2 != subtarget))
4525 subtarget = 0;
4526 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4527 {
4528 binoptab = add_optab;
4529 op2 = negate_rtx (GET_MODE (value), op2);
4530 }
4531
4532 /* Check for an addition with OP2 a constant integer and our first
4533 operand a PLUS of a virtual register and something else. In that
4534 case, we want to emit the sum of the virtual register and the
4535 constant first and then add the other value. This allows virtual
4536 register instantiation to simply modify the constant rather than
4537 creating another one around this addition. */
4538 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4539 && GET_CODE (XEXP (value, 0)) == PLUS
4540 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4541 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4542 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4543 {
4544 rtx temp = expand_binop (GET_MODE (value), binoptab,
4545 XEXP (XEXP (value, 0), 0), op2,
4546 subtarget, 0, OPTAB_LIB_WIDEN);
4547 return expand_binop (GET_MODE (value), binoptab, temp,
4548 force_operand (XEXP (XEXP (value, 0), 1), 0),
4549 target, 0, OPTAB_LIB_WIDEN);
4550 }
4551
4552 tmp = force_operand (XEXP (value, 0), subtarget);
4553 return expand_binop (GET_MODE (value), binoptab, tmp,
906c4e36 4554 force_operand (op2, NULL_RTX),
bbf6f052 4555 target, 0, OPTAB_LIB_WIDEN);
8008b228 4556 /* We give UNSIGNEDP = 0 to expand_binop
bbf6f052
RK
4557 because the only operations we are expanding here are signed ones. */
4558 }
4559 return value;
4560}
4561\f
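/* For illustration (editorial note): handed a value such as
   (plus:SI (reg:SI 100) (const_int 4)), force_operand above emits
   the addition through expand_binop and returns a pseudo register
   holding the sum; a value that is already a REG, MEM or constant
   comes back unchanged.  */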
4562/* Subroutine of expand_expr:
4563 save the non-copied parts (LIST) of an expr (LHS), and return a list
4564 which can restore these values to their previous values,
4565 should something modify their storage. */
4566
4567static tree
4568save_noncopied_parts (lhs, list)
4569 tree lhs;
4570 tree list;
4571{
4572 tree tail;
4573 tree parts = 0;
4574
4575 for (tail = list; tail; tail = TREE_CHAIN (tail))
4576 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4577 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4578 else
4579 {
4580 tree part = TREE_VALUE (tail);
4581 tree part_type = TREE_TYPE (part);
906c4e36 4582 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
06089a8b 4583 rtx target = assign_temp (part_type, 0, 1, 1);
bbf6f052 4584 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
906c4e36 4585 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
bbf6f052 4586 parts = tree_cons (to_be_saved,
906c4e36
RK
4587 build (RTL_EXPR, part_type, NULL_TREE,
4588 (tree) target),
bbf6f052
RK
4589 parts);
4590 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4591 }
4592 return parts;
4593}
4594
4595/* Subroutine of expand_expr:
4596 record the non-copied parts (LIST) of an expr (LHS), and return a list
4597 which specifies the initial values of these parts. */
4598
4599static tree
4600init_noncopied_parts (lhs, list)
4601 tree lhs;
4602 tree list;
4603{
4604 tree tail;
4605 tree parts = 0;
4606
4607 for (tail = list; tail; tail = TREE_CHAIN (tail))
4608 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4609 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4610 else
4611 {
4612 tree part = TREE_VALUE (tail);
4613 tree part_type = TREE_TYPE (part);
906c4e36 4614 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
4615 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4616 }
4617 return parts;
4618}
4619
4620/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
4621 EXP can reference X, which is being modified. TOP_P is nonzero if this
4622 call is going to be used to determine whether we need a temporary
4623 for EXP, as opposed to a recursive call to this function. */
bbf6f052
RK
4624
4625static int
e5e809f4 4626safe_from_p (x, exp, top_p)
bbf6f052
RK
4627 rtx x;
4628 tree exp;
e5e809f4 4629 int top_p;
bbf6f052
RK
4630{
4631 rtx exp_rtl = 0;
4632 int i, nops;
4633
6676e72f
RK
4634 if (x == 0
4635 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
4636 have no way of allocating temporaries of variable size
4637 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4638 So we assume here that something at a higher level has prevented a
f4510f37 4639 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4
JL
4640 do this when X is BLKmode and when we are at the top level. */
4641 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
f4510f37 4642 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
4643 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4644 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4645 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4646 != INTEGER_CST)
f4510f37 4647 && GET_MODE (x) == BLKmode))
bbf6f052
RK
4648 return 1;
4649
4650 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4651 find the underlying pseudo. */
4652 if (GET_CODE (x) == SUBREG)
4653 {
4654 x = SUBREG_REG (x);
4655 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4656 return 0;
4657 }
4658
4659 /* If X is a location in the outgoing argument area, it is always safe. */
4660 if (GET_CODE (x) == MEM
4661 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4662 || (GET_CODE (XEXP (x, 0)) == PLUS
4663 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4664 return 1;
4665
4666 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4667 {
4668 case 'd':
4669 exp_rtl = DECL_RTL (exp);
4670 break;
4671
4672 case 'c':
4673 return 1;
4674
4675 case 'x':
4676 if (TREE_CODE (exp) == TREE_LIST)
f32fd778 4677 return ((TREE_VALUE (exp) == 0
e5e809f4 4678 || safe_from_p (x, TREE_VALUE (exp), 0))
bbf6f052 4679 && (TREE_CHAIN (exp) == 0
e5e809f4 4680 || safe_from_p (x, TREE_CHAIN (exp), 0)));
bbf6f052
RK
4681 else
4682 return 0;
4683
4684 case '1':
e5e809f4 4685 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052
RK
4686
4687 case '2':
4688 case '<':
e5e809f4
JL
4689 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
4690 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
bbf6f052
RK
4691
4692 case 'e':
4693 case 'r':
4694 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4695 the expression. If it is set, we conflict iff we are that rtx or
4696 both are in memory. Otherwise, we check all operands of the
4697 expression recursively. */
4698
4699 switch (TREE_CODE (exp))
4700 {
4701 case ADDR_EXPR:
e44842fe 4702 return (staticp (TREE_OPERAND (exp, 0))
e5e809f4
JL
4703 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
4704 || TREE_STATIC (exp));
bbf6f052
RK
4705
4706 case INDIRECT_REF:
4707 if (GET_CODE (x) == MEM)
4708 return 0;
4709 break;
4710
4711 case CALL_EXPR:
4712 exp_rtl = CALL_EXPR_RTL (exp);
4713 if (exp_rtl == 0)
4714 {
4715 /* Assume that the call will clobber all hard registers and
4716 all of memory. */
4717 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4718 || GET_CODE (x) == MEM)
4719 return 0;
4720 }
4721
4722 break;
4723
4724 case RTL_EXPR:
3bb5826a
RK
4725 /* If a sequence exists, we would have to scan every instruction
4726 in the sequence to see if it was safe. This is probably not
4727 worthwhile. */
4728 if (RTL_EXPR_SEQUENCE (exp))
bbf6f052
RK
4729 return 0;
4730
3bb5826a 4731 exp_rtl = RTL_EXPR_RTL (exp);
bbf6f052
RK
4732 break;
4733
4734 case WITH_CLEANUP_EXPR:
4735 exp_rtl = RTL_EXPR_RTL (exp);
4736 break;
4737
5dab5552 4738 case CLEANUP_POINT_EXPR:
e5e809f4 4739 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5dab5552 4740
bbf6f052
RK
4741 case SAVE_EXPR:
4742 exp_rtl = SAVE_EXPR_RTL (exp);
4743 break;
4744
8129842c
RS
4745 case BIND_EXPR:
4746 /* The only operand we look at is operand 1. The rest aren't
4747 part of the expression. */
e5e809f4 4748 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
8129842c 4749
bbf6f052 4750 case METHOD_CALL_EXPR:
0f41302f 4751 /* This takes an rtx argument, but shouldn't appear here. */
bbf6f052 4752 abort ();
e9a25f70
JL
4753
4754 default:
4755 break;
bbf6f052
RK
4756 }
4757
4758 /* If we have an rtx, we do not need to scan our operands. */
4759 if (exp_rtl)
4760 break;
4761
4762 nops = tree_code_length[(int) TREE_CODE (exp)];
4763 for (i = 0; i < nops; i++)
4764 if (TREE_OPERAND (exp, i) != 0
e5e809f4 4765 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052
RK
4766 return 0;
4767 }
4768
4769 /* If we have an rtl, find any enclosed object. Then see if we conflict
4770 with it. */
4771 if (exp_rtl)
4772 {
4773 if (GET_CODE (exp_rtl) == SUBREG)
4774 {
4775 exp_rtl = SUBREG_REG (exp_rtl);
4776 if (GET_CODE (exp_rtl) == REG
4777 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4778 return 0;
4779 }
4780
4781 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4782 are memory and EXP is not readonly. */
4783 return ! (rtx_equal_p (x, exp_rtl)
4784 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4785 && ! TREE_READONLY (exp)));
4786 }
4787
4788 /* If we reach here, it is safe. */
4789 return 1;
4790}
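/* For illustration (editorial note): if X is a pseudo register and
   EXP is a constant, safe_from_p returns 1; if X is a MEM and EXP
   contains a CALL_EXPR that has not been expanded yet, it returns 0,
   since the call must be assumed to clobber all of memory.  */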
4791
4792/* Subroutine of expand_expr: return nonzero iff EXP is an
4793 expression whose type is statically determinable. */
4794
4795static int
4796fixed_type_p (exp)
4797 tree exp;
4798{
4799 if (TREE_CODE (exp) == PARM_DECL
4800 || TREE_CODE (exp) == VAR_DECL
4801 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4802 || TREE_CODE (exp) == COMPONENT_REF
4803 || TREE_CODE (exp) == ARRAY_REF)
4804 return 1;
4805 return 0;
4806}
01c8a7c8
RK
4807
4808/* Subroutine of expand_expr: return rtx if EXP is a
4809 variable or parameter; else return 0. */
4810
4811static rtx
4812var_rtx (exp)
4813 tree exp;
4814{
4815 STRIP_NOPS (exp);
4816 switch (TREE_CODE (exp))
4817 {
4818 case PARM_DECL:
4819 case VAR_DECL:
4820 return DECL_RTL (exp);
4821 default:
4822 return 0;
4823 }
4824}
bbf6f052
RK
4825\f
4826/* expand_expr: generate code for computing expression EXP.
4827 An rtx for the computed value is returned. The value is never null.
4828 In the case of a void EXP, const0_rtx is returned.
4829
4830 The value may be stored in TARGET if TARGET is nonzero.
4831 TARGET is just a suggestion; callers must assume that
4832 the rtx returned may not be the same as TARGET.
4833
4834 If TARGET is CONST0_RTX, it means that the value will be ignored.
4835
4836 If TMODE is not VOIDmode, it suggests generating the
4837 result in mode TMODE. But this is done only when convenient.
 4838 Otherwise, TMODE is ignored and the value is generated in its natural mode.
4839 TMODE is just a suggestion; callers must assume that
4840 the rtx returned may not have mode TMODE.
4841
d6a5ac33
RK
4842 Note that TARGET may have neither TMODE nor MODE. In that case, it
4843 probably will not be used.
bbf6f052
RK
4844
4845 If MODIFIER is EXPAND_SUM then when EXP is an addition
4846 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4847 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4848 products as above, or REG or MEM, or constant.
4849 Ordinarily in such cases we would output mul or add instructions
4850 and then return a pseudo reg containing the sum.
4851
4852 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4853 it also marks a label as absolutely required (it can't be dead).
26fcb35a 4854 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
4855 This is used for outputting expressions used in initializers.
4856
4857 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4858 with a constant address even if that address is not normally legitimate.
4859 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
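
/* For illustration (editorial note): the most common call pattern
   in this file is simply

	op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);

   i.e. no suggested target, no suggested mode, and modifier
   EXPAND_NORMAL (which is 0).  */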
bbf6f052
RK
4860
4861rtx
4862expand_expr (exp, target, tmode, modifier)
4863 register tree exp;
4864 rtx target;
4865 enum machine_mode tmode;
4866 enum expand_modifier modifier;
4867{
b50d17a1
RK
4868 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4869 This is static so it will be accessible to our recursive callees. */
4870 static tree placeholder_list = 0;
bbf6f052
RK
4871 register rtx op0, op1, temp;
4872 tree type = TREE_TYPE (exp);
4873 int unsignedp = TREE_UNSIGNED (type);
4874 register enum machine_mode mode = TYPE_MODE (type);
4875 register enum tree_code code = TREE_CODE (exp);
4876 optab this_optab;
4877 /* Use subtarget as the target for operand 0 of a binary operation. */
4878 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4879 rtx original_target = target;
dd27116b
RK
4880 int ignore = (target == const0_rtx
4881 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4d87de75
RS
4882 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4883 || code == COND_EXPR)
dd27116b 4884 && TREE_CODE (type) == VOID_TYPE));
bbf6f052 4885 tree context;
921b3427
RK
4886 /* Used by check-memory-usage to make modifier read only. */
4887 enum expand_modifier ro_modifier;
bbf6f052 4888
921b3427
RK
4889 /* Make a read-only version of the modifier. */
4890 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
4891 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
4892 ro_modifier = modifier;
4893 else
4894 ro_modifier = EXPAND_NORMAL;
ca695ac9 4895
bbf6f052
RK
4896 /* Don't use hard regs as subtargets, because the combiner
4897 can only handle pseudo regs. */
4898 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4899 subtarget = 0;
4900 /* Avoid subtargets inside loops,
4901 since they hide some invariant expressions. */
4902 if (preserve_subexpressions_p ())
4903 subtarget = 0;
4904
dd27116b
RK
4905 /* If we are going to ignore this result, we need only do something
4906 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
4907 is, short-circuit the most common cases here. Note that we must
4908 not call expand_expr with anything but const0_rtx in case this
4909 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 4910
dd27116b
RK
4911 if (ignore)
4912 {
4913 if (! TREE_SIDE_EFFECTS (exp))
4914 return const0_rtx;
4915
4916 /* Ensure we reference a volatile object even if value is ignored. */
4917 if (TREE_THIS_VOLATILE (exp)
4918 && TREE_CODE (exp) != FUNCTION_DECL
4919 && mode != VOIDmode && mode != BLKmode)
4920 {
921b3427 4921 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
dd27116b
RK
4922 if (GET_CODE (temp) == MEM)
4923 temp = copy_to_reg (temp);
4924 return const0_rtx;
4925 }
4926
4927 if (TREE_CODE_CLASS (code) == '1')
4928 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 4929 VOIDmode, ro_modifier);
dd27116b
RK
4930 else if (TREE_CODE_CLASS (code) == '2'
4931 || TREE_CODE_CLASS (code) == '<')
4932 {
921b3427
RK
4933 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
4934 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
dd27116b
RK
4935 return const0_rtx;
4936 }
4937 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4938 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4939 /* If the second operand has no side effects, just evaluate
0f41302f 4940 the first. */
dd27116b 4941 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 4942 VOIDmode, ro_modifier);
dd27116b 4943
90764a87 4944 target = 0;
dd27116b 4945 }
bbf6f052 4946
e44842fe
RK
 4947 /* If we will do cse, generate all results into pseudo registers
4948 since 1) that allows cse to find more things
4949 and 2) otherwise cse could produce an insn the machine
4950 cannot support. */
4951
bbf6f052
RK
4952 if (! cse_not_expected && mode != BLKmode && target
4953 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4954 target = subtarget;
4955
bbf6f052
RK
4956 switch (code)
4957 {
4958 case LABEL_DECL:
b552441b
RS
4959 {
4960 tree function = decl_function_context (exp);
4961 /* Handle using a label in a containing function. */
d0977240
RK
4962 if (function != current_function_decl
4963 && function != inline_function_decl && function != 0)
b552441b
RS
4964 {
4965 struct function *p = find_function_data (function);
4966 /* Allocate in the memory associated with the function
4967 that the label is in. */
4968 push_obstacks (p->function_obstack,
4969 p->function_maybepermanent_obstack);
4970
38a448ca
RH
4971 p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
4972 label_rtx (exp),
4973 p->forced_labels);
b552441b
RS
4974 pop_obstacks ();
4975 }
4976 else if (modifier == EXPAND_INITIALIZER)
38a448ca
RH
4977 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
4978 label_rtx (exp), forced_labels);
4979 temp = gen_rtx_MEM (FUNCTION_MODE,
4980 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
d0977240
RK
4981 if (function != current_function_decl
4982 && function != inline_function_decl && function != 0)
26fcb35a
RS
4983 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4984 return temp;
b552441b 4985 }
bbf6f052
RK
4986
4987 case PARM_DECL:
4988 if (DECL_RTL (exp) == 0)
4989 {
4990 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 4991 return CONST0_RTX (mode);
bbf6f052
RK
4992 }
4993
0f41302f 4994 /* ... fall through ... */
d6a5ac33 4995
bbf6f052 4996 case VAR_DECL:
2dca20cd
RS
4997 /* If a static var's type was incomplete when the decl was written,
4998 but the type is complete now, lay out the decl now. */
4999 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5000 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5001 {
5002 push_obstacks_nochange ();
5003 end_temporary_allocation ();
5004 layout_decl (exp, 0);
5005 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5006 pop_obstacks ();
5007 }
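/* Editorial illustration (not from the source): the layout-on-demand
   above arises for code such as

       static struct tag v;          (struct tag incomplete here)
       struct tag { int i; };        (completed later in the file)

   where V's size becomes known only after its declaration was seen.  */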
d6a5ac33 5008
921b3427
RK
5009 /* Only check automatic variables. Currently, function arguments are
5010 not checked (this can be done at compile-time with prototypes).
5011 Aggregates are not checked. */
5012 if (flag_check_memory_usage && code == VAR_DECL
5013 && GET_CODE (DECL_RTL (exp)) == MEM
5014 && DECL_CONTEXT (exp) != NULL_TREE
5015 && ! TREE_STATIC (exp)
5016 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5017 {
5018 enum memory_use_mode memory_usage;
5019 memory_usage = get_memory_usage_from_modifier (modifier);
5020
5021 if (memory_usage != MEMORY_USE_DONT)
5022 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5023 XEXP (DECL_RTL (exp), 0), ptr_mode,
5024 GEN_INT (int_size_in_bytes (type)),
5025 TYPE_MODE (sizetype),
956d6950
JL
5026 GEN_INT (memory_usage),
5027 TYPE_MODE (integer_type_node));
921b3427
RK
5028 }
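/* Editorial note (hedged): with -fcheck-memory-usage, the library call
   emitted above behaves roughly like

       chkr_check_addr (&var, sizeof (var), access_mode);

   where the actual entry point comes from chkr_check_addr_libfunc; the
   C spelling here is only an illustration.  */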
5029
0f41302f 5030 /* ... fall through ... */
d6a5ac33 5031
2dca20cd 5032 case FUNCTION_DECL:
bbf6f052
RK
5033 case RESULT_DECL:
5034 if (DECL_RTL (exp) == 0)
5035 abort ();
d6a5ac33 5036
e44842fe
RK
5037 /* Ensure the variable is marked as used even if it doesn't go
5038 through a parser. If it hasn't been used yet, write out an external
5039 definition. */
5040 if (! TREE_USED (exp))
5041 {
5042 assemble_external (exp);
5043 TREE_USED (exp) = 1;
5044 }
5045
dc6d66b3
RK
5046 /* Show we haven't gotten RTL for this yet. */
5047 temp = 0;
5048
bbf6f052
RK
5049 /* Handle variables inherited from containing functions. */
5050 context = decl_function_context (exp);
5051
5052 /* We treat inline_function_decl as an alias for the current function
5053 because that is the inline function whose vars, types, etc.
5054 are being merged into the current function.
5055 See expand_inline_function. */
d6a5ac33 5056
bbf6f052
RK
5057 if (context != 0 && context != current_function_decl
5058 && context != inline_function_decl
5059 /* If var is static, we don't need a static chain to access it. */
5060 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5061 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5062 {
5063 rtx addr;
5064
5065 /* Mark as non-local and addressable. */
81feeecb 5066 DECL_NONLOCAL (exp) = 1;
38ee6ed9
JM
5067 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5068 abort ();
bbf6f052
RK
5069 mark_addressable (exp);
5070 if (GET_CODE (DECL_RTL (exp)) != MEM)
5071 abort ();
5072 addr = XEXP (DECL_RTL (exp), 0);
5073 if (GET_CODE (addr) == MEM)
38a448ca
RH
5074 addr = gen_rtx_MEM (Pmode,
5075 fix_lexical_addr (XEXP (addr, 0), exp));
bbf6f052
RK
5076 else
5077 addr = fix_lexical_addr (addr, exp);
dc6d66b3 5078 temp = change_address (DECL_RTL (exp), mode, addr);
bbf6f052 5079 }
4af3895e 5080
bbf6f052
RK
5081 /* This is the case of an array whose size is to be determined
5082 from its initializer, while the initializer is still being parsed.
5083 See expand_decl. */
d6a5ac33 5084
dc6d66b3
RK
5085 else if (GET_CODE (DECL_RTL (exp)) == MEM
5086 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5087 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
bbf6f052 5088 XEXP (DECL_RTL (exp), 0));
d6a5ac33
RK
5089
5090 /* If DECL_RTL is memory, we are in the normal case and either
5091 the address is not valid or it is not a register and -fforce-addr
5092 is specified, get the address into a register. */
5093
dc6d66b3
RK
5094 else if (GET_CODE (DECL_RTL (exp)) == MEM
5095 && modifier != EXPAND_CONST_ADDRESS
5096 && modifier != EXPAND_SUM
5097 && modifier != EXPAND_INITIALIZER
5098 && (! memory_address_p (DECL_MODE (exp),
5099 XEXP (DECL_RTL (exp), 0))
5100 || (flag_force_addr
5101 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5102 temp = change_address (DECL_RTL (exp), VOIDmode,
d6a5ac33 5103 copy_rtx (XEXP (DECL_RTL (exp), 0)));
1499e0a8 5104
dc6d66b3
RK
5105 /* If we got something, return it. But first, set the alignment
5106 in case the address is a register. */
5107 if (temp != 0)
5108 {
5109 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5110 mark_reg_pointer (XEXP (temp, 0),
5111 DECL_ALIGN (exp) / BITS_PER_UNIT);
5112
5113 return temp;
5114 }
5115
1499e0a8
RK
5116 /* If the mode of DECL_RTL does not match that of the decl, it
5117 must be a promoted value. We return a SUBREG of the wanted mode,
5118 but mark it so that we know that it was already extended. */
5119
5120 if (GET_CODE (DECL_RTL (exp)) == REG
5121 && GET_MODE (DECL_RTL (exp)) != mode)
5122 {
1499e0a8
RK
5123 /* Get the signedness used for this variable. Ensure we get the
5124 same mode we got when the variable was declared. */
78911e8b
RK
5125 if (GET_MODE (DECL_RTL (exp))
5126 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
1499e0a8
RK
5127 abort ();
5128
38a448ca 5129 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
1499e0a8
RK
5130 SUBREG_PROMOTED_VAR_P (temp) = 1;
5131 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5132 return temp;
5133 }
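/* Editorial illustration (not from the source): on a target that
   promotes subword variables, a `short' variable may be kept in an
   SImode register, so reading it in HImode yields

       (subreg:HI (reg:SI n) 0)

   with SUBREG_PROMOTED_VAR_P set, telling later passes the upper bits
   already hold a valid extension.  */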
5134
bbf6f052
RK
5135 return DECL_RTL (exp);
5136
5137 case INTEGER_CST:
5138 return immed_double_const (TREE_INT_CST_LOW (exp),
5139 TREE_INT_CST_HIGH (exp),
5140 mode);
5141
5142 case CONST_DECL:
921b3427
RK
5143 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5144 EXPAND_MEMORY_USE_BAD);
bbf6f052
RK
5145
5146 case REAL_CST:
5147 /* If optimized, generate immediate CONST_DOUBLE
5148 which will be turned into memory by reload if necessary.
5149
5150 We used to force a register so that loop.c could see it. But
5151 this does not allow gen_* patterns to perform optimizations with
5152 the constants. It also produces two insns in cases like "x = 1.0;".
5153 On most machines, floating-point constants are not permitted in
5154 many insns, so we'd end up copying it to a register in any case.
5155
5156 Now, we do the copying in expand_binop, if appropriate. */
5157 return immed_real_const (exp);
5158
5159 case COMPLEX_CST:
5160 case STRING_CST:
5161 if (! TREE_CST_RTL (exp))
5162 output_constant_def (exp);
5163
5164 /* TREE_CST_RTL probably contains a constant address.
5165 On RISC machines where a constant address isn't valid,
5166 make some insns to get that address into a register. */
5167 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5168 && modifier != EXPAND_CONST_ADDRESS
5169 && modifier != EXPAND_INITIALIZER
5170 && modifier != EXPAND_SUM
d6a5ac33
RK
5171 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5172 || (flag_force_addr
5173 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
bbf6f052
RK
5174 return change_address (TREE_CST_RTL (exp), VOIDmode,
5175 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5176 return TREE_CST_RTL (exp);
5177
bf1e5319
APB
5178 case EXPR_WITH_FILE_LOCATION:
5179 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5180 emit_line_note (EXPR_WFL_FILENAME (exp), EXPR_WFL_LINENO (exp));
5181 return expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5182
bbf6f052
RK
5183 case SAVE_EXPR:
5184 context = decl_function_context (exp);
d6a5ac33 5185
d0977240
RK
5186 /* If this SAVE_EXPR was at global context, assume we are an
5187 initialization function and move it into our context. */
5188 if (context == 0)
5189 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5190
bbf6f052
RK
5191 /* We treat inline_function_decl as an alias for the current function
5192 because that is the inline function whose vars, types, etc.
5193 are being merged into the current function.
5194 See expand_inline_function. */
5195 if (context == current_function_decl || context == inline_function_decl)
5196 context = 0;
5197
5198 /* If this is non-local, handle it. */
5199 if (context)
5200 {
d0977240
RK
5201 /* The following call just exists to abort if the context is
5202 not of a containing function. */
5203 find_function_data (context);
5204
bbf6f052
RK
5205 temp = SAVE_EXPR_RTL (exp);
5206 if (temp && GET_CODE (temp) == REG)
5207 {
5208 put_var_into_stack (exp);
5209 temp = SAVE_EXPR_RTL (exp);
5210 }
5211 if (temp == 0 || GET_CODE (temp) != MEM)
5212 abort ();
5213 return change_address (temp, mode,
5214 fix_lexical_addr (XEXP (temp, 0), exp));
5215 }
5216 if (SAVE_EXPR_RTL (exp) == 0)
5217 {
06089a8b
RK
5218 if (mode == VOIDmode)
5219 temp = const0_rtx;
5220 else
e5e809f4 5221 temp = assign_temp (type, 3, 0, 0);
1499e0a8 5222
bbf6f052 5223 SAVE_EXPR_RTL (exp) = temp;
bbf6f052 5224 if (!optimize && GET_CODE (temp) == REG)
38a448ca
RH
5225 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5226 save_expr_regs);
ff78f773
RK
5227
5228 /* If the mode of TEMP does not match that of the expression, it
5229 must be a promoted value. We pass store_expr a SUBREG of the
5230 wanted mode but mark it so that we know that it was already
5231 extended. Note that `unsignedp' was modified above in
5232 this case. */
5233
5234 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5235 {
38a448ca 5236 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
ff78f773
RK
5237 SUBREG_PROMOTED_VAR_P (temp) = 1;
5238 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5239 }
5240
4c7a0be9 5241 if (temp == const0_rtx)
921b3427
RK
5242 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5243 EXPAND_MEMORY_USE_BAD);
4c7a0be9
JW
5244 else
5245 store_expr (TREE_OPERAND (exp, 0), temp, 0);
e5e809f4
JL
5246
5247 TREE_USED (exp) = 1;
bbf6f052 5248 }
1499e0a8
RK
5249
5250 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5251 must be a promoted value. We return a SUBREG of the wanted mode,
0f41302f 5252 but mark it so that we know that it was already extended. */
1499e0a8
RK
5253
5254 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5255 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5256 {
e70d22c8
RK
5257 /* Compute the signedness and make the proper SUBREG. */
5258 promote_mode (type, mode, &unsignedp, 0);
38a448ca 5259 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
1499e0a8
RK
5260 SUBREG_PROMOTED_VAR_P (temp) = 1;
5261 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5262 return temp;
5263 }
5264
bbf6f052
RK
5265 return SAVE_EXPR_RTL (exp);
5266
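/* Editorial note (not from the source): a SAVE_EXPR guarantees single
   evaluation.  For instance, an absolute-value expansion like

       ((a + b) < 0 ? -(a + b) : (a + b))

   may wrap A + B in a SAVE_EXPR: the first expansion above stores the
   sum in a temporary, and every later reference reuses SAVE_EXPR_RTL
   instead of recomputing it.  */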
679163cf
MS
5267 case UNSAVE_EXPR:
5268 {
5269 rtx temp;
5270 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5271 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5272 return temp;
5273 }
5274
b50d17a1 5275 case PLACEHOLDER_EXPR:
e9a25f70
JL
5276 {
5277 tree placeholder_expr;
5278
5279 /* If there is an object at the head of the placeholder list,
e5e809f4 5280 see if some object in it is of type TYPE or a pointer to it. For
e9a25f70
JL
5281 further information, see tree.def. */
5282 for (placeholder_expr = placeholder_list;
5283 placeholder_expr != 0;
5284 placeholder_expr = TREE_CHAIN (placeholder_expr))
5285 {
5286 tree need_type = TYPE_MAIN_VARIANT (type);
5287 tree object = 0;
5288 tree old_list = placeholder_list;
5289 tree elt;
5290
e5e809f4
JL
5291 /* Find the outermost reference that is of the type we want.
5292 If none, see if any object has a type that is a pointer to
5293 the type we want. */
5294 for (elt = TREE_PURPOSE (placeholder_expr);
5295 elt != 0 && object == 0;
5296 elt
5297 = ((TREE_CODE (elt) == COMPOUND_EXPR
5298 || TREE_CODE (elt) == COND_EXPR)
5299 ? TREE_OPERAND (elt, 1)
5300 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5301 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5302 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5303 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5304 ? TREE_OPERAND (elt, 0) : 0))
5305 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5306 object = elt;
e9a25f70 5307
e9a25f70 5308 for (elt = TREE_PURPOSE (placeholder_expr);
e5e809f4
JL
5309 elt != 0 && object == 0;
5310 elt
5311 = ((TREE_CODE (elt) == COMPOUND_EXPR
5312 || TREE_CODE (elt) == COND_EXPR)
5313 ? TREE_OPERAND (elt, 1)
5314 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5315 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5316 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5317 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5318 ? TREE_OPERAND (elt, 0) : 0))
5319 if (POINTER_TYPE_P (TREE_TYPE (elt))
5320 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
e9a25f70 5321 == need_type))
e5e809f4 5322 object = build1 (INDIRECT_REF, need_type, elt);
dc500fa1 5323
e9a25f70 5324 if (object != 0)
2cde2255 5325 {
e9a25f70
JL
5326 /* Expand this object skipping the list entries before
5327 it was found in case it is also a PLACEHOLDER_EXPR.
5328 In that case, we want to translate it using subsequent
5329 entries. */
5330 placeholder_list = TREE_CHAIN (placeholder_expr);
5331 temp = expand_expr (object, original_target, tmode,
5332 ro_modifier);
5333 placeholder_list = old_list;
5334 return temp;
2cde2255 5335 }
e9a25f70
JL
5336 }
5337 }
b50d17a1
RK
5338
5339 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5340 abort ();
5341
5342 case WITH_RECORD_EXPR:
5343 /* Put the object on the placeholder list, expand our first operand,
5344 and pop the list. */
5345 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5346 placeholder_list);
5347 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
921b3427 5348 tmode, ro_modifier);
b50d17a1
RK
5349 placeholder_list = TREE_CHAIN (placeholder_list);
5350 return target;
5351
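/* Editorial note (hedged): PLACEHOLDER_EXPR and WITH_RECORD_EXPR mainly
   serve Ada-style records whose layout depends on the record object
   itself, e.g. a discriminated record whose TYPE_SIZE mentions a
   discriminant; WITH_RECORD_EXPR supplies the concrete object that the
   search above substitutes for the placeholder.  */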
bbf6f052 5352 case EXIT_EXPR:
e44842fe
RK
5353 expand_exit_loop_if_false (NULL_PTR,
5354 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
5355 return const0_rtx;
5356
5357 case LOOP_EXPR:
0088fcb1 5358 push_temp_slots ();
bbf6f052
RK
5359 expand_start_loop (1);
5360 expand_expr_stmt (TREE_OPERAND (exp, 0));
5361 expand_end_loop ();
0088fcb1 5362 pop_temp_slots ();
bbf6f052
RK
5363
5364 return const0_rtx;
5365
5366 case BIND_EXPR:
5367 {
5368 tree vars = TREE_OPERAND (exp, 0);
5369 int vars_need_expansion = 0;
5370
5371 /* Need to open a binding contour here because
e976b8b2 5372 if there are any cleanups they must be contained here. */
bbf6f052
RK
5373 expand_start_bindings (0);
5374
2df53c0b
RS
5375 /* Mark the corresponding BLOCK for output in its proper place. */
5376 if (TREE_OPERAND (exp, 2) != 0
5377 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5378 insert_block (TREE_OPERAND (exp, 2));
bbf6f052
RK
5379
5380 /* If VARS have not yet been expanded, expand them now. */
5381 while (vars)
5382 {
5383 if (DECL_RTL (vars) == 0)
5384 {
5385 vars_need_expansion = 1;
5386 expand_decl (vars);
5387 }
5388 expand_decl_init (vars);
5389 vars = TREE_CHAIN (vars);
5390 }
5391
921b3427 5392 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
bbf6f052
RK
5393
5394 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5395
5396 return temp;
5397 }
5398
5399 case RTL_EXPR:
83b853c9
JM
5400 if (RTL_EXPR_SEQUENCE (exp))
5401 {
5402 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5403 abort ();
5404 emit_insns (RTL_EXPR_SEQUENCE (exp));
5405 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5406 }
99310285 5407 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
ca814259 5408 free_temps_for_rtl_expr (exp);
bbf6f052
RK
5409 return RTL_EXPR_RTL (exp);
5410
5411 case CONSTRUCTOR:
dd27116b
RK
5412 /* If we don't need the result, just ensure we evaluate any
5413 subexpressions. */
5414 if (ignore)
5415 {
5416 tree elt;
5417 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
921b3427
RK
5418 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
5419 EXPAND_MEMORY_USE_BAD);
dd27116b
RK
5420 return const0_rtx;
5421 }
3207b172 5422
4af3895e
JVA
5423 /* All elts simple constants => refer to a constant in memory. But
5424 if this is a non-BLKmode mode, let it store a field at a time
5425 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 5426 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
5427 store directly into the target unless the type is large enough
5428 that memcpy will be used. If we are making an initializer and
3207b172 5429 all operands are constant, put it in memory as well. */
dd27116b 5430 else if ((TREE_STATIC (exp)
3207b172 5431 && ((mode == BLKmode
e5e809f4 5432 && ! (target != 0 && safe_from_p (target, exp, 1)))
d720b9d1
RK
5433 || TREE_ADDRESSABLE (exp)
5434 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5435 && (move_by_pieces_ninsns
67225c15
RK
5436 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5437 TYPE_ALIGN (type) / BITS_PER_UNIT)
9de08200
RK
5438 > MOVE_RATIO)
5439 && ! mostly_zeros_p (exp))))
dd27116b 5440 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
bbf6f052
RK
5441 {
5442 rtx constructor = output_constant_def (exp);
b552441b
RS
5443 if (modifier != EXPAND_CONST_ADDRESS
5444 && modifier != EXPAND_INITIALIZER
5445 && modifier != EXPAND_SUM
d6a5ac33
RK
5446 && (! memory_address_p (GET_MODE (constructor),
5447 XEXP (constructor, 0))
5448 || (flag_force_addr
5449 && GET_CODE (XEXP (constructor, 0)) != REG)))
bbf6f052
RK
5450 constructor = change_address (constructor, VOIDmode,
5451 XEXP (constructor, 0));
5452 return constructor;
5453 }
5454
bbf6f052
RK
5455 else
5456 {
e9ac02a6
JW
5457 /* Handle calls that pass values in multiple non-contiguous
5458 locations. The Irix 6 ABI has examples of this. */
e5e809f4 5459 if (target == 0 || ! safe_from_p (target, exp, 1)
e9ac02a6 5460 || GET_CODE (target) == PARALLEL)
06089a8b
RK
5461 {
5462 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5463 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5464 else
5465 target = assign_temp (type, 0, 1, 1);
5466 }
07604beb
RK
5467
5468 if (TREE_READONLY (exp))
5469 {
9151b3bf 5470 if (GET_CODE (target) == MEM)
effbcc6a
RK
5471 target = copy_rtx (target);
5472
07604beb
RK
5473 RTX_UNCHANGING_P (target) = 1;
5474 }
5475
e1a43f73 5476 store_constructor (exp, target, 0);
bbf6f052
RK
5477 return target;
5478 }
5479
5480 case INDIRECT_REF:
5481 {
5482 tree exp1 = TREE_OPERAND (exp, 0);
5483 tree exp2;
7581a30f
JW
5484 tree index;
5485 tree string = string_constant (exp1, &index);
5486 int i;
5487
06eaa86f 5488 /* Try to optimize reads from const strings. */
7581a30f
JW
5489 if (string
5490 && TREE_CODE (string) == STRING_CST
5491 && TREE_CODE (index) == INTEGER_CST
5492 && !TREE_INT_CST_HIGH (index)
5493 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
5494 && GET_MODE_CLASS (mode) == MODE_INT
06eaa86f
JW
5495 && GET_MODE_SIZE (mode) == 1
5496 && modifier != EXPAND_MEMORY_USE_WO)
7581a30f 5497 return GEN_INT (TREE_STRING_POINTER (string)[i]);
bbf6f052 5498
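/* Editorial illustration (not from the source): the optimization above
   folds a read through a string constant, e.g.

       c = *("abc" + 1);

   directly to the character value, here GEN_INT ('b').  */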
405f0da6
JW
5499 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5500 op0 = memory_address (mode, op0);
8c8a8e34 5501
921b3427
RK
5502 if (flag_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5503 {
5504 enum memory_use_mode memory_usage;
5505 memory_usage = get_memory_usage_from_modifier (modifier);
5506
5507 if (memory_usage != MEMORY_USE_DONT)
c85f7c16
JL
5508 {
5509 in_check_memory_usage = 1;
5510 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5511 op0, ptr_mode,
5512 GEN_INT (int_size_in_bytes (type)),
5513 TYPE_MODE (sizetype),
5514 GEN_INT (memory_usage),
5515 TYPE_MODE (integer_type_node));
5516 in_check_memory_usage = 0;
5517 }
921b3427
RK
5518 }
5519
38a448ca 5520 temp = gen_rtx_MEM (mode, op0);
8c8a8e34
JW
5521 /* If address was computed by addition,
5522 mark this as an element of an aggregate. */
5523 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5524 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5525 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
05e3bdb9 5526 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
8c8a8e34
JW
5527 || (TREE_CODE (exp1) == ADDR_EXPR
5528 && (exp2 = TREE_OPERAND (exp1, 0))
05e3bdb9 5529 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
8c8a8e34 5530 MEM_IN_STRUCT_P (temp) = 1;
2c4c436a 5531 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
1125706f
RK
5532
5533 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5534 here, because, in C and C++, the fact that a location is accessed
5535 through a pointer to const does not mean that the value there can
5536 never change. Languages where it can never change should
5537 also set TREE_STATIC. */
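/* Editorial example (not from the source): given

       int x = 1;
       const int *p = &x;

   *p is accessed through a pointer-to-const, yet the location can still
   change through X, which is why TREE_READONLY alone is not enough.  */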
5cb7a25a 5538 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
8c8a8e34
JW
5539 return temp;
5540 }
bbf6f052
RK
5541
5542 case ARRAY_REF:
742920c7
RK
5543 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5544 abort ();
bbf6f052 5545
bbf6f052 5546 {
742920c7
RK
5547 tree array = TREE_OPERAND (exp, 0);
5548 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5549 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5550 tree index = TREE_OPERAND (exp, 1);
5551 tree index_type = TREE_TYPE (index);
08293add 5552 HOST_WIDE_INT i;
b50d17a1 5553
d4c89139
PB
5554 /* Optimize the special-case of a zero lower bound.
5555
5556 We convert the low_bound to sizetype to avoid some problems
5557 with constant folding. (E.g. suppose the lower bound is 1,
5558 and its mode is QI. Without the conversion, (ARRAY
5559 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5560 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5561
5562 But sizetype isn't quite right either (especially if
5563 the lowbound is negative). FIXME */
5564
742920c7 5565 if (! integer_zerop (low_bound))
d4c89139
PB
5566 index = fold (build (MINUS_EXPR, index_type, index,
5567 convert (sizetype, low_bound)));
742920c7 5568
742920c7 5569 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
5570 This is not done in fold so it won't happen inside &.
5571 Don't fold if this is for wide characters since it's too
5572 difficult to do correctly and this is a very rare case. */
742920c7
RK
5573
5574 if (TREE_CODE (array) == STRING_CST
5575 && TREE_CODE (index) == INTEGER_CST
5576 && !TREE_INT_CST_HIGH (index)
307b821c 5577 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
ad2e7dd0
RK
5578 && GET_MODE_CLASS (mode) == MODE_INT
5579 && GET_MODE_SIZE (mode) == 1)
307b821c 5580 return GEN_INT (TREE_STRING_POINTER (array)[i]);
bbf6f052 5581
742920c7
RK
5582 /* If this is a constant index into a constant array,
5583 just get the value from the array. Handle both cases: when
5584 we have an explicit constructor and when our operand is a variable
5585 that was declared const. */
4af3895e 5586
742920c7
RK
5587 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5588 {
5589 if (TREE_CODE (index) == INTEGER_CST
5590 && TREE_INT_CST_HIGH (index) == 0)
5591 {
5592 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5593
5594 i = TREE_INT_CST_LOW (index);
5595 while (elem && i--)
5596 elem = TREE_CHAIN (elem);
5597 if (elem)
5598 return expand_expr (fold (TREE_VALUE (elem)), target,
921b3427 5599 tmode, ro_modifier);
742920c7
RK
5600 }
5601 }
4af3895e 5602
742920c7
RK
5603 else if (optimize >= 1
5604 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5605 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5606 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5607 {
08293add 5608 if (TREE_CODE (index) == INTEGER_CST)
742920c7
RK
5609 {
5610 tree init = DECL_INITIAL (array);
5611
5612 i = TREE_INT_CST_LOW (index);
5613 if (TREE_CODE (init) == CONSTRUCTOR)
5614 {
5615 tree elem = CONSTRUCTOR_ELTS (init);
5616
03dc44a6
RS
5617 while (elem
5618 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
742920c7
RK
5619 elem = TREE_CHAIN (elem);
5620 if (elem)
5621 return expand_expr (fold (TREE_VALUE (elem)), target,
921b3427 5622 tmode, ro_modifier);
742920c7
RK
5623 }
5624 else if (TREE_CODE (init) == STRING_CST
08293add
RK
5625 && TREE_INT_CST_HIGH (index) == 0
5626 && (TREE_INT_CST_LOW (index)
5627 < TREE_STRING_LENGTH (init)))
5628 return (GEN_INT
5629 (TREE_STRING_POINTER
5630 (init)[TREE_INT_CST_LOW (index)]));
742920c7
RK
5631 }
5632 }
5633 }
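/* Editorial illustration (not from the source): with

       static const int t[3] = { 10, 20, 30 };

   a use of t[1] under optimization is folded here to the constant 20 by
   walking the initializer's CONSTRUCTOR element list.  */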
8c8a8e34 5634
08293add 5635 /* ... fall through ... */
bbf6f052
RK
5636
5637 case COMPONENT_REF:
5638 case BIT_FIELD_REF:
4af3895e 5639 /* If the operand is a CONSTRUCTOR, we can just extract the
7a0b7b9a
RK
5640 appropriate field if it is present. Don't do this if we have
5641 already written the data since we want to refer to that copy
5642 and varasm.c assumes that's what we'll do. */
4af3895e 5643 if (code != ARRAY_REF
7a0b7b9a
RK
5644 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5645 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
4af3895e
JVA
5646 {
5647 tree elt;
5648
5649 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5650 elt = TREE_CHAIN (elt))
86b5812c
RK
5651 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
5652 /* We can normally use the value of the field in the
5653 CONSTRUCTOR. However, if this is a bitfield in
5654 an integral mode that we can fit in a HOST_WIDE_INT,
5655 we must mask only the number of bits in the bitfield,
5656 since this is done implicitly by the constructor. If
5657 the bitfield does not meet either of those conditions,
5658 we can't do this optimization. */
5659 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
5660 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
5661 == MODE_INT)
5662 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
5663 <= HOST_BITS_PER_WIDE_INT))))
5664 {
5665 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5666 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
5667 {
5668 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
86b5812c
RK
5669
5670 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
5671 {
5672 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
5673 op0 = expand_and (op0, op1, target);
5674 }
5675 else
5676 {
e5e809f4
JL
5677 enum machine_mode imode
5678 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
86b5812c 5679 tree count
e5e809f4
JL
5680 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
5681 0);
86b5812c
RK
5682
5683 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
5684 target, 0);
5685 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
5686 target, 0);
5687 }
5688 }
5689
5690 return op0;
5691 }
4af3895e
JVA
5692 }
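/* Editorial illustration (not from the source): given

       struct { unsigned f : 3; } c = { 5 };

   extracting c.f from the CONSTRUCTOR masks the stored value with
   (1 << 3) - 1; for a signed bitfield, the shift pair above performs
   the sign extension instead.  */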
5693
bbf6f052
RK
5694 {
5695 enum machine_mode mode1;
5696 int bitsize;
5697 int bitpos;
7bb0943f 5698 tree offset;
bbf6f052 5699 int volatilep = 0;
034f9101 5700 int alignment;
839c4796
RK
5701 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5702 &mode1, &unsignedp, &volatilep,
5703 &alignment);
bbf6f052 5704
e7f3c83f
RK
5705 /* If we got back the original object, something is wrong. Perhaps
5706 we are evaluating an expression too early. In any event, don't
5707 infinitely recurse. */
5708 if (tem == exp)
5709 abort ();
5710
3d27140a 5711 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
5712 computation, since it will need a temporary and TARGET is known
5713 to suffice. This occurs in unchecked conversion in Ada. */
5714
5715 op0 = expand_expr (tem,
5716 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5717 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5718 != INTEGER_CST)
5719 ? target : NULL_RTX),
4ed67205 5720 VOIDmode,
e5e809f4
JL
5721 modifier == EXPAND_INITIALIZER
5722 ? modifier : EXPAND_NORMAL);
bbf6f052 5723
8c8a8e34 5724 /* If this is a constant, put it into a register if it is a
8008b228 5725 legitimate constant and into memory if it isn't. */
8c8a8e34
JW
5726 if (CONSTANT_P (op0))
5727 {
5728 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
f2878c6b 5729 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
8c8a8e34
JW
5730 op0 = force_reg (mode, op0);
5731 else
5732 op0 = validize_mem (force_const_mem (mode, op0));
5733 }
5734
7bb0943f
RS
5735 if (offset != 0)
5736 {
906c4e36 5737 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
5738
5739 if (GET_CODE (op0) != MEM)
5740 abort ();
2d48c13d
JL
5741
5742 if (GET_MODE (offset_rtx) != ptr_mode)
5743#ifdef POINTERS_EXTEND_UNSIGNED
5744 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 1);
5745#else
5746 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5747#endif
5748
7bb0943f 5749 op0 = change_address (op0, VOIDmode,
38a448ca
RH
5750 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
5751 force_reg (ptr_mode, offset_rtx)));
7bb0943f
RS
5752 }
5753
bbf6f052
RK
5754 /* Don't forget about volatility even if this is a bitfield. */
5755 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5756 {
5757 op0 = copy_rtx (op0);
5758 MEM_VOLATILE_P (op0) = 1;
5759 }
5760
921b3427
RK
5761 /* Check the access. */
5762 if (flag_check_memory_usage && GET_CODE (op0) == MEM)
5763 {
5764 enum memory_use_mode memory_usage;
5765 memory_usage = get_memory_usage_from_modifier (modifier);
5766
5767 if (memory_usage != MEMORY_USE_DONT)
5768 {
5769 rtx to;
5770 int size;
5771
5772 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
5773 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
5774
5775 /* Check the access right of the pointer. */
e9a25f70
JL
5776 if (size > BITS_PER_UNIT)
5777 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5778 to, ptr_mode,
5779 GEN_INT (size / BITS_PER_UNIT),
5780 TYPE_MODE (sizetype),
956d6950
JL
5781 GEN_INT (memory_usage),
5782 TYPE_MODE (integer_type_node));
921b3427
RK
5783 }
5784 }
5785
ccc98036
RS
5786 /* In cases where an aligned union has an unaligned object
5787 as a field, we might be extracting a BLKmode value from
5788 an integer-mode (e.g., SImode) object. Handle this case
5789 by doing the extract into an object as wide as the field
5790 (which we know to be the width of a basic mode), then
f2420d0b
JW
5791 storing into memory, and changing the mode to BLKmode.
5792 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5793 EXPAND_INITIALIZER), then we must not copy to a temporary. */
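/* Editorial note (hedged): for example, a union kept in an SImode
   register may contain a BLKmode structure field; the path below pulls
   the bits out with extract_bit_field, spills them to a stack
   temporary, and relabels that temporary as BLKmode.  */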
bbf6f052 5794 if (mode1 == VOIDmode
ccc98036 5795 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
f9409c3a 5796 || (modifier != EXPAND_CONST_ADDRESS
f9409c3a 5797 && modifier != EXPAND_INITIALIZER
c2722ef6
RK
5798 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5799 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5800 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
f9409c3a
JW
5801 /* If the field isn't aligned enough to fetch as a memref,
5802 fetch it as a bit field. */
5803 || (SLOW_UNALIGNED_ACCESS
5804 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5805 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
bbf6f052 5806 {
bbf6f052
RK
5807 enum machine_mode ext_mode = mode;
5808
5809 if (ext_mode == BLKmode)
5810 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5811
5812 if (ext_mode == BLKmode)
a281e72d
RK
5813 {
5814 /* In this case, BITPOS must start at a byte boundary and
5815 TARGET, if specified, must be a MEM. */
5816 if (GET_CODE (op0) != MEM
5817 || (target != 0 && GET_CODE (target) != MEM)
5818 || bitpos % BITS_PER_UNIT != 0)
5819 abort ();
5820
5821 op0 = change_address (op0, VOIDmode,
5822 plus_constant (XEXP (op0, 0),
5823 bitpos / BITS_PER_UNIT));
5824 if (target == 0)
5825 target = assign_temp (type, 0, 1, 1);
5826
5827 emit_block_move (target, op0,
5828 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5829 / BITS_PER_UNIT),
5830 1);
5831
5832 return target;
5833 }
bbf6f052 5834
dc6d66b3
RK
5835 op0 = validize_mem (op0);
5836
5837 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5838 mark_reg_pointer (XEXP (op0, 0), alignment);
5839
5840 op0 = extract_bit_field (op0, bitsize, bitpos,
bbf6f052 5841 unsignedp, target, ext_mode, ext_mode,
034f9101 5842 alignment,
bbf6f052 5843 int_size_in_bytes (TREE_TYPE (tem)));
ef19912d
RK
5844
5845 /* If the result is a record type and BITSIZE is narrower than
5846 the mode of OP0, an integral mode, and this is a big endian
5847 machine, we must put the field into the high-order bits. */
5848 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5849 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5850 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5851 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5852 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5853 - bitsize),
5854 op0, 1);
5855
bbf6f052
RK
5856 if (mode == BLKmode)
5857 {
5858 rtx new = assign_stack_temp (ext_mode,
5859 bitsize / BITS_PER_UNIT, 0);
5860
5861 emit_move_insn (new, op0);
5862 op0 = copy_rtx (new);
5863 PUT_MODE (op0, BLKmode);
092dded9 5864 MEM_IN_STRUCT_P (op0) = 1;
bbf6f052
RK
5865 }
5866
5867 return op0;
5868 }
5869
05019f83
RK
5870 /* If the result is BLKmode, use that to access the object
5871 now as well. */
5872 if (mode == BLKmode)
5873 mode1 = BLKmode;
5874
bbf6f052
RK
5875 /* Get a reference to just this component. */
5876 if (modifier == EXPAND_CONST_ADDRESS
5877 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
38a448ca
RH
5878 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
5879 (bitpos / BITS_PER_UNIT)));
bbf6f052
RK
5880 else
5881 op0 = change_address (op0, mode1,
5882 plus_constant (XEXP (op0, 0),
5883 (bitpos / BITS_PER_UNIT)));
dc6d66b3
RK
5884 if (GET_CODE (XEXP (op0, 0)) == REG)
5885 mark_reg_pointer (XEXP (op0, 0), alignment);
5886
bbf6f052
RK
5887 MEM_IN_STRUCT_P (op0) = 1;
5888 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 5889 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 5890 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 5891 || modifier == EXPAND_INITIALIZER)
bbf6f052 5892 return op0;
0d15e60c 5893 else if (target == 0)
bbf6f052 5894 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 5895
bbf6f052
RK
5896 convert_move (target, op0, unsignedp);
5897 return target;
5898 }
5899
bbf6f052
RK
5900 /* Intended for a reference to a buffer of a file-object in Pascal.
5901 But it's not certain that a special tree code will really be
5902 necessary for these. INDIRECT_REF might work for them. */
5903 case BUFFER_REF:
5904 abort ();
5905
7308a047 5906 case IN_EXPR:
7308a047 5907 {
d6a5ac33
RK
5908 /* Pascal set IN expression.
5909
5910 Algorithm:
5911 rlo = set_low - (set_low%bits_per_word);
5912 the_word = set [ (index - rlo)/bits_per_word ];
5913 bit_index = index % bits_per_word;
5914 bitmask = 1 << bit_index;
5915 return !!(the_word & bitmask); */
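/* Editorial worked example (not from the source), taking bits_per_word
   to be 8 for concreteness: with set_low == 3 and index == 13,

       rlo       = 3 - (3 % 8)       = 0
       the_word  = set[(13 - 0) / 8] = set[1]
       bit_index = 13 % 8            = 5
       bitmask   = 1 << 5

   so the test returns !!(set[1] & 0x20).  */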
5916
7308a047
RS
5917 tree set = TREE_OPERAND (exp, 0);
5918 tree index = TREE_OPERAND (exp, 1);
d6a5ac33 5919 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308a047 5920 tree set_type = TREE_TYPE (set);
7308a047
RS
5921 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5922 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
d6a5ac33
RK
5923 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5924 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5925 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5926 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5927 rtx setaddr = XEXP (setval, 0);
5928 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7308a047
RS
5929 rtx rlow;
5930 rtx diff, quo, rem, addr, bit, result;
7308a047 5931
d6a5ac33
RK
5932 preexpand_calls (exp);
5933
5934 /* If domain is empty, answer is no. Likewise if index is constant
5935 and out of bounds. */
51723711 5936 if (((TREE_CODE (set_high_bound) == INTEGER_CST
d6a5ac33 5937 && TREE_CODE (set_low_bound) == INTEGER_CST
51723711 5938 && tree_int_cst_lt (set_high_bound, set_low_bound))
d6a5ac33
RK
5939 || (TREE_CODE (index) == INTEGER_CST
5940 && TREE_CODE (set_low_bound) == INTEGER_CST
5941 && tree_int_cst_lt (index, set_low_bound))
5942 || (TREE_CODE (set_high_bound) == INTEGER_CST
5943 && TREE_CODE (index) == INTEGER_CST
5944 && tree_int_cst_lt (set_high_bound, index))))
7308a047
RS
5945 return const0_rtx;
5946
d6a5ac33
RK
5947 if (target == 0)
5948 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7308a047
RS
5949
5950 /* If we get here, we have to generate the code for both cases
5951 (in range and out of range). */
5952
5953 op0 = gen_label_rtx ();
5954 op1 = gen_label_rtx ();
5955
5956 if (! (GET_CODE (index_val) == CONST_INT
5957 && GET_CODE (lo_r) == CONST_INT))
5958 {
17938e57 5959 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
d6a5ac33 5960 GET_MODE (index_val), iunsignedp, 0);
7308a047
RS
5961 emit_jump_insn (gen_blt (op1));
5962 }
5963
5964 if (! (GET_CODE (index_val) == CONST_INT
5965 && GET_CODE (hi_r) == CONST_INT))
5966 {
17938e57 5967 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
d6a5ac33 5968 GET_MODE (index_val), iunsignedp, 0);
7308a047
RS
5969 emit_jump_insn (gen_bgt (op1));
5970 }
5971
5972 /* Calculate the element number of bit zero in the first word
5973 of the set. */
5974 if (GET_CODE (lo_r) == CONST_INT)
17938e57
RK
5975 rlow = GEN_INT (INTVAL (lo_r)
5976 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 5977 else
17938e57
RK
5978 rlow = expand_binop (index_mode, and_optab, lo_r,
5979 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d6a5ac33 5980 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 5981
d6a5ac33
RK
5982 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5983 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047
RS
5984
5985 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 5986 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 5987 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
d6a5ac33
RK
5988 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5989
7308a047 5990 addr = memory_address (byte_mode,
d6a5ac33
RK
5991 expand_binop (index_mode, add_optab, diff,
5992 setaddr, NULL_RTX, iunsignedp,
17938e57 5993 OPTAB_LIB_WIDEN));
d6a5ac33 5994
7308a047
RS
5995 /* Extract the bit we want to examine. */
5996 bit = expand_shift (RSHIFT_EXPR, byte_mode,
38a448ca 5997 gen_rtx_MEM (byte_mode, addr),
17938e57
RK
5998 make_tree (TREE_TYPE (index), rem),
5999 NULL_RTX, 1);
6000 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6001 GET_MODE (target) == byte_mode ? target : 0,
7308a047 6002 1, OPTAB_LIB_WIDEN);
17938e57
RK
6003
6004 if (result != target)
6005 convert_move (target, result, 1);
7308a047
RS
6006
6007 /* Output the code to handle the out-of-range case. */
6008 emit_jump (op0);
6009 emit_label (op1);
6010 emit_move_insn (target, const0_rtx);
6011 emit_label (op0);
6012 return target;
6013 }
6014
bbf6f052
RK
6015 case WITH_CLEANUP_EXPR:
6016 if (RTL_EXPR_RTL (exp) == 0)
6017 {
6018 RTL_EXPR_RTL (exp)
921b3427 6019 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
e976b8b2
MS
6020 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6021
bbf6f052
RK
6022 /* That's it for this cleanup. */
6023 TREE_OPERAND (exp, 2) = 0;
6024 }
6025 return RTL_EXPR_RTL (exp);
6026
5dab5552
MS
6027 case CLEANUP_POINT_EXPR:
6028 {
d93d4205 6029 extern int temp_slot_level;
e976b8b2
MS
6030 /* Start a new binding layer that will keep track of all cleanup
6031 actions to be performed. */
6032 expand_start_bindings (0);
6033
d93d4205 6034 target_temp_slot_level = temp_slot_level;
e976b8b2 6035
921b3427 6036 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
f283f66b
JM
6037 /* If we're going to use this value, load it up now. */
6038 if (! ignore)
6039 op0 = force_not_mem (op0);
d93d4205 6040 preserve_temp_slots (op0);
e976b8b2 6041 expand_end_bindings (NULL_TREE, 0, 0);
5dab5552
MS
6042 }
6043 return op0;
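/* Editorial note (hedged): a CLEANUP_POINT_EXPR marks a full expression;
   in C++, temporaries created while evaluating it are destroyed when
   the bindings opened above close, which is why the result is forced
   out of memory before expand_end_bindings runs the cleanups.  */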
6044
bbf6f052
RK
6045 case CALL_EXPR:
6046 /* Check for a built-in function. */
6047 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
6048 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6049 == FUNCTION_DECL)
bbf6f052
RK
6050 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6051 return expand_builtin (exp, target, subtarget, tmode, ignore);
d6a5ac33 6052
bbf6f052
RK
6053 /* If this call was expanded already by preexpand_calls,
6054 just return the result we got. */
6055 if (CALL_EXPR_RTL (exp) != 0)
6056 return CALL_EXPR_RTL (exp);
d6a5ac33 6057
8129842c 6058 return expand_call (exp, target, ignore);
bbf6f052
RK
6059
6060 case NON_LVALUE_EXPR:
6061 case NOP_EXPR:
6062 case CONVERT_EXPR:
6063 case REFERENCE_EXPR:
bbf6f052
RK
6064 if (TREE_CODE (type) == UNION_TYPE)
6065 {
6066 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6067 if (target == 0)
06089a8b
RK
6068 {
6069 if (mode != BLKmode)
6070 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6071 else
6072 target = assign_temp (type, 0, 1, 1);
6073 }
d6a5ac33 6074
bbf6f052
RK
6075 if (GET_CODE (target) == MEM)
6076 /* Store data into beginning of memory target. */
6077 store_expr (TREE_OPERAND (exp, 0),
1499e0a8
RK
6078 change_address (target, TYPE_MODE (valtype), 0), 0);
6079
bbf6f052
RK
6080 else if (GET_CODE (target) == REG)
6081 /* Store this field into a union of the proper type. */
6082 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6083 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6084 VOIDmode, 0, 1,
6085 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
6086 else
6087 abort ();
6088
6089 /* Return the entire union. */
6090 return target;
6091 }
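/* Editorial illustration (an assumption, not from the source):
   conversion to a union type arises e.g. from the GNU C cast-to-union
   extension,

       union u { int i; float f; };
       union u v = (union u) 3;

   which stores the operand at the beginning of the union object, as
   done above.  */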
d6a5ac33 6092
7f62854a
RK
6093 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6094 {
6095 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
921b3427 6096 ro_modifier);
7f62854a
RK
6097
6098 /* If the signedness of the conversion differs and OP0 is
6099 a promoted SUBREG, clear that indication since we now
6100 have to do the proper extension. */
6101 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6102 && GET_CODE (op0) == SUBREG)
6103 SUBREG_PROMOTED_VAR_P (op0) = 0;
6104
6105 return op0;
6106 }
6107
1499e0a8 6108 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
12342f90
RS
6109 if (GET_MODE (op0) == mode)
6110 return op0;
12342f90 6111
d6a5ac33
RK
6112 /* If OP0 is a constant, just convert it into the proper mode. */
6113 if (CONSTANT_P (op0))
6114 return
6115 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6116 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
12342f90 6117
26fcb35a 6118 if (modifier == EXPAND_INITIALIZER)
38a448ca 6119 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 6120
bbf6f052 6121 if (target == 0)
d6a5ac33
RK
6122 return
6123 convert_to_mode (mode, op0,
6124 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 6125 else
d6a5ac33
RK
6126 convert_move (target, op0,
6127 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
6128 return target;
6129
6130 case PLUS_EXPR:
0f41302f
MS
6131 /* We come here from MINUS_EXPR when the second operand is a
6132 constant. */
bbf6f052
RK
6133 plus_expr:
6134 this_optab = add_optab;
6135
6136 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6137 something else, make sure we add the register to the constant and
6138 then to the other thing. This case can occur during strength
6139 reduction and doing it this way will produce better code if the
6140 frame pointer or argument pointer is eliminated.
6141
6142 fold-const.c will ensure that the constant is always in the inner
6143 PLUS_EXPR, so the only case we need to do anything about is if
6144 sp, ap, or fp is our second argument, in which case we must swap
6145 the innermost first argument and our second argument. */
6146
6147 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6148 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6149 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6150 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6151 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6152 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6153 {
6154 tree t = TREE_OPERAND (exp, 1);
6155
6156 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6157 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6158 }
6159
88f63c77 6160 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
6161 something, we might be forming a constant. So try to use
6162 plus_constant. If it produces a sum and we can't accept it,
6163 use force_operand. This allows P = &ARR[const] to generate
6164 efficient code on machines where a SYMBOL_REF is not a valid
6165 address.
6166
6167 If this is an EXPAND_SUM call, always return the sum. */
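/* Editorial illustration (not from the source): for

       static int ARR[10];
       int *p = &ARR[3];

   and a 4-byte int, the code below folds the address to
   plus_constant (symbol_ref ARR, 12), so no add insn is needed even
   where a bare SYMBOL_REF is not a valid address.  */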
c980ac49 6168 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
88f63c77 6169 || mode == ptr_mode)
bbf6f052 6170 {
c980ac49
RS
6171 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6172 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6173 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6174 {
6175 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6176 EXPAND_SUM);
6177 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6178 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6179 op1 = force_operand (op1, target);
6180 return op1;
6181 }
bbf6f052 6182
c980ac49
RS
6183 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6184 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6185 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6186 {
6187 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6188 EXPAND_SUM);
6189 if (! CONSTANT_P (op0))
6190 {
6191 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6192 VOIDmode, modifier);
709f5be1
RS
6193 /* Don't go to both_summands if modifier
6194 says it's not right to return a PLUS. */
6195 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6196 goto binop2;
c980ac49
RS
6197 goto both_summands;
6198 }
6199 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6200 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6201 op0 = force_operand (op0, target);
6202 return op0;
6203 }
bbf6f052
RK
6204 }
6205
6206 /* No sense saving up arithmetic to be done
6207 if it's all in the wrong mode to form part of an address.
6208 And force_operand won't know whether to sign-extend or
6209 zero-extend. */
6210 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 6211 || mode != ptr_mode)
c980ac49 6212 goto binop;
bbf6f052
RK
6213
6214 preexpand_calls (exp);
e5e809f4 6215 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
6216 subtarget = 0;
6217
921b3427
RK
6218 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6219 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
bbf6f052 6220
c980ac49 6221 both_summands:
bbf6f052
RK
6222 /* Make sure any term that's a sum with a constant comes last. */
6223 if (GET_CODE (op0) == PLUS
6224 && CONSTANT_P (XEXP (op0, 1)))
6225 {
6226 temp = op0;
6227 op0 = op1;
6228 op1 = temp;
6229 }
6230 /* If adding to a sum including a constant,
6231 associate it to put the constant outside. */
6232 if (GET_CODE (op1) == PLUS
6233 && CONSTANT_P (XEXP (op1, 1)))
6234 {
6235 rtx constant_term = const0_rtx;
6236
6237 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6238 if (temp != 0)
6239 op0 = temp;
6f90e075
JW
6240 /* Ensure that MULT comes first if there is one. */
6241 else if (GET_CODE (op0) == MULT)
38a448ca 6242 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
bbf6f052 6243 else
38a448ca 6244 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
bbf6f052
RK
6245
6246 /* Let's also eliminate constants from op0 if possible. */
6247 op0 = eliminate_constant_term (op0, &constant_term);
6248
6249 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6250 their sum should be a constant. Form it into OP1, since the
6251 result we want will then be OP0 + OP1. */
6252
6253 temp = simplify_binary_operation (PLUS, mode, constant_term,
6254 XEXP (op1, 1));
6255 if (temp != 0)
6256 op1 = temp;
6257 else
38a448ca 6258 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
bbf6f052
RK
6259 }
6260
6261 /* Put a constant term last and put a multiplication first. */
6262 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6263 temp = op1, op1 = op0, op0 = temp;
6264
6265 temp = simplify_binary_operation (PLUS, mode, op0, op1);
38a448ca 6266 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
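/* Editorial illustration (not from the source): the reassociation above
   turns, e.g.,

       (REG1 + 4) + (REG2 + 8)   into   (REG1 + REG2) + 12

   keeping any MULT first and the constant term last, which is the
   shape address-mode recognizers expect.  */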
bbf6f052
RK
6267
6268 case MINUS_EXPR:
ea87523e
RK
6269 /* For initializers, we are allowed to return a MINUS of two
6270 symbolic constants. Here we handle all cases when both operands
6271 are constant. */
bbf6f052
RK
6272 /* Handle difference of two symbolic constants,
6273 for the sake of an initializer. */
6274 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6275 && really_constant_p (TREE_OPERAND (exp, 0))
6276 && really_constant_p (TREE_OPERAND (exp, 1)))
6277 {
906c4e36 6278 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
921b3427 6279 VOIDmode, ro_modifier);
906c4e36 6280 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
921b3427 6281 VOIDmode, ro_modifier);
ea87523e 6282
ea87523e
RK
6283 /* If the last operand is a CONST_INT, use plus_constant of
6284 the negated constant. Else make the MINUS. */
6285 if (GET_CODE (op1) == CONST_INT)
6286 return plus_constant (op0, - INTVAL (op1));
6287 else
38a448ca 6288 return gen_rtx_MINUS (mode, op0, op1);
bbf6f052
RK
6289 }
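/* Editorial illustration (hedged): this supports initializers such as

       static char a, b;
       static long d = (char *) &b - (char *) &a;

   where the language accepts them: both operands are symbolic
   constants, so the difference is emitted as a plus_constant or a
   literal MINUS rtx for the assembler to resolve.  */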
6290 /* Convert A - const to A + (-const). */
6291 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6292 {
ae431183
RK
6293 tree negated = fold (build1 (NEGATE_EXPR, type,
6294 TREE_OPERAND (exp, 1)));
6295
6296 /* Deal with the case where we can't negate the constant
6297 in TYPE. */
6298 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6299 {
6300 tree newtype = signed_type (type);
6301 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6302 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6303 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6304
6305 if (! TREE_OVERFLOW (newneg))
6306 return expand_expr (convert (type,
6307 build (PLUS_EXPR, newtype,
6308 newop0, newneg)),
921b3427 6309 target, tmode, ro_modifier);
ae431183
RK
6310 }
6311 else
6312 {
6313 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6314 goto plus_expr;
6315 }
bbf6f052
RK
6316 }
6317 this_optab = sub_optab;
6318 goto binop;
6319
6320 case MULT_EXPR:
6321 preexpand_calls (exp);
6322 /* If first operand is constant, swap them.
6323 Thus the following special case checks need only
6324 check the second operand. */
6325 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6326 {
6327 register tree t1 = TREE_OPERAND (exp, 0);
6328 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6329 TREE_OPERAND (exp, 1) = t1;
6330 }
6331
6332 /* Attempt to return something suitable for generating an
6333 indexed address, for machines that support that. */
6334
88f63c77 6335 if (modifier == EXPAND_SUM && mode == ptr_mode
bbf6f052 6336 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 6337 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 6338 {
921b3427
RK
6339 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6340 EXPAND_SUM);
bbf6f052
RK
6341
6342 /* Apply distributive law if OP0 is x+c. */
6343 if (GET_CODE (op0) == PLUS
6344 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
38a448ca
RH
6345 return gen_rtx_PLUS (mode,
6346 gen_rtx_MULT (mode, XEXP (op0, 0),
6347 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
906c4e36
RK
6348 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6349 * INTVAL (XEXP (op0, 1))));
bbf6f052
RK
6350
6351 if (GET_CODE (op0) != REG)
906c4e36 6352 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
6353 if (GET_CODE (op0) != REG)
6354 op0 = copy_to_mode_reg (mode, op0);
6355
38a448ca
RH
6356 return gen_rtx_MULT (mode, op0,
6357 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
bbf6f052
RK
6358 }
6359
e5e809f4 6360 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
6361 subtarget = 0;
6362
6363 /* Check for multiplying things that have been extended
6364 from a narrower type. If this machine supports multiplying
6365 in that narrower type with a result in the desired type,
6366 do it that way, and avoid the explicit type-conversion. */
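/* Editorial illustration (not from the source): with int I and J on a
   32-bit target,

       long long p = (long long) i * (long long) j;

   fits this pattern: both operands are extensions from SImode, so one
   widening multiply (a mulsidi-style insn) replaces extending each
   operand to DImode and multiplying there.  */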
6367 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6368 && TREE_CODE (type) == INTEGER_TYPE
6369 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6370 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6371 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6372 && int_fits_type_p (TREE_OPERAND (exp, 1),
6373 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6374 /* Don't use a widening multiply if a shift will do. */
6375 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 6376 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
6377 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6378 ||
6379 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6380 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6381 ==
6382 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6383 /* If both operands are extended, they must either both
6384 be zero-extended or both be sign-extended. */
6385 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6386 ==
6387 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6388 {
6389 enum machine_mode innermode
6390 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
b10af0c8
TG
6391 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6392 ? smul_widen_optab : umul_widen_optab);
bbf6f052
RK
6393 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6394 ? umul_widen_optab : smul_widen_optab);
b10af0c8 6395 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 6396 {
b10af0c8
TG
6397 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6398 {
6399 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6400 NULL_RTX, VOIDmode, 0);
6401 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6402 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6403 VOIDmode, 0);
6404 else
6405 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6406 NULL_RTX, VOIDmode, 0);
6407 goto binop2;
6408 }
6409 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6410 && innermode == word_mode)
6411 {
6412 rtx htem;
6413 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6414 NULL_RTX, VOIDmode, 0);
6415 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6416 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6417 VOIDmode, 0);
6418 else
6419 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6420 NULL_RTX, VOIDmode, 0);
6421 temp = expand_binop (mode, other_optab, op0, op1, target,
6422 unsignedp, OPTAB_LIB_WIDEN);
6423 htem = expand_mult_highpart_adjust (innermode,
6424 gen_highpart (innermode, temp),
6425 op0, op1,
6426 gen_highpart (innermode, temp),
6427 unsignedp);
6428 emit_move_insn (gen_highpart (innermode, temp), htem);
6429 return temp;
6430 }
bbf6f052
RK
6431 }
6432 }
6433 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 6434 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6435 return expand_mult (mode, op0, op1, target, unsignedp);
6436
6437 case TRUNC_DIV_EXPR:
6438 case FLOOR_DIV_EXPR:
6439 case CEIL_DIV_EXPR:
6440 case ROUND_DIV_EXPR:
6441 case EXACT_DIV_EXPR:
6442 preexpand_calls (exp);
e5e809f4 6443 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
6444 subtarget = 0;
6445 /* Possible optimization: compute the dividend with EXPAND_SUM
6446 then, if the divisor is constant, we can optimize the case
6447 where some terms of the dividend have coefficients divisible by it. */
6448 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 6449 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6450 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6451
6452 case RDIV_EXPR:
6453 this_optab = flodiv_optab;
6454 goto binop;
6455
6456 case TRUNC_MOD_EXPR:
6457 case FLOOR_MOD_EXPR:
6458 case CEIL_MOD_EXPR:
6459 case ROUND_MOD_EXPR:
6460 preexpand_calls (exp);
e5e809f4 6461 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
6462 subtarget = 0;
6463 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 6464 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6465 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6466
6467 case FIX_ROUND_EXPR:
6468 case FIX_FLOOR_EXPR:
6469 case FIX_CEIL_EXPR:
6470 abort (); /* Not used for C. */
6471
6472 case FIX_TRUNC_EXPR:
906c4e36 6473 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6474 if (target == 0)
6475 target = gen_reg_rtx (mode);
6476 expand_fix (target, op0, unsignedp);
6477 return target;
6478
6479 case FLOAT_EXPR:
906c4e36 6480 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6481 if (target == 0)
6482 target = gen_reg_rtx (mode);
6483 /* expand_float can't figure out what to do if FROM has VOIDmode.
6484 So give it the correct mode. With -O, cse will optimize this. */
6485 if (GET_MODE (op0) == VOIDmode)
6486 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6487 op0);
6488 expand_float (target, op0,
6489 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6490 return target;
6491
6492 case NEGATE_EXPR:
5b22bee8 6493 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
bbf6f052
RK
6494 temp = expand_unop (mode, neg_optab, op0, target, 0);
6495 if (temp == 0)
6496 abort ();
6497 return temp;
6498
6499 case ABS_EXPR:
6500 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6501
6502 /* Handle complex values specially. */
6503 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6504 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6505 return expand_complex_abs (mode, op0, target, unsignedp);
6506
6507 /* Unsigned abs is simply the operand. Testing here means we don't
6508 risk generating incorrect code below. */
6509 if (TREE_UNSIGNED (type))
6510 return op0;
6511
6512 return expand_abs (mode, op0, target, unsignedp,
6513 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
6514
6515 case MAX_EXPR:
6516 case MIN_EXPR:
6517 target = original_target;
6518 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
6519 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6520 || GET_MODE (target) != mode
6521 || (GET_CODE (target) == REG
6522 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6523 target = gen_reg_rtx (mode);
6524 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6525 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6526
6527 /* First try to do it with a special MIN or MAX instruction.
6528 If that does not win, use a conditional jump to select the proper
6529 value. */
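 /* Editor's note -- a sketch of the fallback, not part of the original
 source: when no min/max pattern matches, the jump sequence emitted
 below behaves, in C terms, like

 target = op0;
 if (! (target >= op1)) target = op1; for MAX_EXPR
 if (! (target <= op1)) target = op1; for MIN_EXPR

 with the comparison signedness taken from the operand's type. */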
6530 this_optab = (TREE_UNSIGNED (type)
6531 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6532 : (code == MIN_EXPR ? smin_optab : smax_optab));
6533
6534 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6535 OPTAB_WIDEN);
6536 if (temp != 0)
6537 return temp;
6538
6539 /* At this point, a MEM target is no longer useful; we will get better
6540 code without it. */
6541
6542 if (GET_CODE (target) == MEM)
6543 target = gen_reg_rtx (mode);
6544
6545 if (target != op0)
6546 emit_move_insn (target, op0);
6547
6548 op0 = gen_label_rtx ();
6549
6550 /* If this mode is an integer too wide to compare properly,
6551 compare word by word. Rely on cse to optimize constant cases. */
6552 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6553 {
6554 if (code == MAX_EXPR)
6555 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6556 target, op1, NULL_RTX, op0);
6557 else
6558 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6559 op1, target, NULL_RTX, op0);
6560 emit_move_insn (target, op1);
6561 }
6562 else
6563 {
6564 if (code == MAX_EXPR)
6565 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6566 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6567 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6568 else
6569 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6570 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6571 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6572 if (temp == const0_rtx)
6573 emit_move_insn (target, op1);
6574 else if (temp != const_true_rtx)
6575 {
6576 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6577 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6578 else
6579 abort ();
6580 emit_move_insn (target, op1);
6581 }
6582 }
6583 emit_label (op0);
6584 return target;
6585
6586 case BIT_NOT_EXPR:
6587 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6588 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6589 if (temp == 0)
6590 abort ();
6591 return temp;
6592
6593 case FFS_EXPR:
6594 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6595 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6596 if (temp == 0)
6597 abort ();
6598 return temp;
6599
6600 /* ??? Can optimize bitwise operations with one arg constant.
6601 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6602 and (a bitwise1 b) bitwise2 b (etc)
6603 but that is probably not worthwhile. */
6604
6605 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6606 boolean values when we want in all cases to compute both of them. In
6607 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6608 as actual zero-or-1 values and then bitwise anding. In cases where
6609 there cannot be any side effects, better code would be made by
6610 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6611 how to recognize those cases. */
6612
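 /* Editor's example (an illustration, not in the original source): the
 cases below matter when both operands must be evaluated. C's
 p && q becomes TRUTH_ANDIF_EXPR, so q is skipped when p == 0; a
 front end whose logical "and" always evaluates both sides can emit
 TRUTH_AND_EXPR instead, which is computed like BIT_AND_EXPR on
 zero-or-one values. */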
6613 case TRUTH_AND_EXPR:
6614 case BIT_AND_EXPR:
6615 this_optab = and_optab;
6616 goto binop;
6617
6618 case TRUTH_OR_EXPR:
6619 case BIT_IOR_EXPR:
6620 this_optab = ior_optab;
6621 goto binop;
6622
6623 case TRUTH_XOR_EXPR:
6624 case BIT_XOR_EXPR:
6625 this_optab = xor_optab;
6626 goto binop;
6627
6628 case LSHIFT_EXPR:
6629 case RSHIFT_EXPR:
6630 case LROTATE_EXPR:
6631 case RROTATE_EXPR:
6632 preexpand_calls (exp);
6633 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6634 subtarget = 0;
6635 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6636 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6637 unsignedp);
6638
6639 /* Could determine the answer when only additive constants differ. Also,
6640 the addition of one can be handled by changing the condition. */
6641 case LT_EXPR:
6642 case LE_EXPR:
6643 case GT_EXPR:
6644 case GE_EXPR:
6645 case EQ_EXPR:
6646 case NE_EXPR:
6647 preexpand_calls (exp);
6648 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6649 if (temp != 0)
6650 return temp;
6651
6652 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6653 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6654 && original_target
6655 && GET_CODE (original_target) == REG
6656 && (GET_MODE (original_target)
6657 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6658 {
6659 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6660 VOIDmode, 0);
6661
6662 if (temp != original_target)
6663 temp = copy_to_reg (temp);
6664
6665 op1 = gen_label_rtx ();
6666 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6667 GET_MODE (temp), unsignedp, 0);
6668 emit_jump_insn (gen_beq (op1));
6669 emit_move_insn (temp, const1_rtx);
6670 emit_label (op1);
6671 return temp;
6672 }
6673
6674 /* If no set-flag instruction, must generate a conditional
6675 store into a temporary variable. Drop through
6676 and handle this like && and ||. */
6677
6678 case TRUTH_ANDIF_EXPR:
6679 case TRUTH_ORIF_EXPR:
6680 if (! ignore
6681 && (target == 0 || ! safe_from_p (target, exp, 1)
6682 /* Make sure we don't have a hard reg (such as function's return
6683 value) live across basic blocks, if not optimizing. */
6684 || (!optimize && GET_CODE (target) == REG
6685 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6686 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6687
6688 if (target)
6689 emit_clr_insn (target);
6690
6691 op1 = gen_label_rtx ();
6692 jumpifnot (exp, op1);
6693
6694 if (target)
6695 emit_0_to_1_insn (target);
6696
6697 emit_label (op1);
6698 return ignore ? const0_rtx : target;
6699
6700 case TRUTH_NOT_EXPR:
6701 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6702 /* The parser is careful to generate TRUTH_NOT_EXPR
6703 only with operands that are always zero or one. */
6704 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6705 target, 1, OPTAB_LIB_WIDEN);
6706 if (temp == 0)
6707 abort ();
6708 return temp;
6709
6710 case COMPOUND_EXPR:
6711 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6712 emit_queue ();
6713 return expand_expr (TREE_OPERAND (exp, 1),
6714 (ignore ? const0_rtx : target),
6715 VOIDmode, 0);
6716
6717 case COND_EXPR:
6718 /* If we would have a "singleton" (see below) were it not for a
6719 conversion in each arm, bring that conversion back out. */
6720 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6721 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
6722 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
6723 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
6724 {
6725 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
6726 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
6727
6728 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
6729 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6730 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
6731 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
6732 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
6733 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6734 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
6735 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
6736 return expand_expr (build1 (NOP_EXPR, type,
6737 build (COND_EXPR, TREE_TYPE (true),
6738 TREE_OPERAND (exp, 0),
6739 true, false)),
6740 target, tmode, modifier);
6741 }
6742
6743 {
6744 /* Note that COND_EXPRs whose type is a structure or union
6745 are required to be constructed to contain assignments of
6746 a temporary variable, so that we can evaluate them here
6747 for side effect only. If type is void, we must do likewise. */
6748
6749 /* If an arm of the branch requires a cleanup,
6750 only that cleanup is performed. */
6751
6752 tree singleton = 0;
6753 tree binary_op = 0, unary_op = 0;
6754
6755 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6756 convert it to our mode, if necessary. */
6757 if (integer_onep (TREE_OPERAND (exp, 1))
6758 && integer_zerop (TREE_OPERAND (exp, 2))
6759 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6760 {
6761 if (ignore)
6762 {
6763 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6764 ro_modifier);
6765 return const0_rtx;
6766 }
6767
6768 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
6769 if (GET_MODE (op0) == mode)
6770 return op0;
6771
6772 if (target == 0)
6773 target = gen_reg_rtx (mode);
6774 convert_move (target, op0, unsignedp);
6775 return target;
6776 }
6777
6778 /* Check for X ? A + B : A. If we have this, we can copy A to the
6779 output and conditionally add B. Similarly for unary operations.
6780 Don't do this if X has side-effects because those side effects
6781 might affect A or B and the "?" operation is a sequence point in
6782 ANSI. (operand_equal_p tests for side effects.) */
6783
6784 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6785 && operand_equal_p (TREE_OPERAND (exp, 2),
6786 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6787 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6788 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6789 && operand_equal_p (TREE_OPERAND (exp, 1),
6790 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6791 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6792 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6793 && operand_equal_p (TREE_OPERAND (exp, 2),
6794 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6795 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6796 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6797 && operand_equal_p (TREE_OPERAND (exp, 1),
6798 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6799 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6800
6801 /* If we are not to produce a result, we have no target. Otherwise,
6802 if a target was specified use it; it will not be used as an
6803 intermediate target unless it is safe. If no target, use a
6804 temporary. */
6805
6806 if (ignore)
6807 temp = 0;
6808 else if (original_target
6809 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
6810 || (singleton && GET_CODE (original_target) == REG
6811 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6812 && original_target == var_rtx (singleton)))
6813 && GET_MODE (original_target) == mode
6814#ifdef HAVE_conditional_move
6815 && (! can_conditionally_move_p (mode)
6816 || GET_CODE (original_target) == REG
6817 || TREE_ADDRESSABLE (type))
6818#endif
6819 && ! (GET_CODE (original_target) == MEM
6820 && MEM_VOLATILE_P (original_target)))
6821 temp = original_target;
6822 else if (TREE_ADDRESSABLE (type))
6823 abort ();
6824 else
6825 temp = assign_temp (type, 0, 0, 1);
6826
6827 /* If we had X ? A + C : A, with C a constant power of 2, and we can
6828 do the test of X as a store-flag operation, do this as
6829 A + ((X != 0) << log C). Similarly for other simple binary
6830 operators. Only do for C == 1 if BRANCH_COST is low. */
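 /* Editor's example (not in the original source): with BRANCH_COST >= 3,
 a conditional such as

 r = x ? a + 4 : a;

 is expanded as if it had been written

 r = a + ((x != 0) << 2);

 i.e. a store-flag, a shift by log2 (4), and an add replace the
 branch. For C == 1 the shift disappears entirely, which is why that
 case is allowed even when branches are cheap. */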
6831 if (temp && singleton && binary_op
6832 && (TREE_CODE (binary_op) == PLUS_EXPR
6833 || TREE_CODE (binary_op) == MINUS_EXPR
6834 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6835 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6836 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
6837 : integer_onep (TREE_OPERAND (binary_op, 1)))
6838 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6839 {
6840 rtx result;
6841 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6842 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6843 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6844 : xor_optab);
6845
6846 /* If we had X ? A : A + 1, do this as A + (X == 0).
6847
6848 We have to invert the truth value here and then put it
6849 back later if do_store_flag fails. We cannot simply copy
6850 TREE_OPERAND (exp, 0) to another variable and modify that
6851 because invert_truthvalue can modify the tree pointed to
6852 by its argument. */
6853 if (singleton == TREE_OPERAND (exp, 1))
6854 TREE_OPERAND (exp, 0)
6855 = invert_truthvalue (TREE_OPERAND (exp, 0));
6856
6857 result = do_store_flag (TREE_OPERAND (exp, 0),
6858 (safe_from_p (temp, singleton, 1)
6859 ? temp : NULL_RTX),
6860 mode, BRANCH_COST <= 1);
6861
6862 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
6863 result = expand_shift (LSHIFT_EXPR, mode, result,
6864 build_int_2 (tree_log2
6865 (TREE_OPERAND
6866 (binary_op, 1)),
6867 0),
6868 (safe_from_p (temp, singleton, 1)
6869 ? temp : NULL_RTX), 0);
6870
6871 if (result)
6872 {
6873 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6874 return expand_binop (mode, boptab, op1, result, temp,
6875 unsignedp, OPTAB_LIB_WIDEN);
6876 }
6877 else if (singleton == TREE_OPERAND (exp, 1))
6878 TREE_OPERAND (exp, 0)
6879 = invert_truthvalue (TREE_OPERAND (exp, 0));
6880 }
6881
6882 do_pending_stack_adjust ();
6883 NO_DEFER_POP;
6884 op0 = gen_label_rtx ();
6885
6886 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6887 {
6888 if (temp != 0)
6889 {
6890 /* If the target conflicts with the other operand of the
6891 binary op, we can't use it. Also, we can't use the target
6892 if it is a hard register, because evaluating the condition
6893 might clobber it. */
6894 if ((binary_op
6895 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
6896 || (GET_CODE (temp) == REG
6897 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6898 temp = gen_reg_rtx (mode);
6899 store_expr (singleton, temp, 0);
6900 }
6901 else
6902 expand_expr (singleton,
6903 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6904 if (singleton == TREE_OPERAND (exp, 1))
6905 jumpif (TREE_OPERAND (exp, 0), op0);
6906 else
6907 jumpifnot (TREE_OPERAND (exp, 0), op0);
6908
6909 start_cleanup_deferral ();
6910 if (binary_op && temp == 0)
6911 /* Just touch the other operand. */
6912 expand_expr (TREE_OPERAND (binary_op, 1),
6913 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6914 else if (binary_op)
6915 store_expr (build (TREE_CODE (binary_op), type,
6916 make_tree (type, temp),
6917 TREE_OPERAND (binary_op, 1)),
6918 temp, 0);
6919 else
6920 store_expr (build1 (TREE_CODE (unary_op), type,
6921 make_tree (type, temp)),
6922 temp, 0);
6923 op1 = op0;
6924 }
6925 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6926 comparison operator. If we have one of these cases, set the
6927 output to A, branch on A (cse will merge these two references),
6928 then set the output to FOO. */
6929 else if (temp
6930 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6931 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6932 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6933 TREE_OPERAND (exp, 1), 0)
6934 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6935 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
6936 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
6937 {
6938 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6939 temp = gen_reg_rtx (mode);
6940 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6941 jumpif (TREE_OPERAND (exp, 0), op0);
6942
6943 start_cleanup_deferral ();
6944 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6945 op1 = op0;
6946 }
6947 else if (temp
6948 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6949 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6950 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6951 TREE_OPERAND (exp, 2), 0)
6952 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6953 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
6954 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
6955 {
6956 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6957 temp = gen_reg_rtx (mode);
6958 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6959 jumpifnot (TREE_OPERAND (exp, 0), op0);
6960
6961 start_cleanup_deferral ();
6962 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6963 op1 = op0;
6964 }
6965 else
6966 {
6967 op1 = gen_label_rtx ();
6968 jumpifnot (TREE_OPERAND (exp, 0), op0);
6969
6970 start_cleanup_deferral ();
6971 if (temp != 0)
6972 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6973 else
6974 expand_expr (TREE_OPERAND (exp, 1),
6975 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6976 end_cleanup_deferral ();
6977 emit_queue ();
6978 emit_jump_insn (gen_jump (op1));
6979 emit_barrier ();
6980 emit_label (op0);
6981 start_cleanup_deferral ();
6982 if (temp != 0)
6983 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6984 else
6985 expand_expr (TREE_OPERAND (exp, 2),
6986 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6987 }
6988
6989 end_cleanup_deferral ();
6990
6991 emit_queue ();
6992 emit_label (op1);
6993 OK_DEFER_POP;
6994
6995 return temp;
6996 }
6997
6998 case TARGET_EXPR:
6999 {
7000 /* Something needs to be initialized, but we didn't know
7001 where that thing was when building the tree. For example,
7002 it could be the return value of a function, or a parameter
7003 to a function which is laid down on the stack, or a temporary
7004 variable which must be passed by reference.
7005
7006 We guarantee that the expression will either be constructed
7007 or copied into our original target. */
7008
7009 tree slot = TREE_OPERAND (exp, 0);
7010 tree cleanups = NULL_TREE;
7011 tree exp1;
7012
7013 if (TREE_CODE (slot) != VAR_DECL)
7014 abort ();
7015
7016 if (! ignore)
7017 target = original_target;
7018
7019 if (target == 0)
7020 {
7021 if (DECL_RTL (slot) != 0)
7022 {
7023 target = DECL_RTL (slot);
7024 /* If we have already expanded the slot, don't do
7025 it again. (mrs) */
7026 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7027 return target;
7028 }
7029 else
7030 {
7031 target = assign_temp (type, 2, 0, 1);
7032 /* All temp slots at this level must not conflict. */
7033 preserve_temp_slots (target);
7034 DECL_RTL (slot) = target;
7035 if (TREE_ADDRESSABLE (slot))
7036 {
7037 TREE_ADDRESSABLE (slot) = 0;
7038 mark_addressable (slot);
7039 }
7040
7041 /* Since SLOT is not known to the called function
7042 to belong to its stack frame, we must build an explicit
7043 cleanup. This case occurs when we must build up a reference
7044 to pass the reference as an argument. In this case,
7045 it is very likely that such a reference need not be
7046 built here. */
7047
7048 if (TREE_OPERAND (exp, 2) == 0)
7049 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7050 cleanups = TREE_OPERAND (exp, 2);
7051 }
7052 }
7053 else
7054 {
7055 /* This case does occur, when expanding a parameter which
7056 needs to be constructed on the stack. The target
7057 is the actual stack address that we want to initialize.
7058 The function we call will perform the cleanup in this case. */
7059
7060 /* If we have already assigned it space, use that space,
7061 not the target that we were passed in, as our target
7062 parameter is only a hint. */
7063 if (DECL_RTL (slot) != 0)
7064 {
7065 target = DECL_RTL (slot);
7066 /* If we have already expanded the slot, don't do
7067 it again. (mrs) */
7068 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7069 return target;
7070 }
7071 else
7072 {
7073 DECL_RTL (slot) = target;
7074 /* If we must have an addressable slot, then make sure that
7075 the RTL that we just stored in slot is OK. */
7076 if (TREE_ADDRESSABLE (slot))
7077 {
7078 TREE_ADDRESSABLE (slot) = 0;
7079 mark_addressable (slot);
7080 }
7081 }
7082 }
7083
7084 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7085 /* Mark it as expanded. */
7086 TREE_OPERAND (exp, 1) = NULL_TREE;
7087
7088 TREE_USED (slot) = 1;
7089 store_expr (exp1, target, 0);
7090
7091 expand_decl_cleanup (NULL_TREE, cleanups);
7092
7093 return target;
7094 }
7095
7096 case INIT_EXPR:
7097 {
7098 tree lhs = TREE_OPERAND (exp, 0);
7099 tree rhs = TREE_OPERAND (exp, 1);
7100 tree noncopied_parts = 0;
7101 tree lhs_type = TREE_TYPE (lhs);
7102
7103 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7104 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7105 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7106 TYPE_NONCOPIED_PARTS (lhs_type));
7107 while (noncopied_parts != 0)
7108 {
7109 expand_assignment (TREE_VALUE (noncopied_parts),
7110 TREE_PURPOSE (noncopied_parts), 0, 0);
7111 noncopied_parts = TREE_CHAIN (noncopied_parts);
7112 }
7113 return temp;
7114 }
7115
7116 case MODIFY_EXPR:
7117 {
7118 /* If lhs is complex, expand calls in rhs before computing it.
7119 That's so we don't compute a pointer and save it over a call.
7120 If lhs is simple, compute it first so we can give it as a
7121 target if the rhs is just a call. This avoids an extra temp and copy
7122 and that prevents a partial-subsumption which makes bad code.
7123 Actually we could treat component_ref's of vars like vars. */
7124
7125 tree lhs = TREE_OPERAND (exp, 0);
7126 tree rhs = TREE_OPERAND (exp, 1);
7127 tree noncopied_parts = 0;
7128 tree lhs_type = TREE_TYPE (lhs);
7129
7130 temp = 0;
7131
7132 if (TREE_CODE (lhs) != VAR_DECL
7133 && TREE_CODE (lhs) != RESULT_DECL
7134 && TREE_CODE (lhs) != PARM_DECL
7135 && ! (TREE_CODE (lhs) == INDIRECT_REF
7136 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7137 preexpand_calls (exp);
7138
7139 /* Check for |= or &= of a bitfield of size one into another bitfield
7140 of size 1. In this case, (unless we need the result of the
7141 assignment) we can do this more efficiently with a
7142 test followed by an assignment, if necessary.
7143
7144 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7145 things change so we do, this code should be enhanced to
7146 support it. */
7147 if (ignore
7148 && TREE_CODE (lhs) == COMPONENT_REF
7149 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7150 || TREE_CODE (rhs) == BIT_AND_EXPR)
7151 && TREE_OPERAND (rhs, 0) == lhs
7152 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7153 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7154 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7155 {
7156 rtx label = gen_label_rtx ();
7157
7158 do_jump (TREE_OPERAND (rhs, 1),
7159 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7160 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7161 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7162 (TREE_CODE (rhs) == BIT_IOR_EXPR
7163 ? integer_one_node
7164 : integer_zero_node)),
7165 0, 0);
7166 do_pending_stack_adjust ();
7167 emit_label (label);
7168 return const0_rtx;
7169 }
7170
7171 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7172 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7173 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7174 TYPE_NONCOPIED_PARTS (lhs_type));
7175
7176 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7177 while (noncopied_parts != 0)
7178 {
7179 expand_assignment (TREE_PURPOSE (noncopied_parts),
7180 TREE_VALUE (noncopied_parts), 0, 0);
7181 noncopied_parts = TREE_CHAIN (noncopied_parts);
7182 }
7183 return temp;
7184 }
7185
7186 case PREINCREMENT_EXPR:
7187 case PREDECREMENT_EXPR:
7188 return expand_increment (exp, 0, ignore);
7189
7190 case POSTINCREMENT_EXPR:
7191 case POSTDECREMENT_EXPR:
7192 /* Faster to treat as pre-increment if result is not used. */
7193 return expand_increment (exp, ! ignore, ignore);
7194
7195 case ADDR_EXPR:
7196 /* If nonzero, TEMP will be set to the address of something that might
7197 be a MEM corresponding to a stack slot. */
7198 temp = 0;
7199
7200 /* Are we taking the address of a nested function? */
7201 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7202 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7203 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7204 && ! TREE_STATIC (exp))
7205 {
7206 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7207 op0 = force_operand (op0, target);
7208 }
7209 /* If we are taking the address of something erroneous, just
7210 return a zero. */
7211 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7212 return const0_rtx;
7213 else
7214 {
7215 /* We make sure to pass const0_rtx down if we came in with
7216 ignore set, to avoid doing the cleanups twice for something. */
7217 op0 = expand_expr (TREE_OPERAND (exp, 0),
7218 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7219 (modifier == EXPAND_INITIALIZER
7220 ? modifier : EXPAND_CONST_ADDRESS));
7221
7222 /* If we are going to ignore the result, OP0 will have been set
7223 to const0_rtx, so just return it. Don't get confused and
7224 think we are taking the address of the constant. */
7225 if (ignore)
7226 return op0;
7227
7228 op0 = protect_from_queue (op0, 0);
7229
7230 /* We would like the object in memory. If it is a constant,
7231 we can have it be statically allocated into memory. For
7232 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7233 memory and store the value into it. */
7234
7235 if (CONSTANT_P (op0))
7236 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7237 op0);
7238 else if (GET_CODE (op0) == MEM)
7239 {
7240 mark_temp_addr_taken (op0);
7241 temp = XEXP (op0, 0);
7242 }
7243
7244 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7245 || GET_CODE (op0) == CONCAT)
7246 {
7247 /* If this object is in a register, it must not
7248 be BLKmode. */
7249 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7250 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7251
7252 mark_temp_addr_taken (memloc);
7253 emit_move_insn (memloc, op0);
7254 op0 = memloc;
7255 }
7256
7257 if (GET_CODE (op0) != MEM)
7258 abort ();
7259
7260 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7261 {
7262 temp = XEXP (op0, 0);
7263#ifdef POINTERS_EXTEND_UNSIGNED
7264 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7265 && mode == ptr_mode)
7266 temp = convert_memory_address (ptr_mode, temp);
7267#endif
7268 return temp;
7269 }
7270
7271 op0 = force_operand (XEXP (op0, 0), target);
7272 }
7273
7274 if (flag_force_addr && GET_CODE (op0) != REG)
7275 op0 = force_reg (Pmode, op0);
7276
7277 if (GET_CODE (op0) == REG
7278 && ! REG_USERVAR_P (op0))
7279 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7280
7281 /* If we might have had a temp slot, add an equivalent address
7282 for it. */
7283 if (temp != 0)
7284 update_temp_slot_address (temp, op0);
7285
7286#ifdef POINTERS_EXTEND_UNSIGNED
7287 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7288 && mode == ptr_mode)
7289 op0 = convert_memory_address (ptr_mode, op0);
7290#endif
7291
7292 return op0;
7293
7294 case ENTRY_VALUE_EXPR:
7295 abort ();
7296
7297 /* COMPLEX type for Extended Pascal & Fortran */
7298 case COMPLEX_EXPR:
7299 {
7300 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7301 rtx insns;
7302
7303 /* Get the rtx code of the operands. */
7304 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7305 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7306
7307 if (! target)
7308 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7309
7310 start_sequence ();
7311
7312 /* Move the real (op0) and imaginary (op1) parts to their location. */
7313 emit_move_insn (gen_realpart (mode, target), op0);
7314 emit_move_insn (gen_imagpart (mode, target), op1);
7315
7316 insns = get_insns ();
7317 end_sequence ();
7318
7319 /* Complex construction should appear as a single unit. */
7320 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7321 each with a separate pseudo as destination.
7322 It's not correct for flow to treat them as a unit. */
7323 if (GET_CODE (target) != CONCAT)
7324 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7325 else
7326 emit_insns (insns);
7327
7328 return target;
7329 }
7330
7331 case REALPART_EXPR:
7332 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7333 return gen_realpart (mode, op0);
7334
7335 case IMAGPART_EXPR:
7336 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7337 return gen_imagpart (mode, op0);
7338
7339 case CONJ_EXPR:
7340 {
7341 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7342 rtx imag_t;
7343 rtx insns;
7344
7345 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7346
7347 if (! target)
7348 target = gen_reg_rtx (mode);
7349
7350 start_sequence ();
7351
7352 /* Store the realpart and the negated imagpart to target. */
7353 emit_move_insn (gen_realpart (partmode, target),
7354 gen_realpart (partmode, op0));
7355
7356 imag_t = gen_imagpart (partmode, target);
7357 temp = expand_unop (partmode, neg_optab,
7358 gen_imagpart (partmode, op0), imag_t, 0);
7359 if (temp != imag_t)
7360 emit_move_insn (imag_t, temp);
7361
7362 insns = get_insns ();
7363 end_sequence ();
7364
7365 /* Conjugate should appear as a single unit.
7366 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7367 each with a separate pseudo as destination.
7368 It's not correct for flow to treat them as a unit. */
7369 if (GET_CODE (target) != CONCAT)
7370 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7371 else
7372 emit_insns (insns);
7373
7374 return target;
7375 }
7376
7377 case TRY_CATCH_EXPR:
7378 {
7379 tree handler = TREE_OPERAND (exp, 1);
7380
7381 expand_eh_region_start ();
7382
7383 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7384
7385 expand_eh_region_end (handler);
7386
7387 return op0;
7388 }
7389
7390 case POPDCC_EXPR:
7391 {
7392 rtx dcc = get_dynamic_cleanup_chain ();
7393 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
7394 return const0_rtx;
7395 }
7396
7397 case POPDHC_EXPR:
7398 {
7399 rtx dhc = get_dynamic_handler_chain ();
7400 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
7401 return const0_rtx;
7402 }
7403
7404 case ERROR_MARK:
7405 op0 = CONST0_RTX (tmode);
7406 if (op0 != 0)
7407 return op0;
7408 return const0_rtx;
7409
7410 default:
7411 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7412 }
7413
7414 /* Here to do an ordinary binary operator, generating an instruction
7415 from the optab already placed in `this_optab'. */
7416 binop:
7417 preexpand_calls (exp);
7418 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7419 subtarget = 0;
7420 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7421 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7422 binop2:
7423 temp = expand_binop (mode, this_optab, op0, op1, target,
7424 unsignedp, OPTAB_LIB_WIDEN);
7425 if (temp == 0)
7426 abort ();
7427 return temp;
7428}
7429
7430
7431\f
7432/* Return the alignment in bits of EXP, a pointer valued expression.
7433 But don't return more than MAX_ALIGN no matter what.
7434 The alignment returned is, by default, the alignment of the thing that
7435 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7436
7437 Otherwise, look at the expression to see if we can do better, i.e., if the
7438 expression is actually pointing at an object whose alignment is tighter. */
7439
7440static int
7441get_pointer_alignment (exp, max_align)
7442 tree exp;
7443 unsigned max_align;
7444{
7445 unsigned align, inner;
7446
7447 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7448 return 0;
7449
7450 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7451 align = MIN (align, max_align);
7452
7453 while (1)
7454 {
7455 switch (TREE_CODE (exp))
7456 {
7457 case NOP_EXPR:
7458 case CONVERT_EXPR:
7459 case NON_LVALUE_EXPR:
7460 exp = TREE_OPERAND (exp, 0);
7461 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7462 return align;
7463 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7464 align = MIN (inner, max_align);
7465 break;
7466
7467 case PLUS_EXPR:
7468 /* If sum of pointer + int, restrict our maximum alignment to that
7469 imposed by the integer. If not, we can't do any better than
7470 ALIGN. */
7471 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7472 return align;
7473
7474 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7475 & (max_align - 1))
7476 != 0)
7477 max_align >>= 1;
7478
7479 exp = TREE_OPERAND (exp, 0);
7480 break;
7481
7482 case ADDR_EXPR:
7483 /* See what we are pointing at and look at its alignment. */
7484 exp = TREE_OPERAND (exp, 0);
7485 if (TREE_CODE (exp) == FUNCTION_DECL)
7486 align = FUNCTION_BOUNDARY;
7487 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7488 align = DECL_ALIGN (exp);
7489#ifdef CONSTANT_ALIGNMENT
7490 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7491 align = CONSTANT_ALIGNMENT (exp, align);
7492#endif
7493 return MIN (align, max_align);
7494
7495 default:
7496 return align;
7497 }
7498 }
7499}
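/* Editor's example (an illustration with assumed declarations, not in
 the original source): given

 static char buf[8] __attribute__ ((aligned (8)));

 get_pointer_alignment on &buf yields 64 bits (from DECL_ALIGN);
 buf + 2 is capped at 16 bits by the constant offset; and buf + i,
 with i a variable int, falls back to the 8-bit alignment of the
 char target. */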
7500\f
7501/* Return the tree node and offset if a given argument corresponds to
7502 a string constant. */
7503
7504static tree
7505string_constant (arg, ptr_offset)
7506 tree arg;
7507 tree *ptr_offset;
7508{
7509 STRIP_NOPS (arg);
7510
7511 if (TREE_CODE (arg) == ADDR_EXPR
7512 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7513 {
7514 *ptr_offset = integer_zero_node;
7515 return TREE_OPERAND (arg, 0);
7516 }
7517 else if (TREE_CODE (arg) == PLUS_EXPR)
7518 {
7519 tree arg0 = TREE_OPERAND (arg, 0);
7520 tree arg1 = TREE_OPERAND (arg, 1);
7521
7522 STRIP_NOPS (arg0);
7523 STRIP_NOPS (arg1);
7524
7525 if (TREE_CODE (arg0) == ADDR_EXPR
7526 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7527 {
7528 *ptr_offset = arg1;
7529 return TREE_OPERAND (arg0, 0);
7530 }
7531 else if (TREE_CODE (arg1) == ADDR_EXPR
7532 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7533 {
7534 *ptr_offset = arg0;
7535 return TREE_OPERAND (arg1, 0);
7536 }
7537 }
7538
7539 return 0;
7540}
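/* Editor's example (not in the original source): for an argument tree
 equivalent to "hello" + n, string_constant returns the STRING_CST
 "hello" and sets *ptr_offset to n; for a bare "hello" the offset is
 integer_zero_node; any other form, say a variable char *, yields 0. */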
7541
7542/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7543 way, because it could contain a zero byte in the middle.
7544 TREE_STRING_LENGTH is the size of the character array, not the string.
7545
7546 Unfortunately, string_constant can't access the values of const char
7547 arrays with initializers, so neither can we do so here. */
7548
7549static tree
7550c_strlen (src)
7551 tree src;
7552{
7553 tree offset_node;
7554 int offset, max;
7555 char *ptr;
7556
7557 src = string_constant (src, &offset_node);
7558 if (src == 0)
7559 return 0;
7560 max = TREE_STRING_LENGTH (src);
7561 ptr = TREE_STRING_POINTER (src);
7562 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7563 {
7564 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7565 compute the offset to the following null if we don't know where to
7566 start searching for it. */
7567 int i;
7568 for (i = 0; i < max; i++)
7569 if (ptr[i] == 0)
7570 return 0;
7571 /* We don't know the starting offset, but we do know that the string
7572 has no internal zero bytes. We can assume that the offset falls
7573 within the bounds of the string; otherwise, the programmer deserves
7574 what he gets. Subtract the offset from the length of the string,
7575 and return that. */
7576 /* This would perhaps not be valid if we were dealing with named
7577 arrays in addition to literal string constants. */
7578 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7579 }
7580
7581 /* We have a known offset into the string. Start searching there for
7582 a null character. */
7583 if (offset_node == 0)
7584 offset = 0;
7585 else
7586 {
7587 /* Did we get a long long offset? If so, punt. */
7588 if (TREE_INT_CST_HIGH (offset_node) != 0)
7589 return 0;
7590 offset = TREE_INT_CST_LOW (offset_node);
7591 }
7592 /* If the offset is known to be out of bounds, warn, and call strlen at
7593 runtime. */
7594 if (offset < 0 || offset > max)
7595 {
7596 warning ("offset outside bounds of constant string");
7597 return 0;
7598 }
7599 /* Use strlen to search for the first zero byte. Since any strings
7600 constructed with build_string will have nulls appended, we win even
7601 if we get handed something like (char[4])"abcd".
7602
7603 Since OFFSET is our starting index into the string, no further
7604 calculation is needed. */
7605 return size_int (strlen (ptr + offset));
7606}
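/* Editor's examples (not in the original source) of what c_strlen folds:

 "hello" -> size_int (5)
 "hello" + 2 -> size_int (3)
 "he\0llo" + i -> 0 (internal NUL with a variable offset, so a
 real strlen call is emitted instead) */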
7607
7608rtx
7609expand_builtin_return_addr (fndecl_code, count, tem)
7610 enum built_in_function fndecl_code;
7611 int count;
7612 rtx tem;
7613{
7614 int i;
7615
7616 /* Some machines need special handling before we can access
7617 arbitrary frames. For example, on the sparc, we must first flush
7618 all register windows to the stack. */
7619#ifdef SETUP_FRAME_ADDRESSES
7620 if (count > 0)
7621 SETUP_FRAME_ADDRESSES ();
7622#endif
7623
7624 /* On the sparc, the return address is not in the frame, it is in a
7625 register. There is no way to access it off of the current frame
7626 pointer, but it can be accessed off the previous frame pointer by
7627 reading the value from the register window save area. */
7628#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7629 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
7630 count--;
7631#endif
7632
7633 /* Scan back COUNT frames to the specified frame. */
7634 for (i = 0; i < count; i++)
7635 {
7636 /* Assume the dynamic chain pointer is in the word that the
7637 frame address points to, unless otherwise specified. */
7638#ifdef DYNAMIC_CHAIN_ADDRESS
7639 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7640#endif
7641 tem = memory_address (Pmode, tem);
7642 tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
7643 }
7644
7645 /* For __builtin_frame_address, return what we've got. */
7646 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
7647 return tem;
7648
7649 /* For __builtin_return_address, get the return address from that
7650 frame. */
7651#ifdef RETURN_ADDR_RTX
7652 tem = RETURN_ADDR_RTX (count, tem);
7653#else
7654 tem = memory_address (Pmode,
7655 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7656 tem = gen_rtx_MEM (Pmode, tem);
7657#endif
7658 return tem;
7659}
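/* Editor's usage sketch (not in the original source): COUNT walks up
 dynamic frames, so

 void *pc = __builtin_return_address (0); caller's resume PC
 void *fp = __builtin_frame_address (1); frame one level up

 both funnel through the loop above; only the final read differs. */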
7660
7661/* __builtin_setjmp is passed a pointer to an array of five words (not
7662 all will be used on all machines). It operates similarly to the C
7663 library function of the same name, but is more efficient. Much of
7664 the code below (and for longjmp) is copied from the handling of
7665 non-local gotos.
7666
7667 NOTE: This is intended for use by GNAT and the exception handling
7668 scheme in the compiler and will only work in the method used by
7669 them. */
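 Editor's usage sketch (an assumption about the intended use, not part
 of the original source):

 void *buf[5];
 if (__builtin_setjmp (buf) == 0)
 ... normal path; may call __builtin_longjmp (buf, 1) ...
 else
 ... control re-enters here, as after longjmp ...

 The buffer holds the frame pointer, the receiver label, and a
 machine-dependent stack-save area, as laid out below. */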
7670
7671rtx
7672expand_builtin_setjmp (buf_addr, target, first_label, next_label)
7673 rtx buf_addr;
7674 rtx target;
7675 rtx first_label, next_label;
7676{
7677 rtx lab1 = gen_label_rtx ();
7678 enum machine_mode sa_mode = Pmode, value_mode;
7679 rtx stack_save;
7680
7681 value_mode = TYPE_MODE (integer_type_node);
7682
7683#ifdef POINTERS_EXTEND_UNSIGNED
7684 buf_addr = convert_memory_address (Pmode, buf_addr);
7685#endif
7686
7687 buf_addr = force_reg (Pmode, buf_addr);
7688
7689 if (target == 0 || GET_CODE (target) != REG
7690 || REGNO (target) < FIRST_PSEUDO_REGISTER)
7691 target = gen_reg_rtx (value_mode);
7692
7693 emit_queue ();
7694
7695 /* We store the frame pointer and the address of lab1 in the buffer
7696 and use the rest of it for the stack save area, which is
7697 machine-dependent. */
7698 emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
7699 virtual_stack_vars_rtx);
7700 emit_move_insn (validize_mem
7701 (gen_rtx_MEM (Pmode,
7702 plus_constant (buf_addr,
7703 GET_MODE_SIZE (Pmode)))),
7704 gen_rtx_LABEL_REF (Pmode, lab1));
7705
7706#ifdef HAVE_save_stack_nonlocal
7707 if (HAVE_save_stack_nonlocal)
7708 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
7709#endif
7710
7711 stack_save = gen_rtx_MEM (sa_mode,
7712 plus_constant (buf_addr,
7713 2 * GET_MODE_SIZE (Pmode)));
7714 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
7715
7716 /* If there is further processing to do, do it. */
7717#ifdef HAVE_builtin_setjmp_setup
7718 if (HAVE_builtin_setjmp_setup)
7719 emit_insn (gen_builtin_setjmp_setup (buf_addr));
7720#endif
7721
7722 /* Set TARGET to zero and branch to the first-time-through label. */
7723 emit_move_insn (target, const0_rtx);
7724 emit_jump_insn (gen_jump (first_label));
7725 emit_barrier ();
7726 emit_label (lab1);
7727
7728 /* Tell flow about the strange goings on. */
7729 current_function_has_nonlocal_label = 1;
7730
7731 /* Clobber the FP when we get here, so we have to make sure it's
7732 marked as used by this function. */
7733 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
7734
7735 /* Mark the static chain as clobbered here so life information
7736 doesn't get messed up for it. */
7737 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
7738
7739 /* Now put in the code to restore the frame pointer, and argument
7740 pointer, if needed. The code below is from expand_end_bindings
7741 in stmt.c; see detailed documentation there. */
7742#ifdef HAVE_nonlocal_goto
7743 if (! HAVE_nonlocal_goto)
7744#endif
7745 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
7746
7747#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
7748 if (fixed_regs[ARG_POINTER_REGNUM])
7749 {
7750#ifdef ELIMINABLE_REGS
7751 int i;
7752 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
7753
7754 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
7755 if (elim_regs[i].from == ARG_POINTER_REGNUM
7756 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
7757 break;
7758
7759 if (i == sizeof elim_regs / sizeof elim_regs [0])
7760#endif
7761 {
7762 /* Now restore our arg pointer from the address at which it
7763 was saved in our stack frame.
7764 If space hasn't been allocated for it yet, make
7765 some now. */
7766 if (arg_pointer_save_area == 0)
7767 arg_pointer_save_area
7768 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
7769 emit_move_insn (virtual_incoming_args_rtx,
7770 copy_to_reg (arg_pointer_save_area));
7771 }
7772 }
7773#endif
7774
7775#ifdef HAVE_builtin_setjmp_receiver
7776 if (HAVE_builtin_setjmp_receiver)
7777 emit_insn (gen_builtin_setjmp_receiver (lab1));
7778 else
7779#endif
7780#ifdef HAVE_nonlocal_goto_receiver
7781 if (HAVE_nonlocal_goto_receiver)
7782 emit_insn (gen_nonlocal_goto_receiver ());
7783 else
7784#endif
7785 {
7786 ; /* Nothing */
7787 }
7788
7789 /* Set TARGET, and branch to the next-time-through label. */
7790 emit_move_insn (target, gen_lowpart (GET_MODE (target), static_chain_rtx));
7791 emit_jump_insn (gen_jump (next_label));
7792 emit_barrier ();
7793
7794 return target;
7795}
7796
7797void
7798expand_builtin_longjmp (buf_addr, value)
7799 rtx buf_addr, value;
7800{
7801 rtx fp, lab, stack;
7802 enum machine_mode sa_mode;
7803
7804#ifdef POINTERS_EXTEND_UNSIGNED
7805 buf_addr = convert_memory_address (Pmode, buf_addr);
7806#endif
7807 buf_addr = force_reg (Pmode, buf_addr);
7808
7809 /* The value sent by longjmp is not allowed to be zero. Force it
7810 to one if so. */
7811 if (GET_CODE (value) == CONST_INT)
7812 {
7813 if (INTVAL (value) == 0)
7814 value = const1_rtx;
7815 }
7816 else
7817 {
7818 lab = gen_label_rtx ();
7819
7820 emit_cmp_insn (value, const0_rtx, NE, NULL_RTX, GET_MODE (value), 0, 0);
7821 emit_jump_insn (gen_bne (lab));
7822 emit_move_insn (value, const1_rtx);
7823 emit_label (lab);
7824 }
7825
7826 /* Make sure the value is in the right mode to be copied to the chain. */
7827 if (GET_MODE (value) != VOIDmode)
7828 value = gen_lowpart (GET_MODE (static_chain_rtx), value);
7829
7830#ifdef HAVE_builtin_longjmp
7831 if (HAVE_builtin_longjmp)
7832 {
7833 /* Copy the "return value" to the static chain reg. */
7834 emit_move_insn (static_chain_rtx, value);
7835 emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
7836 emit_insn (gen_builtin_longjmp (buf_addr));
7837 }
7838 else
7839#endif
7840 {
7841 fp = gen_rtx_MEM (Pmode, buf_addr);
7842 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
7843 GET_MODE_SIZE (Pmode)));
7844
7845#ifdef HAVE_save_stack_nonlocal
7846 sa_mode = (HAVE_save_stack_nonlocal
7847 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
7848 : Pmode);
7849#else
7850 sa_mode = Pmode;
7851#endif
7852
7853 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
7854 2 * GET_MODE_SIZE (Pmode)));
7855
7856 /* Pick up FP, label, and SP from the block and jump. This code is
7857 from expand_goto in stmt.c; see there for detailed comments. */
7858#if HAVE_nonlocal_goto
7859 if (HAVE_nonlocal_goto)
7860 emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
7861 else
7862#endif
7863 {
7864 lab = copy_to_reg (lab);
7865
7866 /* Copy the "return value" to the static chain reg. */
7867 emit_move_insn (static_chain_rtx, value);
7868
7869 emit_move_insn (hard_frame_pointer_rtx, fp);
7870 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
7871
7872 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
7873 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
7874 emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
7875 emit_indirect_jump (lab);
7876 }
7877 }
7878}
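/* Editor's note (a sketch, not in the original source): the guard near
 the top of this function mirrors ISO longjmp semantics -- in C terms,

 if (value == 0)
 value = 1; setjmp must never appear to return 0 twice

 which is why a constant zero is rewritten directly and a runtime
 zero gets a compare-and-set sequence. */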
7879
7880\f
7881/* Expand an expression EXP that calls a built-in function,
7882 with result going to TARGET if that's convenient
7883 (and in mode MODE if that's convenient).
7884 SUBTARGET may be used as the target for computing one of EXP's operands.
7885 IGNORE is nonzero if the value is to be ignored. */
7886
7887#define CALLED_AS_BUILT_IN(NODE) \
7888 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
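/* Editor's note (not in the original source): this is a pure name-prefix
 test, e.g. strncmp ("__builtin_memcpy", "__builtin_", 10) == 0, so it
 is also true for a user-supplied declaration spelled __builtin_foo,
 whether or not the compiler knows such a builtin. */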
7889
7890static rtx
7891expand_builtin (exp, target, subtarget, mode, ignore)
7892 tree exp;
7893 rtx target;
7894 rtx subtarget;
7895 enum machine_mode mode;
7896 int ignore;
7897{
7898 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7899 tree arglist = TREE_OPERAND (exp, 1);
7900 rtx op0;
7901 rtx lab1, insns;
7902 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7903 optab builtin_optab;
7904
7905 switch (DECL_FUNCTION_CODE (fndecl))
7906 {
7907 case BUILT_IN_ABS:
7908 case BUILT_IN_LABS:
7909 case BUILT_IN_FABS:
7910 /* build_function_call changes these into ABS_EXPR. */
7911 abort ();
7912
7913 case BUILT_IN_SIN:
7914 case BUILT_IN_COS:
7915 /* Treat these like sqrt, but only if the user asks for them. */
7916 if (! flag_fast_math)
7917 break;
7918 case BUILT_IN_FSQRT:
7919 /* If not optimizing, call the library function. */
7920 if (! optimize)
7921 break;
7922
7923 if (arglist == 0
7924 /* Arg could be wrong type if user redeclared this fcn wrong. */
7925 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
7926 break;
7927
7928 /* Stabilize and compute the argument. */
7929 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7930 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7931 {
7932 exp = copy_node (exp);
7933 arglist = copy_node (arglist);
7934 TREE_OPERAND (exp, 1) = arglist;
7935 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7936 }
7937 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7938
7939 /* Make a suitable register to place result in. */
7940 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7941
7942 emit_queue ();
7943 start_sequence ();
7944
7945 switch (DECL_FUNCTION_CODE (fndecl))
7946 {
7947 case BUILT_IN_SIN:
7948 builtin_optab = sin_optab; break;
7949 case BUILT_IN_COS:
7950 builtin_optab = cos_optab; break;
7951 case BUILT_IN_FSQRT:
7952 builtin_optab = sqrt_optab; break;
7953 default:
7954 abort ();
7955 }
7956
7957 /* Compute into TARGET.
7958 Set TARGET to wherever the result comes back. */
7959 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7960 builtin_optab, op0, target, 0);
7961
7962 /* If we were unable to expand via the builtin, stop the
7963 sequence (without outputting the insns) and break, causing
7964 a call to the library function. */
7965 if (target == 0)
7966 {
7967 end_sequence ();
7968 break;
7969 }
7970
7971 /* Check the results by default. But if flag_fast_math is turned on,
7972 then assume sqrt will always be called with valid arguments. */
7973
7974 if (! flag_fast_math)
7975 {
7976 /* Don't define the builtin FP instructions
7977 if your machine is not IEEE. */
7978 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
7979 abort ();
7980
7981 lab1 = gen_label_rtx ();
7982
7983 /* Test the result; if it is NaN, set errno=EDOM because
7984 the argument was not in the domain. */
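 /* Editor's note (not in the original source): the self-comparison
 emitted below is the standard NaN test -- in C terms, (x == x) is
 false exactly when x is a NaN -- so the branch skips the errno
 store whenever the result is a valid number. */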
7985 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
7986 emit_jump_insn (gen_beq (lab1));
7987
7988#ifdef TARGET_EDOM
7989 {
7990#ifdef GEN_ERRNO_RTX
7991 rtx errno_rtx = GEN_ERRNO_RTX;
7992#else
7993 rtx errno_rtx
7994 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
7995#endif
7996
7997 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
7998 }
7999#else
8000 /* We can't set errno=EDOM directly; let the library call do it.
8001 Pop the arguments right away in case the call gets deleted. */
8002 NO_DEFER_POP;
8003 expand_call (exp, target, 0);
8004 OK_DEFER_POP;
8005#endif
8006
8007 emit_label (lab1);
8008 }
8009
8010 /* Output the entire sequence. */
8011 insns = get_insns ();
8012 end_sequence ();
8013 emit_insns (insns);
8014
8015 return target;
8016
8017 case BUILT_IN_FMOD:
8018 break;
8019
8020 /* __builtin_apply_args returns a block of memory allocated on
8021 the stack into which is stored the arg pointer, structure
8022 value address, static chain, and all the registers that might
8023 possibly be used in performing a function call. The code is
8024 moved to the start of the function so the incoming values are
8025 saved. */
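 /* Editor's usage sketch (not in the original source); together the
 three builtins forward an unknown argument list:

 void *args = __builtin_apply_args ();
 void *ret = __builtin_apply ((void (*) ()) fn, args, 64);
 __builtin_return (ret);

 where fn and the worst-case argument byte count 64 are this
 sketch's assumptions. */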
8026 case BUILT_IN_APPLY_ARGS:
8027 /* Don't do __builtin_apply_args more than once in a function.
8028 Save the result of the first call and reuse it. */
8029 if (apply_args_value != 0)
8030 return apply_args_value;
8031 {
8032 /* When this function is called, it means that registers must be
8033 saved on entry to this function. So we migrate the
8034 call to the first insn of this function. */
8035 rtx temp;
8036 rtx seq;
8037
8038 start_sequence ();
8039 temp = expand_builtin_apply_args ();
8040 seq = get_insns ();
8041 end_sequence ();
8042
8043 apply_args_value = temp;
8044
8045 /* Put the sequence after the NOTE that starts the function.
8046 If this is inside a SEQUENCE, make the outer-level insn
8047 chain current, so the code is placed at the start of the
8048 function. */
8049 push_topmost_sequence ();
8050 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8051 pop_topmost_sequence ();
8052 return temp;
8053 }
8054
8055 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8056 FUNCTION with a copy of the parameters described by
8057 ARGUMENTS, and ARGSIZE. It returns a block of memory
8058 allocated on the stack into which is stored all the registers
8059 that might possibly be used for returning the result of a
8060 function. ARGUMENTS is the value returned by
8061 __builtin_apply_args. ARGSIZE is the number of bytes of
8062 arguments that must be copied. ??? How should this value be
8063 computed? We'll also need a safe worst case value for varargs
8064 functions. */
8065 case BUILT_IN_APPLY:
8066 if (arglist == 0
8067 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8068 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
8069 || TREE_CHAIN (arglist) == 0
8070 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8071 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8072 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8073 return const0_rtx;
8074 else
8075 {
8076 int i;
8077 tree t;
8078 rtx ops[3];
0006469d 8079
b93a436e
JL
8080 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8081 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
bbf6f052 8082
b93a436e
JL
8083 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8084 }
bbf6f052 8085
b93a436e
JL
8086 /* __builtin_return (RESULT) causes the function to return the
8087 value described by RESULT. RESULT is address of the block of
8088 memory returned by __builtin_apply. */
8089 case BUILT_IN_RETURN:
8090 if (arglist
8091 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8092 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8093 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8094 NULL_RTX, VOIDmode, 0));
8095 return const0_rtx;
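
      /* Added sketch (not original source): together these three builtins
	 let a stub forward its incoming arguments to another function and
	 return that function's result untouched, e.g.

	     void *args = __builtin_apply_args ();
	     void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
	     __builtin_return (result);

	 where `target_fn' and the worst-case argument size 64 are
	 hypothetical user-supplied values.  */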

    case BUILT_IN_SAVEREGS:
      /* Don't do __builtin_saveregs more than once in a function.
	 Save the result of the first call and reuse it.  */
      if (saveregs_value != 0)
	return saveregs_value;
      {
	/* When this function is called, it means that registers must be
	   saved on entry to this function.  So we migrate the
	   call to the first insn of this function.  */
	rtx temp;
	rtx seq;

	/* Now really call the function.  `expand_call' does not call
	   expand_builtin, so there is no danger of infinite recursion here.  */
	start_sequence ();

#ifdef EXPAND_BUILTIN_SAVEREGS
	/* Do whatever the machine needs done in this case.  */
	temp = EXPAND_BUILTIN_SAVEREGS (arglist);
#else
	/* The register where the function returns its value
	   is likely to have something else in it, such as an argument.
	   So preserve that register around the call.  */

	if (value_mode != VOIDmode)
	  {
	    rtx valreg = hard_libcall_value (value_mode);
	    rtx saved_valreg = gen_reg_rtx (value_mode);

	    emit_move_insn (saved_valreg, valreg);
	    temp = expand_call (exp, target, ignore);
	    emit_move_insn (valreg, saved_valreg);
	  }
	else
	  /* Generate the call, putting the value in a pseudo.  */
	  temp = expand_call (exp, target, ignore);
#endif

	seq = get_insns ();
	end_sequence ();

	saveregs_value = temp;

	/* Put the sequence after the NOTE that starts the function.
	   If this is inside a SEQUENCE, make the outer-level insn
	   chain current, so the code is placed at the start of the
	   function.  */
	push_topmost_sequence ();
	emit_insns_before (seq, NEXT_INSN (get_insns ()));
	pop_topmost_sequence ();
	return temp;
      }

    /* __builtin_args_info (N) returns word N of the arg space info
       for the current function.  The number and meanings of the words
       are controlled by the definition of CUMULATIVE_ARGS.  */
    case BUILT_IN_ARGS_INFO:
      {
	int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
	int *word_ptr = (int *) &current_function_args_info;
#if 0
	/* These are used by the code below that is if 0'ed away.  */
	int i;
	tree type, elts, result;
#endif

	if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
	  fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
		 __FILE__, __LINE__);

	if (arglist != 0)
	  {
	    tree arg = TREE_VALUE (arglist);
	    if (TREE_CODE (arg) != INTEGER_CST)
	      error ("argument of `__builtin_args_info' must be constant");
	    else
	      {
		int wordnum = TREE_INT_CST_LOW (arg);

		if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
		  error ("argument of `__builtin_args_info' out of range");
		else
		  return GEN_INT (word_ptr[wordnum]);
	      }
	  }
	else
	  error ("missing argument in `__builtin_args_info'");

	return const0_rtx;

#if 0
	for (i = 0; i < nwords; i++)
	  elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));

	type = build_array_type (integer_type_node,
				 build_index_type (build_int_2 (nwords, 0)));
	result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
	TREE_CONSTANT (result) = 1;
	TREE_STATIC (result) = 1;
	result = build (INDIRECT_REF, build_pointer_type (type), result);
	TREE_CONSTANT (result) = 1;
	return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
#endif
      }

    /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      {
	tree fntype = TREE_TYPE (current_function_decl);

	if ((TYPE_ARG_TYPES (fntype) == 0
	     || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
		 == void_type_node))
	    && ! current_function_varargs)
	  {
	    error ("`va_start' used in function with fixed args");
	    return const0_rtx;
	  }

	if (arglist)
	  {
	    tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
	    tree arg = TREE_VALUE (arglist);

	    /* Strip off all nops for the sake of the comparison.  This
	       is not quite the same as STRIP_NOPS.  It does more.
	       We must also strip off INDIRECT_REF for C++ reference
	       parameters.  */
	    while (TREE_CODE (arg) == NOP_EXPR
		   || TREE_CODE (arg) == CONVERT_EXPR
		   || TREE_CODE (arg) == NON_LVALUE_EXPR
		   || TREE_CODE (arg) == INDIRECT_REF)
	      arg = TREE_OPERAND (arg, 0);
	    if (arg != last_parm)
	      warning ("second parameter of `va_start' not last named argument");
	  }
	else if (! current_function_varargs)
	  /* Evidently an out of date version of <stdarg.h>; can't validate
	     va_start's second argument, but can still work as intended.  */
	  warning ("`__builtin_next_arg' called without an argument");
      }

      return expand_binop (Pmode, add_optab,
			   current_function_internal_arg_pointer,
			   current_function_arg_offset_rtx,
			   NULL_RTX, 0, OPTAB_LIB_WIDEN);

    case BUILT_IN_CLASSIFY_TYPE:
      if (arglist != 0)
	{
	  tree type = TREE_TYPE (TREE_VALUE (arglist));
	  enum tree_code code = TREE_CODE (type);
	  if (code == VOID_TYPE)
	    return GEN_INT (void_type_class);
	  if (code == INTEGER_TYPE)
	    return GEN_INT (integer_type_class);
	  if (code == CHAR_TYPE)
	    return GEN_INT (char_type_class);
	  if (code == ENUMERAL_TYPE)
	    return GEN_INT (enumeral_type_class);
	  if (code == BOOLEAN_TYPE)
	    return GEN_INT (boolean_type_class);
	  if (code == POINTER_TYPE)
	    return GEN_INT (pointer_type_class);
	  if (code == REFERENCE_TYPE)
	    return GEN_INT (reference_type_class);
	  if (code == OFFSET_TYPE)
	    return GEN_INT (offset_type_class);
	  if (code == REAL_TYPE)
	    return GEN_INT (real_type_class);
	  if (code == COMPLEX_TYPE)
	    return GEN_INT (complex_type_class);
	  if (code == FUNCTION_TYPE)
	    return GEN_INT (function_type_class);
	  if (code == METHOD_TYPE)
	    return GEN_INT (method_type_class);
	  if (code == RECORD_TYPE)
	    return GEN_INT (record_type_class);
	  if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
	    return GEN_INT (union_type_class);
	  if (code == ARRAY_TYPE)
	    {
	      if (TYPE_STRING_FLAG (type))
		return GEN_INT (string_type_class);
	      else
		return GEN_INT (array_type_class);
	    }
	  if (code == SET_TYPE)
	    return GEN_INT (set_type_class);
	  if (code == FILE_TYPE)
	    return GEN_INT (file_type_class);
	  if (code == LANG_TYPE)
	    return GEN_INT (lang_type_class);
	}
      return GEN_INT (no_type_class);
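
      /* Added examples: at the source level this means
	 __builtin_classify_type (1.5) folds to real_type_class and a
	 struct-valued argument folds to record_type_class; the
	 *_type_class constants come from typeclass.h.  */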

    case BUILT_IN_CONSTANT_P:
      if (arglist == 0)
	return const0_rtx;
      else
	{
	  tree arg = TREE_VALUE (arglist);

	  STRIP_NOPS (arg);
	  return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
		  || (TREE_CODE (arg) == ADDR_EXPR
		      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
		  ? const1_rtx : const0_rtx);
	}
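
      /* Worked example (added commentary): under this test,
	 __builtin_constant_p (3 + 4) yields 1 because the argument folds
	 to a constant ('c' class) node, and __builtin_constant_p ("abc")
	 yields 1 via the ADDR_EXPR-of-STRING_CST check, while a variable
	 argument yields 0 at this point.  */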

    case BUILT_IN_FRAME_ADDRESS:
      /* The argument must be a nonnegative integer constant.
	 It counts the number of frames to scan up the stack.
	 The value is the address of that frame.  */
    case BUILT_IN_RETURN_ADDRESS:
      /* The argument must be a nonnegative integer constant.
	 It counts the number of frames to scan up the stack.
	 The value is the return address saved in that frame.  */
      if (arglist == 0)
	/* Warning about missing arg was already issued.  */
	return const0_rtx;
      else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
	       || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
	{
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    error ("invalid arg to `__builtin_frame_address'");
	  else
	    error ("invalid arg to `__builtin_return_address'");
	  return const0_rtx;
	}
      else
	{
	  rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
						TREE_INT_CST_LOW (TREE_VALUE (arglist)),
						hard_frame_pointer_rtx);

	  /* Some ports cannot access arbitrary stack frames.  */
	  if (tem == NULL)
	    {
	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
		warning ("unsupported arg to `__builtin_frame_address'");
	      else
		warning ("unsupported arg to `__builtin_return_address'");
	      return const0_rtx;
	    }

	  /* For __builtin_frame_address, return what we've got.  */
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    return tem;

	  if (GET_CODE (tem) != REG)
	    tem = copy_to_reg (tem);
	  return tem;
	}
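
      /* Added note: __builtin_return_address (0), the common case, yields
	 the current function's return address; a larger count such as
	 __builtin_return_address (2) walks two frames up and is exactly
	 the case a port may reject by returning NULL above.  */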

    /* Return the address of the area where the structure is returned,
       or 0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (arglist != 0
	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
	  || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
	return const0_rtx;
      else
	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

    case BUILT_IN_ALLOCA:
      if (arglist == 0
	  /* Arg could be non-integer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
	break;

      /* Compute the argument.  */
      op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);

      /* Allocate the desired space.  */
      return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
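
      /* Added note: this is why `__builtin_alloca (n)' compiles to an
	 inline stack-pointer adjustment instead of a library call;
	 allocate_dynamic_stack_space emits that adjustment directly.  */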

    case BUILT_IN_FFS:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-integer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
	break;

      /* Compute the argument.  */
      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
      /* Compute ffs, into TARGET if possible.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
			    ffs_optab, op0, target, 1);
      if (target == 0)
	abort ();
      return target;

    case BUILT_IN_STRLEN:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
	break;
      else
	{
	  tree src = TREE_VALUE (arglist);
	  tree len = c_strlen (src);

	  int align
	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

	  rtx result, src_rtx, char_rtx;
	  enum machine_mode insn_mode = value_mode, char_mode;
	  enum insn_code icode;

	  /* If the length is known, just return it.  */
	  if (len != 0)
	    return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);

	  /* If SRC is not a pointer type, don't do this operation inline.  */
	  if (align == 0)
	    break;

	  /* Call a function if we can't compute strlen in the right mode.  */

	  while (insn_mode != VOIDmode)
	    {
	      icode = strlen_optab->handlers[(int) insn_mode].insn_code;
	      if (icode != CODE_FOR_nothing)
		break;

	      insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	    }
	  if (insn_mode == VOIDmode)
	    break;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && GET_CODE (result) == REG
		 && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  /* Make sure the operands are acceptable to the predicates.  */

	  if (! (*insn_operand_predicate[(int) icode][0]) (result, insn_mode))
	    result = gen_reg_rtx (insn_mode);
	  src_rtx = memory_address (BLKmode,
				    expand_expr (src, NULL_RTX, ptr_mode,
						 EXPAND_NORMAL));

	  if (! (*insn_operand_predicate[(int) icode][1]) (src_rtx, Pmode))
	    src_rtx = copy_to_mode_reg (Pmode, src_rtx);

	  /* Check the string is readable and has an end.  */
	  if (flag_check_memory_usage)
	    emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
			       src_rtx, ptr_mode,
			       GEN_INT (MEMORY_USE_RO),
			       TYPE_MODE (integer_type_node));

	  char_rtx = const0_rtx;
	  char_mode = insn_operand_mode[(int) icode][2];
	  if (! (*insn_operand_predicate[(int) icode][2]) (char_rtx, char_mode))
	    char_rtx = copy_to_mode_reg (char_mode, char_rtx);

	  emit_insn (GEN_FCN (icode) (result,
				      gen_rtx_MEM (BLKmode, src_rtx),
				      char_rtx, GEN_INT (align)));

	  /* Return the value in the proper mode for this function.  */
	  if (GET_MODE (result) == value_mode)
	    return result;
	  else if (target != 0)
	    {
	      convert_move (target, result, 0);
	      return target;
	    }
	  else
	    return convert_to_mode (value_mode, result, 0);
	}
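
      /* Added note: the constant case never reaches the insn search
	 above -- c_strlen folds `strlen ("hello")' directly to 5, and
	 only a non-constant argument falls through to the strlen_optab
	 probe and the inline strlen insn.  */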

    case BUILT_IN_STRCPY:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
	break;
      else
	{
	  tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));

	  if (len == 0)
	    break;

	  len = size_binop (PLUS_EXPR, len, integer_one_node);

	  chainon (arglist, build_tree_list (NULL_TREE, len));
	}

      /* Drops in.  */
    case BUILT_IN_MEMCPY:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
	      != POINTER_TYPE)
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || (TREE_CODE (TREE_TYPE (TREE_VALUE
				    (TREE_CHAIN (TREE_CHAIN (arglist)))))
	      != INTEGER_TYPE))
	break;
      else
	{
	  tree dest = TREE_VALUE (arglist);
	  tree src = TREE_VALUE (TREE_CHAIN (arglist));
	  tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
	  tree type;

	  int src_align
	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	  int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	  rtx dest_rtx, dest_mem, src_mem, src_rtx, dest_addr, len_rtx;

	  /* If either SRC or DEST is not a pointer type, don't do
	     this operation in-line.  */
	  if (src_align == 0 || dest_align == 0)
	    {
	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
		TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
	      break;
	    }

	  dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
	  dest_mem = gen_rtx_MEM (BLKmode,
				  memory_address (BLKmode, dest_rtx));
	  /* There could be a void* cast on top of the object.  */
	  while (TREE_CODE (dest) == NOP_EXPR)
	    dest = TREE_OPERAND (dest, 0);
	  type = TREE_TYPE (TREE_TYPE (dest));
	  MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
	  src_rtx = expand_expr (src, NULL_RTX, ptr_mode, EXPAND_SUM);
	  src_mem = gen_rtx_MEM (BLKmode,
				 memory_address (BLKmode, src_rtx));
	  len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);

	  /* Just copy the rights of SRC to the rights of DEST.  */
	  if (flag_check_memory_usage)
	    emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
			       dest_rtx, ptr_mode,
			       src_rtx, ptr_mode,
			       len_rtx, TYPE_MODE (sizetype));

	  /* There could be a void* cast on top of the object.  */
	  while (TREE_CODE (src) == NOP_EXPR)
	    src = TREE_OPERAND (src, 0);
	  type = TREE_TYPE (TREE_TYPE (src));
	  MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);

	  /* Copy word part most expediently.  */
	  dest_addr
	    = emit_block_move (dest_mem, src_mem, len_rtx,
			       MIN (src_align, dest_align));

	  if (dest_addr == 0)
	    dest_addr = force_operand (dest_rtx, NULL_RTX);

	  return dest_addr;
	}
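
      /* Added note: the net effect for a known-length source is that
	 `strcpy (d, "hi")' falls through from the case above with the
	 extra length argument 3 (two chars plus the terminating NUL)
	 and is expanded here exactly like `memcpy (d, "hi", 3)', i.e.
	 as a block move at the best known alignment.  */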

    case BUILT_IN_MEMSET:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
	      != INTEGER_TYPE)
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || (TREE_CODE (TREE_TYPE (TREE_VALUE
				    (TREE_CHAIN (TREE_CHAIN (arglist)))))
	      != INTEGER_TYPE))
	break;
      else
	{
	  tree dest = TREE_VALUE (arglist);
	  tree val = TREE_VALUE (TREE_CHAIN (arglist));
	  tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
	  tree type;

	  int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	  rtx dest_rtx, dest_mem, dest_addr, len_rtx;

	  /* If DEST is not a pointer type, don't do this
	     operation in-line.  */
	  if (dest_align == 0)
	    break;

	  /* If the arguments have side-effects, then we can only evaluate
	     them at most once.  The following code evaluates them twice if
	     they are not constants because we break out to expand_call
	     in that case.  They can't be constants if they have side-effects
	     so we can check for that first.  Alternatively, we could call
	     save_expr to make multiple evaluation safe.  */
	  if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
	    break;

	  /* If VAL is not 0, don't do this operation in-line.  */
	  if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
	    break;

	  /* If LEN does not expand to a constant, don't do this
	     operation in-line.  */
	  len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
	  if (GET_CODE (len_rtx) != CONST_INT)
	    break;

	  dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
	  dest_mem = gen_rtx_MEM (BLKmode,
				  memory_address (BLKmode, dest_rtx));

	  /* Just check DST is writable and mark it as readable.  */
	  if (flag_check_memory_usage)
	    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
			       dest_rtx, ptr_mode,
			       len_rtx, TYPE_MODE (sizetype),
			       GEN_INT (MEMORY_USE_WO),
			       TYPE_MODE (integer_type_node));

	  /* There could be a void* cast on top of the object.  */
	  while (TREE_CODE (dest) == NOP_EXPR)
	    dest = TREE_OPERAND (dest, 0);

	  if (TREE_CODE (dest) == ADDR_EXPR)
	    /* If this is the address of an object, check whether the
	       object is an array.  */
	    type = TREE_TYPE (TREE_OPERAND (dest, 0));
	  else
	    type = TREE_TYPE (TREE_TYPE (dest));
	  MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);

	  dest_addr = clear_storage (dest_mem, len_rtx, dest_align);

	  if (dest_addr == 0)
	    dest_addr = force_operand (dest_rtx, NULL_RTX);

	  return dest_addr;
	}

/* These comparison functions need an instruction that returns an actual
   index.  An ordinary compare that just sets the condition codes
   is not enough.  */
#ifdef HAVE_cmpstrsi
    case BUILT_IN_STRCMP:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      /* If we need to check memory accesses, call the library function.  */
      if (flag_check_memory_usage)
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
	break;
      else if (!HAVE_cmpstrsi)
	break;
      {
	tree arg1 = TREE_VALUE (arglist);
	tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
	tree len, len2;

	len = c_strlen (arg1);
	if (len)
	  len = size_binop (PLUS_EXPR, integer_one_node, len);
	len2 = c_strlen (arg2);
	if (len2)
	  len2 = size_binop (PLUS_EXPR, integer_one_node, len2);

	/* If we don't have a constant length for the first, use the length
	   of the second, if we know it.  We don't require a constant for
	   this case; some cost analysis could be done if both are available
	   but neither is constant.  For now, assume they're equally cheap.

	   If both strings have constant lengths, use the smaller.  This
	   could arise if optimization results in strcmp being called with
	   two fixed strings, or if the code was machine-generated.  We should
	   add some code to the `memcmp' handler below to deal with such
	   situations, someday.  */
	if (!len || TREE_CODE (len) != INTEGER_CST)
	  {
	    if (len2)
	      len = len2;
	    else if (len == 0)
	      break;
	  }
	else if (len2 && TREE_CODE (len2) == INTEGER_CST)
	  {
	    if (tree_int_cst_lt (len2, len))
	      len = len2;
	  }

	chainon (arglist, build_tree_list (NULL_TREE, len));
      }

      /* Drops in.  */
    case BUILT_IN_MEMCMP:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      /* If we need to check memory accesses, call the library function.  */
      if (flag_check_memory_usage)
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
	break;
      else if (!HAVE_cmpstrsi)
	break;
      {
	tree arg1 = TREE_VALUE (arglist);
	tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
	tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
	rtx result;

	int arg1_align
	  = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	int arg2_align
	  = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	enum machine_mode insn_mode
	  = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];

	/* If we don't have POINTER_TYPE, call the function.  */
	if (arg1_align == 0 || arg2_align == 0)
	  {
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
	      TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
	    break;
	  }

	/* Make a place to write the result of the instruction.  */
	result = target;
	if (! (result != 0
	       && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
	       && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	  result = gen_reg_rtx (insn_mode);

	emit_insn (gen_cmpstrsi (result,
				 gen_rtx_MEM (BLKmode,
					      expand_expr (arg1, NULL_RTX,
							   ptr_mode,
							   EXPAND_NORMAL)),
				 gen_rtx_MEM (BLKmode,
					      expand_expr (arg2, NULL_RTX,
							   ptr_mode,
							   EXPAND_NORMAL)),
				 expand_expr (len, NULL_RTX, VOIDmode, 0),
				 GEN_INT (MIN (arg1_align, arg2_align))));

	/* Return the value in the proper mode for this function.  */
	mode = TYPE_MODE (TREE_TYPE (exp));
	if (GET_MODE (result) == mode)
	  return result;
	else if (target != 0)
	  {
	    convert_move (target, result, 0);
	    return target;
	  }
	else
	  return convert_to_mode (mode, result, 0);
      }
#else
    case BUILT_IN_STRCMP:
    case BUILT_IN_MEMCMP:
      break;
#endif

    case BUILT_IN_SETJMP:
      if (arglist == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
	break;
      else
	{
	  rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
				      VOIDmode, 0);
	  rtx lab = gen_label_rtx ();
	  rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
	  emit_label (lab);
	  return ret;
	}

      /* __builtin_longjmp is passed a pointer to an array of five words.
	 It's similar to the C library longjmp function but works with
	 __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (arglist == 0 || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
	break;
      else
	{
	  rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
				      VOIDmode, 0);
	  rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
				   const0_rtx, VOIDmode, 0);
	  expand_builtin_longjmp (buf_addr, value);
	  return const0_rtx;
	}
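
      /* Added sketch (not original source): the intended source-level
	 pairing of these two builtins is roughly

	     void *buf[5];
	     if (__builtin_setjmp (buf) == 0)
	       ... normal path ...
	     else
	       ... control resumes here after __builtin_longjmp (buf, 1) ...

	 with `buf' being the five-word buffer the comment above describes.  */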

    /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_FP:
      return frame_pointer_rtx;
    case BUILT_IN_SP:
      return stack_pointer_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_FP_REGNUM:
      return expand_builtin_dwarf_fp_regnum ();
    case BUILT_IN_DWARF_REG_SIZE:
      return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
    case BUILT_IN_SET_RETURN_ADDR_REG:
      expand_builtin_set_return_addr_reg (TREE_VALUE (arglist));
      return const0_rtx;
    case BUILT_IN_EH_STUB:
      return expand_builtin_eh_stub ();
    case BUILT_IN_SET_EH_REGS:
      expand_builtin_set_eh_regs (TREE_VALUE (arglist),
				  TREE_VALUE (TREE_CHAIN (arglist)));
      return const0_rtx;

    default:			/* just do library call, if unknown builtin */
      error ("built-in function `%s' not currently supported",
	     IDENTIFIER_POINTER (DECL_NAME (fndecl)));
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */

  return expand_call (exp, target, ignore);
}
\f
/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];

/* Return the offset of register REGNO into the block returned by
   __builtin_apply_args.  This is not declared static, since it is
   needed in objc-act.c.  */

int
apply_args_register_offset (regno)
     int regno;
{
  apply_args_size ();

  /* Arguments are always put in outgoing registers (in the argument
     block) when that makes sense.  */
#ifdef OUTGOING_REGNO
  regno = OUTGOING_REGNO (regno);
#endif
  return apply_args_reg_offset[regno];
}

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (struct_value_rtx)
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != VOIDmode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode)
		  && HARD_REGNO_NREGS (regno, mode) == 1)
		best_mode = mode;

	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && (mov_optab->handlers[(int) mode].insn_code
			!= CODE_FOR_nothing))
		  best_mode = mode;

	    mode = best_mode;
	    if (mode == VOIDmode)
	      abort ();

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    apply_args_reg_offset[regno] = size;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	    apply_args_reg_offset[regno] = 0;
	  }
    }
  return size;
}
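
/* Worked example (added commentary): the rounding pattern used above and
   below keeps each saved register's slot aligned.  With size = 6 and a
   mode needing 4-byte alignment, CEIL (6, 4) = 2, so size is bumped to
   2 * 4 = 8 before the register's bytes are appended; readers of the
   __builtin_apply_args block must assume the same layout.  */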

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_VALUE_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != TImode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode))
		best_mode = mode;

	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && (mov_optab->handlers[(int) mode].insn_code
			!= CODE_FOR_nothing))
		  best_mode = mode;

	    mode = best_mode;
	    if (mode == VOIDmode)
	      abort ();

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (savep, result)
     int savep;
     rtx result;
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = change_address (result, mode,
			      plus_constant (XEXP (result, 0), size));
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args ()
{
  rtx registers;
  int size, align, regno;
  enum machine_mode mode;

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	rtx tem;

	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

#ifdef STACK_REGS
	/* For reg-stack.c's stack register household.
	   Compare with a similar piece of code in function.c.  */

	emit_insn (gen_rtx_USE (mode, tem));
#endif

	emit_move_insn (change_address (registers, mode,
					plus_constant (XEXP (registers, 0),
						       size)),
			tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
		  copy_to_reg (virtual_incoming_args_rtx));
  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_value_incoming_rtx)
    {
      emit_move_insn (change_address (registers, Pmode,
				      plus_constant (XEXP (registers, 0),
						     size)),
		      copy_to_reg (struct_value_incoming_rtx));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (function, arguments, argsize)
     rtx function, arguments, argsize;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* ??? The argsize value should be adjusted here.  */

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args,
		  gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
				incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Push a new argument block and copy the arguments.  */
  do_pending_stack_adjust ();

  /* Save the stack, using the nonlocal mechanism if it is available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Push a block of memory onto the stack to store the memory arguments.
     Save the address in a register, and copy the memory arguments.  ??? I
     haven't figured out how the calling convention macros affect this,
     but it's likely that the source and/or destination addresses in
     the block copy will need updating in machine specific ways.  */
  dest = allocate_dynamic_stack_space (argsize, 0, 0);
  emit_block_move (gen_rtx_MEM (BLKmode, dest),
		   gen_rtx_MEM (BLKmode, incoming_args),
		   argsize,
		   PARM_BOUNDARY / BITS_PER_UNIT);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg,
			change_address (arguments, mode,
					plus_constant (XEXP (arguments, 0),
						       size)));

	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value,
		      change_address (arguments, Pmode,
				      plus_constant (XEXP (arguments, 0),
						     size)));
      emit_move_insn (struct_value_rtx, value);
      if (GET_CODE (struct_value_rtx) == REG)
	use_reg (&call_fusage, struct_value_rtx);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    if (valreg)
	      abort (); /* HAVE_untyped_call required.  */
	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (gen_call_value (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (change_address (result, GET_MODE (valreg),
				      XEXP (result, 0)),
		      valreg);
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  /* Return the address of the result block.  */
  return copy_addr_to_reg (XEXP (result, 0));
}

/* Perform an untyped return.  */

static void
expand_builtin_return (result)
     rtx result;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg,
			change_address (result, mode,
					plus_constant (XEXP (result, 0),
						       size)));

	push_to_sequence (call_fusage);
	emit_insn (gen_rtx_USE (VOIDmode, reg));
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insns (call_fusage);

  /* Return whatever values were restored by jumping directly to the end
     of the function.  */
  expand_null_return ();
}
\f
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */

static rtx
expand_increment (exp, post, ignore)
     register tree exp;
     int post, ignore;
{
  register rtx op0, op1;
  register rtx temp, value;
  register tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    {
      if (post)
	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
      else
	bad_subreg = 1;
    }
  else if (GET_CODE (op0) == SUBREG
	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
    {
      /* We cannot increment this SUBREG in place.  If we are
	 post-incrementing, get a copy of the old value.  Otherwise,
	 just mark that we cannot increment in place.  */
      if (post)
	op0 = copy_to_reg (op0);
      else
	bad_subreg = 1;
    }

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
		 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
		     EXPAND_MEMORY_USE_BAD);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (- INTVAL (op1));
      this_optab = add_optab;
    }

  /* For a preincrement, see if we can do this with a single instruction.  */
  if (!post)
    {
      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_operand_predicate[icode][0]) (op0, mode)
	  && (*insn_operand_predicate[icode][1]) (op0, mode)
	  && (*insn_operand_predicate[icode][2]) (op1, mode))
	single_insn = 1;
    }

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
    {
      /* This is the easiest way to increment the value wherever it is.
	 Problems with multiple evaluation of INCREMENTED are prevented
	 because either (1) it is a component_ref or preincrement,
	 in which case it was stabilized above, or (2) it is an array_ref
	 with constant index in an array in a register, which is
	 safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
			    ? MINUS_EXPR : PLUS_EXPR),
			   TREE_TYPE (exp),
			   incremented,
			   TREE_OPERAND (exp, 1));

      while (TREE_CODE (incremented) == NOP_EXPR
	     || TREE_CODE (incremented) == CONVERT_EXPR)
	{
	  newexp = convert (TREE_TYPE (incremented), newexp);
	  incremented = TREE_OPERAND (incremented, 0);
	}

      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
      return post ? op0 : temp;
    }

  if (post)
    {
      /* We have a true reference to the value in OP0.
	 If there is an insn to add or subtract in this mode, queue it.
	 Queueing the increment insn avoids the register shuffling
	 that often results if we must increment now and first save
	 the old value for subsequent use.  */

#if 0 /* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_operand_predicate[icode][0]) (op0, mode)
	  && (*insn_operand_predicate[icode][1]) (op0, mode))
	{
	  if (! (*insn_operand_predicate[icode][2]) (op1, mode))
	    op1 = force_reg (mode, op1);

	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
	}
      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
	{
	  rtx addr = (general_operand (XEXP (op0, 0), mode)
		      ? force_reg (Pmode, XEXP (op0, 0))
		      : copy_to_reg (XEXP (op0, 0)));
	  rtx temp, result;

	  op0 = change_address (op0, VOIDmode, addr);
	  temp = force_reg (GET_MODE (op0), op0);
	  if (! (*insn_operand_predicate[icode][2]) (op1, mode))
	    op1 = force_reg (mode, op1);

	  /* The increment queue is LIFO, thus we have to `queue'
	     the instructions in reverse order.  */
	  enqueue_insn (op0, gen_move_insn (op0, temp));
	  result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
	  return result;
	}
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1,
		      flag_check_memory_usage ? NULL_RTX : op0,
		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
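
/* Worked example (added commentary, not original source): for a
   postincrement used for its value, the code above returns a saved copy
   of the old value and queues the add, so `j = i++;' acts as
   `j = i; i = i + 1;'.  A preincrement instead tries the single-insn
   path first and returns the updated lvalue itself.  */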
9565\f
9566/* Expand all function calls contained within EXP, innermost ones first.
9567 But don't look within expressions that have sequence points.
9568 For each CALL_EXPR, record the rtx for its value
9569 in the CALL_EXPR_RTL field. */
5718612f 9570
b93a436e
JL
9571static void
9572preexpand_calls (exp)
9573 tree exp;
9574{
9575 register int nops, i;
9576 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5718612f 9577
b93a436e
JL
9578 if (! do_preexpand_calls)
9579 return;
5718612f 9580
b93a436e 9581 /* Only expressions and references can contain calls. */
bbf6f052 9582
b93a436e
JL
9583 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9584 return;

  switch (TREE_CODE (exp))
    {
    case CALL_EXPR:
      /* Do nothing if already expanded.  */
      if (CALL_EXPR_RTL (exp) != 0
          /* Do nothing if the call returns a variable-sized object.  */
          || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
          /* Do nothing to built-in functions.  */
          || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
              && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
                  == FUNCTION_DECL)
              && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
        return;

      CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
      return;

    case COMPOUND_EXPR:
    case COND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* If we find one of these, then we can be sure
         the adjust will be done for it (since it makes jumps).
         Do it now, so that if this is inside an argument
         of a function, we don't get the stack adjustment
         after some other args have already been pushed.  */
      do_pending_stack_adjust ();
      return;

    case BLOCK:
    case RTL_EXPR:
    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TRY_CATCH_EXPR:
      return;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
        return;

    default:
      break;
    }

  nops = tree_code_length[(int) TREE_CODE (exp)];
  for (i = 0; i < nops; i++)
    if (TREE_OPERAND (exp, i) != 0)
      {
        type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
        if (type == 'e' || type == '<' || type == '1' || type == '2'
            || type == 'r')
          preexpand_calls (TREE_OPERAND (exp, i));
      }
}
\f
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}

/* When exiting from a function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    pending_stack_adjust = 0;
#endif
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
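
/* Note (added for clarity): deferring these pops is what allows the
   argument pops of several consecutive calls to be combined.  For
   example, two back-to-back calls that each push 8 bytes of arguments
   can be followed by a single `adjust_stack (GEN_INT (16))' rather
   than two separate 8-byte adjustments.  */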
\f
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */

void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  register enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  rtx comparison = 0;
  int i;
  tree type;
  enum machine_mode mode;

  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->non-zero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

#if 0
      /* This is never less insns than evaluating the PLUS_EXPR followed by
         a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
                   TREE_OPERAND (exp, 0),
                   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
                                 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Non-zero iff operands of minus differ.  */
      comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
                                   TREE_OPERAND (exp, 0),
                                   TREE_OPERAND (exp, 1)),
                            NE, NE);
      break;

    case BIT_AND_EXPR:
      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;
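
      /* Example (added for clarity): for `if (x & 0x80)' with X in
         SImode, the constant fits in 8 bits, so when a QImode compare
         insn exists the test is redone as a QImode comparison, letting
         the sign bit of the byte be examined directly.  */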

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      if (if_false_label == 0)
        if_false_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == 0)
        if_true_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;
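
      /* Note (added for clarity): this is the usual short-circuit
         lowering.  For `if (a && b)' the first do_jump branches to the
         false (or drop-through) label as soon as A is zero, so B is
         evaluated only when A is nonzero; `||' is the mirror image.  */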

    case COMPOUND_EXPR:
      push_temp_slots ();
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      preserve_temp_slots (NULL_RTX);
      free_temp_slots ();
      pop_temp_slots ();
      emit_queue ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
      {
        int bitsize, bitpos, unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;
        int alignment;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep,
                             &alignment);

        type = type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
                != CODE_FOR_nothing))
          {
            do_jump (convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
          && integer_zerop (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
               && integer_onep (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
        {
          register rtx label1 = gen_label_rtx ();
          drop_through_label = gen_label_rtx ();

          do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);

          start_cleanup_deferral ();
          /* Now the THEN-expression.  */
          do_jump (TREE_OPERAND (exp, 1),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          /* In case the do_jump just above never jumps.  */
          do_pending_stack_adjust ();
          emit_label (label1);

          /* Now the ELSE-expression.  */
          do_jump (TREE_OPERAND (exp, 2),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          end_cleanup_deferral ();
        }
      break;

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
                 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          do_jump
            (fold
             (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
                     fold (build (EQ_EXPR, TREE_TYPE (exp),
                                  fold (build1 (REALPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 0))),
                                  fold (build1 (REALPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 1))))),
                     fold (build (EQ_EXPR, TREE_TYPE (exp),
                                  fold (build1 (IMAGPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 0))),
                                  fold (build1 (IMAGPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 1))))))),
             if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (TYPE_MODE (inner_type)))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          comparison = compare (exp, EQ, EQ);
        break;
      }

    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
                 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          do_jump
            (fold
             (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
                     fold (build (NE_EXPR, TREE_TYPE (exp),
                                  fold (build1 (REALPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 0))),
                                  fold (build1 (REALPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 1))))),
                     fold (build (NE_EXPR, TREE_TYPE (exp),
                                  fold (build1 (IMAGPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 0))),
                                  fold (build1 (IMAGPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 1))))))),
             if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (TYPE_MODE (inner_type)))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          comparison = compare (exp, NE, NE);
        break;
      }
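
      /* Note (added for clarity): a complex equality `a == b' is thus
         rewritten as `realpart (a) == realpart (b) && imagpart (a) ==
         imagpart (b)', and `a != b' as the corresponding `||', before
         being expanded recursively.  */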

    case LT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        comparison = compare (exp, LT, LTU);
      break;

    case LE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        comparison = compare (exp, LE, LEU);
      break;

    case GT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        comparison = compare (exp, GT, GTU);
      break;

    case GE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        comparison = compare (exp, GE, GEU);
      break;
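
      /* Note (added for clarity): do_jump_by_parts_greater only knows
         how to test `>' (SWAP == 0) or `<' (SWAP == 1), so LE and GE
         are handled by testing the opposite relation with the two
         labels exchanged: `a <= b' jumps to the false label exactly
         when `a > b' would have jumped to the true one.  */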

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
         comparisons and tests from non-SI objects to have different code
         sequences.  */
      /* Copy to register to avoid generating bad insns by cse
         from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
        temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      if (GET_CODE (temp) == CONST_INT)
        comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
      else if (GET_CODE (temp) == LABEL_REF)
        comparison = const_true_rtx;
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && !can_compare_p (GET_MODE (temp)))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
        comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
                                       NE, TREE_UNSIGNED (TREE_TYPE (exp)),
                                       GET_MODE (temp), NULL_RTX, 0);
      else
        abort ();
    }

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
     straight into a conditional jump instruction as the jump condition.
     Otherwise, all the work has been done already.  */

  if (comparison == const_true_rtx)
    {
      if (if_true_label)
        emit_jump (if_true_label);
    }
  else if (comparison == const0_rtx)
    {
      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (comparison)
    do_jump_for_compare (comparison, if_false_label, if_true_label);

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
         do any stack adjusts from that code, before the place
         where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
\f
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
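  /* Example (added for clarity): comparing two DImode values on a
     32-bit target takes at most two rounds; the high-order words are
     compared first and decide the result if they differ, and only when
     they are equal are the low-order words compared, always unsigned,
     since the low words carry no sign information.  */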
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
                               (unsignedp || i > 0) ? GTU : GT,
                               unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
                               (unsignedp || i > 0) ? GTU : GT,
                               unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
                                   operand_subword_force (op1, i, mode),
                                   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                                   word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
\f
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */
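  /* Example (added for clarity): for a DImode OP0 on a 32-bit target,
     this computes PART = low_word | high_word and tests PART against
     zero once, instead of branching on each word in turn.  */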

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
                                   NULL_RTX, 0);

      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp == const0_rtx)
        emit_jump (if_true_label);
      else
        do_jump_for_compare (comp, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
                                                          GET_MODE (op0)),
                                   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
        abort ();

      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = get_last_insn ();
      rtx branch = 0;

      /* Output the branch with the opposite condition.  Then try to invert
         what is generated.  If more than one insn is a branch, or if the
         branch is not the last insn written, abort.  If we can't invert
         the branch, make a true label, redirect this jump to it, emit a
         jump to the false label, and define the true label.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
        abort ();

      /* Here we get the first insn that was just emitted.  It used to be the
         case that, on some machines, emitting the branch would discard
         the previous compare insn and emit a replacement.  This isn't
         done anymore, but abort if we see that PREV is deleted.  */

      if (prev == 0)
        insn = get_insns ();
      else if (INSN_DELETED_P (prev))
        abort ();
      else
        insn = NEXT_INSN (prev);

      for (; insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == JUMP_INSN)
          {
            if (branch)
              abort ();
            branch = insn;
          }

      if (branch != get_last_insn ())
        abort ();

      JUMP_LABEL (branch) = if_false_label;
      if (! invert_jump (branch, if_false_label))
        {
          if_true_label = gen_label_rtx ();
          redirect_jump (branch, if_true_label);
          emit_jump (if_false_label);
          emit_label (if_true_label);
        }
    }
}
\f
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}

/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
\f
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
              < TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
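
  /* Example (added for clarity): `(x & 8) != 0' is computed above as
     `(x >> 3) & 1', a shift and an AND instead of a compare and a
     set-flag insn; for `(x & 8) == 0' the shifted value is also
     XORed with 1, inverting the tested bit.  */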

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
\f
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
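  /* Example (added for clarity): for `switch (i)' with cases 5 through
     10, INDEX arrives here as i - 5 and RANGE is 5, so the single
     unsigned test `(unsigned) (i - 5) > 5' rejects both i < 5 (which
     wrapped around to a huge unsigned value) and i > 10 with one
     branch to DEFAULT_LABEL.  */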

  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bgtu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */