/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "defaults.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
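/* For example, CEIL (7, 4) is 2: seven bytes occupy two 4-byte words,
   whereas plain integer division would yield 1.  */
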
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
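/* E.g. on a target with a 64-bit STACK_BOUNDARY and 8-bit units,
   STACK_BYTES is 64 / 8 = 8.  */
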
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;
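/* Callers normally adjust inhibit_defer_pop through the NO_DEFER_POP and
   OK_DEFER_POP macros in expr.h, which respectively increment and
   decrement it around regions where popping must not be deferred.  */
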
/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* Don't check memory usage, since code is being emitted to check memory
   usage.  Used when flag_check_memory_usage is true, to avoid infinite
   recursion.  */
static int in_check_memory_usage;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};
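/* In the structure above, TO/FROM are the BLKmode MEMs being copied and
   TO_ADDR/FROM_ADDR their address rtx; AUTINC_* record whether an address
   already uses an auto-increment addressing mode, while EXPLICIT_INC_* is
   -1, 0 or 1 according to whether move_by_pieces_1 must emit an explicit
   decrement, nothing, or an explicit increment itself; LEN is the number
   of bytes still to move, OFFSET the current displacement, and REVERSE
   nonzero to copy from the high end downward.  */
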
/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;

static rtx get_push_address PROTO ((int));

static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
				    struct move_by_pieces *));
static void clear_by_pieces PROTO((rtx, int, int));
static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
				     struct clear_by_pieces *));
static int is_zeros_p PROTO((tree));
static int mostly_zeros_p PROTO((tree));
static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
					    tree, tree, int));
static void store_constructor PROTO((tree, rtx, int));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PROTO((enum expand_modifier));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree, int));
static int fixed_type_p PROTO((tree));
static rtx var_rtx PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
				 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int, int));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
extern tree truthvalue_conversion PROTO((tree));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            reg = gen_rtx_REG (mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  end_sequence ();
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var),
                                  var, NULL_RTX, NULL_RTX, body,
                                  pending_chain);
  return pending_chain;
}
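/* Illustrative sketch only (the real callers are expand_increment and
   friends below): to expand a postincrement such as "b++", one would
   queue the increment and keep using the old value, roughly

     rtx q = enqueue_insn (b, gen_add2_insn (b, const1_rtx));
     ... use protect_from_queue (q, 0) wherever "b" is needed ...
     emit_queue ();

   after which the queued add insn has actually been emitted.  */
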

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

          MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
          RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
          MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (new));
              emit_insn_before (gen_move_insn (temp, new),
                                QUEUED_INSN (y));
              return temp;
            }
          return new;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        {
          rtx tem = protect_from_queue (XEXP (x, 0), 0);
          if (tem != XEXP (x, 0))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = tem;
            }
        }
      else if (code == PLUS || code == MULT)
        {
          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);
          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = new0;
              XEXP (x, 1) = new1;
            }
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
        {
          /* Try converting directly if the insn is supported.  */
          if ((code = can_extend_p (to_mode, from_mode, 0))
              != CODE_FOR_nothing)
            {
              emit_unop_insn (code, to, from, UNKNOWN);
              return;
            }
        }

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif

      libcall = (rtx) 0;
      switch (from_mode)
        {
        case SFmode:
          switch (to_mode)
            {
            case DFmode:
              libcall = extendsfdf2_libfunc;
              break;

            case XFmode:
              libcall = extendsfxf2_libfunc;
              break;

            case TFmode:
              libcall = extendsftf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case DFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncdfsf2_libfunc;
              break;

            case XFmode:
              libcall = extenddfxf2_libfunc;
              break;

            case TFmode:
              libcall = extenddftf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case XFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncxfsf2_libfunc;
              break;

            case DFmode:
              libcall = truncxfdf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case TFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = trunctfsf2_libfunc;
              break;

            case DFmode:
              libcall = trunctfdf2_libfunc;
              break;

            default:
              break;
            }
          break;

        default:
          break;
        }

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
                                       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
        from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
        {
          emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
        {
          from = convert_to_mode (QImode, from, unsignedp);
          from_mode = QImode;
        }
      else
        {
#ifdef HAVE_extendpqiqi2
          if (HAVE_extendpqiqi2)
            {
              emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpqiqi2 */
          abort ();
        }
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
        {
          emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi2
          if (HAVE_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi2 */
          abort ();
        }
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
        from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
        {
          emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
        {
          from = convert_to_mode (DImode, from, unsignedp);
          from_mode = DImode;
        }
      else
        {
#ifdef HAVE_extendpdidi2
          if (HAVE_extendpdidi2)
            {
              emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpdidi2 */
          abort ();
        }
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.  */
          abort ();
        }
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
        {
          emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
        {
          emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
        {
          emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
        {
          emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
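/* A typical use of convert_move (a hedged sketch; SRC is assumed to be an
   existing SImode rtx):

     rtx dst = gen_reg_rtx (DImode);
     convert_move (dst, src, 0);

   On a 32-bit target this goes through the multiword expansion above,
   sign-filling the upper word; passing UNSIGNEDP as 1 would zero-fill
   it instead.  */
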

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return GEN_INT (val);
        }

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
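/* For instance, convert_modes (SImode, QImode, GEN_INT (-1), 1) yields
   (const_int 255): the QImode constant is zero-extended by the CONST_INT
   handling above, as the unsigned conversion requires, rather than being
   sign-extended.  */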
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
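/* For example, on a target with SImode, HImode and QImode move patterns,
   MOVE_MAX of 4 and ALIGN of 4, a 7-byte block costs 3 insns: one SImode
   move (4 bytes), one HImode move (2 bytes) and one QImode move (1 byte).  */
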
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx_MEM (mode, data->to_addr)
             : copy_rtx (change_address (data->to, mode,
                                         plus_constant (data->to_addr,
                                                        data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
        = (data->autinc_from
           ? gen_rtx_MEM (mode, data->from_addr)
           : copy_rtx (change_address (data->from, mode,
                                       plus_constant (data->from_addr,
                                                      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        {
          enum insn_code code = movstr_optab[(int) mode];

          if (code != CODE_FOR_nothing
              /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
                 here because if SIZE is less than the mode mask, as it is
                 returned by the macro, it will definitely be less than the
                 actual mode mask.  */
              && ((GET_CODE (size) == CONST_INT
                   && ((unsigned HOST_WIDE_INT) INTVAL (size)
                       <= (GET_MODE_MASK (mode) >> 1)))
                  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
              && (insn_operand_predicate[(int) code][0] == 0
                  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
              && (insn_operand_predicate[(int) code][1] == 0
                  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
              && (insn_operand_predicate[(int) code][3] == 0
                  || (*insn_operand_predicate[(int) code][3]) (opalign,
                                                               VOIDmode)))
            {
              rtx op2;
              rtx last = get_last_insn ();
              rtx pat;

              op2 = convert_to_mode (mode, size, 1);
              if (insn_operand_predicate[(int) code][2] != 0
                  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);
              if (pat)
                {
                  emit_insn (pat);
                  return 0;
                }
              else
                delete_insns_since (last);
            }
        }

#ifdef TARGET_MEM_FUNCTIONS
      retval
        = emit_library_call_value (memcpy_libfunc, NULL_RTX, 0,
                                   ptr_mode, 3, XEXP (x, 0), Pmode,
                                   XEXP (y, 0), Pmode,
                                   convert_to_mode (TYPE_MODE (sizetype), size,
                                                    TREE_UNSIGNED (sizetype)),
                                   TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
                         VOIDmode, 3, XEXP (y, 0), Pmode,
                         XEXP (x, 0), Pmode,
                         convert_to_mode (TYPE_MODE (integer_type_node), size,
                                          TREE_UNSIGNED (integer_type_node)),
                         TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
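/* A typical call (sketch; DST and SRC are assumed to be word-aligned
   BLKmode MEMs) is

     emit_block_move (dst, src, GEN_INT (size), UNITS_PER_WORD);

   Small constant sizes are expanded inline through move_by_pieces;
   larger or variable sizes use a movstr pattern if one matches, else
   the memcpy/bcopy library call above.  */
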
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
                      gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
        abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
                            gen_rtx_REG (word_mode, regno),
                            build_int_2 ((UNITS_PER_WORD - size)
                                         * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
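/* For instance, with 4-byte words on a BYTES_BIG_ENDIAN target, a 3-byte
   value arrives right-justified in the register; the shift above moves it
   left by (4 - 3) * BITS_PER_UNIT = 8 bits so that its bytes land at the
   start of the word image stored to memory.  */
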
1806
fffa9c1d
JW
1807/* Emit code to move a block Y to a block X, where X is non-consecutive
1808 registers represented by a PARALLEL. */
1809
1810void
1811emit_group_load (x, y)
1812 rtx x, y;
1813{
1814 rtx target_reg, source;
1815 int i;
1816
1817 if (GET_CODE (x) != PARALLEL)
1818 abort ();
1819
1820 /* Check for a NULL entry, used to indicate that the parameter goes
1821 both on the stack and in registers. */
1822 if (XEXP (XVECEXP (x, 0, 0), 0))
1823 i = 0;
1824 else
1825 i = 1;
1826
1827 for (; i < XVECLEN (x, 0); i++)
1828 {
1829 rtx element = XVECEXP (x, 0, i);
1830
1831 target_reg = XEXP (element, 0);
1832
1833 if (GET_CODE (y) == MEM)
1834 source = change_address (y, GET_MODE (target_reg),
1835 plus_constant (XEXP (y, 0),
1836 INTVAL (XEXP (element, 1))));
1837 else if (XEXP (element, 1) == const0_rtx)
1838 {
1839 if (GET_MODE (target_reg) == GET_MODE (y))
1840 source = y;
eaa9b4d9
MM
1841 /* Allow for the target_reg to be smaller than the input register
1842 to allow for AIX with 4 DF arguments after a single SI arg. The
1843 last DF argument will only load 1 word into the integer registers,
1844 but load a DF value into the float registers. */
aff4d29b
JW
1845 else if ((GET_MODE_SIZE (GET_MODE (target_reg))
1846 <= GET_MODE_SIZE (GET_MODE (y)))
1847 && GET_MODE (target_reg) == word_mode)
1848 /* This might be a const_double, so we can't just use SUBREG. */
1849 source = operand_subword (y, 0, 0, VOIDmode);
d7d775a0
JW
1850 else if (GET_MODE_SIZE (GET_MODE (target_reg))
1851 == GET_MODE_SIZE (GET_MODE (y)))
1852 source = gen_lowpart (GET_MODE (target_reg), y);
fffa9c1d
JW
1853 else
1854 abort ();
1855 }
1856 else
1857 abort ();
1858
1859 emit_move_insn (target_reg, source);
1860 }
1861}
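/* For illustration, a PARALLEL destination for emit_group_load might
   look like (register numbers hypothetical)

     (parallel [(expr_list (reg:DF 40) (const_int 0))
                (expr_list (reg:DI 5) (const_int 8))])

   where each element pairs a target register with the byte offset of
   its piece within Y, so the DF part is loaded from offset 0 and the
   DI part from offset 8.  */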
1862
1863/* Emit code to move a block Y to a block X, where Y is non-consecutive
1864 registers represented by a PARALLEL. */
1865
1866void
1867emit_group_store (x, y)
1868 rtx x, y;
1869{
1870 rtx source_reg, target;
1871 int i;
1872
1873 if (GET_CODE (y) != PARALLEL)
1874 abort ();
1875
1876 /* Check for a NULL entry, used to indicate that the parameter goes
1877 both on the stack and in registers. */
1878 if (XEXP (XVECEXP (y, 0, 0), 0))
1879 i = 0;
1880 else
1881 i = 1;
1882
1883 for (; i < XVECLEN (y, 0); i++)
1884 {
1885 rtx element = XVECEXP (y, 0, i);
1886
1887 source_reg = XEXP (element, 0);
1888
1889 if (GET_CODE (x) == MEM)
1890 target = change_address (x, GET_MODE (source_reg),
1891 plus_constant (XEXP (x, 0),
1892 INTVAL (XEXP (element, 1))));
1893 else if (XEXP (element, 1) == const0_rtx)
71bc0330
JW
1894 {
1895 target = x;
1896 if (GET_MODE (target) != GET_MODE (source_reg))
1897 target = gen_lowpart (GET_MODE (source_reg), target);
1898 }
fffa9c1d
JW
1899 else
1900 abort ();
1901
1902 emit_move_insn (target, source_reg);
1903 }
1904}
1905
94b25f81
RK
1906/* Add a USE expression for REG to the (possibly empty) list pointed
1907 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
1908
1909void
b3f8cf4a
RK
1910use_reg (call_fusage, reg)
1911 rtx *call_fusage, reg;
1912{
0304dfbb
DE
1913 if (GET_CODE (reg) != REG
1914 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
b3f8cf4a
RK
1915 abort();
1916
1917 *call_fusage
38a448ca
RH
1918 = gen_rtx_EXPR_LIST (VOIDmode,
1919 gen_rtx_USE (VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
1920}
1921
94b25f81
RK
1922/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1923 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
1924
1925void
0304dfbb
DE
1926use_regs (call_fusage, regno, nregs)
1927 rtx *call_fusage;
bbf6f052
RK
1928 int regno;
1929 int nregs;
1930{
0304dfbb 1931 int i;
bbf6f052 1932
0304dfbb
DE
1933 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1934 abort ();
1935
1936 for (i = 0; i < nregs; i++)
38a448ca 1937 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
bbf6f052 1938}
fffa9c1d
JW
1939
1940/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1941 PARALLEL REGS. This is for calls that pass values in multiple
1942 non-contiguous locations. The Irix 6 ABI has examples of this. */
1943
1944void
1945use_group_regs (call_fusage, regs)
1946 rtx *call_fusage;
1947 rtx regs;
1948{
1949 int i;
1950
6bd35f86
DE
1951 for (i = 0; i < XVECLEN (regs, 0); i++)
1952 {
1953 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 1954
6bd35f86
DE
1955 /* A NULL entry means the parameter goes both on the stack and in
1956 registers. This can also be a MEM for targets that pass values
1957 partially on the stack and partially in registers. */
e9a25f70 1958 if (reg != 0 && GET_CODE (reg) == REG)
6bd35f86
DE
1959 use_reg (call_fusage, reg);
1960 }
fffa9c1d 1961}
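/* As a sketch, given a PARALLEL whose elements name (reg:DF 40) and
   (reg:DI 5) (numbers hypothetical), this appends (use (reg:DF 40))
   and (use (reg:DI 5)) to *CALL_FUSAGE, while NULL and MEM entries,
   which describe pieces passed on the stack, are skipped.  */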
bbf6f052 1962\f
9de08200
RK
1963/* Generate several move instructions to clear LEN bytes of block TO.
1964 (A MEM rtx with BLKmode). The caller must pass TO through
1965 protect_from_queue before calling. ALIGN (in bytes) is the maximum alignment
1966 we can assume. */
1967
1968static void
1969clear_by_pieces (to, len, align)
1970 rtx to;
1971 int len, align;
1972{
1973 struct clear_by_pieces data;
1974 rtx to_addr = XEXP (to, 0);
1975 int max_size = MOVE_MAX + 1;
1976
1977 data.offset = 0;
1978 data.to_addr = to_addr;
1979 data.to = to;
1980 data.autinc_to
1981 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1982 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1983
1984 data.explicit_inc_to = 0;
1985 data.reverse
1986 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1987 if (data.reverse) data.offset = len;
1988 data.len = len;
1989
1990 data.to_struct = MEM_IN_STRUCT_P (to);
1991
1992 /* If clearing requires more than two move insns,
1993 copy addresses to registers (to make displacements shorter)
1994 and use post-increment if available. */
1995 if (!data.autinc_to
1996 && move_by_pieces_ninsns (len, align) > 2)
1997 {
1998#ifdef HAVE_PRE_DECREMENT
1999 if (data.reverse && ! data.autinc_to)
2000 {
2001 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2002 data.autinc_to = 1;
2003 data.explicit_inc_to = -1;
2004 }
2005#endif
2006#ifdef HAVE_POST_INCREMENT
2007 if (! data.reverse && ! data.autinc_to)
2008 {
2009 data.to_addr = copy_addr_to_reg (to_addr);
2010 data.autinc_to = 1;
2011 data.explicit_inc_to = 1;
2012 }
2013#endif
2014 if (!data.autinc_to && CONSTANT_P (to_addr))
2015 data.to_addr = copy_addr_to_reg (to_addr);
2016 }
2017
2018 if (! SLOW_UNALIGNED_ACCESS
2019 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2020 align = MOVE_MAX;
2021
2022 /* First move what we can in the largest integer mode, then go to
2023 successively smaller modes. */
2024
2025 while (max_size > 1)
2026 {
2027 enum machine_mode mode = VOIDmode, tmode;
2028 enum insn_code icode;
2029
2030 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2031 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2032 if (GET_MODE_SIZE (tmode) < max_size)
2033 mode = tmode;
2034
2035 if (mode == VOIDmode)
2036 break;
2037
2038 icode = mov_optab->handlers[(int) mode].insn_code;
2039 if (icode != CODE_FOR_nothing
2040 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2041 GET_MODE_SIZE (mode)))
2042 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2043
2044 max_size = GET_MODE_SIZE (mode);
2045 }
2046
2047 /* The code above should have handled everything. */
2048 if (data.len != 0)
2049 abort ();
2050}
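/* A worked example, assuming MOVE_MAX == 4 and a word-aligned target:
   clearing LEN == 7 bytes emits one SImode store of const0_rtx
   (4 bytes) on the first pass, one HImode store (2 bytes) on the
   second, and one QImode store (1 byte) on the last, leaving
   data.len == 0 as the final check requires.  */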
2051
2052/* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2053 with move instructions for mode MODE. GENFUN is the gen_... function
2054 to make a move insn for that mode. DATA has all the other info. */
2055
2056static void
2057clear_by_pieces_1 (genfun, mode, data)
eae4b970 2058 rtx (*genfun) PROTO ((rtx, ...));
9de08200
RK
2059 enum machine_mode mode;
2060 struct clear_by_pieces *data;
2061{
2062 register int size = GET_MODE_SIZE (mode);
2063 register rtx to1;
2064
2065 while (data->len >= size)
2066 {
2067 if (data->reverse) data->offset -= size;
2068
2069 to1 = (data->autinc_to
38a448ca 2070 ? gen_rtx_MEM (mode, data->to_addr)
effbcc6a
RK
2071 : copy_rtx (change_address (data->to, mode,
2072 plus_constant (data->to_addr,
2073 data->offset))));
9de08200
RK
2074 MEM_IN_STRUCT_P (to1) = data->to_struct;
2075
2076#ifdef HAVE_PRE_DECREMENT
2077 if (data->explicit_inc_to < 0)
2078 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2079#endif
2080
2081 emit_insn ((*genfun) (to1, const0_rtx));
2082#ifdef HAVE_POST_INCREMENT
2083 if (data->explicit_inc_to > 0)
2084 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2085#endif
2086
2087 if (! data->reverse) data->offset += size;
2088
2089 data->len -= size;
2090 }
2091}
2092\f
bbf6f052 2093/* Write zeros through the storage of OBJECT.
9de08200 2094 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
e9a25f70 2095 the maximum alignment we can assume it has, measured in bytes.
bbf6f052 2096
e9a25f70
JL
2097 If we call a function that returns the length of the block, return it. */
2098
2099rtx
9de08200 2100clear_storage (object, size, align)
bbf6f052 2101 rtx object;
4c08eef0 2102 rtx size;
9de08200 2103 int align;
bbf6f052 2104{
e9a25f70
JL
2105 rtx retval = 0;
2106
bbf6f052
RK
2107 if (GET_MODE (object) == BLKmode)
2108 {
9de08200
RK
2109 object = protect_from_queue (object, 1);
2110 size = protect_from_queue (size, 0);
2111
2112 if (GET_CODE (size) == CONST_INT
2113 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2114 clear_by_pieces (object, INTVAL (size), align);
2115
2116 else
2117 {
2118 /* Try the most limited insn first, because there's no point
2119 including more than one in the machine description unless
2120 the more limited one has some advantage. */
2121
2122 rtx opalign = GEN_INT (align);
2123 enum machine_mode mode;
2124
2125 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2126 mode = GET_MODE_WIDER_MODE (mode))
2127 {
2128 enum insn_code code = clrstr_optab[(int) mode];
2129
2130 if (code != CODE_FOR_nothing
2131 /* We don't need MODE to be narrower than
2132 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2133 the mode mask, as it is returned by the macro, it will
2134 definitely be less than the actual mode mask. */
2135 && ((GET_CODE (size) == CONST_INT
2136 && ((unsigned HOST_WIDE_INT) INTVAL (size)
e5e809f4 2137 <= (GET_MODE_MASK (mode) >> 1)))
9de08200
RK
2138 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2139 && (insn_operand_predicate[(int) code][0] == 0
2140 || (*insn_operand_predicate[(int) code][0]) (object,
2141 BLKmode))
2142 && (insn_operand_predicate[(int) code][2] == 0
2143 || (*insn_operand_predicate[(int) code][2]) (opalign,
2144 VOIDmode)))
2145 {
2146 rtx op1;
2147 rtx last = get_last_insn ();
2148 rtx pat;
2149
2150 op1 = convert_to_mode (mode, size, 1);
2151 if (insn_operand_predicate[(int) code][1] != 0
2152 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2153 mode))
2154 op1 = copy_to_mode_reg (mode, op1);
2155
2156 pat = GEN_FCN ((int) code) (object, op1, opalign);
2157 if (pat)
2158 {
2159 emit_insn (pat);
e9a25f70 2160 return 0;
9de08200
RK
2161 }
2162 else
2163 delete_insns_since (last);
2164 }
2165 }
2166
2167
bbf6f052 2168#ifdef TARGET_MEM_FUNCTIONS
e9a25f70
JL
2169 retval
2170 = emit_library_call_value (memset_libfunc, NULL_RTX, 0,
2171 ptr_mode, 3,
2172 XEXP (object, 0), Pmode,
2173 const0_rtx,
2174 TYPE_MODE (integer_type_node),
2175 convert_to_mode
2176 (TYPE_MODE (sizetype), size,
2177 TREE_UNSIGNED (sizetype)),
2178 TYPE_MODE (sizetype));
bbf6f052 2179#else
9de08200
RK
2180 emit_library_call (bzero_libfunc, 0,
2181 VOIDmode, 2,
2182 XEXP (object, 0), Pmode,
e9a25f70
JL
2183 convert_to_mode
2184 (TYPE_MODE (integer_type_node), size,
2185 TREE_UNSIGNED (integer_type_node)),
9de08200 2186 TYPE_MODE (integer_type_node));
bbf6f052 2187#endif
9de08200 2188 }
bbf6f052
RK
2189 }
2190 else
66ed0683 2191 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
e9a25f70
JL
2192
2193 return retval;
bbf6f052
RK
2194}
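/* A minimal caller-side sketch: to zero a word-aligned BLKmode
   temporary TEM of LEN bytes, one would write

     clear_storage (tem, GEN_INT (len), UNITS_PER_WORD);

   Small constant lengths then go through clear_by_pieces; larger or
   variable ones go through a clrstr pattern or the memset/bzero
   library call above.  */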
2195
2196/* Generate code to copy Y into X.
2197 Both Y and X must have the same mode, except that
2198 Y can be a constant with VOIDmode.
2199 This mode cannot be BLKmode; use emit_block_move for that.
2200
2201 Return the last instruction emitted. */
2202
2203rtx
2204emit_move_insn (x, y)
2205 rtx x, y;
2206{
2207 enum machine_mode mode = GET_MODE (x);
bbf6f052
RK
2208
2209 x = protect_from_queue (x, 1);
2210 y = protect_from_queue (y, 0);
2211
2212 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2213 abort ();
2214
2215 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2216 y = force_const_mem (mode, y);
2217
2218 /* If X or Y are memory references, verify that their addresses are valid
2219 for the machine. */
2220 if (GET_CODE (x) == MEM
2221 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2222 && ! push_operand (x, GET_MODE (x)))
2223 || (flag_force_addr
2224 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2225 x = change_address (x, VOIDmode, XEXP (x, 0));
2226
2227 if (GET_CODE (y) == MEM
2228 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2229 || (flag_force_addr
2230 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2231 y = change_address (y, VOIDmode, XEXP (y, 0));
2232
2233 if (mode == BLKmode)
2234 abort ();
2235
261c4230
RS
2236 return emit_move_insn_1 (x, y);
2237}
2238
2239/* Low level part of emit_move_insn.
2240 Called just like emit_move_insn, but assumes X and Y
2241 are basically valid. */
2242
2243rtx
2244emit_move_insn_1 (x, y)
2245 rtx x, y;
2246{
2247 enum machine_mode mode = GET_MODE (x);
2248 enum machine_mode submode;
2249 enum mode_class class = GET_MODE_CLASS (mode);
2250 int i;
2251
bbf6f052
RK
2252 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2253 return
2254 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2255
89742723 2256 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 2257 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
d0c76654
RK
2258 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2259 * BITS_PER_UNIT),
2260 (class == MODE_COMPLEX_INT
2261 ? MODE_INT : MODE_FLOAT),
2262 0))
7308a047
RS
2263 && (mov_optab->handlers[(int) submode].insn_code
2264 != CODE_FOR_nothing))
2265 {
2266 /* Don't split destination if it is a stack push. */
2267 int stack = push_operand (x, GET_MODE (x));
7308a047 2268
7308a047
RS
2269 /* If this is a stack, push the highpart first, so it
2270 will be in the argument order.
2271
2272 In that case, change_address is used only to convert
2273 the mode, not to change the address. */
c937357e
RS
2274 if (stack)
2275 {
e33c0d66
RS
2276 /* Note that the real part always precedes the imag part in memory
2277 regardless of machine's endianness. */
c937357e
RS
2278#ifdef STACK_GROWS_DOWNWARD
2279 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2280 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2281 gen_imagpart (submode, y)));
c937357e 2282 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2283 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2284 gen_realpart (submode, y)));
c937357e
RS
2285#else
2286 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2287 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2288 gen_realpart (submode, y)));
c937357e 2289 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2290 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2291 gen_imagpart (submode, y)));
c937357e
RS
2292#endif
2293 }
2294 else
2295 {
2296 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
976ff203 2297 (gen_realpart (submode, x), gen_realpart (submode, y)));
c937357e 2298 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
976ff203 2299 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
c937357e 2300 }
7308a047 2301
7a1ab50a 2302 return get_last_insn ();
7308a047
RS
2303 }
2304
bbf6f052
RK
2305 /* This will handle any multi-word mode that lacks a move_insn pattern.
2306 However, you will get better code if you define such patterns,
2307 even if they must turn into multiple assembler instructions. */
a4320483 2308 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
bbf6f052
RK
2309 {
2310 rtx last_insn = 0;
6551fa4d 2311
a98c9f1a
RK
2312#ifdef PUSH_ROUNDING
2313
2314 /* If X is a push on the stack, do the push now and replace
2315 X with a reference to the stack pointer. */
2316 if (push_operand (x, GET_MODE (x)))
2317 {
2318 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2319 x = change_address (x, VOIDmode, stack_pointer_rtx);
2320 }
2321#endif
2322
15a7a8ec 2323 /* Show the output dies here. */
43e046cb 2324 if (x != y)
38a448ca 2325 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
15a7a8ec 2326
bbf6f052
RK
2327 for (i = 0;
2328 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2329 i++)
2330 {
2331 rtx xpart = operand_subword (x, i, 1, mode);
2332 rtx ypart = operand_subword (y, i, 1, mode);
2333
2334 /* If we can't get a part of Y, put Y into memory if it is a
2335 constant. Otherwise, force it into a register. If we still
2336 can't get a part of Y, abort. */
2337 if (ypart == 0 && CONSTANT_P (y))
2338 {
2339 y = force_const_mem (mode, y);
2340 ypart = operand_subword (y, i, 1, mode);
2341 }
2342 else if (ypart == 0)
2343 ypart = operand_subword_force (y, i, mode);
2344
2345 if (xpart == 0 || ypart == 0)
2346 abort ();
2347
2348 last_insn = emit_move_insn (xpart, ypart);
2349 }
6551fa4d 2350
bbf6f052
RK
2351 return last_insn;
2352 }
2353 else
2354 abort ();
2355}
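/* For example, moving an SCmode value on a machine with no SCmode
   move pattern but an SFmode one splits into two SFmode moves, one
   for the real part and one for the imaginary part; for a stack push
   the high part is pushed first so the two parts end up in argument
   order in memory.  */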
2356\f
2357/* Pushing data onto the stack. */
2358
2359/* Push a block of length SIZE (perhaps variable)
2360 and return an rtx to address the beginning of the block.
2361 Note that it is not possible for the value returned to be a QUEUED.
2362 The value may be virtual_outgoing_args_rtx.
2363
2364 EXTRA is the number of bytes of padding to push in addition to SIZE.
2365 BELOW nonzero means this padding comes at low addresses;
2366 otherwise, the padding comes at high addresses. */
2367
2368rtx
2369push_block (size, extra, below)
2370 rtx size;
2371 int extra, below;
2372{
2373 register rtx temp;
88f63c77
RK
2374
2375 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
2376 if (CONSTANT_P (size))
2377 anti_adjust_stack (plus_constant (size, extra));
2378 else if (GET_CODE (size) == REG && extra == 0)
2379 anti_adjust_stack (size);
2380 else
2381 {
2382 rtx temp = copy_to_mode_reg (Pmode, size);
2383 if (extra != 0)
906c4e36 2384 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
2385 temp, 0, OPTAB_LIB_WIDEN);
2386 anti_adjust_stack (temp);
2387 }
2388
2389#ifdef STACK_GROWS_DOWNWARD
2390 temp = virtual_outgoing_args_rtx;
2391 if (extra != 0 && below)
2392 temp = plus_constant (temp, extra);
2393#else
2394 if (GET_CODE (size) == CONST_INT)
2395 temp = plus_constant (virtual_outgoing_args_rtx,
2396 - INTVAL (size) - (below ? 0 : extra));
2397 else if (extra != 0 && !below)
38a448ca 2398 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
bbf6f052
RK
2399 negate_rtx (Pmode, plus_constant (size, extra)));
2400 else
38a448ca 2401 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
bbf6f052
RK
2402 negate_rtx (Pmode, size));
2403#endif
2404
2405 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2406}
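/* A worked example, assuming STACK_GROWS_DOWNWARD, a constant SIZE of
   16 and EXTRA == 8: the stack pointer is adjusted by all 24 bytes at
   once, and the address returned is virtual_outgoing_args_rtx,
   advanced by 8 when BELOW is nonzero so that the padding occupies
   the low end of the block.  */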
2407
87e38d84 2408rtx
bbf6f052
RK
2409gen_push_operand ()
2410{
38a448ca 2411 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
bbf6f052
RK
2412}
2413
921b3427
RK
2414/* Return an rtx for the address of the beginning of an as-if-it-were-pushed
2415 block of SIZE bytes. */
2416
2417static rtx
2418get_push_address (size)
2419 int size;
2420{
2421 register rtx temp;
2422
2423 if (STACK_PUSH_CODE == POST_DEC)
38a448ca 2424 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
921b3427 2425 else if (STACK_PUSH_CODE == POST_INC)
38a448ca 2426 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
921b3427
RK
2427 else
2428 temp = stack_pointer_rtx;
2429
c85f7c16 2430 return copy_to_reg (temp);
921b3427
RK
2431}
2432
bbf6f052
RK
2433/* Generate code to push X onto the stack, assuming it has mode MODE and
2434 type TYPE.
2435 MODE is redundant except when X is a CONST_INT (since they don't
2436 carry mode info).
2437 SIZE is an rtx for the size of data to be copied (in bytes),
2438 needed only if X is BLKmode.
2439
2440 ALIGN (in bytes) is the maximum alignment we can assume.
2441
cd048831
RK
2442 If PARTIAL and REG are both nonzero, then copy that many of the first
2443 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
2444 The amount of space pushed is decreased by PARTIAL words,
2445 rounded *down* to a multiple of PARM_BOUNDARY.
2446 REG must be a hard register in this case.
cd048831
RK
2447 If REG is zero but PARTIAL is not, take all other actions for an
2448 argument partially in registers, but do not actually load any
2449 registers.
bbf6f052
RK
2450
2451 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 2452 This is ignored if an argument block has already been allocated.
bbf6f052
RK
2453
2454 On a machine that lacks real push insns, ARGS_ADDR is the address of
2455 the bottom of the argument block for this call. We use indexing off there
2456 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2457 argument block has not been preallocated.
2458
e5e809f4
JL
2459 ARGS_SO_FAR is the size of args previously pushed for this call.
2460
2461 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2462 for arguments passed in registers. If nonzero, it will be the number
2463 of bytes required. */
bbf6f052
RK
2464
2465void
2466emit_push_insn (x, mode, type, size, align, partial, reg, extra,
e5e809f4 2467 args_addr, args_so_far, reg_parm_stack_space)
bbf6f052
RK
2468 register rtx x;
2469 enum machine_mode mode;
2470 tree type;
2471 rtx size;
2472 int align;
2473 int partial;
2474 rtx reg;
2475 int extra;
2476 rtx args_addr;
2477 rtx args_so_far;
e5e809f4 2478 int reg_parm_stack_space;
bbf6f052
RK
2479{
2480 rtx xinner;
2481 enum direction stack_direction
2482#ifdef STACK_GROWS_DOWNWARD
2483 = downward;
2484#else
2485 = upward;
2486#endif
2487
2488 /* Decide where to pad the argument: `downward' for below,
2489 `upward' for above, or `none' for don't pad it.
2490 Default is below for small data on big-endian machines; else above. */
2491 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2492
2493 /* Invert direction if stack is post-update. */
2494 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2495 if (where_pad != none)
2496 where_pad = (where_pad == downward ? upward : downward);
2497
2498 xinner = x = protect_from_queue (x, 0);
2499
2500 if (mode == BLKmode)
2501 {
2502 /* Copy a block into the stack, entirely or partially. */
2503
2504 register rtx temp;
2505 int used = partial * UNITS_PER_WORD;
2506 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2507 int skip;
2508
2509 if (size == 0)
2510 abort ();
2511
2512 used -= offset;
2513
2514 /* USED is now the # of bytes we need not copy to the stack
2515 because registers will take care of them. */
2516
2517 if (partial != 0)
2518 xinner = change_address (xinner, BLKmode,
2519 plus_constant (XEXP (xinner, 0), used));
2520
2521 /* If the partial register-part of the arg counts in its stack size,
2522 skip the part of stack space corresponding to the registers.
2523 Otherwise, start copying to the beginning of the stack space,
2524 by setting SKIP to 0. */
e5e809f4 2525 skip = (reg_parm_stack_space == 0) ? 0 : used;
bbf6f052
RK
2526
2527#ifdef PUSH_ROUNDING
2528 /* Do it with several push insns if that doesn't take lots of insns
2529 and if there is no difficulty with push insns that skip bytes
2530 on the stack for alignment purposes. */
2531 if (args_addr == 0
2532 && GET_CODE (size) == CONST_INT
2533 && skip == 0
2534 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2535 < MOVE_RATIO)
bbf6f052
RK
2536 /* Here we avoid the case of a structure whose weak alignment
2537 forces many pushes of a small amount of data,
2538 and such small pushes do rounding that causes trouble. */
c7a7ac46 2539 && ((! SLOW_UNALIGNED_ACCESS)
e87b4f3f 2540 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
bbf6f052 2541 || PUSH_ROUNDING (align) == align)
bbf6f052
RK
2542 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2543 {
2544 /* Push padding now if padding above and stack grows down,
2545 or if padding below and stack grows up.
2546 But if space already allocated, this has already been done. */
2547 if (extra && args_addr == 0
2548 && where_pad != none && where_pad != stack_direction)
906c4e36 2549 anti_adjust_stack (GEN_INT (extra));
bbf6f052 2550
38a448ca 2551 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
bbf6f052 2552 INTVAL (size) - used, align);
921b3427 2553
956d6950 2554 if (flag_check_memory_usage && ! in_check_memory_usage)
921b3427
RK
2555 {
2556 rtx temp;
2557
956d6950 2558 in_check_memory_usage = 1;
921b3427 2559 temp = get_push_address (INTVAL(size) - used);
c85f7c16 2560 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427
RK
2561 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2562 temp, ptr_mode,
2563 XEXP (xinner, 0), ptr_mode,
2564 GEN_INT (INTVAL(size) - used),
2565 TYPE_MODE (sizetype));
2566 else
2567 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2568 temp, ptr_mode,
2569 GEN_INT (INTVAL(size) - used),
2570 TYPE_MODE (sizetype),
956d6950
JL
2571 GEN_INT (MEMORY_USE_RW),
2572 TYPE_MODE (integer_type_node));
2573 in_check_memory_usage = 0;
921b3427 2574 }
bbf6f052
RK
2575 }
2576 else
2577#endif /* PUSH_ROUNDING */
2578 {
2579 /* Otherwise make space on the stack and copy the data
2580 to the address of that space. */
2581
2582 /* Deduct words put into registers from the size we must copy. */
2583 if (partial != 0)
2584 {
2585 if (GET_CODE (size) == CONST_INT)
906c4e36 2586 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
2587 else
2588 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
2589 GEN_INT (used), NULL_RTX, 0,
2590 OPTAB_LIB_WIDEN);
bbf6f052
RK
2591 }
2592
2593 /* Get the address of the stack space.
2594 In this case, we do not deal with EXTRA separately.
2595 A single stack adjust will do. */
2596 if (! args_addr)
2597 {
2598 temp = push_block (size, extra, where_pad == downward);
2599 extra = 0;
2600 }
2601 else if (GET_CODE (args_so_far) == CONST_INT)
2602 temp = memory_address (BLKmode,
2603 plus_constant (args_addr,
2604 skip + INTVAL (args_so_far)));
2605 else
2606 temp = memory_address (BLKmode,
38a448ca
RH
2607 plus_constant (gen_rtx_PLUS (Pmode,
2608 args_addr,
2609 args_so_far),
bbf6f052 2610 skip));
956d6950 2611 if (flag_check_memory_usage && ! in_check_memory_usage)
921b3427
RK
2612 {
2613 rtx target;
2614
956d6950 2615 in_check_memory_usage = 1;
921b3427 2616 target = copy_to_reg (temp);
c85f7c16 2617 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427
RK
2618 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2619 target, ptr_mode,
2620 XEXP (xinner, 0), ptr_mode,
2621 size, TYPE_MODE (sizetype));
2622 else
2623 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2624 target, ptr_mode,
2625 size, TYPE_MODE (sizetype),
956d6950
JL
2626 GEN_INT (MEMORY_USE_RW),
2627 TYPE_MODE (integer_type_node));
2628 in_check_memory_usage = 0;
921b3427 2629 }
bbf6f052
RK
2630
2631 /* TEMP is the address of the block. Copy the data there. */
2632 if (GET_CODE (size) == CONST_INT
2633 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2634 < MOVE_RATIO))
2635 {
38a448ca 2636 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
bbf6f052
RK
2637 INTVAL (size), align);
2638 goto ret;
2639 }
e5e809f4 2640 else
bbf6f052 2641 {
e5e809f4
JL
2642 rtx opalign = GEN_INT (align);
2643 enum machine_mode mode;
2644 rtx target = gen_rtx (MEM, BLKmode, temp);
2645
2646 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2647 mode != VOIDmode;
2648 mode = GET_MODE_WIDER_MODE (mode))
c841050e 2649 {
e5e809f4
JL
2650 enum insn_code code = movstr_optab[(int) mode];
2651
2652 if (code != CODE_FOR_nothing
2653 && ((GET_CODE (size) == CONST_INT
2654 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2655 <= (GET_MODE_MASK (mode) >> 1)))
2656 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2657 && (insn_operand_predicate[(int) code][0] == 0
2658 || ((*insn_operand_predicate[(int) code][0])
2659 (target, BLKmode)))
2660 && (insn_operand_predicate[(int) code][1] == 0
2661 || ((*insn_operand_predicate[(int) code][1])
2662 (xinner, BLKmode)))
2663 && (insn_operand_predicate[(int) code][3] == 0
2664 || ((*insn_operand_predicate[(int) code][3])
2665 (opalign, VOIDmode))))
2666 {
2667 rtx op2 = convert_to_mode (mode, size, 1);
2668 rtx last = get_last_insn ();
2669 rtx pat;
2670
2671 if (insn_operand_predicate[(int) code][2] != 0
2672 && ! ((*insn_operand_predicate[(int) code][2])
2673 (op2, mode)))
2674 op2 = copy_to_mode_reg (mode, op2);
2675
2676 pat = GEN_FCN ((int) code) (target, xinner,
2677 op2, opalign);
2678 if (pat)
2679 {
2680 emit_insn (pat);
2681 goto ret;
2682 }
2683 else
2684 delete_insns_since (last);
2685 }
c841050e 2686 }
bbf6f052 2687 }
bbf6f052
RK
2688
2689#ifndef ACCUMULATE_OUTGOING_ARGS
2690 /* If the source is referenced relative to the stack pointer,
2691 copy it to another register to stabilize it. We do not need
2692 to do this if we know that we won't be changing sp. */
2693
2694 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2695 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2696 temp = copy_to_reg (temp);
2697#endif
2698
2699 /* Make inhibit_defer_pop nonzero around the library call
2700 to force it to pop the bcopy-arguments right away. */
2701 NO_DEFER_POP;
2702#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2703 emit_library_call (memcpy_libfunc, 0,
bbf6f052 2704 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
0fa83258
RK
2705 convert_to_mode (TYPE_MODE (sizetype),
2706 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2707 TYPE_MODE (sizetype));
bbf6f052 2708#else
d562e42e 2709 emit_library_call (bcopy_libfunc, 0,
bbf6f052 2710 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3b6f75e2
JW
2711 convert_to_mode (TYPE_MODE (integer_type_node),
2712 size,
2713 TREE_UNSIGNED (integer_type_node)),
2714 TYPE_MODE (integer_type_node));
bbf6f052
RK
2715#endif
2716 OK_DEFER_POP;
2717 }
2718 }
2719 else if (partial > 0)
2720 {
2721 /* Scalar partly in registers. */
2722
2723 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2724 int i;
2725 int not_stack;
2726 /* # words of start of argument
2727 that we must make space for but need not store. */
2728 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2729 int args_offset = INTVAL (args_so_far);
2730 int skip;
2731
2732 /* Push padding now if padding above and stack grows down,
2733 or if padding below and stack grows up.
2734 But if space already allocated, this has already been done. */
2735 if (extra && args_addr == 0
2736 && where_pad != none && where_pad != stack_direction)
906c4e36 2737 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2738
2739 /* If we make space by pushing it, we might as well push
2740 the real data. Otherwise, we can leave OFFSET nonzero
2741 and leave the space uninitialized. */
2742 if (args_addr == 0)
2743 offset = 0;
2744
2745 /* Now NOT_STACK gets the number of words that we don't need to
2746 allocate on the stack. */
2747 not_stack = partial - offset;
2748
2749 /* If the partial register-part of the arg counts in its stack size,
2750 skip the part of stack space corresponding to the registers.
2751 Otherwise, start copying to the beginning of the stack space,
2752 by setting SKIP to 0. */
e5e809f4 2753 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
bbf6f052
RK
2754
2755 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2756 x = validize_mem (force_const_mem (mode, x));
2757
2758 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2759 SUBREGs of such registers are not allowed. */
2760 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2761 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2762 x = copy_to_reg (x);
2763
2764 /* Loop over all the words allocated on the stack for this arg. */
2765 /* We can do it by words, because any scalar bigger than a word
2766 has a size a multiple of a word. */
2767#ifndef PUSH_ARGS_REVERSED
2768 for (i = not_stack; i < size; i++)
2769#else
2770 for (i = size - 1; i >= not_stack; i--)
2771#endif
2772 if (i >= not_stack + offset)
2773 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
2774 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2775 0, args_addr,
2776 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4
JL
2777 * UNITS_PER_WORD)),
2778 reg_parm_stack_space);
bbf6f052
RK
2779 }
2780 else
2781 {
2782 rtx addr;
921b3427 2783 rtx target = NULL_RTX;
bbf6f052
RK
2784
2785 /* Push padding now if padding above and stack grows down,
2786 or if padding below and stack grows up.
2787 But if space already allocated, this has already been done. */
2788 if (extra && args_addr == 0
2789 && where_pad != none && where_pad != stack_direction)
906c4e36 2790 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2791
2792#ifdef PUSH_ROUNDING
2793 if (args_addr == 0)
2794 addr = gen_push_operand ();
2795 else
2796#endif
921b3427
RK
2797 {
2798 if (GET_CODE (args_so_far) == CONST_INT)
2799 addr
2800 = memory_address (mode,
2801 plus_constant (args_addr,
2802 INTVAL (args_so_far)));
2803 else
38a448ca
RH
2804 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
2805 args_so_far));
921b3427
RK
2806 target = addr;
2807 }
bbf6f052 2808
38a448ca 2809 emit_move_insn (gen_rtx_MEM (mode, addr), x);
921b3427 2810
956d6950 2811 if (flag_check_memory_usage && ! in_check_memory_usage)
921b3427 2812 {
956d6950 2813 in_check_memory_usage = 1;
921b3427
RK
2814 if (target == 0)
2815 target = get_push_address (GET_MODE_SIZE (mode));
2816
c85f7c16 2817 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427
RK
2818 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2819 target, ptr_mode,
2820 XEXP (x, 0), ptr_mode,
2821 GEN_INT (GET_MODE_SIZE (mode)),
2822 TYPE_MODE (sizetype));
2823 else
2824 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2825 target, ptr_mode,
2826 GEN_INT (GET_MODE_SIZE (mode)),
2827 TYPE_MODE (sizetype),
956d6950
JL
2828 GEN_INT (MEMORY_USE_RW),
2829 TYPE_MODE (integer_type_node));
2830 in_check_memory_usage = 0;
921b3427 2831 }
bbf6f052
RK
2832 }
2833
2834 ret:
2835 /* If part should go in registers, copy that part
2836 into the appropriate registers. Do this now, at the end,
2837 since mem-to-mem copies above may do function calls. */
cd048831 2838 if (partial > 0 && reg != 0)
fffa9c1d
JW
2839 {
2840 /* Handle calls that pass values in multiple non-contiguous locations.
2841 The Irix 6 ABI has examples of this. */
2842 if (GET_CODE (reg) == PARALLEL)
2843 emit_group_load (reg, x);
2844 else
2845 move_block_to_reg (REGNO (reg), x, partial, mode);
2846 }
bbf6f052
RK
2847
2848 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 2849 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2850}
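/* A worked example of the PARTIAL handling above, assuming 4-byte
   words and PARM_BOUNDARY == 32: pushing a 24-byte BLKmode argument
   with PARTIAL == 2 and REG set gives USED == 8, so only bytes 8..23
   are copied to the stack, and the code at "ret:" then loads the
   first two words of X into REG and its successor.  */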
2851\f
bbf6f052
RK
2852/* Expand an assignment that stores the value of FROM into TO.
2853 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
2854 (This may contain a QUEUED rtx;
2855 if the value is constant, this rtx is a constant.)
2856 Otherwise, the returned value is NULL_RTX.
bbf6f052
RK
2857
2858 SUGGEST_REG is no longer actually used.
2859 It used to mean, copy the value through a register
2860 and return that register, if that is possible.
709f5be1 2861 We now use WANT_VALUE to decide whether to do this. */
bbf6f052
RK
2862
2863rtx
2864expand_assignment (to, from, want_value, suggest_reg)
2865 tree to, from;
2866 int want_value;
2867 int suggest_reg;
2868{
2869 register rtx to_rtx = 0;
2870 rtx result;
2871
2872 /* Don't crash if the lhs of the assignment was erroneous. */
2873
2874 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
2875 {
2876 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2877 return want_value ? result : NULL_RTX;
2878 }
bbf6f052
RK
2879
2880 /* Assignment of a structure component needs special treatment
2881 if the structure component's rtx is not simply a MEM.
6be58303
JW
2882 Assignment of an array element at a constant index, and assignment of
2883 an array element in an unaligned packed structure field, have the same
2884 problem. */
bbf6f052 2885
08293add
RK
2886 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
2887 || TREE_CODE (to) == ARRAY_REF)
bbf6f052
RK
2888 {
2889 enum machine_mode mode1;
2890 int bitsize;
2891 int bitpos;
7bb0943f 2892 tree offset;
bbf6f052
RK
2893 int unsignedp;
2894 int volatilep = 0;
0088fcb1 2895 tree tem;
d78d243c 2896 int alignment;
0088fcb1
RK
2897
2898 push_temp_slots ();
839c4796
RK
2899 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2900 &unsignedp, &volatilep, &alignment);
bbf6f052
RK
2901
2902 /* If we are going to use store_bit_field and extract_bit_field,
2903 make sure to_rtx will be safe for multiple use. */
2904
2905 if (mode1 == VOIDmode && want_value)
2906 tem = stabilize_reference (tem);
2907
921b3427 2908 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
7bb0943f
RS
2909 if (offset != 0)
2910 {
906c4e36 2911 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
2912
2913 if (GET_CODE (to_rtx) != MEM)
2914 abort ();
2915 to_rtx = change_address (to_rtx, VOIDmode,
38a448ca
RH
2916 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
2917 force_reg (ptr_mode, offset_rtx)));
7bb0943f 2918 }
bbf6f052
RK
2919 if (volatilep)
2920 {
2921 if (GET_CODE (to_rtx) == MEM)
01188446
JW
2922 {
2923 /* When the offset is zero, to_rtx is the address of the
2924 structure we are storing into, and hence may be shared.
2925 We must make a new MEM before setting the volatile bit. */
2926 if (offset == 0)
effbcc6a
RK
2927 to_rtx = copy_rtx (to_rtx);
2928
01188446
JW
2929 MEM_VOLATILE_P (to_rtx) = 1;
2930 }
bbf6f052
RK
2931#if 0 /* This was turned off because, when a field is volatile
2932 in an object which is not volatile, the object may be in a register,
2933 and then we would abort over here. */
2934 else
2935 abort ();
2936#endif
2937 }
2938
956d6950
JL
2939 if (TREE_CODE (to) == COMPONENT_REF
2940 && TREE_READONLY (TREE_OPERAND (to, 1)))
2941 {
8bd6ecc2 2942 if (offset == 0)
956d6950
JL
2943 to_rtx = copy_rtx (to_rtx);
2944
2945 RTX_UNCHANGING_P (to_rtx) = 1;
2946 }
2947
921b3427
RK
2948 /* Check the access. */
2949 if (flag_check_memory_usage && GET_CODE (to_rtx) == MEM)
2950 {
2951 rtx to_addr;
2952 int size;
2953 int best_mode_size;
2954 enum machine_mode best_mode;
2955
2956 best_mode = get_best_mode (bitsize, bitpos,
2957 TYPE_ALIGN (TREE_TYPE (tem)),
2958 mode1, volatilep);
2959 if (best_mode == VOIDmode)
2960 best_mode = QImode;
2961
2962 best_mode_size = GET_MODE_BITSIZE (best_mode);
2963 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
2964 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
2965 size *= GET_MODE_SIZE (best_mode);
2966
2967 /* Check the access right of the pointer. */
e9a25f70
JL
2968 if (size)
2969 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
2970 to_addr, ptr_mode,
2971 GEN_INT (size), TYPE_MODE (sizetype),
956d6950
JL
2972 GEN_INT (MEMORY_USE_WO),
2973 TYPE_MODE (integer_type_node));
921b3427
RK
2974 }
2975
bbf6f052
RK
2976 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2977 (want_value
2978 /* Spurious cast makes HPUX compiler happy. */
2979 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2980 : VOIDmode),
2981 unsignedp,
2982 /* Required alignment of containing datum. */
d78d243c 2983 alignment,
bbf6f052
RK
2984 int_size_in_bytes (TREE_TYPE (tem)));
2985 preserve_temp_slots (result);
2986 free_temp_slots ();
0088fcb1 2987 pop_temp_slots ();
bbf6f052 2988
709f5be1
RS
2989 /* If the value is meaningful, convert RESULT to the proper mode.
2990 Otherwise, return nothing. */
5ffe63ed
RS
2991 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2992 TYPE_MODE (TREE_TYPE (from)),
2993 result,
2994 TREE_UNSIGNED (TREE_TYPE (to)))
709f5be1 2995 : NULL_RTX);
bbf6f052
RK
2996 }
2997
cd1db108
RS
2998 /* If the rhs is a function call and its value is not an aggregate,
2999 call the function before we start to compute the lhs.
3000 This is needed for correct code for cases such as
3001 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
3002 requires loading up part of an address in a separate insn.
3003
3004 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3005 a promoted variable where the zero- or sign- extension needs to be done.
3006 Handling this in the normal way is safe because no computation is done
3007 before the call. */
3008 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
b35cd3c1 3009 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1ad87b63 3010 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 3011 {
0088fcb1
RK
3012 rtx value;
3013
3014 push_temp_slots ();
3015 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 3016 if (to_rtx == 0)
921b3427 3017 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
aaf87c45 3018
fffa9c1d
JW
3019 /* Handle calls that return values in multiple non-contiguous locations.
3020 The Irix 6 ABI has examples of this. */
3021 if (GET_CODE (to_rtx) == PARALLEL)
3022 emit_group_load (to_rtx, value);
3023 else if (GET_MODE (to_rtx) == BLKmode)
db3ec607 3024 emit_block_move (to_rtx, value, expr_size (from),
ff9b5bd8 3025 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
aaf87c45
JL
3026 else
3027 emit_move_insn (to_rtx, value);
cd1db108
RS
3028 preserve_temp_slots (to_rtx);
3029 free_temp_slots ();
0088fcb1 3030 pop_temp_slots ();
709f5be1 3031 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
3032 }
3033
bbf6f052
RK
3034 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3035 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3036
3037 if (to_rtx == 0)
921b3427 3038 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
bbf6f052 3039
86d38d25
RS
3040 /* Don't move directly into a return register. */
3041 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3042 {
0088fcb1
RK
3043 rtx temp;
3044
3045 push_temp_slots ();
3046 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
86d38d25
RS
3047 emit_move_insn (to_rtx, temp);
3048 preserve_temp_slots (to_rtx);
3049 free_temp_slots ();
0088fcb1 3050 pop_temp_slots ();
709f5be1 3051 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
3052 }
3053
bbf6f052
RK
3054 /* In case we are returning the contents of an object which overlaps
3055 the place the value is being stored, use a safe function when copying
3056 a value through a pointer into a structure value return block. */
3057 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3058 && current_function_returns_struct
3059 && !current_function_returns_pcc_struct)
3060 {
0088fcb1
RK
3061 rtx from_rtx, size;
3062
3063 push_temp_slots ();
33a20d10 3064 size = expr_size (from);
921b3427
RK
3065 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3066 EXPAND_MEMORY_USE_DONT);
3067
3068 /* Copy the rights of the bitmap. */
3069 if (flag_check_memory_usage)
3070 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3071 XEXP (to_rtx, 0), ptr_mode,
3072 XEXP (from_rtx, 0), ptr_mode,
3073 convert_to_mode (TYPE_MODE (sizetype),
3074 size, TREE_UNSIGNED (sizetype)),
3075 TYPE_MODE (sizetype));
bbf6f052
RK
3076
3077#ifdef TARGET_MEM_FUNCTIONS
d562e42e 3078 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
3079 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3080 XEXP (from_rtx, 0), Pmode,
0fa83258
RK
3081 convert_to_mode (TYPE_MODE (sizetype),
3082 size, TREE_UNSIGNED (sizetype)),
26ba80fc 3083 TYPE_MODE (sizetype));
bbf6f052 3084#else
d562e42e 3085 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
3086 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3087 XEXP (to_rtx, 0), Pmode,
3b6f75e2
JW
3088 convert_to_mode (TYPE_MODE (integer_type_node),
3089 size, TREE_UNSIGNED (integer_type_node)),
3090 TYPE_MODE (integer_type_node));
bbf6f052
RK
3091#endif
3092
3093 preserve_temp_slots (to_rtx);
3094 free_temp_slots ();
0088fcb1 3095 pop_temp_slots ();
709f5be1 3096 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
3097 }
3098
3099 /* Compute FROM and store the value in the rtx we got. */
3100
0088fcb1 3101 push_temp_slots ();
bbf6f052
RK
3102 result = store_expr (from, to_rtx, want_value);
3103 preserve_temp_slots (result);
3104 free_temp_slots ();
0088fcb1 3105 pop_temp_slots ();
709f5be1 3106 return want_value ? result : NULL_RTX;
bbf6f052
RK
3107}
3108
3109/* Generate code for computing expression EXP,
3110 and storing the value into TARGET.
bbf6f052
RK
3111 TARGET may contain a QUEUED rtx.
3112
709f5be1
RS
3113 If WANT_VALUE is nonzero, return a copy of the value
3114 not in TARGET, so that we can be sure to use the proper
3115 value in a containing expression even if TARGET has something
3116 else stored in it. If possible, we copy the value through a pseudo
3117 and return that pseudo. Or, if the value is constant, we try to
3118 return the constant. In some cases, we return a pseudo
3119 copied *from* TARGET.
3120
3121 If the mode is BLKmode then we may return TARGET itself.
3122 It turns out that in BLKmode it doesn't cause a problem,
3123 because C has no operators that could combine two different
3124 assignments into the same BLKmode object with different values
3125 with no sequence point. Will other languages need this to
3126 be more thorough?
3127
3128 If WANT_VALUE is 0, we return NULL, to make sure
3129 to catch quickly any cases where the caller uses the value
3130 and fails to set WANT_VALUE. */
bbf6f052
RK
3131
3132rtx
709f5be1 3133store_expr (exp, target, want_value)
bbf6f052
RK
3134 register tree exp;
3135 register rtx target;
709f5be1 3136 int want_value;
bbf6f052
RK
3137{
3138 register rtx temp;
3139 int dont_return_target = 0;
3140
3141 if (TREE_CODE (exp) == COMPOUND_EXPR)
3142 {
3143 /* Perform first part of compound expression, then assign from second
3144 part. */
3145 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3146 emit_queue ();
709f5be1 3147 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
3148 }
3149 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3150 {
3151 /* For conditional expression, get safe form of the target. Then
3152 test the condition, doing the appropriate assignment on either
3153 side. This avoids the creation of unnecessary temporaries.
3154 For non-BLKmode, it is more efficient not to do this. */
3155
3156 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3157
3158 emit_queue ();
3159 target = protect_from_queue (target, 1);
3160
dabf8373 3161 do_pending_stack_adjust ();
bbf6f052
RK
3162 NO_DEFER_POP;
3163 jumpifnot (TREE_OPERAND (exp, 0), lab1);
956d6950 3164 start_cleanup_deferral ();
709f5be1 3165 store_expr (TREE_OPERAND (exp, 1), target, 0);
956d6950 3166 end_cleanup_deferral ();
bbf6f052
RK
3167 emit_queue ();
3168 emit_jump_insn (gen_jump (lab2));
3169 emit_barrier ();
3170 emit_label (lab1);
956d6950 3171 start_cleanup_deferral ();
709f5be1 3172 store_expr (TREE_OPERAND (exp, 2), target, 0);
956d6950 3173 end_cleanup_deferral ();
bbf6f052
RK
3174 emit_queue ();
3175 emit_label (lab2);
3176 OK_DEFER_POP;
a3a58acc 3177
709f5be1 3178 return want_value ? target : NULL_RTX;
bbf6f052 3179 }
709f5be1 3180 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
bbf6f052
RK
3181 && GET_MODE (target) != BLKmode)
3182 /* If target is in memory and caller wants value in a register instead,
3183 arrange that. Pass TARGET as target for expand_expr so that,
709f5be1 3184 if EXP is another assignment, WANT_VALUE will be nonzero for it.
c2e6aff6
RS
3185 We know expand_expr will not use the target in that case.
3186 Don't do this if TARGET is volatile because we are supposed
3187 to write it and then read it. */
bbf6f052 3188 {
906c4e36 3189 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
bbf6f052
RK
3190 GET_MODE (target), 0);
3191 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3192 temp = copy_to_reg (temp);
3193 dont_return_target = 1;
3194 }
3195 else if (queued_subexp_p (target))
709f5be1
RS
3196 /* If target contains a postincrement, let's not risk
3197 using it as the place to generate the rhs. */
bbf6f052
RK
3198 {
3199 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3200 {
3201 /* Expand EXP into a new pseudo. */
3202 temp = gen_reg_rtx (GET_MODE (target));
3203 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3204 }
3205 else
906c4e36 3206 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
709f5be1
RS
3207
3208 /* If target is volatile, ANSI requires accessing the value
3209 *from* the target, if it is accessed. So make that happen.
3210 In no case return the target itself. */
3211 if (! MEM_VOLATILE_P (target) && want_value)
3212 dont_return_target = 1;
bbf6f052 3213 }
1499e0a8
RK
3214 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3215 /* If this is a scalar in a register that is stored in a wider mode
3216 than the declared mode, compute the result into its declared mode
3217 and then convert to the wider mode. Our value is the computed
3218 expression. */
3219 {
5a32d038 3220 /* If we don't want a value, we can do the conversion inside EXP,
f635a84d
RK
3221 which will often result in some optimizations. Do the conversion
3222 in two steps: first change the signedness, if needed, then
ab6c58f1
RK
3223 the extend. But don't do this if the type of EXP is a subtype
3224 of something else since then the conversion might involve
3225 more than just converting modes. */
3226 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3227 && TREE_TYPE (TREE_TYPE (exp)) == 0)
f635a84d
RK
3228 {
3229 if (TREE_UNSIGNED (TREE_TYPE (exp))
3230 != SUBREG_PROMOTED_UNSIGNED_P (target))
3231 exp
3232 = convert
3233 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3234 TREE_TYPE (exp)),
3235 exp);
3236
3237 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3238 SUBREG_PROMOTED_UNSIGNED_P (target)),
3239 exp);
3240 }
5a32d038 3241
1499e0a8 3242 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
b258707c 3243
766f36c7 3244 /* If TEMP is a volatile MEM and we want a result value, make
f29369b9
RK
3245 the access now so it gets done only once. Likewise if
3246 it contains TARGET. */
3247 if (GET_CODE (temp) == MEM && want_value
3248 && (MEM_VOLATILE_P (temp)
3249 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
766f36c7
RK
3250 temp = copy_to_reg (temp);
3251
b258707c
RS
3252 /* If TEMP is a VOIDmode constant, use convert_modes to make
3253 sure that we properly convert it. */
3254 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3255 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3256 TYPE_MODE (TREE_TYPE (exp)), temp,
3257 SUBREG_PROMOTED_UNSIGNED_P (target));
3258
1499e0a8
RK
3259 convert_move (SUBREG_REG (target), temp,
3260 SUBREG_PROMOTED_UNSIGNED_P (target));
709f5be1 3261 return want_value ? temp : NULL_RTX;
1499e0a8 3262 }
bbf6f052
RK
3263 else
3264 {
3265 temp = expand_expr (exp, target, GET_MODE (target), 0);
766f36c7 3266 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
3267 If TARGET is a volatile mem ref, either return TARGET
3268 or return a reg copied *from* TARGET; ANSI requires this.
3269
3270 Otherwise, if TEMP is not TARGET, return TEMP
3271 if it is constant (for efficiency),
3272 or if we really want the correct value. */
bbf6f052
RK
3273 if (!(target && GET_CODE (target) == REG
3274 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1 3275 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
effbcc6a 3276 && ! rtx_equal_p (temp, target)
709f5be1 3277 && (CONSTANT_P (temp) || want_value))
bbf6f052
RK
3278 dont_return_target = 1;
3279 }
3280
b258707c
RS
3281 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3282 the same as that of TARGET, adjust the constant. This is needed, for
3283 example, in case it is a CONST_DOUBLE and we want only a word-sized
3284 value. */
3285 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 3286 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
3287 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3288 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3289 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3290
921b3427
RK
3291 if (flag_check_memory_usage
3292 && GET_CODE (target) == MEM
3293 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3294 {
3295 if (GET_CODE (temp) == MEM)
3296 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3297 XEXP (target, 0), ptr_mode,
3298 XEXP (temp, 0), ptr_mode,
3299 expr_size (exp), TYPE_MODE (sizetype));
3300 else
3301 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3302 XEXP (target, 0), ptr_mode,
3303 expr_size (exp), TYPE_MODE (sizetype),
956d6950
JL
3304 GEN_INT (MEMORY_USE_WO),
3305 TYPE_MODE (integer_type_node));
921b3427
RK
3306 }
3307
bbf6f052
RK
3308 /* If value was not generated in the target, store it there.
3309 Convert the value to TARGET's type first if necessary. */
3310
effbcc6a 3311 if (! rtx_equal_p (temp, target) && TREE_CODE (exp) != ERROR_MARK)
bbf6f052
RK
3312 {
3313 target = protect_from_queue (target, 1);
3314 if (GET_MODE (temp) != GET_MODE (target)
3315 && GET_MODE (temp) != VOIDmode)
3316 {
3317 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3318 if (dont_return_target)
3319 {
3320 /* In this case, we will return TEMP,
3321 so make sure it has the proper mode.
3322 But don't forget to store the value into TARGET. */
3323 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3324 emit_move_insn (target, temp);
3325 }
3326 else
3327 convert_move (target, temp, unsignedp);
3328 }
3329
3330 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3331 {
3332 /* Handle copying a string constant into an array.
3333 The string constant may be shorter than the array.
3334 So copy just the string's actual length, and clear the rest. */
3335 rtx size;
22619c3f 3336 rtx addr;
bbf6f052 3337
e87b4f3f
RS
3338 /* Get the size of the data type of the string,
3339 which is actually the size of the target. */
3340 size = expr_size (exp);
3341 if (GET_CODE (size) == CONST_INT
3342 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3343 emit_block_move (target, temp, size,
3344 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3345 else
bbf6f052 3346 {
e87b4f3f
RS
3347 /* Compute the size of the data to copy from the string. */
3348 tree copy_size
c03b7665 3349 = size_binop (MIN_EXPR,
b50d17a1 3350 make_tree (sizetype, size),
c03b7665
RK
3351 convert (sizetype,
3352 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
906c4e36
RK
3353 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3354 VOIDmode, 0);
e87b4f3f
RS
3355 rtx label = 0;
3356
3357 /* Copy that much. */
3358 emit_block_move (target, temp, copy_size_rtx,
3359 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3360
88f63c77
RK
3361 /* Figure out how much is left in TARGET that we have to clear.
3362 Do all calculations in ptr_mode. */
3363
3364 addr = XEXP (target, 0);
3365 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3366
e87b4f3f
RS
3367 if (GET_CODE (copy_size_rtx) == CONST_INT)
3368 {
88f63c77 3369 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
22619c3f 3370 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
e87b4f3f
RS
3371 }
3372 else
3373 {
88f63c77
RK
3374 addr = force_reg (ptr_mode, addr);
3375 addr = expand_binop (ptr_mode, add_optab, addr,
906c4e36
RK
3376 copy_size_rtx, NULL_RTX, 0,
3377 OPTAB_LIB_WIDEN);
e87b4f3f 3378
88f63c77 3379 size = expand_binop (ptr_mode, sub_optab, size,
906c4e36
RK
3380 copy_size_rtx, NULL_RTX, 0,
3381 OPTAB_LIB_WIDEN);
e87b4f3f 3382
906c4e36 3383 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
e87b4f3f
RS
3384 GET_MODE (size), 0, 0);
3385 label = gen_label_rtx ();
3386 emit_jump_insn (gen_blt (label));
3387 }
3388
3389 if (size != const0_rtx)
3390 {
921b3427
RK
3391 /* Be sure we can write on ADDR. */
3392 if (flag_check_memory_usage)
3393 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3394 addr, ptr_mode,
3395 size, TYPE_MODE (sizetype),
956d6950
JL
3396 GEN_INT (MEMORY_USE_WO),
3397 TYPE_MODE (integer_type_node));
bbf6f052 3398#ifdef TARGET_MEM_FUNCTIONS
3b6f75e2 3399 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
86242483 3400 addr, ptr_mode,
3b6f75e2
JW
3401 const0_rtx, TYPE_MODE (integer_type_node),
3402 convert_to_mode (TYPE_MODE (sizetype),
3403 size,
3404 TREE_UNSIGNED (sizetype)),
3405 TYPE_MODE (sizetype));
bbf6f052 3406#else
d562e42e 3407 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
86242483 3408 addr, ptr_mode,
3b6f75e2
JW
3409 convert_to_mode (TYPE_MODE (integer_type_node),
3410 size,
3411 TREE_UNSIGNED (integer_type_node)),
3412 TYPE_MODE (integer_type_node));
bbf6f052 3413#endif
e87b4f3f 3414 }
22619c3f 3415
e87b4f3f
RS
3416 if (label)
3417 emit_label (label);
bbf6f052
RK
3418 }
3419 }
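      /* For instance, initializing char a[10] from the constant "abc"
         copies TREE_STRING_LENGTH bytes (the string including its
         terminator) and then clears the remaining bytes of the array
         with the memset/bzero call above.  */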
fffa9c1d
JW
3420 /* Handle calls that return values in multiple non-contiguous locations.
3421 The Irix 6 ABI has examples of this. */
3422 else if (GET_CODE (target) == PARALLEL)
3423 emit_group_load (target, temp);
bbf6f052
RK
3424 else if (GET_MODE (temp) == BLKmode)
3425 emit_block_move (target, temp, expr_size (exp),
3426 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3427 else
3428 emit_move_insn (target, temp);
3429 }
709f5be1 3430
766f36c7
RK
3431 /* If we don't want a value, return NULL_RTX. */
3432 if (! want_value)
3433 return NULL_RTX;
3434
3435 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3436 ??? The latter test doesn't seem to make sense. */
3437 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 3438 return temp;
766f36c7
RK
3439
3440 /* Copy TARGET to a pseudo, unless it is BLKmode or a hard register. */
3441 else if (want_value && GET_MODE (target) != BLKmode
3442 && ! (GET_CODE (target) == REG
3443 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 3444 return copy_to_reg (target);
766f36c7
RK
3445
3446 else
709f5be1 3447 return target;
bbf6f052
RK
3448}
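
/* Illustrative sketch (not part of expr.c): the STRING_CST branch above
   does at the RTL level what the following C does with library calls --
   copy min (string length, target size) bytes, then zero the tail.
   The function and parameter names here are hypothetical.  */
#include <string.h>

static void
init_array_from_string (char *target, size_t target_size,
			const char *str, size_t str_len)
{
  size_t copy = str_len < target_size ? str_len : target_size;

  memcpy (target, str, copy);	/* analogue of emit_block_move */
  if (copy < target_size)	/* analogue of the memset/bzero call */
    memset (target + copy, 0, target_size - copy);
}
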
3449\f
9de08200
RK
3450/* Return 1 if EXP just contains zeros. */
3451
3452static int
3453is_zeros_p (exp)
3454 tree exp;
3455{
3456 tree elt;
3457
3458 switch (TREE_CODE (exp))
3459 {
3460 case CONVERT_EXPR:
3461 case NOP_EXPR:
3462 case NON_LVALUE_EXPR:
3463 return is_zeros_p (TREE_OPERAND (exp, 0));
3464
3465 case INTEGER_CST:
3466 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3467
3468 case COMPLEX_CST:
3469 return
3470 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3471
3472 case REAL_CST:
41c9120b 3473 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
9de08200
RK
3474
3475 case CONSTRUCTOR:
e1a43f73
PB
3476 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3477 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
9de08200
RK
3478 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3479 if (! is_zeros_p (TREE_VALUE (elt)))
3480 return 0;
3481
3482 return 1;
e9a25f70
JL
3483
3484 default:
3485 return 0;
9de08200 3486 }
9de08200
RK
3487}
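
/* Illustrative sketch (not from this file): INTEGER_CST nodes store wide
   constants as a low/high word pair, which is why is_zeros_p above must
   test both TREE_INT_CST_LOW and TREE_INT_CST_HIGH.  The struct below is
   hypothetical, for exposition only.  */
struct wide_cst { unsigned long low; long high; };

static int
wide_cst_is_zero (struct wide_cst c)
{
  return c.low == 0 && c.high == 0;
}
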
3488
3489/* Return 1 if EXP contains mostly (3/4) zeros. */
3490
3491static int
3492mostly_zeros_p (exp)
3493 tree exp;
3494{
9de08200
RK
3495 if (TREE_CODE (exp) == CONSTRUCTOR)
3496 {
e1a43f73
PB
3497 int elts = 0, zeros = 0;
3498 tree elt = CONSTRUCTOR_ELTS (exp);
3499 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3500 {
3501 /* If there are no ranges of true bits, it is all zero. */
3502 return elt == NULL_TREE;
3503 }
3504 for (; elt; elt = TREE_CHAIN (elt))
3505 {
3506 /* We do not handle the case where the index is a RANGE_EXPR,
3507 so the statistic will be somewhat inaccurate.
3508 We do make a more accurate count in store_constructor itself,
3509 and since this function is only used for nested array elements,
0f41302f 3510 this should be close enough. */
e1a43f73
PB
3511 if (mostly_zeros_p (TREE_VALUE (elt)))
3512 zeros++;
3513 elts++;
3514 }
9de08200
RK
3515
3516 return 4 * zeros >= 3 * elts;
3517 }
3518
3519 return is_zeros_p (exp);
3520}
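
/* Sketch of the threshold test used above: "at least 3/4 zeros" is
   computed in integer arithmetic as 4 * zeros >= 3 * elts, avoiding
   division and floating point.  Standalone, hypothetical example;
   assumes the counts are small enough not to overflow int.  */
static int
mostly_zeros (int zeros, int elts)
{
  /* Equivalent to (double) zeros / elts >= 0.75, but exact.  */
  return 4 * zeros >= 3 * elts;
}
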
3521\f
e1a43f73
PB
3522/* Helper function for store_constructor.
3523 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3524 TYPE is the type of the CONSTRUCTOR, not the element type.
23ccec44
JW
3525 CLEARED is as for store_constructor.
3526
3527 This provides a recursive shortcut back to store_constructor when it isn't
3528 necessary to go through store_field. This is so that we can pass through
3529 the cleared field to let store_constructor know that we may not have to
3530 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
3531
3532static void
3533store_constructor_field (target, bitsize, bitpos,
3534 mode, exp, type, cleared)
3535 rtx target;
3536 int bitsize, bitpos;
3537 enum machine_mode mode;
3538 tree exp, type;
3539 int cleared;
3540{
3541 if (TREE_CODE (exp) == CONSTRUCTOR
23ccec44
JW
3542 && bitpos % BITS_PER_UNIT == 0
3543 /* If we have a non-zero bitpos for a register target, then we just
3544 let store_field do the bitfield handling. This is unlikely to
3545 generate unnecessary clear instructions anyway. */
3546 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 3547 {
126e5b0d
JW
3548 if (bitpos != 0)
3549 target = change_address (target, VOIDmode,
3550 plus_constant (XEXP (target, 0),
3551 bitpos / BITS_PER_UNIT));
3552 store_constructor (exp, target, cleared);
e1a43f73
PB
3553 }
3554 else
3555 store_field (target, bitsize, bitpos, mode, exp,
3556 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3557 int_size_in_bytes (type));
3558}
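
/* Sketch (hypothetical names, not part of expr.c): when BITPOS is a
   multiple of BITS_PER_UNIT and the target is in memory, the code above
   simply rebases the target address by whole bytes instead of doing a
   bit-field insertion.  */
static char *
rebase_by_bits (char *base, int bitpos, int bits_per_unit)
{
  /* Valid only when bitpos is byte-aligned, as the caller checks.  */
  return base + bitpos / bits_per_unit;
}
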
3559
bbf6f052 3560/* Store the value of constructor EXP into the rtx TARGET.
e1a43f73 3561 TARGET is either a REG or a MEM.
0f41302f 3562 CLEARED is true if TARGET is known to have been zeroed. */
bbf6f052
RK
3563
3564static void
e1a43f73 3565store_constructor (exp, target, cleared)
bbf6f052
RK
3566 tree exp;
3567 rtx target;
e1a43f73 3568 int cleared;
bbf6f052 3569{
4af3895e
JVA
3570 tree type = TREE_TYPE (exp);
3571
bbf6f052
RK
3572 /* We know our target cannot conflict, since safe_from_p has been called. */
3573#if 0
3574 /* Don't try copying piece by piece into a hard register
3575 since that is vulnerable to being clobbered by EXP.
3576 Instead, construct in a pseudo register and then copy it all. */
3577 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3578 {
3579 rtx temp = gen_reg_rtx (GET_MODE (target));
e1a43f73 3580 store_constructor (exp, temp, 0);
bbf6f052
RK
3581 emit_move_insn (target, temp);
3582 return;
3583 }
3584#endif
3585
e44842fe
RK
3586 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3587 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052
RK
3588 {
3589 register tree elt;
3590
4af3895e 3591 /* Inform later passes that the whole union value is dead. */
e44842fe
RK
3592 if (TREE_CODE (type) == UNION_TYPE
3593 || TREE_CODE (type) == QUAL_UNION_TYPE)
38a448ca 3594 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4af3895e
JVA
3595
3596 /* If we are building a static constructor into a register,
3597 set the initial value as zero so we can fold the value into
67225c15
RK
3598 a constant. But if more than one register is involved,
3599 this probably loses. */
3600 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3601 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200
RK
3602 {
3603 if (! cleared)
e9a25f70 3604 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4af3895e 3605
9de08200
RK
3606 cleared = 1;
3607 }
3608
3609 /* If the constructor has fewer fields than the structure
3610 or if we are initializing the structure to mostly zeros,
bbf6f052 3611 clear the whole structure first. */
9de08200
RK
3612 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3613 != list_length (TYPE_FIELDS (type)))
3614 || mostly_zeros_p (exp))
3615 {
3616 if (! cleared)
3617 clear_storage (target, expr_size (exp),
3618 TYPE_ALIGN (type) / BITS_PER_UNIT);
3619
3620 cleared = 1;
3621 }
bbf6f052
RK
3622 else
3623 /* Inform later passes that the old value is dead. */
38a448ca 3624 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
3625
3626 /* Store each element of the constructor into
3627 the corresponding field of TARGET. */
3628
3629 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3630 {
3631 register tree field = TREE_PURPOSE (elt);
3632 register enum machine_mode mode;
3633 int bitsize;
b50d17a1 3634 int bitpos = 0;
bbf6f052 3635 int unsignedp;
b50d17a1
RK
3636 tree pos, constant = 0, offset = 0;
3637 rtx to_rtx = target;
bbf6f052 3638
f32fd778
RS
3639 /* Just ignore missing fields.
3640 We cleared the whole structure, above,
3641 if any fields are missing. */
3642 if (field == 0)
3643 continue;
3644
e1a43f73
PB
3645 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3646 continue;
9de08200 3647
bbf6f052
RK
3648 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3649 unsignedp = TREE_UNSIGNED (field);
3650 mode = DECL_MODE (field);
3651 if (DECL_BIT_FIELD (field))
3652 mode = VOIDmode;
3653
b50d17a1
RK
3654 pos = DECL_FIELD_BITPOS (field);
3655 if (TREE_CODE (pos) == INTEGER_CST)
3656 constant = pos;
3657 else if (TREE_CODE (pos) == PLUS_EXPR
3658 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3659 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3660 else
3661 offset = pos;
3662
3663 if (constant)
cd11b87e 3664 bitpos = TREE_INT_CST_LOW (constant);
b50d17a1
RK
3665
3666 if (offset)
3667 {
3668 rtx offset_rtx;
3669
3670 if (contains_placeholder_p (offset))
3671 offset = build (WITH_RECORD_EXPR, sizetype,
956d6950 3672 offset, make_tree (TREE_TYPE (exp), target));
bbf6f052 3673
b50d17a1
RK
3674 offset = size_binop (FLOOR_DIV_EXPR, offset,
3675 size_int (BITS_PER_UNIT));
bbf6f052 3676
b50d17a1
RK
3677 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3678 if (GET_CODE (to_rtx) != MEM)
3679 abort ();
3680
3681 to_rtx
3682 = change_address (to_rtx, VOIDmode,
38a448ca 3683 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
88f63c77 3684 force_reg (ptr_mode, offset_rtx)));
b50d17a1 3685 }
cf04eb80
RK
3686 if (TREE_READONLY (field))
3687 {
9151b3bf 3688 if (GET_CODE (to_rtx) == MEM)
effbcc6a
RK
3689 to_rtx = copy_rtx (to_rtx);
3690
cf04eb80
RK
3691 RTX_UNCHANGING_P (to_rtx) = 1;
3692 }
3693
e1a43f73
PB
3694 store_constructor_field (to_rtx, bitsize, bitpos,
3695 mode, TREE_VALUE (elt), type, cleared);
bbf6f052
RK
3696 }
3697 }
4af3895e 3698 else if (TREE_CODE (type) == ARRAY_TYPE)
bbf6f052
RK
3699 {
3700 register tree elt;
3701 register int i;
e1a43f73 3702 int need_to_clear;
4af3895e 3703 tree domain = TYPE_DOMAIN (type);
906c4e36
RK
3704 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3705 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4af3895e 3706 tree elttype = TREE_TYPE (type);
bbf6f052 3707
e1a43f73
PB
3708 /* If the constructor has fewer elements than the array,
3709 clear the whole array first. Similarly if this is a
3710 static constructor of a non-BLKmode object. */
3711 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3712 need_to_clear = 1;
3713 else
3714 {
3715 HOST_WIDE_INT count = 0, zero_count = 0;
3716 need_to_clear = 0;
3717 /* This loop is a more accurate version of the loop in
3718 mostly_zeros_p (it handles RANGE_EXPR in an index).
3719 It is also needed to check for missing elements. */
3720 for (elt = CONSTRUCTOR_ELTS (exp);
3721 elt != NULL_TREE;
df0faff1 3722 elt = TREE_CHAIN (elt))
e1a43f73
PB
3723 {
3724 tree index = TREE_PURPOSE (elt);
3725 HOST_WIDE_INT this_node_count;
3726 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3727 {
3728 tree lo_index = TREE_OPERAND (index, 0);
3729 tree hi_index = TREE_OPERAND (index, 1);
3730 if (TREE_CODE (lo_index) != INTEGER_CST
3731 || TREE_CODE (hi_index) != INTEGER_CST)
3732 {
3733 need_to_clear = 1;
3734 break;
3735 }
3736 this_node_count = TREE_INT_CST_LOW (hi_index)
3737 - TREE_INT_CST_LOW (lo_index) + 1;
3738 }
3739 else
3740 this_node_count = 1;
3741 count += this_node_count;
3742 if (mostly_zeros_p (TREE_VALUE (elt)))
3743 zero_count += this_node_count;
3744 }
8e958f70 3745 /* Clear the entire array first if there are any missing elements,
0f41302f 3746 or if the incidence of zero elements is >= 75%. */
8e958f70
PB
3747 if (count < maxelt - minelt + 1
3748 || 4 * zero_count >= 3 * count)
e1a43f73
PB
3749 need_to_clear = 1;
3750 }
3751 if (need_to_clear)
9de08200
RK
3752 {
3753 if (! cleared)
3754 clear_storage (target, expr_size (exp),
3755 TYPE_ALIGN (type) / BITS_PER_UNIT);
9de08200
RK
3756 cleared = 1;
3757 }
bbf6f052
RK
3758 else
3759 /* Inform later passes that the old value is dead. */
38a448ca 3760 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
3761
3762 /* Store each element of the constructor into
3763 the corresponding element of TARGET, determined
3764 by counting the elements. */
3765 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3766 elt;
3767 elt = TREE_CHAIN (elt), i++)
3768 {
3769 register enum machine_mode mode;
3770 int bitsize;
3771 int bitpos;
3772 int unsignedp;
e1a43f73 3773 tree value = TREE_VALUE (elt);
03dc44a6
RS
3774 tree index = TREE_PURPOSE (elt);
3775 rtx xtarget = target;
bbf6f052 3776
e1a43f73
PB
3777 if (cleared && is_zeros_p (value))
3778 continue;
9de08200 3779
bbf6f052
RK
3780 mode = TYPE_MODE (elttype);
3781 bitsize = GET_MODE_BITSIZE (mode);
3782 unsignedp = TREE_UNSIGNED (elttype);
3783
e1a43f73
PB
3784 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3785 {
3786 tree lo_index = TREE_OPERAND (index, 0);
3787 tree hi_index = TREE_OPERAND (index, 1);
3788 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3789 struct nesting *loop;
05c0b405
PB
3790 HOST_WIDE_INT lo, hi, count;
3791 tree position;
e1a43f73 3792
0f41302f 3793 /* If the range is constant and "small", unroll the loop. */
e1a43f73 3794 if (TREE_CODE (lo_index) == INTEGER_CST
05c0b405
PB
3795 && TREE_CODE (hi_index) == INTEGER_CST
3796 && (lo = TREE_INT_CST_LOW (lo_index),
3797 hi = TREE_INT_CST_LOW (hi_index),
3798 count = hi - lo + 1,
3799 (GET_CODE (target) != MEM
3800 || count <= 2
3801 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3802 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3803 <= 40 * 8))))
e1a43f73 3804 {
05c0b405
PB
3805 lo -= minelt; hi -= minelt;
3806 for (; lo <= hi; lo++)
e1a43f73 3807 {
05c0b405
PB
3808 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3809 store_constructor_field (target, bitsize, bitpos,
3810 mode, value, type, cleared);
e1a43f73
PB
3811 }
3812 }
3813 else
3814 {
3815 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3816 loop_top = gen_label_rtx ();
3817 loop_end = gen_label_rtx ();
3818
3819 unsignedp = TREE_UNSIGNED (domain);
3820
3821 index = build_decl (VAR_DECL, NULL_TREE, domain);
3822
3823 DECL_RTL (index) = index_r
3824 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3825 &unsignedp, 0));
3826
3827 if (TREE_CODE (value) == SAVE_EXPR
3828 && SAVE_EXPR_RTL (value) == 0)
3829 {
0f41302f
MS
3830 /* Make sure value gets expanded once before the
3831 loop. */
e1a43f73
PB
3832 expand_expr (value, const0_rtx, VOIDmode, 0);
3833 emit_queue ();
3834 }
3835 store_expr (lo_index, index_r, 0);
3836 loop = expand_start_loop (0);
3837
0f41302f 3838 /* Assign value to element index. */
e1a43f73
PB
3839 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3840 size_int (BITS_PER_UNIT));
3841 position = size_binop (MULT_EXPR,
3842 size_binop (MINUS_EXPR, index,
3843 TYPE_MIN_VALUE (domain)),
3844 position);
3845 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 3846 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
e1a43f73
PB
3847 xtarget = change_address (target, mode, addr);
3848 if (TREE_CODE (value) == CONSTRUCTOR)
05c0b405 3849 store_constructor (value, xtarget, cleared);
e1a43f73
PB
3850 else
3851 store_expr (value, xtarget, 0);
3852
3853 expand_exit_loop_if_false (loop,
3854 build (LT_EXPR, integer_type_node,
3855 index, hi_index));
3856
3857 expand_increment (build (PREINCREMENT_EXPR,
3858 TREE_TYPE (index),
7b8b9722 3859 index, integer_one_node), 0, 0);
e1a43f73
PB
3860 expand_end_loop ();
3861 emit_label (loop_end);
3862
3863 /* Needed by stupid register allocation, to extend the
3864 lifetime of pseudo-regs used by target past the end
3865 of the loop. */
38a448ca 3866 emit_insn (gen_rtx_USE (GET_MODE (target), target));
e1a43f73
PB
3867 }
3868 }
3869 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
5b6c44ff 3870 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
03dc44a6 3871 {
e1a43f73 3872 rtx pos_rtx, addr;
03dc44a6
RS
3873 tree position;
3874
5b6c44ff
RK
3875 if (index == 0)
3876 index = size_int (i);
3877
e1a43f73
PB
3878 if (minelt)
3879 index = size_binop (MINUS_EXPR, index,
3880 TYPE_MIN_VALUE (domain));
5b6c44ff
RK
3881 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3882 size_int (BITS_PER_UNIT));
3883 position = size_binop (MULT_EXPR, index, position);
03dc44a6 3884 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 3885 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
03dc44a6 3886 xtarget = change_address (target, mode, addr);
e1a43f73 3887 store_expr (value, xtarget, 0);
03dc44a6
RS
3888 }
3889 else
3890 {
3891 if (index != 0)
7c314719 3892 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
03dc44a6
RS
3893 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3894 else
3895 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
e1a43f73
PB
3896 store_constructor_field (target, bitsize, bitpos,
3897 mode, value, type, cleared);
03dc44a6 3898 }
bbf6f052
RK
3899 }
3900 }
071a6595
PB
3901 /* Set constructor assignments. */
3902 else if (TREE_CODE (type) == SET_TYPE)
3903 {
e1a43f73 3904 tree elt = CONSTRUCTOR_ELTS (exp);
e1a43f73 3905 int nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
3906 tree domain = TYPE_DOMAIN (type);
3907 tree domain_min, domain_max, bitlength;
3908
9faa82d8 3909 /* The default implementation strategy is to extract the constant
071a6595
PB
3910 parts of the constructor, use that to initialize the target,
3911 and then "or" in whatever non-constant ranges we need in addition.
3912
3913 If a large set is all zero or all ones, it is
3914 probably better to set it using memset (if available) or bzero.
3915 Also, if a large set has just a single range, it may also be
3916 better to first clear the set (using bzero/memset),
0f41302f 3917 and then set the bits we want. */
071a6595 3918
0f41302f 3919 /* Check for all zeros. */
e1a43f73 3920 if (elt == NULL_TREE)
071a6595 3921 {
e1a43f73
PB
3922 if (!cleared)
3923 clear_storage (target, expr_size (exp),
3924 TYPE_ALIGN (type) / BITS_PER_UNIT);
071a6595
PB
3925 return;
3926 }
3927
071a6595
PB
3928 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3929 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3930 bitlength = size_binop (PLUS_EXPR,
3931 size_binop (MINUS_EXPR, domain_max, domain_min),
3932 size_one_node);
3933
e1a43f73
PB
3934 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3935 abort ();
3936 nbits = TREE_INT_CST_LOW (bitlength);
3937
3938 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3939 are "complicated" (more than one range), initialize (the
3940 constant parts) by copying from a constant. */
3941 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3942 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 3943 {
b4ee5a72
PB
3944 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3945 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
0f41302f 3946 char *bit_buffer = (char *) alloca (nbits);
b4ee5a72
PB
3947 HOST_WIDE_INT word = 0;
3948 int bit_pos = 0;
3949 int ibit = 0;
0f41302f 3950 int offset = 0; /* In bytes from beginning of set. */
e1a43f73 3951 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 3952 for (;;)
071a6595 3953 {
b4ee5a72
PB
3954 if (bit_buffer[ibit])
3955 {
b09f3348 3956 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
3957 word |= (1 << (set_word_size - 1 - bit_pos));
3958 else
3959 word |= 1 << bit_pos;
3960 }
3961 bit_pos++; ibit++;
3962 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 3963 {
e1a43f73
PB
3964 if (word != 0 || ! cleared)
3965 {
3966 rtx datum = GEN_INT (word);
3967 rtx to_rtx;
0f41302f
MS
3968 /* The assumption here is that it is safe to use
3969 XEXP if the set is multi-word, but not if
3970 it's single-word. */
e1a43f73
PB
3971 if (GET_CODE (target) == MEM)
3972 {
3973 to_rtx = plus_constant (XEXP (target, 0), offset);
3974 to_rtx = change_address (target, mode, to_rtx);
3975 }
3976 else if (offset == 0)
3977 to_rtx = target;
3978 else
3979 abort ();
3980 emit_move_insn (to_rtx, datum);
3981 }
b4ee5a72
PB
3982 if (ibit == nbits)
3983 break;
3984 word = 0;
3985 bit_pos = 0;
3986 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
3987 }
3988 }
071a6595 3989 }
e1a43f73
PB
3990 else if (!cleared)
3991 {
0f41302f 3992 /* Don't bother clearing storage if the set is all ones. */
e1a43f73
PB
3993 if (TREE_CHAIN (elt) != NULL_TREE
3994 || (TREE_PURPOSE (elt) == NULL_TREE
3995 ? nbits != 1
3996 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3997 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3998 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3999 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4000 != nbits))))
4001 clear_storage (target, expr_size (exp),
4002 TYPE_ALIGN (type) / BITS_PER_UNIT);
4003 }
4004
4005 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595
PB
4006 {
4007 /* Start of range of element, or NULL. */
4008 tree startbit = TREE_PURPOSE (elt);
4009 /* End of range of element, or element value. */
4010 tree endbit = TREE_VALUE (elt);
381127e8 4011#ifdef TARGET_MEM_FUNCTIONS
071a6595 4012 HOST_WIDE_INT startb, endb;
381127e8 4013#endif
071a6595
PB
4014 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4015
4016 bitlength_rtx = expand_expr (bitlength,
4017 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4018
4019 /* Handle a non-range tuple element like [ expr ]. */
4020 if (startbit == NULL_TREE)
4021 {
4022 startbit = save_expr (endbit);
4023 endbit = startbit;
4024 }
4025 startbit = convert (sizetype, startbit);
4026 endbit = convert (sizetype, endbit);
4027 if (! integer_zerop (domain_min))
4028 {
4029 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4030 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4031 }
4032 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4033 EXPAND_CONST_ADDRESS);
4034 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4035 EXPAND_CONST_ADDRESS);
4036
4037 if (REG_P (target))
4038 {
4039 targetx = assign_stack_temp (GET_MODE (target),
4040 GET_MODE_SIZE (GET_MODE (target)),
4041 0);
4042 emit_move_insn (targetx, target);
4043 }
4044 else if (GET_CODE (target) == MEM)
4045 targetx = target;
4046 else
4047 abort ();
4048
4049#ifdef TARGET_MEM_FUNCTIONS
4050 /* Optimization: If startbit and endbit are
9faa82d8 4051 constants divisible by BITS_PER_UNIT,
0f41302f 4052 call memset instead. */
071a6595
PB
4053 if (TREE_CODE (startbit) == INTEGER_CST
4054 && TREE_CODE (endbit) == INTEGER_CST
4055 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 4056 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 4057 {
071a6595
PB
4058 emit_library_call (memset_libfunc, 0,
4059 VOIDmode, 3,
e1a43f73
PB
4060 plus_constant (XEXP (targetx, 0),
4061 startb / BITS_PER_UNIT),
071a6595 4062 Pmode,
3b6f75e2 4063 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 4064 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 4065 TYPE_MODE (sizetype));
071a6595
PB
4066 }
4067 else
4068#endif
4069 {
38a448ca 4070 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
071a6595
PB
4071 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4072 bitlength_rtx, TYPE_MODE (sizetype),
4073 startbit_rtx, TYPE_MODE (sizetype),
4074 endbit_rtx, TYPE_MODE (sizetype));
4075 }
4076 if (REG_P (target))
4077 emit_move_insn (target, targetx);
4078 }
4079 }
bbf6f052
RK
4080
4081 else
4082 abort ();
4083}
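
/* Sketch of the bit-packing loop in the SET_TYPE case above: bits from a
   bitmap are accumulated into host words, with the bit order mirrored on
   big-endian targets, and each full (or final) word is stored.  This is
   a standalone, hypothetical version; the word size is fixed at 32 for
   clarity, OUT must hold at least (nbits + 31) / 32 words, and nbits is
   assumed positive (as in the code above, which bails out earlier when
   the constructor is empty).  */
#define SET_WORD_SIZE 32

static void
pack_bits (const char *bit_buffer, int nbits, int big_endian,
	   unsigned long *out)
{
  unsigned long word = 0;
  int bit_pos = 0, ibit = 0, nwords = 0;

  for (;;)
    {
      if (bit_buffer[ibit])
	{
	  if (big_endian)
	    word |= 1UL << (SET_WORD_SIZE - 1 - bit_pos);
	  else
	    word |= 1UL << bit_pos;
	}
      bit_pos++; ibit++;
      if (bit_pos >= SET_WORD_SIZE || ibit == nbits)
	{
	  out[nwords++] = word;	/* analogue of emit_move_insn */
	  if (ibit == nbits)
	    break;
	  word = 0;
	  bit_pos = 0;
	}
    }
}
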
4084
4085/* Store the value of EXP (an expression tree)
4086 into a subfield of TARGET which has mode MODE and occupies
4087 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4088 If MODE is VOIDmode, it means that we are storing into a bit-field.
4089
4090 If VALUE_MODE is VOIDmode, return nothing in particular.
4091 UNSIGNEDP is not used in this case.
4092
4093 Otherwise, return an rtx for the value stored. This rtx
4094 has mode VALUE_MODE if that is convenient to do.
4095 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4096
4097 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4098 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
4099
4100static rtx
4101store_field (target, bitsize, bitpos, mode, exp, value_mode,
4102 unsignedp, align, total_size)
4103 rtx target;
4104 int bitsize, bitpos;
4105 enum machine_mode mode;
4106 tree exp;
4107 enum machine_mode value_mode;
4108 int unsignedp;
4109 int align;
4110 int total_size;
4111{
906c4e36 4112 HOST_WIDE_INT width_mask = 0;
bbf6f052 4113
e9a25f70
JL
4114 if (TREE_CODE (exp) == ERROR_MARK)
4115 return const0_rtx;
4116
906c4e36
RK
4117 if (bitsize < HOST_BITS_PER_WIDE_INT)
4118 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
4119
4120 /* If we are storing into an unaligned field of an aligned union that is
4121 in a register, we may have the mode of TARGET being an integer mode but
4122 MODE == BLKmode. In that case, get an aligned object whose size and
4123 alignment are the same as TARGET and store TARGET into it (we can avoid
4124 the store if the field being stored is the entire width of TARGET). Then
4125 call ourselves recursively to store the field into a BLKmode version of
4126 that object. Finally, load from the object into TARGET. This is not
4127 very efficient in general, but should only be slightly more expensive
4128 than the otherwise-required unaligned accesses. Perhaps this can be
4129 cleaned up later. */
4130
4131 if (mode == BLKmode
4132 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4133 {
4134 rtx object = assign_stack_temp (GET_MODE (target),
4135 GET_MODE_SIZE (GET_MODE (target)), 0);
4136 rtx blk_object = copy_rtx (object);
4137
24a13950
JW
4138 MEM_IN_STRUCT_P (object) = 1;
4139 MEM_IN_STRUCT_P (blk_object) = 1;
bbf6f052
RK
4140 PUT_MODE (blk_object, BLKmode);
4141
4142 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4143 emit_move_insn (object, target);
4144
4145 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4146 align, total_size);
4147
46093b97
RS
4148 /* Even though we aren't returning target, we need to
4149 give it the updated value. */
bbf6f052
RK
4150 emit_move_insn (target, object);
4151
46093b97 4152 return blk_object;
bbf6f052
RK
4153 }
4154
4155 /* If the structure is in a register or if the component
4156 is a bit field, we cannot use addressing to access it.
4157 Use bit-field techniques or SUBREG to store in it. */
4158
4fa52007
RK
4159 if (mode == VOIDmode
4160 || (mode != BLKmode && ! direct_store[(int) mode])
4161 || GET_CODE (target) == REG
c980ac49 4162 || GET_CODE (target) == SUBREG
ccc98036
RS
4163 /* If the field isn't aligned enough to store as an ordinary memref,
4164 store it as a bit field. */
c7a7ac46 4165 || (SLOW_UNALIGNED_ACCESS
ccc98036 4166 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
c7a7ac46 4167 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
bbf6f052 4168 {
906c4e36 4169 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 4170
ef19912d
RK
4171 /* If BITSIZE is narrower than the size of the type of EXP
4172 we will be narrowing TEMP. Normally, what's wanted are the
4173 low-order bits. However, if EXP's type is a record and this is a
4174 big-endian machine, we want the upper BITSIZE bits. */
4175 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4176 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4177 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4178 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4179 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4180 - bitsize),
4181 temp, 1);
4182
bbd6cf73
RK
4183 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4184 MODE. */
4185 if (mode != VOIDmode && mode != BLKmode
4186 && mode != TYPE_MODE (TREE_TYPE (exp)))
4187 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4188
a281e72d
RK
4189 /* If the modes of TARGET and TEMP are both BLKmode, both
4190 must be in memory and BITPOS must be aligned on a byte
4191 boundary. If so, we simply do a block copy. */
4192 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4193 {
4194 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4195 || bitpos % BITS_PER_UNIT != 0)
4196 abort ();
4197
0086427c
RK
4198 target = change_address (target, VOIDmode,
4199 plus_constant (XEXP (target, 0),
a281e72d
RK
4200 bitpos / BITS_PER_UNIT));
4201
4202 emit_block_move (target, temp,
4203 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4204 / BITS_PER_UNIT),
4205 1);
4206
4207 return value_mode == VOIDmode ? const0_rtx : target;
4208 }
4209
bbf6f052
RK
4210 /* Store the value in the bitfield. */
4211 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4212 if (value_mode != VOIDmode)
4213 {
4214 /* The caller wants an rtx for the value. */
4215 /* If possible, avoid refetching from the bitfield itself. */
4216 if (width_mask != 0
4217 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 4218 {
9074de27 4219 tree count;
5c4d7cfb 4220 enum machine_mode tmode;
86a2c12a 4221
5c4d7cfb
RS
4222 if (unsignedp)
4223 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4224 tmode = GET_MODE (temp);
86a2c12a
RS
4225 if (tmode == VOIDmode)
4226 tmode = value_mode;
5c4d7cfb
RS
4227 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4228 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4229 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4230 }
bbf6f052 4231 return extract_bit_field (target, bitsize, bitpos, unsignedp,
906c4e36
RK
4232 NULL_RTX, value_mode, 0, align,
4233 total_size);
bbf6f052
RK
4234 }
4235 return const0_rtx;
4236 }
4237 else
4238 {
4239 rtx addr = XEXP (target, 0);
4240 rtx to_rtx;
4241
4242 /* If a value is wanted, it must be the lhs;
4243 so make the address stable for multiple use. */
4244
4245 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4246 && ! CONSTANT_ADDRESS_P (addr)
4247 /* A frame-pointer reference is already stable. */
4248 && ! (GET_CODE (addr) == PLUS
4249 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4250 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4251 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4252 addr = copy_to_reg (addr);
4253
4254 /* Now build a reference to just the desired component. */
4255
effbcc6a
RK
4256 to_rtx = copy_rtx (change_address (target, mode,
4257 plus_constant (addr,
4258 (bitpos
4259 / BITS_PER_UNIT))));
bbf6f052
RK
4260 MEM_IN_STRUCT_P (to_rtx) = 1;
4261
4262 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4263 }
4264}
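
/* Sketch of the refetch-avoidance trick above: after storing a
   BITSIZE-bit field taken from TEMP, the stored value can be reproduced
   without reloading from the bit-field -- by masking for unsigned
   fields, or by a left/right shift pair for signed ones.  Hypothetical
   standalone version; assumes a 32-bit int, bitsize < 32, and (as the
   RTL idiom does) an arithmetic right shift of negative values.  */
static unsigned
zero_extend_field (unsigned temp, int bitsize)
{
  unsigned width_mask = (1u << bitsize) - 1;	/* cf. width_mask above */
  return temp & width_mask;
}

static int
sign_extend_field (int temp, int bitsize)
{
  int count = 32 - bitsize;	/* cf. the LSHIFT/RSHIFT pair above */
  return ((int) ((unsigned) temp << count)) >> count;
}
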
4265\f
4266/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4267 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
742920c7 4268 ARRAY_REFs and find the ultimate containing object, which we return.
bbf6f052
RK
4269
4270 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4271 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
4272 If the position of the field is variable, we store a tree
4273 giving the variable offset (in units) in *POFFSET.
4274 This offset is in addition to the bit position.
4275 If the position is not variable, we store 0 in *POFFSET.
839c4796
RK
4276 We set *PALIGNMENT to the alignment in bytes of the address that will be
4277 computed. This is the alignment of the thing we return if *POFFSET
4278 is zero, but can be less strictly aligned if *POFFSET is nonzero.
bbf6f052
RK
4279
4280 If any of the extraction expressions is volatile,
4281 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4282
4283 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4284 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
4285 is redundant.
4286
4287 If the field describes a variable-sized object, *PMODE is set to
4288 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
839c4796 4289 this case, but the address of the object can be found. */
bbf6f052
RK
4290
4291tree
4969d05d 4292get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
839c4796 4293 punsignedp, pvolatilep, palignment)
bbf6f052
RK
4294 tree exp;
4295 int *pbitsize;
4296 int *pbitpos;
7bb0943f 4297 tree *poffset;
bbf6f052
RK
4298 enum machine_mode *pmode;
4299 int *punsignedp;
4300 int *pvolatilep;
839c4796 4301 int *palignment;
bbf6f052 4302{
b50d17a1 4303 tree orig_exp = exp;
bbf6f052
RK
4304 tree size_tree = 0;
4305 enum machine_mode mode = VOIDmode;
742920c7 4306 tree offset = integer_zero_node;
839c4796 4307 int alignment = BIGGEST_ALIGNMENT;
bbf6f052
RK
4308
4309 if (TREE_CODE (exp) == COMPONENT_REF)
4310 {
4311 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4312 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4313 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4314 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4315 }
4316 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4317 {
4318 size_tree = TREE_OPERAND (exp, 1);
4319 *punsignedp = TREE_UNSIGNED (exp);
4320 }
4321 else
4322 {
4323 mode = TYPE_MODE (TREE_TYPE (exp));
4324 *pbitsize = GET_MODE_BITSIZE (mode);
4325 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4326 }
4327
4328 if (size_tree)
4329 {
4330 if (TREE_CODE (size_tree) != INTEGER_CST)
e7c33f54
RK
4331 mode = BLKmode, *pbitsize = -1;
4332 else
4333 *pbitsize = TREE_INT_CST_LOW (size_tree);
bbf6f052
RK
4334 }
4335
4336 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4337 and find the ultimate containing object. */
4338
4339 *pbitpos = 0;
4340
4341 while (1)
4342 {
7bb0943f 4343 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
bbf6f052 4344 {
7bb0943f
RS
4345 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4346 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4347 : TREE_OPERAND (exp, 2));
e6d8c385 4348 tree constant = integer_zero_node, var = pos;
bbf6f052 4349
e7f3c83f
RK
4350 /* If this field hasn't been filled in yet, don't go
4351 past it. This should only happen when folding expressions
4352 made during type construction. */
4353 if (pos == 0)
4354 break;
4355
e6d8c385
RK
4356 /* Assume here that the offset is a multiple of a unit.
4357 If not, there should be an explicitly added constant. */
4358 if (TREE_CODE (pos) == PLUS_EXPR
4359 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4360 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
7bb0943f 4361 else if (TREE_CODE (pos) == INTEGER_CST)
e6d8c385
RK
4362 constant = pos, var = integer_zero_node;
4363
4364 *pbitpos += TREE_INT_CST_LOW (constant);
8d8c9ba9
RK
4365 offset = size_binop (PLUS_EXPR, offset,
4366 size_binop (EXACT_DIV_EXPR, var,
4367 size_int (BITS_PER_UNIT)));
bbf6f052 4368 }
bbf6f052 4369
742920c7 4370 else if (TREE_CODE (exp) == ARRAY_REF)
bbf6f052 4371 {
742920c7
RK
4372 /* This code is based on the code in case ARRAY_REF in expand_expr
4373 below. We assume here that the size of an array element is
4374 always an integral multiple of BITS_PER_UNIT. */
4375
4376 tree index = TREE_OPERAND (exp, 1);
4377 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4378 tree low_bound
4379 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4380 tree index_type = TREE_TYPE (index);
4381
4c08eef0 4382 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
742920c7 4383 {
4c08eef0
RK
4384 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4385 index);
742920c7
RK
4386 index_type = TREE_TYPE (index);
4387 }
4388
ca0f2220
RH
4389 if (! integer_zerop (low_bound))
4390 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4391
f8dac6eb
R
4392 if (TREE_CODE (index) == INTEGER_CST)
4393 {
4394 index = convert (sbitsizetype, index);
4395 index_type = TREE_TYPE (index);
4396 }
4397
4398 index = fold (build (MULT_EXPR, sbitsizetype, index,
4399 convert (sbitsizetype,
0d15e60c 4400 TYPE_SIZE (TREE_TYPE (exp)))));
742920c7
RK
4401
4402 if (TREE_CODE (index) == INTEGER_CST
4403 && TREE_INT_CST_HIGH (index) == 0)
4404 *pbitpos += TREE_INT_CST_LOW (index);
4405 else
956d6950 4406 {
e5e809f4
JL
4407 if (contains_placeholder_p (index))
4408 index = build (WITH_RECORD_EXPR, sizetype, index, exp);
4409
956d6950 4410 offset = size_binop (PLUS_EXPR, offset,
e5e809f4
JL
4411 size_binop (FLOOR_DIV_EXPR, index,
4412 size_int (BITS_PER_UNIT)));
956d6950 4413 }
bbf6f052
RK
4414 }
4415 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4416 && ! ((TREE_CODE (exp) == NOP_EXPR
4417 || TREE_CODE (exp) == CONVERT_EXPR)
7f62854a
RK
4418 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4419 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4420 != UNION_TYPE))
bbf6f052
RK
4421 && (TYPE_MODE (TREE_TYPE (exp))
4422 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4423 break;
7bb0943f
RS
4424
4425 /* If any reference in the chain is volatile, the effect is volatile. */
4426 if (TREE_THIS_VOLATILE (exp))
4427 *pvolatilep = 1;
839c4796
RK
4428
4429 /* If the offset is non-constant already, then we can't assume any
4430 alignment more than the alignment here. */
4431 if (! integer_zerop (offset))
4432 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4433
bbf6f052
RK
4434 exp = TREE_OPERAND (exp, 0);
4435 }
4436
839c4796
RK
4437 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4438 alignment = MIN (alignment, DECL_ALIGN (exp));
9293498f 4439 else if (TREE_TYPE (exp) != 0)
839c4796
RK
4440 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4441
742920c7
RK
4442 if (integer_zerop (offset))
4443 offset = 0;
4444
b50d17a1
RK
4445 if (offset != 0 && contains_placeholder_p (offset))
4446 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4447
bbf6f052 4448 *pmode = mode;
7bb0943f 4449 *poffset = offset;
839c4796 4450 *palignment = alignment / BITS_PER_UNIT;
bbf6f052
RK
4451 return exp;
4452}
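
/* Sketch of what get_inner_reference accumulates for a nested reference
   such as a[i].f with a constant index: the array step and the field's
   position within the element both land in *PBITPOS, while variable
   parts would go to *POFFSET in bytes.  Hypothetical flattened form.  */
static int
bit_position (int field_bitpos, int index, int elt_size_bits)
{
  /* Array step first, then the field offset within the element.  */
  return index * elt_size_bits + field_bitpos;
}
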
921b3427
RK
4453
4454/* Subroutine of expand_expr: compute memory_usage from modifier. */
4455static enum memory_use_mode
4456get_memory_usage_from_modifier (modifier)
4457 enum expand_modifier modifier;
4458{
4459 switch (modifier)
4460 {
4461 case EXPAND_NORMAL:
e5e809f4 4462 case EXPAND_SUM:
921b3427
RK
4463 return MEMORY_USE_RO;
4464 break;
4465 case EXPAND_MEMORY_USE_WO:
4466 return MEMORY_USE_WO;
4467 break;
4468 case EXPAND_MEMORY_USE_RW:
4469 return MEMORY_USE_RW;
4470 break;
921b3427 4471 case EXPAND_MEMORY_USE_DONT:
e5e809f4
JL
4472 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4473 MEMORY_USE_DONT, because they are modifiers to a call of
4474 expand_expr in the ADDR_EXPR case of expand_expr. */
921b3427 4475 case EXPAND_CONST_ADDRESS:
e5e809f4 4476 case EXPAND_INITIALIZER:
921b3427
RK
4477 return MEMORY_USE_DONT;
4478 case EXPAND_MEMORY_USE_BAD:
4479 default:
4480 abort ();
4481 }
4482}
bbf6f052
RK
4483\f
4484/* Given an rtx VALUE that may contain additions and multiplications,
4485 return an equivalent value that just refers to a register or memory.
4486 This is done by generating instructions to perform the arithmetic
c45a13a6
RK
4487 and returning a pseudo-register containing the value.
4488
4489 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
4490
4491rtx
4492force_operand (value, target)
4493 rtx value, target;
4494{
4495 register optab binoptab = 0;
4496 /* Use a temporary to force order of execution of calls to
4497 `force_operand'. */
4498 rtx tmp;
4499 register rtx op2;
4500 /* Use subtarget as the target for operand 0 of a binary operation. */
4501 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4502
4503 if (GET_CODE (value) == PLUS)
4504 binoptab = add_optab;
4505 else if (GET_CODE (value) == MINUS)
4506 binoptab = sub_optab;
4507 else if (GET_CODE (value) == MULT)
4508 {
4509 op2 = XEXP (value, 1);
4510 if (!CONSTANT_P (op2)
4511 && !(GET_CODE (op2) == REG && op2 != subtarget))
4512 subtarget = 0;
4513 tmp = force_operand (XEXP (value, 0), subtarget);
4514 return expand_mult (GET_MODE (value), tmp,
906c4e36 4515 force_operand (op2, NULL_RTX),
bbf6f052
RK
4516 target, 0);
4517 }
4518
4519 if (binoptab)
4520 {
4521 op2 = XEXP (value, 1);
4522 if (!CONSTANT_P (op2)
4523 && !(GET_CODE (op2) == REG && op2 != subtarget))
4524 subtarget = 0;
4525 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4526 {
4527 binoptab = add_optab;
4528 op2 = negate_rtx (GET_MODE (value), op2);
4529 }
4530
4531 /* Check for an addition with OP2 a constant integer and our first
4532 operand a PLUS of a virtual register and something else. In that
4533 case, we want to emit the sum of the virtual register and the
4534 constant first and then add the other value. This allows virtual
4535 register instantiation to simply modify the constant rather than
4536 creating another one around this addition. */
4537 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4538 && GET_CODE (XEXP (value, 0)) == PLUS
4539 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4540 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4541 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4542 {
4543 rtx temp = expand_binop (GET_MODE (value), binoptab,
4544 XEXP (XEXP (value, 0), 0), op2,
4545 subtarget, 0, OPTAB_LIB_WIDEN);
4546 return expand_binop (GET_MODE (value), binoptab, temp,
4547 force_operand (XEXP (XEXP (value, 0), 1), 0),
4548 target, 0, OPTAB_LIB_WIDEN);
4549 }
4550
4551 tmp = force_operand (XEXP (value, 0), subtarget);
4552 return expand_binop (GET_MODE (value), binoptab, tmp,
906c4e36 4553 force_operand (op2, NULL_RTX),
bbf6f052 4554 target, 0, OPTAB_LIB_WIDEN);
8008b228 4555 /* We give UNSIGNEDP = 0 to expand_binop
bbf6f052
RK
4556 because the only operations we are expanding here are signed ones. */
4557 }
4558 return value;
4559}
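
/* Sketch of the reassociation special-cased above: an addition of the
   form (base + c1) + c2 is emitted as base + (c1 + c2), so that virtual
   register instantiation only has to patch a single constant rather
   than a chain of adds.  Hypothetical symbolic version.  */
struct sum { int base_reg; long offset; };

static struct sum
add_const (struct sum s, long c)
{
  s.offset += c;	/* fold the constant instead of chaining adds */
  return s;
}
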
4560\f
4561/* Subroutine of expand_expr:
4562 save the non-copied parts (LIST) of an expr (LHS), and return a list
4563 which can restore these values to their previous values,
4564 should something modify their storage. */
4565
4566static tree
4567save_noncopied_parts (lhs, list)
4568 tree lhs;
4569 tree list;
4570{
4571 tree tail;
4572 tree parts = 0;
4573
4574 for (tail = list; tail; tail = TREE_CHAIN (tail))
4575 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4576 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4577 else
4578 {
4579 tree part = TREE_VALUE (tail);
4580 tree part_type = TREE_TYPE (part);
906c4e36 4581 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
06089a8b 4582 rtx target = assign_temp (part_type, 0, 1, 1);
bbf6f052 4583 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
906c4e36 4584 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
bbf6f052 4585 parts = tree_cons (to_be_saved,
906c4e36
RK
4586 build (RTL_EXPR, part_type, NULL_TREE,
4587 (tree) target),
bbf6f052
RK
4588 parts);
4589 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4590 }
4591 return parts;
4592}
4593
4594/* Subroutine of expand_expr:
4595 record the non-copied parts (LIST) of an expr (LHS), and return a list
4596 which specifies the initial values of these parts. */
4597
4598static tree
4599init_noncopied_parts (lhs, list)
4600 tree lhs;
4601 tree list;
4602{
4603 tree tail;
4604 tree parts = 0;
4605
4606 for (tail = list; tail; tail = TREE_CHAIN (tail))
4607 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4608 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4609 else
4610 {
4611 tree part = TREE_VALUE (tail);
4612 tree part_type = TREE_TYPE (part);
906c4e36 4613 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
4614 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4615 }
4616 return parts;
4617}
4618
4619/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
4620 EXP can reference X, which is being modified. TOP_P is nonzero if this
4621 call is going to be used to determine whether we need a temporary
4622 for EXP, as opposed to a recursive call to this function. */
bbf6f052
RK
4623
4624static int
e5e809f4 4625safe_from_p (x, exp, top_p)
bbf6f052
RK
4626 rtx x;
4627 tree exp;
e5e809f4 4628 int top_p;
bbf6f052
RK
4629{
4630 rtx exp_rtl = 0;
4631 int i, nops;
4632
6676e72f
RK
4633 if (x == 0
4634 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
4635 have no way of allocating temporaries of variable size
4636 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4637 So we assume here that something at a higher level has prevented a
f4510f37 4638 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4
JL
4639 do this when X is BLKmode and when we are at the top level. */
4640 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
f4510f37 4641 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
4642 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4643 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4644 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4645 != INTEGER_CST)
f4510f37 4646 && GET_MODE (x) == BLKmode))
bbf6f052
RK
4647 return 1;
4648
4649 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4650 find the underlying pseudo. */
4651 if (GET_CODE (x) == SUBREG)
4652 {
4653 x = SUBREG_REG (x);
4654 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4655 return 0;
4656 }
4657
4658 /* If X is a location in the outgoing argument area, it is always safe. */
4659 if (GET_CODE (x) == MEM
4660 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4661 || (GET_CODE (XEXP (x, 0)) == PLUS
4662 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4663 return 1;
4664
4665 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4666 {
4667 case 'd':
4668 exp_rtl = DECL_RTL (exp);
4669 break;
4670
4671 case 'c':
4672 return 1;
4673
4674 case 'x':
4675 if (TREE_CODE (exp) == TREE_LIST)
f32fd778 4676 return ((TREE_VALUE (exp) == 0
e5e809f4 4677 || safe_from_p (x, TREE_VALUE (exp), 0))
bbf6f052 4678 && (TREE_CHAIN (exp) == 0
e5e809f4 4679 || safe_from_p (x, TREE_CHAIN (exp), 0)));
bbf6f052
RK
4680 else
4681 return 0;
4682
4683 case '1':
e5e809f4 4684 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052
RK
4685
4686 case '2':
4687 case '<':
e5e809f4
JL
4688 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
4689 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
bbf6f052
RK
4690
4691 case 'e':
4692 case 'r':
4693 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4694 the expression. If it is set, we conflict iff we are that rtx or
4695 both are in memory. Otherwise, we check all operands of the
4696 expression recursively. */
4697
4698 switch (TREE_CODE (exp))
4699 {
4700 case ADDR_EXPR:
e44842fe 4701 return (staticp (TREE_OPERAND (exp, 0))
e5e809f4
JL
4702 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
4703 || TREE_STATIC (exp));
bbf6f052
RK
4704
4705 case INDIRECT_REF:
4706 if (GET_CODE (x) == MEM)
4707 return 0;
4708 break;
4709
4710 case CALL_EXPR:
4711 exp_rtl = CALL_EXPR_RTL (exp);
4712 if (exp_rtl == 0)
4713 {
4714 /* Assume that the call will clobber all hard registers and
4715 all of memory. */
4716 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4717 || GET_CODE (x) == MEM)
4718 return 0;
4719 }
4720
4721 break;
4722
4723 case RTL_EXPR:
3bb5826a
RK
4724 /* If a sequence exists, we would have to scan every instruction
4725 in the sequence to see if it was safe. This is probably not
4726 worthwhile. */
4727 if (RTL_EXPR_SEQUENCE (exp))
bbf6f052
RK
4728 return 0;
4729
3bb5826a 4730 exp_rtl = RTL_EXPR_RTL (exp);
bbf6f052
RK
4731 break;
4732
4733 case WITH_CLEANUP_EXPR:
4734 exp_rtl = RTL_EXPR_RTL (exp);
4735 break;
4736
5dab5552 4737 case CLEANUP_POINT_EXPR:
e5e809f4 4738 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5dab5552 4739
bbf6f052
RK
4740 case SAVE_EXPR:
4741 exp_rtl = SAVE_EXPR_RTL (exp);
4742 break;
4743
8129842c
RS
4744 case BIND_EXPR:
4745 /* The only operand we look at is operand 1. The rest aren't
4746 part of the expression. */
e5e809f4 4747 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
8129842c 4748
bbf6f052 4749 case METHOD_CALL_EXPR:
0f41302f 4750 /* This takes an rtx argument, but shouldn't appear here. */
bbf6f052 4751 abort ();
e9a25f70
JL
4752
4753 default:
4754 break;
bbf6f052
RK
4755 }
4756
4757 /* If we have an rtx, we do not need to scan our operands. */
4758 if (exp_rtl)
4759 break;
4760
4761 nops = tree_code_length[(int) TREE_CODE (exp)];
4762 for (i = 0; i < nops; i++)
4763 if (TREE_OPERAND (exp, i) != 0
e5e809f4 4764 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052
RK
4765 return 0;
4766 }
4767
4768 /* If we have an rtl, find any enclosed object. Then see if we conflict
4769 with it. */
4770 if (exp_rtl)
4771 {
4772 if (GET_CODE (exp_rtl) == SUBREG)
4773 {
4774 exp_rtl = SUBREG_REG (exp_rtl);
4775 if (GET_CODE (exp_rtl) == REG
4776 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4777 return 0;
4778 }
4779
4780 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4781 are memory and EXP is not readonly. */
4782 return ! (rtx_equal_p (x, exp_rtl)
4783 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4784 && ! TREE_READONLY (exp)));
4785 }
4786
4787 /* If we reach here, it is safe. */
4788 return 1;
4789}
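
/* Sketch of the final conflict test in safe_from_p: X conflicts with
   EXP's rtl iff they are the same object, or both live in memory and
   EXP is writable.  Hypothetical boolean form of the rule.  */
static int
conflicts (int same_rtx, int x_in_mem, int exp_in_mem, int exp_readonly)
{
  return same_rtx || (x_in_mem && exp_in_mem && ! exp_readonly);
}
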
4790
4791/* Subroutine of expand_expr: return nonzero iff EXP is an
4792 expression whose type is statically determinable. */
4793
4794static int
4795fixed_type_p (exp)
4796 tree exp;
4797{
4798 if (TREE_CODE (exp) == PARM_DECL
4799 || TREE_CODE (exp) == VAR_DECL
4800 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4801 || TREE_CODE (exp) == COMPONENT_REF
4802 || TREE_CODE (exp) == ARRAY_REF)
4803 return 1;
4804 return 0;
4805}
01c8a7c8
RK
4806
4807/* Subroutine of expand_expr: return rtx if EXP is a
4808 variable or parameter; else return 0. */
4809
4810static rtx
4811var_rtx (exp)
4812 tree exp;
4813{
4814 STRIP_NOPS (exp);
4815 switch (TREE_CODE (exp))
4816 {
4817 case PARM_DECL:
4818 case VAR_DECL:
4819 return DECL_RTL (exp);
4820 default:
4821 return 0;
4822 }
4823}
bbf6f052
RK
4824\f
4825/* expand_expr: generate code for computing expression EXP.
4826 An rtx for the computed value is returned. The value is never null.
4827 In the case of a void EXP, const0_rtx is returned.
4828
4829 The value may be stored in TARGET if TARGET is nonzero.
4830 TARGET is just a suggestion; callers must assume that
4831 the rtx returned may not be the same as TARGET.
4832
4833 If TARGET is CONST0_RTX, it means that the value will be ignored.
4834
4835 If TMODE is not VOIDmode, it suggests generating the
4836 result in mode TMODE. But this is done only when convenient.
4837 Otherwise, TMODE is ignored and the value is generated in its natural mode.
4838 TMODE is just a suggestion; callers must assume that
4839 the rtx returned may not have mode TMODE.
4840
d6a5ac33
RK
4841 Note that TARGET may have neither TMODE nor MODE. In that case, it
4842 probably will not be used.
bbf6f052
RK
4843
4844 If MODIFIER is EXPAND_SUM then when EXP is an addition
4845 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4846 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4847 products as above, or REG or MEM, or constant.
4848 Ordinarily in such cases we would output mul or add instructions
4849 and then return a pseudo reg containing the sum.
4850
4851 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4852 it also marks a label as absolutely required (it can't be dead).
26fcb35a 4853 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
4854 This is used for outputting expressions used in initializers.
4855
4856 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4857 with a constant address even if that address is not normally legitimate.
4858 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
bbf6f052
RK
4859
4860rtx
4861expand_expr (exp, target, tmode, modifier)
4862 register tree exp;
4863 rtx target;
4864 enum machine_mode tmode;
4865 enum expand_modifier modifier;
4866{
b50d17a1
RK
4867 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4868 This is static so it will be accessible to our recursive callees. */
4869 static tree placeholder_list = 0;
bbf6f052
RK
4870 register rtx op0, op1, temp;
4871 tree type = TREE_TYPE (exp);
4872 int unsignedp = TREE_UNSIGNED (type);
4873 register enum machine_mode mode = TYPE_MODE (type);
4874 register enum tree_code code = TREE_CODE (exp);
4875 optab this_optab;
4876 /* Use subtarget as the target for operand 0 of a binary operation. */
4877 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4878 rtx original_target = target;
dd27116b
RK
4879 int ignore = (target == const0_rtx
4880 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4d87de75
RS
4881 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4882 || code == COND_EXPR)
dd27116b 4883 && TREE_CODE (type) == VOID_TYPE));
bbf6f052 4884 tree context;
921b3427
RK
4885 /* Used by check-memory-usage to make modifier read only. */
4886 enum expand_modifier ro_modifier;
bbf6f052 4887
921b3427
RK
4888 /* Make a read-only version of the modifier. */
4889 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
4890 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
4891 ro_modifier = modifier;
4892 else
4893 ro_modifier = EXPAND_NORMAL;
ca695ac9 4894
bbf6f052
RK
4895 /* Don't use hard regs as subtargets, because the combiner
4896 can only handle pseudo regs. */
4897 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4898 subtarget = 0;
4899 /* Avoid subtargets inside loops,
4900 since they hide some invariant expressions. */
4901 if (preserve_subexpressions_p ())
4902 subtarget = 0;
4903
dd27116b
RK
4904 /* If we are going to ignore this result, we need only do something
4905 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
4906 is, short-circuit the most common cases here. Note that we must
4907 not call expand_expr with anything but const0_rtx in case this
4908 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 4909
dd27116b
RK
4910 if (ignore)
4911 {
4912 if (! TREE_SIDE_EFFECTS (exp))
4913 return const0_rtx;
4914
4915 /* Ensure we reference a volatile object even if value is ignored. */
4916 if (TREE_THIS_VOLATILE (exp)
4917 && TREE_CODE (exp) != FUNCTION_DECL
4918 && mode != VOIDmode && mode != BLKmode)
4919 {
921b3427 4920 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
dd27116b
RK
4921 if (GET_CODE (temp) == MEM)
4922 temp = copy_to_reg (temp);
4923 return const0_rtx;
4924 }
4925
4926 if (TREE_CODE_CLASS (code) == '1')
4927 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 4928 VOIDmode, ro_modifier);
dd27116b
RK
4929 else if (TREE_CODE_CLASS (code) == '2'
4930 || TREE_CODE_CLASS (code) == '<')
4931 {
921b3427
RK
4932 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
4933 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
dd27116b
RK
4934 return const0_rtx;
4935 }
4936 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4937 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4938 /* If the second operand has no side effects, just evaluate
0f41302f 4939 the first. */
dd27116b 4940 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 4941 VOIDmode, ro_modifier);
dd27116b 4942
90764a87 4943 target = 0;
dd27116b 4944 }
bbf6f052 4945
e44842fe
RK
4946 /* If we will do cse, generate all results into pseudo registers
4947 since 1) that allows cse to find more things
4948 and 2) otherwise cse could produce an insn the machine
4949 cannot support. */
4950
bbf6f052
RK
4951 if (! cse_not_expected && mode != BLKmode && target
4952 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4953 target = subtarget;
4954
bbf6f052
RK
4955 switch (code)
4956 {
4957 case LABEL_DECL:
b552441b
RS
4958 {
4959 tree function = decl_function_context (exp);
4960 /* Handle using a label in a containing function. */
d0977240
RK
4961 if (function != current_function_decl
4962 && function != inline_function_decl && function != 0)
b552441b
RS
4963 {
4964 struct function *p = find_function_data (function);
4965 /* Allocate in the memory associated with the function
4966 that the label is in. */
4967 push_obstacks (p->function_obstack,
4968 p->function_maybepermanent_obstack);
4969
38a448ca
RH
4970 p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
4971 label_rtx (exp),
4972 p->forced_labels);
b552441b
RS
4973 pop_obstacks ();
4974 }
4975 else if (modifier == EXPAND_INITIALIZER)
38a448ca
RH
4976 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
4977 label_rtx (exp), forced_labels);
4978 temp = gen_rtx_MEM (FUNCTION_MODE,
4979 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
d0977240
RK
4980 if (function != current_function_decl
4981 && function != inline_function_decl && function != 0)
26fcb35a
RS
4982 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4983 return temp;
b552441b 4984 }
bbf6f052
RK
4985
4986 case PARM_DECL:
4987 if (DECL_RTL (exp) == 0)
4988 {
4989 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 4990 return CONST0_RTX (mode);
bbf6f052
RK
4991 }
4992
0f41302f 4993 /* ... fall through ... */
d6a5ac33 4994
bbf6f052 4995 case VAR_DECL:
2dca20cd
RS
4996 /* If a static var's type was incomplete when the decl was written,
4997 but the type is complete now, lay out the decl now. */
4998 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4999 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5000 {
5001 push_obstacks_nochange ();
5002 end_temporary_allocation ();
5003 layout_decl (exp, 0);
5004 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5005 pop_obstacks ();
5006 }
d6a5ac33 5007
921b3427
RK
5008 /* Only check automatic variables. Currently, function arguments are
5009 not checked (this can be done at compile-time with prototypes).
5010 Aggregates are not checked. */
5011 if (flag_check_memory_usage && code == VAR_DECL
5012 && GET_CODE (DECL_RTL (exp)) == MEM
5013 && DECL_CONTEXT (exp) != NULL_TREE
5014 && ! TREE_STATIC (exp)
5015 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5016 {
5017 enum memory_use_mode memory_usage;
5018 memory_usage = get_memory_usage_from_modifier (modifier);
5019
5020 if (memory_usage != MEMORY_USE_DONT)
5021 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5022 XEXP (DECL_RTL (exp), 0), ptr_mode,
5023 GEN_INT (int_size_in_bytes (type)),
5024 TYPE_MODE (sizetype),
956d6950
JL
5025 GEN_INT (memory_usage),
5026 TYPE_MODE (integer_type_node));
921b3427
RK
5027 }
5028
0f41302f 5029 /* ... fall through ... */
d6a5ac33 5030
2dca20cd 5031 case FUNCTION_DECL:
bbf6f052
RK
5032 case RESULT_DECL:
5033 if (DECL_RTL (exp) == 0)
5034 abort ();
d6a5ac33 5035
e44842fe
RK
 5036 /* Ensure the variable is marked as used even if it doesn't go through
 5037 a parser. If it hasn't been used yet, write out an external
 5038 definition. */
5039 if (! TREE_USED (exp))
5040 {
5041 assemble_external (exp);
5042 TREE_USED (exp) = 1;
5043 }
5044
dc6d66b3
RK
5045 /* Show we haven't gotten RTL for this yet. */
5046 temp = 0;
5047
bbf6f052
RK
5048 /* Handle variables inherited from containing functions. */
5049 context = decl_function_context (exp);
5050
5051 /* We treat inline_function_decl as an alias for the current function
5052 because that is the inline function whose vars, types, etc.
5053 are being merged into the current function.
5054 See expand_inline_function. */
d6a5ac33 5055
bbf6f052
RK
5056 if (context != 0 && context != current_function_decl
5057 && context != inline_function_decl
5058 /* If var is static, we don't need a static chain to access it. */
5059 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5060 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5061 {
5062 rtx addr;
5063
5064 /* Mark as non-local and addressable. */
81feeecb 5065 DECL_NONLOCAL (exp) = 1;
38ee6ed9
JM
5066 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5067 abort ();
bbf6f052
RK
5068 mark_addressable (exp);
5069 if (GET_CODE (DECL_RTL (exp)) != MEM)
5070 abort ();
5071 addr = XEXP (DECL_RTL (exp), 0);
5072 if (GET_CODE (addr) == MEM)
38a448ca
RH
5073 addr = gen_rtx_MEM (Pmode,
5074 fix_lexical_addr (XEXP (addr, 0), exp));
bbf6f052
RK
5075 else
5076 addr = fix_lexical_addr (addr, exp);
dc6d66b3 5077 temp = change_address (DECL_RTL (exp), mode, addr);
bbf6f052 5078 }
4af3895e 5079
bbf6f052
RK
5080 /* This is the case of an array whose size is to be determined
5081 from its initializer, while the initializer is still being parsed.
5082 See expand_decl. */
d6a5ac33 5083
dc6d66b3
RK
5084 else if (GET_CODE (DECL_RTL (exp)) == MEM
5085 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5086 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
bbf6f052 5087 XEXP (DECL_RTL (exp), 0));
d6a5ac33
RK
5088
 5089 /* If DECL_RTL is memory, we are in the normal case; if either
 5090 the address is not valid, or it is not a register and -fforce-addr
 5091 is specified, get the address into a register. */
5092
dc6d66b3
RK
5093 else if (GET_CODE (DECL_RTL (exp)) == MEM
5094 && modifier != EXPAND_CONST_ADDRESS
5095 && modifier != EXPAND_SUM
5096 && modifier != EXPAND_INITIALIZER
5097 && (! memory_address_p (DECL_MODE (exp),
5098 XEXP (DECL_RTL (exp), 0))
5099 || (flag_force_addr
5100 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5101 temp = change_address (DECL_RTL (exp), VOIDmode,
d6a5ac33 5102 copy_rtx (XEXP (DECL_RTL (exp), 0)));
1499e0a8 5103
dc6d66b3
RK
5104 /* If we got something, return it. But first, set the alignment
 5105 if the address is a register. */
5106 if (temp != 0)
5107 {
5108 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5109 mark_reg_pointer (XEXP (temp, 0),
5110 DECL_ALIGN (exp) / BITS_PER_UNIT);
5111
5112 return temp;
5113 }
5114
1499e0a8
RK
5115 /* If the mode of DECL_RTL does not match that of the decl, it
5116 must be a promoted value. We return a SUBREG of the wanted mode,
5117 but mark it so that we know that it was already extended. */
5118
5119 if (GET_CODE (DECL_RTL (exp)) == REG
5120 && GET_MODE (DECL_RTL (exp)) != mode)
5121 {
1499e0a8
RK
5122 /* Get the signedness used for this variable. Ensure we get the
5123 same mode we got when the variable was declared. */
78911e8b
RK
5124 if (GET_MODE (DECL_RTL (exp))
5125 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
1499e0a8
RK
5126 abort ();
5127
38a448ca 5128 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
1499e0a8
RK
5129 SUBREG_PROMOTED_VAR_P (temp) = 1;
5130 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5131 return temp;
5132 }
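/* Concretely: on a target whose PROMOTE_MODE widens HImode variables
   into SImode registers, reading such a variable in HImode yields
   (subreg:HI (reg:SI N) 0) with SUBREG_PROMOTED_VAR_P set, telling
   later passes that the register already holds a fully extended value.  */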
5133
bbf6f052
RK
5134 return DECL_RTL (exp);
5135
5136 case INTEGER_CST:
5137 return immed_double_const (TREE_INT_CST_LOW (exp),
5138 TREE_INT_CST_HIGH (exp),
5139 mode);
5140
5141 case CONST_DECL:
921b3427
RK
5142 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5143 EXPAND_MEMORY_USE_BAD);
bbf6f052
RK
5144
5145 case REAL_CST:
5146 /* If optimized, generate immediate CONST_DOUBLE
5147 which will be turned into memory by reload if necessary.
5148
5149 We used to force a register so that loop.c could see it. But
5150 this does not allow gen_* patterns to perform optimizations with
5151 the constants. It also produces two insns in cases like "x = 1.0;".
5152 On most machines, floating-point constants are not permitted in
5153 many insns, so we'd end up copying it to a register in any case.
5154
5155 Now, we do the copying in expand_binop, if appropriate. */
5156 return immed_real_const (exp);
5157
5158 case COMPLEX_CST:
5159 case STRING_CST:
5160 if (! TREE_CST_RTL (exp))
5161 output_constant_def (exp);
5162
5163 /* TREE_CST_RTL probably contains a constant address.
5164 On RISC machines where a constant address isn't valid,
5165 make some insns to get that address into a register. */
5166 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5167 && modifier != EXPAND_CONST_ADDRESS
5168 && modifier != EXPAND_INITIALIZER
5169 && modifier != EXPAND_SUM
d6a5ac33
RK
5170 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5171 || (flag_force_addr
5172 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
bbf6f052
RK
5173 return change_address (TREE_CST_RTL (exp), VOIDmode,
5174 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5175 return TREE_CST_RTL (exp);
5176
bf1e5319
APB
5177 case EXPR_WITH_FILE_LOCATION:
5178 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5179 emit_line_note (EXPR_WFL_FILENAME (exp), EXPR_WFL_LINENO (exp));
5180 return expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5181
bbf6f052
RK
5182 case SAVE_EXPR:
5183 context = decl_function_context (exp);
d6a5ac33 5184
d0977240
RK
5185 /* If this SAVE_EXPR was at global context, assume we are an
5186 initialization function and move it into our context. */
5187 if (context == 0)
5188 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5189
bbf6f052
RK
5190 /* We treat inline_function_decl as an alias for the current function
5191 because that is the inline function whose vars, types, etc.
5192 are being merged into the current function.
5193 See expand_inline_function. */
5194 if (context == current_function_decl || context == inline_function_decl)
5195 context = 0;
5196
5197 /* If this is non-local, handle it. */
5198 if (context)
5199 {
d0977240
RK
5200 /* The following call just exists to abort if the context is
5201 not of a containing function. */
5202 find_function_data (context);
5203
bbf6f052
RK
5204 temp = SAVE_EXPR_RTL (exp);
5205 if (temp && GET_CODE (temp) == REG)
5206 {
5207 put_var_into_stack (exp);
5208 temp = SAVE_EXPR_RTL (exp);
5209 }
5210 if (temp == 0 || GET_CODE (temp) != MEM)
5211 abort ();
5212 return change_address (temp, mode,
5213 fix_lexical_addr (XEXP (temp, 0), exp));
5214 }
5215 if (SAVE_EXPR_RTL (exp) == 0)
5216 {
06089a8b
RK
5217 if (mode == VOIDmode)
5218 temp = const0_rtx;
5219 else
e5e809f4 5220 temp = assign_temp (type, 3, 0, 0);
1499e0a8 5221
bbf6f052 5222 SAVE_EXPR_RTL (exp) = temp;
bbf6f052 5223 if (!optimize && GET_CODE (temp) == REG)
38a448ca
RH
5224 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5225 save_expr_regs);
ff78f773
RK
5226
5227 /* If the mode of TEMP does not match that of the expression, it
5228 must be a promoted value. We pass store_expr a SUBREG of the
5229 wanted mode but mark it so that we know that it was already
5230 extended. Note that `unsignedp' was modified above in
5231 this case. */
5232
5233 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5234 {
38a448ca 5235 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
ff78f773
RK
5236 SUBREG_PROMOTED_VAR_P (temp) = 1;
5237 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5238 }
5239
4c7a0be9 5240 if (temp == const0_rtx)
921b3427
RK
5241 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5242 EXPAND_MEMORY_USE_BAD);
4c7a0be9
JW
5243 else
5244 store_expr (TREE_OPERAND (exp, 0), temp, 0);
e5e809f4
JL
5245
5246 TREE_USED (exp) = 1;
bbf6f052 5247 }
1499e0a8
RK
5248
5249 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5250 must be a promoted value. We return a SUBREG of the wanted mode,
0f41302f 5251 but mark it so that we know that it was already extended. */
1499e0a8
RK
5252
5253 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5254 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5255 {
e70d22c8
RK
5256 /* Compute the signedness and make the proper SUBREG. */
5257 promote_mode (type, mode, &unsignedp, 0);
38a448ca 5258 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
1499e0a8
RK
5259 SUBREG_PROMOTED_VAR_P (temp) = 1;
5260 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5261 return temp;
5262 }
5263
bbf6f052
RK
5264 return SAVE_EXPR_RTL (exp);
5265
679163cf
MS
5266 case UNSAVE_EXPR:
5267 {
5268 rtx temp;
5269 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5270 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5271 return temp;
5272 }
5273
b50d17a1 5274 case PLACEHOLDER_EXPR:
e9a25f70
JL
5275 {
5276 tree placeholder_expr;
5277
5278 /* If there is an object on the head of the placeholder list,
e5e809f4 5279 see if some object in it is of type TYPE or a pointer to it. For
e9a25f70
JL
5280 further information, see tree.def. */
5281 for (placeholder_expr = placeholder_list;
5282 placeholder_expr != 0;
5283 placeholder_expr = TREE_CHAIN (placeholder_expr))
5284 {
5285 tree need_type = TYPE_MAIN_VARIANT (type);
5286 tree object = 0;
5287 tree old_list = placeholder_list;
5288 tree elt;
5289
e5e809f4
JL
5290 /* Find the outermost reference that is of the type we want.
5291 If none, see if any object has a type that is a pointer to
5292 the type we want. */
5293 for (elt = TREE_PURPOSE (placeholder_expr);
5294 elt != 0 && object == 0;
5295 elt
5296 = ((TREE_CODE (elt) == COMPOUND_EXPR
5297 || TREE_CODE (elt) == COND_EXPR)
5298 ? TREE_OPERAND (elt, 1)
5299 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5300 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5301 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5302 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5303 ? TREE_OPERAND (elt, 0) : 0))
5304 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5305 object = elt;
e9a25f70 5306
e9a25f70 5307 for (elt = TREE_PURPOSE (placeholder_expr);
e5e809f4
JL
5308 elt != 0 && object == 0;
5309 elt
5310 = ((TREE_CODE (elt) == COMPOUND_EXPR
5311 || TREE_CODE (elt) == COND_EXPR)
5312 ? TREE_OPERAND (elt, 1)
5313 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5314 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5315 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5316 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5317 ? TREE_OPERAND (elt, 0) : 0))
5318 if (POINTER_TYPE_P (TREE_TYPE (elt))
5319 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
e9a25f70 5320 == need_type))
e5e809f4 5321 object = build1 (INDIRECT_REF, need_type, elt);
dc500fa1 5322
e9a25f70 5323 if (object != 0)
2cde2255 5324 {
e9a25f70
JL
5325 /* Expand this object skipping the list entries before
5326 it was found in case it is also a PLACEHOLDER_EXPR.
5327 In that case, we want to translate it using subsequent
5328 entries. */
5329 placeholder_list = TREE_CHAIN (placeholder_expr);
5330 temp = expand_expr (object, original_target, tmode,
5331 ro_modifier);
5332 placeholder_list = old_list;
5333 return temp;
2cde2255 5334 }
e9a25f70
JL
5335 }
5336 }
b50d17a1
RK
5337
5338 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5339 abort ();
5340
5341 case WITH_RECORD_EXPR:
5342 /* Put the object on the placeholder list, expand our first operand,
5343 and pop the list. */
5344 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5345 placeholder_list);
5346 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
921b3427 5347 tmode, ro_modifier);
b50d17a1
RK
5348 placeholder_list = TREE_CHAIN (placeholder_list);
5349 return target;
5350
bbf6f052 5351 case EXIT_EXPR:
e44842fe
RK
5352 expand_exit_loop_if_false (NULL_PTR,
5353 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
5354 return const0_rtx;
5355
5356 case LOOP_EXPR:
0088fcb1 5357 push_temp_slots ();
bbf6f052
RK
5358 expand_start_loop (1);
5359 expand_expr_stmt (TREE_OPERAND (exp, 0));
5360 expand_end_loop ();
0088fcb1 5361 pop_temp_slots ();
bbf6f052
RK
5362
5363 return const0_rtx;
5364
5365 case BIND_EXPR:
5366 {
5367 tree vars = TREE_OPERAND (exp, 0);
5368 int vars_need_expansion = 0;
5369
5370 /* Need to open a binding contour here because
e976b8b2 5371 if there are any cleanups they must be contained here. */
bbf6f052
RK
5372 expand_start_bindings (0);
5373
2df53c0b
RS
5374 /* Mark the corresponding BLOCK for output in its proper place. */
5375 if (TREE_OPERAND (exp, 2) != 0
5376 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5377 insert_block (TREE_OPERAND (exp, 2));
bbf6f052
RK
5378
5379 /* If VARS have not yet been expanded, expand them now. */
5380 while (vars)
5381 {
5382 if (DECL_RTL (vars) == 0)
5383 {
5384 vars_need_expansion = 1;
5385 expand_decl (vars);
5386 }
5387 expand_decl_init (vars);
5388 vars = TREE_CHAIN (vars);
5389 }
5390
921b3427 5391 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
bbf6f052
RK
5392
5393 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5394
5395 return temp;
5396 }
5397
5398 case RTL_EXPR:
83b853c9
JM
5399 if (RTL_EXPR_SEQUENCE (exp))
5400 {
5401 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5402 abort ();
5403 emit_insns (RTL_EXPR_SEQUENCE (exp));
5404 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5405 }
99310285 5406 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
ca814259 5407 free_temps_for_rtl_expr (exp);
bbf6f052
RK
5408 return RTL_EXPR_RTL (exp);
5409
5410 case CONSTRUCTOR:
dd27116b
RK
5411 /* If we don't need the result, just ensure we evaluate any
5412 subexpressions. */
5413 if (ignore)
5414 {
5415 tree elt;
5416 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
921b3427
RK
5417 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
5418 EXPAND_MEMORY_USE_BAD);
dd27116b
RK
5419 return const0_rtx;
5420 }
3207b172 5421
4af3895e
JVA
5422 /* All elts simple constants => refer to a constant in memory. But
5423 if this is a non-BLKmode mode, let it store a field at a time
5424 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 5425 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
5426 store directly into the target unless the type is large enough
5427 that memcpy will be used. If we are making an initializer and
3207b172 5428 all operands are constant, put it in memory as well. */
dd27116b 5429 else if ((TREE_STATIC (exp)
3207b172 5430 && ((mode == BLKmode
e5e809f4 5431 && ! (target != 0 && safe_from_p (target, exp, 1)))
d720b9d1
RK
5432 || TREE_ADDRESSABLE (exp)
5433 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5434 && (move_by_pieces_ninsns
67225c15
RK
5435 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5436 TYPE_ALIGN (type) / BITS_PER_UNIT)
9de08200
RK
5437 > MOVE_RATIO)
5438 && ! mostly_zeros_p (exp))))
dd27116b 5439 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
bbf6f052
RK
5440 {
5441 rtx constructor = output_constant_def (exp);
b552441b
RS
5442 if (modifier != EXPAND_CONST_ADDRESS
5443 && modifier != EXPAND_INITIALIZER
5444 && modifier != EXPAND_SUM
d6a5ac33
RK
5445 && (! memory_address_p (GET_MODE (constructor),
5446 XEXP (constructor, 0))
5447 || (flag_force_addr
5448 && GET_CODE (XEXP (constructor, 0)) != REG)))
bbf6f052
RK
5449 constructor = change_address (constructor, VOIDmode,
5450 XEXP (constructor, 0));
5451 return constructor;
5452 }
5453
bbf6f052
RK
5454 else
5455 {
e9ac02a6
JW
5456 /* Handle calls that pass values in multiple non-contiguous
5457 locations. The Irix 6 ABI has examples of this. */
e5e809f4 5458 if (target == 0 || ! safe_from_p (target, exp, 1)
e9ac02a6 5459 || GET_CODE (target) == PARALLEL)
06089a8b
RK
5460 {
5461 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5462 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5463 else
5464 target = assign_temp (type, 0, 1, 1);
5465 }
07604beb
RK
5466
5467 if (TREE_READONLY (exp))
5468 {
9151b3bf 5469 if (GET_CODE (target) == MEM)
effbcc6a
RK
5470 target = copy_rtx (target);
5471
07604beb
RK
5472 RTX_UNCHANGING_P (target) = 1;
5473 }
5474
e1a43f73 5475 store_constructor (exp, target, 0);
bbf6f052
RK
5476 return target;
5477 }
5478
5479 case INDIRECT_REF:
5480 {
5481 tree exp1 = TREE_OPERAND (exp, 0);
5482 tree exp2;
7581a30f
JW
5483 tree index;
5484 tree string = string_constant (exp1, &index);
5485 int i;
5486
5487 if (string
5488 && TREE_CODE (string) == STRING_CST
5489 && TREE_CODE (index) == INTEGER_CST
5490 && !TREE_INT_CST_HIGH (index)
5491 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
5492 && GET_MODE_CLASS (mode) == MODE_INT
5493 && GET_MODE_SIZE (mode) == 1)
5494 return GEN_INT (TREE_STRING_POINTER (string)[i]);
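/* For example, an access such as *("hello" + 1) is folded here to the
   constant 'e' instead of materializing the string constant in memory
   and loading a byte from it.  */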
bbf6f052 5495
405f0da6
JW
5496 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5497 op0 = memory_address (mode, op0);
8c8a8e34 5498
921b3427
RK
5499 if (flag_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5500 {
5501 enum memory_use_mode memory_usage;
5502 memory_usage = get_memory_usage_from_modifier (modifier);
5503
5504 if (memory_usage != MEMORY_USE_DONT)
c85f7c16
JL
5505 {
5506 in_check_memory_usage = 1;
5507 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5508 op0, ptr_mode,
5509 GEN_INT (int_size_in_bytes (type)),
5510 TYPE_MODE (sizetype),
5511 GEN_INT (memory_usage),
5512 TYPE_MODE (integer_type_node));
5513 in_check_memory_usage = 0;
5514 }
921b3427
RK
5515 }
5516
38a448ca 5517 temp = gen_rtx_MEM (mode, op0);
8c8a8e34
JW
5518 /* If address was computed by addition,
5519 mark this as an element of an aggregate. */
5520 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5521 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5522 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
05e3bdb9 5523 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
8c8a8e34
JW
5524 || (TREE_CODE (exp1) == ADDR_EXPR
5525 && (exp2 = TREE_OPERAND (exp1, 0))
05e3bdb9 5526 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
8c8a8e34 5527 MEM_IN_STRUCT_P (temp) = 1;
2c4c436a 5528 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
1125706f
RK
5529
5530 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5531 here, because, in C and C++, the fact that a location is accessed
5532 through a pointer to const does not mean that the value there can
5533 never change. Languages where it can never change should
5534 also set TREE_STATIC. */
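/* (For example, a "const int *" may point at an object declared as a
   plain "int" whose value changes through another lvalue, so the const
   qualification of the reference alone proves nothing about the
   pointee.)  */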
5cb7a25a 5535 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
8c8a8e34
JW
5536 return temp;
5537 }
bbf6f052
RK
5538
5539 case ARRAY_REF:
742920c7
RK
5540 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5541 abort ();
bbf6f052 5542
bbf6f052 5543 {
742920c7
RK
5544 tree array = TREE_OPERAND (exp, 0);
5545 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5546 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5547 tree index = TREE_OPERAND (exp, 1);
5548 tree index_type = TREE_TYPE (index);
08293add 5549 HOST_WIDE_INT i;
b50d17a1 5550
d4c89139
PB
5551 /* Optimize the special-case of a zero lower bound.
5552
5553 We convert the low_bound to sizetype to avoid some problems
5554 with constant folding. (E.g. suppose the lower bound is 1,
5555 and its mode is QI. Without the conversion, (ARRAY
5556 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5557 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5558
5559 But sizetype isn't quite right either (especially if
5560 the lowbound is negative). FIXME */
5561
742920c7 5562 if (! integer_zerop (low_bound))
d4c89139
PB
5563 index = fold (build (MINUS_EXPR, index_type, index,
5564 convert (sizetype, low_bound)));
742920c7 5565
742920c7 5566 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
5567 This is not done in fold so it won't happen inside &.
5568 Don't fold if this is for wide characters since it's too
5569 difficult to do correctly and this is a very rare case. */
742920c7
RK
5570
5571 if (TREE_CODE (array) == STRING_CST
5572 && TREE_CODE (index) == INTEGER_CST
5573 && !TREE_INT_CST_HIGH (index)
307b821c 5574 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
ad2e7dd0
RK
5575 && GET_MODE_CLASS (mode) == MODE_INT
5576 && GET_MODE_SIZE (mode) == 1)
307b821c 5577 return GEN_INT (TREE_STRING_POINTER (array)[i]);
bbf6f052 5578
742920c7
RK
5579 /* If this is a constant index into a constant array,
5580 just get the value from the array. Handle both the cases when
5581 we have an explicit constructor and when our operand is a variable
5582 that was declared const. */
4af3895e 5583
742920c7
RK
5584 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5585 {
5586 if (TREE_CODE (index) == INTEGER_CST
5587 && TREE_INT_CST_HIGH (index) == 0)
5588 {
5589 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5590
5591 i = TREE_INT_CST_LOW (index);
5592 while (elem && i--)
5593 elem = TREE_CHAIN (elem);
5594 if (elem)
5595 return expand_expr (fold (TREE_VALUE (elem)), target,
921b3427 5596 tmode, ro_modifier);
742920c7
RK
5597 }
5598 }
4af3895e 5599
742920c7
RK
5600 else if (optimize >= 1
5601 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5602 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5603 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5604 {
08293add 5605 if (TREE_CODE (index) == INTEGER_CST)
742920c7
RK
5606 {
5607 tree init = DECL_INITIAL (array);
5608
5609 i = TREE_INT_CST_LOW (index);
5610 if (TREE_CODE (init) == CONSTRUCTOR)
5611 {
5612 tree elem = CONSTRUCTOR_ELTS (init);
5613
03dc44a6
RS
5614 while (elem
5615 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
742920c7
RK
5616 elem = TREE_CHAIN (elem);
5617 if (elem)
5618 return expand_expr (fold (TREE_VALUE (elem)), target,
921b3427 5619 tmode, ro_modifier);
742920c7
RK
5620 }
5621 else if (TREE_CODE (init) == STRING_CST
08293add
RK
5622 && TREE_INT_CST_HIGH (index) == 0
5623 && (TREE_INT_CST_LOW (index)
5624 < TREE_STRING_LENGTH (init)))
5625 return (GEN_INT
5626 (TREE_STRING_POINTER
5627 (init)[TREE_INT_CST_LOW (index)]));
742920c7
RK
5628 }
5629 }
5630 }
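/* For example, with optimization enabled, a reference t[1] where
   "static const int t[] = {10, 20, 30};" supplied the initializer is
   folded here to the constant 20 without addressing the array in
   memory at all.  */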
8c8a8e34 5631
08293add 5632 /* ... fall through ... */
bbf6f052
RK
5633
5634 case COMPONENT_REF:
5635 case BIT_FIELD_REF:
4af3895e 5636 /* If the operand is a CONSTRUCTOR, we can just extract the
7a0b7b9a
RK
5637 appropriate field if it is present. Don't do this if we have
5638 already written the data since we want to refer to that copy
5639 and varasm.c assumes that's what we'll do. */
4af3895e 5640 if (code != ARRAY_REF
7a0b7b9a
RK
5641 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5642 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
4af3895e
JVA
5643 {
5644 tree elt;
5645
5646 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5647 elt = TREE_CHAIN (elt))
86b5812c
RK
5648 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
5649 /* We can normally use the value of the field in the
5650 CONSTRUCTOR. However, if this is a bitfield in
5651 an integral mode that we can fit in a HOST_WIDE_INT,
5652 we must mask only the number of bits in the bitfield,
5653 since this is done implicitly by the constructor. If
5654 the bitfield does not meet either of those conditions,
5655 we can't do this optimization. */
5656 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
5657 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
5658 == MODE_INT)
5659 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
5660 <= HOST_BITS_PER_WIDE_INT))))
5661 {
5662 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5663 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
5664 {
5665 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
86b5812c
RK
5666
5667 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
5668 {
5669 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
5670 op0 = expand_and (op0, op1, target);
5671 }
5672 else
5673 {
e5e809f4
JL
5674 enum machine_mode imode
5675 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
86b5812c 5676 tree count
e5e809f4
JL
5677 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
5678 0);
86b5812c
RK
5679
5680 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
5681 target, 0);
5682 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
5683 target, 0);
5684 }
5685 }
5686
5687 return op0;
5688 }
4af3895e
JVA
5689 }
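/* For instance, for a signed 3-bit bitfield whose underlying type is
   SImode int, the two expand_shift calls above use a count of
   32 - 3 = 29: shifting left and then arithmetically right
   sign-extends the field.  An unsigned 3-bit field is instead masked
   with (1 << 3) - 1 = 7.  */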
5690
bbf6f052
RK
5691 {
5692 enum machine_mode mode1;
5693 int bitsize;
5694 int bitpos;
7bb0943f 5695 tree offset;
bbf6f052 5696 int volatilep = 0;
034f9101 5697 int alignment;
839c4796
RK
5698 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5699 &mode1, &unsignedp, &volatilep,
5700 &alignment);
bbf6f052 5701
e7f3c83f
RK
5702 /* If we got back the original object, something is wrong. Perhaps
5703 we are evaluating an expression too early. In any event, don't
5704 infinitely recurse. */
5705 if (tem == exp)
5706 abort ();
5707
3d27140a 5708 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
5709 computation, since it will need a temporary and TARGET is known
 5710 to suffice. This occurs in unchecked conversion in Ada. */
5711
5712 op0 = expand_expr (tem,
5713 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5714 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5715 != INTEGER_CST)
5716 ? target : NULL_RTX),
4ed67205 5717 VOIDmode,
e5e809f4
JL
5718 modifier == EXPAND_INITIALIZER
5719 ? modifier : EXPAND_NORMAL);
bbf6f052 5720
8c8a8e34 5721 /* If this is a constant, put it into a register if it is a
8008b228 5722 legitimate constant, and into memory if it isn't. */
8c8a8e34
JW
5723 if (CONSTANT_P (op0))
5724 {
5725 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
f2878c6b 5726 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
8c8a8e34
JW
5727 op0 = force_reg (mode, op0);
5728 else
5729 op0 = validize_mem (force_const_mem (mode, op0));
5730 }
5731
7bb0943f
RS
5732 if (offset != 0)
5733 {
906c4e36 5734 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
5735
5736 if (GET_CODE (op0) != MEM)
5737 abort ();
2d48c13d
JL
5738
5739 if (GET_MODE (offset_rtx) != ptr_mode)
5740#ifdef POINTERS_EXTEND_UNSIGNED
5741 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 1);
5742#else
5743 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5744#endif
5745
7bb0943f 5746 op0 = change_address (op0, VOIDmode,
38a448ca
RH
5747 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
5748 force_reg (ptr_mode, offset_rtx)));
7bb0943f
RS
5749 }
5750
bbf6f052
RK
5751 /* Don't forget about volatility even if this is a bitfield. */
5752 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5753 {
5754 op0 = copy_rtx (op0);
5755 MEM_VOLATILE_P (op0) = 1;
5756 }
5757
921b3427
RK
5758 /* Check the access. */
5759 if (flag_check_memory_usage && GET_CODE (op0) == MEM)
5760 {
5761 enum memory_use_mode memory_usage;
5762 memory_usage = get_memory_usage_from_modifier (modifier);
5763
5764 if (memory_usage != MEMORY_USE_DONT)
5765 {
5766 rtx to;
5767 int size;
5768
5769 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
5770 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
5771
5772 /* Check the access right of the pointer. */
e9a25f70
JL
5773 if (size > BITS_PER_UNIT)
5774 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5775 to, ptr_mode,
5776 GEN_INT (size / BITS_PER_UNIT),
5777 TYPE_MODE (sizetype),
956d6950
JL
5778 GEN_INT (memory_usage),
5779 TYPE_MODE (integer_type_node));
921b3427
RK
5780 }
5781 }
5782
ccc98036
RS
5783 /* In cases where an aligned union has an unaligned object
5784 as a field, we might be extracting a BLKmode value from
5785 an integer-mode (e.g., SImode) object. Handle this case
5786 by doing the extract into an object as wide as the field
5787 (which we know to be the width of a basic mode), then
f2420d0b
JW
5788 storing into memory, and changing the mode to BLKmode.
5789 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5790 EXPAND_INITIALIZER), then we must not copy to a temporary. */
bbf6f052 5791 if (mode1 == VOIDmode
ccc98036 5792 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
f9409c3a 5793 || (modifier != EXPAND_CONST_ADDRESS
f9409c3a 5794 && modifier != EXPAND_INITIALIZER
c2722ef6
RK
5795 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5796 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5797 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
f9409c3a
JW
5798 /* If the field isn't aligned enough to fetch as a memref,
5799 fetch it as a bit field. */
5800 || (SLOW_UNALIGNED_ACCESS
5801 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5802 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
bbf6f052 5803 {
bbf6f052
RK
5804 enum machine_mode ext_mode = mode;
5805
5806 if (ext_mode == BLKmode)
5807 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5808
5809 if (ext_mode == BLKmode)
a281e72d
RK
5810 {
5811 /* In this case, BITPOS must start at a byte boundary and
5812 TARGET, if specified, must be a MEM. */
5813 if (GET_CODE (op0) != MEM
5814 || (target != 0 && GET_CODE (target) != MEM)
5815 || bitpos % BITS_PER_UNIT != 0)
5816 abort ();
5817
5818 op0 = change_address (op0, VOIDmode,
5819 plus_constant (XEXP (op0, 0),
5820 bitpos / BITS_PER_UNIT));
5821 if (target == 0)
5822 target = assign_temp (type, 0, 1, 1);
5823
5824 emit_block_move (target, op0,
5825 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5826 / BITS_PER_UNIT),
5827 1);
5828
5829 return target;
5830 }
bbf6f052 5831
dc6d66b3
RK
5832 op0 = validize_mem (op0);
5833
5834 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5835 mark_reg_pointer (XEXP (op0, 0), alignment);
5836
5837 op0 = extract_bit_field (op0, bitsize, bitpos,
bbf6f052 5838 unsignedp, target, ext_mode, ext_mode,
034f9101 5839 alignment,
bbf6f052 5840 int_size_in_bytes (TREE_TYPE (tem)));
ef19912d
RK
5841
5842 /* If the result is a record type and BITSIZE is narrower than
5843 the mode of OP0, an integral mode, and this is a big endian
5844 machine, we must put the field into the high-order bits. */
5845 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5846 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5847 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5848 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5849 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5850 - bitsize),
5851 op0, 1);
5852
bbf6f052
RK
5853 if (mode == BLKmode)
5854 {
5855 rtx new = assign_stack_temp (ext_mode,
5856 bitsize / BITS_PER_UNIT, 0);
5857
5858 emit_move_insn (new, op0);
5859 op0 = copy_rtx (new);
5860 PUT_MODE (op0, BLKmode);
092dded9 5861 MEM_IN_STRUCT_P (op0) = 1;
bbf6f052
RK
5862 }
5863
5864 return op0;
5865 }
5866
05019f83
RK
5867 /* If the result is BLKmode, use that to access the object
5868 now as well. */
5869 if (mode == BLKmode)
5870 mode1 = BLKmode;
5871
bbf6f052
RK
5872 /* Get a reference to just this component. */
5873 if (modifier == EXPAND_CONST_ADDRESS
5874 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
38a448ca
RH
5875 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
5876 (bitpos / BITS_PER_UNIT)));
bbf6f052
RK
5877 else
5878 op0 = change_address (op0, mode1,
5879 plus_constant (XEXP (op0, 0),
5880 (bitpos / BITS_PER_UNIT)));
dc6d66b3
RK
5881 if (GET_CODE (XEXP (op0, 0)) == REG)
5882 mark_reg_pointer (XEXP (op0, 0), alignment);
5883
bbf6f052
RK
5884 MEM_IN_STRUCT_P (op0) = 1;
5885 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 5886 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 5887 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 5888 || modifier == EXPAND_INITIALIZER)
bbf6f052 5889 return op0;
0d15e60c 5890 else if (target == 0)
bbf6f052 5891 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 5892
bbf6f052
RK
5893 convert_move (target, op0, unsignedp);
5894 return target;
5895 }
5896
bbf6f052
RK
5897 /* Intended for a reference to a buffer of a file-object in Pascal.
5898 But it's not certain that a special tree code will really be
5899 necessary for these. INDIRECT_REF might work for them. */
5900 case BUFFER_REF:
5901 abort ();
5902
7308a047 5903 case IN_EXPR:
7308a047 5904 {
d6a5ac33
RK
5905 /* Pascal set IN expression.
5906
5907 Algorithm:
5908 rlo = set_low - (set_low%bits_per_word);
5909 the_word = set [ (index - rlo)/bits_per_word ];
5910 bit_index = index % bits_per_word;
5911 bitmask = 1 << bit_index;
5912 return !!(the_word & bitmask); */
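/* A standalone C rendering of the algorithm above, at byte rather
   than word granularity (illustrative only; in_set, set_bytes,
   set_low and index are hypothetical names, not GCC interfaces;
   CHAR_BIT is from <limits.h>):

       static int in_set (const unsigned char *set_bytes,
                          int set_low, int index)
       {
         int rlo = set_low - (set_low % CHAR_BIT);
         unsigned char the_word = set_bytes[(index - rlo) / CHAR_BIT];
         int bit_index = index % CHAR_BIT;
         return (the_word >> bit_index) & 1;
       }

   The RTL emitted below follows this scheme using BITS_PER_UNIT, and
   adds the out-of-range checks that the sketch omits.  */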
5913
7308a047
RS
5914 tree set = TREE_OPERAND (exp, 0);
5915 tree index = TREE_OPERAND (exp, 1);
d6a5ac33 5916 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308a047 5917 tree set_type = TREE_TYPE (set);
7308a047
RS
5918 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5919 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
d6a5ac33
RK
5920 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5921 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5922 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5923 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5924 rtx setaddr = XEXP (setval, 0);
5925 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7308a047
RS
5926 rtx rlow;
5927 rtx diff, quo, rem, addr, bit, result;
7308a047 5928
d6a5ac33
RK
5929 preexpand_calls (exp);
5930
5931 /* If domain is empty, answer is no. Likewise if index is constant
5932 and out of bounds. */
51723711 5933 if (((TREE_CODE (set_high_bound) == INTEGER_CST
d6a5ac33 5934 && TREE_CODE (set_low_bound) == INTEGER_CST
51723711 5935 && tree_int_cst_lt (set_high_bound, set_low_bound))
d6a5ac33
RK
5936 || (TREE_CODE (index) == INTEGER_CST
5937 && TREE_CODE (set_low_bound) == INTEGER_CST
5938 && tree_int_cst_lt (index, set_low_bound))
5939 || (TREE_CODE (set_high_bound) == INTEGER_CST
5940 && TREE_CODE (index) == INTEGER_CST
5941 && tree_int_cst_lt (set_high_bound, index))))
7308a047
RS
5942 return const0_rtx;
5943
d6a5ac33
RK
5944 if (target == 0)
5945 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7308a047
RS
5946
5947 /* If we get here, we have to generate the code for both cases
5948 (in range and out of range). */
5949
5950 op0 = gen_label_rtx ();
5951 op1 = gen_label_rtx ();
5952
5953 if (! (GET_CODE (index_val) == CONST_INT
5954 && GET_CODE (lo_r) == CONST_INT))
5955 {
17938e57 5956 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
d6a5ac33 5957 GET_MODE (index_val), iunsignedp, 0);
7308a047
RS
5958 emit_jump_insn (gen_blt (op1));
5959 }
5960
5961 if (! (GET_CODE (index_val) == CONST_INT
5962 && GET_CODE (hi_r) == CONST_INT))
5963 {
17938e57 5964 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
d6a5ac33 5965 GET_MODE (index_val), iunsignedp, 0);
7308a047
RS
5966 emit_jump_insn (gen_bgt (op1));
5967 }
5968
5969 /* Calculate the element number of bit zero in the first word
5970 of the set. */
5971 if (GET_CODE (lo_r) == CONST_INT)
17938e57
RK
5972 rlow = GEN_INT (INTVAL (lo_r)
5973 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 5974 else
17938e57
RK
5975 rlow = expand_binop (index_mode, and_optab, lo_r,
5976 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d6a5ac33 5977 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 5978
d6a5ac33
RK
5979 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5980 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047
RS
5981
5982 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 5983 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 5984 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
d6a5ac33
RK
5985 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5986
7308a047 5987 addr = memory_address (byte_mode,
d6a5ac33
RK
5988 expand_binop (index_mode, add_optab, diff,
5989 setaddr, NULL_RTX, iunsignedp,
17938e57 5990 OPTAB_LIB_WIDEN));
d6a5ac33 5991
7308a047
RS
 5992 /* Extract the bit we want to examine. */
5993 bit = expand_shift (RSHIFT_EXPR, byte_mode,
38a448ca 5994 gen_rtx_MEM (byte_mode, addr),
17938e57
RK
5995 make_tree (TREE_TYPE (index), rem),
5996 NULL_RTX, 1);
5997 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5998 GET_MODE (target) == byte_mode ? target : 0,
7308a047 5999 1, OPTAB_LIB_WIDEN);
17938e57
RK
6000
6001 if (result != target)
6002 convert_move (target, result, 1);
7308a047
RS
6003
6004 /* Output the code to handle the out-of-range case. */
6005 emit_jump (op0);
6006 emit_label (op1);
6007 emit_move_insn (target, const0_rtx);
6008 emit_label (op0);
6009 return target;
6010 }
6011
bbf6f052
RK
6012 case WITH_CLEANUP_EXPR:
6013 if (RTL_EXPR_RTL (exp) == 0)
6014 {
6015 RTL_EXPR_RTL (exp)
921b3427 6016 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
e976b8b2
MS
6017 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6018
bbf6f052
RK
6019 /* That's it for this cleanup. */
6020 TREE_OPERAND (exp, 2) = 0;
6021 }
6022 return RTL_EXPR_RTL (exp);
6023
5dab5552
MS
6024 case CLEANUP_POINT_EXPR:
6025 {
d93d4205 6026 extern int temp_slot_level;
e976b8b2
MS
6027 /* Start a new binding layer that will keep track of all cleanup
6028 actions to be performed. */
6029 expand_start_bindings (0);
6030
d93d4205 6031 target_temp_slot_level = temp_slot_level;
e976b8b2 6032
921b3427 6033 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
f283f66b
JM
6034 /* If we're going to use this value, load it up now. */
6035 if (! ignore)
6036 op0 = force_not_mem (op0);
d93d4205 6037 preserve_temp_slots (op0);
e976b8b2 6038 expand_end_bindings (NULL_TREE, 0, 0);
5dab5552
MS
6039 }
6040 return op0;
6041
bbf6f052
RK
6042 case CALL_EXPR:
6043 /* Check for a built-in function. */
6044 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
6045 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6046 == FUNCTION_DECL)
bbf6f052
RK
6047 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6048 return expand_builtin (exp, target, subtarget, tmode, ignore);
d6a5ac33 6049
bbf6f052
RK
6050 /* If this call was expanded already by preexpand_calls,
6051 just return the result we got. */
6052 if (CALL_EXPR_RTL (exp) != 0)
6053 return CALL_EXPR_RTL (exp);
d6a5ac33 6054
8129842c 6055 return expand_call (exp, target, ignore);
bbf6f052
RK
6056
6057 case NON_LVALUE_EXPR:
6058 case NOP_EXPR:
6059 case CONVERT_EXPR:
6060 case REFERENCE_EXPR:
bbf6f052
RK
6061 if (TREE_CODE (type) == UNION_TYPE)
6062 {
6063 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6064 if (target == 0)
06089a8b
RK
6065 {
6066 if (mode != BLKmode)
6067 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6068 else
6069 target = assign_temp (type, 0, 1, 1);
6070 }
d6a5ac33 6071
bbf6f052
RK
6072 if (GET_CODE (target) == MEM)
6073 /* Store data into beginning of memory target. */
6074 store_expr (TREE_OPERAND (exp, 0),
1499e0a8
RK
6075 change_address (target, TYPE_MODE (valtype), 0), 0);
6076
bbf6f052
RK
6077 else if (GET_CODE (target) == REG)
6078 /* Store this field into a union of the proper type. */
6079 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6080 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6081 VOIDmode, 0, 1,
6082 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
6083 else
6084 abort ();
6085
6086 /* Return the entire union. */
6087 return target;
6088 }
d6a5ac33 6089
7f62854a
RK
6090 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6091 {
6092 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
921b3427 6093 ro_modifier);
7f62854a
RK
6094
6095 /* If the signedness of the conversion differs and OP0 is
6096 a promoted SUBREG, clear that indication since we now
6097 have to do the proper extension. */
6098 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6099 && GET_CODE (op0) == SUBREG)
6100 SUBREG_PROMOTED_VAR_P (op0) = 0;
6101
6102 return op0;
6103 }
6104
1499e0a8 6105 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
12342f90
RS
6106 if (GET_MODE (op0) == mode)
6107 return op0;
12342f90 6108
d6a5ac33
RK
6109 /* If OP0 is a constant, just convert it into the proper mode. */
6110 if (CONSTANT_P (op0))
6111 return
6112 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6113 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
12342f90 6114
26fcb35a 6115 if (modifier == EXPAND_INITIALIZER)
38a448ca 6116 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 6117
bbf6f052 6118 if (target == 0)
d6a5ac33
RK
6119 return
6120 convert_to_mode (mode, op0,
6121 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 6122 else
d6a5ac33
RK
6123 convert_move (target, op0,
6124 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
6125 return target;
6126
6127 case PLUS_EXPR:
0f41302f
MS
6128 /* We come here from MINUS_EXPR when the second operand is a
6129 constant. */
bbf6f052
RK
6130 plus_expr:
6131 this_optab = add_optab;
6132
6133 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6134 something else, make sure we add the register to the constant and
6135 then to the other thing. This case can occur during strength
6136 reduction and doing it this way will produce better code if the
6137 frame pointer or argument pointer is eliminated.
6138
6139 fold-const.c will ensure that the constant is always in the inner
6140 PLUS_EXPR, so the only case we need to do anything about is if
6141 sp, ap, or fp is our second argument, in which case we must swap
6142 the innermost first argument and our second argument. */
6143
6144 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6145 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6146 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6147 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6148 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6149 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6150 {
6151 tree t = TREE_OPERAND (exp, 1);
6152
6153 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6154 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6155 }
6156
88f63c77 6157 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
6158 something, we might be forming a constant. So try to use
6159 plus_constant. If it produces a sum and we can't accept it,
6160 use force_operand. This allows P = &ARR[const] to generate
6161 efficient code on machines where a SYMBOL_REF is not a valid
6162 address.
6163
6164 If this is an EXPAND_SUM call, always return the sum. */
c980ac49 6165 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
88f63c77 6166 || mode == ptr_mode)
bbf6f052 6167 {
c980ac49
RS
6168 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6169 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6170 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6171 {
6172 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6173 EXPAND_SUM);
6174 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6175 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6176 op1 = force_operand (op1, target);
6177 return op1;
6178 }
bbf6f052 6179
c980ac49
RS
6180 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6181 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6182 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6183 {
6184 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6185 EXPAND_SUM);
6186 if (! CONSTANT_P (op0))
6187 {
6188 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6189 VOIDmode, modifier);
709f5be1
RS
6190 /* Don't go to both_summands if modifier
6191 says it's not right to return a PLUS. */
6192 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6193 goto binop2;
c980ac49
RS
6194 goto both_summands;
6195 }
6196 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6197 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6198 op0 = force_operand (op0, target);
6199 return op0;
6200 }
bbf6f052
RK
6201 }
6202
6203 /* No sense saving up arithmetic to be done
6204 if it's all in the wrong mode to form part of an address.
6205 And force_operand won't know whether to sign-extend or
6206 zero-extend. */
6207 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 6208 || mode != ptr_mode)
c980ac49 6209 goto binop;
bbf6f052
RK
6210
6211 preexpand_calls (exp);
e5e809f4 6212 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
6213 subtarget = 0;
6214
921b3427
RK
6215 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6216 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
bbf6f052 6217
c980ac49 6218 both_summands:
bbf6f052
RK
6219 /* Make sure any term that's a sum with a constant comes last. */
6220 if (GET_CODE (op0) == PLUS
6221 && CONSTANT_P (XEXP (op0, 1)))
6222 {
6223 temp = op0;
6224 op0 = op1;
6225 op1 = temp;
6226 }
6227 /* If adding to a sum including a constant,
6228 associate it to put the constant outside. */
6229 if (GET_CODE (op1) == PLUS
6230 && CONSTANT_P (XEXP (op1, 1)))
6231 {
6232 rtx constant_term = const0_rtx;
6233
6234 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6235 if (temp != 0)
6236 op0 = temp;
6f90e075
JW
6237 /* Ensure that MULT comes first if there is one. */
6238 else if (GET_CODE (op0) == MULT)
38a448ca 6239 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
bbf6f052 6240 else
38a448ca 6241 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
bbf6f052
RK
6242
6243 /* Let's also eliminate constants from op0 if possible. */
6244 op0 = eliminate_constant_term (op0, &constant_term);
6245
6246 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6247 their sum should be a constant. Form it into OP1, since the
6248 result we want will then be OP0 + OP1. */
6249
6250 temp = simplify_binary_operation (PLUS, mode, constant_term,
6251 XEXP (op1, 1));
6252 if (temp != 0)
6253 op1 = temp;
6254 else
38a448ca 6255 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
bbf6f052
RK
6256 }
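/* Taken together, the two steps above rewrite, e.g.,
   (REG + 4) + (SYM + 8) as (REG + SYM) + 12, collecting the entire
   constant part into one trailing term that addressing arithmetic
   can absorb.  */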
6257
6258 /* Put a constant term last and put a multiplication first. */
6259 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6260 temp = op1, op1 = op0, op0 = temp;
6261
6262 temp = simplify_binary_operation (PLUS, mode, op0, op1);
38a448ca 6263 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
bbf6f052
RK
6264
6265 case MINUS_EXPR:
ea87523e
RK
6266 /* For initializers, we are allowed to return a MINUS of two
6267 symbolic constants. Here we handle all cases when both operands
6268 are constant. */
bbf6f052
RK
6269 /* Handle difference of two symbolic constants,
6270 for the sake of an initializer. */
6271 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6272 && really_constant_p (TREE_OPERAND (exp, 0))
6273 && really_constant_p (TREE_OPERAND (exp, 1)))
6274 {
906c4e36 6275 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
921b3427 6276 VOIDmode, ro_modifier);
906c4e36 6277 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
921b3427 6278 VOIDmode, ro_modifier);
ea87523e 6279
ea87523e
RK
6280 /* If the last operand is a CONST_INT, use plus_constant of
6281 the negated constant. Else make the MINUS. */
6282 if (GET_CODE (op1) == CONST_INT)
6283 return plus_constant (op0, - INTVAL (op1));
6284 else
38a448ca 6285 return gen_rtx_MINUS (mode, op0, op1);
bbf6f052
RK
6286 }
6287 /* Convert A - const to A + (-const). */
6288 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6289 {
ae431183
RK
6290 tree negated = fold (build1 (NEGATE_EXPR, type,
6291 TREE_OPERAND (exp, 1)));
6292
6293 /* Deal with the case where we can't negate the constant
6294 in TYPE. */
6295 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6296 {
6297 tree newtype = signed_type (type);
6298 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6299 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6300 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6301
6302 if (! TREE_OVERFLOW (newneg))
6303 return expand_expr (convert (type,
6304 build (PLUS_EXPR, newtype,
6305 newop0, newneg)),
921b3427 6306 target, tmode, ro_modifier);
ae431183
RK
6307 }
6308 else
6309 {
6310 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6311 goto plus_expr;
6312 }
bbf6f052
RK
6313 }
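/* E.g., A - 5 is rewritten above as A + (-5) so the PLUS_EXPR code
   can fold it into addressing arithmetic; for an unsigned TYPE the
   negation is performed in the corresponding signed type, since the
   negated constant is not representable in the unsigned type itself.  */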
6314 this_optab = sub_optab;
6315 goto binop;
6316
6317 case MULT_EXPR:
6318 preexpand_calls (exp);
6319 /* If first operand is constant, swap them.
6320 Thus the following special case checks need only
6321 check the second operand. */
6322 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6323 {
6324 register tree t1 = TREE_OPERAND (exp, 0);
6325 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6326 TREE_OPERAND (exp, 1) = t1;
6327 }
6328
6329 /* Attempt to return something suitable for generating an
6330 indexed address, for machines that support that. */
6331
88f63c77 6332 if (modifier == EXPAND_SUM && mode == ptr_mode
bbf6f052 6333 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 6334 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 6335 {
921b3427
RK
6336 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6337 EXPAND_SUM);
bbf6f052
RK
6338
6339 /* Apply distributive law if OP0 is x+c. */
6340 if (GET_CODE (op0) == PLUS
6341 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
38a448ca
RH
6342 return gen_rtx_PLUS (mode,
6343 gen_rtx_MULT (mode, XEXP (op0, 0),
6344 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
906c4e36
RK
6345 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6346 * INTVAL (XEXP (op0, 1))));
bbf6f052
RK
6347
6348 if (GET_CODE (op0) != REG)
906c4e36 6349 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
6350 if (GET_CODE (op0) != REG)
6351 op0 = copy_to_mode_reg (mode, op0);
6352
38a448ca
RH
6353 return gen_rtx_MULT (mode, op0,
6354 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
bbf6f052
RK
6355 }
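/* For example, expanding (x + 8) * 4 under EXPAND_SUM yields
   (plus (mult x 4) 32), a form that machines with indexed addressing
   can use directly as an address.  */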
6356
e5e809f4 6357 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
6358 subtarget = 0;
6359
6360 /* Check for multiplying things that have been extended
6361 from a narrower type. If this machine supports multiplying
6362 in that narrower type with a result in the desired type,
6363 do it that way, and avoid the explicit type-conversion. */
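/* For instance, on a machine with a widening 32x32 -> 64 bit multiply
   insn, "(long long) a * (long long) b" with 32-bit int operands
   becomes a single SImode-to-DImode multiply instead of two sign
   extensions followed by a full DImode multiply.  */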
6364 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6365 && TREE_CODE (type) == INTEGER_TYPE
6366 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6367 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6368 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6369 && int_fits_type_p (TREE_OPERAND (exp, 1),
6370 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6371 /* Don't use a widening multiply if a shift will do. */
6372 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 6373 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
6374 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6375 ||
6376 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6377 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6378 ==
6379 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6380 /* If both operands are extended, they must either both
6381 be zero-extended or both be sign-extended. */
6382 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6383 ==
6384 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6385 {
6386 enum machine_mode innermode
6387 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
b10af0c8
TG
6388 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6389 ? smul_widen_optab : umul_widen_optab);
bbf6f052
RK
6390 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6391 ? umul_widen_optab : smul_widen_optab);
b10af0c8 6392 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 6393 {
b10af0c8
TG
6394 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6395 {
6396 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6397 NULL_RTX, VOIDmode, 0);
6398 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6399 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6400 VOIDmode, 0);
6401 else
6402 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6403 NULL_RTX, VOIDmode, 0);
6404 goto binop2;
6405 }
6406 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6407 && innermode == word_mode)
6408 {
6409 rtx htem;
6410 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6411 NULL_RTX, VOIDmode, 0);
6412 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6413 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6414 VOIDmode, 0);
6415 else
6416 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6417 NULL_RTX, VOIDmode, 0);
6418 temp = expand_binop (mode, other_optab, op0, op1, target,
6419 unsignedp, OPTAB_LIB_WIDEN);
6420 htem = expand_mult_highpart_adjust (innermode,
6421 gen_highpart (innermode, temp),
6422 op0, op1,
6423 gen_highpart (innermode, temp),
6424 unsignedp);
6425 emit_move_insn (gen_highpart (innermode, temp), htem);
6426 return temp;
6427 }
bbf6f052
RK
6428 }
6429 }
6430 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 6431 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6432 return expand_mult (mode, op0, op1, target, unsignedp);
6433
6434 case TRUNC_DIV_EXPR:
6435 case FLOOR_DIV_EXPR:
6436 case CEIL_DIV_EXPR:
6437 case ROUND_DIV_EXPR:
6438 case EXACT_DIV_EXPR:
6439 preexpand_calls (exp);
e5e809f4 6440 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
6441 subtarget = 0;
 6442 /* Possible optimization: compute the dividend with EXPAND_SUM;
 6443 then, if the divisor is constant, optimize the case
 6444 where some terms of the dividend have coefficients divisible by it. */
6445 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 6446 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6447 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6448
6449 case RDIV_EXPR:
6450 this_optab = flodiv_optab;
6451 goto binop;
6452
6453 case TRUNC_MOD_EXPR:
6454 case FLOOR_MOD_EXPR:
6455 case CEIL_MOD_EXPR:
6456 case ROUND_MOD_EXPR:
6457 preexpand_calls (exp);
e5e809f4 6458 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
6459 subtarget = 0;
6460 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 6461 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6462 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6463
6464 case FIX_ROUND_EXPR:
6465 case FIX_FLOOR_EXPR:
6466 case FIX_CEIL_EXPR:
6467 abort (); /* Not used for C. */
6468
6469 case FIX_TRUNC_EXPR:
906c4e36 6470 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6471 if (target == 0)
6472 target = gen_reg_rtx (mode);
6473 expand_fix (target, op0, unsignedp);
6474 return target;
6475
6476 case FLOAT_EXPR:
906c4e36 6477 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6478 if (target == 0)
6479 target = gen_reg_rtx (mode);
6480 /* expand_float can't figure out what to do if FROM has VOIDmode.
6481 So give it the correct mode. With -O, cse will optimize this. */
6482 if (GET_MODE (op0) == VOIDmode)
6483 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6484 op0);
6485 expand_float (target, op0,
6486 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6487 return target;
6488
6489 case NEGATE_EXPR:
5b22bee8 6490 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
bbf6f052
RK
6491 temp = expand_unop (mode, neg_optab, op0, target, 0);
6492 if (temp == 0)
6493 abort ();
6494 return temp;
6495
6496 case ABS_EXPR:
6497 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6498
2d7050fd 6499 /* Handle complex values specially. */
d6a5ac33
RK
6500 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6501 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6502 return expand_complex_abs (mode, op0, target, unsignedp);
2d7050fd 6503
bbf6f052
RK
6504 /* Unsigned abs is simply the operand. Testing here means we don't
6505 risk generating incorrect code below. */
6506 if (TREE_UNSIGNED (type))
6507 return op0;
6508
2e5ec6cf 6509 return expand_abs (mode, op0, target, unsignedp,
e5e809f4 6510 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
bbf6f052
RK
6511
6512 case MAX_EXPR:
6513 case MIN_EXPR:
6514 target = original_target;
e5e809f4 6515 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
fc155707 6516 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
d6a5ac33 6517 || GET_MODE (target) != mode
bbf6f052
RK
6518 || (GET_CODE (target) == REG
6519 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6520 target = gen_reg_rtx (mode);
906c4e36 6521 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6522 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6523
6524 /* First try to do it with a special MIN or MAX instruction.
6525 If that does not win, use a conditional jump to select the proper
6526 value. */
6527 this_optab = (TREE_UNSIGNED (type)
6528 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6529 : (code == MIN_EXPR ? smin_optab : smax_optab));
6530
6531 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6532 OPTAB_WIDEN);
6533 if (temp != 0)
6534 return temp;
6535
fa2981d8
JW
6536 /* At this point, a MEM target is no longer useful; we will get better
6537 code without it. */
6538
6539 if (GET_CODE (target) == MEM)
6540 target = gen_reg_rtx (mode);
6541
ee456b1c
RK
6542 if (target != op0)
6543 emit_move_insn (target, op0);
d6a5ac33 6544
bbf6f052 6545 op0 = gen_label_rtx ();
d6a5ac33 6546
f81497d9
RS
6547 /* If this mode is an integer too wide to compare properly,
6548 compare word by word. Rely on cse to optimize constant cases. */
d6a5ac33 6549 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
bbf6f052 6550 {
f81497d9 6551 if (code == MAX_EXPR)
d6a5ac33
RK
6552 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6553 target, op1, NULL_RTX, op0);
bbf6f052 6554 else
d6a5ac33
RK
6555 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6556 op1, target, NULL_RTX, op0);
ee456b1c 6557 emit_move_insn (target, op1);
bbf6f052 6558 }
f81497d9
RS
6559 else
6560 {
6561 if (code == MAX_EXPR)
6562 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
ee456b1c
RK
6563 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6564 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
f81497d9
RS
6565 else
6566 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
ee456b1c
RK
6567 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6568 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
f81497d9 6569 if (temp == const0_rtx)
ee456b1c 6570 emit_move_insn (target, op1);
f81497d9
RS
6571 else if (temp != const_true_rtx)
6572 {
6573 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6574 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6575 else
6576 abort ();
ee456b1c 6577 emit_move_insn (target, op1);
f81497d9
RS
6578 }
6579 }
bbf6f052
RK
6580 emit_label (op0);
6581 return target;
6582
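      /* For illustration: when no min/max pattern exists, a signed
	 MAX_EXPR is emitted essentially as

	     target = op0;
	     if (target >= op1) goto lab;
	     target = op1;
	   lab:

	 i.e. target = (op0 >= op1 ? op0 : op1).  */
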
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worthwhile.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */

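      /* For example, with zero-or-one operands, "a && b" expanded as
	 TRUTH_AND_EXPR evaluates both A and B and ands the bits, while
	 TRUTH_ANDIF_EXPR would branch around the evaluation of B when A
	 is zero; the former wins when B is cheap and side-effect free.  */
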
    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);

      /* Could determine the answer when only additive constants differ.
	 Also, the addition of one can be handled by changing the
	 condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
      preexpand_calls (exp);
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && GET_CODE (original_target) == REG
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  if (temp != original_target)
	    temp = copy_to_reg (temp);

	  op1 = gen_label_rtx ();
	  emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
			 GET_MODE (temp), unsignedp, 0);
	  emit_jump_insn (gen_beq (op1));
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}

      /* If no set-flag instruction, must generate a conditional
	 store into a temporary variable.  Drop through
	 and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0 || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && GET_CODE (target) == REG
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;

    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
			  (ignore ? const0_rtx : target),
			  VOIDmode, 0);

    case COND_EXPR:
      /* If we would have a "singleton" (see below) were it not for a
	 conversion in each arm, bring that conversion back out.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
	{
	  tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
	  tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

	  if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
	       && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
		  && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
		  && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
		  && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
	    return expand_expr (build1 (NOP_EXPR, type,
					build (COND_EXPR, TREE_TYPE (true),
					       TREE_OPERAND (exp, 0),
					       true, false)),
				target, tmode, modifier);
	}

      {
	/* Note that COND_EXPRs whose type is a structure or union
	   are required to be constructed to contain assignments of
	   a temporary variable, so that we can evaluate them here
	   for side effect only.  If type is void, we must do likewise.  */

	/* If an arm of the branch requires a cleanup,
	   only that cleanup is performed.  */

	tree singleton = 0;
	tree binary_op = 0, unary_op = 0;

	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
	   convert it to our mode, if necessary.  */
	if (integer_onep (TREE_OPERAND (exp, 1))
	    && integer_zerop (TREE_OPERAND (exp, 2))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    if (ignore)
	      {
		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			     ro_modifier);
		return const0_rtx;
	      }

	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
	    if (GET_MODE (op0) == mode)
	      return op0;

	    if (target == 0)
	      target = gen_reg_rtx (mode);
	    convert_move (target, op0, unsignedp);
	    return target;
	  }

	/* Check for X ? A + B : A.  If we have this, we can copy A to the
	   output and conditionally add B.  Similarly for unary operations.
	   Don't do this if X has side-effects because those side effects
	   might affect A or B and the "?" operation is a sequence point in
	   ANSI.  (operand_equal_p tests for side effects.)  */

	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
	    && operand_equal_p (TREE_OPERAND (exp, 2),
				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 2),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);

	/* If we are not to produce a result, we have no target.  Otherwise,
	   if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */

	if (ignore)
	  temp = 0;
	else if (original_target
		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
		     || (singleton && GET_CODE (original_target) == REG
			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
			 && original_target == var_rtx (singleton)))
		 && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
		 && (! can_conditionally_move_p (mode)
		     || GET_CODE (original_target) == REG
		     || TREE_ADDRESSABLE (type))
#endif
		 && ! (GET_CODE (original_target) == MEM
		       && MEM_VOLATILE_P (original_target)))
	  temp = original_target;
	else if (TREE_ADDRESSABLE (type))
	  abort ();
	else
	  temp = assign_temp (type, 0, 0, 1);

	/* If we had X ? A + C : A, with C a constant power of 2, and we can
	   do the test of X as a store-flag operation, do this as
	   A + ((X != 0) << log C).  Similarly for other simple binary
	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
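	/* For example, "x ? a + 4 : a" can become a + ((x != 0) << 2)
	   when X can be computed with a store-flag (scc) instruction,
	   avoiding a branch entirely.  */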
	if (temp && singleton && binary_op
	    && (TREE_CODE (binary_op) == PLUS_EXPR
		|| TREE_CODE (binary_op) == MINUS_EXPR
		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
		: integer_onep (TREE_OPERAND (binary_op, 1)))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    rtx result;
	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
			    : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
			    : xor_optab);

	    /* If we had X ? A : A + 1, do this as A + (X == 0).

	       We have to invert the truth value here and then put it
	       back later if do_store_flag fails.  We cannot simply copy
	       TREE_OPERAND (exp, 0) to another variable and modify that
	       because invert_truthvalue can modify the tree pointed to
	       by its argument.  */
	    if (singleton == TREE_OPERAND (exp, 1))
	      TREE_OPERAND (exp, 0)
		= invert_truthvalue (TREE_OPERAND (exp, 0));

	    result = do_store_flag (TREE_OPERAND (exp, 0),
				    (safe_from_p (temp, singleton, 1)
				     ? temp : NULL_RTX),
				    mode, BRANCH_COST <= 1);

	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
	      result = expand_shift (LSHIFT_EXPR, mode, result,
				     build_int_2 (tree_log2
						  (TREE_OPERAND
						   (binary_op, 1)),
						  0),
				     (safe_from_p (temp, singleton, 1)
				      ? temp : NULL_RTX), 0);

	    if (result)
	      {
		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
		return expand_binop (mode, boptab, op1, result, temp,
				     unsignedp, OPTAB_LIB_WIDEN);
	      }
	    else if (singleton == TREE_OPERAND (exp, 1))
	      TREE_OPERAND (exp, 0)
		= invert_truthvalue (TREE_OPERAND (exp, 0));
	  }

	do_pending_stack_adjust ();
	NO_DEFER_POP;
	op0 = gen_label_rtx ();

	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
	  {
	    if (temp != 0)
	      {
		/* If the target conflicts with the other operand of the
		   binary op, we can't use it.  Also, we can't use the target
		   if it is a hard register, because evaluating the condition
		   might clobber it.  */
		if ((binary_op
		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
		    || (GET_CODE (temp) == REG
			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
		  temp = gen_reg_rtx (mode);
		store_expr (singleton, temp, 0);
	      }
	    else
	      expand_expr (singleton,
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    if (singleton == TREE_OPERAND (exp, 1))
	      jumpif (TREE_OPERAND (exp, 0), op0);
	    else
	      jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    if (binary_op && temp == 0)
	      /* Just touch the other operand.  */
	      expand_expr (TREE_OPERAND (binary_op, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    else if (binary_op)
	      store_expr (build (TREE_CODE (binary_op), type,
				 make_tree (type, temp),
				 TREE_OPERAND (binary_op, 1)),
			  temp, 0);
	    else
	      store_expr (build1 (TREE_CODE (unary_op), type,
				  make_tree (type, temp)),
			  temp, 0);
	    op1 = op0;
	  }
	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
	   comparison operator.  If we have one of these cases, set the
	   output to A, branch on A (cse will merge these two references),
	   then set the output to FOO.  */
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 1), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
	  {
	    if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    jumpif (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    op1 = op0;
	  }
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 2), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
	  {
	    if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    op1 = op0;
	  }
	else
	  {
	    op1 = gen_label_rtx ();
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    if (temp != 0)
	      store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    end_cleanup_deferral ();
	    emit_queue ();
	    emit_jump_insn (gen_jump (op1));
	    emit_barrier ();
	    emit_label (op0);
	    start_cleanup_deferral ();
	    if (temp != 0)
	      store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 2),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	  }

	end_cleanup_deferral ();

	emit_queue ();
	emit_label (op1);
	OK_DEFER_POP;

	return temp;
      }

    case TARGET_EXPR:
      {
	/* Something needs to be initialized, but we didn't know
	   where that thing was when building the tree.  For example,
	   it could be the return value of a function, or a parameter
	   to a function which is laid down in the stack, or a temporary
	   variable which must be passed by reference.

	   We guarantee that the expression will either be constructed
	   or copied into our original target.  */

	tree slot = TREE_OPERAND (exp, 0);
	tree cleanups = NULL_TREE;
	tree exp1;

	if (TREE_CODE (slot) != VAR_DECL)
	  abort ();

	if (! ignore)
	  target = original_target;

	if (target == 0)
	  {
	    if (DECL_RTL (slot) != 0)
	      {
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		target = assign_temp (type, 2, 0, 1);
		/* All temp slots at this level must not conflict.  */
		preserve_temp_slots (target);
		DECL_RTL (slot) = target;
		if (TREE_ADDRESSABLE (slot))
		  {
		    TREE_ADDRESSABLE (slot) = 0;
		    mark_addressable (slot);
		  }

		/* Since SLOT is not known to the called function
		   to belong to its stack frame, we must build an explicit
		   cleanup.  This case occurs when we must build up a reference
		   to pass the reference as an argument.  In this case,
		   it is very likely that such a reference need not be
		   built here.  */

		if (TREE_OPERAND (exp, 2) == 0)
		  TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
		cleanups = TREE_OPERAND (exp, 2);
	      }
	  }
	else
	  {
	    /* This case does occur when expanding a parameter which
	       needs to be constructed on the stack.  The target
	       is the actual stack address that we want to initialize.
	       The function we call will perform the cleanup in this case.  */

	    /* If we have already assigned it space, use that space,
	       not the target that we were passed in, as our target
	       parameter is only a hint.  */
	    if (DECL_RTL (slot) != 0)
	      {
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		DECL_RTL (slot) = target;
		/* If we must have an addressable slot, then make sure that
		   the RTL that we just stored in slot is OK.  */
		if (TREE_ADDRESSABLE (slot))
		  {
		    TREE_ADDRESSABLE (slot) = 0;
		    mark_addressable (slot);
		  }
	      }
	  }

	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
	/* Mark it as expanded.  */
	TREE_OPERAND (exp, 1) = NULL_TREE;

	TREE_USED (slot) = 1;
	store_expr (exp1, target, 0);

	expand_decl_cleanup (NULL_TREE, cleanups);

	return target;
      }

    case INIT_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);
	tree noncopied_parts = 0;
	tree lhs_type = TREE_TYPE (lhs);

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
	  noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
						  TYPE_NONCOPIED_PARTS (lhs_type));
	while (noncopied_parts != 0)
	  {
	    expand_assignment (TREE_VALUE (noncopied_parts),
			       TREE_PURPOSE (noncopied_parts), 0, 0);
	    noncopied_parts = TREE_CHAIN (noncopied_parts);
	  }
	return temp;
      }

    case MODIFY_EXPR:
      {
	/* If lhs is complex, expand calls in rhs before computing it.
	   That's so we don't compute a pointer and save it over a call.
	   If lhs is simple, compute it first so we can give it as a
	   target if the rhs is just a call.  This avoids an extra temp and copy
	   and that prevents a partial-subsumption which makes bad code.
	   Actually we could treat component_ref's of vars like vars.  */

	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);
	tree noncopied_parts = 0;
	tree lhs_type = TREE_TYPE (lhs);

	temp = 0;

	if (TREE_CODE (lhs) != VAR_DECL
	    && TREE_CODE (lhs) != RESULT_DECL
	    && TREE_CODE (lhs) != PARM_DECL
	    && ! (TREE_CODE (lhs) == INDIRECT_REF
		  && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
	  preexpand_calls (exp);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
	if (ignore
	    && TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
	    && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
	  {
	    rtx label = gen_label_rtx ();

	    do_jump (TREE_OPERAND (rhs, 1),
		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR
					      ? integer_one_node
					      : integer_zero_node)),
			       0, 0);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
	    && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
	  noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
						  TYPE_NONCOPIED_PARTS (lhs_type));

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	while (noncopied_parts != 0)
	  {
	    expand_assignment (TREE_PURPOSE (noncopied_parts),
			       TREE_VALUE (noncopied_parts), 0, 0);
	    noncopied_parts = TREE_CHAIN (noncopied_parts);
	  }
	return temp;
      }
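      /* For example, when the result is not needed, "a.x |= a.y" with
	 one-bit fields X and Y becomes a jump past the store when a.y
	 is zero and a store of 1 into a.x otherwise, avoiding a
	 read-modify-write of the containing word.  */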

    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0, ignore);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore, ignore);

    case ADDR_EXPR:
      /* If nonzero, TEMP will be set to the address of something that might
	 be a MEM corresponding to a stack slot.  */
      temp = 0;

      /* Are we taking the address of a nested function?  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0
	  && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
	  && ! TREE_STATIC (exp))
	{
	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
	  op0 = force_operand (op0, target);
	}
      /* If we are taking the address of something erroneous, just
	 return a zero.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
	return const0_rtx;
      else
	{
	  /* We make sure to pass const0_rtx down if we came in with
	     ignore set, to avoid doing the cleanups twice for something.  */
	  op0 = expand_expr (TREE_OPERAND (exp, 0),
			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
			     (modifier == EXPAND_INITIALIZER
			      ? modifier : EXPAND_CONST_ADDRESS));

	  /* If we are going to ignore the result, OP0 will have been set
	     to const0_rtx, so just return it.  Don't get confused and
	     think we are taking the address of the constant.  */
	  if (ignore)
	    return op0;

	  op0 = protect_from_queue (op0, 0);

	  /* We would like the object in memory.  If it is a constant,
	     we can have it be statically allocated into memory.  For
	     a non-constant (REG, SUBREG or CONCAT), we need to allocate some
	     memory and store the value into it.  */

	  if (CONSTANT_P (op0))
	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				   op0);
	  else if (GET_CODE (op0) == MEM)
	    {
	      mark_temp_addr_taken (op0);
	      temp = XEXP (op0, 0);
	    }

	  else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		   || GET_CODE (op0) == CONCAT)
	    {
	      /* If this object is in a register, it must not
		 be BLKmode.  */
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      rtx memloc = assign_temp (inner_type, 1, 1, 1);

	      mark_temp_addr_taken (memloc);
	      emit_move_insn (memloc, op0);
	      op0 = memloc;
	    }

	  if (GET_CODE (op0) != MEM)
	    abort ();

	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	    {
	      temp = XEXP (op0, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
	      if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
		  && mode == ptr_mode)
		temp = convert_memory_address (ptr_mode, temp);
#endif
	      return temp;
	    }

	  op0 = force_operand (XEXP (op0, 0), target);
	}

      if (flag_force_addr && GET_CODE (op0) != REG)
	op0 = force_reg (Pmode, op0);

      if (GET_CODE (op0) == REG
	  && ! REG_USERVAR_P (op0))
	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);

      /* If we might have had a temp slot, add an equivalent address
	 for it.  */
      if (temp != 0)
	update_temp_slot_address (temp, op0);

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
	  && mode == ptr_mode)
	op0 = convert_memory_address (ptr_mode, op0);
#endif

      return op0;

    case ENTRY_VALUE_EXPR:
      abort ();

      /* COMPLEX type for Extended Pascal & Fortran.  */
    case COMPLEX_EXPR:
      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx insns;

	/* Get the rtx code of the operands.  */
	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	start_sequence ();

	/* Move the real (op0) and imaginary (op1) parts to their location.  */
	emit_move_insn (gen_realpart (mode, target), op0);
	emit_move_insn (gen_imagpart (mode, target), op1);

	insns = get_insns ();
	end_sequence ();

	/* Complex construction should appear as a single unit.  */
	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
	else
	  emit_insns (insns);

	return target;
      }

    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);

    case CONJ_EXPR:
      {
	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx imag_t;
	rtx insns;

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (mode);

	start_sequence ();

	/* Store the realpart and the negated imagpart to target.  */
	emit_move_insn (gen_realpart (partmode, target),
			gen_realpart (partmode, op0));

	imag_t = gen_imagpart (partmode, target);
	temp = expand_unop (partmode, neg_optab,
			    gen_imagpart (partmode, op0), imag_t, 0);
	if (temp != imag_t)
	  emit_move_insn (imag_t, temp);

	insns = get_insns ();
	end_sequence ();

	/* Conjugate should appear as a single unit.
	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
	else
	  emit_insns (insns);

	return target;
      }

    case TRY_CATCH_EXPR:
      {
	tree handler = TREE_OPERAND (exp, 1);

	expand_eh_region_start ();

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	expand_eh_region_end (handler);

	return op0;
      }

    case POPDCC_EXPR:
      {
	rtx dcc = get_dynamic_cleanup_chain ();
	emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
	return const0_rtx;
      }

    case POPDHC_EXPR:
      {
	rtx dhc = get_dynamic_handler_chain ();
	emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
	return const0_rtx;
      }

    case ERROR_MARK:
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
	return op0;
      return const0_rtx;

    default:
      return (*lang_expand_expr) (exp, original_target, tmode, modifier);
    }

  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  preexpand_calls (exp);
  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
    subtarget = 0;
  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
 binop2:
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}

\f
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to (if it is not a POINTER_TYPE, 0 is returned).

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

static int
get_pointer_alignment (exp, max_align)
     tree exp;
     unsigned max_align;
{
  unsigned align, inner;

  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
    return 0;

  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case NON_LVALUE_EXPR:
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
	    return align;
	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
	    return align;

	  while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
		  & (max_align - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (exp) == FUNCTION_DECL)
	    align = FUNCTION_BOUNDARY;
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
	    align = DECL_ALIGN (exp);
#ifdef CONSTANT_ALIGNMENT
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
	    align = CONSTANT_ALIGNMENT (exp, align);
#endif
	  return MIN (align, max_align);

	default:
	  return align;
	}
    }
}
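
/* For example, for "(int *) (p + 3)" with P a char pointer, the
   PLUS_EXPR case above clips MAX_ALIGN down to 8 bits: a 3-byte
   offset can leave the address aligned to no more than a byte,
   since 24 bits is not a multiple of any larger power of two.  */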
\f
/* Return the tree node and offset if a given argument corresponds to
   a string constant.  */

static tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = integer_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = arg1;
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = arg0;
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
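
/* For example, for the argument tree of "wow" + 2, string_constant
   returns the STRING_CST for "wow" and sets *PTR_OFFSET to 2; for a
   plain "wow" the offset is integer_zero_node.  */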

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

static tree
c_strlen (src)
     tree src;
{
  tree offset_node;
  int offset, max;
  char *ptr;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;
  max = TREE_STRING_LENGTH (src);
  ptr = TREE_STRING_POINTER (src);
  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;
      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return 0;
      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  */
      /* This would perhaps not be valid if we were dealing with named
	 arrays in addition to literal string constants.  */
      return size_binop (MINUS_EXPR, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character.  */
  if (offset_node == 0)
    offset = 0;
  else
    {
      /* Did we get a long long offset?  If so, punt.  */
      if (TREE_INT_CST_HIGH (offset_node) != 0)
	return 0;
      offset = TREE_INT_CST_LOW (offset_node);
    }
  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      warning ("offset outside bounds of constant string");
      return 0;
    }
  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return size_int (strlen (ptr + offset));
}
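
/* For example, c_strlen of "hello" + 2 yields 3, computed with strlen
   at compile time; c_strlen of "foo\0bar" + i, with I not constant,
   yields 0 (unknown), since the embedded zero byte makes length minus
   offset the wrong answer.  */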

rtx
expand_builtin_return_addr (fndecl_code, count, tem)
     enum built_in_function fndecl_code;
     int count;
     rtx tem;
{
  int i;

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the sparc, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the sparc, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
    }

  /* For __builtin_frame_address, return what we've got.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return tem;

  /* For __builtin_return_address, get the return address from that
     frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_rtx_MEM (Pmode, tem);
#endif
  return tem;
}

/* __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below (and for longjmp) is copied from the handling of
   non-local gotos.

   NOTE: This is intended for use by GNAT and the exception handling
   scheme in the compiler and will only work in the method used by
   them.  */

rtx
expand_builtin_setjmp (buf_addr, target, first_label, next_label)
     rtx buf_addr;
     rtx target;
     rtx first_label, next_label;
{
  rtx lab1 = gen_label_rtx ();
  enum machine_mode sa_mode = Pmode, value_mode;
  rtx stack_save;

  value_mode = TYPE_MODE (integer_type_node);

#ifdef POINTERS_EXTEND_UNSIGNED
  buf_addr = convert_memory_address (Pmode, buf_addr);
#endif

  buf_addr = force_reg (Pmode, buf_addr);

  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (value_mode);

  emit_queue ();

  /* We store the frame pointer and the address of lab1 in the buffer
     and use the rest of it for the stack save area, which is
     machine-dependent.  */
  emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
		  virtual_stack_vars_rtx);
  emit_move_insn (validize_mem
		  (gen_rtx_MEM (Pmode,
				plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)))),
		  gen_rtx_LABEL_REF (Pmode, lab1));

#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
#endif

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* Set TARGET to zero and branch to the first-time-through label.  */
  emit_move_insn (target, const0_rtx);
  emit_jump_insn (gen_jump (first_label));
  emit_barrier ();
  emit_label (lab1);

  /* Tell flow about the strange goings on.  */
  current_function_has_nonlocal_label = 1;

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  The code below is from expand_end_bindings
     in stmt.c; see detailed documentation there.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      int i;
      static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == sizeof elim_regs / sizeof elim_regs[0])
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.
	     If there hasn't been space allocated for it yet, make
	     some now.  */
	  if (arg_pointer_save_area == 0)
	    arg_pointer_save_area
	      = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (arg_pointer_save_area));
	}
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (lab1));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
  else
#endif
    {
      ; /* Nothing */
    }

  /* Set TARGET, and branch to the next-time-through label.  */
  emit_move_insn (target, gen_lowpart (GET_MODE (target), static_chain_rtx));
  emit_jump_insn (gen_jump (next_label));
  emit_barrier ();

  return target;
}
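
/* For reference, the words of the setjmp buffer as used above are:
   word 0, the saved frame pointer (virtual_stack_vars_rtx); word 1,
   the address of the receiver label LAB1; words 2 and up, the stack
   save area, in whatever mode SAVE_NONLOCAL requires.  */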

void
expand_builtin_longjmp (buf_addr, value)
     rtx buf_addr, value;
{
  rtx fp, lab, stack;
  enum machine_mode sa_mode;

#ifdef POINTERS_EXTEND_UNSIGNED
  buf_addr = convert_memory_address (Pmode, buf_addr);
#endif
  buf_addr = force_reg (Pmode, buf_addr);

  /* The value sent by longjmp is not allowed to be zero.  Force it
     to one if so.  */
  if (GET_CODE (value) == CONST_INT)
    {
      if (INTVAL (value) == 0)
	value = const1_rtx;
    }
  else
    {
      lab = gen_label_rtx ();

      emit_cmp_insn (value, const0_rtx, NE, NULL_RTX, GET_MODE (value), 0, 0);
      emit_jump_insn (gen_bne (lab));
      emit_move_insn (value, const1_rtx);
      emit_label (lab);
    }

  /* Make sure the value is in the right mode to be copied to the chain.  */
  if (GET_MODE (value) != VOIDmode)
    value = gen_lowpart (GET_MODE (static_chain_rtx), value);

#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    {
      /* Copy the "return value" to the static chain reg.  */
      emit_move_insn (static_chain_rtx, value);
      emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
      emit_insn (gen_builtin_longjmp (buf_addr));
    }
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

#ifdef HAVE_save_stack_nonlocal
      sa_mode = (HAVE_save_stack_nonlocal
		 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
		 : Pmode);
#else
      sa_mode = Pmode;
#endif

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  /* Copy the "return value" to the static chain reg.  */
	  emit_move_insn (static_chain_rtx, value);

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
	  emit_indirect_jump (lab);
	}
    }
}
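
/* Note the ANSI-mandated quirk implemented above: longjmp with a value
   of zero acts like longjmp with a value of one, whether the zero is
   detected at compile time or only by the runtime test.  */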
60bac6ea 7876
b93a436e
JL
7877\f
7878/* Expand an expression EXP that calls a built-in function,
7879 with result going to TARGET if that's convenient
7880 (and in mode MODE if that's convenient).
7881 SUBTARGET may be used as the target for computing one of EXP's operands.
7882 IGNORE is nonzero if the value is to be ignored. */
60bac6ea 7883
b93a436e
JL
7884#define CALLED_AS_BUILT_IN(NODE) \
7885 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
60bac6ea 7886
b93a436e
JL
7887static rtx
7888expand_builtin (exp, target, subtarget, mode, ignore)
7889 tree exp;
7890 rtx target;
7891 rtx subtarget;
7892 enum machine_mode mode;
7893 int ignore;
7894{
7895 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7896 tree arglist = TREE_OPERAND (exp, 1);
7897 rtx op0;
7898 rtx lab1, insns;
7899 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7900 optab builtin_optab;
60bac6ea 7901
b93a436e
JL
7902 switch (DECL_FUNCTION_CODE (fndecl))
7903 {
7904 case BUILT_IN_ABS:
7905 case BUILT_IN_LABS:
7906 case BUILT_IN_FABS:
7907 /* build_function_call changes these into ABS_EXPR. */
7908 abort ();
4ed67205 7909
b93a436e
JL
7910 case BUILT_IN_SIN:
7911 case BUILT_IN_COS:
7912 /* Treat these like sqrt, but only if the user asks for them. */
7913 if (! flag_fast_math)
7914 break;
7915 case BUILT_IN_FSQRT:
7916 /* If not optimizing, call the library function. */
7917 if (! optimize)
7918 break;
4ed67205 7919
b93a436e
JL
7920 if (arglist == 0
7921 /* Arg could be wrong type if user redeclared this fcn wrong. */
7922 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
4ed67205
RK
7923 break;
7924
b93a436e
JL
7925 /* Stabilize and compute the argument. */
7926 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7927 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7928 {
7929 exp = copy_node (exp);
7930 arglist = copy_node (arglist);
7931 TREE_OPERAND (exp, 1) = arglist;
7932 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7933 }
7934 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
b089937a 7935
b93a436e
JL
7936 /* Make a suitable register to place result in. */
7937 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7565a035 7938
b93a436e
JL
7939 emit_queue ();
7940 start_sequence ();
7565a035 7941
b93a436e
JL
7942 switch (DECL_FUNCTION_CODE (fndecl))
7943 {
7944 case BUILT_IN_SIN:
7945 builtin_optab = sin_optab; break;
7946 case BUILT_IN_COS:
7947 builtin_optab = cos_optab; break;
7948 case BUILT_IN_FSQRT:
7949 builtin_optab = sqrt_optab; break;
7950 default:
7951 abort ();
7952 }
4ed67205 7953
b93a436e
JL
7954 /* Compute into TARGET.
7955 Set TARGET to wherever the result comes back. */
7956 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7957 builtin_optab, op0, target, 0);
7958
7959 /* If we were unable to expand via the builtin, stop the
7960 sequence (without outputting the insns) and break, causing
7961 a call the the library function. */
7962 if (target == 0)
4ed67205 7963 {
b93a436e
JL
7964 end_sequence ();
7965 break;
7966 }
4ed67205 7967
b93a436e
JL
7968 /* Check the results by default. But if flag_fast_math is turned on,
7969 then assume sqrt will always be called with valid arguments. */
4ed67205 7970
b93a436e
JL
7971 if (! flag_fast_math)
7972 {
7973 /* Don't define the builtin FP instructions
7974 if your machine is not IEEE. */
7975 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
7976 abort ();
4ed67205 7977
b93a436e 7978 lab1 = gen_label_rtx ();
ca55abae 7979
b93a436e
JL
7980 /* Test the result; if it is NaN, set errno=EDOM because
7981 the argument was not in the domain. */
7982 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
7983 emit_jump_insn (gen_beq (lab1));
7984
7985#ifdef TARGET_EDOM
7986 {
7987#ifdef GEN_ERRNO_RTX
7988 rtx errno_rtx = GEN_ERRNO_RTX;
7989#else
7990 rtx errno_rtx
7991 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
7992#endif
e87b4f3f 7993
b93a436e
JL
7994 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
7995 }
7996#else
7997 /* We can't set errno=EDOM directly; let the library call do it.
7998 Pop the arguments right away in case the call gets deleted. */
7999 NO_DEFER_POP;
8000 expand_call (exp, target, 0);
8001 OK_DEFER_POP;
8002#endif
e7c33f54 8003
b93a436e
JL
8004 emit_label (lab1);
8005 }
0006469d 8006
b93a436e
JL
8007 /* Output the entire sequence. */
8008 insns = get_insns ();
8009 end_sequence ();
8010 emit_insns (insns);
8011
8012 return target;
0006469d 8013
b93a436e
JL
8014 case BUILT_IN_FMOD:
8015 break;
0006469d 8016
b93a436e
JL
8017 /* __builtin_apply_args returns block of memory allocated on
8018 the stack into which is stored the arg pointer, structure
8019 value address, static chain, and all the registers that might
8020 possibly be used in performing a function call. The code is
8021 moved to the start of the function so the incoming values are
8022 saved. */
8023 case BUILT_IN_APPLY_ARGS:
8024 /* Don't do __builtin_apply_args more than once in a function.
8025 Save the result of the first call and reuse it. */
8026 if (apply_args_value != 0)
8027 return apply_args_value;
8028 {
8029 /* When this function is called, it means that registers must be
8030 saved on entry to this function. So we migrate the
8031 call to the first insn of this function. */
8032 rtx temp;
8033 rtx seq;
0006469d 8034
b93a436e
JL
8035 start_sequence ();
8036 temp = expand_builtin_apply_args ();
8037 seq = get_insns ();
8038 end_sequence ();
0006469d 8039
b93a436e 8040 apply_args_value = temp;
0006469d 8041
b93a436e
JL
8042 /* Put the sequence after the NOTE that starts the function.
8043 If this is inside a SEQUENCE, make the outer-level insn
8044 chain current, so the code is placed at the start of the
8045 function. */
8046 push_topmost_sequence ();
8047 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8048 pop_topmost_sequence ();
8049 return temp;
8050 }
0006469d 8051
b93a436e
JL
8052 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8053 FUNCTION with a copy of the parameters described by
8054 ARGUMENTS, and ARGSIZE. It returns a block of memory
8055 allocated on the stack into which is stored all the registers
8056 that might possibly be used for returning the result of a
8057 function. ARGUMENTS is the value returned by
8058 __builtin_apply_args. ARGSIZE is the number of bytes of
8059 arguments that must be copied. ??? How should this value be
8060 computed? We'll also need a safe worst case value for varargs
8061 functions. */
8062 case BUILT_IN_APPLY:
8063 if (arglist == 0
8064 /* Arg could be non-pointer if user redeclared this fcn wrong. */
e5e809f4 8065 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
b93a436e
JL
8066 || TREE_CHAIN (arglist) == 0
8067 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8068 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8069 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8070 return const0_rtx;
8071 else
8072 {
8073 int i;
8074 tree t;
8075 rtx ops[3];
0006469d 8076
b93a436e
JL
8077 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8078 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8079
8080 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8081 }
8082
8083 /* __builtin_return (RESULT) causes the function to return the
8084 value described by RESULT. RESULT is address of the block of
8085 memory returned by __builtin_apply. */
8086 case BUILT_IN_RETURN:
8087 if (arglist
8088 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8089 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8090 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8091 NULL_RTX, VOIDmode, 0));
8092 return const0_rtx;
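 /* Taken together, __builtin_apply_args, __builtin_apply and
    __builtin_return let one function forward its incoming arguments to
    another without knowing their number or types.  A sketch, in which
    TARGET_FN and the worst-case ARGSIZE of 64 bytes are hypothetical:

	void *args = __builtin_apply_args ();
	void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
	__builtin_return (result);

    As the ??? comment above notes, there is no portable way to compute
    ARGSIZE; it must be a safe upper bound for the callee.  */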
8093
8094 case BUILT_IN_SAVEREGS:
8095 /* Don't do __builtin_saveregs more than once in a function.
8096 Save the result of the first call and reuse it. */
8097 if (saveregs_value != 0)
8098 return saveregs_value;
8099 {
8100 /* When this function is called, it means that registers must be
8101 saved on entry to this function. So we migrate the
8102 call to the first insn of this function. */
8103 rtx temp;
8104 rtx seq;
8105
8106 /* Now really call the function. `expand_call' does not call
8107 expand_builtin, so there is no danger of infinite recursion here. */
8108 start_sequence ();
8109
8110#ifdef EXPAND_BUILTIN_SAVEREGS
8111 /* Do whatever the machine needs done in this case. */
8112 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8113#else
8114 /* The register where the function returns its value
8115 is likely to have something else in it, such as an argument.
8116 So preserve that register around the call. */
8117
8118 if (value_mode != VOIDmode)
8119 {
8120 rtx valreg = hard_libcall_value (value_mode);
8121 rtx saved_valreg = gen_reg_rtx (value_mode);
8122
8123 emit_move_insn (saved_valreg, valreg);
8124 temp = expand_call (exp, target, ignore);
8125 emit_move_insn (valreg, saved_valreg);
8126 }
8127 else
8128 /* Generate the call, putting the value in a pseudo. */
8129 temp = expand_call (exp, target, ignore);
8130#endif
8131
8132 seq = get_insns ();
8133 end_sequence ();
8134
8135 saveregs_value = temp;
8136
8137 /* Put the sequence after the NOTE that starts the function.
8138 If this is inside a SEQUENCE, make the outer-level insn
8139 chain current, so the code is placed at the start of the
8140 function. */
8141 push_topmost_sequence ();
8142 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8143 pop_topmost_sequence ();
8144 return temp;
8145 }
8146
8147 /* __builtin_args_info (N) returns word N of the arg space info
8148 for the current function. The number and meanings of words
8149 is controlled by the definition of CUMULATIVE_ARGS. */
8150 case BUILT_IN_ARGS_INFO:
8151 {
8152 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8153 int *word_ptr = (int *) &current_function_args_info;
8154#if 0
8155 /* These are used by the code below that is if 0'ed away */
8156 int i;
8157 tree type, elts, result;
8158#endif
8159
8160 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8161 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8162 __FILE__, __LINE__);
8163
8164 if (arglist != 0)
8165 {
8166 tree arg = TREE_VALUE (arglist);
8167 if (TREE_CODE (arg) != INTEGER_CST)
8168 error ("argument of `__builtin_args_info' must be constant");
8169 else
8170 {
8171 int wordnum = TREE_INT_CST_LOW (arg);
8172
8173 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8174 error ("argument of `__builtin_args_info' out of range");
8175 else
8176 return GEN_INT (word_ptr[wordnum]);
8177 }
8178 }
8179 else
8180 error ("missing argument in `__builtin_args_info'");
8181
8182 return const0_rtx;
8183
8184#if 0
8185 for (i = 0; i < nwords; i++)
8186 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8187
8188 type = build_array_type (integer_type_node,
8189 build_index_type (build_int_2 (nwords, 0)));
8190 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8191 TREE_CONSTANT (result) = 1;
8192 TREE_STATIC (result) = 1;
8193 result = build (INDIRECT_REF, build_pointer_type (type), result);
8194 TREE_CONSTANT (result) = 1;
8195 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8196#endif
8197 }
8198
8199 /* Return the address of the first anonymous stack arg. */
8200 case BUILT_IN_NEXT_ARG:
8201 {
8202 tree fntype = TREE_TYPE (current_function_decl);
8203
8204 if ((TYPE_ARG_TYPES (fntype) == 0
8205 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8206 == void_type_node))
8207 && ! current_function_varargs)
8208 {
8209 error ("`va_start' used in function with fixed args");
8210 return const0_rtx;
8211 }
8212
8213 if (arglist)
8214 {
8215 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8216 tree arg = TREE_VALUE (arglist);
8217
8218 /* Strip off all nops for the sake of the comparison. This
8219 is not quite the same as STRIP_NOPS. It does more.
8220 We must also strip off INDIRECT_REF for C++ reference
8221 parameters. */
8222 while (TREE_CODE (arg) == NOP_EXPR
8223 || TREE_CODE (arg) == CONVERT_EXPR
8224 || TREE_CODE (arg) == NON_LVALUE_EXPR
8225 || TREE_CODE (arg) == INDIRECT_REF)
8226 arg = TREE_OPERAND (arg, 0);
8227 if (arg != last_parm)
8228 warning ("second parameter of `va_start' not last named argument");
8229 }
8230 else if (! current_function_varargs)
8231 /* Evidently an out of date version of <stdarg.h>; can't validate
8232 va_start's second argument, but can still work as intended. */
8233 warning ("`__builtin_next_arg' called without an argument");
8234 }
8235
8236 return expand_binop (Pmode, add_optab,
8237 current_function_internal_arg_pointer,
8238 current_function_arg_offset_rtx,
8239 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8240
8241 case BUILT_IN_CLASSIFY_TYPE:
8242 if (arglist != 0)
8243 {
8244 tree type = TREE_TYPE (TREE_VALUE (arglist));
8245 enum tree_code code = TREE_CODE (type);
8246 if (code == VOID_TYPE)
8247 return GEN_INT (void_type_class);
8248 if (code == INTEGER_TYPE)
8249 return GEN_INT (integer_type_class);
8250 if (code == CHAR_TYPE)
8251 return GEN_INT (char_type_class);
8252 if (code == ENUMERAL_TYPE)
8253 return GEN_INT (enumeral_type_class);
8254 if (code == BOOLEAN_TYPE)
8255 return GEN_INT (boolean_type_class);
8256 if (code == POINTER_TYPE)
8257 return GEN_INT (pointer_type_class);
8258 if (code == REFERENCE_TYPE)
8259 return GEN_INT (reference_type_class);
8260 if (code == OFFSET_TYPE)
8261 return GEN_INT (offset_type_class);
8262 if (code == REAL_TYPE)
8263 return GEN_INT (real_type_class);
8264 if (code == COMPLEX_TYPE)
8265 return GEN_INT (complex_type_class);
8266 if (code == FUNCTION_TYPE)
8267 return GEN_INT (function_type_class);
8268 if (code == METHOD_TYPE)
8269 return GEN_INT (method_type_class);
8270 if (code == RECORD_TYPE)
8271 return GEN_INT (record_type_class);
8272 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8273 return GEN_INT (union_type_class);
8274 if (code == ARRAY_TYPE)
8275 {
8276 if (TYPE_STRING_FLAG (type))
8277 return GEN_INT (string_type_class);
8278 else
8279 return GEN_INT (array_type_class);
8280 }
8281 if (code == SET_TYPE)
8282 return GEN_INT (set_type_class);
8283 if (code == FILE_TYPE)
8284 return GEN_INT (file_type_class);
8285 if (code == LANG_TYPE)
8286 return GEN_INT (lang_type_class);
8287 }
8288 return GEN_INT (no_type_class);
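 /* Thus `__builtin_classify_type (0)' folds to integer_type_class and
    `__builtin_classify_type (1.0)' to real_type_class; the class
    numbers themselves come from typeclass.h.  */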
8289
8290 case BUILT_IN_CONSTANT_P:
8291 if (arglist == 0)
8292 return const0_rtx;
8293 else
8294 {
8295 tree arg = TREE_VALUE (arglist);
8296
8297 STRIP_NOPS (arg);
8298 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8299 || (TREE_CODE (arg) == ADDR_EXPR
8300 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8301 ? const1_rtx : const0_rtx);
8302 }
8303
8304 case BUILT_IN_FRAME_ADDRESS:
8305 /* The argument must be a nonnegative integer constant.
8306 It counts the number of frames to scan up the stack.
8307 The value is the address of that frame. */
8308 case BUILT_IN_RETURN_ADDRESS:
8309 /* The argument must be a nonnegative integer constant.
8310 It counts the number of frames to scan up the stack.
8311 The value is the return address saved in that frame. */
8312 if (arglist == 0)
8313 /* Warning about missing arg was already issued. */
8314 return const0_rtx;
8315 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
8316 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8317 {
8318 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8319 error ("invalid arg to `__builtin_frame_address'");
8320 else
8321 error ("invalid arg to `__builtin_return_address'");
8322 return const0_rtx;
8323 }
8324 else
8325 {
8326 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8327 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8328 hard_frame_pointer_rtx);
8329
8330 /* Some ports cannot access arbitrary stack frames. */
8331 if (tem == NULL)
8332 {
8333 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8334 warning ("unsupported arg to `__builtin_frame_address'");
8335 else
8336 warning ("unsupported arg to `__builtin_return_address'");
8337 return const0_rtx;
8338 }
8339
8340 /* For __builtin_frame_address, return what we've got. */
8341 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8342 return tem;
8343
8344 if (GET_CODE (tem) != REG)
8345 tem = copy_to_reg (tem);
8346 return tem;
8347 }
8348
8349 /* Returns the address of the area where the structure is returned.
8350 0 otherwise. */
8351 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8352 if (arglist != 0
8353 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8354 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
8355 return const0_rtx;
8356 else
8357 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
8358
8359 case BUILT_IN_ALLOCA:
8360 if (arglist == 0
8361 /* Arg could be non-integer if user redeclared this fcn wrong. */
8362 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8363 break;
8364
8365 /* Compute the argument. */
8366 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8367
8368 /* Allocate the desired space. */
8369 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8370
8371 case BUILT_IN_FFS:
8372 /* If not optimizing, call the library function. */
8373 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8374 break;
8375
8376 if (arglist == 0
8377 /* Arg could be non-integer if user redeclared this fcn wrong. */
8378 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8379 break;
8380
8381 /* Compute the argument. */
8382 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8383 /* Compute ffs, into TARGET if possible.
8384 Set TARGET to wherever the result comes back. */
8385 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8386 ffs_optab, op0, target, 1);
8387 if (target == 0)
8388 abort ();
8389 return target;
8390
8391 case BUILT_IN_STRLEN:
8392 /* If not optimizing, call the library function. */
8393 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8394 break;
8395
8396 if (arglist == 0
8397 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8398 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8399 break;
8400 else
8401 {
8402 tree src = TREE_VALUE (arglist);
8403 tree len = c_strlen (src);
8404
8405 int align
8406 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8407
8408 rtx result, src_rtx, char_rtx;
8409 enum machine_mode insn_mode = value_mode, char_mode;
8410 enum insn_code icode;
8411
8412 /* If the length is known, just return it. */
8413 if (len != 0)
8414 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
8415
8416 /* If SRC is not a pointer type, don't do this operation inline. */
8417 if (align == 0)
8418 break;
8419
8420 /* Call a function if we can't compute strlen in the right mode. */
8421
8422 while (insn_mode != VOIDmode)
8423 {
8424 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8425 if (icode != CODE_FOR_nothing)
8426 break;
8427
8428 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8429 }
8430 if (insn_mode == VOIDmode)
8431 break;
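	  /* To illustrate the widening loop above: if VALUE_MODE is
	     SImode but the machine only provides a DImode strlen pattern,
	     INSN_MODE widens from SImode to DImode and that pattern is
	     used; if no pattern exists at any width, we fall back to the
	     library call.  */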
8432
8433 /* Make a place to write the result of the instruction. */
8434 result = target;
8435 if (! (result != 0
8436 && GET_CODE (result) == REG
8437 && GET_MODE (result) == insn_mode
8438 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8439 result = gen_reg_rtx (insn_mode);
8440
8441 /* Make sure the operands are acceptable to the predicates. */
8442
8443 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8444 result = gen_reg_rtx (insn_mode);
8445 src_rtx = memory_address (BLKmode,
8446 expand_expr (src, NULL_RTX, ptr_mode,
8447 EXPAND_NORMAL));
8448
8449 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8450 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8451
8452 /* Check the string is readable and has an end. */
8453 if (flag_check_memory_usage)
8454 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
8455 src_rtx, ptr_mode,
8456 GEN_INT (MEMORY_USE_RO),
8457 TYPE_MODE (integer_type_node));
8458
8459 char_rtx = const0_rtx;
8460 char_mode = insn_operand_mode[(int)icode][2];
8461 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8462 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8463
8464 emit_insn (GEN_FCN (icode) (result,
8465 gen_rtx_MEM (BLKmode, src_rtx),
8466 char_rtx, GEN_INT (align)));
8467
8468 /* Return the value in the proper mode for this function. */
8469 if (GET_MODE (result) == value_mode)
8470 return result;
8471 else if (target != 0)
8472 {
8473 convert_move (target, result, 0);
8474 return target;
8475 }
8476 else
8477 return convert_to_mode (value_mode, result, 0);
8478 }
8479
8480 case BUILT_IN_STRCPY:
8481 /* If not optimizing, call the library function. */
8482 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8483 break;
8484
8485 if (arglist == 0
8486 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8487 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8488 || TREE_CHAIN (arglist) == 0
8489 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8490 break;
8491 else
8492 {
8493 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8494
8495 if (len == 0)
8496 break;
8497
8498 len = size_binop (PLUS_EXPR, len, integer_one_node);
8499
8500 chainon (arglist, build_tree_list (NULL_TREE, len));
8501 }
8502
8503 /* Drops in. */
8504 case BUILT_IN_MEMCPY:
8505 /* If not optimizing, call the library function. */
8506 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8507 break;
8508
8509 if (arglist == 0
8510 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8511 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8512 || TREE_CHAIN (arglist) == 0
8513 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8514 != POINTER_TYPE)
8515 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8516 || (TREE_CODE (TREE_TYPE (TREE_VALUE
8517 (TREE_CHAIN (TREE_CHAIN (arglist)))))
8518 != INTEGER_TYPE))
8519 break;
8520 else
8521 {
8522 tree dest = TREE_VALUE (arglist);
8523 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8524 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8525 tree type;
8526
8527 int src_align
8528 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8529 int dest_align
8530 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8531 rtx dest_rtx, dest_mem, src_mem, src_rtx, dest_addr, len_rtx;
8532
8533 /* If either SRC or DEST is not a pointer type, don't do
8534 this operation in-line. */
8535 if (src_align == 0 || dest_align == 0)
8536 {
8537 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8538 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8539 break;
8540 }
8541
8542 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8543 dest_mem = gen_rtx_MEM (BLKmode,
8544 memory_address (BLKmode, dest_rtx));
8545 /* There could be a void* cast on top of the object. */
8546 while (TREE_CODE (dest) == NOP_EXPR)
8547 dest = TREE_OPERAND (dest, 0);
8548 type = TREE_TYPE (TREE_TYPE (dest));
8549 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8550 src_rtx = expand_expr (src, NULL_RTX, ptr_mode, EXPAND_SUM);
8551 src_mem = gen_rtx_MEM (BLKmode,
8552 memory_address (BLKmode, src_rtx));
8553 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
8554
8555 /* Just copy the rights of SRC to the rights of DEST. */
8556 if (flag_check_memory_usage)
8557 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
8558 dest_rtx, ptr_mode,
8559 src_rtx, ptr_mode,
8560 len_rtx, TYPE_MODE (sizetype));
8561
8562 /* There could be a void* cast on top of the object. */
8563 while (TREE_CODE (src) == NOP_EXPR)
8564 src = TREE_OPERAND (src, 0);
8565 type = TREE_TYPE (TREE_TYPE (src));
8566 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8567
8568 /* Copy word part most expediently. */
8569 dest_addr
8570 = emit_block_move (dest_mem, src_mem, len_rtx,
8571 MIN (src_align, dest_align));
8572
8573 if (dest_addr == 0)
8574 dest_addr = force_operand (dest_rtx, NULL_RTX);
8575
8576 return dest_addr;
8577 }
8578
8579 case BUILT_IN_MEMSET:
8580 /* If not optimizing, call the library function. */
8581 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8582 break;
8583
8584 if (arglist == 0
8585 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8586 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8587 || TREE_CHAIN (arglist) == 0
8588 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8589 != INTEGER_TYPE)
8590 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8591 || (INTEGER_TYPE
8592 != (TREE_CODE (TREE_TYPE
8593 (TREE_VALUE
8594 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
8595 break;
8596 else
8597 {
8598 tree dest = TREE_VALUE (arglist);
8599 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8600 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8601 tree type;
8602
8603 int dest_align
8604 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8605 rtx dest_rtx, dest_mem, dest_addr, len_rtx;
8606
8607 /* If DEST is not a pointer type, don't do this
8608 operation in-line. */
8609 if (dest_align == 0)
8610 break;
8611
8612 /* If VAL is not 0, don't do this operation in-line. */
8613 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8614 break;
8615
8616 /* If LEN does not expand to a constant, don't do this
8617 operation in-line. */
8618 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
8619 if (GET_CODE (len_rtx) != CONST_INT)
8620 break;
8621
8622 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8623 dest_mem = gen_rtx_MEM (BLKmode,
8624 memory_address (BLKmode, dest_rtx));
8625
8626 /* Just check DST is writable and mark it as readable. */
8627 if (flag_check_memory_usage)
8628 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8629 dest_rtx, ptr_mode,
8630 len_rtx, TYPE_MODE (sizetype),
8631 GEN_INT (MEMORY_USE_WO),
8632 TYPE_MODE (integer_type_node));
8633
8634 /* There could be a void* cast on top of the object. */
8635 while (TREE_CODE (dest) == NOP_EXPR)
8636 dest = TREE_OPERAND (dest, 0);
8637 type = TREE_TYPE (TREE_TYPE (dest));
8638 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8639
8640 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
8641
8642 if (dest_addr == 0)
8643 dest_addr = force_operand (dest_rtx, NULL_RTX);
8644
8645 return dest_addr;
8646 }
8647
8648/* These comparison functions need an instruction that returns an actual
8649 index. An ordinary compare that just sets the condition codes
8650 is not enough. */
8651#ifdef HAVE_cmpstrsi
8652 case BUILT_IN_STRCMP:
8653 /* If not optimizing, call the library function. */
8654 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8655 break;
8656
8657 /* If we need to check memory accesses, call the library function. */
8658 if (flag_check_memory_usage)
8659 break;
8660
8661 if (arglist == 0
8662 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8663 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8664 || TREE_CHAIN (arglist) == 0
8665 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8666 break;
8667 else if (!HAVE_cmpstrsi)
8668 break;
8669 {
8670 tree arg1 = TREE_VALUE (arglist);
8671 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8672 tree len, len2;
8673
8674 len = c_strlen (arg1);
8675 if (len)
8676 len = size_binop (PLUS_EXPR, integer_one_node, len);
8677 len2 = c_strlen (arg2);
8678 if (len2)
8679 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8680
8681 /* If we don't have a constant length for the first, use the length
8682 of the second, if we know it. We don't require a constant for
8683 this case; some cost analysis could be done if both are available
8684 but neither is constant. For now, assume they're equally cheap.
8685
8686 If both strings have constant lengths, use the smaller. This
8687 could arise if optimization results in strcmp being called with
8688 two fixed strings, or if the code was machine-generated. We should
8689 add some code to the `memcmp' handler below to deal with such
8690 situations, someday. */
8691 if (!len || TREE_CODE (len) != INTEGER_CST)
8692 {
8693 if (len2)
8694 len = len2;
8695 else if (len == 0)
8696 break;
8697 }
8698 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8699 {
8700 if (tree_int_cst_lt (len2, len))
8701 len = len2;
8702 }
8703
8704 chainon (arglist, build_tree_list (NULL_TREE, len));
8705 }
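	/* For instance, in `strcmp ("abcd", s)' only the first length is
	   constant, so LEN becomes 5 (four characters plus the terminating
	   null) and the cmpstrsi insn emitted below compares at most five
	   bytes.  */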
8706
8707 /* Drops in. */
8708 case BUILT_IN_MEMCMP:
8709 /* If not optimizing, call the library function. */
8710 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8711 break;
8712
8713 /* If we need to check memory accesses, call the library function. */
8714 if (flag_check_memory_usage)
8715 break;
8716
8717 if (arglist == 0
8718 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8719 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8720 || TREE_CHAIN (arglist) == 0
8721 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8722 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8723 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8724 break;
8725 else if (!HAVE_cmpstrsi)
8726 break;
8727 {
8728 tree arg1 = TREE_VALUE (arglist);
8729 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8730 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8731 rtx result;
8732
8733 int arg1_align
8734 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8735 int arg2_align
8736 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8737 enum machine_mode insn_mode
8738 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
8739
8740 /* If we don't have POINTER_TYPE, call the function. */
8741 if (arg1_align == 0 || arg2_align == 0)
8742 {
8743 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8744 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8745 break;
8746 }
8747
8748 /* Make a place to write the result of the instruction. */
8749 result = target;
8750 if (! (result != 0
8751 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8752 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8753 result = gen_reg_rtx (insn_mode);
8754
8755 emit_insn (gen_cmpstrsi (result,
8756 gen_rtx_MEM (BLKmode,
8757 expand_expr (arg1, NULL_RTX,
8758 ptr_mode,
8759 EXPAND_NORMAL)),
8760 gen_rtx_MEM (BLKmode,
8761 expand_expr (arg2, NULL_RTX,
8762 ptr_mode,
8763 EXPAND_NORMAL)),
8764 expand_expr (len, NULL_RTX, VOIDmode, 0),
8765 GEN_INT (MIN (arg1_align, arg2_align))));
8766
8767 /* Return the value in the proper mode for this function. */
8768 mode = TYPE_MODE (TREE_TYPE (exp));
8769 if (GET_MODE (result) == mode)
8770 return result;
8771 else if (target != 0)
8772 {
8773 convert_move (target, result, 0);
8774 return target;
8775 }
8776 else
8777 return convert_to_mode (mode, result, 0);
8778 }
8779#else
8780 case BUILT_IN_STRCMP:
8781 case BUILT_IN_MEMCMP:
8782 break;
8783#endif
8784
8785 case BUILT_IN_SETJMP:
8786 if (arglist == 0
8787 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8788 break;
8789 else
8790 {
8791 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8792 VOIDmode, 0);
8793 rtx lab = gen_label_rtx ();
8794 rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
8795 emit_label (lab);
8796 return ret;
8797 }
8798
8799 /* __builtin_longjmp is passed a pointer to an array of five words.
8800 It's similar to the C library longjmp function but works with
8801 __builtin_setjmp above. */
8802 case BUILT_IN_LONGJMP:
8803 if (arglist == 0 || TREE_CHAIN (arglist) == 0
8804 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8805 break;
8806 else
8807 {
8808 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8809 VOIDmode, 0);
8810 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
8811 const0_rtx, VOIDmode, 0);
8812 expand_builtin_longjmp (buf_addr, value);
8813 return const0_rtx;
8814 }
8815
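 /* The two builtins are intended to be used as a pair around a
    five-word buffer, as in this sketch (the buffer's layout is
    machine-dependent):

	void *buf[5];
	if (__builtin_setjmp (buf) == 0)
	  ... code that may call __builtin_longjmp (buf, 1) ...
	else
	  ... control resumes here after the longjmp ...  */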
8816 /* Various hooks for the DWARF 2 __throw routine. */
8817 case BUILT_IN_UNWIND_INIT:
8818 expand_builtin_unwind_init ();
8819 return const0_rtx;
8820 case BUILT_IN_FP:
8821 return frame_pointer_rtx;
8822 case BUILT_IN_SP:
8823 return stack_pointer_rtx;
8824#ifdef DWARF2_UNWIND_INFO
8825 case BUILT_IN_DWARF_FP_REGNUM:
8826 return expand_builtin_dwarf_fp_regnum ();
8827 case BUILT_IN_DWARF_REG_SIZE:
8828 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
8829#endif
8830 case BUILT_IN_FROB_RETURN_ADDR:
8831 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
8832 case BUILT_IN_EXTRACT_RETURN_ADDR:
8833 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
8834 case BUILT_IN_SET_RETURN_ADDR_REG:
8835 expand_builtin_set_return_addr_reg (TREE_VALUE (arglist));
8836 return const0_rtx;
8837 case BUILT_IN_EH_STUB:
8838 return expand_builtin_eh_stub ();
8839 case BUILT_IN_SET_EH_REGS:
8840 expand_builtin_set_eh_regs (TREE_VALUE (arglist),
8841 TREE_VALUE (TREE_CHAIN (arglist)));
8842 return const0_rtx;
8843
8844 default: /* just do library call, if unknown builtin */
8845 error ("built-in function `%s' not currently supported",
8846 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
8847 }
8848
8849 /* The switch statement above can drop through to cause the function
8850 to be called normally. */
8851
8852 return expand_call (exp, target, ignore);
8853}
8854\f
8855/* Built-in functions to perform an untyped call and return. */
8856
8857/* For each register that may be used for calling a function, this
8858 gives a mode used to copy the register's value. VOIDmode indicates
8859 the register is not used for calling a function. If the machine
8860 has register windows, this gives only the outbound registers.
8861 INCOMING_REGNO gives the corresponding inbound register. */
8862static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
8863
8864/* For each register that may be used for returning values, this gives
8865 a mode used to copy the register's value. VOIDmode indicates the
8866 register is not used for returning values. If the machine has
8867 register windows, this gives only the outbound registers.
8868 INCOMING_REGNO gives the corresponding inbound register. */
8869static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
8870
8871/* For each register that may be used for calling a function, this
8872 gives the offset of that register into the block returned by
8873 __builtin_apply_args. 0 indicates that the register is not
8874 used for calling a function. */
8875static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
8876
8877/* Return the offset of register REGNO into the block returned by
8878 __builtin_apply_args. This is not declared static, since it is
8879 needed in objc-act.c. */
8880
8881int
8882apply_args_register_offset (regno)
8883 int regno;
8884{
8885 apply_args_size ();
8886
8887 /* Arguments are always put in outgoing registers (in the argument
8888 block) if such make sense. */
8889#ifdef OUTGOING_REGNO
8890 regno = OUTGOING_REGNO (regno);
8891#endif
8892 return apply_args_reg_offset[regno];
8893}
8894
8895/* Return the size required for the block returned by __builtin_apply_args,
8896 and initialize apply_args_mode. */
8897
8898static int
8899apply_args_size ()
8900
8901 static int size = -1;
8902 int align, regno;
8903 enum machine_mode mode;
8904
8905 /* The values computed by this function never change. */
8906 if (size < 0)
8907 {
8908 /* The first value is the incoming arg-pointer. */
8909 size = GET_MODE_SIZE (Pmode);
8910
8911 /* The second value is the structure value address unless this is
8912 passed as an "invisible" first argument. */
8913 if (struct_value_rtx)
8914 size += GET_MODE_SIZE (Pmode);
8915
8916 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8917 if (FUNCTION_ARG_REGNO_P (regno))
8918 {
8919 /* Search for the proper mode for copying this register's
8920 value. I'm not sure this is right, but it works so far. */
8921 enum machine_mode best_mode = VOIDmode;
8922
8923 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8924 mode != VOIDmode;
8925 mode = GET_MODE_WIDER_MODE (mode))
8926 if (HARD_REGNO_MODE_OK (regno, mode)
8927 && HARD_REGNO_NREGS (regno, mode) == 1)
8928 best_mode = mode;
8929
8930 if (best_mode == VOIDmode)
8931 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8932 mode != VOIDmode;
8933 mode = GET_MODE_WIDER_MODE (mode))
8934 if (HARD_REGNO_MODE_OK (regno, mode)
8935 && (mov_optab->handlers[(int) mode].insn_code
8936 != CODE_FOR_nothing))
8937 best_mode = mode;
0006469d 8938
b93a436e
JL
8939 mode = best_mode;
8940 if (mode == VOIDmode)
8941 abort ();
8942
8943 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8944 if (size % align != 0)
8945 size = CEIL (size, align) * align;
8946 apply_args_reg_offset[regno] = size;
8947 size += GET_MODE_SIZE (mode);
8948 apply_args_mode[regno] = mode;
8949 }
8950 else
8951 {
8952 apply_args_mode[regno] = VOIDmode;
8953 apply_args_reg_offset[regno] = 0;
8954 }
8955 }
8956 return size;
8957}
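/* As a worked example of the alignment rounding above: if SIZE is 4 and
   the next register's mode requires 8-byte alignment, CEIL (4, 8) * 8
   rounds SIZE up to 8, that register is recorded at offset 8, and SIZE
   then advances to 16.  */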
8958
8959/* Return the size required for the block returned by __builtin_apply,
8960 and initialize apply_result_mode. */
8961
8962static int
8963apply_result_size ()
8964{
8965 static int size = -1;
8966 int align, regno;
8967 enum machine_mode mode;
8968
8969 /* The values computed by this function never change. */
8970 if (size < 0)
8971 {
8972 size = 0;
8973
8974 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8975 if (FUNCTION_VALUE_REGNO_P (regno))
8976 {
8977 /* Search for the proper mode for copying this register's
8978 value. I'm not sure this is right, but it works so far. */
8979 enum machine_mode best_mode = VOIDmode;
8980
8981 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8982 mode != TImode;
8983 mode = GET_MODE_WIDER_MODE (mode))
8984 if (HARD_REGNO_MODE_OK (regno, mode))
8985 best_mode = mode;
8986
8987 if (best_mode == VOIDmode)
8988 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8989 mode != VOIDmode;
8990 mode = GET_MODE_WIDER_MODE (mode))
8991 if (HARD_REGNO_MODE_OK (regno, mode)
8992 && (mov_optab->handlers[(int) mode].insn_code
8993 != CODE_FOR_nothing))
8994 best_mode = mode;
8995
8996 mode = best_mode;
8997 if (mode == VOIDmode)
8998 abort ();
8999
9000 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9001 if (size % align != 0)
9002 size = CEIL (size, align) * align;
9003 size += GET_MODE_SIZE (mode);
9004 apply_result_mode[regno] = mode;
9005 }
9006 else
9007 apply_result_mode[regno] = VOIDmode;
9008
9009 /* Allow targets that use untyped_call and untyped_return to override
9010 the size so that machine-specific information can be stored here. */
9011#ifdef APPLY_RESULT_SIZE
9012 size = APPLY_RESULT_SIZE;
9013#endif
9014 }
9015 return size;
9016}
9017
9018#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9019/* Create a vector describing the result block RESULT. If SAVEP is true,
9020 the result block is used to save the values; otherwise it is used to
9021 restore the values. */
9022
9023static rtx
9024result_vector (savep, result)
9025 int savep;
9026 rtx result;
9027{
9028 int regno, size, align, nelts;
9029 enum machine_mode mode;
9030 rtx reg, mem;
9031 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9032
9033 size = nelts = 0;
9034 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9035 if ((mode = apply_result_mode[regno]) != VOIDmode)
9036 {
9037 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9038 if (size % align != 0)
9039 size = CEIL (size, align) * align;
9040 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
9041 mem = change_address (result, mode,
9042 plus_constant (XEXP (result, 0), size));
9043 savevec[nelts++] = (savep
9044 ? gen_rtx_SET (VOIDmode, mem, reg)
9045 : gen_rtx_SET (VOIDmode, reg, mem));
9046 size += GET_MODE_SIZE (mode);
9047 }
9048 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
9049}
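/* On a hypothetical machine that returns values in an SImode register
   r0 and a DFmode register f0, the vector built with SAVEP nonzero
   would look schematically like

	(parallel [(set (mem:SI result+0) (reg:SI r0))
		   (set (mem:DF result+8) (reg:DF f0))])

   with each offset rounded up to its mode's alignment.  */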
9050#endif /* HAVE_untyped_call or HAVE_untyped_return */
9051
9052/* Save the state required to perform an untyped call with the same
9053 arguments as were passed to the current function. */
9054
9055static rtx
9056expand_builtin_apply_args ()
9057{
9058 rtx registers;
9059 int size, align, regno;
9060 enum machine_mode mode;
9061
9062 /* Create a block where the arg-pointer, structure value address,
9063 and argument registers can be saved. */
9064 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9065
9066 /* Walk past the arg-pointer and structure value address. */
9067 size = GET_MODE_SIZE (Pmode);
9068 if (struct_value_rtx)
9069 size += GET_MODE_SIZE (Pmode);
9070
9071 /* Save each register used in calling a function to the block. */
9072 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9073 if ((mode = apply_args_mode[regno]) != VOIDmode)
9074 {
9075 rtx tem;
9076
9077 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9078 if (size % align != 0)
9079 size = CEIL (size, align) * align;
9080
9081 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9082
9083#ifdef STACK_REGS
9084 /* For reg-stack.c's stack register household.
9085 Compare with a similar piece of code in function.c. */
9086
9087 emit_insn (gen_rtx_USE (mode, tem));
9088#endif
9089
9090 emit_move_insn (change_address (registers, mode,
9091 plus_constant (XEXP (registers, 0),
9092 size)),
9093 tem);
9094 size += GET_MODE_SIZE (mode);
9095 }
9096
9097 /* Save the arg pointer to the block. */
9098 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9099 copy_to_reg (virtual_incoming_args_rtx));
9100 size = GET_MODE_SIZE (Pmode);
9101
9102 /* Save the structure value address unless this is passed as an
9103 "invisible" first argument. */
9104 if (struct_value_incoming_rtx)
9105 {
9106 emit_move_insn (change_address (registers, Pmode,
9107 plus_constant (XEXP (registers, 0),
9108 size)),
9109 copy_to_reg (struct_value_incoming_rtx));
9110 size += GET_MODE_SIZE (Pmode);
9111 }
9112
9113 /* Return the address of the block. */
9114 return copy_addr_to_reg (XEXP (registers, 0));
9115}
9116
9117/* Perform an untyped call and save the state required to perform an
9118 untyped return of whatever value was returned by the given function. */
9119
9120static rtx
9121expand_builtin_apply (function, arguments, argsize)
9122 rtx function, arguments, argsize;
9123{
9124 int size, align, regno;
9125 enum machine_mode mode;
9126 rtx incoming_args, result, reg, dest, call_insn;
9127 rtx old_stack_level = 0;
9128 rtx call_fusage = 0;
9129
9130 /* Create a block where the return registers can be saved. */
9131 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9132
9133 /* ??? The argsize value should be adjusted here. */
9134
9135 /* Fetch the arg pointer from the ARGUMENTS block. */
9136 incoming_args = gen_reg_rtx (Pmode);
9137 emit_move_insn (incoming_args,
9138 gen_rtx_MEM (Pmode, arguments));
9139#ifndef STACK_GROWS_DOWNWARD
9140 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9141 incoming_args, 0, OPTAB_LIB_WIDEN);
9142#endif
9143
9144 /* Perform postincrements before actually calling the function. */
9145 emit_queue ();
9146
9147 /* Push a new argument block and copy the arguments. */
9148 do_pending_stack_adjust ();
9149
9150 /* Save the stack with nonlocal if available. */
9151#ifdef HAVE_save_stack_nonlocal
9152 if (HAVE_save_stack_nonlocal)
9153 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
9154 else
9155#endif
9156 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9157
9158 /* Push a block of memory onto the stack to store the memory arguments.
9159 Save the address in a register, and copy the memory arguments. ??? I
9160 haven't figured out how the calling convention macros affect this,
9161 but it's likely that the source and/or destination addresses in
9162 the block copy will need updating in machine-specific ways. */
9163 dest = allocate_dynamic_stack_space (argsize, 0, 0);
9164 emit_block_move (gen_rtx_MEM (BLKmode, dest),
9165 gen_rtx_MEM (BLKmode, incoming_args),
9166 argsize,
9167 PARM_BOUNDARY / BITS_PER_UNIT);
9168
9169 /* Refer to the argument block. */
9170 apply_args_size ();
9171 arguments = gen_rtx_MEM (BLKmode, arguments);
9172
9173 /* Walk past the arg-pointer and structure value address. */
9174 size = GET_MODE_SIZE (Pmode);
9175 if (struct_value_rtx)
9176 size += GET_MODE_SIZE (Pmode);
9177
9178 /* Restore each of the registers previously saved. Make USE insns
9179 for each of these registers for use in making the call. */
9180 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9181 if ((mode = apply_args_mode[regno]) != VOIDmode)
9182 {
9183 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9184 if (size % align != 0)
9185 size = CEIL (size, align) * align;
9186 reg = gen_rtx_REG (mode, regno);
9187 emit_move_insn (reg,
9188 change_address (arguments, mode,
9189 plus_constant (XEXP (arguments, 0),
9190 size)));
9191
9192 use_reg (&call_fusage, reg);
9193 size += GET_MODE_SIZE (mode);
9194 }
9195
9196 /* Restore the structure value address unless this is passed as an
9197 "invisible" first argument. */
9198 size = GET_MODE_SIZE (Pmode);
9199 if (struct_value_rtx)
9200 {
9201 rtx value = gen_reg_rtx (Pmode);
9202 emit_move_insn (value,
9203 change_address (arguments, Pmode,
9204 plus_constant (XEXP (arguments, 0),
9205 size)));
9206 emit_move_insn (struct_value_rtx, value);
9207 if (GET_CODE (struct_value_rtx) == REG)
9208 use_reg (&call_fusage, struct_value_rtx);
9209 size += GET_MODE_SIZE (Pmode);
9210 }
9211
9212 /* All arguments and registers used for the call are set up by now! */
9213 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9214
9215 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9216 and we don't want to load it into a register as an optimization,
9217 because prepare_call_address already did it if it should be done. */
9218 if (GET_CODE (function) != SYMBOL_REF)
9219 function = memory_address (FUNCTION_MODE, function);
9220
9221 /* Generate the actual call instruction and save the return value. */
9222#ifdef HAVE_untyped_call
9223 if (HAVE_untyped_call)
9224 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
9225 result, result_vector (1, result)));
9226 else
9227#endif
9228#ifdef HAVE_call_value
9229 if (HAVE_call_value)
9230 {
9231 rtx valreg = 0;
9232
9233 /* Locate the unique return register. It is not possible to
9234 express a call that sets more than one return register using
9235 call_value; use untyped_call for that. In fact, untyped_call
9236 only needs to save the return registers in the given block. */
9237 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9238 if ((mode = apply_result_mode[regno]) != VOIDmode)
9239 {
9240 if (valreg)
9241 abort (); /* HAVE_untyped_call required. */
9242 valreg = gen_rtx_REG (mode, regno);
9243 }
9244
9245 emit_call_insn (gen_call_value (valreg,
9246 gen_rtx_MEM (FUNCTION_MODE, function),
9247 const0_rtx, NULL_RTX, const0_rtx));
9248
9249 emit_move_insn (change_address (result, GET_MODE (valreg),
9250 XEXP (result, 0)),
9251 valreg);
9252 }
9253 else
9254#endif
9255 abort ();
9256
9257 /* Find the CALL insn we just emitted. */
9258 for (call_insn = get_last_insn ();
9259 call_insn && GET_CODE (call_insn) != CALL_INSN;
9260 call_insn = PREV_INSN (call_insn))
9261 ;
9262
9263 if (! call_insn)
9264 abort ();
9265
9266 /* Put the register usage information on the CALL. If there is already
9267 some usage information, put ours at the end. */
9268 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9269 {
9270 rtx link;
9271
9272 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9273 link = XEXP (link, 1))
9274 ;
9275
9276 XEXP (link, 1) = call_fusage;
9277 }
9278 else
9279 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9280
9281 /* Restore the stack. */
9282#ifdef HAVE_save_stack_nonlocal
9283 if (HAVE_save_stack_nonlocal)
9284 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
9285 else
9286#endif
9287 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9288
9289 /* Return the address of the result block. */
9290 return copy_addr_to_reg (XEXP (result, 0));
9291}
9292
9293/* Perform an untyped return. */
9294
9295static void
9296expand_builtin_return (result)
9297 rtx result;
9298{
9299 int size, align, regno;
9300 enum machine_mode mode;
9301 rtx reg;
9302 rtx call_fusage = 0;
9303
9304 apply_result_size ();
9305 result = gen_rtx_MEM (BLKmode, result);
9306
9307#ifdef HAVE_untyped_return
9308 if (HAVE_untyped_return)
9309 {
9310 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9311 emit_barrier ();
9312 return;
9313 }
9314#endif
9315
9316 /* Restore the return value and note that each value is used. */
9317 size = 0;
9318 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9319 if ((mode = apply_result_mode[regno]) != VOIDmode)
9320 {
9321 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9322 if (size % align != 0)
9323 size = CEIL (size, align) * align;
9324 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9325 emit_move_insn (reg,
9326 change_address (result, mode,
9327 plus_constant (XEXP (result, 0),
9328 size)));
9329
9330 push_to_sequence (call_fusage);
9331 emit_insn (gen_rtx_USE (VOIDmode, reg));
9332 call_fusage = get_insns ();
9333 end_sequence ();
9334 size += GET_MODE_SIZE (mode);
9335 }
9336
9337 /* Put the USE insns before the return. */
9338 emit_insns (call_fusage);
9339
9340 /* Return whatever value was restored by jumping directly to the end
9341 of the function. */
9342 expand_null_return ();
9343}
9344\f
9345/* Expand code for a post- or pre- increment or decrement
9346 and return the RTX for the result.
9347 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9348
9349static rtx
9350expand_increment (exp, post, ignore)
9351 register tree exp;
9352 int post, ignore;
9353{
9354 register rtx op0, op1;
9355 register rtx temp, value;
9356 register tree incremented = TREE_OPERAND (exp, 0);
9357 optab this_optab = add_optab;
9358 int icode;
9359 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9360 int op0_is_copy = 0;
9361 int single_insn = 0;
9362 /* 1 means we can't store into OP0 directly,
9363 because it is a subreg narrower than a word,
9364 and we don't dare clobber the rest of the word. */
9365 int bad_subreg = 0;
9366
9367 /* Stabilize any component ref that might need to be
9368 evaluated more than once below. */
9369 if (!post
9370 || TREE_CODE (incremented) == BIT_FIELD_REF
9371 || (TREE_CODE (incremented) == COMPONENT_REF
9372 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9373 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9374 incremented = stabilize_reference (incremented);
9375 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9376 ones into save exprs so that they don't accidentally get evaluated
9377 more than once by the code below. */
9378 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9379 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9380 incremented = save_expr (incremented);
9381
9382 /* Compute the operands as RTX.
9383 Note whether OP0 is the actual lvalue or a copy of it:
9384 I believe it is a copy iff it is a register or subreg
9385 and insns were generated in computing it. */
9386
9387 temp = get_last_insn ();
9388 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9389
9390 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9391 in place but instead must do sign- or zero-extension during assignment,
9392 so we copy it into a new register and let the code below use it as
9393 a copy.
9394
9395 Note that we can safely modify this SUBREG since it is known not to be
9396 shared (it was made by the expand_expr call above). */
9397
9398 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9399 {
9400 if (post)
9401 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9402 else
9403 bad_subreg = 1;
9404 }
9405 else if (GET_CODE (op0) == SUBREG
9406 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9407 {
9408 /* We cannot increment this SUBREG in place. If we are
9409 post-incrementing, get a copy of the old value. Otherwise,
9410 just mark that we cannot increment in place. */
9411 if (post)
9412 op0 = copy_to_reg (op0);
9413 else
9414 bad_subreg = 1;
9415 }
9416
9417 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9418 && temp != get_last_insn ());
9419 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9420 EXPAND_MEMORY_USE_BAD);
9421
9422 /* Decide whether incrementing or decrementing. */
9423 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9424 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9425 this_optab = sub_optab;
9426
9427 /* Convert decrement by a constant into a negative increment. */
9428 if (this_optab == sub_optab
9429 && GET_CODE (op1) == CONST_INT)
9430 {
9431 op1 = GEN_INT (- INTVAL (op1));
9432 this_optab = add_optab;
9433 }
9434
9435 /* For a preincrement, see if we can do this with a single instruction. */
9436 if (!post)
9437 {
9438 icode = (int) this_optab->handlers[(int) mode].insn_code;
9439 if (icode != (int) CODE_FOR_nothing
9440 /* Make sure that OP0 is valid for operands 0 and 1
9441 of the insn we want to queue. */
9442 && (*insn_operand_predicate[icode][0]) (op0, mode)
9443 && (*insn_operand_predicate[icode][1]) (op0, mode)
9444 && (*insn_operand_predicate[icode][2]) (op1, mode))
9445 single_insn = 1;
9446 }
9447
9448 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9449 then we cannot just increment OP0. We must therefore contrive to
9450 increment the original value. Then, for postincrement, we can return
9451 OP0 since it is a copy of the old value. For preincrement, expand here
9452 unless we can do it with a single insn.
9453
9454 Likewise if storing directly into OP0 would clobber high bits
9455 we need to preserve (bad_subreg). */
9456 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9457 {
9458 /* This is the easiest way to increment the value wherever it is.
9459 Problems with multiple evaluation of INCREMENTED are prevented
9460 because either (1) it is a component_ref or preincrement,
9461 in which case it was stabilized above, or (2) it is an array_ref
9462 with constant index in an array in a register, which is
9463 safe to reevaluate. */
9464 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9465 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9466 ? MINUS_EXPR : PLUS_EXPR),
9467 TREE_TYPE (exp),
9468 incremented,
9469 TREE_OPERAND (exp, 1));
9470
9471 while (TREE_CODE (incremented) == NOP_EXPR
9472 || TREE_CODE (incremented) == CONVERT_EXPR)
9473 {
9474 newexp = convert (TREE_TYPE (incremented), newexp);
9475 incremented = TREE_OPERAND (incremented, 0);
9476 }
9477
9478 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9479 return post ? op0 : temp;
9480 }
9481
9482 if (post)
9483 {
9484 /* We have a true reference to the value in OP0.
9485 If there is an insn to add or subtract in this mode, queue it.
9486 Queueing the increment insn avoids the register shuffling
9487 that often results if we must increment now and first save
9488 the old value for subsequent use. */
9489
9490#if 0 /* Turned off to avoid making extra insn for indexed memref. */
9491 op0 = stabilize (op0);
9492#endif
9493
9494 icode = (int) this_optab->handlers[(int) mode].insn_code;
9495 if (icode != (int) CODE_FOR_nothing
9496 /* Make sure that OP0 is valid for operands 0 and 1
9497 of the insn we want to queue. */
9498 && (*insn_operand_predicate[icode][0]) (op0, mode)
9499 && (*insn_operand_predicate[icode][1]) (op0, mode))
9500 {
9501 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9502 op1 = force_reg (mode, op1);
9503
9504 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9505 }
9506 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9507 {
9508 rtx addr = (general_operand (XEXP (op0, 0), mode)
9509 ? force_reg (Pmode, XEXP (op0, 0))
9510 : copy_to_reg (XEXP (op0, 0)));
9511 rtx temp, result;
9512
9513 op0 = change_address (op0, VOIDmode, addr);
9514 temp = force_reg (GET_MODE (op0), op0);
9515 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9516 op1 = force_reg (mode, op1);
9517
9518 /* The increment queue is LIFO, thus we have to `queue'
9519 the instructions in reverse order. */
9520 enqueue_insn (op0, gen_move_insn (op0, temp));
9521 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9522 return result;
9523 }
9524 }
9525
9526 /* Preincrement, or we can't increment with one simple insn. */
9527 if (post)
9528 /* Save a copy of the value before inc or dec, to return it later. */
9529 temp = value = copy_to_reg (op0);
9530 else
9531 /* Arrange to return the incremented value. */
9532 /* Copy the rtx because expand_binop will protect from the queue,
9533 and the results of that would be invalid for us to return
9534 if our caller does emit_queue before using our result. */
9535 temp = copy_rtx (value = op0);
9536
9537 /* Increment however we can. */
9538 op1 = expand_binop (mode, this_optab, value, op1,
9539 flag_check_memory_usage ? NULL_RTX : op0,
9540 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9541 /* Make sure the value is stored into OP0. */
9542 if (op1 != op0)
9543 emit_move_insn (op0, op1);
9544
9545 return temp;
9546}
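/* To illustrate the two result paths above: for a postincrement
   `y = x++;' the value of the expression is the old value, roughly

	temp = x; x = x + 1; y = temp;

   while a preincrement `y = ++x;' yields the updated value,

	x = x + 1; y = x;

   (a sketch of the semantics, not of the literal RTL emitted).  */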
9547\f
9548/* Expand all function calls contained within EXP, innermost ones first.
9549 But don't look within expressions that have sequence points.
9550 For each CALL_EXPR, record the rtx for its value
9551 in the CALL_EXPR_RTL field. */
9552
9553static void
9554preexpand_calls (exp)
9555 tree exp;
9556{
9557 register int nops, i;
9558 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9562
9560 if (! do_preexpand_calls)
9561 return;
5718612f 9562
9563 /* Only expressions and references can contain calls. */
9564
9565 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9566 return;
9567
9568 switch (TREE_CODE (exp))
9569 {
9570 case CALL_EXPR:
9571 /* Do nothing if already expanded. */
9572 if (CALL_EXPR_RTL (exp) != 0
9573 /* Do nothing if the call returns a variable-sized object. */
9574 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
9575 /* Do nothing to built-in functions. */
9576 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9577 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9578 == FUNCTION_DECL)
9579 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9580 return;
9581
9582 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9583 return;
9584
9585 case COMPOUND_EXPR:
9586 case COND_EXPR:
9587 case TRUTH_ANDIF_EXPR:
9588 case TRUTH_ORIF_EXPR:
9589 /* If we find one of these, then we can be sure
9590 the adjust will be done for it (since it makes jumps).
9591 Do it now, so that if this is inside an argument
9592 of a function, we don't get the stack adjustment
9593 after some other args have already been pushed. */
9594 do_pending_stack_adjust ();
9595 return;
9596
9597 case BLOCK:
9598 case RTL_EXPR:
9599 case WITH_CLEANUP_EXPR:
9600 case CLEANUP_POINT_EXPR:
9601 case TRY_CATCH_EXPR:
9602 return;
9603
9604 case SAVE_EXPR:
9605 if (SAVE_EXPR_RTL (exp) != 0)
9606 return;
9607
9608 default:
9609 break;
9610 }
9611
9612 nops = tree_code_length[(int) TREE_CODE (exp)];
9613 for (i = 0; i < nops; i++)
9614 if (TREE_OPERAND (exp, i) != 0)
9615 {
9616 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9617 if (type == 'e' || type == '<' || type == '1' || type == '2'
9618 || type == 'r')
9619 preexpand_calls (TREE_OPERAND (exp, i));
9620 }
9621}
\f
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}

/* When exiting from a function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    pending_stack_adjust = 0;
#endif
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
	adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
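
/* Usage sketch (assumed, for illustration; `merge_label' is a
   hypothetical CODE_LABEL rtx): code that emits a label where several
   flows of control meet typically writes

     do_pending_stack_adjust ();
     emit_label (merge_label);

   so that any deferred argument pops are flushed before the merge
   point.  */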
\f
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}
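
/* Sketch of typical front-end use (illustrative; `cond' is assumed to
   be an already-built tree expression):

     rtx else_label = gen_label_rtx ();
     jumpifnot (cond, else_label);
     ... expand the THEN part, reached when COND is nonzero ...
     emit_label (else_label);
 */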

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */

void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  register enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  rtx comparison = 0;
  int i;
  tree type;
  enum machine_mode mode;

  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
	emit_jump (temp);
      break;

#if 0
    /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
	emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
	goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
	 narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	goto normal;
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->non-zero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

#if 0
    /* This is never fewer insns than evaluating the PLUS_EXPR followed by
       a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
		   TREE_OPERAND (exp, 0),
		   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
				 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Non-zero iff operands of minus differ.  */
      comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
				   TREE_OPERAND (exp, 0),
				   TREE_OPERAND (exp, 1)),
			    NE, NE);
      break;

    case BIT_AND_EXPR:
      /* If we are AND'ing with a small constant, do this comparison in the
	 smallest type that fits.  If the machine doesn't have comparisons
	 that small, it will be converted back to the wider comparison.
	 This helps if we are testing the sign bit of a narrower object.
	 combine can't do this for us because it can't know whether a
	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
	  && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
	  && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
	  && (type = type_for_mode (mode, 1)) != 0
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
	  && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
	      != CODE_FOR_nothing))
	{
	  do_jump (convert (type, exp), if_false_label, if_true_label);
	  break;
	}
      goto normal;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      if (if_false_label == 0)
	if_false_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == 0)
	if_true_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case COMPOUND_EXPR:
      push_temp_slots ();
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      preserve_temp_slots (NULL_RTX);
      free_temp_slots ();
      pop_temp_slots ();
      emit_queue ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
      {
	int bitsize, bitpos, unsignedp;
	enum machine_mode mode;
	tree type;
	tree offset;
	int volatilep = 0;
	int alignment;

	/* Get description of this reference.  We don't actually care
	   about the underlying object here.  */
	get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep,
			     &alignment);

	type = type_for_size (bitsize, unsignedp);
	if (! SLOW_BYTE_ACCESS
	    && type != 0 && bitsize >= 0
	    && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
	    && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
		!= CODE_FOR_nothing))
	  {
	    do_jump (convert (type, exp), if_false_label, if_true_label);
	    break;
	  }
	goto normal;
      }

    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
	  && integer_zerop (TREE_OPERAND (exp, 2)))
	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
	       && integer_onep (TREE_OPERAND (exp, 2)))
	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
	{
	  register rtx label1 = gen_label_rtx ();
	  drop_through_label = gen_label_rtx ();

	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);

	  start_cleanup_deferral ();
	  /* Now the THEN-expression.  */
	  do_jump (TREE_OPERAND (exp, 1),
		   if_false_label ? if_false_label : drop_through_label,
		   if_true_label ? if_true_label : drop_through_label);
	  /* In case the do_jump just above never jumps.  */
	  do_pending_stack_adjust ();
	  emit_label (label1);

	  /* Now the ELSE-expression.  */
	  do_jump (TREE_OPERAND (exp, 2),
		   if_false_label ? if_false_label : drop_through_label,
		   if_true_label ? if_true_label : drop_through_label);
	  end_cleanup_deferral ();
	}
      break;

    case EQ_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	if (integer_zerop (TREE_OPERAND (exp, 1)))
	  do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
		 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
	  do_jump
	    (fold
	     (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
		     fold (build (EQ_EXPR, TREE_TYPE (exp),
				  fold (build1 (REALPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 0))),
				  fold (build1 (REALPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 1))))),
		     fold (build (EQ_EXPR, TREE_TYPE (exp),
				  fold (build1 (IMAGPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 0))),
				  fold (build1 (IMAGPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 1))))))),
	     if_false_label, if_true_label);
	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
		 && !can_compare_p (TYPE_MODE (inner_type)))
	  do_jump_by_parts_equality (exp, if_false_label, if_true_label);
	else
	  comparison = compare (exp, EQ, EQ);
	break;
      }

    case NE_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	if (integer_zerop (TREE_OPERAND (exp, 1)))
	  do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
		 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
	  do_jump
	    (fold
	     (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
		     fold (build (NE_EXPR, TREE_TYPE (exp),
				  fold (build1 (REALPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 0))),
				  fold (build1 (REALPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 1))))),
		     fold (build (NE_EXPR, TREE_TYPE (exp),
				  fold (build1 (IMAGPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 0))),
				  fold (build1 (IMAGPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 1))))))),
	     if_false_label, if_true_label);
	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
		 && !can_compare_p (TYPE_MODE (inner_type)))
	  do_jump_by_parts_equality (exp, if_true_label, if_false_label);
	else
	  comparison = compare (exp, NE, NE);
	break;
      }

    case LT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
	comparison = compare (exp, LT, LTU);
      break;

    case LE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
	comparison = compare (exp, LE, LEU);
      break;

    case GT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
	comparison = compare (exp, GT, GTU);
      break;

    case GE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
	comparison = compare (exp, GE, GEU);
      break;

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
	 comparisons and tests from non-SI objects to have different code
	 sequences.  */
      /* Copy to register to avoid generating bad insns by cse
	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
	temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      if (GET_CODE (temp) == CONST_INT)
	comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
      else if (GET_CODE (temp) == LABEL_REF)
	comparison = const_true_rtx;
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	       && !can_compare_p (GET_MODE (temp)))
	/* Note swapping the labels gives us not-equal.  */
	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
	comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
				       NE, TREE_UNSIGNED (TREE_TYPE (exp)),
				       GET_MODE (temp), NULL_RTX, 0);
      else
	abort ();
    }

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
     straight into a conditional jump instruction as the jump condition.
     Otherwise, all the work has been done already.  */

  if (comparison == const_true_rtx)
    {
      if (if_true_label)
	emit_jump (if_true_label);
    }
  else if (comparison == const0_rtx)
    {
      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (comparison)
    do_jump_for_compare (comparison, if_false_label, if_true_label);

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
	 do any stack adjusts from that code, before the place
	 where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
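
/* Contract of do_jump in miniature (illustrative; T and F are
   hypothetical labels):

     rtx t = gen_label_rtx (), f = gen_label_rtx ();
     do_jump (exp, f, t);

   emits code that reaches F when EXP evaluates to zero and T when it
   is nonzero; passing NULL_RTX for either label means fall through
   in that case.  */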
\f
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_true_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_true_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
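
/* Worked example (assuming 32-bit words): for a signed DImode A > B
   the loop above emits, in order,

     A.high >  B.high  (signed)    --> if_true_label
     A.high != B.high              --> if_false_label
     A.low  >  B.low   (unsigned)  --> if_true_label

   plus a final, redundant equality test, and then an unconditional
   jump to if_false_label.  Only the high-order word is compared with
   the operands' own signedness.  */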

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
				   operand_subword_force (op1, i, mode),
				   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
				   word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
\f
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

static void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
			 operand_subword_force (op0, i, GET_MODE (op0)),
			 part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
				   NULL_RTX, 0);

      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp == const0_rtx)
	emit_jump (if_true_label);
      else
	do_jump_for_compare (comp, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
							  GET_MODE (op0)),
				   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
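
/* The "or all the words" trick above, stated in C terms (illustration
   only): on a 32-bit target a DImode value X is zero exactly when

     (x_low | x_high) == 0

   so one IOR plus a single word compare replaces one compare and
   branch per word.  */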

/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
	abort ();

      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = get_last_insn ();
      rtx branch = 0;

      /* Output the branch with the opposite condition.  Then try to invert
	 what is generated.  If more than one insn is a branch, or if the
	 branch is not the last insn written, abort.  If we can't invert
	 the branch, make a true label, redirect this jump to that,
	 emit a jump to the false label and define the true label.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
	abort ();

      /* Here we get the first insn that was just emitted.  It used to be the
	 case that, on some machines, emitting the branch would discard
	 the previous compare insn and emit a replacement.  This isn't
	 done anymore, but abort if we see that PREV is deleted.  */

      if (prev == 0)
	insn = get_insns ();
      else if (INSN_DELETED_P (prev))
	abort ();
      else
	insn = NEXT_INSN (prev);

      for (; insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == JUMP_INSN)
	  {
	    if (branch)
	      abort ();
	    branch = insn;
	  }

      if (branch != get_last_insn ())
	abort ();

      JUMP_LABEL (branch) = if_false_label;
      if (! invert_jump (branch, if_false_label))
	{
	  if_true_label = gen_label_rtx ();
	  redirect_jump (branch, if_true_label);
	  emit_jump (if_false_label);
	  emit_label (if_true_label);
	}
    }
}
\f
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}

/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
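
/* Usage sketch (hypothetical operands): to test a signed SImode
   "a < b" one might write

     rtx cond = compare_from_rtx (a_rtx, b_rtx, LT, 0, SImode,
				  NULL_RTX, 0);

   and hand COND to do_jump_for_compare; A_RTX and B_RTX are assumed
   to be already-expanded operands.  The returned rtx is either a
   constant (when both operands were constants) or a comparison
   against cc0.  */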
\f
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
	      < TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
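
/* Example of the single-bit shortcut above (illustrative): for
   `(x & 8) != 0' the generated code amounts to

     t = x >> 3;        shift bit 3 down to bit 0
     t &= 1;            mask, unless bit 3 was the sign bit

   while `(x & 8) == 0' XORs with 1 before the final mask; no scc
   instruction is needed in either case.  */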
\f
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bgtu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
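
/* Address arithmetic performed above, in outline (non-PIC case, with a
   4-byte CASE_VECTOR_MODE assumed): the dispatch entry is fetched from

     table_label + index * 4

   and the loaded entry is fed to the target's tablejump pattern.  PIC
   targets rewrite this address through PIC_CASE_VECTOR_ADDRESS first.  */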

#endif /* HAVE_tablejump */