/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "target.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED   /* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
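
/* Folding the macro to 0/1 lets later code test TARGET_MEM_FUNCTIONS with
   an ordinary `if' rather than `#ifdef'; the block-move libcall helpers
   below, for example, use it to choose between the memcpy and bcopy
   interfaces.  */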

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
tree placeholder_list = 0;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
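
/* For each piece of the block, the store-by-pieces machinery asks the
   callback for the value to store by calling
   CONSTFUN (CONSTFUNDATA, OFFSET, MODE); clear_by_pieces_1, declared
   below, is one such callback.  */
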
static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces *);
static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static int is_zeros_p (tree);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);

static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);

static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
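
/* The tables above are filled in by init_expr_once below: it probes each
   (hard register, mode) pair with a recognizer to detect direct loads and
   stores, and uses can_extend_p together with the extension insn's memory
   predicate to find the float extensions that can read straight from
   memory.  */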

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif
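
/* As an illustration, on a 32-bit target with 4-byte MOVE_MAX and the
   speed-optimizing default MOVE_RATIO of 15, a fully aligned 32-byte copy
   costs 8 SImode moves, so MOVE_BY_PIECES_P holds and the copy is expanded
   inline, whereas a 64-byte copy costs 16 moves and is left to a movstr
   pattern or a library call.  CLEAR_BY_PIECES_P and STORE_BY_PIECES_P
   apply the same counting to block clears and constant stores.  */
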
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Stack of EXPR_WITH_FILE_LOCATION nested expressions.  */
struct file_stack *expr_wfl_stack;

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
\f
4fa52007 253/* This is run once per compilation to set up which modes can be used
266007a7 254 directly in memory and to initialize the block move optab. */
4fa52007
RK
255
256void
502b8322 257init_expr_once (void)
4fa52007
RK
258{
259 rtx insn, pat;
260 enum machine_mode mode;
cff48d8f 261 int num_clobbers;
9ec36da5 262 rtx mem, mem1;
bf1660a6 263 rtx reg;
9ec36da5 264
e2549997
RS
265 /* Try indexing by frame ptr and try by stack ptr.
266 It is known that on the Convex the stack ptr isn't a valid index.
267 With luck, one or the other is valid on any machine. */
9ec36da5
JL
268 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
269 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
4fa52007 270
bf1660a6
JL
271 /* A scratch register we can modify in-place below to avoid
272 useless RTL allocations. */
273 reg = gen_rtx_REG (VOIDmode, -1);
274
1f8c3c5b
RH
275 insn = rtx_alloc (INSN);
276 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
277 PATTERN (insn) = pat;
4fa52007
RK
278
279 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
280 mode = (enum machine_mode) ((int) mode + 1))
281 {
282 int regno;
4fa52007
RK
283
284 direct_load[(int) mode] = direct_store[(int) mode] = 0;
285 PUT_MODE (mem, mode);
e2549997 286 PUT_MODE (mem1, mode);
bf1660a6 287 PUT_MODE (reg, mode);
4fa52007 288
e6fe56a4
RK
289 /* See if there is some register that can be used in this mode and
290 directly loaded or stored from memory. */
291
7308a047
RS
292 if (mode != VOIDmode && mode != BLKmode)
293 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
294 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
295 regno++)
296 {
297 if (! HARD_REGNO_MODE_OK (regno, mode))
298 continue;
e6fe56a4 299
bf1660a6 300 REGNO (reg) = regno;
e6fe56a4 301
7308a047
RS
302 SET_SRC (pat) = mem;
303 SET_DEST (pat) = reg;
304 if (recog (pat, insn, &num_clobbers) >= 0)
305 direct_load[(int) mode] = 1;
e6fe56a4 306
e2549997
RS
307 SET_SRC (pat) = mem1;
308 SET_DEST (pat) = reg;
309 if (recog (pat, insn, &num_clobbers) >= 0)
310 direct_load[(int) mode] = 1;
311
7308a047
RS
312 SET_SRC (pat) = reg;
313 SET_DEST (pat) = mem;
314 if (recog (pat, insn, &num_clobbers) >= 0)
315 direct_store[(int) mode] = 1;
e2549997
RS
316
317 SET_SRC (pat) = reg;
318 SET_DEST (pat) = mem1;
319 if (recog (pat, insn, &num_clobbers) >= 0)
320 direct_store[(int) mode] = 1;
7308a047 321 }
4fa52007
RK
322 }
323
51286de6
RH
324 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
325
326 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
327 mode = GET_MODE_WIDER_MODE (mode))
328 {
329 enum machine_mode srcmode;
330 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
0fb7aeda 331 srcmode = GET_MODE_WIDER_MODE (srcmode))
51286de6
RH
332 {
333 enum insn_code ic;
334
335 ic = can_extend_p (mode, srcmode, 0);
336 if (ic == CODE_FOR_nothing)
337 continue;
338
339 PUT_MODE (mem, srcmode);
0fb7aeda 340
51286de6
RH
341 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
342 float_extend_from_mem[mode][srcmode] = true;
343 }
344 }
4fa52007 345}
cff48d8f 346
bbf6f052
RK
347/* This is run at the start of compiling a function. */
348
349void
502b8322 350init_expr (void)
bbf6f052 351{
3a70d621 352 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
bbf6f052
RK
353}
354
49ad7cfa 355/* Small sanity check that the queue is empty at the end of a function. */
296b4ed9 356
bbf6f052 357void
502b8322 358finish_expr_for_function (void)
bbf6f052 359{
49ad7cfa
BS
360 if (pending_chain)
361 abort ();
bbf6f052
RK
362}
363\f
364/* Manage the queue of increment instructions to be output
365 for POSTINCREMENT_EXPR expressions, etc. */
366
bbf6f052
RK
367/* Queue up to increment (or change) VAR later. BODY says how:
368 BODY should be the same thing you would pass to emit_insn
369 to increment right away. It will go to emit_insn later on.
370
371 The value is a QUEUED expression to be used in place of VAR
372 where you want to guarantee the pre-incrementation value of VAR. */
373
374static rtx
502b8322 375enqueue_insn (rtx var, rtx body)
bbf6f052 376{
c5c76735
JL
377 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
378 body, pending_chain);
bbf6f052
RK
379 return pending_chain;
380}
381
382/* Use protect_from_queue to convert a QUEUED expression
383 into something that you can put immediately into an instruction.
384 If the queued incrementation has not happened yet,
385 protect_from_queue returns the variable itself.
386 If the incrementation has happened, protect_from_queue returns a temp
387 that contains a copy of the old value of the variable.
388
389 Any time an rtx which might possibly be a QUEUED is to be put
390 into an instruction, it must be passed through protect_from_queue first.
391 QUEUED expressions are not meaningful in instructions.
392
393 Do not pass a value through protect_from_queue and then hold
394 on to it for a while before putting it in an instruction!
395 If the queue is flushed in between, incorrect code will result. */
396
397rtx
502b8322 398protect_from_queue (rtx x, int modify)
bbf6f052 399{
b3694847 400 RTX_CODE code = GET_CODE (x);
bbf6f052
RK
401
402#if 0 /* A QUEUED can hang around after the queue is forced out. */
403 /* Shortcut for most common case. */
404 if (pending_chain == 0)
405 return x;
406#endif
407
408 if (code != QUEUED)
409 {
e9baa644
RK
410 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
411 use of autoincrement. Make a copy of the contents of the memory
412 location rather than a copy of the address, but not if the value is
413 of mode BLKmode. Don't modify X in place since it might be
414 shared. */
bbf6f052
RK
415 if (code == MEM && GET_MODE (x) != BLKmode
416 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
417 {
f1ec5147
RK
418 rtx y = XEXP (x, 0);
419 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
e9baa644 420
bbf6f052
RK
421 if (QUEUED_INSN (y))
422 {
f1ec5147
RK
423 rtx temp = gen_reg_rtx (GET_MODE (x));
424
e9baa644 425 emit_insn_before (gen_move_insn (temp, new),
bbf6f052
RK
426 QUEUED_INSN (y));
427 return temp;
428 }
f1ec5147 429
73b7f58c
BS
430 /* Copy the address into a pseudo, so that the returned value
431 remains correct across calls to emit_queue. */
f1ec5147 432 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
bbf6f052 433 }
f1ec5147 434
bbf6f052
RK
435 /* Otherwise, recursively protect the subexpressions of all
436 the kinds of rtx's that can contain a QUEUED. */
437 if (code == MEM)
3f15938e
RS
438 {
439 rtx tem = protect_from_queue (XEXP (x, 0), 0);
440 if (tem != XEXP (x, 0))
441 {
442 x = copy_rtx (x);
443 XEXP (x, 0) = tem;
444 }
445 }
bbf6f052
RK
446 else if (code == PLUS || code == MULT)
447 {
3f15938e
RS
448 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
449 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
450 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
451 {
452 x = copy_rtx (x);
453 XEXP (x, 0) = new0;
454 XEXP (x, 1) = new1;
455 }
bbf6f052
RK
456 }
457 return x;
458 }
73b7f58c
BS
459 /* If the increment has not happened, use the variable itself. Copy it
460 into a new pseudo so that the value remains correct across calls to
461 emit_queue. */
bbf6f052 462 if (QUEUED_INSN (x) == 0)
73b7f58c 463 return copy_to_reg (QUEUED_VAR (x));
bbf6f052
RK
464 /* If the increment has happened and a pre-increment copy exists,
465 use that copy. */
466 if (QUEUED_COPY (x) != 0)
467 return QUEUED_COPY (x);
468 /* The increment has happened but we haven't set up a pre-increment copy.
469 Set one up now, and use it. */
470 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
471 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
472 QUEUED_INSN (x));
473 return QUEUED_COPY (x);
474}
475
476/* Return nonzero if X contains a QUEUED expression:
477 if it contains anything that will be altered by a queued increment.
478 We handle only combinations of MEM, PLUS, MINUS and MULT operators
479 since memory addresses generally contain only those. */
480
1f06ee8d 481int
502b8322 482queued_subexp_p (rtx x)
bbf6f052 483{
b3694847 484 enum rtx_code code = GET_CODE (x);
bbf6f052
RK
485 switch (code)
486 {
487 case QUEUED:
488 return 1;
489 case MEM:
490 return queued_subexp_p (XEXP (x, 0));
491 case MULT:
492 case PLUS:
493 case MINUS:
e9a25f70
JL
494 return (queued_subexp_p (XEXP (x, 0))
495 || queued_subexp_p (XEXP (x, 1)));
496 default:
497 return 0;
bbf6f052 498 }
bbf6f052
RK
499}
500
501/* Perform all the pending incrementations. */
502
503void
502b8322 504emit_queue (void)
bbf6f052 505{
b3694847 506 rtx p;
381127e8 507 while ((p = pending_chain))
bbf6f052 508 {
41b083c4
R
509 rtx body = QUEUED_BODY (p);
510
2f937369
DM
511 switch (GET_CODE (body))
512 {
513 case INSN:
514 case JUMP_INSN:
515 case CALL_INSN:
516 case CODE_LABEL:
517 case BARRIER:
518 case NOTE:
519 QUEUED_INSN (p) = body;
520 emit_insn (body);
521 break;
522
523#ifdef ENABLE_CHECKING
524 case SEQUENCE:
525 abort ();
526 break;
527#endif
528
529 default:
530 QUEUED_INSN (p) = emit_insn (body);
531 break;
41b083c4 532 }
2f937369 533
bbf6f052
RK
534 pending_chain = QUEUED_NEXT (p);
535 }
536}
bbf6f052
RK
537\f
538/* Copy data from FROM to TO, where the machine modes are not the same.
539 Both modes may be integer, or both may be floating.
540 UNSIGNEDP should be nonzero if FROM is an unsigned type.
541 This causes zero-extension instead of sign-extension. */
542
543void
502b8322 544convert_move (rtx to, rtx from, int unsignedp)
bbf6f052
RK
545{
546 enum machine_mode to_mode = GET_MODE (to);
547 enum machine_mode from_mode = GET_MODE (from);
548 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
549 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
550 enum insn_code code;
551 rtx libcall;
552
553 /* rtx code for making an equivalent value. */
37d0b254
SE
554 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
555 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
bbf6f052
RK
556
557 to = protect_from_queue (to, 1);
558 from = protect_from_queue (from, 0);
559
560 if (to_real != from_real)
561 abort ();
562
1499e0a8
RK
563 /* If FROM is a SUBREG that indicates that we have already done at least
564 the required extension, strip it. We don't handle such SUBREGs as
565 TO here. */
566
567 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
568 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
569 >= GET_MODE_SIZE (to_mode))
570 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
571 from = gen_lowpart (to_mode, from), from_mode = to_mode;
572
573 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
574 abort ();
575
bbf6f052
RK
576 if (to_mode == from_mode
577 || (from_mode == VOIDmode && CONSTANT_P (from)))
578 {
579 emit_move_insn (to, from);
580 return;
581 }
582
0b4565c9
BS
583 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
584 {
585 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
586 abort ();
3a94c984 587
0b4565c9 588 if (VECTOR_MODE_P (to_mode))
bafe341a 589 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
0b4565c9 590 else
bafe341a 591 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
0b4565c9
BS
592
593 emit_move_insn (to, from);
594 return;
595 }
596
06765df1
R
597 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
598 {
599 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
600 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
601 return;
602 }
603
bbf6f052
RK
604 if (to_real)
605 {
642dfa8b 606 rtx value, insns;
85363ca0 607 convert_optab tab;
81d79e2c 608
2b01c326 609 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
85363ca0
ZW
610 tab = sext_optab;
611 else if (GET_MODE_BITSIZE (from_mode) > GET_MODE_BITSIZE (to_mode))
612 tab = trunc_optab;
613 else
614 abort ();
2b01c326 615
85363ca0 616 /* Try converting directly if the insn is supported. */
2b01c326 617
85363ca0
ZW
618 code = tab->handlers[to_mode][from_mode].insn_code;
619 if (code != CODE_FOR_nothing)
b092b471 620 {
85363ca0
ZW
621 emit_unop_insn (code, to, from,
622 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
b092b471
JW
623 return;
624 }
b092b471 625
85363ca0
ZW
626 /* Otherwise use a libcall. */
627 libcall = tab->handlers[to_mode][from_mode].libfunc;
3a94c984 628
85363ca0 629 if (!libcall)
b092b471 630 /* This conversion is not implemented yet. */
bbf6f052
RK
631 abort ();
632
642dfa8b 633 start_sequence ();
ebb1b59a 634 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
81d79e2c 635 1, from, from_mode);
642dfa8b
BS
636 insns = get_insns ();
637 end_sequence ();
450b1728
EC
638 emit_libcall_block (insns, to, value,
639 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
640 from)
641 : gen_rtx_FLOAT_EXTEND (to_mode, from));
bbf6f052
RK
642 return;
643 }
644
85363ca0
ZW
645 /* Handle pointer conversion. */ /* SPEE 900220. */
646 /* Targets are expected to provide conversion insns between PxImode and
647 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
648 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
649 {
650 enum machine_mode full_mode
651 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
652
653 if (trunc_optab->handlers[to_mode][full_mode].insn_code
654 == CODE_FOR_nothing)
655 abort ();
656
657 if (full_mode != from_mode)
658 from = convert_to_mode (full_mode, from, unsignedp);
659 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
660 to, from, UNKNOWN);
661 return;
662 }
663 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
664 {
665 enum machine_mode full_mode
666 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
667
668 if (sext_optab->handlers[full_mode][from_mode].insn_code
669 == CODE_FOR_nothing)
670 abort ();
671
672 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
673 to, from, UNKNOWN);
674 if (to_mode == full_mode)
675 return;
676
677 /* else proceed to integer conversions below */
678 from_mode = full_mode;
679 }
680
bbf6f052
RK
681 /* Now both modes are integers. */
682
683 /* Handle expanding beyond a word. */
684 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
685 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
686 {
687 rtx insns;
688 rtx lowpart;
689 rtx fill_value;
690 rtx lowfrom;
691 int i;
692 enum machine_mode lowpart_mode;
693 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
694
695 /* Try converting directly if the insn is supported. */
696 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
697 != CODE_FOR_nothing)
698 {
cd1b4b44
RK
699 /* If FROM is a SUBREG, put it into a register. Do this
700 so that we always generate the same set of insns for
701 better cse'ing; if an intermediate assignment occurred,
702 we won't be doing the operation directly on the SUBREG. */
703 if (optimize > 0 && GET_CODE (from) == SUBREG)
704 from = force_reg (from_mode, from);
bbf6f052
RK
705 emit_unop_insn (code, to, from, equiv_code);
706 return;
707 }
708 /* Next, try converting via full word. */
709 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
710 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
711 != CODE_FOR_nothing))
712 {
a81fee56 713 if (GET_CODE (to) == REG)
38a448ca 714 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
bbf6f052
RK
715 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
716 emit_unop_insn (code, to,
717 gen_lowpart (word_mode, to), equiv_code);
718 return;
719 }
720
721 /* No special multiword conversion insn; do it by hand. */
722 start_sequence ();
723
5c5033c3
RK
724 /* Since we will turn this into a no conflict block, we must ensure
725 that the source does not overlap the target. */
726
727 if (reg_overlap_mentioned_p (to, from))
728 from = force_reg (from_mode, from);
729
bbf6f052
RK
730 /* Get a copy of FROM widened to a word, if necessary. */
731 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
732 lowpart_mode = word_mode;
733 else
734 lowpart_mode = from_mode;
735
736 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
737
738 lowpart = gen_lowpart (lowpart_mode, to);
739 emit_move_insn (lowpart, lowfrom);
740
741 /* Compute the value to put in each remaining word. */
742 if (unsignedp)
743 fill_value = const0_rtx;
744 else
745 {
746#ifdef HAVE_slt
747 if (HAVE_slt
a995e389 748 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
bbf6f052
RK
749 && STORE_FLAG_VALUE == -1)
750 {
906c4e36 751 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
a06ef755 752 lowpart_mode, 0);
bbf6f052
RK
753 fill_value = gen_reg_rtx (word_mode);
754 emit_insn (gen_slt (fill_value));
755 }
756 else
757#endif
758 {
759 fill_value
760 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
761 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
906c4e36 762 NULL_RTX, 0);
bbf6f052
RK
763 fill_value = convert_to_mode (word_mode, fill_value, 1);
764 }
765 }
766
767 /* Fill the remaining words. */
768 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
769 {
770 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
771 rtx subword = operand_subword (to, index, 1, to_mode);
772
773 if (subword == 0)
774 abort ();
775
776 if (fill_value != subword)
777 emit_move_insn (subword, fill_value);
778 }
779
780 insns = get_insns ();
781 end_sequence ();
782
906c4e36 783 emit_no_conflict_block (insns, to, from, NULL_RTX,
38a448ca 784 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
bbf6f052
RK
785 return;
786 }
787
d3c64ee3
RS
788 /* Truncating multi-word to a word or less. */
789 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
790 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
bbf6f052 791 {
431a6eca
JW
792 if (!((GET_CODE (from) == MEM
793 && ! MEM_VOLATILE_P (from)
794 && direct_load[(int) to_mode]
795 && ! mode_dependent_address_p (XEXP (from, 0)))
796 || GET_CODE (from) == REG
797 || GET_CODE (from) == SUBREG))
798 from = force_reg (from_mode, from);
bbf6f052
RK
799 convert_move (to, gen_lowpart (word_mode, from), 0);
800 return;
801 }
802
bbf6f052
RK
803 /* Now follow all the conversions between integers
804 no more than a word long. */
805
806 /* For truncation, usually we can just refer to FROM in a narrower mode. */
807 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
808 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
d3c64ee3 809 GET_MODE_BITSIZE (from_mode)))
bbf6f052 810 {
d3c64ee3
RS
811 if (!((GET_CODE (from) == MEM
812 && ! MEM_VOLATILE_P (from)
813 && direct_load[(int) to_mode]
814 && ! mode_dependent_address_p (XEXP (from, 0)))
815 || GET_CODE (from) == REG
816 || GET_CODE (from) == SUBREG))
817 from = force_reg (from_mode, from);
34aa3599
RK
818 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
819 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
820 from = copy_to_reg (from);
bbf6f052
RK
821 emit_move_insn (to, gen_lowpart (to_mode, from));
822 return;
823 }
824
d3c64ee3 825 /* Handle extension. */
bbf6f052
RK
826 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
827 {
828 /* Convert directly if that works. */
829 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
830 != CODE_FOR_nothing)
831 {
9413de45
RK
832 if (flag_force_mem)
833 from = force_not_mem (from);
834
bbf6f052
RK
835 emit_unop_insn (code, to, from, equiv_code);
836 return;
837 }
838 else
839 {
840 enum machine_mode intermediate;
2b28d92e
NC
841 rtx tmp;
842 tree shift_amount;
bbf6f052
RK
843
844 /* Search for a mode to convert via. */
845 for (intermediate = from_mode; intermediate != VOIDmode;
846 intermediate = GET_MODE_WIDER_MODE (intermediate))
930b4e39
RK
847 if (((can_extend_p (to_mode, intermediate, unsignedp)
848 != CODE_FOR_nothing)
849 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
d60eaeff
JL
850 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
851 GET_MODE_BITSIZE (intermediate))))
bbf6f052
RK
852 && (can_extend_p (intermediate, from_mode, unsignedp)
853 != CODE_FOR_nothing))
854 {
855 convert_move (to, convert_to_mode (intermediate, from,
856 unsignedp), unsignedp);
857 return;
858 }
859
2b28d92e 860 /* No suitable intermediate mode.
3a94c984 861 Generate what we need with shifts. */
2b28d92e
NC
862 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
863 - GET_MODE_BITSIZE (from_mode), 0);
864 from = gen_lowpart (to_mode, force_reg (from_mode, from));
865 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
866 to, unsignedp);
3a94c984 867 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
2b28d92e
NC
868 to, unsignedp);
869 if (tmp != to)
870 emit_move_insn (to, tmp);
871 return;
bbf6f052
RK
872 }
873 }
874
3a94c984 875 /* Support special truncate insns for certain modes. */
85363ca0 876 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
bbf6f052 877 {
85363ca0
ZW
878 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
879 to, from, UNKNOWN);
b9bcad65
RK
880 return;
881 }
882
bbf6f052
RK
883 /* Handle truncation of volatile memrefs, and so on;
884 the things that couldn't be truncated directly,
85363ca0
ZW
885 and for which there was no special instruction.
886
887 ??? Code above formerly short-circuited this, for most integer
888 mode pairs, with a force_reg in from_mode followed by a recursive
889 call to this routine. Appears always to have been wrong. */
bbf6f052
RK
890 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
891 {
892 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
893 emit_move_insn (to, temp);
894 return;
895 }
896
897 /* Mode combination is not recognized. */
898 abort ();
899}
900
901/* Return an rtx for a value that would result
902 from converting X to mode MODE.
903 Both X and MODE may be floating, or both integer.
904 UNSIGNEDP is nonzero if X is an unsigned value.
905 This can be done by referring to a part of X in place
5d901c31
RS
906 or by copying to a new temporary with conversion.
907
908 This function *must not* call protect_from_queue
909 except when putting X into an insn (in which case convert_move does it). */
bbf6f052
RK
910
911rtx
502b8322 912convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
5ffe63ed
RS
913{
914 return convert_modes (mode, VOIDmode, x, unsignedp);
915}
916
917/* Return an rtx for a value that would result
918 from converting X from mode OLDMODE to mode MODE.
919 Both modes may be floating, or both integer.
920 UNSIGNEDP is nonzero if X is an unsigned value.
921
922 This can be done by referring to a part of X in place
923 or by copying to a new temporary with conversion.
924
925 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
926
927 This function *must not* call protect_from_queue
928 except when putting X into an insn (in which case convert_move does it). */
929
930rtx
502b8322 931convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
bbf6f052 932{
b3694847 933 rtx temp;
5ffe63ed 934
1499e0a8
RK
935 /* If FROM is a SUBREG that indicates that we have already done at least
936 the required extension, strip it. */
937
938 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
939 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
940 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
941 x = gen_lowpart (mode, x);
bbf6f052 942
64791b18
RK
943 if (GET_MODE (x) != VOIDmode)
944 oldmode = GET_MODE (x);
3a94c984 945
5ffe63ed 946 if (mode == oldmode)
bbf6f052
RK
947 return x;
948
949 /* There is one case that we must handle specially: If we are converting
906c4e36 950 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
bbf6f052
RK
951 we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
953 make the high-order word of the constant zero, not all ones. */
954
955 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
906c4e36 956 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
bbf6f052 957 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
96ff8a16
ILT
958 {
959 HOST_WIDE_INT val = INTVAL (x);
960
961 if (oldmode != VOIDmode
962 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
963 {
964 int width = GET_MODE_BITSIZE (oldmode);
965
966 /* We need to zero extend VAL. */
967 val &= ((HOST_WIDE_INT) 1 << width) - 1;
968 }
969
970 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
971 }
bbf6f052
RK
972
973 /* We can do this with a gen_lowpart if both desired and current modes
974 are integer, and this is either a constant integer, a register, or a
ba2e110c
RK
975 non-volatile MEM. Except for the constant case where MODE is no
976 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
bbf6f052 977
ba2e110c
RK
978 if ((GET_CODE (x) == CONST_INT
979 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 980 || (GET_MODE_CLASS (mode) == MODE_INT
5ffe63ed 981 && GET_MODE_CLASS (oldmode) == MODE_INT
bbf6f052 982 && (GET_CODE (x) == CONST_DOUBLE
5ffe63ed 983 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
d57c66da
JW
984 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
985 && direct_load[(int) mode])
2bf29316 986 || (GET_CODE (x) == REG
006c9f4a
SE
987 && (! HARD_REGISTER_P (x)
988 || HARD_REGNO_MODE_OK (REGNO (x), mode))
2bf29316
JW
989 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
990 GET_MODE_BITSIZE (GET_MODE (x)))))))))
ba2e110c
RK
991 {
992 /* ?? If we don't know OLDMODE, we have to assume here that
993 X does not need sign- or zero-extension. This may not be
994 the case, but it's the best we can do. */
995 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
996 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
997 {
998 HOST_WIDE_INT val = INTVAL (x);
999 int width = GET_MODE_BITSIZE (oldmode);
1000
1001 /* We must sign or zero-extend in this case. Start by
1002 zero-extending, then sign extend if we need to. */
1003 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1004 if (! unsignedp
1005 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1006 val |= (HOST_WIDE_INT) (-1) << width;
1007
2496c7bd 1008 return gen_int_mode (val, mode);
ba2e110c
RK
1009 }
1010
1011 return gen_lowpart (mode, x);
1012 }
bbf6f052 1013
ebe75517
JH
  /* Converting an integer constant into MODE is always equivalent to a
     subreg operation.  */
1016 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
1017 {
1018 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
1019 abort ();
1020 return simplify_gen_subreg (mode, x, oldmode, 0);
1021 }
1022
bbf6f052
RK
1023 temp = gen_reg_rtx (mode);
1024 convert_move (temp, x, unsignedp);
1025 return temp;
1026}
1027\f
cf5124f6
RS
1028/* STORE_MAX_PIECES is the number of bytes at a time that we can
1029 store efficiently. Due to internal GCC limitations, this is
1030 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1031 for an immediate constant. */
1032
1033#define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1034
8fd3cf4e
JJ
1035/* Determine whether the LEN bytes can be moved by using several move
1036 instructions. Return nonzero if a call to move_by_pieces should
1037 succeed. */
1038
1039int
502b8322
AJ
1040can_move_by_pieces (unsigned HOST_WIDE_INT len,
1041 unsigned int align ATTRIBUTE_UNUSED)
8fd3cf4e
JJ
1042{
1043 return MOVE_BY_PIECES_P (len, align);
1044}
1045
21d93687
RK
1046/* Generate several move instructions to copy LEN bytes from block FROM to
1047 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1048 and TO through protect_from_queue before calling.
566aa174 1049
21d93687
RK
1050 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1051 used to push FROM to the stack.
566aa174 1052
8fd3cf4e 1053 ALIGN is maximum stack alignment we can assume.
bbf6f052 1054
8fd3cf4e
JJ
1055 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1056 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1057 stpcpy. */
1058
1059rtx
502b8322
AJ
1060move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1061 unsigned int align, int endp)
bbf6f052
RK
1062{
1063 struct move_by_pieces data;
566aa174 1064 rtx to_addr, from_addr = XEXP (from, 0);
770ae6cc 1065 unsigned int max_size = MOVE_MAX_PIECES + 1;
fbe1758d
AM
1066 enum machine_mode mode = VOIDmode, tmode;
1067 enum insn_code icode;
bbf6f052 1068
f26aca6d
DD
1069 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1070
bbf6f052 1071 data.offset = 0;
bbf6f052 1072 data.from_addr = from_addr;
566aa174
JH
1073 if (to)
1074 {
1075 to_addr = XEXP (to, 0);
1076 data.to = to;
1077 data.autinc_to
1078 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1079 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1080 data.reverse
1081 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1082 }
1083 else
1084 {
1085 to_addr = NULL_RTX;
1086 data.to = NULL_RTX;
1087 data.autinc_to = 1;
1088#ifdef STACK_GROWS_DOWNWARD
1089 data.reverse = 1;
1090#else
1091 data.reverse = 0;
1092#endif
1093 }
1094 data.to_addr = to_addr;
bbf6f052 1095 data.from = from;
bbf6f052
RK
1096 data.autinc_from
1097 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1098 || GET_CODE (from_addr) == POST_INC
1099 || GET_CODE (from_addr) == POST_DEC);
1100
1101 data.explicit_inc_from = 0;
1102 data.explicit_inc_to = 0;
bbf6f052
RK
1103 if (data.reverse) data.offset = len;
1104 data.len = len;
1105
1106 /* If copying requires more than two move insns,
1107 copy addresses to registers (to make displacements shorter)
1108 and use post-increment if available. */
1109 if (!(data.autinc_from && data.autinc_to)
1110 && move_by_pieces_ninsns (len, align) > 2)
1111 {
3a94c984 1112 /* Find the mode of the largest move... */
fbe1758d
AM
1113 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1114 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1115 if (GET_MODE_SIZE (tmode) < max_size)
1116 mode = tmode;
1117
1118 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
bbf6f052
RK
1119 {
1120 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1121 data.autinc_from = 1;
1122 data.explicit_inc_from = -1;
1123 }
fbe1758d 1124 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
bbf6f052
RK
1125 {
1126 data.from_addr = copy_addr_to_reg (from_addr);
1127 data.autinc_from = 1;
1128 data.explicit_inc_from = 1;
1129 }
bbf6f052
RK
1130 if (!data.autinc_from && CONSTANT_P (from_addr))
1131 data.from_addr = copy_addr_to_reg (from_addr);
fbe1758d 1132 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
bbf6f052
RK
1133 {
1134 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1135 data.autinc_to = 1;
1136 data.explicit_inc_to = -1;
1137 }
fbe1758d 1138 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
bbf6f052
RK
1139 {
1140 data.to_addr = copy_addr_to_reg (to_addr);
1141 data.autinc_to = 1;
1142 data.explicit_inc_to = 1;
1143 }
bbf6f052
RK
1144 if (!data.autinc_to && CONSTANT_P (to_addr))
1145 data.to_addr = copy_addr_to_reg (to_addr);
1146 }
1147
e1565e65 1148 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
19caa751
RK
1149 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1150 align = MOVE_MAX * BITS_PER_UNIT;
bbf6f052
RK
1151
1152 /* First move what we can in the largest integer mode, then go to
1153 successively smaller modes. */
1154
1155 while (max_size > 1)
1156 {
e7c33f54
RK
1157 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1158 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1159 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
1160 mode = tmode;
1161
1162 if (mode == VOIDmode)
1163 break;
1164
1165 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 1166 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
bbf6f052
RK
1167 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1168
1169 max_size = GET_MODE_SIZE (mode);
1170 }
1171
1172 /* The code above should have handled everything. */
2a8e278c 1173 if (data.len > 0)
bbf6f052 1174 abort ();
8fd3cf4e
JJ
1175
1176 if (endp)
1177 {
1178 rtx to1;
1179
1180 if (data.reverse)
1181 abort ();
1182 if (data.autinc_to)
1183 {
1184 if (endp == 2)
1185 {
1186 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1187 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1188 else
1189 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1190 -1));
1191 }
1192 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1193 data.offset);
1194 }
1195 else
1196 {
1197 if (endp == 2)
1198 --data.offset;
1199 to1 = adjust_address (data.to, QImode, data.offset);
1200 }
1201 return to1;
1202 }
1203 else
1204 return data.to;
bbf6f052
RK
1205}
1206
1207/* Return number of insns required to move L bytes by pieces.
f1eaaf73 1208 ALIGN (in bits) is maximum alignment we can assume. */
bbf6f052 1209
3bdf5ad1 1210static unsigned HOST_WIDE_INT
502b8322 1211move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
bbf6f052 1212{
3bdf5ad1
RK
1213 unsigned HOST_WIDE_INT n_insns = 0;
1214 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
bbf6f052 1215
e1565e65 1216 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
19caa751 1217 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
14c78e9b 1218 align = MOVE_MAX * BITS_PER_UNIT;
bbf6f052
RK
1219
1220 while (max_size > 1)
1221 {
1222 enum machine_mode mode = VOIDmode, tmode;
1223 enum insn_code icode;
1224
e7c33f54
RK
1225 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1226 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1227 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
1228 mode = tmode;
1229
1230 if (mode == VOIDmode)
1231 break;
1232
1233 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 1234 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
bbf6f052
RK
1235 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1236
1237 max_size = GET_MODE_SIZE (mode);
1238 }
1239
13c6f0d5
NS
1240 if (l)
1241 abort ();
bbf6f052
RK
1242 return n_insns;
1243}
1244
1245/* Subroutine of move_by_pieces. Move as many bytes as appropriate
1246 with move instructions for mode MODE. GENFUN is the gen_... function
1247 to make a move insn for that mode. DATA has all the other info. */
1248
1249static void
502b8322
AJ
1250move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1251 struct move_by_pieces *data)
bbf6f052 1252{
3bdf5ad1 1253 unsigned int size = GET_MODE_SIZE (mode);
ae0ed63a 1254 rtx to1 = NULL_RTX, from1;
bbf6f052
RK
1255
1256 while (data->len >= size)
1257 {
3bdf5ad1
RK
1258 if (data->reverse)
1259 data->offset -= size;
1260
566aa174 1261 if (data->to)
3bdf5ad1 1262 {
566aa174 1263 if (data->autinc_to)
630036c6
JJ
1264 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1265 data->offset);
566aa174 1266 else
f4ef873c 1267 to1 = adjust_address (data->to, mode, data->offset);
3bdf5ad1 1268 }
3bdf5ad1
RK
1269
1270 if (data->autinc_from)
630036c6
JJ
1271 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1272 data->offset);
3bdf5ad1 1273 else
f4ef873c 1274 from1 = adjust_address (data->from, mode, data->offset);
bbf6f052 1275
940da324 1276 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
3d709fd3
RH
1277 emit_insn (gen_add2_insn (data->to_addr,
1278 GEN_INT (-(HOST_WIDE_INT)size)));
940da324 1279 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
3d709fd3
RH
1280 emit_insn (gen_add2_insn (data->from_addr,
1281 GEN_INT (-(HOST_WIDE_INT)size)));
bbf6f052 1282
566aa174
JH
1283 if (data->to)
1284 emit_insn ((*genfun) (to1, from1));
1285 else
21d93687
RK
1286 {
1287#ifdef PUSH_ROUNDING
1288 emit_single_push_insn (mode, from1, NULL);
1289#else
1290 abort ();
1291#endif
1292 }
3bdf5ad1 1293
940da324 1294 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
906c4e36 1295 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
940da324 1296 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
906c4e36 1297 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
bbf6f052 1298
3bdf5ad1
RK
1299 if (! data->reverse)
1300 data->offset += size;
bbf6f052
RK
1301
1302 data->len -= size;
1303 }
1304}
1305\f
4ca79136
RH
1306/* Emit code to move a block Y to a block X. This may be done with
1307 string-move instructions, with multiple scalar move instructions,
1308 or with a library call.
bbf6f052 1309
4ca79136 1310 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
bbf6f052 1311 SIZE is an rtx that says how long they are.
19caa751 1312 ALIGN is the maximum alignment we can assume they have.
44bb111a 1313 METHOD describes what kind of copy this is, and what mechanisms may be used.
bbf6f052 1314
e9a25f70
JL
1315 Return the address of the new block, if memcpy is called and returns it,
1316 0 otherwise. */
1317
1318rtx
502b8322 1319emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
bbf6f052 1320{
44bb111a 1321 bool may_use_call;
e9a25f70 1322 rtx retval = 0;
44bb111a
RH
1323 unsigned int align;
1324
1325 switch (method)
1326 {
1327 case BLOCK_OP_NORMAL:
1328 may_use_call = true;
1329 break;
1330
1331 case BLOCK_OP_CALL_PARM:
1332 may_use_call = block_move_libcall_safe_for_call_parm ();
1333
1334 /* Make inhibit_defer_pop nonzero around the library call
1335 to force it to pop the arguments right away. */
1336 NO_DEFER_POP;
1337 break;
1338
1339 case BLOCK_OP_NO_LIBCALL:
1340 may_use_call = false;
1341 break;
1342
1343 default:
1344 abort ();
1345 }
1346
1347 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
e9a25f70 1348
bbf6f052
RK
1349 if (GET_MODE (x) != BLKmode)
1350 abort ();
bbf6f052
RK
1351 if (GET_MODE (y) != BLKmode)
1352 abort ();
1353
1354 x = protect_from_queue (x, 1);
1355 y = protect_from_queue (y, 0);
5d901c31 1356 size = protect_from_queue (size, 0);
bbf6f052
RK
1357
1358 if (GET_CODE (x) != MEM)
1359 abort ();
1360 if (GET_CODE (y) != MEM)
1361 abort ();
1362 if (size == 0)
1363 abort ();
1364
cb38fd88
RH
1365 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1366 can be incorrect is coming from __builtin_memcpy. */
1367 if (GET_CODE (size) == CONST_INT)
1368 {
6972c506
JJ
1369 if (INTVAL (size) == 0)
1370 return 0;
1371
cb38fd88
RH
1372 x = shallow_copy_rtx (x);
1373 y = shallow_copy_rtx (y);
1374 set_mem_size (x, size);
1375 set_mem_size (y, size);
1376 }
1377
fbe1758d 1378 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
8fd3cf4e 1379 move_by_pieces (x, y, INTVAL (size), align, 0);
4ca79136
RH
1380 else if (emit_block_move_via_movstr (x, y, size, align))
1381 ;
44bb111a 1382 else if (may_use_call)
4ca79136 1383 retval = emit_block_move_via_libcall (x, y, size);
44bb111a
RH
1384 else
1385 emit_block_move_via_loop (x, y, size, align);
1386
1387 if (method == BLOCK_OP_CALL_PARM)
1388 OK_DEFER_POP;
266007a7 1389
4ca79136
RH
1390 return retval;
1391}
266007a7 1392
502b8322 1393/* A subroutine of emit_block_move. Returns true if calling the
44bb111a
RH
1394 block move libcall will not clobber any parameters which may have
1395 already been placed on the stack. */
1396
1397static bool
502b8322 1398block_move_libcall_safe_for_call_parm (void)
44bb111a 1399{
a357a6d4 1400 /* If arguments are pushed on the stack, then they're safe. */
44bb111a
RH
1401 if (PUSH_ARGS)
1402 return true;
44bb111a 1403
450b1728 1404 /* If registers go on the stack anyway, any argument is sure to clobber
a357a6d4
GK
1405 an outgoing argument. */
1406#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1407 {
1408 tree fn = emit_block_move_libcall_fn (false);
1409 (void) fn;
1410 if (REG_PARM_STACK_SPACE (fn) != 0)
1411 return false;
1412 }
44bb111a 1413#endif
44bb111a 1414
a357a6d4
GK
1415 /* If any argument goes in memory, then it might clobber an outgoing
1416 argument. */
1417 {
1418 CUMULATIVE_ARGS args_so_far;
1419 tree fn, arg;
450b1728 1420
a357a6d4
GK
1421 fn = emit_block_move_libcall_fn (false);
1422 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
450b1728 1423
a357a6d4
GK
1424 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1425 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1426 {
1427 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1428 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1429 if (!tmp || !REG_P (tmp))
44bb111a 1430 return false;
a357a6d4
GK
1431#ifdef FUNCTION_ARG_PARTIAL_NREGS
1432 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1433 NULL_TREE, 1))
1434 return false;
1435#endif
1436 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1437 }
1438 }
1439 return true;
44bb111a
RH
1440}
1441
502b8322 1442/* A subroutine of emit_block_move. Expand a movstr pattern;
4ca79136 1443 return true if successful. */
3ef1eef4 1444
4ca79136 1445static bool
502b8322 1446emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
4ca79136 1447{
4ca79136
RH
1448 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1449 enum machine_mode mode;
266007a7 1450
4ca79136
RH
1451 /* Since this is a move insn, we don't care about volatility. */
1452 volatile_ok = 1;
1453
ee960939
OH
1454 /* Try the most limited insn first, because there's no point
1455 including more than one in the machine description unless
1456 the more limited one has some advantage. */
1457
4ca79136
RH
1458 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1459 mode = GET_MODE_WIDER_MODE (mode))
1460 {
1461 enum insn_code code = movstr_optab[(int) mode];
1462 insn_operand_predicate_fn pred;
1463
1464 if (code != CODE_FOR_nothing
1465 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1466 here because if SIZE is less than the mode mask, as it is
1467 returned by the macro, it will definitely be less than the
1468 actual mode mask. */
1469 && ((GET_CODE (size) == CONST_INT
1470 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1471 <= (GET_MODE_MASK (mode) >> 1)))
1472 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1473 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1474 || (*pred) (x, BLKmode))
1475 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1476 || (*pred) (y, BLKmode))
1477 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1478 || (*pred) (opalign, VOIDmode)))
1479 {
1480 rtx op2;
1481 rtx last = get_last_insn ();
1482 rtx pat;
1483
1484 op2 = convert_to_mode (mode, size, 1);
1485 pred = insn_data[(int) code].operand[2].predicate;
1486 if (pred != 0 && ! (*pred) (op2, mode))
1487 op2 = copy_to_mode_reg (mode, op2);
1488
1489 /* ??? When called via emit_block_move_for_call, it'd be
1490 nice if there were some way to inform the backend, so
1491 that it doesn't fail the expansion because it thinks
1492 emitting the libcall would be more efficient. */
1493
1494 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1495 if (pat)
1496 {
1497 emit_insn (pat);
1498 volatile_ok = 0;
1499 return true;
bbf6f052 1500 }
4ca79136
RH
1501 else
1502 delete_insns_since (last);
bbf6f052 1503 }
4ca79136 1504 }
bbf6f052 1505
4ca79136
RH
1506 volatile_ok = 0;
1507 return false;
1508}
3ef1eef4 1509
4ca79136
RH
1510/* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1511 Return the return value from memcpy, 0 otherwise. */
4bc973ae 1512
4ca79136 1513static rtx
502b8322 1514emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
4ca79136 1515{
ee960939 1516 rtx dst_addr, src_addr;
4ca79136
RH
1517 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1518 enum machine_mode size_mode;
1519 rtx retval;
4bc973ae 1520
4ca79136 1521 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
4bc973ae 1522
ee960939
OH
1523 It is unsafe to save the value generated by protect_from_queue and reuse
1524 it later. Consider what happens if emit_queue is called before the
1525 return value from protect_from_queue is used.
4bc973ae 1526
ee960939
OH
1527 Expansion of the CALL_EXPR below will call emit_queue before we are
1528 finished emitting RTL for argument setup. So if we are not careful we
1529 could get the wrong value for an argument.
4bc973ae 1530
ee960939
OH
1531 To avoid this problem we go ahead and emit code to copy the addresses of
1532 DST and SRC and SIZE into new pseudos. We can then place those new
1533 pseudos into an RTL_EXPR and use them later, even after a call to
4ca79136 1534 emit_queue.
4bc973ae 1535
ee960939
OH
1536 Note this is not strictly needed for library calls since they do not call
1537 emit_queue before loading their arguments. However, we may need to have
1538 library calls call emit_queue in the future since failing to do so could
1539 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1540 arguments in registers. */
1541
1542 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1543 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
4ca79136 1544
ee960939
OH
1545 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1546 src_addr = convert_memory_address (ptr_mode, src_addr);
ee960939
OH
1547
1548 dst_tree = make_tree (ptr_type_node, dst_addr);
1549 src_tree = make_tree (ptr_type_node, src_addr);
4ca79136
RH
1550
1551 if (TARGET_MEM_FUNCTIONS)
1552 size_mode = TYPE_MODE (sizetype);
1553 else
1554 size_mode = TYPE_MODE (unsigned_type_node);
ee960939 1555
4ca79136
RH
1556 size = convert_to_mode (size_mode, size, 1);
1557 size = copy_to_mode_reg (size_mode, size);
1558
1559 /* It is incorrect to use the libcall calling conventions to call
1560 memcpy in this context. This could be a user call to memcpy and
1561 the user may wish to examine the return value from memcpy. For
1562 targets where libcalls and normal calls have different conventions
1563 for returning pointers, we could end up generating incorrect code.
1564
1565 For convenience, we generate the call to bcopy this way as well. */
1566
4ca79136
RH
1567 if (TARGET_MEM_FUNCTIONS)
1568 size_tree = make_tree (sizetype, size);
1569 else
1570 size_tree = make_tree (unsigned_type_node, size);
1571
1572 fn = emit_block_move_libcall_fn (true);
1573 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1574 if (TARGET_MEM_FUNCTIONS)
1575 {
1576 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1577 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1578 }
1579 else
1580 {
1581 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1582 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1583 }
1584
1585 /* Now we have to build up the CALL_EXPR itself. */
1586 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1587 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1588 call_expr, arg_list, NULL_TREE);
4ca79136
RH
1589
1590 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1591
ee960939
OH
1592 /* If we are initializing a readonly value, show the above call clobbered
1593 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
 1594 the delay slot scheduler might overlook conflicts and make bad
1595 decisions. */
4ca79136 1596 if (RTX_UNCHANGING_P (dst))
ee960939
OH
1597 add_function_usage_to
1598 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1599 gen_rtx_CLOBBER (VOIDmode, dst),
1600 NULL_RTX));
4ca79136 1601
ee960939 1602 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
4ca79136 1603}
52cf7115 1604
4ca79136
RH
1605/* A subroutine of emit_block_move_via_libcall. Create the tree node
1606 for the function we use for block copies. The first time FOR_CALL
1607 is true, we call assemble_external. */
52cf7115 1608
4ca79136
RH
1609static GTY(()) tree block_move_fn;
1610
9661b15f 1611void
502b8322 1612init_block_move_fn (const char *asmspec)
4ca79136 1613{
9661b15f 1614 if (!block_move_fn)
4ca79136 1615 {
8fd3cf4e 1616 tree args, fn;
9661b15f 1617
4ca79136 1618 if (TARGET_MEM_FUNCTIONS)
52cf7115 1619 {
4ca79136
RH
1620 fn = get_identifier ("memcpy");
1621 args = build_function_type_list (ptr_type_node, ptr_type_node,
1622 const_ptr_type_node, sizetype,
1623 NULL_TREE);
1624 }
1625 else
1626 {
1627 fn = get_identifier ("bcopy");
1628 args = build_function_type_list (void_type_node, const_ptr_type_node,
1629 ptr_type_node, unsigned_type_node,
1630 NULL_TREE);
52cf7115
JL
1631 }
1632
4ca79136
RH
1633 fn = build_decl (FUNCTION_DECL, fn, args);
1634 DECL_EXTERNAL (fn) = 1;
1635 TREE_PUBLIC (fn) = 1;
1636 DECL_ARTIFICIAL (fn) = 1;
1637 TREE_NOTHROW (fn) = 1;
66c60e67 1638
4ca79136 1639 block_move_fn = fn;
bbf6f052 1640 }
e9a25f70 1641
9661b15f
JJ
1642 if (asmspec)
1643 {
1644 SET_DECL_RTL (block_move_fn, NULL_RTX);
1645 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1646 }
1647}
1648
1649static tree
502b8322 1650emit_block_move_libcall_fn (int for_call)
9661b15f
JJ
1651{
1652 static bool emitted_extern;
1653
1654 if (!block_move_fn)
1655 init_block_move_fn (NULL);
1656
4ca79136
RH
1657 if (for_call && !emitted_extern)
1658 {
1659 emitted_extern = true;
9661b15f
JJ
1660 make_decl_rtl (block_move_fn, NULL);
1661 assemble_external (block_move_fn);
4ca79136
RH
1662 }
1663
9661b15f 1664 return block_move_fn;
bbf6f052 1665}
44bb111a
RH
1666
1667/* A subroutine of emit_block_move. Copy the data via an explicit
1668 loop. This is used only when libcalls are forbidden. */
1669/* ??? It'd be nice to copy in hunks larger than QImode. */
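/* Illustrative sketch (not from the original source) of the loop emitted
   below; it copies one byte per iteration:

       iter = 0;
       goto cmp;
     top:
       *((char *) x + iter) = *((char *) y + iter);
       iter = iter + 1;
     cmp:
       if (iter < size) goto top;

   The comparison is done in ITER's mode, and the byte addresses are formed
   by adding ITER to the forced addresses of X and Y.  */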
1670
1671static void
502b8322
AJ
1672emit_block_move_via_loop (rtx x, rtx y, rtx size,
1673 unsigned int align ATTRIBUTE_UNUSED)
44bb111a
RH
1674{
1675 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1676 enum machine_mode iter_mode;
1677
1678 iter_mode = GET_MODE (size);
1679 if (iter_mode == VOIDmode)
1680 iter_mode = word_mode;
1681
1682 top_label = gen_label_rtx ();
1683 cmp_label = gen_label_rtx ();
1684 iter = gen_reg_rtx (iter_mode);
1685
1686 emit_move_insn (iter, const0_rtx);
1687
1688 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1689 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1690 do_pending_stack_adjust ();
1691
2e040219 1692 emit_note (NOTE_INSN_LOOP_BEG);
44bb111a
RH
1693
1694 emit_jump (cmp_label);
1695 emit_label (top_label);
1696
1697 tmp = convert_modes (Pmode, iter_mode, iter, true);
1698 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1699 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1700 x = change_address (x, QImode, x_addr);
1701 y = change_address (y, QImode, y_addr);
1702
1703 emit_move_insn (x, y);
1704
1705 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1706 true, OPTAB_LIB_WIDEN);
1707 if (tmp != iter)
1708 emit_move_insn (iter, tmp);
1709
2e040219 1710 emit_note (NOTE_INSN_LOOP_CONT);
44bb111a
RH
1711 emit_label (cmp_label);
1712
1713 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1714 true, top_label);
1715
2e040219 1716 emit_note (NOTE_INSN_LOOP_END);
44bb111a 1717}
bbf6f052
RK
1718\f
1719/* Copy all or part of a value X into registers starting at REGNO.
1720 The number of registers to be filled is NREGS. */
1721
1722void
502b8322 1723move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
bbf6f052
RK
1724{
1725 int i;
381127e8 1726#ifdef HAVE_load_multiple
3a94c984 1727 rtx pat;
381127e8
RL
1728 rtx last;
1729#endif
bbf6f052 1730
72bb9717
RK
1731 if (nregs == 0)
1732 return;
1733
bbf6f052
RK
1734 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1735 x = validize_mem (force_const_mem (mode, x));
1736
1737 /* See if the machine can do this with a load multiple insn. */
1738#ifdef HAVE_load_multiple
c3a02afe 1739 if (HAVE_load_multiple)
bbf6f052 1740 {
c3a02afe 1741 last = get_last_insn ();
38a448ca 1742 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
c3a02afe
RK
1743 GEN_INT (nregs));
1744 if (pat)
1745 {
1746 emit_insn (pat);
1747 return;
1748 }
1749 else
1750 delete_insns_since (last);
bbf6f052 1751 }
bbf6f052
RK
1752#endif
1753
1754 for (i = 0; i < nregs; i++)
38a448ca 1755 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
bbf6f052
RK
1756 operand_subword_force (x, i, mode));
1757}
1758
1759/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
c6b97fac 1760 The number of registers to be filled is NREGS. */
0040593d 1761
bbf6f052 1762void
502b8322 1763move_block_from_reg (int regno, rtx x, int nregs)
bbf6f052
RK
1764{
1765 int i;
bbf6f052 1766
2954d7db
RK
1767 if (nregs == 0)
1768 return;
1769
bbf6f052
RK
1770 /* See if the machine can do this with a store multiple insn. */
1771#ifdef HAVE_store_multiple
c3a02afe 1772 if (HAVE_store_multiple)
bbf6f052 1773 {
c6b97fac
AM
1774 rtx last = get_last_insn ();
1775 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1776 GEN_INT (nregs));
c3a02afe
RK
1777 if (pat)
1778 {
1779 emit_insn (pat);
1780 return;
1781 }
1782 else
1783 delete_insns_since (last);
bbf6f052 1784 }
bbf6f052
RK
1785#endif
1786
1787 for (i = 0; i < nregs; i++)
1788 {
1789 rtx tem = operand_subword (x, i, 1, BLKmode);
1790
1791 if (tem == 0)
1792 abort ();
1793
38a448ca 1794 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
bbf6f052
RK
1795 }
1796}
1797
084a1106
JDA
1798/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1799 ORIG, where ORIG is a non-consecutive group of registers represented by
1800 a PARALLEL. The clone is identical to the original except in that the
1801 original set of registers is replaced by a new set of pseudo registers.
1802 The new set has the same modes as the original set. */
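/* Illustrative example (not from the original source): a group describing
   a value split across two SImode registers at byte offsets 0 and 4 has
   the shape

       (parallel [(expr_list (reg:SI 100) (const_int 0))
                  (expr_list (reg:SI 101) (const_int 4))])

   and the clone returned below keeps the offsets and modes but replaces
   each register with a freshly allocated pseudo.  */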
1803
1804rtx
502b8322 1805gen_group_rtx (rtx orig)
084a1106
JDA
1806{
1807 int i, length;
1808 rtx *tmps;
1809
1810 if (GET_CODE (orig) != PARALLEL)
1811 abort ();
1812
1813 length = XVECLEN (orig, 0);
703ad42b 1814 tmps = alloca (sizeof (rtx) * length);
084a1106
JDA
1815
1816 /* Skip a NULL entry in first slot. */
1817 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1818
1819 if (i)
1820 tmps[0] = 0;
1821
1822 for (; i < length; i++)
1823 {
1824 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1825 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1826
1827 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1828 }
1829
1830 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1831}
1832
6e985040
AM
1833/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1834 where DST is non-consecutive registers represented by a PARALLEL.
1835 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
450b1728 1836 if not known. */
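/* Illustrative example (not from the original source): if DST is

       (parallel [(expr_list (reg:SI 100) (const_int 0))
                  (expr_list (reg:SI 101) (const_int 4))])

   and SSIZE is 8, the loop below copies bytes 0-3 of ORIG_SRC into
   (reg:SI 100) and bytes 4-7 into (reg:SI 101), using a plain move when a
   piece is sufficiently aligned and exactly GET_MODE_SIZE (mode) bytes
   long, and extract_bit_field (or other special handling) otherwise.  */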
fffa9c1d
JW
1837
1838void
6e985040 1839emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
fffa9c1d 1840{
aac5cc16
RH
1841 rtx *tmps, src;
1842 int start, i;
fffa9c1d 1843
aac5cc16 1844 if (GET_CODE (dst) != PARALLEL)
fffa9c1d
JW
1845 abort ();
1846
1847 /* Check for a NULL entry, used to indicate that the parameter goes
1848 both on the stack and in registers. */
aac5cc16
RH
1849 if (XEXP (XVECEXP (dst, 0, 0), 0))
1850 start = 0;
fffa9c1d 1851 else
aac5cc16
RH
1852 start = 1;
1853
703ad42b 1854 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
aac5cc16 1855
aac5cc16
RH
1856 /* Process the pieces. */
1857 for (i = start; i < XVECLEN (dst, 0); i++)
1858 {
1859 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
770ae6cc
RK
1860 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1861 unsigned int bytelen = GET_MODE_SIZE (mode);
aac5cc16
RH
1862 int shift = 0;
1863
1864 /* Handle trailing fragments that run over the size of the struct. */
8752c357 1865 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
aac5cc16 1866 {
6e985040
AM
1867 /* Arrange to shift the fragment to where it belongs.
1868 extract_bit_field loads to the lsb of the reg. */
1869 if (
1870#ifdef BLOCK_REG_PADDING
1871 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1872 == (BYTES_BIG_ENDIAN ? upward : downward)
1873#else
1874 BYTES_BIG_ENDIAN
1875#endif
1876 )
1877 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
aac5cc16
RH
1878 bytelen = ssize - bytepos;
1879 if (bytelen <= 0)
729a2125 1880 abort ();
aac5cc16
RH
1881 }
1882
f3ce87a9
DE
1883 /* If we won't be loading directly from memory, protect the real source
1884 from strange tricks we might play; but make sure that the source can
1885 be loaded directly into the destination. */
1886 src = orig_src;
1887 if (GET_CODE (orig_src) != MEM
1888 && (!CONSTANT_P (orig_src)
1889 || (GET_MODE (orig_src) != mode
1890 && GET_MODE (orig_src) != VOIDmode)))
1891 {
1892 if (GET_MODE (orig_src) == VOIDmode)
1893 src = gen_reg_rtx (mode);
1894 else
1895 src = gen_reg_rtx (GET_MODE (orig_src));
04050c69 1896
f3ce87a9
DE
1897 emit_move_insn (src, orig_src);
1898 }
1899
aac5cc16
RH
1900 /* Optimize the access just a bit. */
1901 if (GET_CODE (src) == MEM
6e985040
AM
1902 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1903 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
729a2125 1904 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16
RH
1905 && bytelen == GET_MODE_SIZE (mode))
1906 {
1907 tmps[i] = gen_reg_rtx (mode);
f4ef873c 1908 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
fffa9c1d 1909 }
7c4a6db0
JW
1910 else if (GET_CODE (src) == CONCAT)
1911 {
015b1ad1
JDA
1912 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1913 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1914
1915 if ((bytepos == 0 && bytelen == slen0)
1916 || (bytepos != 0 && bytepos + bytelen <= slen))
cbb92744 1917 {
015b1ad1
JDA
1918 /* The following assumes that the concatenated objects all
1919 have the same size. In this case, a simple calculation
1920 can be used to determine the object and the bit field
1921 to be extracted. */
1922 tmps[i] = XEXP (src, bytepos / slen0);
cbb92744
JJ
1923 if (! CONSTANT_P (tmps[i])
1924 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
1925 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
015b1ad1
JDA
1926 (bytepos % slen0) * BITS_PER_UNIT,
1927 1, NULL_RTX, mode, mode, ssize);
cbb92744 1928 }
58f69841
JH
1929 else if (bytepos == 0)
1930 {
015b1ad1 1931 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
58f69841 1932 emit_move_insn (mem, src);
04050c69 1933 tmps[i] = adjust_address (mem, mode, 0);
58f69841 1934 }
7c4a6db0
JW
1935 else
1936 abort ();
1937 }
9c0631a7
AH
1938 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
 1939 SIMD register, which is currently broken. Until we get GCC
 1940 to emit proper RTL for these cases, dump to memory. */
1941 else if (VECTOR_MODE_P (GET_MODE (dst))
1942 && GET_CODE (src) == REG)
1943 {
1944 int slen = GET_MODE_SIZE (GET_MODE (src));
1945 rtx mem;
1946
1947 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1948 emit_move_insn (mem, src);
1949 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1950 }
f3ce87a9 1951 else if (CONSTANT_P (src)
2ee5437b
RH
1952 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1953 tmps[i] = src;
fffa9c1d 1954 else
19caa751
RK
1955 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1956 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
04050c69 1957 mode, mode, ssize);
fffa9c1d 1958
6e985040 1959 if (shift)
19caa751
RK
1960 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1961 tmps[i], 0, OPTAB_WIDEN);
fffa9c1d 1962 }
19caa751 1963
3a94c984 1964 emit_queue ();
aac5cc16
RH
1965
1966 /* Copy the extracted pieces into the proper (probable) hard regs. */
1967 for (i = start; i < XVECLEN (dst, 0); i++)
1968 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
fffa9c1d
JW
1969}
1970
084a1106
JDA
1971/* Emit code to move a block SRC to block DST, where SRC and DST are
1972 non-consecutive groups of registers, each represented by a PARALLEL. */
1973
1974void
502b8322 1975emit_group_move (rtx dst, rtx src)
084a1106
JDA
1976{
1977 int i;
1978
1979 if (GET_CODE (src) != PARALLEL
1980 || GET_CODE (dst) != PARALLEL
1981 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1982 abort ();
1983
1984 /* Skip first entry if NULL. */
1985 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1986 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1987 XEXP (XVECEXP (src, 0, i), 0));
1988}
1989
6e985040
AM
1990/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1991 where SRC is non-consecutive registers represented by a PARALLEL.
1992 SSIZE represents the total size of block ORIG_DST, or -1 if not
1993 known. */
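/* Illustrative summary (not from the original source): this is the inverse
   of emit_group_load -- each piece of SRC is first copied from its
   (probable) hard register into a pseudo and then stored into ORIG_DST at
   the byte offset recorded in the PARALLEL, preferring a plain move over
   store_bit_field when alignment and size allow.  */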
fffa9c1d
JW
1994
1995void
6e985040 1996emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
fffa9c1d 1997{
aac5cc16
RH
1998 rtx *tmps, dst;
1999 int start, i;
fffa9c1d 2000
aac5cc16 2001 if (GET_CODE (src) != PARALLEL)
fffa9c1d
JW
2002 abort ();
2003
2004 /* Check for a NULL entry, used to indicate that the parameter goes
2005 both on the stack and in registers. */
aac5cc16
RH
2006 if (XEXP (XVECEXP (src, 0, 0), 0))
2007 start = 0;
fffa9c1d 2008 else
aac5cc16
RH
2009 start = 1;
2010
703ad42b 2011 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
fffa9c1d 2012
aac5cc16
RH
2013 /* Copy the (probable) hard regs into pseudos. */
2014 for (i = start; i < XVECLEN (src, 0); i++)
fffa9c1d 2015 {
aac5cc16
RH
2016 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2017 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2018 emit_move_insn (tmps[i], reg);
2019 }
3a94c984 2020 emit_queue ();
fffa9c1d 2021
aac5cc16
RH
2022 /* If we won't be storing directly into memory, protect the real destination
2023 from strange tricks we might play. */
2024 dst = orig_dst;
10a9f2be
JW
2025 if (GET_CODE (dst) == PARALLEL)
2026 {
2027 rtx temp;
2028
2029 /* We can get a PARALLEL dst if there is a conditional expression in
2030 a return statement. In that case, the dst and src are the same,
2031 so no action is necessary. */
2032 if (rtx_equal_p (dst, src))
2033 return;
2034
2035 /* It is unclear if we can ever reach here, but we may as well handle
2036 it. Allocate a temporary, and split this into a store/load to/from
2037 the temporary. */
2038
2039 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
6e985040
AM
2040 emit_group_store (temp, src, type, ssize);
2041 emit_group_load (dst, temp, type, ssize);
10a9f2be
JW
2042 return;
2043 }
75897075 2044 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
aac5cc16
RH
2045 {
2046 dst = gen_reg_rtx (GET_MODE (orig_dst));
2047 /* Make life a bit easier for combine. */
8ae91fc0 2048 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
aac5cc16 2049 }
aac5cc16
RH
2050
2051 /* Process the pieces. */
2052 for (i = start; i < XVECLEN (src, 0); i++)
2053 {
770ae6cc 2054 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
aac5cc16 2055 enum machine_mode mode = GET_MODE (tmps[i]);
770ae6cc 2056 unsigned int bytelen = GET_MODE_SIZE (mode);
6ddae612 2057 rtx dest = dst;
aac5cc16
RH
2058
2059 /* Handle trailing fragments that run over the size of the struct. */
8752c357 2060 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
71bc0330 2061 {
6e985040
AM
2062 /* store_bit_field always takes its value from the lsb.
2063 Move the fragment to the lsb if it's not already there. */
2064 if (
2065#ifdef BLOCK_REG_PADDING
2066 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2067 == (BYTES_BIG_ENDIAN ? upward : downward)
2068#else
2069 BYTES_BIG_ENDIAN
2070#endif
2071 )
aac5cc16
RH
2072 {
2073 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2074 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2075 tmps[i], 0, OPTAB_WIDEN);
2076 }
2077 bytelen = ssize - bytepos;
71bc0330 2078 }
fffa9c1d 2079
6ddae612
JJ
2080 if (GET_CODE (dst) == CONCAT)
2081 {
2082 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2083 dest = XEXP (dst, 0);
2084 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2085 {
2086 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2087 dest = XEXP (dst, 1);
2088 }
0d446150
JH
2089 else if (bytepos == 0 && XVECLEN (src, 0))
2090 {
2091 dest = assign_stack_temp (GET_MODE (dest),
2092 GET_MODE_SIZE (GET_MODE (dest)), 0);
2093 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2094 tmps[i]);
2095 dst = dest;
2096 break;
2097 }
6ddae612
JJ
2098 else
2099 abort ();
2100 }
2101
aac5cc16 2102 /* Optimize the access just a bit. */
6ddae612 2103 if (GET_CODE (dest) == MEM
6e985040
AM
2104 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2105 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
729a2125 2106 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16 2107 && bytelen == GET_MODE_SIZE (mode))
6ddae612 2108 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
aac5cc16 2109 else
6ddae612 2110 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
04050c69 2111 mode, tmps[i], ssize);
fffa9c1d 2112 }
729a2125 2113
3a94c984 2114 emit_queue ();
aac5cc16
RH
2115
2116 /* Copy from the pseudo into the (probable) hard reg. */
0d446150 2117 if (orig_dst != dst)
aac5cc16 2118 emit_move_insn (orig_dst, dst);
fffa9c1d
JW
2119}
2120
c36fce9a
GRK
2121/* Generate code to copy a BLKmode object of TYPE out of a
2122 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2123 is null, a stack temporary is created. TGTBLK is returned.
2124
c988af2b
RS
2125 The purpose of this routine is to handle functions that return
2126 BLKmode structures in registers. Some machines (the PA for example)
2127 want to return all small structures in registers regardless of the
2128 structure's alignment. */
c36fce9a
GRK
2129
2130rtx
502b8322 2131copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
c36fce9a 2132{
19caa751
RK
2133 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2134 rtx src = NULL, dst = NULL;
2135 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
c988af2b 2136 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
19caa751
RK
2137
2138 if (tgtblk == 0)
2139 {
1da68f56
RK
2140 tgtblk = assign_temp (build_qualified_type (type,
2141 (TYPE_QUALS (type)
2142 | TYPE_QUAL_CONST)),
2143 0, 1, 1);
19caa751
RK
2144 preserve_temp_slots (tgtblk);
2145 }
3a94c984 2146
1ed1b4fb 2147 /* This code assumes srcreg is at least a full word. If it isn't, copy it
9ac3e73b 2148 into a new pseudo which is a full word. */
0d7839da 2149
19caa751
RK
2150 if (GET_MODE (srcreg) != BLKmode
2151 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
9ac3e73b 2152 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
19caa751 2153
c988af2b
RS
2154 /* If the structure doesn't take up a whole number of words, see whether
2155 SRCREG is padded on the left or on the right. If it's on the left,
2156 set PADDING_CORRECTION to the number of bits to skip.
2157
 2158 In most ABIs, the structure will be returned at the least significant end of
2159 the register, which translates to right padding on little-endian
2160 targets and left padding on big-endian targets. The opposite
2161 holds if the structure is returned at the most significant
2162 end of the register. */
2163 if (bytes % UNITS_PER_WORD != 0
2164 && (targetm.calls.return_in_msb (type)
2165 ? !BYTES_BIG_ENDIAN
2166 : BYTES_BIG_ENDIAN))
2167 padding_correction
19caa751
RK
2168 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
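  /* Worked example (illustrative, not from the original source): on a
     big-endian target with 32-bit words, a 6-byte structure gives
     bytes % UNITS_PER_WORD == 2, so PADDING_CORRECTION is
     32 - 2 * BITS_PER_UNIT == 16; the loop below starts XBITPOS at that
     value and thereby skips the 16 bits of left padding in SRCREG.  */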
2169
 2170 /* Copy the structure BITSIZE bits at a time.
3a94c984 2171
19caa751
RK
2172 We could probably emit more efficient code for machines which do not use
2173 strict alignment, but it doesn't seem worth the effort at the current
2174 time. */
c988af2b 2175 for (bitpos = 0, xbitpos = padding_correction;
19caa751
RK
2176 bitpos < bytes * BITS_PER_UNIT;
2177 bitpos += bitsize, xbitpos += bitsize)
2178 {
3a94c984 2179 /* We need a new source operand each time xbitpos is on a
c988af2b 2180 word boundary and when xbitpos == padding_correction
19caa751
RK
2181 (the first time through). */
2182 if (xbitpos % BITS_PER_WORD == 0
c988af2b 2183 || xbitpos == padding_correction)
b47f8cfc
JH
2184 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2185 GET_MODE (srcreg));
19caa751
RK
2186
2187 /* We need a new destination operand each time bitpos is on
2188 a word boundary. */
2189 if (bitpos % BITS_PER_WORD == 0)
2190 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
3a94c984 2191
19caa751
RK
2192 /* Use xbitpos for the source extraction (right justified) and
 2193 bitpos for the destination store (left justified). */
2194 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2195 extract_bit_field (src, bitsize,
2196 xbitpos % BITS_PER_WORD, 1,
2197 NULL_RTX, word_mode, word_mode,
04050c69
RK
2198 BITS_PER_WORD),
2199 BITS_PER_WORD);
19caa751
RK
2200 }
2201
2202 return tgtblk;
c36fce9a
GRK
2203}
2204
94b25f81
RK
2205/* Add a USE expression for REG to the (possibly empty) list pointed
2206 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
2207
2208void
502b8322 2209use_reg (rtx *call_fusage, rtx reg)
b3f8cf4a 2210{
0304dfbb
DE
2211 if (GET_CODE (reg) != REG
2212 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3a94c984 2213 abort ();
b3f8cf4a
RK
2214
2215 *call_fusage
38a448ca
RH
2216 = gen_rtx_EXPR_LIST (VOIDmode,
2217 gen_rtx_USE (VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
2218}
2219
94b25f81
RK
2220/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2221 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
2222
2223void
502b8322 2224use_regs (rtx *call_fusage, int regno, int nregs)
bbf6f052 2225{
0304dfbb 2226 int i;
bbf6f052 2227
0304dfbb
DE
2228 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2229 abort ();
2230
2231 for (i = 0; i < nregs; i++)
e50126e8 2232 use_reg (call_fusage, regno_reg_rtx[regno + i]);
bbf6f052 2233}
fffa9c1d
JW
2234
2235/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2236 PARALLEL REGS. This is for calls that pass values in multiple
2237 non-contiguous locations. The Irix 6 ABI has examples of this. */
2238
2239void
502b8322 2240use_group_regs (rtx *call_fusage, rtx regs)
fffa9c1d
JW
2241{
2242 int i;
2243
6bd35f86
DE
2244 for (i = 0; i < XVECLEN (regs, 0); i++)
2245 {
2246 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 2247
6bd35f86
DE
2248 /* A NULL entry means the parameter goes both on the stack and in
2249 registers. This can also be a MEM for targets that pass values
2250 partially on the stack and partially in registers. */
e9a25f70 2251 if (reg != 0 && GET_CODE (reg) == REG)
6bd35f86
DE
2252 use_reg (call_fusage, reg);
2253 }
fffa9c1d 2254}
bbf6f052 2255\f
57814e5e 2256
cf5124f6
RS
2257/* Determine whether the LEN bytes generated by CONSTFUN can be
2258 stored to memory using several move instructions. CONSTFUNDATA is
2259 a pointer which will be passed as argument in every CONSTFUN call.
2260 ALIGN is maximum alignment we can assume. Return nonzero if a
2261 call to store_by_pieces should succeed. */
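/* A minimal sketch of a CONSTFUN callback (hypothetical, not part of the
   original source): a callback describing an all-zero block ignores its
   arguments and returns const0_rtx for every piece, exactly as
   clear_by_pieces_1 further below does.  A caller can then use

       can_store_by_pieces (len, callback, NULL, align)

   to check that the block can be stored piecewise before calling
   store_by_pieces with the same callback.  */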
2262
57814e5e 2263int
502b8322
AJ
2264can_store_by_pieces (unsigned HOST_WIDE_INT len,
2265 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2266 void *constfundata, unsigned int align)
57814e5e 2267{
98166639 2268 unsigned HOST_WIDE_INT max_size, l;
57814e5e
JJ
2269 HOST_WIDE_INT offset = 0;
2270 enum machine_mode mode, tmode;
2271 enum insn_code icode;
2272 int reverse;
2273 rtx cst;
2274
2c430630
RS
2275 if (len == 0)
2276 return 1;
2277
4977bab6 2278 if (! STORE_BY_PIECES_P (len, align))
57814e5e
JJ
2279 return 0;
2280
2281 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2282 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2283 align = MOVE_MAX * BITS_PER_UNIT;
2284
2285 /* We would first store what we can in the largest integer mode, then go to
2286 successively smaller modes. */
2287
2288 for (reverse = 0;
2289 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2290 reverse++)
2291 {
2292 l = len;
2293 mode = VOIDmode;
cf5124f6 2294 max_size = STORE_MAX_PIECES + 1;
57814e5e
JJ
2295 while (max_size > 1)
2296 {
2297 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2298 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2299 if (GET_MODE_SIZE (tmode) < max_size)
2300 mode = tmode;
2301
2302 if (mode == VOIDmode)
2303 break;
2304
2305 icode = mov_optab->handlers[(int) mode].insn_code;
2306 if (icode != CODE_FOR_nothing
2307 && align >= GET_MODE_ALIGNMENT (mode))
2308 {
2309 unsigned int size = GET_MODE_SIZE (mode);
2310
2311 while (l >= size)
2312 {
2313 if (reverse)
2314 offset -= size;
2315
2316 cst = (*constfun) (constfundata, offset, mode);
2317 if (!LEGITIMATE_CONSTANT_P (cst))
2318 return 0;
2319
2320 if (!reverse)
2321 offset += size;
2322
2323 l -= size;
2324 }
2325 }
2326
2327 max_size = GET_MODE_SIZE (mode);
2328 }
2329
2330 /* The code above should have handled everything. */
2331 if (l != 0)
2332 abort ();
2333 }
2334
2335 return 1;
2336}
2337
2338/* Generate several move instructions to store LEN bytes generated by
2339 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2340 pointer which will be passed as argument in every CONSTFUN call.
8fd3cf4e
JJ
2341 ALIGN is maximum alignment we can assume.
 2342 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
 2343 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2344 stpcpy. */
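/* Illustrative example (not from the original source): after storing 5
   bytes at TO, ENDP == 1 yields the memory reference just past the last
   byte written (offset 5, the mempcpy convention), while ENDP == 2 yields
   the reference to the last byte itself (offset 4, the stpcpy
   convention).  */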
57814e5e 2345
8fd3cf4e 2346rtx
502b8322
AJ
2347store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2348 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2349 void *constfundata, unsigned int align, int endp)
57814e5e
JJ
2350{
2351 struct store_by_pieces data;
2352
2c430630
RS
2353 if (len == 0)
2354 {
2355 if (endp == 2)
2356 abort ();
2357 return to;
2358 }
2359
4977bab6 2360 if (! STORE_BY_PIECES_P (len, align))
57814e5e
JJ
2361 abort ();
2362 to = protect_from_queue (to, 1);
2363 data.constfun = constfun;
2364 data.constfundata = constfundata;
2365 data.len = len;
2366 data.to = to;
2367 store_by_pieces_1 (&data, align);
8fd3cf4e
JJ
2368 if (endp)
2369 {
2370 rtx to1;
2371
2372 if (data.reverse)
2373 abort ();
2374 if (data.autinc_to)
2375 {
2376 if (endp == 2)
2377 {
2378 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2379 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2380 else
2381 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2382 -1));
2383 }
2384 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2385 data.offset);
2386 }
2387 else
2388 {
2389 if (endp == 2)
2390 --data.offset;
2391 to1 = adjust_address (data.to, QImode, data.offset);
2392 }
2393 return to1;
2394 }
2395 else
2396 return data.to;
57814e5e
JJ
2397}
2398
19caa751
RK
2399/* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2400 rtx with BLKmode). The caller must pass TO through protect_from_queue
2401 before calling. ALIGN is maximum alignment we can assume. */
9de08200
RK
2402
2403static void
342e2b74 2404clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
9de08200 2405{
57814e5e
JJ
2406 struct store_by_pieces data;
2407
2c430630
RS
2408 if (len == 0)
2409 return;
2410
57814e5e 2411 data.constfun = clear_by_pieces_1;
df4ae160 2412 data.constfundata = NULL;
57814e5e
JJ
2413 data.len = len;
2414 data.to = to;
2415 store_by_pieces_1 (&data, align);
2416}
2417
2418/* Callback routine for clear_by_pieces.
2419 Return const0_rtx unconditionally. */
2420
2421static rtx
502b8322
AJ
2422clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2423 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2424 enum machine_mode mode ATTRIBUTE_UNUSED)
57814e5e
JJ
2425{
2426 return const0_rtx;
2427}
2428
2429/* Subroutine of clear_by_pieces and store_by_pieces.
2430 Generate several move instructions to store LEN bytes of block TO. (A MEM
2431 rtx with BLKmode). The caller must pass TO through protect_from_queue
2432 before calling. ALIGN is maximum alignment we can assume. */
2433
2434static void
502b8322
AJ
2435store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2436 unsigned int align ATTRIBUTE_UNUSED)
57814e5e
JJ
2437{
2438 rtx to_addr = XEXP (data->to, 0);
cf5124f6 2439 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
fbe1758d
AM
2440 enum machine_mode mode = VOIDmode, tmode;
2441 enum insn_code icode;
9de08200 2442
57814e5e
JJ
2443 data->offset = 0;
2444 data->to_addr = to_addr;
2445 data->autinc_to
9de08200
RK
2446 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2447 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2448
57814e5e
JJ
2449 data->explicit_inc_to = 0;
2450 data->reverse
9de08200 2451 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
57814e5e
JJ
2452 if (data->reverse)
2453 data->offset = data->len;
9de08200 2454
57814e5e 2455 /* If storing requires more than two move insns,
9de08200
RK
2456 copy addresses to registers (to make displacements shorter)
2457 and use post-increment if available. */
57814e5e
JJ
2458 if (!data->autinc_to
2459 && move_by_pieces_ninsns (data->len, align) > 2)
9de08200 2460 {
3a94c984 2461 /* Determine the main mode we'll be using. */
fbe1758d
AM
2462 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2463 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2464 if (GET_MODE_SIZE (tmode) < max_size)
2465 mode = tmode;
2466
57814e5e 2467 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
9de08200 2468 {
57814e5e
JJ
2469 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2470 data->autinc_to = 1;
2471 data->explicit_inc_to = -1;
9de08200 2472 }
3bdf5ad1 2473
57814e5e
JJ
2474 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2475 && ! data->autinc_to)
9de08200 2476 {
57814e5e
JJ
2477 data->to_addr = copy_addr_to_reg (to_addr);
2478 data->autinc_to = 1;
2479 data->explicit_inc_to = 1;
9de08200 2480 }
3bdf5ad1 2481
57814e5e
JJ
2482 if ( !data->autinc_to && CONSTANT_P (to_addr))
2483 data->to_addr = copy_addr_to_reg (to_addr);
9de08200
RK
2484 }
2485
e1565e65 2486 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
19caa751 2487 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
bdb429a5 2488 align = MOVE_MAX * BITS_PER_UNIT;
9de08200 2489
57814e5e 2490 /* First store what we can in the largest integer mode, then go to
9de08200
RK
2491 successively smaller modes. */
2492
2493 while (max_size > 1)
2494 {
9de08200
RK
2495 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2496 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2497 if (GET_MODE_SIZE (tmode) < max_size)
2498 mode = tmode;
2499
2500 if (mode == VOIDmode)
2501 break;
2502
2503 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 2504 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
57814e5e 2505 store_by_pieces_2 (GEN_FCN (icode), mode, data);
9de08200
RK
2506
2507 max_size = GET_MODE_SIZE (mode);
2508 }
2509
2510 /* The code above should have handled everything. */
57814e5e 2511 if (data->len != 0)
9de08200
RK
2512 abort ();
2513}
2514
57814e5e 2515/* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
9de08200
RK
2516 with move instructions for mode MODE. GENFUN is the gen_... function
2517 to make a move insn for that mode. DATA has all the other info. */
2518
2519static void
502b8322
AJ
2520store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2521 struct store_by_pieces *data)
9de08200 2522{
3bdf5ad1 2523 unsigned int size = GET_MODE_SIZE (mode);
57814e5e 2524 rtx to1, cst;
9de08200
RK
2525
2526 while (data->len >= size)
2527 {
3bdf5ad1
RK
2528 if (data->reverse)
2529 data->offset -= size;
9de08200 2530
3bdf5ad1 2531 if (data->autinc_to)
630036c6
JJ
2532 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2533 data->offset);
3a94c984 2534 else
f4ef873c 2535 to1 = adjust_address (data->to, mode, data->offset);
9de08200 2536
940da324 2537 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
57814e5e
JJ
2538 emit_insn (gen_add2_insn (data->to_addr,
2539 GEN_INT (-(HOST_WIDE_INT) size)));
9de08200 2540
57814e5e
JJ
2541 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2542 emit_insn ((*genfun) (to1, cst));
3bdf5ad1 2543
940da324 2544 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
9de08200 2545 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
9de08200 2546
3bdf5ad1
RK
2547 if (! data->reverse)
2548 data->offset += size;
9de08200
RK
2549
2550 data->len -= size;
2551 }
2552}
2553\f
19caa751 2554/* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
8ac61af7 2555 its length in bytes. */
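/* Illustrative summary (not from the original source): the cheapest
   strategy is tried first -- a single move of CONST0_RTX when OBJECT has
   a non-BLK mode whose size equals SIZE, then clear_by_pieces for small
   constant sizes, then a clrstr machine pattern, and finally a library
   call to memset (or bzero) via clear_storage_via_libcall.  */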
e9a25f70
JL
2556
2557rtx
502b8322 2558clear_storage (rtx object, rtx size)
bbf6f052 2559{
e9a25f70 2560 rtx retval = 0;
8ac61af7
RK
2561 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2562 : GET_MODE_ALIGNMENT (GET_MODE (object)));
e9a25f70 2563
fcf1b822
RK
2564 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2565 just move a zero. Otherwise, do this a piece at a time. */
69ef87e2 2566 if (GET_MODE (object) != BLKmode
fcf1b822 2567 && GET_CODE (size) == CONST_INT
4ca79136 2568 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
fcf1b822
RK
2569 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2570 else
bbf6f052 2571 {
9de08200
RK
2572 object = protect_from_queue (object, 1);
2573 size = protect_from_queue (size, 0);
2574
6972c506 2575 if (size == const0_rtx)
2c430630
RS
2576 ;
2577 else if (GET_CODE (size) == CONST_INT
78762e3b 2578 && CLEAR_BY_PIECES_P (INTVAL (size), align))
9de08200 2579 clear_by_pieces (object, INTVAL (size), align);
4ca79136
RH
2580 else if (clear_storage_via_clrstr (object, size, align))
2581 ;
9de08200 2582 else
4ca79136
RH
2583 retval = clear_storage_via_libcall (object, size);
2584 }
2585
2586 return retval;
2587}
2588
2589/* A subroutine of clear_storage. Expand a clrstr pattern;
2590 return true if successful. */
2591
2592static bool
502b8322 2593clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
4ca79136
RH
2594{
2595 /* Try the most limited insn first, because there's no point
2596 including more than one in the machine description unless
2597 the more limited one has some advantage. */
2598
2599 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2600 enum machine_mode mode;
2601
2602 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2603 mode = GET_MODE_WIDER_MODE (mode))
2604 {
2605 enum insn_code code = clrstr_optab[(int) mode];
2606 insn_operand_predicate_fn pred;
2607
2608 if (code != CODE_FOR_nothing
2609 /* We don't need MODE to be narrower than
2610 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2611 the mode mask, as it is returned by the macro, it will
2612 definitely be less than the actual mode mask. */
2613 && ((GET_CODE (size) == CONST_INT
2614 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2615 <= (GET_MODE_MASK (mode) >> 1)))
2616 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2617 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2618 || (*pred) (object, BLKmode))
2619 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2620 || (*pred) (opalign, VOIDmode)))
9de08200 2621 {
4ca79136
RH
2622 rtx op1;
2623 rtx last = get_last_insn ();
2624 rtx pat;
9de08200 2625
4ca79136
RH
2626 op1 = convert_to_mode (mode, size, 1);
2627 pred = insn_data[(int) code].operand[1].predicate;
2628 if (pred != 0 && ! (*pred) (op1, mode))
2629 op1 = copy_to_mode_reg (mode, op1);
9de08200 2630
4ca79136
RH
2631 pat = GEN_FCN ((int) code) (object, op1, opalign);
2632 if (pat)
9de08200 2633 {
4ca79136
RH
2634 emit_insn (pat);
2635 return true;
2636 }
2637 else
2638 delete_insns_since (last);
2639 }
2640 }
9de08200 2641
4ca79136
RH
2642 return false;
2643}
9de08200 2644
4ca79136
RH
2645/* A subroutine of clear_storage. Expand a call to memset or bzero.
2646 Return the return value of memset, 0 otherwise. */
9de08200 2647
4ca79136 2648static rtx
502b8322 2649clear_storage_via_libcall (rtx object, rtx size)
4ca79136
RH
2650{
2651 tree call_expr, arg_list, fn, object_tree, size_tree;
2652 enum machine_mode size_mode;
2653 rtx retval;
9de08200 2654
4ca79136 2655 /* OBJECT or SIZE may have been passed through protect_from_queue.
52cf7115 2656
4ca79136
RH
2657 It is unsafe to save the value generated by protect_from_queue
2658 and reuse it later. Consider what happens if emit_queue is
2659 called before the return value from protect_from_queue is used.
52cf7115 2660
4ca79136
RH
2661 Expansion of the CALL_EXPR below will call emit_queue before
2662 we are finished emitting RTL for argument setup. So if we are
2663 not careful we could get the wrong value for an argument.
52cf7115 2664
4ca79136
RH
2665 To avoid this problem we go ahead and emit code to copy OBJECT
2666 and SIZE into new pseudos. We can then place those new pseudos
2667 into an RTL_EXPR and use them later, even after a call to
2668 emit_queue.
52cf7115 2669
4ca79136
RH
2670 Note this is not strictly needed for library calls since they
2671 do not call emit_queue before loading their arguments. However,
2672 we may need to have library calls call emit_queue in the future
2673 since failing to do so could cause problems for targets which
2674 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
52cf7115 2675
4ca79136 2676 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
52cf7115 2677
4ca79136
RH
2678 if (TARGET_MEM_FUNCTIONS)
2679 size_mode = TYPE_MODE (sizetype);
2680 else
2681 size_mode = TYPE_MODE (unsigned_type_node);
2682 size = convert_to_mode (size_mode, size, 1);
2683 size = copy_to_mode_reg (size_mode, size);
52cf7115 2684
4ca79136
RH
2685 /* It is incorrect to use the libcall calling conventions to call
2686 memset in this context. This could be a user call to memset and
2687 the user may wish to examine the return value from memset. For
2688 targets where libcalls and normal calls have different conventions
2689 for returning pointers, we could end up generating incorrect code.
4bc973ae 2690
4ca79136 2691 For convenience, we generate the call to bzero this way as well. */
4bc973ae 2692
4ca79136
RH
2693 object_tree = make_tree (ptr_type_node, object);
2694 if (TARGET_MEM_FUNCTIONS)
2695 size_tree = make_tree (sizetype, size);
2696 else
2697 size_tree = make_tree (unsigned_type_node, size);
2698
2699 fn = clear_storage_libcall_fn (true);
2700 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2701 if (TARGET_MEM_FUNCTIONS)
2702 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2703 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2704
2705 /* Now we have to build up the CALL_EXPR itself. */
2706 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2707 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2708 call_expr, arg_list, NULL_TREE);
4ca79136
RH
2709
2710 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2711
2712 /* If we are initializing a readonly value, show the above call
2713 clobbered it. Otherwise, a load from it may erroneously be
2714 hoisted from a loop. */
2715 if (RTX_UNCHANGING_P (object))
2716 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2717
2718 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
2719}
2720
2721/* A subroutine of clear_storage_via_libcall. Create the tree node
2722 for the function we use for block clears. The first time FOR_CALL
2723 is true, we call assemble_external. */
2724
2725static GTY(()) tree block_clear_fn;
66c60e67 2726
9661b15f 2727void
502b8322 2728init_block_clear_fn (const char *asmspec)
4ca79136 2729{
9661b15f 2730 if (!block_clear_fn)
4ca79136 2731 {
9661b15f
JJ
2732 tree fn, args;
2733
4ca79136
RH
2734 if (TARGET_MEM_FUNCTIONS)
2735 {
2736 fn = get_identifier ("memset");
2737 args = build_function_type_list (ptr_type_node, ptr_type_node,
2738 integer_type_node, sizetype,
2739 NULL_TREE);
2740 }
2741 else
2742 {
2743 fn = get_identifier ("bzero");
2744 args = build_function_type_list (void_type_node, ptr_type_node,
2745 unsigned_type_node, NULL_TREE);
9de08200 2746 }
4ca79136
RH
2747
2748 fn = build_decl (FUNCTION_DECL, fn, args);
2749 DECL_EXTERNAL (fn) = 1;
2750 TREE_PUBLIC (fn) = 1;
2751 DECL_ARTIFICIAL (fn) = 1;
2752 TREE_NOTHROW (fn) = 1;
2753
2754 block_clear_fn = fn;
bbf6f052 2755 }
e9a25f70 2756
9661b15f
JJ
2757 if (asmspec)
2758 {
2759 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2760 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2761 }
2762}
2763
2764static tree
502b8322 2765clear_storage_libcall_fn (int for_call)
9661b15f
JJ
2766{
2767 static bool emitted_extern;
2768
2769 if (!block_clear_fn)
2770 init_block_clear_fn (NULL);
2771
4ca79136
RH
2772 if (for_call && !emitted_extern)
2773 {
2774 emitted_extern = true;
9661b15f
JJ
2775 make_decl_rtl (block_clear_fn, NULL);
2776 assemble_external (block_clear_fn);
4ca79136 2777 }
bbf6f052 2778
9661b15f 2779 return block_clear_fn;
4ca79136
RH
2780}
2781\f
bbf6f052
RK
2782/* Generate code to copy Y into X.
2783 Both Y and X must have the same mode, except that
2784 Y can be a constant with VOIDmode.
2785 This mode cannot be BLKmode; use emit_block_move for that.
2786
2787 Return the last instruction emitted. */
2788
2789rtx
502b8322 2790emit_move_insn (rtx x, rtx y)
bbf6f052
RK
2791{
2792 enum machine_mode mode = GET_MODE (x);
de1b33dd 2793 rtx y_cst = NULL_RTX;
0c19a26f 2794 rtx last_insn, set;
bbf6f052
RK
2795
2796 x = protect_from_queue (x, 1);
2797 y = protect_from_queue (y, 0);
2798
2799 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2800 abort ();
2801
ee5332b8
RH
2802 /* Never force constant_p_rtx to memory. */
2803 if (GET_CODE (y) == CONSTANT_P_RTX)
2804 ;
51286de6 2805 else if (CONSTANT_P (y))
de1b33dd 2806 {
51286de6 2807 if (optimize
075fc17a 2808 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
51286de6
RH
2809 && (last_insn = compress_float_constant (x, y)))
2810 return last_insn;
2811
0c19a26f
RS
2812 y_cst = y;
2813
51286de6
RH
2814 if (!LEGITIMATE_CONSTANT_P (y))
2815 {
51286de6 2816 y = force_const_mem (mode, y);
3a04ff64
RH
2817
2818 /* If the target's cannot_force_const_mem prevented the spill,
2819 assume that the target's move expanders will also take care
2820 of the non-legitimate constant. */
2821 if (!y)
2822 y = y_cst;
51286de6 2823 }
de1b33dd 2824 }
bbf6f052
RK
2825
2826 /* If X or Y are memory references, verify that their addresses are valid
2827 for the machine. */
2828 if (GET_CODE (x) == MEM
2829 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2830 && ! push_operand (x, GET_MODE (x)))
2831 || (flag_force_addr
2832 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
792760b9 2833 x = validize_mem (x);
bbf6f052
RK
2834
2835 if (GET_CODE (y) == MEM
2836 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2837 || (flag_force_addr
2838 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
792760b9 2839 y = validize_mem (y);
bbf6f052
RK
2840
2841 if (mode == BLKmode)
2842 abort ();
2843
de1b33dd
AO
2844 last_insn = emit_move_insn_1 (x, y);
2845
0c19a26f
RS
2846 if (y_cst && GET_CODE (x) == REG
2847 && (set = single_set (last_insn)) != NULL_RTX
2848 && SET_DEST (set) == x
2849 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3d238248 2850 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
de1b33dd
AO
2851
2852 return last_insn;
261c4230
RS
2853}
2854
2855/* Low level part of emit_move_insn.
2856 Called just like emit_move_insn, but assumes X and Y
2857 are basically valid. */
2858
2859rtx
502b8322 2860emit_move_insn_1 (rtx x, rtx y)
261c4230
RS
2861{
2862 enum machine_mode mode = GET_MODE (x);
2863 enum machine_mode submode;
2864 enum mode_class class = GET_MODE_CLASS (mode);
261c4230 2865
dbbbbf3b 2866 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3a94c984 2867 abort ();
76bbe028 2868
bbf6f052
RK
2869 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2870 return
2871 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2872
89742723 2873 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 2874 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
27e58a70 2875 && BLKmode != (submode = GET_MODE_INNER (mode))
7308a047
RS
2876 && (mov_optab->handlers[(int) submode].insn_code
2877 != CODE_FOR_nothing))
2878 {
2879 /* Don't split destination if it is a stack push. */
2880 int stack = push_operand (x, GET_MODE (x));
7308a047 2881
79ce92d7 2882#ifdef PUSH_ROUNDING
0e9cbd11
KH
2883 /* In case we output to the stack, but the size is smaller than the
2884 machine can push exactly, we need to use move instructions. */
1a06f5fe 2885 if (stack
bb93b973
RK
2886 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2887 != GET_MODE_SIZE (submode)))
1a06f5fe
JH
2888 {
2889 rtx temp;
bb93b973 2890 HOST_WIDE_INT offset1, offset2;
1a06f5fe
JH
2891
2892 /* Do not use anti_adjust_stack, since we don't want to update
2893 stack_pointer_delta. */
2894 temp = expand_binop (Pmode,
2895#ifdef STACK_GROWS_DOWNWARD
2896 sub_optab,
2897#else
2898 add_optab,
2899#endif
2900 stack_pointer_rtx,
2901 GEN_INT
bb93b973
RK
2902 (PUSH_ROUNDING
2903 (GET_MODE_SIZE (GET_MODE (x)))),
2904 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2905
1a06f5fe
JH
2906 if (temp != stack_pointer_rtx)
2907 emit_move_insn (stack_pointer_rtx, temp);
bb93b973 2908
1a06f5fe
JH
2909#ifdef STACK_GROWS_DOWNWARD
2910 offset1 = 0;
2911 offset2 = GET_MODE_SIZE (submode);
2912#else
2913 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2914 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2915 + GET_MODE_SIZE (submode));
2916#endif
bb93b973 2917
1a06f5fe
JH
2918 emit_move_insn (change_address (x, submode,
2919 gen_rtx_PLUS (Pmode,
2920 stack_pointer_rtx,
2921 GEN_INT (offset1))),
2922 gen_realpart (submode, y));
2923 emit_move_insn (change_address (x, submode,
2924 gen_rtx_PLUS (Pmode,
2925 stack_pointer_rtx,
2926 GEN_INT (offset2))),
2927 gen_imagpart (submode, y));
2928 }
e9c0bd54 2929 else
79ce92d7 2930#endif
7308a047
RS
 2931 /* If this is a stack push, push the highpart first, so it
2932 will be in the argument order.
2933
2934 In that case, change_address is used only to convert
2935 the mode, not to change the address. */
e9c0bd54 2936 if (stack)
c937357e 2937 {
e33c0d66
RS
2938 /* Note that the real part always precedes the imag part in memory
2939 regardless of machine's endianness. */
c937357e 2940#ifdef STACK_GROWS_DOWNWARD
a79b3dc7
RS
2941 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2942 gen_imagpart (submode, y));
2943 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2944 gen_realpart (submode, y));
c937357e 2945#else
a79b3dc7
RS
2946 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2947 gen_realpart (submode, y));
2948 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2949 gen_imagpart (submode, y));
c937357e
RS
2950#endif
2951 }
2952 else
2953 {
235ae7be
DM
2954 rtx realpart_x, realpart_y;
2955 rtx imagpart_x, imagpart_y;
2956
405f63da
MM
2957 /* If this is a complex value with each part being smaller than a
2958 word, the usual calling sequence will likely pack the pieces into
2959 a single register. Unfortunately, SUBREG of hard registers only
2960 deals in terms of words, so we have a problem converting input
2961 arguments to the CONCAT of two registers that is used elsewhere
2962 for complex values. If this is before reload, we can copy it into
2963 memory and reload. FIXME, we should see about using extract and
2964 insert on integer registers, but complex short and complex char
2965 variables should be rarely used. */
3a94c984 2966 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
405f63da
MM
2967 && (reload_in_progress | reload_completed) == 0)
2968 {
bb93b973
RK
2969 int packed_dest_p
2970 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2971 int packed_src_p
2972 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
405f63da
MM
2973
2974 if (packed_dest_p || packed_src_p)
2975 {
2976 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2977 ? MODE_FLOAT : MODE_INT);
2978
1da68f56
RK
2979 enum machine_mode reg_mode
2980 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
405f63da
MM
2981
2982 if (reg_mode != BLKmode)
2983 {
2984 rtx mem = assign_stack_temp (reg_mode,
2985 GET_MODE_SIZE (mode), 0);
f4ef873c 2986 rtx cmem = adjust_address (mem, mode, 0);
405f63da 2987
1da68f56
RK
2988 cfun->cannot_inline
2989 = N_("function using short complex types cannot be inline");
405f63da
MM
2990
2991 if (packed_dest_p)
2992 {
2993 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
bb93b973 2994
405f63da
MM
2995 emit_move_insn_1 (cmem, y);
2996 return emit_move_insn_1 (sreg, mem);
2997 }
2998 else
2999 {
3000 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
bb93b973 3001
405f63da
MM
3002 emit_move_insn_1 (mem, sreg);
3003 return emit_move_insn_1 (x, cmem);
3004 }
3005 }
3006 }
3007 }
3008
235ae7be
DM
3009 realpart_x = gen_realpart (submode, x);
3010 realpart_y = gen_realpart (submode, y);
3011 imagpart_x = gen_imagpart (submode, x);
3012 imagpart_y = gen_imagpart (submode, y);
3013
3014 /* Show the output dies here. This is necessary for SUBREGs
3015 of pseudos since we cannot track their lifetimes correctly;
c14c6529
RH
3016 hard regs shouldn't appear here except as return values.
3017 We never want to emit such a clobber after reload. */
3018 if (x != y
235ae7be
DM
3019 && ! (reload_in_progress || reload_completed)
3020 && (GET_CODE (realpart_x) == SUBREG
3021 || GET_CODE (imagpart_x) == SUBREG))
bb93b973 3022 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2638126a 3023
a79b3dc7
RS
3024 emit_move_insn (realpart_x, realpart_y);
3025 emit_move_insn (imagpart_x, imagpart_y);
c937357e 3026 }
7308a047 3027
7a1ab50a 3028 return get_last_insn ();
7308a047
RS
3029 }
3030
a3600c71
HPN
3031 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3032 find a mode to do it in. If we have a movcc, use it. Otherwise,
3033 find the MODE_INT mode of the same width. */
3034 else if (GET_MODE_CLASS (mode) == MODE_CC
3035 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3036 {
3037 enum insn_code insn_code;
3038 enum machine_mode tmode = VOIDmode;
3039 rtx x1 = x, y1 = y;
3040
3041 if (mode != CCmode
3042 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3043 tmode = CCmode;
3044 else
3045 for (tmode = QImode; tmode != VOIDmode;
3046 tmode = GET_MODE_WIDER_MODE (tmode))
3047 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3048 break;
3049
3050 if (tmode == VOIDmode)
3051 abort ();
3052
3053 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3054 may call change_address which is not appropriate if we were
3055 called when a reload was in progress. We don't have to worry
3056 about changing the address since the size in bytes is supposed to
3057 be the same. Copy the MEM to change the mode and move any
3058 substitutions from the old MEM to the new one. */
3059
3060 if (reload_in_progress)
3061 {
3062 x = gen_lowpart_common (tmode, x1);
3063 if (x == 0 && GET_CODE (x1) == MEM)
3064 {
3065 x = adjust_address_nv (x1, tmode, 0);
3066 copy_replacements (x1, x);
3067 }
3068
3069 y = gen_lowpart_common (tmode, y1);
3070 if (y == 0 && GET_CODE (y1) == MEM)
3071 {
3072 y = adjust_address_nv (y1, tmode, 0);
3073 copy_replacements (y1, y);
3074 }
3075 }
3076 else
3077 {
3078 x = gen_lowpart (tmode, x);
3079 y = gen_lowpart (tmode, y);
3080 }
502b8322 3081
a3600c71
HPN
3082 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3083 return emit_insn (GEN_FCN (insn_code) (x, y));
3084 }
3085
5581fc91
RS
3086 /* Try using a move pattern for the corresponding integer mode. This is
3087 only safe when simplify_subreg can convert MODE constants into integer
3088 constants. At present, it can only do this reliably if the value
3089 fits within a HOST_WIDE_INT. */
3090 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3091 && (submode = int_mode_for_mode (mode)) != BLKmode
3092 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3093 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3094 (simplify_gen_subreg (submode, x, mode, 0),
3095 simplify_gen_subreg (submode, y, mode, 0)));
3096
cffa2189
R
3097 /* This will handle any multi-word or full-word mode that lacks a move_insn
3098 pattern. However, you will get better code if you define such patterns,
bbf6f052 3099 even if they must turn into multiple assembler instructions. */
cffa2189 3100 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
bbf6f052
RK
3101 {
3102 rtx last_insn = 0;
3ef1eef4 3103 rtx seq, inner;
235ae7be 3104 int need_clobber;
bb93b973 3105 int i;
3a94c984 3106
a98c9f1a
RK
3107#ifdef PUSH_ROUNDING
3108
3109 /* If X is a push on the stack, do the push now and replace
3110 X with a reference to the stack pointer. */
3111 if (push_operand (x, GET_MODE (x)))
3112 {
918a6124
GK
3113 rtx temp;
3114 enum rtx_code code;
0fb7aeda 3115
918a6124
GK
3116 /* Do not use anti_adjust_stack, since we don't want to update
3117 stack_pointer_delta. */
3118 temp = expand_binop (Pmode,
3119#ifdef STACK_GROWS_DOWNWARD
3120 sub_optab,
3121#else
3122 add_optab,
3123#endif
3124 stack_pointer_rtx,
3125 GEN_INT
bb93b973
RK
3126 (PUSH_ROUNDING
3127 (GET_MODE_SIZE (GET_MODE (x)))),
a426c92e 3128 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
bb93b973 3129
0fb7aeda
KH
3130 if (temp != stack_pointer_rtx)
3131 emit_move_insn (stack_pointer_rtx, temp);
918a6124
GK
3132
3133 code = GET_CODE (XEXP (x, 0));
bb93b973 3134
918a6124
GK
3135 /* Just hope that small offsets off SP are OK. */
3136 if (code == POST_INC)
0fb7aeda 3137 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
bb93b973
RK
3138 GEN_INT (-((HOST_WIDE_INT)
3139 GET_MODE_SIZE (GET_MODE (x)))));
918a6124 3140 else if (code == POST_DEC)
0fb7aeda 3141 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
918a6124
GK
3142 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3143 else
3144 temp = stack_pointer_rtx;
3145
3146 x = change_address (x, VOIDmode, temp);
a98c9f1a
RK
3147 }
3148#endif
3a94c984 3149
3ef1eef4
RK
3150 /* If we are in reload, see if either operand is a MEM whose address
3151 is scheduled for replacement. */
3152 if (reload_in_progress && GET_CODE (x) == MEM
3153 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
f1ec5147 3154 x = replace_equiv_address_nv (x, inner);
3ef1eef4
RK
3155 if (reload_in_progress && GET_CODE (y) == MEM
3156 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
f1ec5147 3157 y = replace_equiv_address_nv (y, inner);
3ef1eef4 3158
235ae7be 3159 start_sequence ();
15a7a8ec 3160
235ae7be 3161 need_clobber = 0;
bbf6f052 3162 for (i = 0;
3a94c984 3163 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
bbf6f052
RK
3164 i++)
3165 {
3166 rtx xpart = operand_subword (x, i, 1, mode);
3167 rtx ypart = operand_subword (y, i, 1, mode);
3168
3169 /* If we can't get a part of Y, put Y into memory if it is a
3170 constant. Otherwise, force it into a register. If we still
3171 can't get a part of Y, abort. */
3172 if (ypart == 0 && CONSTANT_P (y))
3173 {
3174 y = force_const_mem (mode, y);
3175 ypart = operand_subword (y, i, 1, mode);
3176 }
3177 else if (ypart == 0)
3178 ypart = operand_subword_force (y, i, mode);
3179
3180 if (xpart == 0 || ypart == 0)
3181 abort ();
3182
235ae7be
DM
3183 need_clobber |= (GET_CODE (xpart) == SUBREG);
3184
bbf6f052
RK
3185 last_insn = emit_move_insn (xpart, ypart);
3186 }
6551fa4d 3187
2f937369 3188 seq = get_insns ();
235ae7be
DM
3189 end_sequence ();
3190
3191 /* Show the output dies here. This is necessary for SUBREGs
3192 of pseudos since we cannot track their lifetimes correctly;
3193 hard regs shouldn't appear here except as return values.
3194 We never want to emit such a clobber after reload. */
3195 if (x != y
3196 && ! (reload_in_progress || reload_completed)
3197 && need_clobber != 0)
bb93b973 3198 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
235ae7be
DM
3199
3200 emit_insn (seq);
3201
bbf6f052
RK
3202 return last_insn;
3203 }
3204 else
3205 abort ();
3206}
51286de6
RH
3207
3208/* If Y is representable exactly in a narrower mode, and the target can
3209 perform the extension directly from constant or memory, then emit the
3210 move as an extension. */
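/* Illustrative example (not from the original source): if Y is the DFmode
   constant 1.0, it truncates exactly to SFmode, so on a target that can
   extend SFmode to DFmode directly from a constant or from memory the
   move is emitted as an extension of the narrower constant instead of
   forcing the full DFmode constant into the constant pool.  */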
3211
3212static rtx
502b8322 3213compress_float_constant (rtx x, rtx y)
51286de6
RH
3214{
3215 enum machine_mode dstmode = GET_MODE (x);
3216 enum machine_mode orig_srcmode = GET_MODE (y);
3217 enum machine_mode srcmode;
3218 REAL_VALUE_TYPE r;
3219
3220 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3221
3222 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3223 srcmode != orig_srcmode;
3224 srcmode = GET_MODE_WIDER_MODE (srcmode))
3225 {
3226 enum insn_code ic;
3227 rtx trunc_y, last_insn;
3228
3229 /* Skip if the target can't extend this way. */
3230 ic = can_extend_p (dstmode, srcmode, 0);
3231 if (ic == CODE_FOR_nothing)
3232 continue;
3233
3234 /* Skip if the narrowed value isn't exact. */
3235 if (! exact_real_truncate (srcmode, &r))
3236 continue;
3237
3238 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3239
3240 if (LEGITIMATE_CONSTANT_P (trunc_y))
3241 {
3242 /* Skip if the target needs extra instructions to perform
3243 the extension. */
3244 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3245 continue;
3246 }
3247 else if (float_extend_from_mem[dstmode][srcmode])
3248 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3249 else
3250 continue;
3251
3252 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3253 last_insn = get_last_insn ();
3254
3255 if (GET_CODE (x) == REG)
0c19a26f 3256 set_unique_reg_note (last_insn, REG_EQUAL, y);
51286de6
RH
3257
3258 return last_insn;
3259 }
3260
3261 return NULL_RTX;
3262}
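/* A minimal sketch of the transformation above, assuming a target where
   (float_extend:DF (mem:SF ...)) is a single recognized insn: a move such as

       (set (reg:DF 100) (const_double:DF 1.0))

   can instead be emitted as

       (set (reg:DF 100) (float_extend:DF (mem:SF <SFmode constant pool entry for 1.0f>)))

   because 1.0 truncates to SFmode exactly; when the destination is a
   register, a REG_EQUAL note recording the original constant is attached
   to the last insn.  */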
bbf6f052
RK
3263\f
3264/* Pushing data onto the stack. */
3265
3266/* Push a block of length SIZE (perhaps variable)
3267 and return an rtx to address the beginning of the block.
3268 Note that it is not possible for the value returned to be a QUEUED.
3269 The value may be virtual_outgoing_args_rtx.
3270
3271 EXTRA is the number of bytes of padding to push in addition to SIZE.
3272 BELOW nonzero means this padding comes at low addresses;
3273 otherwise, the padding comes at high addresses. */
3274
3275rtx
502b8322 3276push_block (rtx size, int extra, int below)
bbf6f052 3277{
b3694847 3278 rtx temp;
88f63c77
RK
3279
3280 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
3281 if (CONSTANT_P (size))
3282 anti_adjust_stack (plus_constant (size, extra));
3283 else if (GET_CODE (size) == REG && extra == 0)
3284 anti_adjust_stack (size);
3285 else
3286 {
ce48579b 3287 temp = copy_to_mode_reg (Pmode, size);
bbf6f052 3288 if (extra != 0)
906c4e36 3289 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052 3290 temp, 0, OPTAB_LIB_WIDEN);
3291 anti_adjust_stack (temp);
3292 }
3293
f73ad30e 3294#ifndef STACK_GROWS_DOWNWARD
f73ad30e 3295 if (0)
f73ad30e
JH
3296#else
3297 if (1)
bbf6f052 3298#endif
f73ad30e 3299 {
f73ad30e
JH
3300 temp = virtual_outgoing_args_rtx;
3301 if (extra != 0 && below)
3302 temp = plus_constant (temp, extra);
3303 }
3304 else
3305 {
3306 if (GET_CODE (size) == CONST_INT)
3307 temp = plus_constant (virtual_outgoing_args_rtx,
3a94c984 3308 -INTVAL (size) - (below ? 0 : extra));
f73ad30e
JH
3309 else if (extra != 0 && !below)
3310 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3bdf5ad1 3311 negate_rtx (Pmode, plus_constant (size, extra)));
f73ad30e
JH
3312 else
3313 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3314 negate_rtx (Pmode, size));
3315 }
bbf6f052
RK
3316
3317 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3318}
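/* A worked example, assuming STACK_GROWS_DOWNWARD and a constant SIZE:
   push_block (GEN_INT (16), 8, 0) lowers the stack pointer by 24 bytes
   (16 for the block plus 8 bytes of EXTRA padding, which lands on the
   high-address side since BELOW is zero) and returns an address based on
   virtual_outgoing_args_rtx for the start of the block.  */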
3319
21d93687
RK
3320#ifdef PUSH_ROUNDING
3321
566aa174 3322/* Emit single push insn. */
21d93687 3323
566aa174 3324static void
502b8322 3325emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
566aa174 3326{
566aa174 3327 rtx dest_addr;
918a6124 3328 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
566aa174 3329 rtx dest;
371b8fc0
JH
3330 enum insn_code icode;
3331 insn_operand_predicate_fn pred;
566aa174 3332
371b8fc0
JH
3333 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
 3334 /* If there is a push pattern, use it.  Otherwise try the old way of handing
 3335 a MEM representing the push operation to the move expander. */
3336 icode = push_optab->handlers[(int) mode].insn_code;
3337 if (icode != CODE_FOR_nothing)
3338 {
3339 if (((pred = insn_data[(int) icode].operand[0].predicate)
505ddab6 3340 && !((*pred) (x, mode))))
371b8fc0 3341 x = force_reg (mode, x);
3342 emit_insn (GEN_FCN (icode) (x));
3343 return;
3344 }
566aa174
JH
3345 if (GET_MODE_SIZE (mode) == rounded_size)
3346 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
329d586f
KH
3347 /* If we are to pad downward, adjust the stack pointer first and
3348 then store X into the stack location using an offset. This is
3349 because emit_move_insn does not know how to pad; it does not have
3350 access to type. */
3351 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3352 {
3353 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3354 HOST_WIDE_INT offset;
3355
3356 emit_move_insn (stack_pointer_rtx,
3357 expand_binop (Pmode,
3358#ifdef STACK_GROWS_DOWNWARD
3359 sub_optab,
3360#else
3361 add_optab,
3362#endif
3363 stack_pointer_rtx,
3364 GEN_INT (rounded_size),
3365 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3366
3367 offset = (HOST_WIDE_INT) padding_size;
3368#ifdef STACK_GROWS_DOWNWARD
3369 if (STACK_PUSH_CODE == POST_DEC)
3370 /* We have already decremented the stack pointer, so get the
3371 previous value. */
3372 offset += (HOST_WIDE_INT) rounded_size;
3373#else
3374 if (STACK_PUSH_CODE == POST_INC)
3375 /* We have already incremented the stack pointer, so get the
3376 previous value. */
3377 offset -= (HOST_WIDE_INT) rounded_size;
3378#endif
3379 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3380 }
566aa174
JH
3381 else
3382 {
3383#ifdef STACK_GROWS_DOWNWARD
329d586f 3384 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
566aa174 3385 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
505ddab6 3386 GEN_INT (-(HOST_WIDE_INT) rounded_size));
566aa174 3387#else
329d586f 3388 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
566aa174
JH
3389 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3390 GEN_INT (rounded_size));
3391#endif
3392 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3393 }
3394
3395 dest = gen_rtx_MEM (mode, dest_addr);
3396
566aa174
JH
3397 if (type != 0)
3398 {
3399 set_mem_attributes (dest, type, 1);
c3d32120
RK
3400
3401 if (flag_optimize_sibling_calls)
3402 /* Function incoming arguments may overlap with sibling call
3403 outgoing arguments and we cannot allow reordering of reads
3404 from function arguments with stores to outgoing arguments
3405 of sibling calls. */
3406 set_mem_alias_set (dest, 0);
566aa174
JH
3407 }
3408 emit_move_insn (dest, x);
566aa174 3409}
21d93687 3410#endif
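/* A minimal sketch of what emit_single_push_insn produces, assuming a
   target where the stack grows downward, STACK_PUSH_CODE is PRE_DEC,
   there is no machine "push" pattern for the mode, and the mode needs no
   rounding: pushing a word-mode register ends up as the single move

       (set (mem:SI (pre_dec (reg sp))) (reg:SI 100))

   with the MEM given the argument type's attributes when TYPE is
   supplied.  */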
566aa174 3411
bbf6f052
RK
3412/* Generate code to push X onto the stack, assuming it has mode MODE and
3413 type TYPE.
3414 MODE is redundant except when X is a CONST_INT (since they don't
3415 carry mode info).
3416 SIZE is an rtx for the size of data to be copied (in bytes),
3417 needed only if X is BLKmode.
3418
f1eaaf73 3419 ALIGN (in bits) is maximum alignment we can assume.
bbf6f052 3420
cd048831
RK
3421 If PARTIAL and REG are both nonzero, then copy that many of the first
3422 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
3423 The amount of space pushed is decreased by PARTIAL words,
3424 rounded *down* to a multiple of PARM_BOUNDARY.
3425 REG must be a hard register in this case.
cd048831
RK
 3426 If REG is zero but PARTIAL is not, take all other actions for an
3427 argument partially in registers, but do not actually load any
3428 registers.
bbf6f052
RK
3429
3430 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 3431 This is ignored if an argument block has already been allocated.
bbf6f052
RK
3432
3433 On a machine that lacks real push insns, ARGS_ADDR is the address of
3434 the bottom of the argument block for this call. We use indexing off there
 3435 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3436 argument block has not been preallocated.
3437
e5e809f4
JL
3438 ARGS_SO_FAR is the size of args previously pushed for this call.
3439
3440 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3441 for arguments passed in registers. If nonzero, it will be the number
3442 of bytes required. */
bbf6f052
RK
3443
3444void
502b8322
AJ
3445emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3446 unsigned int align, int partial, rtx reg, int extra,
3447 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3448 rtx alignment_pad)
bbf6f052
RK
3449{
3450 rtx xinner;
3451 enum direction stack_direction
3452#ifdef STACK_GROWS_DOWNWARD
3453 = downward;
3454#else
3455 = upward;
3456#endif
3457
3458 /* Decide where to pad the argument: `downward' for below,
3459 `upward' for above, or `none' for don't pad it.
3460 Default is below for small data on big-endian machines; else above. */
3461 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3462
0fb7aeda 3463 /* Invert direction if stack is post-decrement.
9e0e11bf
GK
3464 FIXME: why? */
3465 if (STACK_PUSH_CODE == POST_DEC)
bbf6f052
RK
3466 if (where_pad != none)
3467 where_pad = (where_pad == downward ? upward : downward);
3468
3469 xinner = x = protect_from_queue (x, 0);
3470
3471 if (mode == BLKmode)
3472 {
3473 /* Copy a block into the stack, entirely or partially. */
3474
b3694847 3475 rtx temp;
bbf6f052
RK
3476 int used = partial * UNITS_PER_WORD;
3477 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3478 int skip;
3a94c984 3479
bbf6f052
RK
3480 if (size == 0)
3481 abort ();
3482
3483 used -= offset;
3484
3485 /* USED is now the # of bytes we need not copy to the stack
3486 because registers will take care of them. */
3487
3488 if (partial != 0)
f4ef873c 3489 xinner = adjust_address (xinner, BLKmode, used);
bbf6f052
RK
3490
3491 /* If the partial register-part of the arg counts in its stack size,
3492 skip the part of stack space corresponding to the registers.
3493 Otherwise, start copying to the beginning of the stack space,
3494 by setting SKIP to 0. */
e5e809f4 3495 skip = (reg_parm_stack_space == 0) ? 0 : used;
bbf6f052
RK
3496
3497#ifdef PUSH_ROUNDING
3498 /* Do it with several push insns if that doesn't take lots of insns
3499 and if there is no difficulty with push insns that skip bytes
3500 on the stack for alignment purposes. */
3501 if (args_addr == 0
f73ad30e 3502 && PUSH_ARGS
bbf6f052
RK
3503 && GET_CODE (size) == CONST_INT
3504 && skip == 0
f26aca6d 3505 && MEM_ALIGN (xinner) >= align
15914757 3506 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
bbf6f052
RK
3507 /* Here we avoid the case of a structure whose weak alignment
3508 forces many pushes of a small amount of data,
3509 and such small pushes do rounding that causes trouble. */
e1565e65 3510 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
19caa751 3511 || align >= BIGGEST_ALIGNMENT
f1eaaf73 3512 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3513 == (align / BITS_PER_UNIT)))
bbf6f052
RK
3514 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3515 {
3516 /* Push padding now if padding above and stack grows down,
3517 or if padding below and stack grows up.
3518 But if space already allocated, this has already been done. */
3519 if (extra && args_addr == 0
3520 && where_pad != none && where_pad != stack_direction)
906c4e36 3521 anti_adjust_stack (GEN_INT (extra));
bbf6f052 3522
8fd3cf4e 3523 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
bbf6f052
RK
3524 }
3525 else
3a94c984 3526#endif /* PUSH_ROUNDING */
bbf6f052 3527 {
7ab923cc
JJ
3528 rtx target;
3529
bbf6f052
RK
3530 /* Otherwise make space on the stack and copy the data
3531 to the address of that space. */
3532
3533 /* Deduct words put into registers from the size we must copy. */
3534 if (partial != 0)
3535 {
3536 if (GET_CODE (size) == CONST_INT)
906c4e36 3537 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
3538 else
3539 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
3540 GEN_INT (used), NULL_RTX, 0,
3541 OPTAB_LIB_WIDEN);
bbf6f052
RK
3542 }
3543
3544 /* Get the address of the stack space.
3545 In this case, we do not deal with EXTRA separately.
3546 A single stack adjust will do. */
3547 if (! args_addr)
3548 {
3549 temp = push_block (size, extra, where_pad == downward);
3550 extra = 0;
3551 }
3552 else if (GET_CODE (args_so_far) == CONST_INT)
3553 temp = memory_address (BLKmode,
3554 plus_constant (args_addr,
3555 skip + INTVAL (args_so_far)));
3556 else
3557 temp = memory_address (BLKmode,
38a448ca 3558 plus_constant (gen_rtx_PLUS (Pmode,
3559 args_addr,
3560 args_so_far),
bbf6f052 3561 skip));
4ca79136
RH
3562
3563 if (!ACCUMULATE_OUTGOING_ARGS)
3564 {
3565 /* If the source is referenced relative to the stack pointer,
3566 copy it to another register to stabilize it. We do not need
3567 to do this if we know that we won't be changing sp. */
3568
3569 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3570 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3571 temp = copy_to_reg (temp);
3572 }
3573
3a94c984 3574 target = gen_rtx_MEM (BLKmode, temp);
7ab923cc 3575
3a94c984
KH
3576 if (type != 0)
3577 {
3578 set_mem_attributes (target, type, 1);
3579 /* Function incoming arguments may overlap with sibling call
3580 outgoing arguments and we cannot allow reordering of reads
3581 from function arguments with stores to outgoing arguments
3582 of sibling calls. */
ba4828e0 3583 set_mem_alias_set (target, 0);
3a94c984 3584 }
4ca79136 3585
44bb111a
RH
3586 /* ALIGN may well be better aligned than TYPE, e.g. due to
3587 PARM_BOUNDARY. Assume the caller isn't lying. */
3588 set_mem_align (target, align);
4ca79136 3589
44bb111a 3590 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
bbf6f052
RK
3591 }
3592 }
3593 else if (partial > 0)
3594 {
3595 /* Scalar partly in registers. */
3596
3597 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3598 int i;
3599 int not_stack;
3600 /* # words of start of argument
3601 that we must make space for but need not store. */
3602 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3603 int args_offset = INTVAL (args_so_far);
3604 int skip;
3605
3606 /* Push padding now if padding above and stack grows down,
3607 or if padding below and stack grows up.
3608 But if space already allocated, this has already been done. */
3609 if (extra && args_addr == 0
3610 && where_pad != none && where_pad != stack_direction)
906c4e36 3611 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3612
3613 /* If we make space by pushing it, we might as well push
3614 the real data. Otherwise, we can leave OFFSET nonzero
3615 and leave the space uninitialized. */
3616 if (args_addr == 0)
3617 offset = 0;
3618
3619 /* Now NOT_STACK gets the number of words that we don't need to
3620 allocate on the stack. */
3621 not_stack = partial - offset;
3622
3623 /* If the partial register-part of the arg counts in its stack size,
3624 skip the part of stack space corresponding to the registers.
3625 Otherwise, start copying to the beginning of the stack space,
3626 by setting SKIP to 0. */
e5e809f4 3627 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
bbf6f052
RK
3628
3629 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3630 x = validize_mem (force_const_mem (mode, x));
3631
3632 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3633 SUBREGs of such registers are not allowed. */
3634 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3635 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3636 x = copy_to_reg (x);
3637
3638 /* Loop over all the words allocated on the stack for this arg. */
3639 /* We can do it by words, because any scalar bigger than a word
3640 has a size a multiple of a word. */
3641#ifndef PUSH_ARGS_REVERSED
3642 for (i = not_stack; i < size; i++)
3643#else
3644 for (i = size - 1; i >= not_stack; i--)
3645#endif
3646 if (i >= not_stack + offset)
3647 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36 3648 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3649 0, args_addr,
3650 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4 3651 * UNITS_PER_WORD)),
4fc026cd 3652 reg_parm_stack_space, alignment_pad);
bbf6f052
RK
3653 }
3654 else
3655 {
3656 rtx addr;
3bdf5ad1 3657 rtx dest;
bbf6f052
RK
3658
3659 /* Push padding now if padding above and stack grows down,
3660 or if padding below and stack grows up.
3661 But if space already allocated, this has already been done. */
3662 if (extra && args_addr == 0
3663 && where_pad != none && where_pad != stack_direction)
906c4e36 3664 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3665
3666#ifdef PUSH_ROUNDING
f73ad30e 3667 if (args_addr == 0 && PUSH_ARGS)
566aa174 3668 emit_single_push_insn (mode, x, type);
bbf6f052
RK
3669 else
3670#endif
921b3427
RK
3671 {
3672 if (GET_CODE (args_so_far) == CONST_INT)
3673 addr
3674 = memory_address (mode,
3a94c984 3675 plus_constant (args_addr,
921b3427 3676 INTVAL (args_so_far)));
3a94c984 3677 else
38a448ca
RH
3678 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3679 args_so_far));
566aa174
JH
3680 dest = gen_rtx_MEM (mode, addr);
3681 if (type != 0)
3682 {
3683 set_mem_attributes (dest, type, 1);
3684 /* Function incoming arguments may overlap with sibling call
3685 outgoing arguments and we cannot allow reordering of reads
3686 from function arguments with stores to outgoing arguments
3687 of sibling calls. */
ba4828e0 3688 set_mem_alias_set (dest, 0);
566aa174 3689 }
bbf6f052 3690
566aa174 3691 emit_move_insn (dest, x);
566aa174 3692 }
bbf6f052
RK
3693 }
3694
bbf6f052
RK
3695 /* If part should go in registers, copy that part
3696 into the appropriate registers. Do this now, at the end,
3697 since mem-to-mem copies above may do function calls. */
cd048831 3698 if (partial > 0 && reg != 0)
fffa9c1d
JW
3699 {
3700 /* Handle calls that pass values in multiple non-contiguous locations.
3701 The Irix 6 ABI has examples of this. */
3702 if (GET_CODE (reg) == PARALLEL)
6e985040 3703 emit_group_load (reg, x, type, -1);
fffa9c1d
JW
3704 else
3705 move_block_to_reg (REGNO (reg), x, partial, mode);
3706 }
bbf6f052
RK
3707
3708 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 3709 anti_adjust_stack (GEN_INT (extra));
3a94c984 3710
3ea2292a 3711 if (alignment_pad && args_addr == 0)
4fc026cd 3712 anti_adjust_stack (alignment_pad);
bbf6f052
RK
3713}
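/* A worked example of the PARTIAL handling above, assuming a 32-bit
   target: for a 12-byte BLKmode argument with PARTIAL == 1 and REG a
   hard register, the first 4 bytes are skipped when copying to the
   stack (USED == 4), the remaining 8 bytes are pushed or stored into
   the argument block, and only at the very end is the first word loaded
   into REG, after any block moves that might have called memcpy-like
   library routines.  */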
3714\f
296b4ed9
RK
3715/* Return X if X can be used as a subtarget in a sequence of arithmetic
3716 operations. */
3717
3718static rtx
502b8322 3719get_subtarget (rtx x)
296b4ed9
RK
3720{
3721 return ((x == 0
3722 /* Only registers can be subtargets. */
3723 || GET_CODE (x) != REG
3724 /* If the register is readonly, it can't be set more than once. */
3725 || RTX_UNCHANGING_P (x)
3726 /* Don't use hard regs to avoid extending their life. */
3727 || REGNO (x) < FIRST_PSEUDO_REGISTER
3728 /* Avoid subtargets inside loops,
3729 since they hide some invariant expressions. */
3730 || preserve_subexpressions_p ())
3731 ? 0 : x);
3732}
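/* A sketch of typical use, with the call site hypothetical: in
   expand_expr (exp, get_subtarget (target), ...), anything that is not a
   plain pseudo register (a MEM, a hard register, an unchanging register)
   yields 0 here, so intermediate arithmetic is computed into a fresh
   pseudo rather than repeatedly updating the user-visible location.  */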
3733
bbf6f052
RK
3734/* Expand an assignment that stores the value of FROM into TO.
3735 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
3736 (This may contain a QUEUED rtx;
3737 if the value is constant, this rtx is a constant.)
b90f141a 3738 Otherwise, the returned value is NULL_RTX. */
bbf6f052
RK
3739
3740rtx
b90f141a 3741expand_assignment (tree to, tree from, int want_value)
bbf6f052 3742{
b3694847 3743 rtx to_rtx = 0;
bbf6f052
RK
3744 rtx result;
3745
3746 /* Don't crash if the lhs of the assignment was erroneous. */
3747
3748 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
3749 {
3750 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3751 return want_value ? result : NULL_RTX;
3752 }
bbf6f052
RK
3753
3754 /* Assignment of a structure component needs special treatment
3755 if the structure component's rtx is not simply a MEM.
6be58303
JW
3756 Assignment of an array element at a constant index, and assignment of
 3757 an array element in an unaligned packed structure field, have the same
3758 problem. */
bbf6f052 3759
08293add 3760 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
7c02ae17
DE
3761 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3762 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
bbf6f052
RK
3763 {
3764 enum machine_mode mode1;
770ae6cc 3765 HOST_WIDE_INT bitsize, bitpos;
a06ef755 3766 rtx orig_to_rtx;
7bb0943f 3767 tree offset;
bbf6f052
RK
3768 int unsignedp;
3769 int volatilep = 0;
0088fcb1
RK
3770 tree tem;
3771
3772 push_temp_slots ();
839c4796 3773 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
a06ef755 3774 &unsignedp, &volatilep);
bbf6f052
RK
3775
3776 /* If we are going to use store_bit_field and extract_bit_field,
3777 make sure to_rtx will be safe for multiple use. */
3778
3779 if (mode1 == VOIDmode && want_value)
3780 tem = stabilize_reference (tem);
3781
1ed1b4fb
RK
3782 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3783
7bb0943f
RS
3784 if (offset != 0)
3785 {
e3c8ea67 3786 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7bb0943f
RS
3787
3788 if (GET_CODE (to_rtx) != MEM)
3789 abort ();
bd070e1a 3790
bd070e1a 3791#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 3792 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 3793 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
3794#else
3795 if (GET_MODE (offset_rtx) != ptr_mode)
3796 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 3797#endif
bd070e1a 3798
9a7b9f4f
JL
3799 /* A constant address in TO_RTX can have VOIDmode, we must not try
3800 to call force_reg for that case. Avoid that case. */
89752202
HB
3801 if (GET_CODE (to_rtx) == MEM
3802 && GET_MODE (to_rtx) == BLKmode
9a7b9f4f 3803 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
a06ef755 3804 && bitsize > 0
3a94c984 3805 && (bitpos % bitsize) == 0
89752202 3806 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 3807 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
89752202 3808 {
e3c8ea67 3809 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
3810 bitpos = 0;
3811 }
3812
0d4903b8 3813 to_rtx = offset_address (to_rtx, offset_rtx,
818c0c94 3814 highest_pow2_factor_for_type (TREE_TYPE (to),
3815 offset));
7bb0943f 3816 }
c5c76735 3817
998d7deb
RH
3818 if (GET_CODE (to_rtx) == MEM)
3819 {
998d7deb
RH
3820 /* If the field is at offset zero, we could have been given the
3821 DECL_RTX of the parent struct. Don't munge it. */
3822 to_rtx = shallow_copy_rtx (to_rtx);
3823
6f1087be 3824 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
998d7deb 3825 }
effbcc6a 3826
a06ef755
RK
3827 /* Deal with volatile and readonly fields. The former is only done
3828 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3829 if (volatilep && GET_CODE (to_rtx) == MEM)
3830 {
3831 if (to_rtx == orig_to_rtx)
3832 to_rtx = copy_rtx (to_rtx);
3833 MEM_VOLATILE_P (to_rtx) = 1;
bbf6f052
RK
3834 }
3835
956d6950 3836 if (TREE_CODE (to) == COMPONENT_REF
d76bc29c
EB
3837 && TREE_READONLY (TREE_OPERAND (to, 1))
3838 /* We can't assert that a MEM won't be set more than once
3839 if the component is not addressable because another
3840 non-addressable component may be referenced by the same MEM. */
3841 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
956d6950 3842 {
a06ef755 3843 if (to_rtx == orig_to_rtx)
956d6950 3844 to_rtx = copy_rtx (to_rtx);
956d6950
JL
3845 RTX_UNCHANGING_P (to_rtx) = 1;
3846 }
3847
a84b4898 3848 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
a06ef755
RK
3849 {
3850 if (to_rtx == orig_to_rtx)
3851 to_rtx = copy_rtx (to_rtx);
3852 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3853 }
3854
a06ef755
RK
3855 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3856 (want_value
3857 /* Spurious cast for HPUX compiler. */
3858 ? ((enum machine_mode)
3859 TYPE_MODE (TREE_TYPE (to)))
3860 : VOIDmode),
3861 unsignedp, TREE_TYPE (tem), get_alias_set (to));
a69beca1 3862
a06ef755
RK
3863 preserve_temp_slots (result);
3864 free_temp_slots ();
3865 pop_temp_slots ();
a69beca1 3866
a06ef755
RK
3867 /* If the value is meaningful, convert RESULT to the proper mode.
3868 Otherwise, return nothing. */
3869 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3870 TYPE_MODE (TREE_TYPE (from)),
3871 result,
3872 TREE_UNSIGNED (TREE_TYPE (to)))
3873 : NULL_RTX);
bbf6f052
RK
3874 }
3875
cd1db108
RS
3876 /* If the rhs is a function call and its value is not an aggregate,
3877 call the function before we start to compute the lhs.
3878 This is needed for correct code for cases such as
3879 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
3880 requires loading up part of an address in a separate insn.
3881
1858863b
JW
3882 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3883 since it might be a promoted variable where the zero- or sign- extension
3884 needs to be done. Handling this in the normal way is safe because no
3885 computation is done before the call. */
61f71b34 3886 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
b35cd3c1 3887 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1858863b
JW
3888 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3889 && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 3890 {
0088fcb1
RK
3891 rtx value;
3892
3893 push_temp_slots ();
3894 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 3895 if (to_rtx == 0)
37a08a29 3896 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
aaf87c45 3897
fffa9c1d
JW
3898 /* Handle calls that return values in multiple non-contiguous locations.
3899 The Irix 6 ABI has examples of this. */
3900 if (GET_CODE (to_rtx) == PARALLEL)
6e985040
AM
3901 emit_group_load (to_rtx, value, TREE_TYPE (from),
3902 int_size_in_bytes (TREE_TYPE (from)));
fffa9c1d 3903 else if (GET_MODE (to_rtx) == BLKmode)
44bb111a 3904 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
aaf87c45 3905 else
6419e5b0 3906 {
5ae6cd0d 3907 if (POINTER_TYPE_P (TREE_TYPE (to)))
6419e5b0 3908 value = convert_memory_address (GET_MODE (to_rtx), value);
6419e5b0
DT
3909 emit_move_insn (to_rtx, value);
3910 }
cd1db108
RS
3911 preserve_temp_slots (to_rtx);
3912 free_temp_slots ();
0088fcb1 3913 pop_temp_slots ();
709f5be1 3914 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
3915 }
3916
bbf6f052
RK
3917 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3918 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3919
3920 if (to_rtx == 0)
37a08a29 3921 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
bbf6f052 3922
86d38d25 3923 /* Don't move directly into a return register. */
14a774a9
RK
3924 if (TREE_CODE (to) == RESULT_DECL
3925 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
86d38d25 3926 {
0088fcb1
RK
3927 rtx temp;
3928
3929 push_temp_slots ();
3930 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
14a774a9
RK
3931
3932 if (GET_CODE (to_rtx) == PARALLEL)
6e985040
AM
3933 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3934 int_size_in_bytes (TREE_TYPE (from)));
14a774a9
RK
3935 else
3936 emit_move_insn (to_rtx, temp);
3937
86d38d25
RS
3938 preserve_temp_slots (to_rtx);
3939 free_temp_slots ();
0088fcb1 3940 pop_temp_slots ();
709f5be1 3941 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
3942 }
3943
bbf6f052
RK
3944 /* In case we are returning the contents of an object which overlaps
3945 the place the value is being stored, use a safe function when copying
3946 a value through a pointer into a structure value return block. */
3947 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3948 && current_function_returns_struct
3949 && !current_function_returns_pcc_struct)
3950 {
0088fcb1
RK
3951 rtx from_rtx, size;
3952
3953 push_temp_slots ();
33a20d10 3954 size = expr_size (from);
37a08a29 3955 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
bbf6f052 3956
4ca79136
RH
3957 if (TARGET_MEM_FUNCTIONS)
3958 emit_library_call (memmove_libfunc, LCT_NORMAL,
3959 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3960 XEXP (from_rtx, 0), Pmode,
3961 convert_to_mode (TYPE_MODE (sizetype),
3962 size, TREE_UNSIGNED (sizetype)),
3963 TYPE_MODE (sizetype));
3964 else
3965 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3966 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3967 XEXP (to_rtx, 0), Pmode,
3968 convert_to_mode (TYPE_MODE (integer_type_node),
3969 size,
3970 TREE_UNSIGNED (integer_type_node)),
3971 TYPE_MODE (integer_type_node));
bbf6f052
RK
3972
3973 preserve_temp_slots (to_rtx);
3974 free_temp_slots ();
0088fcb1 3975 pop_temp_slots ();
709f5be1 3976 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
3977 }
3978
3979 /* Compute FROM and store the value in the rtx we got. */
3980
0088fcb1 3981 push_temp_slots ();
bbf6f052
RK
3982 result = store_expr (from, to_rtx, want_value);
3983 preserve_temp_slots (result);
3984 free_temp_slots ();
0088fcb1 3985 pop_temp_slots ();
709f5be1 3986 return want_value ? result : NULL_RTX;
bbf6f052
RK
3987}
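/* A small source-level example of the component path above, assuming a
   C front end:

       struct s { int a : 3; unsigned b : 5; } x;
       x.b = v;

   get_inner_reference decomposes the left-hand side into a bit position
   and bit size within x, MODE1 typically comes back VOIDmode because the
   field is a bit-field, and the store is carried out by store_field
   rather than by a plain move.  */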
3988
3989/* Generate code for computing expression EXP,
3990 and storing the value into TARGET.
bbf6f052
RK
3991 TARGET may contain a QUEUED rtx.
3992
8403445a 3993 If WANT_VALUE & 1 is nonzero, return a copy of the value
709f5be1
RS
3994 not in TARGET, so that we can be sure to use the proper
3995 value in a containing expression even if TARGET has something
3996 else stored in it. If possible, we copy the value through a pseudo
3997 and return that pseudo. Or, if the value is constant, we try to
3998 return the constant. In some cases, we return a pseudo
3999 copied *from* TARGET.
4000
4001 If the mode is BLKmode then we may return TARGET itself.
 4002 It turns out that in BLKmode it doesn't cause a problem,
4003 because C has no operators that could combine two different
4004 assignments into the same BLKmode object with different values
4005 with no sequence point. Will other languages need this to
4006 be more thorough?
4007
8403445a 4008 If WANT_VALUE & 1 is 0, we return NULL, to make sure
709f5be1 4009 to catch quickly any cases where the caller uses the value
8403445a
AM
4010 and fails to set WANT_VALUE.
4011
4012 If WANT_VALUE & 2 is set, this is a store into a call param on the
4013 stack, and block moves may need to be treated specially. */
bbf6f052
RK
4014
4015rtx
502b8322 4016store_expr (tree exp, rtx target, int want_value)
bbf6f052 4017{
b3694847 4018 rtx temp;
bbf6f052 4019 int dont_return_target = 0;
e5408e52 4020 int dont_store_target = 0;
bbf6f052 4021
847311f4
AL
4022 if (VOID_TYPE_P (TREE_TYPE (exp)))
4023 {
4024 /* C++ can generate ?: expressions with a throw expression in one
4025 branch and an rvalue in the other. Here, we resolve attempts to
4d6922ee 4026 store the throw expression's nonexistent result. */
847311f4
AL
4027 if (want_value)
4028 abort ();
4029 expand_expr (exp, const0_rtx, VOIDmode, 0);
4030 return NULL_RTX;
4031 }
bbf6f052
RK
4032 if (TREE_CODE (exp) == COMPOUND_EXPR)
4033 {
4034 /* Perform first part of compound expression, then assign from second
4035 part. */
8403445a
AM
4036 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4037 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
bbf6f052 4038 emit_queue ();
709f5be1 4039 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
4040 }
4041 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4042 {
4043 /* For conditional expression, get safe form of the target. Then
4044 test the condition, doing the appropriate assignment on either
4045 side. This avoids the creation of unnecessary temporaries.
4046 For non-BLKmode, it is more efficient not to do this. */
4047
4048 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4049
4050 emit_queue ();
4051 target = protect_from_queue (target, 1);
4052
dabf8373 4053 do_pending_stack_adjust ();
bbf6f052
RK
4054 NO_DEFER_POP;
4055 jumpifnot (TREE_OPERAND (exp, 0), lab1);
956d6950 4056 start_cleanup_deferral ();
8403445a 4057 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
956d6950 4058 end_cleanup_deferral ();
bbf6f052
RK
4059 emit_queue ();
4060 emit_jump_insn (gen_jump (lab2));
4061 emit_barrier ();
4062 emit_label (lab1);
956d6950 4063 start_cleanup_deferral ();
8403445a 4064 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
956d6950 4065 end_cleanup_deferral ();
bbf6f052
RK
4066 emit_queue ();
4067 emit_label (lab2);
4068 OK_DEFER_POP;
a3a58acc 4069
8403445a 4070 return want_value & 1 ? target : NULL_RTX;
bbf6f052 4071 }
bbf6f052 4072 else if (queued_subexp_p (target))
709f5be1
RS
4073 /* If target contains a postincrement, let's not risk
4074 using it as the place to generate the rhs. */
bbf6f052
RK
4075 {
4076 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4077 {
4078 /* Expand EXP into a new pseudo. */
4079 temp = gen_reg_rtx (GET_MODE (target));
8403445a
AM
4080 temp = expand_expr (exp, temp, GET_MODE (target),
4081 (want_value & 2
4082 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
bbf6f052
RK
4083 }
4084 else
8403445a
AM
4085 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4086 (want_value & 2
4087 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
709f5be1
RS
4088
4089 /* If target is volatile, ANSI requires accessing the value
4090 *from* the target, if it is accessed. So make that happen.
4091 In no case return the target itself. */
8403445a 4092 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
709f5be1 4093 dont_return_target = 1;
bbf6f052 4094 }
8403445a
AM
4095 else if ((want_value & 1) != 0
4096 && GET_CODE (target) == MEM
4097 && ! MEM_VOLATILE_P (target)
12f06d17
CH
4098 && GET_MODE (target) != BLKmode)
4099 /* If target is in memory and caller wants value in a register instead,
4100 arrange that. Pass TARGET as target for expand_expr so that,
4101 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4102 We know expand_expr will not use the target in that case.
4103 Don't do this if TARGET is volatile because we are supposed
4104 to write it and then read it. */
4105 {
8403445a
AM
4106 temp = expand_expr (exp, target, GET_MODE (target),
4107 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
12f06d17 4108 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
e5408e52
JJ
4109 {
4110 /* If TEMP is already in the desired TARGET, only copy it from
4111 memory and don't store it there again. */
4112 if (temp == target
4113 || (rtx_equal_p (temp, target)
4114 && ! side_effects_p (temp) && ! side_effects_p (target)))
4115 dont_store_target = 1;
4116 temp = copy_to_reg (temp);
4117 }
12f06d17
CH
4118 dont_return_target = 1;
4119 }
1499e0a8 4120 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
09da1532 4121 /* If this is a scalar in a register that is stored in a wider mode
1499e0a8
RK
4122 than the declared mode, compute the result into its declared mode
4123 and then convert to the wider mode. Our value is the computed
4124 expression. */
4125 {
b76b08ef
RK
4126 rtx inner_target = 0;
4127
5a32d038 4128 /* If we don't want a value, we can do the conversion inside EXP,
f635a84d
RK
4129 which will often result in some optimizations. Do the conversion
4130 in two steps: first change the signedness, if needed, then
ab6c58f1
RK
4131 the extend. But don't do this if the type of EXP is a subtype
4132 of something else since then the conversion might involve
4133 more than just converting modes. */
8403445a
AM
4134 if ((want_value & 1) == 0
4135 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
ab6c58f1 4136 && TREE_TYPE (TREE_TYPE (exp)) == 0)
f635a84d
RK
4137 {
4138 if (TREE_UNSIGNED (TREE_TYPE (exp))
4139 != SUBREG_PROMOTED_UNSIGNED_P (target))
ceef8ce4
NB
4140 exp = convert
4141 ((*lang_hooks.types.signed_or_unsigned_type)
4142 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
f635a84d 4143
b0c48229
NB
4144 exp = convert ((*lang_hooks.types.type_for_mode)
4145 (GET_MODE (SUBREG_REG (target)),
4146 SUBREG_PROMOTED_UNSIGNED_P (target)),
f635a84d 4147 exp);
b76b08ef
RK
4148
4149 inner_target = SUBREG_REG (target);
f635a84d 4150 }
3a94c984 4151
8403445a
AM
4152 temp = expand_expr (exp, inner_target, VOIDmode,
4153 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
b258707c 4154
7abec5be 4155 /* If TEMP is a MEM and we want a result value, make the access
502b8322
AJ
4156 now so it gets done only once. Strictly speaking, this is
4157 only necessary if the MEM is volatile, or if the address
7abec5be
RH
4158 overlaps TARGET. But not performing the load twice also
4159 reduces the amount of rtl we generate and then have to CSE. */
8403445a 4160 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
766f36c7
RK
4161 temp = copy_to_reg (temp);
4162
b258707c
RS
4163 /* If TEMP is a VOIDmode constant, use convert_modes to make
4164 sure that we properly convert it. */
4165 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
1f1b0541
RH
4166 {
4167 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4168 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4169 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4170 GET_MODE (target), temp,
4171 SUBREG_PROMOTED_UNSIGNED_P (target));
4172 }
b258707c 4173
1499e0a8
RK
4174 convert_move (SUBREG_REG (target), temp,
4175 SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
4176
4177 /* If we promoted a constant, change the mode back down to match
4178 target. Otherwise, the caller might get confused by a result whose
4179 mode is larger than expected. */
4180
8403445a 4181 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
3dbecef9 4182 {
b3ca30df
JJ
4183 if (GET_MODE (temp) != VOIDmode)
4184 {
4185 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4186 SUBREG_PROMOTED_VAR_P (temp) = 1;
0fb7aeda 4187 SUBREG_PROMOTED_UNSIGNED_SET (temp,
7879b81e 4188 SUBREG_PROMOTED_UNSIGNED_P (target));
b3ca30df
JJ
4189 }
4190 else
4191 temp = convert_modes (GET_MODE (target),
4192 GET_MODE (SUBREG_REG (target)),
4193 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
4194 }
4195
8403445a 4196 return want_value & 1 ? temp : NULL_RTX;
1499e0a8 4197 }
bbf6f052
RK
4198 else
4199 {
8403445a
AM
4200 temp = expand_expr (exp, target, GET_MODE (target),
4201 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
766f36c7 4202 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
4203 If TARGET is a volatile mem ref, either return TARGET
4204 or return a reg copied *from* TARGET; ANSI requires this.
4205
4206 Otherwise, if TEMP is not TARGET, return TEMP
4207 if it is constant (for efficiency),
4208 or if we really want the correct value. */
bbf6f052
RK
4209 if (!(target && GET_CODE (target) == REG
4210 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1 4211 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
effbcc6a 4212 && ! rtx_equal_p (temp, target)
8403445a 4213 && (CONSTANT_P (temp) || (want_value & 1) != 0))
bbf6f052
RK
4214 dont_return_target = 1;
4215 }
4216
b258707c
RS
4217 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4218 the same as that of TARGET, adjust the constant. This is needed, for
4219 example, in case it is a CONST_DOUBLE and we want only a word-sized
4220 value. */
4221 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 4222 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
4223 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4224 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4225 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4226
bbf6f052 4227 /* If value was not generated in the target, store it there.
37a08a29
RK
4228 Convert the value to TARGET's type first if necessary.
4229 If TEMP and TARGET compare equal according to rtx_equal_p, but
f3f2255a
R
4230 one or both of them are volatile memory refs, we have to distinguish
4231 two cases:
4232 - expand_expr has used TARGET. In this case, we must not generate
4233 another copy. This can be detected by TARGET being equal according
4234 to == .
4235 - expand_expr has not used TARGET - that means that the source just
4236 happens to have the same RTX form. Since temp will have been created
4237 by expand_expr, it will compare unequal according to == .
4238 We must generate a copy in this case, to reach the correct number
4239 of volatile memory references. */
bbf6f052 4240
6036acbb 4241 if ((! rtx_equal_p (temp, target)
f3f2255a
R
4242 || (temp != target && (side_effects_p (temp)
4243 || side_effects_p (target))))
e5408e52 4244 && TREE_CODE (exp) != ERROR_MARK
a9772b60
JJ
4245 && ! dont_store_target
4246 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
 4247 but TARGET is not a valid memory reference, TEMP will differ
4248 from TARGET although it is really the same location. */
4249 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
e56fc090
HPN
4250 || target != DECL_RTL_IF_SET (exp))
4251 /* If there's nothing to copy, don't bother. Don't call expr_size
 4252 unless necessary, because some front ends' (e.g. C++) expr_size hook
4253 aborts on objects that are not supposed to be bit-copied or
4254 bit-initialized. */
4255 && expr_size (exp) != const0_rtx)
bbf6f052
RK
4256 {
4257 target = protect_from_queue (target, 1);
4258 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 4259 && GET_MODE (temp) != VOIDmode)
bbf6f052
RK
4260 {
4261 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4262 if (dont_return_target)
4263 {
4264 /* In this case, we will return TEMP,
4265 so make sure it has the proper mode.
4266 But don't forget to store the value into TARGET. */
4267 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4268 emit_move_insn (target, temp);
4269 }
4270 else
4271 convert_move (target, temp, unsignedp);
4272 }
4273
4274 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4275 {
c24ae149
RK
4276 /* Handle copying a string constant into an array. The string
4277 constant may be shorter than the array. So copy just the string's
4278 actual length, and clear the rest. First get the size of the data
4279 type of the string, which is actually the size of the target. */
4280 rtx size = expr_size (exp);
bbf6f052 4281
e87b4f3f
RS
4282 if (GET_CODE (size) == CONST_INT
4283 && INTVAL (size) < TREE_STRING_LENGTH (exp))
8403445a
AM
4284 emit_block_move (target, temp, size,
4285 (want_value & 2
4286 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4287 else
bbf6f052 4288 {
e87b4f3f
RS
4289 /* Compute the size of the data to copy from the string. */
4290 tree copy_size
c03b7665 4291 = size_binop (MIN_EXPR,
b50d17a1 4292 make_tree (sizetype, size),
fed3cef0 4293 size_int (TREE_STRING_LENGTH (exp)));
8403445a
AM
4294 rtx copy_size_rtx
4295 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4296 (want_value & 2
4297 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
e87b4f3f
RS
4298 rtx label = 0;
4299
4300 /* Copy that much. */
267b28bd
SE
4301 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4302 TREE_UNSIGNED (sizetype));
8403445a
AM
4303 emit_block_move (target, temp, copy_size_rtx,
4304 (want_value & 2
4305 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4306
88f63c77
RK
4307 /* Figure out how much is left in TARGET that we have to clear.
4308 Do all calculations in ptr_mode. */
e87b4f3f
RS
4309 if (GET_CODE (copy_size_rtx) == CONST_INT)
4310 {
c24ae149
RK
4311 size = plus_constant (size, -INTVAL (copy_size_rtx));
4312 target = adjust_address (target, BLKmode,
4313 INTVAL (copy_size_rtx));
e87b4f3f
RS
4314 }
4315 else
4316 {
fa06ab5c 4317 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
906c4e36
RK
4318 copy_size_rtx, NULL_RTX, 0,
4319 OPTAB_LIB_WIDEN);
e87b4f3f 4320
c24ae149
RK
4321#ifdef POINTERS_EXTEND_UNSIGNED
4322 if (GET_MODE (copy_size_rtx) != Pmode)
267b28bd
SE
4323 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4324 TREE_UNSIGNED (sizetype));
c24ae149
RK
4325#endif
4326
4327 target = offset_address (target, copy_size_rtx,
4328 highest_pow2_factor (copy_size));
e87b4f3f 4329 label = gen_label_rtx ();
c5d5d461 4330 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
a06ef755 4331 GET_MODE (size), 0, label);
e87b4f3f
RS
4332 }
4333
4334 if (size != const0_rtx)
37a08a29 4335 clear_storage (target, size);
22619c3f 4336
e87b4f3f
RS
4337 if (label)
4338 emit_label (label);
bbf6f052
RK
4339 }
4340 }
fffa9c1d
JW
4341 /* Handle calls that return values in multiple non-contiguous locations.
4342 The Irix 6 ABI has examples of this. */
4343 else if (GET_CODE (target) == PARALLEL)
6e985040
AM
4344 emit_group_load (target, temp, TREE_TYPE (exp),
4345 int_size_in_bytes (TREE_TYPE (exp)));
bbf6f052 4346 else if (GET_MODE (temp) == BLKmode)
8403445a
AM
4347 emit_block_move (target, temp, expr_size (exp),
4348 (want_value & 2
4349 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
bbf6f052
RK
4350 else
4351 emit_move_insn (target, temp);
4352 }
709f5be1 4353
766f36c7 4354 /* If we don't want a value, return NULL_RTX. */
8403445a 4355 if ((want_value & 1) == 0)
766f36c7
RK
4356 return NULL_RTX;
4357
4358 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4359 ??? The latter test doesn't seem to make sense. */
4360 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 4361 return temp;
766f36c7
RK
4362
4363 /* Return TARGET itself if it is a hard register. */
8403445a
AM
4364 else if ((want_value & 1) != 0
4365 && GET_MODE (target) != BLKmode
766f36c7
RK
4366 && ! (GET_CODE (target) == REG
4367 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 4368 return copy_to_reg (target);
3a94c984 4369
766f36c7 4370 else
709f5be1 4371 return target;
bbf6f052
RK
4372}
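/* A worked example of the STRING_CST handling above, assuming a C-style
   initialization:

       char buf[8] = "hi";

   expr_size (exp) is 8 (the size of the target array), the string's
   TREE_STRING_LENGTH is 3 (including the terminating NUL), so 3 bytes
   are block-copied and clear_storage zeroes the remaining 5.  */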
4373\f
40209195 4374/* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
9de08200
RK
4375
4376static int
502b8322 4377is_zeros_p (tree exp)
9de08200
RK
4378{
4379 tree elt;
4380
4381 switch (TREE_CODE (exp))
4382 {
4383 case CONVERT_EXPR:
4384 case NOP_EXPR:
4385 case NON_LVALUE_EXPR:
ed239f5a 4386 case VIEW_CONVERT_EXPR:
9de08200
RK
4387 return is_zeros_p (TREE_OPERAND (exp, 0));
4388
4389 case INTEGER_CST:
05bccae2 4390 return integer_zerop (exp);
9de08200
RK
4391
4392 case COMPLEX_CST:
4393 return
4394 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4395
4396 case REAL_CST:
41c9120b 4397 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
9de08200 4398
69ef87e2
AH
4399 case VECTOR_CST:
4400 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4401 elt = TREE_CHAIN (elt))
4402 if (!is_zeros_p (TREE_VALUE (elt)))
4403 return 0;
4404
4405 return 1;
4406
9de08200 4407 case CONSTRUCTOR:
e1a43f73
PB
4408 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4409 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
9de08200
RK
4410 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4411 if (! is_zeros_p (TREE_VALUE (elt)))
4412 return 0;
4413
4414 return 1;
3a94c984 4415
e9a25f70
JL
4416 default:
4417 return 0;
9de08200 4418 }
9de08200
RK
4419}
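/* For example, is_zeros_p returns 1 for the integer constant 0, for the
   real constant 0.0, for a complex or vector constant whose parts are
   all zeros, and for a CONSTRUCTOR each of whose elements satisfies the
   same test; any other tree yields 0.  */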
4420
4421/* Return 1 if EXP contains mostly (3/4) zeros. */
4422
40209195 4423int
502b8322 4424mostly_zeros_p (tree exp)
9de08200 4425{
9de08200
RK
4426 if (TREE_CODE (exp) == CONSTRUCTOR)
4427 {
e1a43f73
PB
4428 int elts = 0, zeros = 0;
4429 tree elt = CONSTRUCTOR_ELTS (exp);
4430 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4431 {
4432 /* If there are no ranges of true bits, it is all zero. */
4433 return elt == NULL_TREE;
4434 }
4435 for (; elt; elt = TREE_CHAIN (elt))
4436 {
4437 /* We do not handle the case where the index is a RANGE_EXPR,
4438 so the statistic will be somewhat inaccurate.
4439 We do make a more accurate count in store_constructor itself,
 4440 and since this function is only used for nested array elements,
0f41302f 4441 this should be close enough. */
e1a43f73
PB
4442 if (mostly_zeros_p (TREE_VALUE (elt)))
4443 zeros++;
4444 elts++;
4445 }
9de08200
RK
4446
4447 return 4 * zeros >= 3 * elts;
4448 }
4449
4450 return is_zeros_p (exp);
4451}
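/* A quick arithmetic check of the 3/4 threshold above: a CONSTRUCTOR
   with 100 counted elements of which 75 are (mostly) zero satisfies
   4 * 75 >= 3 * 100, so the caller will prefer to clear the whole object
   first and then store only the nonzero elements.  */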
4452\f
e1a43f73
PB
4453/* Helper function for store_constructor.
4454 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4455 TYPE is the type of the CONSTRUCTOR, not the element type.
04050c69 4456 CLEARED is as for store_constructor.
23cb1766 4457 ALIAS_SET is the alias set to use for any stores.
23ccec44
JW
4458
4459 This provides a recursive shortcut back to store_constructor when it isn't
4460 necessary to go through store_field. This is so that we can pass through
4461 the cleared field to let store_constructor know that we may not have to
4462 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
4463
4464static void
502b8322
AJ
4465store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4466 HOST_WIDE_INT bitpos, enum machine_mode mode,
4467 tree exp, tree type, int cleared, int alias_set)
e1a43f73
PB
4468{
4469 if (TREE_CODE (exp) == CONSTRUCTOR
23ccec44 4470 && bitpos % BITS_PER_UNIT == 0
cc2902df 4471 /* If we have a nonzero bitpos for a register target, then we just
23ccec44
JW
4472 let store_field do the bitfield handling. This is unlikely to
4473 generate unnecessary clear instructions anyways. */
4474 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 4475 {
61cb205c
RK
4476 if (GET_CODE (target) == MEM)
4477 target
4478 = adjust_address (target,
4479 GET_MODE (target) == BLKmode
4480 || 0 != (bitpos
4481 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4482 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
23cb1766 4483
e0339ef7 4484
04050c69 4485 /* Update the alias set, if required. */
10b76d73
RK
4486 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4487 && MEM_ALIAS_SET (target) != 0)
70072ed9
RK
4488 {
4489 target = copy_rtx (target);
4490 set_mem_alias_set (target, alias_set);
4491 }
e0339ef7 4492
04050c69 4493 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
e1a43f73
PB
4494 }
4495 else
a06ef755
RK
4496 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4497 alias_set);
e1a43f73
PB
4498}
4499
bbf6f052 4500/* Store the value of constructor EXP into the rtx TARGET.
04050c69
RK
4501 TARGET is either a REG or a MEM; we know it cannot conflict, since
4502 safe_from_p has been called.
b7010412
RK
 4503 CLEARED is true if TARGET is known to have been zeroed.
4504 SIZE is the number of bytes of TARGET we are allowed to modify: this
4505 may not be the same as the size of EXP if we are assigning to a field
4506 which has been packed to exclude padding bits. */
bbf6f052
RK
4507
4508static void
502b8322 4509store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
bbf6f052 4510{
4af3895e 4511 tree type = TREE_TYPE (exp);
a5efcd63 4512#ifdef WORD_REGISTER_OPERATIONS
13eb1f7f 4513 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
a5efcd63 4514#endif
4af3895e 4515
e44842fe
RK
4516 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4517 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052 4518 {
b3694847 4519 tree elt;
bbf6f052 4520
2c430630
RS
4521 /* If size is zero or the target is already cleared, do nothing. */
4522 if (size == 0 || cleared)
4523 cleared = 1;
04050c69 4524 /* We either clear the aggregate or indicate the value is dead. */
2c430630
RS
4525 else if ((TREE_CODE (type) == UNION_TYPE
4526 || TREE_CODE (type) == QUAL_UNION_TYPE)
4527 && ! CONSTRUCTOR_ELTS (exp))
04050c69 4528 /* If the constructor is empty, clear the union. */
a59f8640 4529 {
04050c69
RK
4530 clear_storage (target, expr_size (exp));
4531 cleared = 1;
a59f8640 4532 }
4af3895e
JVA
4533
4534 /* If we are building a static constructor into a register,
4535 set the initial value as zero so we can fold the value into
67225c15
RK
4536 a constant. But if more than one register is involved,
4537 this probably loses. */
2c430630 4538 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
67225c15 4539 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200 4540 {
04050c69 4541 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
9de08200
RK
4542 cleared = 1;
4543 }
4544
4545 /* If the constructor has fewer fields than the structure
4546 or if we are initializing the structure to mostly zeros,
0d97bf4c 4547 clear the whole structure first. Don't do this if TARGET is a
fcf1b822
RK
4548 register whose mode size isn't equal to SIZE since clear_storage
4549 can't handle this case. */
2c430630
RS
4550 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4551 || mostly_zeros_p (exp))
fcf1b822 4552 && (GET_CODE (target) != REG
04050c69
RK
4553 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4554 == size)))
9de08200 4555 {
337f4314
RK
4556 rtx xtarget = target;
4557
4558 if (readonly_fields_p (type))
4559 {
4560 xtarget = copy_rtx (xtarget);
4561 RTX_UNCHANGING_P (xtarget) = 1;
4562 }
4563
4564 clear_storage (xtarget, GEN_INT (size));
9de08200
RK
4565 cleared = 1;
4566 }
04050c69
RK
4567
4568 if (! cleared)
38a448ca 4569 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4570
4571 /* Store each element of the constructor into
4572 the corresponding field of TARGET. */
4573
4574 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4575 {
b3694847 4576 tree field = TREE_PURPOSE (elt);
34c73909 4577 tree value = TREE_VALUE (elt);
b3694847 4578 enum machine_mode mode;
770ae6cc
RK
4579 HOST_WIDE_INT bitsize;
4580 HOST_WIDE_INT bitpos = 0;
770ae6cc 4581 tree offset;
b50d17a1 4582 rtx to_rtx = target;
bbf6f052 4583
f32fd778
RS
4584 /* Just ignore missing fields.
4585 We cleared the whole structure, above,
4586 if any fields are missing. */
4587 if (field == 0)
4588 continue;
4589
8b6000fc 4590 if (cleared && is_zeros_p (value))
e1a43f73 4591 continue;
9de08200 4592
770ae6cc
RK
4593 if (host_integerp (DECL_SIZE (field), 1))
4594 bitsize = tree_low_cst (DECL_SIZE (field), 1);
14a774a9
RK
4595 else
4596 bitsize = -1;
4597
bbf6f052
RK
4598 mode = DECL_MODE (field);
4599 if (DECL_BIT_FIELD (field))
4600 mode = VOIDmode;
4601
770ae6cc
RK
4602 offset = DECL_FIELD_OFFSET (field);
4603 if (host_integerp (offset, 0)
4604 && host_integerp (bit_position (field), 0))
4605 {
4606 bitpos = int_bit_position (field);
4607 offset = 0;
4608 }
b50d17a1 4609 else
770ae6cc 4610 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
3a94c984 4611
b50d17a1
RK
4612 if (offset)
4613 {
4614 rtx offset_rtx;
4615
7a6cdb44 4616 if (CONTAINS_PLACEHOLDER_P (offset))
7fa96708 4617 offset = build (WITH_RECORD_EXPR, sizetype,
956d6950 4618 offset, make_tree (TREE_TYPE (exp), target));
bbf6f052 4619
b50d17a1
RK
4620 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4621 if (GET_CODE (to_rtx) != MEM)
4622 abort ();
4623
bd070e1a 4624#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 4625 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 4626 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
4627#else
4628 if (GET_MODE (offset_rtx) != ptr_mode)
4629 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 4630#endif
bd070e1a 4631
0d4903b8
RK
4632 to_rtx = offset_address (to_rtx, offset_rtx,
4633 highest_pow2_factor (offset));
b50d17a1 4634 }
c5c76735 4635
cf04eb80
RK
4636 if (TREE_READONLY (field))
4637 {
9151b3bf 4638 if (GET_CODE (to_rtx) == MEM)
effbcc6a
RK
4639 to_rtx = copy_rtx (to_rtx);
4640
cf04eb80
RK
4641 RTX_UNCHANGING_P (to_rtx) = 1;
4642 }
4643
34c73909
R
4644#ifdef WORD_REGISTER_OPERATIONS
4645 /* If this initializes a field that is smaller than a word, at the
4646 start of a word, try to widen it to a full word.
4647 This special case allows us to output C++ member function
4648 initializations in a form that the optimizers can understand. */
770ae6cc 4649 if (GET_CODE (target) == REG
34c73909
R
4650 && bitsize < BITS_PER_WORD
4651 && bitpos % BITS_PER_WORD == 0
4652 && GET_MODE_CLASS (mode) == MODE_INT
4653 && TREE_CODE (value) == INTEGER_CST
13eb1f7f
RK
4654 && exp_size >= 0
4655 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
34c73909
R
4656 {
4657 tree type = TREE_TYPE (value);
04050c69 4658
34c73909
R
4659 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4660 {
b0c48229
NB
4661 type = (*lang_hooks.types.type_for_size)
4662 (BITS_PER_WORD, TREE_UNSIGNED (type));
34c73909
R
4663 value = convert (type, value);
4664 }
04050c69 4665
34c73909
R
4666 if (BYTES_BIG_ENDIAN)
4667 value
4668 = fold (build (LSHIFT_EXPR, type, value,
4669 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4670 bitsize = BITS_PER_WORD;
4671 mode = word_mode;
4672 }
4673#endif
10b76d73
RK
4674
4675 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4676 && DECL_NONADDRESSABLE_P (field))
4677 {
4678 to_rtx = copy_rtx (to_rtx);
4679 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4680 }
4681
c5c76735 4682 store_constructor_field (to_rtx, bitsize, bitpos, mode,
8b6000fc 4683 value, type, cleared,
10b76d73 4684 get_alias_set (TREE_TYPE (field)));
bbf6f052
RK
4685 }
4686 }
e6834654
SS
4687 else if (TREE_CODE (type) == ARRAY_TYPE
4688 || TREE_CODE (type) == VECTOR_TYPE)
bbf6f052 4689 {
b3694847
SS
4690 tree elt;
4691 int i;
e1a43f73 4692 int need_to_clear;
4af3895e 4693 tree domain = TYPE_DOMAIN (type);
4af3895e 4694 tree elttype = TREE_TYPE (type);
e6834654 4695 int const_bounds_p;
ae0ed63a
JM
4696 HOST_WIDE_INT minelt = 0;
4697 HOST_WIDE_INT maxelt = 0;
85f3d674 4698
e6834654
SS
4699 /* Vectors are like arrays, but the domain is stored via an array
4700 type indirectly. */
4701 if (TREE_CODE (type) == VECTOR_TYPE)
4702 {
4703 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4704 the same field as TYPE_DOMAIN, we are not guaranteed that
4705 it always will. */
4706 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4707 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4708 }
4709
4710 const_bounds_p = (TYPE_MIN_VALUE (domain)
4711 && TYPE_MAX_VALUE (domain)
4712 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4713 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4714
85f3d674
RK
4715 /* If we have constant bounds for the range of the type, get them. */
4716 if (const_bounds_p)
4717 {
4718 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4719 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4720 }
bbf6f052 4721
e1a43f73 4722 /* If the constructor has fewer elements than the array,
38e01259 4723 clear the whole array first. Similarly if this is
e1a43f73
PB
4724 a static constructor of a non-BLKmode object. */
4725 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4726 need_to_clear = 1;
4727 else
4728 {
4729 HOST_WIDE_INT count = 0, zero_count = 0;
85f3d674
RK
4730 need_to_clear = ! const_bounds_p;
4731
e1a43f73
PB
4732 /* This loop is a more accurate version of the loop in
4733 mostly_zeros_p (it handles RANGE_EXPR in an index).
4734 It is also needed to check for missing elements. */
4735 for (elt = CONSTRUCTOR_ELTS (exp);
85f3d674 4736 elt != NULL_TREE && ! need_to_clear;
df0faff1 4737 elt = TREE_CHAIN (elt))
e1a43f73
PB
4738 {
4739 tree index = TREE_PURPOSE (elt);
4740 HOST_WIDE_INT this_node_count;
19caa751 4741
e1a43f73
PB
4742 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4743 {
4744 tree lo_index = TREE_OPERAND (index, 0);
4745 tree hi_index = TREE_OPERAND (index, 1);
05bccae2 4746
19caa751
RK
4747 if (! host_integerp (lo_index, 1)
4748 || ! host_integerp (hi_index, 1))
e1a43f73
PB
4749 {
4750 need_to_clear = 1;
4751 break;
4752 }
19caa751
RK
4753
4754 this_node_count = (tree_low_cst (hi_index, 1)
4755 - tree_low_cst (lo_index, 1) + 1);
e1a43f73
PB
4756 }
4757 else
4758 this_node_count = 1;
85f3d674 4759
e1a43f73
PB
4760 count += this_node_count;
4761 if (mostly_zeros_p (TREE_VALUE (elt)))
4762 zero_count += this_node_count;
4763 }
85f3d674 4764
8e958f70 4765 /* Clear the entire array first if there are any missing elements,
0f41302f 4766 or if the incidence of zero elements is >= 75%. */
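/* The test below expresses the 75% threshold without division:
   with, say, 8 zero elements out of 10, 4 * 8 = 32 >= 3 * 10 = 30,
   so the whole array is cleared first and only the nonzero elements
   are stored individually.  */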
85f3d674
RK
4767 if (! need_to_clear
4768 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
e1a43f73
PB
4769 need_to_clear = 1;
4770 }
85f3d674 4771
9376fcd6 4772 if (need_to_clear && size > 0)
9de08200
RK
4773 {
4774 if (! cleared)
725e58b1
RK
4775 {
4776 if (REG_P (target))
4777 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4778 else
4779 clear_storage (target, GEN_INT (size));
4780 }
9de08200
RK
4781 cleared = 1;
4782 }
df4556a3 4783 else if (REG_P (target))
bbf6f052 4784 /* Inform later passes that the old value is dead. */
38a448ca 4785 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4786
4787 /* Store each element of the constructor into
4788 the corresponding element of TARGET, determined
4789 by counting the elements. */
4790 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4791 elt;
4792 elt = TREE_CHAIN (elt), i++)
4793 {
b3694847 4794 enum machine_mode mode;
19caa751
RK
4795 HOST_WIDE_INT bitsize;
4796 HOST_WIDE_INT bitpos;
bbf6f052 4797 int unsignedp;
e1a43f73 4798 tree value = TREE_VALUE (elt);
03dc44a6
RS
4799 tree index = TREE_PURPOSE (elt);
4800 rtx xtarget = target;
bbf6f052 4801
e1a43f73
PB
4802 if (cleared && is_zeros_p (value))
4803 continue;
9de08200 4804
bbf6f052 4805 unsignedp = TREE_UNSIGNED (elttype);
14a774a9
RK
4806 mode = TYPE_MODE (elttype);
4807 if (mode == BLKmode)
19caa751
RK
4808 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4809 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4810 : -1);
14a774a9
RK
4811 else
4812 bitsize = GET_MODE_BITSIZE (mode);
bbf6f052 4813
e1a43f73
PB
4814 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4815 {
4816 tree lo_index = TREE_OPERAND (index, 0);
4817 tree hi_index = TREE_OPERAND (index, 1);
4977bab6 4818 rtx index_r, pos_rtx, loop_end;
e1a43f73 4819 struct nesting *loop;
05c0b405
PB
4820 HOST_WIDE_INT lo, hi, count;
4821 tree position;
e1a43f73 4822
0f41302f 4823 /* If the range is constant and "small", unroll the loop. */
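/* "Small" here means the target is not a MEM, or the range covers at
   most two elements, or the unrolled stores total at most 40 bytes
   (40 * 8 bits), as tested below.  */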
85f3d674
RK
4824 if (const_bounds_p
4825 && host_integerp (lo_index, 0)
19caa751
RK
4826 && host_integerp (hi_index, 0)
4827 && (lo = tree_low_cst (lo_index, 0),
4828 hi = tree_low_cst (hi_index, 0),
05c0b405
PB
4829 count = hi - lo + 1,
4830 (GET_CODE (target) != MEM
4831 || count <= 2
19caa751
RK
4832 || (host_integerp (TYPE_SIZE (elttype), 1)
4833 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4834 <= 40 * 8)))))
e1a43f73 4835 {
05c0b405
PB
4836 lo -= minelt; hi -= minelt;
4837 for (; lo <= hi; lo++)
e1a43f73 4838 {
19caa751 4839 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
10b76d73
RK
4840
4841 if (GET_CODE (target) == MEM
4842 && !MEM_KEEP_ALIAS_SET_P (target)
e6834654 4843 && TREE_CODE (type) == ARRAY_TYPE
10b76d73
RK
4844 && TYPE_NONALIASED_COMPONENT (type))
4845 {
4846 target = copy_rtx (target);
4847 MEM_KEEP_ALIAS_SET_P (target) = 1;
4848 }
4849
23cb1766 4850 store_constructor_field
04050c69
RK
4851 (target, bitsize, bitpos, mode, value, type, cleared,
4852 get_alias_set (elttype));
e1a43f73
PB
4853 }
4854 }
4855 else
4856 {
4977bab6 4857 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
e1a43f73
PB
4858 loop_end = gen_label_rtx ();
4859
4860 unsignedp = TREE_UNSIGNED (domain);
4861
4862 index = build_decl (VAR_DECL, NULL_TREE, domain);
4863
19e7881c 4864 index_r
e1a43f73
PB
4865 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4866 &unsignedp, 0));
19e7881c 4867 SET_DECL_RTL (index, index_r);
e1a43f73
PB
4868 if (TREE_CODE (value) == SAVE_EXPR
4869 && SAVE_EXPR_RTL (value) == 0)
4870 {
0f41302f
MS
4871 /* Make sure value gets expanded once before the
4872 loop. */
e1a43f73
PB
4873 expand_expr (value, const0_rtx, VOIDmode, 0);
4874 emit_queue ();
4875 }
4876 store_expr (lo_index, index_r, 0);
4877 loop = expand_start_loop (0);
4878
0f41302f 4879 /* Assign value to element index. */
fed3cef0
RK
4880 position
4881 = convert (ssizetype,
4882 fold (build (MINUS_EXPR, TREE_TYPE (index),
4883 index, TYPE_MIN_VALUE (domain))));
4884 position = size_binop (MULT_EXPR, position,
4885 convert (ssizetype,
4886 TYPE_SIZE_UNIT (elttype)));
4887
e1a43f73 4888 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
0d4903b8
RK
4889 xtarget = offset_address (target, pos_rtx,
4890 highest_pow2_factor (position));
4891 xtarget = adjust_address (xtarget, mode, 0);
e1a43f73 4892 if (TREE_CODE (value) == CONSTRUCTOR)
04050c69 4893 store_constructor (value, xtarget, cleared,
b7010412 4894 bitsize / BITS_PER_UNIT);
e1a43f73
PB
4895 else
4896 store_expr (value, xtarget, 0);
4897
4898 expand_exit_loop_if_false (loop,
4899 build (LT_EXPR, integer_type_node,
4900 index, hi_index));
4901
4902 expand_increment (build (PREINCREMENT_EXPR,
4903 TREE_TYPE (index),
7b8b9722 4904 index, integer_one_node), 0, 0);
e1a43f73
PB
4905 expand_end_loop ();
4906 emit_label (loop_end);
e1a43f73
PB
4907 }
4908 }
19caa751
RK
4909 else if ((index != 0 && ! host_integerp (index, 0))
4910 || ! host_integerp (TYPE_SIZE (elttype), 1))
03dc44a6 4911 {
03dc44a6
RS
4912 tree position;
4913
5b6c44ff 4914 if (index == 0)
fed3cef0 4915 index = ssize_int (i);
5b6c44ff 4916
e1a43f73 4917 if (minelt)
fed3cef0
RK
4918 index = convert (ssizetype,
4919 fold (build (MINUS_EXPR, index,
4920 TYPE_MIN_VALUE (domain))));
19caa751 4921
fed3cef0
RK
4922 position = size_binop (MULT_EXPR, index,
4923 convert (ssizetype,
4924 TYPE_SIZE_UNIT (elttype)));
0d4903b8
RK
4925 xtarget = offset_address (target,
4926 expand_expr (position, 0, VOIDmode, 0),
4927 highest_pow2_factor (position));
4928 xtarget = adjust_address (xtarget, mode, 0);
e1a43f73 4929 store_expr (value, xtarget, 0);
03dc44a6
RS
4930 }
4931 else
4932 {
4933 if (index != 0)
19caa751
RK
4934 bitpos = ((tree_low_cst (index, 0) - minelt)
4935 * tree_low_cst (TYPE_SIZE (elttype), 1));
03dc44a6 4936 else
19caa751
RK
4937 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4938
10b76d73 4939 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
e6834654 4940 && TREE_CODE (type) == ARRAY_TYPE
10b76d73
RK
4941 && TYPE_NONALIASED_COMPONENT (type))
4942 {
4943 target = copy_rtx (target);
4944 MEM_KEEP_ALIAS_SET_P (target) = 1;
4945 }
4946
c5c76735 4947 store_constructor_field (target, bitsize, bitpos, mode, value,
04050c69 4948 type, cleared, get_alias_set (elttype));
23cb1766 4949
03dc44a6 4950 }
bbf6f052
RK
4951 }
4952 }
19caa751 4953
3a94c984 4954 /* Set constructor assignments. */
071a6595
PB
4955 else if (TREE_CODE (type) == SET_TYPE)
4956 {
e1a43f73 4957 tree elt = CONSTRUCTOR_ELTS (exp);
19caa751 4958 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
4959 tree domain = TYPE_DOMAIN (type);
4960 tree domain_min, domain_max, bitlength;
4961
9faa82d8 4962 /* The default implementation strategy is to extract the constant
071a6595
PB
4963 parts of the constructor, use that to initialize the target,
4964 and then "or" in whatever non-constant ranges we need in addition.
4965
4966 If a large set is all zero or all ones, it is
4967 probably better to set it using memset (if available) or bzero.
4968 Also, if a large set has just a single range, it may also be
4969 better to first clear the whole set (using
0f41302f 4970 bzero/memset), and set the bits we want. */
3a94c984 4971
0f41302f 4972 /* Check for all zeros. */
9376fcd6 4973 if (elt == NULL_TREE && size > 0)
071a6595 4974 {
e1a43f73 4975 if (!cleared)
8ac61af7 4976 clear_storage (target, GEN_INT (size));
071a6595
PB
4977 return;
4978 }
4979
071a6595
PB
4980 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4981 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4982 bitlength = size_binop (PLUS_EXPR,
fed3cef0
RK
4983 size_diffop (domain_max, domain_min),
4984 ssize_int (1));
071a6595 4985
19caa751 4986 nbits = tree_low_cst (bitlength, 1);
e1a43f73
PB
4987
4988 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4989 are "complicated" (more than one range), initialize (the
3a94c984 4990 constant parts) by copying from a constant. */
e1a43f73
PB
4991 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4992 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 4993 {
19caa751 4994 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
b4ee5a72 4995 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
703ad42b 4996 char *bit_buffer = alloca (nbits);
b4ee5a72 4997 HOST_WIDE_INT word = 0;
19caa751
RK
4998 unsigned int bit_pos = 0;
4999 unsigned int ibit = 0;
5000 unsigned int offset = 0; /* In bytes from beginning of set. */
5001
e1a43f73 5002 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 5003 for (;;)
071a6595 5004 {
b4ee5a72
PB
5005 if (bit_buffer[ibit])
5006 {
b09f3348 5007 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
5008 word |= (1 << (set_word_size - 1 - bit_pos));
5009 else
5010 word |= 1 << bit_pos;
5011 }
19caa751 5012
b4ee5a72
PB
5013 bit_pos++; ibit++;
5014 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 5015 {
e1a43f73
PB
5016 if (word != 0 || ! cleared)
5017 {
5018 rtx datum = GEN_INT (word);
5019 rtx to_rtx;
19caa751 5020
0f41302f
MS
5021 /* The assumption here is that it is safe to use
5022 XEXP if the set is multi-word, but not if
5023 it's single-word. */
e1a43f73 5024 if (GET_CODE (target) == MEM)
f4ef873c 5025 to_rtx = adjust_address (target, mode, offset);
3a94c984 5026 else if (offset == 0)
e1a43f73
PB
5027 to_rtx = target;
5028 else
5029 abort ();
5030 emit_move_insn (to_rtx, datum);
5031 }
19caa751 5032
b4ee5a72
PB
5033 if (ibit == nbits)
5034 break;
5035 word = 0;
5036 bit_pos = 0;
5037 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
5038 }
5039 }
071a6595 5040 }
e1a43f73 5041 else if (!cleared)
19caa751
RK
5042 /* Don't bother clearing storage if the set is all ones. */
5043 if (TREE_CHAIN (elt) != NULL_TREE
5044 || (TREE_PURPOSE (elt) == NULL_TREE
5045 ? nbits != 1
5046 : ( ! host_integerp (TREE_VALUE (elt), 0)
5047 || ! host_integerp (TREE_PURPOSE (elt), 0)
5048 || (tree_low_cst (TREE_VALUE (elt), 0)
5049 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5050 != (HOST_WIDE_INT) nbits))))
8ac61af7 5051 clear_storage (target, expr_size (exp));
3a94c984 5052
e1a43f73 5053 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595 5054 {
3a94c984 5055 /* Start of range of element or NULL. */
071a6595 5056 tree startbit = TREE_PURPOSE (elt);
3a94c984 5057 /* End of range of element, or element value. */
071a6595
PB
5058 tree endbit = TREE_VALUE (elt);
5059 HOST_WIDE_INT startb, endb;
19caa751 5060 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
071a6595
PB
5061
5062 bitlength_rtx = expand_expr (bitlength,
19caa751 5063 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
071a6595 5064
3a94c984 5065 /* Handle non-range tuple element like [ expr ]. */
071a6595
PB
5066 if (startbit == NULL_TREE)
5067 {
5068 startbit = save_expr (endbit);
5069 endbit = startbit;
5070 }
19caa751 5071
071a6595
PB
5072 startbit = convert (sizetype, startbit);
5073 endbit = convert (sizetype, endbit);
5074 if (! integer_zerop (domain_min))
5075 {
5076 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5077 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5078 }
3a94c984 5079 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
071a6595 5080 EXPAND_CONST_ADDRESS);
3a94c984 5081 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
071a6595
PB
5082 EXPAND_CONST_ADDRESS);
5083
5084 if (REG_P (target))
5085 {
1da68f56
RK
5086 targetx
5087 = assign_temp
b0c48229
NB
5088 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5089 (GET_MODE (target), 0),
1da68f56
RK
5090 TYPE_QUAL_CONST)),
5091 0, 1, 1);
071a6595
PB
5092 emit_move_insn (targetx, target);
5093 }
19caa751 5094
071a6595
PB
5095 else if (GET_CODE (target) == MEM)
5096 targetx = target;
5097 else
5098 abort ();
5099
4ca79136
RH
5100 /* Optimization: If startbit and endbit are constants divisible
5101 by BITS_PER_UNIT, call memset instead. */
5102 if (TARGET_MEM_FUNCTIONS
5103 && TREE_CODE (startbit) == INTEGER_CST
071a6595
PB
5104 && TREE_CODE (endbit) == INTEGER_CST
5105 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 5106 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 5107 {
ebb1b59a 5108 emit_library_call (memset_libfunc, LCT_NORMAL,
071a6595 5109 VOIDmode, 3,
e1a43f73
PB
5110 plus_constant (XEXP (targetx, 0),
5111 startb / BITS_PER_UNIT),
071a6595 5112 Pmode,
3b6f75e2 5113 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 5114 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 5115 TYPE_MODE (sizetype));
071a6595
PB
5116 }
5117 else
68d28100
RH
5118 emit_library_call (setbits_libfunc, LCT_NORMAL,
5119 VOIDmode, 4, XEXP (targetx, 0),
ebb1b59a 5120 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
19caa751
RK
5121 startbit_rtx, TYPE_MODE (sizetype),
5122 endbit_rtx, TYPE_MODE (sizetype));
5123
071a6595
PB
5124 if (REG_P (target))
5125 emit_move_insn (target, targetx);
5126 }
5127 }
bbf6f052
RK
5128
5129 else
5130 abort ();
5131}
5132
5133/* Store the value of EXP (an expression tree)
5134 into a subfield of TARGET which has mode MODE and occupies
5135 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5136 If MODE is VOIDmode, it means that we are storing into a bit-field.
5137
5138 If VALUE_MODE is VOIDmode, return nothing in particular.
5139 UNSIGNEDP is not used in this case.
5140
5141 Otherwise, return an rtx for the value stored. This rtx
5142 has mode VALUE_MODE if that is convenient to do.
5143 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5144
a06ef755 5145 TYPE is the type of the underlying object.
ece32014
MM
5146
5147 ALIAS_SET is the alias set for the destination. This value will
5148 (in general) be different from that for TARGET, since TARGET is a
5149 reference to the containing structure. */
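/* For instance, an assignment to the bit-field member of
   struct { int f : 3; } arrives here with MODE == VOIDmode,
   BITSIZE == 3 and BITPOS giving the field's position within TARGET,
   so the store is carried out with store_bit_field below.  */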
bbf6f052
RK
5150
5151static rtx
502b8322
AJ
5152store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5153 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5154 int unsignedp, tree type, int alias_set)
bbf6f052 5155{
906c4e36 5156 HOST_WIDE_INT width_mask = 0;
bbf6f052 5157
e9a25f70
JL
5158 if (TREE_CODE (exp) == ERROR_MARK)
5159 return const0_rtx;
5160
2be6a7e9
RK
5161 /* If we have nothing to store, do nothing unless the expression has
5162 side-effects. */
5163 if (bitsize == 0)
5164 return expand_expr (exp, const0_rtx, VOIDmode, 0);
6a87d634 5165 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
906c4e36 5166 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
5167
5168 /* If we are storing into an unaligned field of an aligned union that is
5169 in a register, we may have the mode of TARGET being an integer mode but
5170 MODE == BLKmode. In that case, get an aligned object whose size and
5171 alignment are the same as TARGET and store TARGET into it (we can avoid
5172 the store if the field being stored is the entire width of TARGET). Then
5173 call ourselves recursively to store the field into a BLKmode version of
5174 that object. Finally, load from the object into TARGET. This is not
5175 very efficient in general, but should only be slightly more expensive
5176 than the otherwise-required unaligned accesses. Perhaps this can be
85a43a2f
RK
5177 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5178 twice, once with emit_move_insn and once via store_field. */
bbf6f052
RK
5179
5180 if (mode == BLKmode
5181 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5182 {
85a43a2f 5183 rtx object = assign_temp (type, 0, 1, 1);
c4e59f51 5184 rtx blk_object = adjust_address (object, BLKmode, 0);
bbf6f052 5185
8752c357 5186 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
bbf6f052
RK
5187 emit_move_insn (object, target);
5188
a06ef755
RK
5189 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5190 alias_set);
bbf6f052
RK
5191
5192 emit_move_insn (target, object);
5193
a06ef755 5194 /* We want to return the BLKmode version of the data. */
46093b97 5195 return blk_object;
bbf6f052 5196 }
c3b247b4
JM
5197
5198 if (GET_CODE (target) == CONCAT)
5199 {
5200 /* We're storing into a struct containing a single __complex. */
5201
5202 if (bitpos != 0)
5203 abort ();
5204 return store_expr (exp, target, 0);
5205 }
bbf6f052
RK
5206
5207 /* If the structure is in a register or if the component
5208 is a bit field, we cannot use addressing to access it.
5209 Use bit-field techniques or SUBREG to store in it. */
5210
4fa52007 5211 if (mode == VOIDmode
6ab06cbb
JW
5212 || (mode != BLKmode && ! direct_store[(int) mode]
5213 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5214 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4fa52007 5215 || GET_CODE (target) == REG
c980ac49 5216 || GET_CODE (target) == SUBREG
ccc98036
RS
5217 /* If the field isn't aligned enough to store as an ordinary memref,
5218 store it as a bit field. */
15b19a7d 5219 || (mode != BLKmode
9e5f281f
OH
5220 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5221 || bitpos % GET_MODE_ALIGNMENT (mode))
5222 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
502b8322 5223 || (bitpos % BITS_PER_UNIT != 0)))
14a774a9
RK
5224 /* If the RHS and field are a constant size and the size of the
5225 RHS isn't the same size as the bitfield, we must use bitfield
5226 operations. */
05bccae2
RK
5227 || (bitsize >= 0
5228 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5229 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
bbf6f052 5230 {
906c4e36 5231 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 5232
ef19912d
RK
5233 /* If BITSIZE is narrower than the size of the type of EXP
5234 we will be narrowing TEMP. Normally, what's wanted are the
5235 low-order bits. However, if EXP's type is a record and this is
5236 a big-endian machine, we want the upper BITSIZE bits. */
5237 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
65a07688 5238 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
ef19912d
RK
5239 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5240 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5241 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5242 - bitsize),
c1853da7 5243 NULL_RTX, 1);
ef19912d 5244
bbd6cf73
RK
5245 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5246 MODE. */
5247 if (mode != VOIDmode && mode != BLKmode
5248 && mode != TYPE_MODE (TREE_TYPE (exp)))
5249 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5250
a281e72d
RK
5251 /* If the modes of TARGET and TEMP are both BLKmode, both
5252 must be in memory and BITPOS must be aligned on a byte
5253 boundary. If so, we simply do a block copy. */
5254 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5255 {
5256 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5257 || bitpos % BITS_PER_UNIT != 0)
5258 abort ();
5259
f4ef873c 5260 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
a281e72d 5261 emit_block_move (target, temp,
a06ef755 5262 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a
RH
5263 / BITS_PER_UNIT),
5264 BLOCK_OP_NORMAL);
a281e72d
RK
5265
5266 return value_mode == VOIDmode ? const0_rtx : target;
5267 }
5268
bbf6f052 5269 /* Store the value in the bitfield. */
a06ef755
RK
5270 store_bit_field (target, bitsize, bitpos, mode, temp,
5271 int_size_in_bytes (type));
5272
bbf6f052
RK
5273 if (value_mode != VOIDmode)
5274 {
04050c69
RK
5275 /* The caller wants an rtx for the value.
5276 If possible, avoid refetching from the bitfield itself. */
bbf6f052
RK
5277 if (width_mask != 0
5278 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 5279 {
9074de27 5280 tree count;
5c4d7cfb 5281 enum machine_mode tmode;
86a2c12a 5282
5c4d7cfb 5283 tmode = GET_MODE (temp);
86a2c12a
RS
5284 if (tmode == VOIDmode)
5285 tmode = value_mode;
22273300
JJ
5286
5287 if (unsignedp)
5288 return expand_and (tmode, temp,
2496c7bd 5289 gen_int_mode (width_mask, tmode),
22273300
JJ
5290 NULL_RTX);
5291
5c4d7cfb
RS
5292 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5293 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5294 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5295 }
04050c69 5296
bbf6f052 5297 return extract_bit_field (target, bitsize, bitpos, unsignedp,
04050c69 5298 NULL_RTX, value_mode, VOIDmode,
a06ef755 5299 int_size_in_bytes (type));
bbf6f052
RK
5300 }
5301 return const0_rtx;
5302 }
5303 else
5304 {
5305 rtx addr = XEXP (target, 0);
a06ef755 5306 rtx to_rtx = target;
bbf6f052
RK
5307
5308 /* If a value is wanted, it must be the lhs;
5309 so make the address stable for multiple use. */
5310
5311 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5312 && ! CONSTANT_ADDRESS_P (addr)
5313 /* A frame-pointer reference is already stable. */
5314 && ! (GET_CODE (addr) == PLUS
5315 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5316 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5317 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
a06ef755 5318 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
bbf6f052
RK
5319
5320 /* Now build a reference to just the desired component. */
5321
a06ef755
RK
5322 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5323
5324 if (to_rtx == target)
5325 to_rtx = copy_rtx (to_rtx);
792760b9 5326
c6df88cb 5327 MEM_SET_IN_STRUCT_P (to_rtx, 1);
10b76d73 5328 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
a06ef755 5329 set_mem_alias_set (to_rtx, alias_set);
bbf6f052
RK
5330
5331 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5332 }
5333}
5334\f
5335/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
b4e3fabb
RK
5336 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5337 codes and find the ultimate containing object, which we return.
bbf6f052
RK
5338
5339 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5340 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
5341 If the position of the field is variable, we store a tree
5342 giving the variable offset (in units) in *POFFSET.
5343 This offset is in addition to the bit position.
5344 If the position is not variable, we store 0 in *POFFSET.
bbf6f052
RK
5345
5346 If any of the extraction expressions is volatile,
5347 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5348
5349 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5350 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
5351 is redundant.
5352
5353 If the field describes a variable-sized object, *PMODE is set to
5354 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
6d2f8887 5355 this case, but the address of the object can be found. */
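/* For example, for a reference like a.b[i].c this function peels off
   the COMPONENT_REFs and the ARRAY_REF, accumulating the constant part
   of the position in *PBITPOS and the variable part (i scaled by the
   element size) in *POFFSET, and returns the innermost object a.  */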
bbf6f052
RK
5356
5357tree
502b8322
AJ
5358get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5359 HOST_WIDE_INT *pbitpos, tree *poffset,
5360 enum machine_mode *pmode, int *punsignedp,
5361 int *pvolatilep)
bbf6f052
RK
5362{
5363 tree size_tree = 0;
5364 enum machine_mode mode = VOIDmode;
fed3cef0 5365 tree offset = size_zero_node;
770ae6cc 5366 tree bit_offset = bitsize_zero_node;
738cc472 5367 tree placeholder_ptr = 0;
770ae6cc 5368 tree tem;
bbf6f052 5369
770ae6cc
RK
5370 /* First get the mode, signedness, and size. We do this from just the
5371 outermost expression. */
bbf6f052
RK
5372 if (TREE_CODE (exp) == COMPONENT_REF)
5373 {
5374 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5375 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5376 mode = DECL_MODE (TREE_OPERAND (exp, 1));
770ae6cc 5377
bbf6f052
RK
5378 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5379 }
5380 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5381 {
5382 size_tree = TREE_OPERAND (exp, 1);
5383 *punsignedp = TREE_UNSIGNED (exp);
5384 }
5385 else
5386 {
5387 mode = TYPE_MODE (TREE_TYPE (exp));
770ae6cc
RK
5388 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5389
ab87f8c8
JL
5390 if (mode == BLKmode)
5391 size_tree = TYPE_SIZE (TREE_TYPE (exp));
770ae6cc
RK
5392 else
5393 *pbitsize = GET_MODE_BITSIZE (mode);
bbf6f052 5394 }
3a94c984 5395
770ae6cc 5396 if (size_tree != 0)
bbf6f052 5397 {
770ae6cc 5398 if (! host_integerp (size_tree, 1))
e7c33f54
RK
5399 mode = BLKmode, *pbitsize = -1;
5400 else
770ae6cc 5401 *pbitsize = tree_low_cst (size_tree, 1);
bbf6f052
RK
5402 }
5403
5404 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5405 and find the ultimate containing object. */
bbf6f052
RK
5406 while (1)
5407 {
770ae6cc
RK
5408 if (TREE_CODE (exp) == BIT_FIELD_REF)
5409 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5410 else if (TREE_CODE (exp) == COMPONENT_REF)
bbf6f052 5411 {
770ae6cc
RK
5412 tree field = TREE_OPERAND (exp, 1);
5413 tree this_offset = DECL_FIELD_OFFSET (field);
bbf6f052 5414
e7f3c83f
RK
5415 /* If this field hasn't been filled in yet, don't go
5416 past it. This should only happen when folding expressions
5417 made during type construction. */
770ae6cc 5418 if (this_offset == 0)
e7f3c83f 5419 break;
7a6cdb44 5420 else if (CONTAINS_PLACEHOLDER_P (this_offset))
770ae6cc 5421 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
e7f3c83f 5422
7156dead 5423 offset = size_binop (PLUS_EXPR, offset, this_offset);
770ae6cc
RK
5424 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5425 DECL_FIELD_BIT_OFFSET (field));
e6d8c385 5426
a06ef755 5427 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
bbf6f052 5428 }
7156dead 5429
b4e3fabb
RK
5430 else if (TREE_CODE (exp) == ARRAY_REF
5431 || TREE_CODE (exp) == ARRAY_RANGE_REF)
bbf6f052 5432 {
742920c7 5433 tree index = TREE_OPERAND (exp, 1);
b4e3fabb
RK
5434 tree array = TREE_OPERAND (exp, 0);
5435 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
770ae6cc 5436 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
b4e3fabb 5437 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
742920c7 5438
770ae6cc
RK
5439 /* We assume all arrays have sizes that are a multiple of a byte.
5440 First subtract the lower bound, if any, in the type of the
5441 index, then convert to sizetype and multiply by the size of the
5442 array element. */
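/* For example, for an ARRAY_REF a[i] whose elements are 4-byte ints
   and whose domain starts at 1, this computes (i - 1) * 4 in sizetype
   and adds it to the running OFFSET.  */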
5443 if (low_bound != 0 && ! integer_zerop (low_bound))
5444 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5445 index, low_bound));
f8dac6eb 5446
7156dead
RK
5447 /* If the index has a self-referential type, pass it to a
5448 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5449 component to one. */
7a6cdb44 5450 if (CONTAINS_PLACEHOLDER_P (index))
770ae6cc 5451 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
7a6cdb44 5452 if (CONTAINS_PLACEHOLDER_P (unit_size))
b4e3fabb 5453 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
742920c7 5454
770ae6cc
RK
5455 offset = size_binop (PLUS_EXPR, offset,
5456 size_binop (MULT_EXPR,
5457 convert (sizetype, index),
7156dead 5458 unit_size));
bbf6f052 5459 }
7156dead 5460
738cc472
RK
5461 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5462 {
70072ed9
RK
5463 tree new = find_placeholder (exp, &placeholder_ptr);
5464
5465 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5466 We might have been called from tree optimization where we
5467 haven't set up an object yet. */
5468 if (new == 0)
5469 break;
5470 else
5471 exp = new;
5472
738cc472
RK
5473 continue;
5474 }
c1853da7
RK
5475
5476 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5477 conversions that don't change the mode, and all view conversions
5478 except those that need to "step up" the alignment. */
bbf6f052 5479 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
c1853da7
RK
5480 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5481 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5482 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5483 && STRICT_ALIGNMENT
5484 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5485 < BIGGEST_ALIGNMENT)
5486 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5487 || TYPE_ALIGN_OK (TREE_TYPE
5488 (TREE_OPERAND (exp, 0))))))
bbf6f052
RK
5489 && ! ((TREE_CODE (exp) == NOP_EXPR
5490 || TREE_CODE (exp) == CONVERT_EXPR)
5491 && (TYPE_MODE (TREE_TYPE (exp))
5492 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5493 break;
7bb0943f
RS
5494
5495 /* If any reference in the chain is volatile, the effect is volatile. */
5496 if (TREE_THIS_VOLATILE (exp))
5497 *pvolatilep = 1;
839c4796 5498
bbf6f052
RK
5499 exp = TREE_OPERAND (exp, 0);
5500 }
5501
770ae6cc
RK
5502 /* If OFFSET is constant, see if we can return the whole thing as a
5503 constant bit position. Otherwise, split it up. */
5504 if (host_integerp (offset, 0)
5505 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5506 bitsize_unit_node))
5507 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5508 && host_integerp (tem, 0))
5509 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5510 else
5511 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
b50d17a1 5512
bbf6f052 5513 *pmode = mode;
bbf6f052
RK
5514 return exp;
5515}
921b3427 5516
ed239f5a
RK
5517/* Return 1 if T is an expression that get_inner_reference handles. */
5518
5519int
502b8322 5520handled_component_p (tree t)
ed239f5a
RK
5521{
5522 switch (TREE_CODE (t))
5523 {
5524 case BIT_FIELD_REF:
5525 case COMPONENT_REF:
5526 case ARRAY_REF:
5527 case ARRAY_RANGE_REF:
5528 case NON_LVALUE_EXPR:
5529 case VIEW_CONVERT_EXPR:
5530 return 1;
5531
1a8c4ca6
EB
5532 /* ??? Sure they are handled, but get_inner_reference may return
5533 a different PBITSIZE, depending upon whether the expression is
5534 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
ed239f5a
RK
5535 case NOP_EXPR:
5536 case CONVERT_EXPR:
5537 return (TYPE_MODE (TREE_TYPE (t))
5538 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5539
5540 default:
5541 return 0;
5542 }
5543}
bbf6f052 5544\f
3fe44edd
RK
5545/* Given an rtx VALUE that may contain additions and multiplications, return
5546 an equivalent value that just refers to a register, memory, or constant.
5547 This is done by generating instructions to perform the arithmetic and
5548 returning a pseudo-register containing the value.
c45a13a6
RK
5549
5550 The returned value may be a REG, SUBREG, MEM or constant. */
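/* For example, given (plus (reg 100) (const_int 4)) this emits an add
   into a fresh pseudo and returns that pseudo; a value that is already
   a register, memory reference or constant is returned unchanged.  */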
bbf6f052
RK
5551
5552rtx
502b8322 5553force_operand (rtx value, rtx target)
bbf6f052 5554{
8a28dbcc 5555 rtx op1, op2;
bbf6f052 5556 /* Use subtarget as the target for operand 0 of a binary operation. */
b3694847 5557 rtx subtarget = get_subtarget (target);
8a28dbcc 5558 enum rtx_code code = GET_CODE (value);
bbf6f052 5559
8b015896 5560 /* Check for a PIC address load. */
8a28dbcc 5561 if ((code == PLUS || code == MINUS)
8b015896
RH
5562 && XEXP (value, 0) == pic_offset_table_rtx
5563 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5564 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5565 || GET_CODE (XEXP (value, 1)) == CONST))
5566 {
5567 if (!subtarget)
5568 subtarget = gen_reg_rtx (GET_MODE (value));
5569 emit_move_insn (subtarget, value);
5570 return subtarget;
5571 }
5572
8a28dbcc 5573 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
bbf6f052 5574 {
8a28dbcc
JH
5575 if (!target)
5576 target = gen_reg_rtx (GET_MODE (value));
ce0f3925 5577 convert_move (target, force_operand (XEXP (value, 0), NULL),
8a28dbcc
JH
5578 code == ZERO_EXTEND);
5579 return target;
bbf6f052
RK
5580 }
5581
8a28dbcc 5582 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
bbf6f052
RK
5583 {
5584 op2 = XEXP (value, 1);
8a28dbcc 5585 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
bbf6f052 5586 subtarget = 0;
8a28dbcc 5587 if (code == MINUS && GET_CODE (op2) == CONST_INT)
bbf6f052 5588 {
8a28dbcc 5589 code = PLUS;
bbf6f052
RK
5590 op2 = negate_rtx (GET_MODE (value), op2);
5591 }
5592
5593 /* Check for an addition with OP2 a constant integer and our first
8a28dbcc
JH
5594 operand a PLUS of a virtual register and something else. In that
5595 case, we want to emit the sum of the virtual register and the
5596 constant first and then add the other value. This allows virtual
5597 register instantiation to simply modify the constant rather than
5598 creating another one around this addition. */
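/* For example, (plus (plus (virtual-stack-vars) (reg)) (const_int 8))
   is rearranged so that virtual-stack-vars + 8 is computed first;
   instantiation can then fold the 8 into the frame offset instead of
   needing a separate add.  */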
5599 if (code == PLUS && GET_CODE (op2) == CONST_INT
bbf6f052
RK
5600 && GET_CODE (XEXP (value, 0)) == PLUS
5601 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5602 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5603 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5604 {
8a28dbcc
JH
5605 rtx temp = expand_simple_binop (GET_MODE (value), code,
5606 XEXP (XEXP (value, 0), 0), op2,
5607 subtarget, 0, OPTAB_LIB_WIDEN);
5608 return expand_simple_binop (GET_MODE (value), code, temp,
5609 force_operand (XEXP (XEXP (value,
5610 0), 1), 0),
5611 target, 0, OPTAB_LIB_WIDEN);
bbf6f052 5612 }
3a94c984 5613
8a28dbcc
JH
5614 op1 = force_operand (XEXP (value, 0), subtarget);
5615 op2 = force_operand (op2, NULL_RTX);
5616 switch (code)
5617 {
5618 case MULT:
5619 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5620 case DIV:
5621 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5622 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5623 target, 1, OPTAB_LIB_WIDEN);
5624 else
5625 return expand_divmod (0,
5626 FLOAT_MODE_P (GET_MODE (value))
5627 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5628 GET_MODE (value), op1, op2, target, 0);
5629 break;
5630 case MOD:
5631 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5632 target, 0);
5633 break;
5634 case UDIV:
5635 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5636 target, 1);
5637 break;
5638 case UMOD:
5639 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5640 target, 1);
5641 break;
5642 case ASHIFTRT:
5643 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5644 target, 0, OPTAB_LIB_WIDEN);
5645 break;
5646 default:
5647 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5648 target, 1, OPTAB_LIB_WIDEN);
5649 }
5650 }
5651 if (GET_RTX_CLASS (code) == '1')
5652 {
5653 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5654 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
bbf6f052 5655 }
34e81b5a
RK
5656
5657#ifdef INSN_SCHEDULING
5658 /* On machines that have insn scheduling, we want all memory references to be
5659 explicit, so we need to deal with such paradoxical SUBREGs. */
5660 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5661 && (GET_MODE_SIZE (GET_MODE (value))
5662 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5663 value
5664 = simplify_gen_subreg (GET_MODE (value),
5665 force_reg (GET_MODE (SUBREG_REG (value)),
5666 force_operand (SUBREG_REG (value),
5667 NULL_RTX)),
5668 GET_MODE (SUBREG_REG (value)),
5669 SUBREG_BYTE (value));
5670#endif
5671
bbf6f052
RK
5672 return value;
5673}
5674\f
bbf6f052 5675/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
5676 EXP can reference X, which is being modified. TOP_P is nonzero if this
5677 call is going to be used to determine whether we need a temporary
ff439b5f
CB
5678 for EXP, as opposed to a recursive call to this function.
5679
5680 It is always safe for this routine to return zero since it merely
5681 searches for optimization opportunities. */
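/* For example, when the right-hand side of an assignment mentions the
   variable being assigned, a zero return here makes the caller expand
   the right-hand side into a temporary rather than directly into X.  */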
bbf6f052 5682
8f17b5c5 5683int
502b8322 5684safe_from_p (rtx x, tree exp, int top_p)
bbf6f052
RK
5685{
5686 rtx exp_rtl = 0;
5687 int i, nops;
1da68f56 5688 static tree save_expr_list;
bbf6f052 5689
6676e72f
RK
5690 if (x == 0
5691 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
5692 have no way of allocating temporaries of variable size
5693 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5694 So we assume here that something at a higher level has prevented a
f4510f37 5695 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4 5696 do this when X is BLKmode and when we are at the top level. */
d0f062fb 5697 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
f4510f37 5698 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
5699 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5700 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5701 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5702 != INTEGER_CST)
1da68f56
RK
5703 && GET_MODE (x) == BLKmode)
5704 /* If X is in the outgoing argument area, it is always safe. */
5705 || (GET_CODE (x) == MEM
5706 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5707 || (GET_CODE (XEXP (x, 0)) == PLUS
5708 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
bbf6f052
RK
5709 return 1;
5710
5711 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5712 find the underlying pseudo. */
5713 if (GET_CODE (x) == SUBREG)
5714 {
5715 x = SUBREG_REG (x);
5716 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5717 return 0;
5718 }
5719
1da68f56
RK
5720 /* A SAVE_EXPR might appear many times in the expression passed to the
5721 top-level safe_from_p call, and if it has a complex subexpression,
5722 examining it multiple times could result in a combinatorial explosion.
7ef0daad 5723 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
1da68f56
RK
5724 with optimization took about 28 minutes to compile -- even though it was
5725 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5726 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5727 we have processed. Note that the only test of top_p was above. */
5728
5729 if (top_p)
5730 {
5731 int rtn;
5732 tree t;
5733
5734 save_expr_list = 0;
5735
5736 rtn = safe_from_p (x, exp, 0);
5737
5738 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5739 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5740
5741 return rtn;
5742 }
bbf6f052 5743
1da68f56 5744 /* Now look at our tree code and possibly recurse. */
bbf6f052
RK
5745 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5746 {
5747 case 'd':
a9772b60 5748 exp_rtl = DECL_RTL_IF_SET (exp);
bbf6f052
RK
5749 break;
5750
5751 case 'c':
5752 return 1;
5753
5754 case 'x':
5755 if (TREE_CODE (exp) == TREE_LIST)
f8d4be57
CE
5756 {
5757 while (1)
5758 {
5759 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5760 return 0;
5761 exp = TREE_CHAIN (exp);
5762 if (!exp)
5763 return 1;
5764 if (TREE_CODE (exp) != TREE_LIST)
5765 return safe_from_p (x, exp, 0);
5766 }
5767 }
ff439b5f
CB
5768 else if (TREE_CODE (exp) == ERROR_MARK)
5769 return 1; /* An already-visited SAVE_EXPR? */
bbf6f052
RK
5770 else
5771 return 0;
5772
bbf6f052
RK
5773 case '2':
5774 case '<':
f8d4be57
CE
5775 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5776 return 0;
5777 /* FALLTHRU */
5778
5779 case '1':
5780 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052
RK
5781
5782 case 'e':
5783 case 'r':
5784 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5785 the expression. If it is set, we conflict iff we are that rtx or
5786 both are in memory. Otherwise, we check all operands of the
5787 expression recursively. */
5788
5789 switch (TREE_CODE (exp))
5790 {
5791 case ADDR_EXPR:
70072ed9
RK
5792 /* If the operand is static or we are static, we can't conflict.
5793 Likewise if we don't conflict with the operand at all. */
5794 if (staticp (TREE_OPERAND (exp, 0))
5795 || TREE_STATIC (exp)
5796 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5797 return 1;
5798
5799 /* Otherwise, the only way this can conflict is if we are taking
5800 the address of a DECL whose address is part of X, which is
5801 very rare. */
5802 exp = TREE_OPERAND (exp, 0);
5803 if (DECL_P (exp))
5804 {
5805 if (!DECL_RTL_SET_P (exp)
5806 || GET_CODE (DECL_RTL (exp)) != MEM)
5807 return 0;
5808 else
5809 exp_rtl = XEXP (DECL_RTL (exp), 0);
5810 }
5811 break;
bbf6f052
RK
5812
5813 case INDIRECT_REF:
1da68f56
RK
5814 if (GET_CODE (x) == MEM
5815 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5816 get_alias_set (exp)))
bbf6f052
RK
5817 return 0;
5818 break;
5819
5820 case CALL_EXPR:
f9808f81
MM
5821 /* Assume that the call will clobber all hard registers and
5822 all of memory. */
5823 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5824 || GET_CODE (x) == MEM)
5825 return 0;
bbf6f052
RK
5826 break;
5827
5828 case RTL_EXPR:
3bb5826a
RK
5829 /* If a sequence exists, we would have to scan every instruction
5830 in the sequence to see if it was safe. This is probably not
5831 worthwhile. */
5832 if (RTL_EXPR_SEQUENCE (exp))
bbf6f052
RK
5833 return 0;
5834
3bb5826a 5835 exp_rtl = RTL_EXPR_RTL (exp);
bbf6f052
RK
5836 break;
5837
5838 case WITH_CLEANUP_EXPR:
6ad7895a 5839 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
bbf6f052
RK
5840 break;
5841
5dab5552 5842 case CLEANUP_POINT_EXPR:
e5e809f4 5843 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5dab5552 5844
bbf6f052
RK
5845 case SAVE_EXPR:
5846 exp_rtl = SAVE_EXPR_RTL (exp);
ff439b5f
CB
5847 if (exp_rtl)
5848 break;
5849
1da68f56
RK
5850 /* If we've already scanned this, don't do it again. Otherwise,
5851 show we've scanned it and record for clearing the flag if we're
5852 going on. */
5853 if (TREE_PRIVATE (exp))
5854 return 1;
ff439b5f 5855
1da68f56
RK
5856 TREE_PRIVATE (exp) = 1;
5857 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
ff59bfe6 5858 {
1da68f56
RK
5859 TREE_PRIVATE (exp) = 0;
5860 return 0;
ff59bfe6 5861 }
1da68f56
RK
5862
5863 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
ff439b5f 5864 return 1;
bbf6f052 5865
8129842c
RS
5866 case BIND_EXPR:
5867 /* The only operand we look at is operand 1. The rest aren't
5868 part of the expression. */
e5e809f4 5869 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
8129842c 5870
e9a25f70
JL
5871 default:
5872 break;
bbf6f052
RK
5873 }
5874
5875 /* If we have an rtx, we do not need to scan our operands. */
5876 if (exp_rtl)
5877 break;
5878
8f17b5c5 5879 nops = first_rtl_op (TREE_CODE (exp));
bbf6f052
RK
5880 for (i = 0; i < nops; i++)
5881 if (TREE_OPERAND (exp, i) != 0
e5e809f4 5882 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052 5883 return 0;
8f17b5c5
MM
5884
5885 /* If this is a language-specific tree code, it may require
5886 special handling. */
dbbbbf3b
JDA
5887 if ((unsigned int) TREE_CODE (exp)
5888 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
ac79cd5a 5889 && !(*lang_hooks.safe_from_p) (x, exp))
8f17b5c5 5890 return 0;
bbf6f052
RK
5891 }
5892
5893 /* If we have an rtl, find any enclosed object. Then see if we conflict
5894 with it. */
5895 if (exp_rtl)
5896 {
5897 if (GET_CODE (exp_rtl) == SUBREG)
5898 {
5899 exp_rtl = SUBREG_REG (exp_rtl);
5900 if (GET_CODE (exp_rtl) == REG
5901 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5902 return 0;
5903 }
5904
5905 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
1da68f56 5906 are memory and they conflict. */
bbf6f052
RK
5907 return ! (rtx_equal_p (x, exp_rtl)
5908 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
21117a17 5909 && true_dependence (exp_rtl, VOIDmode, x,
1da68f56 5910 rtx_addr_varies_p)));
bbf6f052
RK
5911 }
5912
5913 /* If we reach here, it is safe. */
5914 return 1;
5915}
5916
01c8a7c8
RK
5917/* Subroutine of expand_expr: return rtx if EXP is a
5918 variable or parameter; else return 0. */
5919
5920static rtx
502b8322 5921var_rtx (tree exp)
01c8a7c8
RK
5922{
5923 STRIP_NOPS (exp);
5924 switch (TREE_CODE (exp))
5925 {
5926 case PARM_DECL:
5927 case VAR_DECL:
5928 return DECL_RTL (exp);
5929 default:
5930 return 0;
5931 }
5932}
dbecbbe4
JL
5933
5934#ifdef MAX_INTEGER_COMPUTATION_MODE
400500c4 5935
dbecbbe4 5936void
502b8322 5937check_max_integer_computation_mode (tree exp)
dbecbbe4 5938{
5f652c07 5939 enum tree_code code;
dbecbbe4
JL
5940 enum machine_mode mode;
5941
5f652c07
JM
5942 /* Strip any NOPs that don't change the mode. */
5943 STRIP_NOPS (exp);
5944 code = TREE_CODE (exp);
5945
71bca506
JL
5946 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5947 if (code == NOP_EXPR
5948 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5949 return;
5950
dbecbbe4
JL
5951 /* First check the type of the overall operation. We need only look at
5952 unary, binary and relational operations. */
5953 if (TREE_CODE_CLASS (code) == '1'
5954 || TREE_CODE_CLASS (code) == '2'
5955 || TREE_CODE_CLASS (code) == '<')
5956 {
5957 mode = TYPE_MODE (TREE_TYPE (exp));
5958 if (GET_MODE_CLASS (mode) == MODE_INT
5959 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 5960 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
5961 }
5962
5963 /* Check operand of a unary op. */
5964 if (TREE_CODE_CLASS (code) == '1')
5965 {
5966 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5967 if (GET_MODE_CLASS (mode) == MODE_INT
5968 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 5969 internal_error ("unsupported wide integer operation");
dbecbbe4 5970 }
3a94c984 5971
dbecbbe4
JL
5972 /* Check operands of a binary/comparison op. */
5973 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5974 {
5975 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5976 if (GET_MODE_CLASS (mode) == MODE_INT
5977 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 5978 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
5979
5980 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5981 if (GET_MODE_CLASS (mode) == MODE_INT
5982 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 5983 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
5984 }
5985}
5986#endif
14a774a9 5987\f
0d4903b8
RK
5988/* Return the highest power of two that EXP is known to be a multiple of.
5989 This is used in updating alignment of MEMs in array references. */
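/* For example, for the tree i * 4 + 8 the MULT_EXPR case yields 4
   (an unknown factor contributes 1), the constant 8 contributes 8,
   and the PLUS_EXPR case takes the minimum, so the result is 4.  */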
5990
9ceca302 5991static unsigned HOST_WIDE_INT
502b8322 5992highest_pow2_factor (tree exp)
0d4903b8 5993{
9ceca302 5994 unsigned HOST_WIDE_INT c0, c1;
0d4903b8
RK
5995
5996 switch (TREE_CODE (exp))
5997 {
5998 case INTEGER_CST:
e0f1be5c
JJ
5999 /* We can find the lowest bit that's a one. If the low
6000 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6001 We need to handle this case since we can find it in a COND_EXPR,
a98ebe2e 6002 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
e0f1be5c 6003 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
3a531a8b 6004 later ICE. */
e0f1be5c 6005 if (TREE_CONSTANT_OVERFLOW (exp))
1ed1b4fb 6006 return BIGGEST_ALIGNMENT;
e0f1be5c 6007 else
0d4903b8 6008 {
e0f1be5c
JJ
6009 /* Note: tree_low_cst is intentionally not used here,
6010 we don't care about the upper bits. */
6011 c0 = TREE_INT_CST_LOW (exp);
6012 c0 &= -c0;
6013 return c0 ? c0 : BIGGEST_ALIGNMENT;
0d4903b8
RK
6014 }
6015 break;
6016
65a07688 6017 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
0d4903b8
RK
6018 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6019 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6020 return MIN (c0, c1);
6021
6022 case MULT_EXPR:
6023 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6024 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6025 return c0 * c1;
6026
6027 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6028 case CEIL_DIV_EXPR:
65a07688
RK
6029 if (integer_pow2p (TREE_OPERAND (exp, 1))
6030 && host_integerp (TREE_OPERAND (exp, 1), 1))
6031 {
6032 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6033 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6034 return MAX (1, c0 / c1);
6035 }
6036 break;
0d4903b8
RK
6037
6038 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
65a07688 6039 case SAVE_EXPR: case WITH_RECORD_EXPR:
0d4903b8
RK
6040 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6041
65a07688
RK
6042 case COMPOUND_EXPR:
6043 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6044
0d4903b8
RK
6045 case COND_EXPR:
6046 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6047 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6048 return MIN (c0, c1);
6049
6050 default:
6051 break;
6052 }
6053
6054 return 1;
6055}
818c0c94
RH
6056
6057/* Similar, except that it is known that the expression must be a multiple
6058 of the alignment of TYPE. */
6059
9ceca302 6060static unsigned HOST_WIDE_INT
502b8322 6061highest_pow2_factor_for_type (tree type, tree exp)
818c0c94 6062{
9ceca302 6063 unsigned HOST_WIDE_INT type_align, factor;
818c0c94
RH
6064
6065 factor = highest_pow2_factor (exp);
6066 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6067 return MAX (factor, type_align);
6068}
0d4903b8 6069\f
f47e9b4e
RK
6070/* Return an object on the placeholder list that matches EXP, a
6071 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
738cc472 6072 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
70072ed9
RK
6073 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6074 is a location which initially points to a starting location in the
738cc472
RK
6075 placeholder list (zero means start of the list) and where a pointer into
6076 the placeholder list at which the object is found is placed. */
f47e9b4e
RK
6077
6078tree
502b8322 6079find_placeholder (tree exp, tree *plist)
f47e9b4e
RK
6080{
6081 tree type = TREE_TYPE (exp);
6082 tree placeholder_expr;
6083
738cc472
RK
6084 for (placeholder_expr
6085 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6086 placeholder_expr != 0;
f47e9b4e
RK
6087 placeholder_expr = TREE_CHAIN (placeholder_expr))
6088 {
6089 tree need_type = TYPE_MAIN_VARIANT (type);
6090 tree elt;
6091
6092 /* Find the outermost reference that is of the type we want. If none,
6093 see if any object has a type that is a pointer to the type we
6094 want. */
6095 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6096 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6097 || TREE_CODE (elt) == COND_EXPR)
6098 ? TREE_OPERAND (elt, 1)
6099 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6100 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6101 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6102 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6103 ? TREE_OPERAND (elt, 0) : 0))
6104 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6105 {
6106 if (plist)
6107 *plist = placeholder_expr;
6108 return elt;
6109 }
6110
6111 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6112 elt
6113 = ((TREE_CODE (elt) == COMPOUND_EXPR
6114 || TREE_CODE (elt) == COND_EXPR)
6115 ? TREE_OPERAND (elt, 1)
6116 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6117 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6118 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6119 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6120 ? TREE_OPERAND (elt, 0) : 0))
6121 if (POINTER_TYPE_P (TREE_TYPE (elt))
6122 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6123 == need_type))
6124 {
6125 if (plist)
6126 *plist = placeholder_expr;
6127 return build1 (INDIRECT_REF, need_type, elt);
6128 }
6129 }
6130
70072ed9 6131 return 0;
f47e9b4e 6132}
eb698c58
RS
6133
6134/* Subroutine of expand_expr. Expand the two operands of a binary
6135 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6136 The value may be stored in TARGET if TARGET is nonzero. The
6137 MODIFIER argument is as documented by expand_expr. */
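/* A typical caller expands the two operands of a binary tree node:
   expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		    subtarget, &op0, &op1, EXPAND_NORMAL);  */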
6138
6139static void
6140expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6141 enum expand_modifier modifier)
6142{
6143 if (! safe_from_p (target, exp1, 1))
6144 target = 0;
6145 if (operand_equal_p (exp0, exp1, 0))
6146 {
6147 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6148 *op1 = copy_rtx (*op0);
6149 }
6150 else
6151 {
c67e6e14
RS
6152 /* If we need to preserve evaluation order, copy exp0 into its own
6153 temporary variable so that it can't be clobbered by exp1. */
6154 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6155 exp0 = save_expr (exp0);
eb698c58
RS
6156 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6157 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6158 }
6159}
6160
f47e9b4e 6161\f
bbf6f052
RK
6162/* expand_expr: generate code for computing expression EXP.
6163 An rtx for the computed value is returned. The value is never null.
6164 In the case of a void EXP, const0_rtx is returned.
6165
6166 The value may be stored in TARGET if TARGET is nonzero.
6167 TARGET is just a suggestion; callers must assume that
6168 the rtx returned may not be the same as TARGET.
6169
6170 If TARGET is CONST0_RTX, it means that the value will be ignored.
6171
6172 If TMODE is not VOIDmode, it suggests generating the
6173 result in mode TMODE. But this is done only when convenient.
6174 Otherwise, TMODE is ignored and the value generated in its natural mode.
6175 TMODE is just a suggestion; callers must assume that
6176 the rtx returned may not have mode TMODE.
6177
d6a5ac33
RK
6178 Note that TARGET may have neither TMODE nor MODE. In that case, it
6179 probably will not be used.
bbf6f052
RK
6180
6181 If MODIFIER is EXPAND_SUM then when EXP is an addition
6182 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6183 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6184 products as above, or REG or MEM, or constant.
6185 Ordinarily in such cases we would output mul or add instructions
6186 and then return a pseudo reg containing the sum.
6187
6188 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6189 it also marks a label as absolutely required (it can't be dead).
26fcb35a 6190 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
6191 This is used for outputting expressions used in initializers.
6192
6193 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6194 with a constant address even if that address is not normally legitimate.
8403445a
AM
6195 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6196
6197 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6198 a call parameter. Such targets require special care as we haven't yet
6199 marked TARGET so that it's safe from being trashed by libcalls. We
6200 don't want to use TARGET for anything but the final result;
6201 Intermediate values must go elsewhere. Additionally, calls to
6202 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
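/* For example, expanding i * 4 + 16 with EXPAND_SUM may return
   (plus (mult (reg) (const_int 4)) (const_int 16)) directly, leaving
   it to the caller to decide whether the sum must be reduced to a
   single register.  */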
bbf6f052
RK
6203
6204rtx
eb698c58
RS
6205expand_expr (tree exp, rtx target, enum machine_mode tmode,
6206 enum expand_modifier modifier)
 6207 {
 6208 rtx op0, op1, temp;
 6209 tree type = TREE_TYPE (exp);
 6210 int unsignedp = TREE_UNSIGNED (type);
 6211 enum machine_mode mode;
 6212 enum tree_code code = TREE_CODE (exp);
 6213 optab this_optab;
 6214 rtx subtarget, original_target;
 6215 int ignore;
 6216 tree context;
 6217
 6218 /* Handle ERROR_MARK before anybody tries to access its type. */
 6219 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6220 {
6221 op0 = CONST0_RTX (tmode);
6222 if (op0 != 0)
6223 return op0;
6224 return const0_rtx;
6225 }
6226
6227 mode = TYPE_MODE (type);
6228 /* Use subtarget as the target for operand 0 of a binary operation. */
296b4ed9 6229 subtarget = get_subtarget (target);
6230 original_target = target;
6231 ignore = (target == const0_rtx
6232 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6233 || code == CONVERT_EXPR || code == REFERENCE_EXPR
 6234 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
 6235 && TREE_CODE (type) == VOID_TYPE));
6236
6237 /* If we are going to ignore this result, we need only do something
6238 if there is a side-effect somewhere in the expression. If there
6239 is, short-circuit the most common cases here. Note that we must
6240 not call expand_expr with anything but const0_rtx in case this
6241 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
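  /* For example, expanding the statement "(void) (x + f ());" reaches
     this point with IGNORE set: the sum itself is never computed; the
     operands are simply expanded with const0_rtx as the target so that
     the call to f () still happens for its side effects.
     (Illustrative note, not part of the original source.)  */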
bbf6f052 6242
6243 if (ignore)
6244 {
6245 if (! TREE_SIDE_EFFECTS (exp))
6246 return const0_rtx;
6247
6248 /* Ensure we reference a volatile object even if value is ignored, but
6249 don't do this if all we are doing is taking its address. */
6250 if (TREE_THIS_VOLATILE (exp)
6251 && TREE_CODE (exp) != FUNCTION_DECL
6252 && mode != VOIDmode && mode != BLKmode
6253 && modifier != EXPAND_CONST_ADDRESS)
dd27116b 6254 {
37a08a29 6255 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6256 if (GET_CODE (temp) == MEM)
6257 temp = copy_to_reg (temp);
6258 return const0_rtx;
6259 }
6260
6261 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6262 || code == INDIRECT_REF || code == BUFFER_REF)
6263 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6264 modifier);
6265
14a774a9 6266 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
b4e3fabb 6267 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
dd27116b 6268 {
6269 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6270 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6271 return const0_rtx;
6272 }
6273 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6274 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6275 /* If the second operand has no side effects, just evaluate
0f41302f 6276 the first. */
6277 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6278 modifier);
6279 else if (code == BIT_FIELD_REF)
6280 {
6281 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6282 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6283 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6284 return const0_rtx;
6285 }
37a08a29 6286
90764a87 6287 target = 0;
dd27116b 6288 }
bbf6f052 6289
dbecbbe4 6290#ifdef MAX_INTEGER_COMPUTATION_MODE
5f652c07 6291 /* Only check stuff here if the mode we want is different from the mode
fbe5a4a6 6292 of the expression; if it's the same, check_max_integer_computation_mode
6293 will handle it. Do we really need to check this stuff at all? */
6294
ce3c0b53 6295 if (target
5f652c07 6296 && GET_MODE (target) != mode
ce3c0b53
JL
6297 && TREE_CODE (exp) != INTEGER_CST
6298 && TREE_CODE (exp) != PARM_DECL
ee06cc21 6299 && TREE_CODE (exp) != ARRAY_REF
b4e3fabb 6300 && TREE_CODE (exp) != ARRAY_RANGE_REF
ee06cc21
JL
6301 && TREE_CODE (exp) != COMPONENT_REF
6302 && TREE_CODE (exp) != BIT_FIELD_REF
6303 && TREE_CODE (exp) != INDIRECT_REF
6bcd94ae 6304 && TREE_CODE (exp) != CALL_EXPR
6ab46dff
GRK
6305 && TREE_CODE (exp) != VAR_DECL
6306 && TREE_CODE (exp) != RTL_EXPR)
dbecbbe4
JL
6307 {
6308 enum machine_mode mode = GET_MODE (target);
6309
6310 if (GET_MODE_CLASS (mode) == MODE_INT
6311 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6312 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
6313 }
6314
5f652c07
JM
6315 if (tmode != mode
6316 && TREE_CODE (exp) != INTEGER_CST
ce3c0b53 6317 && TREE_CODE (exp) != PARM_DECL
ee06cc21 6318 && TREE_CODE (exp) != ARRAY_REF
b4e3fabb 6319 && TREE_CODE (exp) != ARRAY_RANGE_REF
ee06cc21
JL
6320 && TREE_CODE (exp) != COMPONENT_REF
6321 && TREE_CODE (exp) != BIT_FIELD_REF
6322 && TREE_CODE (exp) != INDIRECT_REF
ce3c0b53 6323 && TREE_CODE (exp) != VAR_DECL
6bcd94ae 6324 && TREE_CODE (exp) != CALL_EXPR
6ab46dff 6325 && TREE_CODE (exp) != RTL_EXPR
71bca506 6326 && GET_MODE_CLASS (tmode) == MODE_INT
dbecbbe4 6327 && tmode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6328 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
6329
6330 check_max_integer_computation_mode (exp);
6331#endif
6332
 6333 /* If we will do cse, generate all results into pseudo registers
 6334 since 1) that allows cse to find more things
 6335 and 2) otherwise cse could produce an insn the machine
 6336 cannot support. An exception is a CONSTRUCTOR into a multi-word
 6337 MEM: that's much more likely to be most efficient into the MEM.
 6338 Another is a CALL_EXPR which must return in memory. */
 6339
bbf6f052 6340 if (! cse_not_expected && mode != BLKmode && target
c24ae149 6341 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
4977bab6 6342 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
61f71b34 6343 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
8403445a 6344 target = 0;
bbf6f052 6345
bbf6f052
RK
6346 switch (code)
6347 {
6348 case LABEL_DECL:
b552441b
RS
6349 {
6350 tree function = decl_function_context (exp);
046e4e36
ZW
6351 /* Labels in containing functions, or labels used from initializers,
6352 must be forced. */
6353 if (modifier == EXPAND_INITIALIZER
6354 || (function != current_function_decl
6355 && function != inline_function_decl
6356 && function != 0))
6357 temp = force_label_rtx (exp);
ab87f8c8 6358 else
046e4e36 6359 temp = label_rtx (exp);
c5c76735 6360
046e4e36 6361 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
d0977240
RK
6362 if (function != current_function_decl
6363 && function != inline_function_decl && function != 0)
26fcb35a
RS
6364 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6365 return temp;
b552441b 6366 }
bbf6f052
RK
6367
6368 case PARM_DECL:
1877be45 6369 if (!DECL_RTL_SET_P (exp))
bbf6f052 6370 {
ddd2d57e 6371 error ("%Jprior parameter's size depends on '%D'", exp, exp);
4af3895e 6372 return CONST0_RTX (mode);
bbf6f052
RK
6373 }
6374
0f41302f 6375 /* ... fall through ... */
d6a5ac33 6376
bbf6f052 6377 case VAR_DECL:
2dca20cd
RS
6378 /* If a static var's type was incomplete when the decl was written,
6379 but the type is complete now, lay out the decl now. */
ca06cfe6
RH
6380 if (DECL_SIZE (exp) == 0
6381 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
2dca20cd 6382 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
a46666a9 6383 layout_decl (exp, 0);
921b3427 6384
0f41302f 6385 /* ... fall through ... */
d6a5ac33 6386
2dca20cd 6387 case FUNCTION_DECL:
bbf6f052
RK
6388 case RESULT_DECL:
6389 if (DECL_RTL (exp) == 0)
6390 abort ();
d6a5ac33 6391
 6392 /* Ensure the variable is marked as used even if it doesn't go through
 6393 a parser. If it hasn't been used yet, write out an external
 6394 definition. */
6395 if (! TREE_USED (exp))
6396 {
6397 assemble_external (exp);
6398 TREE_USED (exp) = 1;
6399 }
6400
dc6d66b3
RK
6401 /* Show we haven't gotten RTL for this yet. */
6402 temp = 0;
6403
bbf6f052
RK
6404 /* Handle variables inherited from containing functions. */
6405 context = decl_function_context (exp);
6406
6407 /* We treat inline_function_decl as an alias for the current function
6408 because that is the inline function whose vars, types, etc.
6409 are being merged into the current function.
6410 See expand_inline_function. */
d6a5ac33 6411
bbf6f052
RK
6412 if (context != 0 && context != current_function_decl
6413 && context != inline_function_decl
6414 /* If var is static, we don't need a static chain to access it. */
6415 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6416 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6417 {
6418 rtx addr;
6419
6420 /* Mark as non-local and addressable. */
81feeecb 6421 DECL_NONLOCAL (exp) = 1;
38ee6ed9
JM
6422 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6423 abort ();
dffd7eb6 6424 (*lang_hooks.mark_addressable) (exp);
bbf6f052
RK
6425 if (GET_CODE (DECL_RTL (exp)) != MEM)
6426 abort ();
6427 addr = XEXP (DECL_RTL (exp), 0);
6428 if (GET_CODE (addr) == MEM)
792760b9
RK
6429 addr
6430 = replace_equiv_address (addr,
6431 fix_lexical_addr (XEXP (addr, 0), exp));
bbf6f052
RK
6432 else
6433 addr = fix_lexical_addr (addr, exp);
3bdf5ad1 6434
792760b9 6435 temp = replace_equiv_address (DECL_RTL (exp), addr);
bbf6f052 6436 }
4af3895e 6437
bbf6f052
RK
6438 /* This is the case of an array whose size is to be determined
6439 from its initializer, while the initializer is still being parsed.
6440 See expand_decl. */
d6a5ac33 6441
dc6d66b3
RK
6442 else if (GET_CODE (DECL_RTL (exp)) == MEM
6443 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
792760b9 6444 temp = validize_mem (DECL_RTL (exp));
d6a5ac33
RK
6445
6446 /* If DECL_RTL is memory, we are in the normal case and either
6447 the address is not valid or it is not a register and -fforce-addr
6448 is specified, get the address into a register. */
6449
dc6d66b3
RK
6450 else if (GET_CODE (DECL_RTL (exp)) == MEM
6451 && modifier != EXPAND_CONST_ADDRESS
6452 && modifier != EXPAND_SUM
6453 && modifier != EXPAND_INITIALIZER
6454 && (! memory_address_p (DECL_MODE (exp),
6455 XEXP (DECL_RTL (exp), 0))
6456 || (flag_force_addr
6457 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
792760b9
RK
6458 temp = replace_equiv_address (DECL_RTL (exp),
6459 copy_rtx (XEXP (DECL_RTL (exp), 0)));
1499e0a8 6460
dc6d66b3 6461 /* If we got something, return it. But first, set the alignment
04956a1a 6462 if the address is a register. */
dc6d66b3
RK
6463 if (temp != 0)
6464 {
6465 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
bdb429a5 6466 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
dc6d66b3
RK
6467
6468 return temp;
6469 }
6470
6471 /* If the mode of DECL_RTL does not match that of the decl, it
6472 must be a promoted value. We return a SUBREG of the wanted mode,
6473 but mark it so that we know that it was already extended. */
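      /* For instance, on a target that promotes SImode variables into
	 DImode registers, DECL_RTL may be a DImode REG while DECL_MODE
	 is SImode; the code below then returns an SImode SUBREG of that
	 register with SUBREG_PROMOTED_VAR_P set, so later users know the
	 value has already been extended.  (Illustrative note, not part
	 of the original source.)  */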
6474
6475 if (GET_CODE (DECL_RTL (exp)) == REG
7254c5fa 6476 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
1499e0a8 6477 {
1499e0a8
RK
6478 /* Get the signedness used for this variable. Ensure we get the
6479 same mode we got when the variable was declared. */
78911e8b 6480 if (GET_MODE (DECL_RTL (exp))
0fb7aeda 6481 != promote_mode (type, DECL_MODE (exp), &unsignedp,
e8dcd824 6482 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
1499e0a8
RK
6483 abort ();
6484
ddef6bc7 6485 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
1499e0a8 6486 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6487 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
1499e0a8
RK
6488 return temp;
6489 }
6490
bbf6f052
RK
6491 return DECL_RTL (exp);
6492
6493 case INTEGER_CST:
d8a50944 6494 temp = immed_double_const (TREE_INT_CST_LOW (exp),
05bccae2 6495 TREE_INT_CST_HIGH (exp), mode);
bbf6f052 6496
d8a50944
RH
6497 /* ??? If overflow is set, fold will have done an incomplete job,
6498 which can result in (plus xx (const_int 0)), which can get
6499 simplified by validate_replace_rtx during virtual register
6500 instantiation, which can result in unrecognizable insns.
6501 Avoid this by forcing all overflows into registers. */
c2e9dc85
RH
6502 if (TREE_CONSTANT_OVERFLOW (exp)
6503 && modifier != EXPAND_INITIALIZER)
d8a50944
RH
6504 temp = force_reg (mode, temp);
6505
6506 return temp;
6507
d744e06e
AH
6508 case VECTOR_CST:
6509 return const_vector_from_tree (exp);
6510
bbf6f052 6511 case CONST_DECL:
8403445a 6512 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
bbf6f052
RK
6513
6514 case REAL_CST:
6515 /* If optimized, generate immediate CONST_DOUBLE
3a94c984
KH
6516 which will be turned into memory by reload if necessary.
6517
bbf6f052
RK
6518 We used to force a register so that loop.c could see it. But
6519 this does not allow gen_* patterns to perform optimizations with
6520 the constants. It also produces two insns in cases like "x = 1.0;".
6521 On most machines, floating-point constants are not permitted in
6522 many insns, so we'd end up copying it to a register in any case.
6523
6524 Now, we do the copying in expand_binop, if appropriate. */
5692c7bc
ZW
6525 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6526 TYPE_MODE (TREE_TYPE (exp)));
bbf6f052
RK
6527
6528 case COMPLEX_CST:
9ad58e09
RS
6529 /* Handle evaluating a complex constant in a CONCAT target. */
6530 if (original_target && GET_CODE (original_target) == CONCAT)
6531 {
6532 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6533 rtx rtarg, itarg;
6534
6535 rtarg = XEXP (original_target, 0);
6536 itarg = XEXP (original_target, 1);
6537
6538 /* Move the real and imaginary parts separately. */
6539 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6540 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6541
6542 if (op0 != rtarg)
6543 emit_move_insn (rtarg, op0);
6544 if (op1 != itarg)
6545 emit_move_insn (itarg, op1);
6546
6547 return original_target;
6548 }
6549
71c0e7fc 6550 /* ... fall through ... */
9ad58e09 6551
bbf6f052 6552 case STRING_CST:
afc6aaab 6553 temp = output_constant_def (exp, 1);
bbf6f052 6554
afc6aaab 6555 /* temp contains a constant address.
bbf6f052
RK
6556 On RISC machines where a constant address isn't valid,
6557 make some insns to get that address into a register. */
afc6aaab 6558 if (modifier != EXPAND_CONST_ADDRESS
bbf6f052
RK
6559 && modifier != EXPAND_INITIALIZER
6560 && modifier != EXPAND_SUM
afc6aaab
ZW
6561 && (! memory_address_p (mode, XEXP (temp, 0))
6562 || flag_force_addr))
6563 return replace_equiv_address (temp,
6564 copy_rtx (XEXP (temp, 0)));
6565 return temp;
bbf6f052 6566
bf1e5319 6567 case EXPR_WITH_FILE_LOCATION:
b24f65cd
APB
6568 {
6569 rtx to_return;
72954a4f
JM
6570 struct file_stack fs;
6571
6572 fs.location = input_location;
6573 fs.next = expr_wfl_stack;
b24f65cd 6574 input_filename = EXPR_WFL_FILENAME (exp);
d479d37f 6575 input_line = EXPR_WFL_LINENO (exp);
72954a4f 6576 expr_wfl_stack = &fs;
b24f65cd 6577 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
0cea056b 6578 emit_line_note (input_location);
6ad7895a 6579 /* Possibly avoid switching back and forth here. */
72954a4f
JM
6580 to_return = expand_expr (EXPR_WFL_NODE (exp),
6581 (ignore ? const0_rtx : target),
6582 tmode, modifier);
6583 if (expr_wfl_stack != &fs)
6584 abort ();
6585 input_location = fs.location;
6586 expr_wfl_stack = fs.next;
b24f65cd
APB
6587 return to_return;
6588 }
bf1e5319 6589
6590 case SAVE_EXPR:
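      /* A SAVE_EXPR wraps a subexpression whose value must be computed
	 only once even though it may be used in several places; the
	 first expansion stores the result in SAVE_EXPR_RTL and later
	 expansions simply return it.  (Illustrative note, not part of
	 the original source.)  */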
6591 context = decl_function_context (exp);
d6a5ac33 6592
d0977240
RK
6593 /* If this SAVE_EXPR was at global context, assume we are an
6594 initialization function and move it into our context. */
6595 if (context == 0)
6596 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6597
bbf6f052
RK
6598 /* We treat inline_function_decl as an alias for the current function
6599 because that is the inline function whose vars, types, etc.
6600 are being merged into the current function.
6601 See expand_inline_function. */
6602 if (context == current_function_decl || context == inline_function_decl)
6603 context = 0;
6604
6605 /* If this is non-local, handle it. */
6606 if (context)
6607 {
d0977240
RK
6608 /* The following call just exists to abort if the context is
6609 not of a containing function. */
6610 find_function_data (context);
6611
bbf6f052
RK
6612 temp = SAVE_EXPR_RTL (exp);
6613 if (temp && GET_CODE (temp) == REG)
6614 {
f29a2bd1 6615 put_var_into_stack (exp, /*rescan=*/true);
bbf6f052
RK
6616 temp = SAVE_EXPR_RTL (exp);
6617 }
6618 if (temp == 0 || GET_CODE (temp) != MEM)
6619 abort ();
792760b9
RK
6620 return
6621 replace_equiv_address (temp,
6622 fix_lexical_addr (XEXP (temp, 0), exp));
bbf6f052
RK
6623 }
6624 if (SAVE_EXPR_RTL (exp) == 0)
6625 {
06089a8b
RK
6626 if (mode == VOIDmode)
6627 temp = const0_rtx;
6628 else
1da68f56
RK
6629 temp = assign_temp (build_qualified_type (type,
6630 (TYPE_QUALS (type)
6631 | TYPE_QUAL_CONST)),
6632 3, 0, 0);
1499e0a8 6633
bbf6f052 6634 SAVE_EXPR_RTL (exp) = temp;
bbf6f052 6635 if (!optimize && GET_CODE (temp) == REG)
38a448ca
RH
6636 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6637 save_expr_regs);
ff78f773
RK
6638
6639 /* If the mode of TEMP does not match that of the expression, it
6640 must be a promoted value. We pass store_expr a SUBREG of the
6641 wanted mode but mark it so that we know that it was already
3ac1a319 6642 extended. */
ff78f773
RK
6643
6644 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6645 {
ddef6bc7 6646 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
3ac1a319 6647 promote_mode (type, mode, &unsignedp, 0);
ff78f773 6648 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6649 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
ff78f773
RK
6650 }
6651
4c7a0be9 6652 if (temp == const0_rtx)
37a08a29 6653 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4c7a0be9 6654 else
8403445a
AM
6655 store_expr (TREE_OPERAND (exp, 0), temp,
6656 modifier == EXPAND_STACK_PARM ? 2 : 0);
e5e809f4
JL
6657
6658 TREE_USED (exp) = 1;
bbf6f052 6659 }
1499e0a8
RK
6660
6661 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6662 must be a promoted value. We return a SUBREG of the wanted mode,
0f41302f 6663 but mark it so that we know that it was already extended. */
1499e0a8
RK
6664
6665 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6666 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6667 {
e70d22c8
RK
6668 /* Compute the signedness and make the proper SUBREG. */
6669 promote_mode (type, mode, &unsignedp, 0);
ddef6bc7 6670 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
1499e0a8 6671 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6672 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
1499e0a8
RK
6673 return temp;
6674 }
6675
bbf6f052
RK
6676 return SAVE_EXPR_RTL (exp);
6677
679163cf
MS
6678 case UNSAVE_EXPR:
6679 {
6680 rtx temp;
6681 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
24965e7a
NB
6682 TREE_OPERAND (exp, 0)
6683 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
679163cf
MS
6684 return temp;
6685 }
6686
b50d17a1 6687 case PLACEHOLDER_EXPR:
e9a25f70 6688 {
f47e9b4e 6689 tree old_list = placeholder_list;
738cc472 6690 tree placeholder_expr = 0;
e9a25f70 6691
f47e9b4e 6692 exp = find_placeholder (exp, &placeholder_expr);
70072ed9
RK
6693 if (exp == 0)
6694 abort ();
6695
f47e9b4e 6696 placeholder_list = TREE_CHAIN (placeholder_expr);
37a08a29 6697 temp = expand_expr (exp, original_target, tmode, modifier);
f47e9b4e
RK
6698 placeholder_list = old_list;
6699 return temp;
e9a25f70 6700 }
b50d17a1 6701
b50d17a1
RK
6702 case WITH_RECORD_EXPR:
6703 /* Put the object on the placeholder list, expand our first operand,
6704 and pop the list. */
6705 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6706 placeholder_list);
37a08a29
RK
6707 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6708 modifier);
b50d17a1
RK
6709 placeholder_list = TREE_CHAIN (placeholder_list);
6710 return target;
6711
70e6ca43
APB
6712 case GOTO_EXPR:
6713 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6714 expand_goto (TREE_OPERAND (exp, 0));
6715 else
6716 expand_computed_goto (TREE_OPERAND (exp, 0));
6717 return const0_rtx;
6718
bbf6f052 6719 case EXIT_EXPR:
df4ae160 6720 expand_exit_loop_if_false (NULL,
e44842fe 6721 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
6722 return const0_rtx;
6723
f42e28dd
APB
6724 case LABELED_BLOCK_EXPR:
6725 if (LABELED_BLOCK_BODY (exp))
b0832fe1 6726 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
30f7a378 6727 /* Should perhaps use expand_label, but this is simpler and safer. */
0a5fee32 6728 do_pending_stack_adjust ();
f42e28dd
APB
6729 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6730 return const0_rtx;
6731
6732 case EXIT_BLOCK_EXPR:
6733 if (EXIT_BLOCK_RETURN (exp))
ab87f8c8 6734 sorry ("returned value in block_exit_expr");
f42e28dd
APB
6735 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6736 return const0_rtx;
6737
bbf6f052 6738 case LOOP_EXPR:
0088fcb1 6739 push_temp_slots ();
bbf6f052 6740 expand_start_loop (1);
b0832fe1 6741 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
bbf6f052 6742 expand_end_loop ();
0088fcb1 6743 pop_temp_slots ();
bbf6f052
RK
6744
6745 return const0_rtx;
6746
6747 case BIND_EXPR:
6748 {
6749 tree vars = TREE_OPERAND (exp, 0);
bbf6f052
RK
6750
6751 /* Need to open a binding contour here because
e976b8b2 6752 if there are any cleanups they must be contained here. */
8e91754e 6753 expand_start_bindings (2);
bbf6f052 6754
2df53c0b
RS
6755 /* Mark the corresponding BLOCK for output in its proper place. */
6756 if (TREE_OPERAND (exp, 2) != 0
6757 && ! TREE_USED (TREE_OPERAND (exp, 2)))
43577e6b 6758 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
bbf6f052
RK
6759
6760 /* If VARS have not yet been expanded, expand them now. */
6761 while (vars)
6762 {
19e7881c 6763 if (!DECL_RTL_SET_P (vars))
4977bab6 6764 expand_decl (vars);
bbf6f052
RK
6765 expand_decl_init (vars);
6766 vars = TREE_CHAIN (vars);
6767 }
6768
37a08a29 6769 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
bbf6f052
RK
6770
6771 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6772
6773 return temp;
6774 }
6775
6776 case RTL_EXPR:
83b853c9
JM
6777 if (RTL_EXPR_SEQUENCE (exp))
6778 {
6779 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6780 abort ();
2f937369 6781 emit_insn (RTL_EXPR_SEQUENCE (exp));
83b853c9
JM
6782 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6783 }
64dc53f3
MM
6784 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6785 free_temps_for_rtl_expr (exp);
bbf6f052
RK
6786 return RTL_EXPR_RTL (exp);
6787
6788 case CONSTRUCTOR:
dd27116b
RK
6789 /* If we don't need the result, just ensure we evaluate any
6790 subexpressions. */
6791 if (ignore)
6792 {
6793 tree elt;
37a08a29 6794
dd27116b 6795 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
37a08a29
RK
6796 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6797
dd27116b
RK
6798 return const0_rtx;
6799 }
3207b172 6800
4af3895e
JVA
6801 /* All elts simple constants => refer to a constant in memory. But
6802 if this is a non-BLKmode mode, let it store a field at a time
6803 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 6804 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
6805 store directly into the target unless the type is large enough
6806 that memcpy will be used. If we are making an initializer and
00182e1e
AH
6807 all operands are constant, put it in memory as well.
6808
6809 FIXME: Avoid trying to fill vector constructors piece-meal.
6810 Output them with output_constant_def below unless we're sure
6811 they're zeros. This should go away when vector initializers
6812 are treated like VECTOR_CST instead of arrays.
6813 */
dd27116b 6814 else if ((TREE_STATIC (exp)
3207b172 6815 && ((mode == BLKmode
e5e809f4 6816 && ! (target != 0 && safe_from_p (target, exp, 1)))
d720b9d1 6817 || TREE_ADDRESSABLE (exp)
19caa751 6818 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
3a94c984 6819 && (! MOVE_BY_PIECES_P
19caa751
RK
6820 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6821 TYPE_ALIGN (type)))
0fb7aeda
KH
6822 && ((TREE_CODE (type) == VECTOR_TYPE
6823 && !is_zeros_p (exp))
6824 || ! mostly_zeros_p (exp)))))
f59700f9
RK
6825 || ((modifier == EXPAND_INITIALIZER
6826 || modifier == EXPAND_CONST_ADDRESS)
6827 && TREE_CONSTANT (exp)))
bbf6f052 6828 {
bd7cf17e 6829 rtx constructor = output_constant_def (exp, 1);
19caa751 6830
b552441b
RS
6831 if (modifier != EXPAND_CONST_ADDRESS
6832 && modifier != EXPAND_INITIALIZER
792760b9
RK
6833 && modifier != EXPAND_SUM)
6834 constructor = validize_mem (constructor);
6835
bbf6f052
RK
6836 return constructor;
6837 }
bbf6f052
RK
6838 else
6839 {
e9ac02a6
JW
6840 /* Handle calls that pass values in multiple non-contiguous
6841 locations. The Irix 6 ABI has examples of this. */
e5e809f4 6842 if (target == 0 || ! safe_from_p (target, exp, 1)
8403445a
AM
6843 || GET_CODE (target) == PARALLEL
6844 || modifier == EXPAND_STACK_PARM)
1da68f56
RK
6845 target
6846 = assign_temp (build_qualified_type (type,
6847 (TYPE_QUALS (type)
6848 | (TREE_READONLY (exp)
6849 * TYPE_QUAL_CONST))),
c24ae149 6850 0, TREE_ADDRESSABLE (exp), 1);
07604beb 6851
de8920be 6852 store_constructor (exp, target, 0, int_expr_size (exp));
bbf6f052
RK
6853 return target;
6854 }
6855
6856 case INDIRECT_REF:
6857 {
6858 tree exp1 = TREE_OPERAND (exp, 0);
7581a30f 6859 tree index;
6860 tree string = string_constant (exp1, &index);
6861
 6862 /* Try to optimize reads from const strings. */
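	/* For example, a read such as "abc"[1], which reaches this point
	   as an INDIRECT_REF of "abc" + 1, can be folded at expansion
	   time to the character constant 'b' in a one-byte integer mode
	   instead of emitting a memory reference.  (Illustrative note,
	   not part of the original source.)  */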
6863 if (string
6864 && TREE_CODE (string) == STRING_CST
6865 && TREE_CODE (index) == INTEGER_CST
05bccae2 6866 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
0fb7aeda
KH
6867 && GET_MODE_CLASS (mode) == MODE_INT
6868 && GET_MODE_SIZE (mode) == 1
37a08a29 6869 && modifier != EXPAND_WRITE)
0fb7aeda 6870 return gen_int_mode (TREE_STRING_POINTER (string)
21ef78aa 6871 [TREE_INT_CST_LOW (index)], mode);
bbf6f052 6872
405f0da6
JW
6873 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6874 op0 = memory_address (mode, op0);
38a448ca 6875 temp = gen_rtx_MEM (mode, op0);
3bdf5ad1 6876 set_mem_attributes (temp, exp, 0);
1125706f 6877
14a774a9
RK
6878 /* If we are writing to this object and its type is a record with
6879 readonly fields, we must mark it as readonly so it will
6880 conflict with readonly references to those fields. */
37a08a29 6881 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
14a774a9
RK
6882 RTX_UNCHANGING_P (temp) = 1;
6883
8c8a8e34
JW
6884 return temp;
6885 }
bbf6f052
RK
6886
6887 case ARRAY_REF:
742920c7
RK
6888 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6889 abort ();
bbf6f052 6890
bbf6f052 6891 {
742920c7
RK
6892 tree array = TREE_OPERAND (exp, 0);
6893 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6894 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
fed3cef0 6895 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
08293add 6896 HOST_WIDE_INT i;
b50d17a1 6897
d4c89139
PB
6898 /* Optimize the special-case of a zero lower bound.
6899
6900 We convert the low_bound to sizetype to avoid some problems
6901 with constant folding. (E.g. suppose the lower bound is 1,
6902 and its mode is QI. Without the conversion, (ARRAY
6903 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
fed3cef0 6904 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
d4c89139 6905
742920c7 6906 if (! integer_zerop (low_bound))
fed3cef0 6907 index = size_diffop (index, convert (sizetype, low_bound));
742920c7 6908
742920c7 6909 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
6910 This is not done in fold so it won't happen inside &.
6911 Don't fold if this is for wide characters since it's too
6912 difficult to do correctly and this is a very rare case. */
742920c7 6913
017e1b43
RH
6914 if (modifier != EXPAND_CONST_ADDRESS
6915 && modifier != EXPAND_INITIALIZER
6916 && modifier != EXPAND_MEMORY
cb5fa0f8 6917 && TREE_CODE (array) == STRING_CST
742920c7 6918 && TREE_CODE (index) == INTEGER_CST
05bccae2 6919 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
ad2e7dd0
RK
6920 && GET_MODE_CLASS (mode) == MODE_INT
6921 && GET_MODE_SIZE (mode) == 1)
21ef78aa
DE
6922 return gen_int_mode (TREE_STRING_POINTER (array)
6923 [TREE_INT_CST_LOW (index)], mode);
bbf6f052 6924
742920c7
RK
6925 /* If this is a constant index into a constant array,
6926 just get the value from the array. Handle both the cases when
6927 we have an explicit constructor and when our operand is a variable
6928 that was declared const. */
4af3895e 6929
017e1b43
RH
6930 if (modifier != EXPAND_CONST_ADDRESS
6931 && modifier != EXPAND_INITIALIZER
6932 && modifier != EXPAND_MEMORY
6933 && TREE_CODE (array) == CONSTRUCTOR
6934 && ! TREE_SIDE_EFFECTS (array)
05bccae2 6935 && TREE_CODE (index) == INTEGER_CST
3a94c984 6936 && 0 > compare_tree_int (index,
05bccae2
RK
6937 list_length (CONSTRUCTOR_ELTS
6938 (TREE_OPERAND (exp, 0)))))
742920c7 6939 {
05bccae2
RK
6940 tree elem;
6941
6942 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6943 i = TREE_INT_CST_LOW (index);
6944 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6945 ;
6946
6947 if (elem)
37a08a29
RK
6948 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6949 modifier);
742920c7 6950 }
3a94c984 6951
742920c7 6952 else if (optimize >= 1
cb5fa0f8
RK
6953 && modifier != EXPAND_CONST_ADDRESS
6954 && modifier != EXPAND_INITIALIZER
017e1b43 6955 && modifier != EXPAND_MEMORY
742920c7
RK
6956 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6957 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6958 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6959 {
08293add 6960 if (TREE_CODE (index) == INTEGER_CST)
742920c7
RK
6961 {
6962 tree init = DECL_INITIAL (array);
6963
742920c7
RK
6964 if (TREE_CODE (init) == CONSTRUCTOR)
6965 {
665f2503 6966 tree elem;
742920c7 6967
05bccae2 6968 for (elem = CONSTRUCTOR_ELTS (init);
5cb1bea4
JM
6969 (elem
6970 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
05bccae2
RK
6971 elem = TREE_CHAIN (elem))
6972 ;
6973
c54b0a5e 6974 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
742920c7 6975 return expand_expr (fold (TREE_VALUE (elem)), target,
37a08a29 6976 tmode, modifier);
742920c7
RK
6977 }
6978 else if (TREE_CODE (init) == STRING_CST
05bccae2
RK
6979 && 0 > compare_tree_int (index,
6980 TREE_STRING_LENGTH (init)))
5c80f6e6
JJ
6981 {
6982 tree type = TREE_TYPE (TREE_TYPE (init));
6983 enum machine_mode mode = TYPE_MODE (type);
6984
6985 if (GET_MODE_CLASS (mode) == MODE_INT
6986 && GET_MODE_SIZE (mode) == 1)
21ef78aa
DE
6987 return gen_int_mode (TREE_STRING_POINTER (init)
6988 [TREE_INT_CST_LOW (index)], mode);
5c80f6e6 6989 }
742920c7
RK
6990 }
6991 }
6992 }
afc6aaab 6993 goto normal_inner_ref;
bbf6f052
RK
6994
6995 case COMPONENT_REF:
4af3895e 6996 /* If the operand is a CONSTRUCTOR, we can just extract the
afc6aaab
ZW
6997 appropriate field if it is present. */
6998 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4af3895e
JVA
6999 {
7000 tree elt;
7001
7002 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7003 elt = TREE_CHAIN (elt))
86b5812c
RK
7004 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7005 /* We can normally use the value of the field in the
7006 CONSTRUCTOR. However, if this is a bitfield in
7007 an integral mode that we can fit in a HOST_WIDE_INT,
7008 we must mask only the number of bits in the bitfield,
7009 since this is done implicitly by the constructor. If
7010 the bitfield does not meet either of those conditions,
7011 we can't do this optimization. */
7012 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7013 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7014 == MODE_INT)
7015 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7016 <= HOST_BITS_PER_WIDE_INT))))
7017 {
8403445a
AM
7018 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7019 && modifier == EXPAND_STACK_PARM)
7020 target = 0;
3a94c984 7021 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
86b5812c
RK
7022 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7023 {
9df2c88c
RK
7024 HOST_WIDE_INT bitsize
7025 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
22273300
JJ
7026 enum machine_mode imode
7027 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
86b5812c
RK
7028
7029 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7030 {
7031 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
22273300 7032 op0 = expand_and (imode, op0, op1, target);
86b5812c
RK
7033 }
7034 else
7035 {
7036 tree count
e5e809f4
JL
7037 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7038 0);
86b5812c
RK
7039
7040 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7041 target, 0);
7042 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7043 target, 0);
7044 }
7045 }
7046
7047 return op0;
7048 }
4af3895e 7049 }
afc6aaab 7050 goto normal_inner_ref;
4af3895e 7051
afc6aaab
ZW
7052 case BIT_FIELD_REF:
7053 case ARRAY_RANGE_REF:
7054 normal_inner_ref:
bbf6f052
RK
7055 {
7056 enum machine_mode mode1;
770ae6cc 7057 HOST_WIDE_INT bitsize, bitpos;
7bb0943f 7058 tree offset;
bbf6f052 7059 int volatilep = 0;
839c4796 7060 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
a06ef755 7061 &mode1, &unsignedp, &volatilep);
f47e9b4e 7062 rtx orig_op0;
bbf6f052 7063
e7f3c83f
RK
7064 /* If we got back the original object, something is wrong. Perhaps
7065 we are evaluating an expression too early. In any event, don't
7066 infinitely recurse. */
7067 if (tem == exp)
7068 abort ();
7069
3d27140a 7070 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
7071 computation, since it will need a temporary and TARGET is known
7072 to have to do. This occurs in unchecked conversion in Ada. */
3a94c984 7073
f47e9b4e
RK
7074 orig_op0 = op0
7075 = expand_expr (tem,
7076 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7077 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7078 != INTEGER_CST)
8403445a 7079 && modifier != EXPAND_STACK_PARM
f47e9b4e
RK
7080 ? target : NULL_RTX),
7081 VOIDmode,
7082 (modifier == EXPAND_INITIALIZER
8403445a
AM
7083 || modifier == EXPAND_CONST_ADDRESS
7084 || modifier == EXPAND_STACK_PARM)
f47e9b4e 7085 ? modifier : EXPAND_NORMAL);
bbf6f052 7086
8c8a8e34 7087 /* If this is a constant, put it into a register if it is a
14a774a9 7088 legitimate constant and OFFSET is 0 and memory if it isn't. */
8c8a8e34
JW
7089 if (CONSTANT_P (op0))
7090 {
7091 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
14a774a9
RK
7092 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7093 && offset == 0)
8c8a8e34
JW
7094 op0 = force_reg (mode, op0);
7095 else
7096 op0 = validize_mem (force_const_mem (mode, op0));
7097 }
7098
8d2e5f72
RK
7099 /* Otherwise, if this object not in memory and we either have an
7100 offset or a BLKmode result, put it there. This case can't occur in
7101 C, but can in Ada if we have unchecked conversion of an expression
7102 from a scalar type to an array or record type or for an
7103 ARRAY_RANGE_REF whose type is BLKmode. */
7104 else if (GET_CODE (op0) != MEM
7105 && (offset != 0
7106 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7107 {
7108 /* If the operand is a SAVE_EXPR, we can deal with this by
7109 forcing the SAVE_EXPR into memory. */
7110 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7111 {
7112 put_var_into_stack (TREE_OPERAND (exp, 0),
7113 /*rescan=*/true);
7114 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7115 }
7116 else
7117 {
7118 tree nt
7119 = build_qualified_type (TREE_TYPE (tem),
7120 (TYPE_QUALS (TREE_TYPE (tem))
7121 | TYPE_QUAL_CONST));
7122 rtx memloc = assign_temp (nt, 1, 1, 1);
450b1728 7123
8d2e5f72
RK
7124 emit_move_insn (memloc, op0);
7125 op0 = memloc;
7126 }
7127 }
7128
7bb0943f
RS
7129 if (offset != 0)
7130 {
8403445a
AM
7131 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7132 EXPAND_SUM);
7bb0943f
RS
7133
7134 if (GET_CODE (op0) != MEM)
7135 abort ();
2d48c13d 7136
2d48c13d 7137#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 7138 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 7139 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
7140#else
7141 if (GET_MODE (offset_rtx) != ptr_mode)
7142 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
2d48c13d
JL
7143#endif
7144
14a774a9 7145 /* A constant address in OP0 can have VOIDmode, we must not try
efd07ca7 7146 to call force_reg for that case. Avoid that case. */
89752202
HB
7147 if (GET_CODE (op0) == MEM
7148 && GET_MODE (op0) == BLKmode
efd07ca7 7149 && GET_MODE (XEXP (op0, 0)) != VOIDmode
14a774a9 7150 && bitsize != 0
3a94c984 7151 && (bitpos % bitsize) == 0
89752202 7152 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 7153 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
89752202 7154 {
e3c8ea67 7155 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
7156 bitpos = 0;
7157 }
7158
0d4903b8
RK
7159 op0 = offset_address (op0, offset_rtx,
7160 highest_pow2_factor (offset));
7bb0943f
RS
7161 }
7162
1ce7f3c2
RK
7163 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7164 record its alignment as BIGGEST_ALIGNMENT. */
7165 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7166 && is_aligning_offset (offset, tem))
7167 set_mem_align (op0, BIGGEST_ALIGNMENT);
7168
bbf6f052
RK
7169 /* Don't forget about volatility even if this is a bitfield. */
7170 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7171 {
f47e9b4e
RK
7172 if (op0 == orig_op0)
7173 op0 = copy_rtx (op0);
7174
bbf6f052
RK
7175 MEM_VOLATILE_P (op0) = 1;
7176 }
7177
010f87c4
JJ
7178 /* The following code doesn't handle CONCAT.
7179 Assume only bitpos == 0 can be used for CONCAT, due to
7180 one element arrays having the same mode as its element. */
7181 if (GET_CODE (op0) == CONCAT)
7182 {
7183 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7184 abort ();
7185 return op0;
7186 }
7187
ccc98036
RS
7188 /* In cases where an aligned union has an unaligned object
7189 as a field, we might be extracting a BLKmode value from
7190 an integer-mode (e.g., SImode) object. Handle this case
7191 by doing the extract into an object as wide as the field
7192 (which we know to be the width of a basic mode), then
cb5fa0f8 7193 storing into memory, and changing the mode to BLKmode. */
bbf6f052 7194 if (mode1 == VOIDmode
ccc98036 7195 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
cb5fa0f8
RK
7196 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7197 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10c2a453
RK
7198 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7199 && modifier != EXPAND_CONST_ADDRESS
7200 && modifier != EXPAND_INITIALIZER)
cb5fa0f8
RK
7201 /* If the field isn't aligned enough to fetch as a memref,
7202 fetch it as a bit field. */
7203 || (mode1 != BLKmode
9e5f281f
OH
7204 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7205 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
a8f3bf52
RK
7206 && ((modifier == EXPAND_CONST_ADDRESS
7207 || modifier == EXPAND_INITIALIZER)
7208 ? STRICT_ALIGNMENT
7209 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9e5f281f 7210 || (bitpos % BITS_PER_UNIT != 0)))
cb5fa0f8
RK
7211 /* If the type and the field are a constant size and the
7212 size of the type isn't the same size as the bitfield,
7213 we must use bitfield operations. */
7214 || (bitsize >= 0
7215 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7216 == INTEGER_CST)
7217 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
a06ef755 7218 bitsize)))
bbf6f052 7219 {
bbf6f052
RK
7220 enum machine_mode ext_mode = mode;
7221
14a774a9
RK
7222 if (ext_mode == BLKmode
7223 && ! (target != 0 && GET_CODE (op0) == MEM
7224 && GET_CODE (target) == MEM
7225 && bitpos % BITS_PER_UNIT == 0))
bbf6f052
RK
7226 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7227
7228 if (ext_mode == BLKmode)
a281e72d 7229 {
7a06d606
RK
7230 if (target == 0)
7231 target = assign_temp (type, 0, 1, 1);
7232
7233 if (bitsize == 0)
7234 return target;
7235
a281e72d
RK
7236 /* In this case, BITPOS must start at a byte boundary and
7237 TARGET, if specified, must be a MEM. */
7238 if (GET_CODE (op0) != MEM
7239 || (target != 0 && GET_CODE (target) != MEM)
7240 || bitpos % BITS_PER_UNIT != 0)
7241 abort ();
7242
7a06d606
RK
7243 emit_block_move (target,
7244 adjust_address (op0, VOIDmode,
7245 bitpos / BITS_PER_UNIT),
a06ef755 7246 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a 7247 / BITS_PER_UNIT),
8403445a
AM
7248 (modifier == EXPAND_STACK_PARM
7249 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
3a94c984 7250
a281e72d
RK
7251 return target;
7252 }
bbf6f052 7253
dc6d66b3
RK
7254 op0 = validize_mem (op0);
7255
7256 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
04050c69 7257 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7258
8403445a
AM
7259 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7260 (modifier == EXPAND_STACK_PARM
7261 ? NULL_RTX : target),
7262 ext_mode, ext_mode,
bbf6f052 7263 int_size_in_bytes (TREE_TYPE (tem)));
ef19912d
RK
7264
7265 /* If the result is a record type and BITSIZE is narrower than
7266 the mode of OP0, an integral mode, and this is a big endian
7267 machine, we must put the field into the high-order bits. */
7268 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7269 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
65a07688 7270 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
ef19912d
RK
7271 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7272 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7273 - bitsize),
7274 op0, 1);
7275
bbf6f052
RK
7276 if (mode == BLKmode)
7277 {
c3d32120 7278 rtx new = assign_temp (build_qualified_type
b0c48229
NB
7279 ((*lang_hooks.types.type_for_mode)
7280 (ext_mode, 0),
c3d32120 7281 TYPE_QUAL_CONST), 0, 1, 1);
bbf6f052
RK
7282
7283 emit_move_insn (new, op0);
7284 op0 = copy_rtx (new);
7285 PUT_MODE (op0, BLKmode);
c3d32120 7286 set_mem_attributes (op0, exp, 1);
bbf6f052
RK
7287 }
7288
7289 return op0;
7290 }
7291
05019f83
RK
7292 /* If the result is BLKmode, use that to access the object
7293 now as well. */
7294 if (mode == BLKmode)
7295 mode1 = BLKmode;
7296
bbf6f052
RK
7297 /* Get a reference to just this component. */
7298 if (modifier == EXPAND_CONST_ADDRESS
7299 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
f1ec5147 7300 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
bbf6f052 7301 else
f4ef873c 7302 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
41472af8 7303
f47e9b4e
RK
7304 if (op0 == orig_op0)
7305 op0 = copy_rtx (op0);
7306
3bdf5ad1 7307 set_mem_attributes (op0, exp, 0);
dc6d66b3 7308 if (GET_CODE (XEXP (op0, 0)) == REG)
a06ef755 7309 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7310
bbf6f052 7311 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 7312 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 7313 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 7314 || modifier == EXPAND_INITIALIZER)
bbf6f052 7315 return op0;
0d15e60c 7316 else if (target == 0)
bbf6f052 7317 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 7318
bbf6f052
RK
7319 convert_move (target, op0, unsignedp);
7320 return target;
7321 }
7322
4a8d0c9c
RH
7323 case VTABLE_REF:
7324 {
7325 rtx insn, before = get_last_insn (), vtbl_ref;
7326
7327 /* Evaluate the interior expression. */
7328 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7329 tmode, modifier);
7330
7331 /* Get or create an instruction off which to hang a note. */
7332 if (REG_P (subtarget))
7333 {
7334 target = subtarget;
7335 insn = get_last_insn ();
7336 if (insn == before)
7337 abort ();
7338 if (! INSN_P (insn))
7339 insn = prev_nonnote_insn (insn);
7340 }
7341 else
7342 {
7343 target = gen_reg_rtx (GET_MODE (subtarget));
7344 insn = emit_move_insn (target, subtarget);
7345 }
7346
7347 /* Collect the data for the note. */
7348 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7349 vtbl_ref = plus_constant (vtbl_ref,
7350 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7351 /* Discard the initial CONST that was added. */
7352 vtbl_ref = XEXP (vtbl_ref, 0);
7353
7354 REG_NOTES (insn)
7355 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7356
7357 return target;
7358 }
7359
bbf6f052
RK
7360 /* Intended for a reference to a buffer of a file-object in Pascal.
7361 But it's not certain that a special tree code will really be
7362 necessary for these. INDIRECT_REF might work for them. */
7363 case BUFFER_REF:
7364 abort ();
7365
7308a047 7366 case IN_EXPR:
7308a047 7367 {
7368 /* Pascal set IN expression.
7369
7370 Algorithm:
7371 rlo = set_low - (set_low%bits_per_word);
7372 the_word = set [ (index - rlo)/bits_per_word ];
7373 bit_index = index % bits_per_word;
7374 bitmask = 1 << bit_index;
7375 return !!(the_word & bitmask); */
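	/* As a concrete illustration (not part of the original source):
	   for a set whose lower bound is 0 and which is stored byte by
	   byte, testing whether 13 is a member reads byte 13/8 = 1 of
	   the set and tests bit 13%8 = 5, i.e. (set[1] >> 5) & 1; the
	   code below open-codes that computation with BITS_PER_UNIT-sized
	   accesses, plus range checks against the set bounds.  */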
7376
7308a047
RS
7377 tree set = TREE_OPERAND (exp, 0);
7378 tree index = TREE_OPERAND (exp, 1);
d6a5ac33 7379 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308a047 7380 tree set_type = TREE_TYPE (set);
7308a047
RS
7381 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7382 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
d6a5ac33
RK
7383 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7384 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7385 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7386 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7387 rtx setaddr = XEXP (setval, 0);
7388 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7308a047
RS
7389 rtx rlow;
7390 rtx diff, quo, rem, addr, bit, result;
7308a047 7391
d6a5ac33
RK
7392 /* If domain is empty, answer is no. Likewise if index is constant
7393 and out of bounds. */
51723711 7394 if (((TREE_CODE (set_high_bound) == INTEGER_CST
d6a5ac33 7395 && TREE_CODE (set_low_bound) == INTEGER_CST
51723711 7396 && tree_int_cst_lt (set_high_bound, set_low_bound))
d6a5ac33
RK
7397 || (TREE_CODE (index) == INTEGER_CST
7398 && TREE_CODE (set_low_bound) == INTEGER_CST
7399 && tree_int_cst_lt (index, set_low_bound))
7400 || (TREE_CODE (set_high_bound) == INTEGER_CST
7401 && TREE_CODE (index) == INTEGER_CST
7402 && tree_int_cst_lt (set_high_bound, index))))
7308a047
RS
7403 return const0_rtx;
7404
d6a5ac33
RK
7405 if (target == 0)
7406 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7308a047
RS
7407
7408 /* If we get here, we have to generate the code for both cases
7409 (in range and out of range). */
7410
7411 op0 = gen_label_rtx ();
7412 op1 = gen_label_rtx ();
7413
7414 if (! (GET_CODE (index_val) == CONST_INT
7415 && GET_CODE (lo_r) == CONST_INT))
a06ef755
RK
7416 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7417 GET_MODE (index_val), iunsignedp, op1);
7308a047
RS
7418
7419 if (! (GET_CODE (index_val) == CONST_INT
7420 && GET_CODE (hi_r) == CONST_INT))
a06ef755
RK
7421 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7422 GET_MODE (index_val), iunsignedp, op1);
7308a047
RS
7423
7424 /* Calculate the element number of bit zero in the first word
7425 of the set. */
7426 if (GET_CODE (lo_r) == CONST_INT)
17938e57 7427 rlow = GEN_INT (INTVAL (lo_r)
3a94c984 7428 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 7429 else
17938e57
RK
7430 rlow = expand_binop (index_mode, and_optab, lo_r,
7431 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d6a5ac33 7432 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 7433
d6a5ac33
RK
7434 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7435 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047
RS
7436
7437 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 7438 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 7439 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
d6a5ac33
RK
7440 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7441
7308a047 7442 addr = memory_address (byte_mode,
d6a5ac33
RK
7443 expand_binop (index_mode, add_optab, diff,
7444 setaddr, NULL_RTX, iunsignedp,
17938e57 7445 OPTAB_LIB_WIDEN));
d6a5ac33 7446
3a94c984 7447 /* Extract the bit we want to examine. */
7308a047 7448 bit = expand_shift (RSHIFT_EXPR, byte_mode,
38a448ca 7449 gen_rtx_MEM (byte_mode, addr),
17938e57
RK
7450 make_tree (TREE_TYPE (index), rem),
7451 NULL_RTX, 1);
7452 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7453 GET_MODE (target) == byte_mode ? target : 0,
7308a047 7454 1, OPTAB_LIB_WIDEN);
17938e57
RK
7455
7456 if (result != target)
7457 convert_move (target, result, 1);
7308a047
RS
7458
7459 /* Output the code to handle the out-of-range case. */
7460 emit_jump (op0);
7461 emit_label (op1);
7462 emit_move_insn (target, const0_rtx);
7463 emit_label (op0);
7464 return target;
7465 }
7466
bbf6f052 7467 case WITH_CLEANUP_EXPR:
6ad7895a 7468 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
bbf6f052 7469 {
6ad7895a 7470 WITH_CLEANUP_EXPR_RTL (exp)
37a08a29 7471 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
659e5a7a
JM
7472 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7473 CLEANUP_EH_ONLY (exp));
e976b8b2 7474
bbf6f052 7475 /* That's it for this cleanup. */
6ad7895a 7476 TREE_OPERAND (exp, 1) = 0;
bbf6f052 7477 }
6ad7895a 7478 return WITH_CLEANUP_EXPR_RTL (exp);
bbf6f052 7479
5dab5552
MS
7480 case CLEANUP_POINT_EXPR:
7481 {
e976b8b2
MS
7482 /* Start a new binding layer that will keep track of all cleanup
7483 actions to be performed. */
8e91754e 7484 expand_start_bindings (2);
e976b8b2 7485
d93d4205 7486 target_temp_slot_level = temp_slot_level;
e976b8b2 7487
37a08a29 7488 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
f283f66b
JM
7489 /* If we're going to use this value, load it up now. */
7490 if (! ignore)
7491 op0 = force_not_mem (op0);
d93d4205 7492 preserve_temp_slots (op0);
e976b8b2 7493 expand_end_bindings (NULL_TREE, 0, 0);
5dab5552
MS
7494 }
7495 return op0;
7496
bbf6f052
RK
7497 case CALL_EXPR:
7498 /* Check for a built-in function. */
7499 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
7500 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7501 == FUNCTION_DECL)
bbf6f052 7502 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
0fb7aeda 7503 {
c70eaeaf
KG
7504 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7505 == BUILT_IN_FRONTEND)
8403445a
AM
7506 return (*lang_hooks.expand_expr) (exp, original_target,
7507 tmode, modifier);
c70eaeaf
KG
7508 else
7509 return expand_builtin (exp, target, subtarget, tmode, ignore);
7510 }
d6a5ac33 7511
8129842c 7512 return expand_call (exp, target, ignore);
bbf6f052
RK
7513
7514 case NON_LVALUE_EXPR:
7515 case NOP_EXPR:
7516 case CONVERT_EXPR:
7517 case REFERENCE_EXPR:
4a53008b 7518 if (TREE_OPERAND (exp, 0) == error_mark_node)
a592f288 7519 return const0_rtx;
4a53008b 7520
bbf6f052
RK
7521 if (TREE_CODE (type) == UNION_TYPE)
7522 {
7523 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
14a774a9 7524
c3d32120
RK
7525 /* If both input and output are BLKmode, this conversion isn't doing
7526 anything except possibly changing memory attribute. */
7527 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7528 {
7529 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7530 modifier);
7531
7532 result = copy_rtx (result);
7533 set_mem_attributes (result, exp, 0);
7534 return result;
7535 }
14a774a9 7536
bbf6f052 7537 if (target == 0)
1da68f56 7538 target = assign_temp (type, 0, 1, 1);
d6a5ac33 7539
bbf6f052
RK
7540 if (GET_CODE (target) == MEM)
7541 /* Store data into beginning of memory target. */
7542 store_expr (TREE_OPERAND (exp, 0),
8403445a
AM
7543 adjust_address (target, TYPE_MODE (valtype), 0),
7544 modifier == EXPAND_STACK_PARM ? 2 : 0);
1499e0a8 7545
bbf6f052
RK
7546 else if (GET_CODE (target) == REG)
7547 /* Store this field into a union of the proper type. */
14a774a9
RK
7548 store_field (target,
7549 MIN ((int_size_in_bytes (TREE_TYPE
7550 (TREE_OPERAND (exp, 0)))
7551 * BITS_PER_UNIT),
8752c357 7552 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
14a774a9 7553 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
a06ef755 7554 VOIDmode, 0, type, 0);
bbf6f052
RK
7555 else
7556 abort ();
7557
7558 /* Return the entire union. */
7559 return target;
7560 }
d6a5ac33 7561
7f62854a
RK
7562 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7563 {
7564 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
37a08a29 7565 modifier);
7f62854a
RK
7566
7567 /* If the signedness of the conversion differs and OP0 is
7568 a promoted SUBREG, clear that indication since we now
7569 have to do the proper extension. */
7570 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7571 && GET_CODE (op0) == SUBREG)
7572 SUBREG_PROMOTED_VAR_P (op0) = 0;
7573
7574 return op0;
7575 }
7576
fdf473ae 7577 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
12342f90
RS
7578 if (GET_MODE (op0) == mode)
7579 return op0;
12342f90 7580
d6a5ac33
RK
7581 /* If OP0 is a constant, just convert it into the proper mode. */
7582 if (CONSTANT_P (op0))
fdf473ae
RH
7583 {
7584 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7585 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7586
0fb7aeda 7587 if (modifier == EXPAND_INITIALIZER)
fdf473ae
RH
7588 return simplify_gen_subreg (mode, op0, inner_mode,
7589 subreg_lowpart_offset (mode,
7590 inner_mode));
7591 else
7592 return convert_modes (mode, inner_mode, op0,
7593 TREE_UNSIGNED (inner_type));
7594 }
12342f90 7595
26fcb35a 7596 if (modifier == EXPAND_INITIALIZER)
38a448ca 7597 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 7598
bbf6f052 7599 if (target == 0)
d6a5ac33
RK
7600 return
7601 convert_to_mode (mode, op0,
7602 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 7603 else
d6a5ac33
RK
7604 convert_move (target, op0,
7605 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
7606 return target;
7607
ed239f5a 7608 case VIEW_CONVERT_EXPR:
37a08a29 7609 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
ed239f5a
RK
7610
7611 /* If the input and output modes are both the same, we are done.
13cf99ec
RK
7612 Otherwise, if neither mode is BLKmode and both are integral and within
7613 a word, we can use gen_lowpart. If neither is true, make sure the
7614 operand is in memory and convert the MEM to the new mode. */
ed239f5a
RK
7615 if (TYPE_MODE (type) == GET_MODE (op0))
7616 ;
7617 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
13cf99ec
RK
7618 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7619 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
ed239f5a
RK
7620 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7621 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7622 op0 = gen_lowpart (TYPE_MODE (type), op0);
c11c10d8 7623 else if (GET_CODE (op0) != MEM)
ed239f5a 7624 {
c11c10d8
RK
7625 /* If the operand is not a MEM, force it into memory. Since we
 7626 are going to be changing the mode of the MEM, don't call
7627 force_const_mem for constants because we don't allow pool
7628 constants to change mode. */
ed239f5a 7629 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
ed239f5a 7630
c11c10d8
RK
7631 if (TREE_ADDRESSABLE (exp))
7632 abort ();
ed239f5a 7633
c11c10d8
RK
7634 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7635 target
7636 = assign_stack_temp_for_type
7637 (TYPE_MODE (inner_type),
7638 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
ed239f5a 7639
c11c10d8
RK
7640 emit_move_insn (target, op0);
7641 op0 = target;
ed239f5a
RK
7642 }
7643
c11c10d8
RK
7644 /* At this point, OP0 is in the correct mode. If the output type is such
7645 that the operand is known to be aligned, indicate that it is.
7646 Otherwise, we need only be concerned about alignment for non-BLKmode
7647 results. */
ed239f5a
RK
7648 if (GET_CODE (op0) == MEM)
7649 {
7650 op0 = copy_rtx (op0);
7651
ed239f5a
RK
7652 if (TYPE_ALIGN_OK (type))
7653 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7654 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7655 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7656 {
7657 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
65a07688
RK
7658 HOST_WIDE_INT temp_size
7659 = MAX (int_size_in_bytes (inner_type),
7660 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
ed239f5a
RK
7661 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7662 temp_size, 0, type);
c4e59f51 7663 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
ed239f5a 7664
c11c10d8
RK
7665 if (TREE_ADDRESSABLE (exp))
7666 abort ();
7667
ed239f5a
RK
7668 if (GET_MODE (op0) == BLKmode)
7669 emit_block_move (new_with_op0_mode, op0,
44bb111a 7670 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8403445a
AM
7671 (modifier == EXPAND_STACK_PARM
7672 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
ed239f5a
RK
7673 else
7674 emit_move_insn (new_with_op0_mode, op0);
7675
7676 op0 = new;
7677 }
0fb7aeda 7678
c4e59f51 7679 op0 = adjust_address (op0, TYPE_MODE (type), 0);
ed239f5a
RK
7680 }
7681
7682 return op0;
7683
bbf6f052 7684 case PLUS_EXPR:
91ce572a 7685 this_optab = ! unsignedp && flag_trapv
a9785c70 7686 && (GET_MODE_CLASS (mode) == MODE_INT)
91ce572a 7687 ? addv_optab : add_optab;
bbf6f052
RK
7688
7689 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7690 something else, make sure we add the register to the constant and
7691 then to the other thing. This case can occur during strength
7692 reduction and doing it this way will produce better code if the
7693 frame pointer or argument pointer is eliminated.
7694
7695 fold-const.c will ensure that the constant is always in the inner
7696 PLUS_EXPR, so the only case we need to do anything about is if
7697 sp, ap, or fp is our second argument, in which case we must swap
7698 the innermost first argument and our second argument. */
7699
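 /* For instance (an illustrative source fragment), if EXP is
    (x + 4) + fp, the swap below turns it into (fp + 4) + x, so that the
    register and the constant can later be folded into one address.  */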
7700 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7701 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7702 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7703 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7704 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7705 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7706 {
7707 tree t = TREE_OPERAND (exp, 1);
7708
7709 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7710 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7711 }
7712
88f63c77 7713 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
7714 something, we might be forming a constant. So try to use
7715 plus_constant. If it produces a sum and we can't accept it,
7716 use force_operand. This allows P = &ARR[const] to generate
7717 efficient code on machines where a SYMBOL_REF is not a valid
7718 address.
7719
7720 If this is an EXPAND_SUM call, always return the sum. */
c980ac49 7721 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
0fb7aeda 7722 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
bbf6f052 7723 {
8403445a
AM
7724 if (modifier == EXPAND_STACK_PARM)
7725 target = 0;
c980ac49
RS
7726 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7727 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7728 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7729 {
cbbc503e
JL
7730 rtx constant_part;
7731
c980ac49
RS
7732 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7733 EXPAND_SUM);
cbbc503e
JL
7734 /* Use immed_double_const to ensure that the constant is
7735 truncated according to the mode of OP1, then sign extended
7736 to a HOST_WIDE_INT. Using the constant directly can result
7737 in non-canonical RTL in a 64x32 cross compile. */
7738 constant_part
7739 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7740 (HOST_WIDE_INT) 0,
a5efcd63 7741 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7f401c74 7742 op1 = plus_constant (op1, INTVAL (constant_part));
c980ac49
RS
7743 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7744 op1 = force_operand (op1, target);
7745 return op1;
7746 }
bbf6f052 7747
c980ac49
RS
7748 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7749 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7750 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7751 {
cbbc503e
JL
7752 rtx constant_part;
7753
c980ac49 7754 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
70d95bac
RH
7755 (modifier == EXPAND_INITIALIZER
7756 ? EXPAND_INITIALIZER : EXPAND_SUM));
c980ac49
RS
7757 if (! CONSTANT_P (op0))
7758 {
7759 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7760 VOIDmode, modifier);
f0e9957a
RS
7761 /* Return a PLUS if modifier says it's OK. */
7762 if (modifier == EXPAND_SUM
7763 || modifier == EXPAND_INITIALIZER)
7764 return simplify_gen_binary (PLUS, mode, op0, op1);
7765 goto binop2;
c980ac49 7766 }
cbbc503e
JL
7767 /* Use immed_double_const to ensure that the constant is
7768 truncated according to the mode of OP1, then sign extended
7769 to a HOST_WIDE_INT. Using the constant directly can result
7770 in non-canonical RTL in a 64x32 cross compile. */
7771 constant_part
7772 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7773 (HOST_WIDE_INT) 0,
2a94e396 7774 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7f401c74 7775 op0 = plus_constant (op0, INTVAL (constant_part));
c980ac49
RS
7776 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7777 op0 = force_operand (op0, target);
7778 return op0;
7779 }
bbf6f052
RK
7780 }
7781
7782 /* No sense saving up arithmetic to be done
7783 if it's all in the wrong mode to form part of an address.
7784 And force_operand won't know whether to sign-extend or
7785 zero-extend. */
7786 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 7787 || mode != ptr_mode)
4ef7870a 7788 {
eb698c58
RS
7789 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7790 subtarget, &op0, &op1, 0);
6e7727eb
EB
7791 if (op0 == const0_rtx)
7792 return op1;
7793 if (op1 == const0_rtx)
7794 return op0;
4ef7870a
EB
7795 goto binop2;
7796 }
bbf6f052 7797
eb698c58
RS
7798 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7799 subtarget, &op0, &op1, modifier);
f0e9957a 7800 return simplify_gen_binary (PLUS, mode, op0, op1);
bbf6f052
RK
7801
7802 case MINUS_EXPR:
ea87523e
RK
7803 /* For initializers, we are allowed to return a MINUS of two
7804 symbolic constants. Here we handle all cases when both operands
7805 are constant. */
bbf6f052
RK
7806 /* Handle difference of two symbolic constants,
7807 for the sake of an initializer. */
7808 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7809 && really_constant_p (TREE_OPERAND (exp, 0))
7810 && really_constant_p (TREE_OPERAND (exp, 1)))
7811 {
eb698c58
RS
7812 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7813 NULL_RTX, &op0, &op1, modifier);
ea87523e 7814
ea87523e
RK
7815 /* If the last operand is a CONST_INT, use plus_constant of
7816 the negated constant. Else make the MINUS. */
7817 if (GET_CODE (op1) == CONST_INT)
7818 return plus_constant (op0, - INTVAL (op1));
7819 else
38a448ca 7820 return gen_rtx_MINUS (mode, op0, op1);
bbf6f052 7821 }
ae431183 7822
91ce572a
CC
7823 this_optab = ! unsignedp && flag_trapv
7824 && (GET_MODE_CLASS(mode) == MODE_INT)
7825 ? subv_optab : sub_optab;
1717e19e
UW
7826
7827 /* No sense saving up arithmetic to be done
7828 if it's all in the wrong mode to form part of an address.
7829 And force_operand won't know whether to sign-extend or
7830 zero-extend. */
7831 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7832 || mode != ptr_mode)
7833 goto binop;
7834
eb698c58
RS
7835 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7836 subtarget, &op0, &op1, modifier);
1717e19e
UW
7837
7838 /* Convert A - const to A + (-const). */
7839 if (GET_CODE (op1) == CONST_INT)
7840 {
7841 op1 = negate_rtx (mode, op1);
f0e9957a 7842 return simplify_gen_binary (PLUS, mode, op0, op1);
1717e19e
UW
7843 }
7844
7845 goto binop2;
bbf6f052
RK
7846
7847 case MULT_EXPR:
bbf6f052
RK
7848 /* If first operand is constant, swap them.
7849 Thus the following special case checks need only
7850 check the second operand. */
7851 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7852 {
b3694847 7853 tree t1 = TREE_OPERAND (exp, 0);
bbf6f052
RK
7854 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7855 TREE_OPERAND (exp, 1) = t1;
7856 }
7857
7858 /* Attempt to return something suitable for generating an
7859 indexed address, for machines that support that. */
7860
88f63c77 7861 if (modifier == EXPAND_SUM && mode == ptr_mode
3b40e71b 7862 && host_integerp (TREE_OPERAND (exp, 1), 0))
bbf6f052 7863 {
48a5f2fa
DJ
7864 tree exp1 = TREE_OPERAND (exp, 1);
7865
921b3427
RK
7866 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7867 EXPAND_SUM);
bbf6f052 7868
bbf6f052 7869 if (GET_CODE (op0) != REG)
906c4e36 7870 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
7871 if (GET_CODE (op0) != REG)
7872 op0 = copy_to_mode_reg (mode, op0);
7873
48a5f2fa
DJ
7874 return gen_rtx_MULT (mode, op0,
7875 gen_int_mode (tree_low_cst (exp1, 0),
7876 TYPE_MODE (TREE_TYPE (exp1))));
bbf6f052
RK
7877 }
7878
8403445a
AM
7879 if (modifier == EXPAND_STACK_PARM)
7880 target = 0;
7881
bbf6f052
RK
7882 /* Check for multiplying things that have been extended
7883 from a narrower type. If this machine supports multiplying
7884 in that narrower type with a result in the desired type,
7885 do it that way, and avoid the explicit type-conversion. */
7886 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7887 && TREE_CODE (type) == INTEGER_TYPE
7888 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7889 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7890 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7891 && int_fits_type_p (TREE_OPERAND (exp, 1),
7892 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7893 /* Don't use a widening multiply if a shift will do. */
7894 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 7895 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
7896 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7897 ||
7898 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7899 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7900 ==
7901 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7902 /* If both operands are extended, they must either both
7903 be zero-extended or both be sign-extended. */
7904 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7905 ==
7906 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7907 {
7908 enum machine_mode innermode
7909 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
b10af0c8
TG
7910 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7911 ? smul_widen_optab : umul_widen_optab);
bbf6f052
RK
7912 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7913 ? umul_widen_optab : smul_widen_optab);
b10af0c8 7914 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 7915 {
b10af0c8
TG
7916 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7917 {
b10af0c8 7918 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
eb698c58
RS
7919 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7920 TREE_OPERAND (exp, 1),
7921 NULL_RTX, &op0, &op1, 0);
b10af0c8 7922 else
eb698c58
RS
7923 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7924 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7925 NULL_RTX, &op0, &op1, 0);
b10af0c8
TG
7926 goto binop2;
7927 }
7928 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7929 && innermode == word_mode)
7930 {
7931 rtx htem;
7932 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7933 NULL_RTX, VOIDmode, 0);
7934 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8c118062
GK
7935 op1 = convert_modes (innermode, mode,
7936 expand_expr (TREE_OPERAND (exp, 1),
7937 NULL_RTX, VOIDmode, 0),
7938 unsignedp);
b10af0c8
TG
7939 else
7940 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7941 NULL_RTX, VOIDmode, 0);
7942 temp = expand_binop (mode, other_optab, op0, op1, target,
7943 unsignedp, OPTAB_LIB_WIDEN);
7944 htem = expand_mult_highpart_adjust (innermode,
7945 gen_highpart (innermode, temp),
7946 op0, op1,
7947 gen_highpart (innermode, temp),
7948 unsignedp);
7949 emit_move_insn (gen_highpart (innermode, temp), htem);
7950 return temp;
7951 }
bbf6f052
RK
7952 }
7953 }
eb698c58
RS
7954 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7955 subtarget, &op0, &op1, 0);
bbf6f052
RK
7956 return expand_mult (mode, op0, op1, target, unsignedp);
7957
7958 case TRUNC_DIV_EXPR:
7959 case FLOOR_DIV_EXPR:
7960 case CEIL_DIV_EXPR:
7961 case ROUND_DIV_EXPR:
7962 case EXACT_DIV_EXPR:
8403445a
AM
7963 if (modifier == EXPAND_STACK_PARM)
7964 target = 0;
bbf6f052
RK
7965 /* Possible optimization: compute the dividend with EXPAND_SUM
7966 then if the divisor is constant can optimize the case
7967 where some terms of the dividend have coeffs divisible by it. */
eb698c58
RS
7968 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7969 subtarget, &op0, &op1, 0);
bbf6f052
RK
7970 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7971
7972 case RDIV_EXPR:
b7e9703c
JH
7973 /* Emit a/b as a*(1/b). Later passes may be able to CSE the reciprocal,
7974 saving an expensive divide. If not, combine will rebuild the original
7975 computation. */
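 /* For instance (a sketch): with -funsafe-math-optimizations, x/d and y/d
    in the same block both become multiplications by (1/d), and CSE can then
    share the single reciprocal instead of emitting two divides.  */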
7976 if (flag_unsafe_math_optimizations && optimize && !optimize_size
ed7d44bc 7977 && TREE_CODE (type) == REAL_TYPE
b7e9703c
JH
7978 && !real_onep (TREE_OPERAND (exp, 0)))
7979 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7980 build (RDIV_EXPR, type,
7981 build_real (type, dconst1),
7982 TREE_OPERAND (exp, 1))),
8e37cba8 7983 target, tmode, modifier);
ef89d648 7984 this_optab = sdiv_optab;
bbf6f052
RK
7985 goto binop;
7986
7987 case TRUNC_MOD_EXPR:
7988 case FLOOR_MOD_EXPR:
7989 case CEIL_MOD_EXPR:
7990 case ROUND_MOD_EXPR:
8403445a
AM
7991 if (modifier == EXPAND_STACK_PARM)
7992 target = 0;
eb698c58
RS
7993 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7994 subtarget, &op0, &op1, 0);
bbf6f052
RK
7995 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7996
7997 case FIX_ROUND_EXPR:
7998 case FIX_FLOOR_EXPR:
7999 case FIX_CEIL_EXPR:
8000 abort (); /* Not used for C. */
8001
8002 case FIX_TRUNC_EXPR:
906c4e36 8003 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8403445a 8004 if (target == 0 || modifier == EXPAND_STACK_PARM)
bbf6f052
RK
8005 target = gen_reg_rtx (mode);
8006 expand_fix (target, op0, unsignedp);
8007 return target;
8008
8009 case FLOAT_EXPR:
906c4e36 8010 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8403445a 8011 if (target == 0 || modifier == EXPAND_STACK_PARM)
bbf6f052
RK
8012 target = gen_reg_rtx (mode);
8013 /* expand_float can't figure out what to do if FROM has VOIDmode.
8014 So give it the correct mode. With -O, cse will optimize this. */
8015 if (GET_MODE (op0) == VOIDmode)
8016 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8017 op0);
8018 expand_float (target, op0,
8019 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8020 return target;
8021
8022 case NEGATE_EXPR:
5b22bee8 8023 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
8024 if (modifier == EXPAND_STACK_PARM)
8025 target = 0;
91ce572a 8026 temp = expand_unop (mode,
0fb7aeda
KH
8027 ! unsignedp && flag_trapv
8028 && (GET_MODE_CLASS(mode) == MODE_INT)
8029 ? negv_optab : neg_optab, op0, target, 0);
bbf6f052
RK
8030 if (temp == 0)
8031 abort ();
8032 return temp;
8033
8034 case ABS_EXPR:
8035 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
8036 if (modifier == EXPAND_STACK_PARM)
8037 target = 0;
bbf6f052 8038
11017cc7 8039 /* ABS_EXPR is not valid for complex arguments. */
d6a5ac33
RK
8040 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8041 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
11017cc7 8042 abort ();
2d7050fd 8043
bbf6f052
RK
8044 /* Unsigned abs is simply the operand. Testing here means we don't
8045 risk generating incorrect code below. */
8046 if (TREE_UNSIGNED (type))
8047 return op0;
8048
91ce572a 8049 return expand_abs (mode, op0, target, unsignedp,
e5e809f4 8050 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
bbf6f052
RK
8051
8052 case MAX_EXPR:
8053 case MIN_EXPR:
8054 target = original_target;
8403445a
AM
8055 if (target == 0
8056 || modifier == EXPAND_STACK_PARM
fc155707 8057 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
d6a5ac33 8058 || GET_MODE (target) != mode
bbf6f052
RK
8059 || (GET_CODE (target) == REG
8060 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8061 target = gen_reg_rtx (mode);
eb698c58
RS
8062 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8063 target, &op0, &op1, 0);
bbf6f052
RK
8064
8065 /* First try to do it with a special MIN or MAX instruction.
8066 If that does not win, use a conditional jump to select the proper
8067 value. */
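 /* When no such instruction exists, the fallback emitted below is roughly

	target = op0;
	if (target >= op1)	(<= for MIN_EXPR)
	  goto lab;
	target = op1;
      lab:

    with the comparison done word by word for very wide integer modes.  */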
8068 this_optab = (TREE_UNSIGNED (type)
8069 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8070 : (code == MIN_EXPR ? smin_optab : smax_optab));
8071
8072 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8073 OPTAB_WIDEN);
8074 if (temp != 0)
8075 return temp;
8076
fa2981d8
JW
8077 /* At this point, a MEM target is no longer useful; we will get better
8078 code without it. */
3a94c984 8079
fa2981d8
JW
8080 if (GET_CODE (target) == MEM)
8081 target = gen_reg_rtx (mode);
8082
e3be1116
RS
8083 /* If op1 was placed in target, swap op0 and op1. */
8084 if (target != op0 && target == op1)
8085 {
8086 rtx tem = op0;
8087 op0 = op1;
8088 op1 = tem;
8089 }
8090
ee456b1c
RK
8091 if (target != op0)
8092 emit_move_insn (target, op0);
d6a5ac33 8093
bbf6f052 8094 op0 = gen_label_rtx ();
d6a5ac33 8095
f81497d9
RS
8096 /* If this mode is an integer too wide to compare properly,
8097 compare word by word. Rely on cse to optimize constant cases. */
1eb8759b
RH
8098 if (GET_MODE_CLASS (mode) == MODE_INT
8099 && ! can_compare_p (GE, mode, ccp_jump))
bbf6f052 8100 {
f81497d9 8101 if (code == MAX_EXPR)
d6a5ac33
RK
8102 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8103 target, op1, NULL_RTX, op0);
bbf6f052 8104 else
d6a5ac33
RK
8105 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8106 op1, target, NULL_RTX, op0);
bbf6f052 8107 }
f81497d9
RS
8108 else
8109 {
b30f05db
BS
8110 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8111 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
a06ef755 8112 unsignedp, mode, NULL_RTX, NULL_RTX,
b30f05db 8113 op0);
f81497d9 8114 }
b30f05db 8115 emit_move_insn (target, op1);
bbf6f052
RK
8116 emit_label (op0);
8117 return target;
8118
bbf6f052
RK
8119 case BIT_NOT_EXPR:
8120 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
8121 if (modifier == EXPAND_STACK_PARM)
8122 target = 0;
bbf6f052
RK
8123 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8124 if (temp == 0)
8125 abort ();
8126 return temp;
8127
d6a5ac33
RK
8128 /* ??? Can optimize bitwise operations with one arg constant.
8129 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8130 and (a bitwise1 b) bitwise2 b (etc)
8131 but that is probably not worth while. */
8132
8133 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8134 boolean values when we want in all cases to compute both of them. In
8135 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8136 as actual zero-or-1 values and then bitwise anding. In cases where
8137 there cannot be any side effects, better code would be made by
8138 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8139 how to recognize those cases. */
8140
bbf6f052
RK
8141 case TRUTH_AND_EXPR:
8142 case BIT_AND_EXPR:
8143 this_optab = and_optab;
8144 goto binop;
8145
bbf6f052
RK
8146 case TRUTH_OR_EXPR:
8147 case BIT_IOR_EXPR:
8148 this_optab = ior_optab;
8149 goto binop;
8150
874726a8 8151 case TRUTH_XOR_EXPR:
bbf6f052
RK
8152 case BIT_XOR_EXPR:
8153 this_optab = xor_optab;
8154 goto binop;
8155
8156 case LSHIFT_EXPR:
8157 case RSHIFT_EXPR:
8158 case LROTATE_EXPR:
8159 case RROTATE_EXPR:
e5e809f4 8160 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052 8161 subtarget = 0;
8403445a
AM
8162 if (modifier == EXPAND_STACK_PARM)
8163 target = 0;
bbf6f052
RK
8164 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8165 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8166 unsignedp);
8167
d6a5ac33
RK
8168 /* Could determine the answer when only additive constants differ. Also,
8169 the addition of one can be handled by changing the condition. */
bbf6f052
RK
8170 case LT_EXPR:
8171 case LE_EXPR:
8172 case GT_EXPR:
8173 case GE_EXPR:
8174 case EQ_EXPR:
8175 case NE_EXPR:
1eb8759b
RH
8176 case UNORDERED_EXPR:
8177 case ORDERED_EXPR:
8178 case UNLT_EXPR:
8179 case UNLE_EXPR:
8180 case UNGT_EXPR:
8181 case UNGE_EXPR:
8182 case UNEQ_EXPR:
8403445a
AM
8183 temp = do_store_flag (exp,
8184 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8185 tmode != VOIDmode ? tmode : mode, 0);
bbf6f052
RK
8186 if (temp != 0)
8187 return temp;
d6a5ac33 8188
0f41302f 8189 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
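 /* Roughly, the sequence emitted for "r = (foo != 0)" once r already holds
    the value of foo is

	if (r == 0) goto lab;
	r = 1;
      lab:

    (r is an illustrative name for the register chosen below.)  */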
bbf6f052
RK
8190 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8191 && original_target
8192 && GET_CODE (original_target) == REG
8193 && (GET_MODE (original_target)
8194 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8195 {
d6a5ac33
RK
8196 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8197 VOIDmode, 0);
8198
c0a3eeac
UW
8199 /* If temp is constant, we can just compute the result. */
8200 if (GET_CODE (temp) == CONST_INT)
8201 {
8202 if (INTVAL (temp) != 0)
8203 emit_move_insn (target, const1_rtx);
8204 else
8205 emit_move_insn (target, const0_rtx);
8206
8207 return target;
8208 }
8209
bbf6f052 8210 if (temp != original_target)
c0a3eeac
UW
8211 {
8212 enum machine_mode mode1 = GET_MODE (temp);
8213 if (mode1 == VOIDmode)
8214 mode1 = tmode != VOIDmode ? tmode : mode;
0fb7aeda 8215
c0a3eeac
UW
8216 temp = copy_to_mode_reg (mode1, temp);
8217 }
d6a5ac33 8218
bbf6f052 8219 op1 = gen_label_rtx ();
c5d5d461 8220 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
a06ef755 8221 GET_MODE (temp), unsignedp, op1);
bbf6f052
RK
8222 emit_move_insn (temp, const1_rtx);
8223 emit_label (op1);
8224 return temp;
8225 }
d6a5ac33 8226
bbf6f052
RK
8227 /* If no set-flag instruction, must generate a conditional
8228 store into a temporary variable. Drop through
8229 and handle this like && and ||. */
8230
8231 case TRUTH_ANDIF_EXPR:
8232 case TRUTH_ORIF_EXPR:
e44842fe 8233 if (! ignore
8403445a
AM
8234 && (target == 0
8235 || modifier == EXPAND_STACK_PARM
8236 || ! safe_from_p (target, exp, 1)
e44842fe
RK
8237 /* Make sure we don't have a hard reg (such as function's return
8238 value) live across basic blocks, if not optimizing. */
8239 || (!optimize && GET_CODE (target) == REG
8240 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 8241 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
e44842fe
RK
8242
8243 if (target)
8244 emit_clr_insn (target);
8245
bbf6f052
RK
8246 op1 = gen_label_rtx ();
8247 jumpifnot (exp, op1);
e44842fe
RK
8248
8249 if (target)
8250 emit_0_to_1_insn (target);
8251
bbf6f052 8252 emit_label (op1);
e44842fe 8253 return ignore ? const0_rtx : target;
bbf6f052
RK
8254
8255 case TRUTH_NOT_EXPR:
8403445a
AM
8256 if (modifier == EXPAND_STACK_PARM)
8257 target = 0;
bbf6f052
RK
8258 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8259 /* The parser is careful to generate TRUTH_NOT_EXPR
8260 only with operands that are always zero or one. */
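 /* So !x is computed simply as x ^ 1, which is only correct because x is
    known to be 0 or 1 here.  */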
906c4e36 8261 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052
RK
8262 target, 1, OPTAB_LIB_WIDEN);
8263 if (temp == 0)
8264 abort ();
8265 return temp;
8266
8267 case COMPOUND_EXPR:
8268 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8269 emit_queue ();
8270 return expand_expr (TREE_OPERAND (exp, 1),
8271 (ignore ? const0_rtx : target),
8403445a 8272 VOIDmode, modifier);
bbf6f052
RK
8273
8274 case COND_EXPR:
ac01eace
RK
8275 /* If we would have a "singleton" (see below) were it not for a
8276 conversion in each arm, bring that conversion back out. */
8277 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8278 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8279 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8280 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8281 {
d6edb99e
ZW
8282 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8283 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8284
8285 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8286 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8287 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8288 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8289 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8290 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8291 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8292 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
ac01eace 8293 return expand_expr (build1 (NOP_EXPR, type,
d6edb99e 8294 build (COND_EXPR, TREE_TYPE (iftrue),
ac01eace 8295 TREE_OPERAND (exp, 0),
d6edb99e 8296 iftrue, iffalse)),
ac01eace
RK
8297 target, tmode, modifier);
8298 }
8299
bbf6f052
RK
8300 {
8301 /* Note that COND_EXPRs whose type is a structure or union
8302 are required to be constructed to contain assignments of
8303 a temporary variable, so that we can evaluate them here
8304 for side effect only. If type is void, we must do likewise. */
8305
8306 /* If an arm of the branch requires a cleanup,
8307 only that cleanup is performed. */
8308
8309 tree singleton = 0;
8310 tree binary_op = 0, unary_op = 0;
bbf6f052
RK
8311
8312 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8313 convert it to our mode, if necessary. */
8314 if (integer_onep (TREE_OPERAND (exp, 1))
8315 && integer_zerop (TREE_OPERAND (exp, 2))
8316 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8317 {
dd27116b
RK
8318 if (ignore)
8319 {
8320 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
37a08a29 8321 modifier);
dd27116b
RK
8322 return const0_rtx;
8323 }
8324
8403445a
AM
8325 if (modifier == EXPAND_STACK_PARM)
8326 target = 0;
37a08a29 8327 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
bbf6f052
RK
8328 if (GET_MODE (op0) == mode)
8329 return op0;
d6a5ac33 8330
bbf6f052
RK
8331 if (target == 0)
8332 target = gen_reg_rtx (mode);
8333 convert_move (target, op0, unsignedp);
8334 return target;
8335 }
8336
ac01eace
RK
8337 /* Check for X ? A + B : A. If we have this, we can copy A to the
8338 output and conditionally add B. Similarly for unary operations.
8339 Don't do this if X has side-effects because those side effects
8340 might affect A or B and the "?" operation is a sequence point in
8341 ANSI. (operand_equal_p tests for side effects.) */
bbf6f052
RK
8342
8343 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8344 && operand_equal_p (TREE_OPERAND (exp, 2),
8345 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8346 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8347 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8348 && operand_equal_p (TREE_OPERAND (exp, 1),
8349 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8350 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8351 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8352 && operand_equal_p (TREE_OPERAND (exp, 2),
8353 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8354 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8355 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8356 && operand_equal_p (TREE_OPERAND (exp, 1),
8357 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8358 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8359
01c8a7c8
RK
8360 /* If we are not to produce a result, we have no target. Otherwise,
8361 if a target was specified use it; it will not be used as an
3a94c984 8362 intermediate target unless it is safe. If no target, use a
01c8a7c8
RK
8363 temporary. */
8364
8365 if (ignore)
8366 temp = 0;
8403445a
AM
8367 else if (modifier == EXPAND_STACK_PARM)
8368 temp = assign_temp (type, 0, 0, 1);
01c8a7c8 8369 else if (original_target
e5e809f4 8370 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
01c8a7c8
RK
8371 || (singleton && GET_CODE (original_target) == REG
8372 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8373 && original_target == var_rtx (singleton)))
8374 && GET_MODE (original_target) == mode
7c00d1fe
RK
8375#ifdef HAVE_conditional_move
8376 && (! can_conditionally_move_p (mode)
8377 || GET_CODE (original_target) == REG
8378 || TREE_ADDRESSABLE (type))
8379#endif
8125d7e9
BS
8380 && (GET_CODE (original_target) != MEM
8381 || TREE_ADDRESSABLE (type)))
01c8a7c8
RK
8382 temp = original_target;
8383 else if (TREE_ADDRESSABLE (type))
8384 abort ();
8385 else
8386 temp = assign_temp (type, 0, 0, 1);
8387
ac01eace
RK
8388 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8389 do the test of X as a store-flag operation, do this as
8390 A + ((X != 0) << log C). Similarly for other simple binary
8391 operators. Only do for C == 1 if BRANCH_COST is low. */
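 /* For instance (illustrative values), "flag ? x + 8 : x" can become
    "x + ((flag != 0) << 3)" when the test can be done as a store-flag.  */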
dd27116b 8392 if (temp && singleton && binary_op
bbf6f052
RK
8393 && (TREE_CODE (binary_op) == PLUS_EXPR
8394 || TREE_CODE (binary_op) == MINUS_EXPR
8395 || TREE_CODE (binary_op) == BIT_IOR_EXPR
9fbd9f58 8396 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
ac01eace
RK
8397 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8398 : integer_onep (TREE_OPERAND (binary_op, 1)))
bbf6f052
RK
8399 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8400 {
8401 rtx result;
61f6c84f 8402 tree cond;
91ce572a 8403 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
0fb7aeda
KH
8404 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8405 ? addv_optab : add_optab)
8406 : TREE_CODE (binary_op) == MINUS_EXPR
8407 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8408 ? subv_optab : sub_optab)
8409 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8410 : xor_optab);
bbf6f052 8411
61f6c84f 8412 /* If we had X ? A : A + 1, do this as A + (X == 0). */
bbf6f052 8413 if (singleton == TREE_OPERAND (exp, 1))
61f6c84f
JJ
8414 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8415 else
8416 cond = TREE_OPERAND (exp, 0);
bbf6f052 8417
61f6c84f
JJ
8418 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8419 ? temp : NULL_RTX),
bbf6f052
RK
8420 mode, BRANCH_COST <= 1);
8421
ac01eace
RK
8422 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8423 result = expand_shift (LSHIFT_EXPR, mode, result,
8424 build_int_2 (tree_log2
8425 (TREE_OPERAND
8426 (binary_op, 1)),
8427 0),
e5e809f4 8428 (safe_from_p (temp, singleton, 1)
ac01eace
RK
8429 ? temp : NULL_RTX), 0);
8430
bbf6f052
RK
8431 if (result)
8432 {
906c4e36 8433 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8434 return expand_binop (mode, boptab, op1, result, temp,
8435 unsignedp, OPTAB_LIB_WIDEN);
8436 }
bbf6f052 8437 }
3a94c984 8438
dabf8373 8439 do_pending_stack_adjust ();
bbf6f052
RK
8440 NO_DEFER_POP;
8441 op0 = gen_label_rtx ();
8442
8443 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8444 {
8445 if (temp != 0)
8446 {
8447 /* If the target conflicts with the other operand of the
8448 binary op, we can't use it. Also, we can't use the target
8449 if it is a hard register, because evaluating the condition
8450 might clobber it. */
8451 if ((binary_op
e5e809f4 8452 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
bbf6f052
RK
8453 || (GET_CODE (temp) == REG
8454 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8455 temp = gen_reg_rtx (mode);
8403445a
AM
8456 store_expr (singleton, temp,
8457 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052
RK
8458 }
8459 else
906c4e36 8460 expand_expr (singleton,
2937cf87 8461 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8462 if (singleton == TREE_OPERAND (exp, 1))
8463 jumpif (TREE_OPERAND (exp, 0), op0);
8464 else
8465 jumpifnot (TREE_OPERAND (exp, 0), op0);
8466
956d6950 8467 start_cleanup_deferral ();
bbf6f052
RK
8468 if (binary_op && temp == 0)
8469 /* Just touch the other operand. */
8470 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 8471 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8472 else if (binary_op)
8473 store_expr (build (TREE_CODE (binary_op), type,
8474 make_tree (type, temp),
8475 TREE_OPERAND (binary_op, 1)),
8403445a 8476 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052
RK
8477 else
8478 store_expr (build1 (TREE_CODE (unary_op), type,
8479 make_tree (type, temp)),
8403445a 8480 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8481 op1 = op0;
bbf6f052 8482 }
bbf6f052
RK
8483 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8484 comparison operator. If we have one of these cases, set the
8485 output to A, branch on A (cse will merge these two references),
8486 then set the output to FOO. */
8487 else if (temp
8488 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8489 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8490 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8491 TREE_OPERAND (exp, 1), 0)
e9a25f70
JL
8492 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8493 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
e5e809f4 8494 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
bbf6f052 8495 {
3a94c984
KH
8496 if (GET_CODE (temp) == REG
8497 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
bbf6f052 8498 temp = gen_reg_rtx (mode);
8403445a
AM
8499 store_expr (TREE_OPERAND (exp, 1), temp,
8500 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8501 jumpif (TREE_OPERAND (exp, 0), op0);
5dab5552 8502
956d6950 8503 start_cleanup_deferral ();
c37b68d4
RS
8504 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8505 store_expr (TREE_OPERAND (exp, 2), temp,
8506 modifier == EXPAND_STACK_PARM ? 2 : 0);
8507 else
8508 expand_expr (TREE_OPERAND (exp, 2),
8509 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8510 op1 = op0;
8511 }
8512 else if (temp
8513 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8514 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8515 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8516 TREE_OPERAND (exp, 2), 0)
e9a25f70
JL
8517 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8518 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
e5e809f4 8519 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
bbf6f052 8520 {
3a94c984
KH
8521 if (GET_CODE (temp) == REG
8522 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
bbf6f052 8523 temp = gen_reg_rtx (mode);
8403445a
AM
8524 store_expr (TREE_OPERAND (exp, 2), temp,
8525 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8526 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 8527
956d6950 8528 start_cleanup_deferral ();
c37b68d4
RS
8529 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8530 store_expr (TREE_OPERAND (exp, 1), temp,
8531 modifier == EXPAND_STACK_PARM ? 2 : 0);
8532 else
8533 expand_expr (TREE_OPERAND (exp, 1),
8534 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8535 op1 = op0;
8536 }
8537 else
8538 {
8539 op1 = gen_label_rtx ();
8540 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 8541
956d6950 8542 start_cleanup_deferral ();
3a94c984 8543
2ac84cfe 8544 /* One branch of the cond can be void, if it never returns. For
3a94c984 8545 example, A ? throw : E. */
2ac84cfe 8546 if (temp != 0
3a94c984 8547 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8403445a
AM
8548 store_expr (TREE_OPERAND (exp, 1), temp,
8549 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8550 else
906c4e36
RK
8551 expand_expr (TREE_OPERAND (exp, 1),
8552 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
956d6950 8553 end_cleanup_deferral ();
bbf6f052
RK
8554 emit_queue ();
8555 emit_jump_insn (gen_jump (op1));
8556 emit_barrier ();
8557 emit_label (op0);
956d6950 8558 start_cleanup_deferral ();
2ac84cfe 8559 if (temp != 0
3a94c984 8560 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8403445a
AM
8561 store_expr (TREE_OPERAND (exp, 2), temp,
8562 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8563 else
906c4e36
RK
8564 expand_expr (TREE_OPERAND (exp, 2),
8565 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8566 }
8567
956d6950 8568 end_cleanup_deferral ();
bbf6f052
RK
8569
8570 emit_queue ();
8571 emit_label (op1);
8572 OK_DEFER_POP;
5dab5552 8573
bbf6f052
RK
8574 return temp;
8575 }
8576
8577 case TARGET_EXPR:
8578 {
8579 /* Something needs to be initialized, but we didn't know
8580 where that thing was when building the tree. For example,
8581 it could be the return value of a function, or a parameter
8582 to a function which is laid out on the stack, or a temporary
8583 variable which must be passed by reference.
8584
8585 We guarantee that the expression will either be constructed
8586 or copied into our original target. */
8587
8588 tree slot = TREE_OPERAND (exp, 0);
2a888d4c 8589 tree cleanups = NULL_TREE;
5c062816 8590 tree exp1;
bbf6f052
RK
8591
8592 if (TREE_CODE (slot) != VAR_DECL)
8593 abort ();
8594
9c51f375
RK
8595 if (! ignore)
8596 target = original_target;
8597
6fbfac92
JM
8598 /* Set this here so that if we get a target that refers to a
8599 register variable that's already been used, put_reg_into_stack
3a94c984 8600 knows that it should fix up those uses. */
6fbfac92
JM
8601 TREE_USED (slot) = 1;
8602
bbf6f052
RK
8603 if (target == 0)
8604 {
19e7881c 8605 if (DECL_RTL_SET_P (slot))
ac993f4f
MS
8606 {
8607 target = DECL_RTL (slot);
5c062816 8608 /* If we have already expanded the slot, don't do
ac993f4f 8609 it again. (mrs) */
5c062816
MS
8610 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8611 return target;
ac993f4f 8612 }
bbf6f052
RK
8613 else
8614 {
e9a25f70 8615 target = assign_temp (type, 2, 0, 1);
bbf6f052
RK
8616 /* All temp slots at this level must not conflict. */
8617 preserve_temp_slots (target);
19e7881c 8618 SET_DECL_RTL (slot, target);
e9a25f70 8619 if (TREE_ADDRESSABLE (slot))
f29a2bd1 8620 put_var_into_stack (slot, /*rescan=*/false);
bbf6f052 8621
e287fd6e
RK
8622 /* Since SLOT is not known to the called function
8623 to belong to its stack frame, we must build an explicit
8624 cleanup. This case occurs when we must build up a reference
8625 to pass the reference as an argument. In this case,
8626 it is very likely that such a reference need not be
8627 built here. */
8628
8629 if (TREE_OPERAND (exp, 2) == 0)
c88770e9
NB
8630 TREE_OPERAND (exp, 2)
8631 = (*lang_hooks.maybe_build_cleanup) (slot);
2a888d4c 8632 cleanups = TREE_OPERAND (exp, 2);
e287fd6e 8633 }
bbf6f052
RK
8634 }
8635 else
8636 {
8637 /* This case does occur, when expanding a parameter which
8638 needs to be constructed on the stack. The target
8639 is the actual stack address that we want to initialize.
8640 The function we call will perform the cleanup in this case. */
8641
8c042b47
RS
8642 /* If we have already assigned it space, use that space,
8643 not the target that we were passed in, as our target
8644 parameter is only a hint. */
19e7881c 8645 if (DECL_RTL_SET_P (slot))
3a94c984
KH
8646 {
8647 target = DECL_RTL (slot);
8648 /* If we have already expanded the slot, don't do
8c042b47 8649 it again. (mrs) */
3a94c984
KH
8650 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8651 return target;
8c042b47 8652 }
21002281
JW
8653 else
8654 {
19e7881c 8655 SET_DECL_RTL (slot, target);
21002281
JW
8656 /* If we must have an addressable slot, then make sure that
8657 the RTL that we just stored in slot is OK. */
8658 if (TREE_ADDRESSABLE (slot))
f29a2bd1 8659 put_var_into_stack (slot, /*rescan=*/true);
21002281 8660 }
bbf6f052
RK
8661 }
8662
4847c938 8663 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
5c062816
MS
8664 /* Mark it as expanded. */
8665 TREE_OPERAND (exp, 1) = NULL_TREE;
8666
8403445a 8667 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
61d6b1cc 8668
659e5a7a 8669 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
3a94c984 8670
41531e5b 8671 return target;
bbf6f052
RK
8672 }
8673
8674 case INIT_EXPR:
8675 {
8676 tree lhs = TREE_OPERAND (exp, 0);
8677 tree rhs = TREE_OPERAND (exp, 1);
bbf6f052 8678
b90f141a 8679 temp = expand_assignment (lhs, rhs, ! ignore);
bbf6f052
RK
8680 return temp;
8681 }
8682
8683 case MODIFY_EXPR:
8684 {
8685 /* If lhs is complex, expand calls in rhs before computing it.
6d0a3f67
NS
8686 That's so we don't compute a pointer and save it over a
8687 call. If lhs is simple, compute it first so we can give it
8688 as a target if the rhs is just a call. This avoids an
8689 extra temp and copy, and prevents a partial subsumption
8690 that makes bad code. Actually we could treat
8691 component_ref's of vars like vars. */
bbf6f052
RK
8692
8693 tree lhs = TREE_OPERAND (exp, 0);
8694 tree rhs = TREE_OPERAND (exp, 1);
bbf6f052
RK
8695
8696 temp = 0;
8697
bbf6f052
RK
8698 /* Check for |= or &= of a bitfield of size one into another bitfield
8699 of size 1. In this case, (unless we need the result of the
8700 assignment) we can do this more efficiently with a
8701 test followed by an assignment, if necessary.
8702
8703 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8704 things change so we do, this code should be enhanced to
8705 support it. */
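 /* For instance (illustrative one-bit fields a.x and b.y, result unused),
    "a.x |= b.y" is emitted as "if (b.y) a.x = 1;" and
    "a.x &= b.y" as "if (! b.y) a.x = 0;".  */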
8706 if (ignore
8707 && TREE_CODE (lhs) == COMPONENT_REF
8708 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8709 || TREE_CODE (rhs) == BIT_AND_EXPR)
8710 && TREE_OPERAND (rhs, 0) == lhs
8711 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
05bccae2
RK
8712 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8713 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
bbf6f052
RK
8714 {
8715 rtx label = gen_label_rtx ();
8716
8717 do_jump (TREE_OPERAND (rhs, 1),
8718 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8719 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8720 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8721 (TREE_CODE (rhs) == BIT_IOR_EXPR
8722 ? integer_one_node
8723 : integer_zero_node)),
b90f141a 8724 0);
e7c33f54 8725 do_pending_stack_adjust ();
bbf6f052
RK
8726 emit_label (label);
8727 return const0_rtx;
8728 }
8729
b90f141a 8730 temp = expand_assignment (lhs, rhs, ! ignore);
0fb7aeda 8731
bbf6f052
RK
8732 return temp;
8733 }
8734
6e7f84a7
APB
8735 case RETURN_EXPR:
8736 if (!TREE_OPERAND (exp, 0))
8737 expand_null_return ();
8738 else
8739 expand_return (TREE_OPERAND (exp, 0));
8740 return const0_rtx;
8741
bbf6f052
RK
8742 case PREINCREMENT_EXPR:
8743 case PREDECREMENT_EXPR:
7b8b9722 8744 return expand_increment (exp, 0, ignore);
bbf6f052
RK
8745
8746 case POSTINCREMENT_EXPR:
8747 case POSTDECREMENT_EXPR:
8748 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 8749 return expand_increment (exp, ! ignore, ignore);
bbf6f052
RK
8750
8751 case ADDR_EXPR:
8403445a
AM
8752 if (modifier == EXPAND_STACK_PARM)
8753 target = 0;
bbf6f052
RK
8754 /* Are we taking the address of a nested function? */
8755 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9 8756 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
e5e809f4
JL
8757 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8758 && ! TREE_STATIC (exp))
bbf6f052
RK
8759 {
8760 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8761 op0 = force_operand (op0, target);
8762 }
682ba3a6
RK
8763 /* If we are taking the address of something erroneous, just
8764 return a zero. */
8765 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8766 return const0_rtx;
d6b6783b
RK
8767 /* If we are taking the address of a constant and are at the
8768 top level, we have to use output_constant_def since we can't
8769 call force_const_mem at top level. */
8770 else if (cfun == 0
8771 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8772 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8773 == 'c')))
8774 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
bbf6f052
RK
8775 else
8776 {
e287fd6e
RK
8777 /* We make sure to pass const0_rtx down if we came in with
8778 ignore set, to avoid doing the cleanups twice for something. */
8779 op0 = expand_expr (TREE_OPERAND (exp, 0),
8780 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
8781 (modifier == EXPAND_INITIALIZER
8782 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 8783
119af78a
RK
8784 /* If we are going to ignore the result, OP0 will have been set
8785 to const0_rtx, so just return it. Don't get confused and
8786 think we are taking the address of the constant. */
8787 if (ignore)
8788 return op0;
8789
73b7f58c
BS
8790 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8791 clever and return a REG when given a MEM. */
8792 op0 = protect_from_queue (op0, 1);
3539e816 8793
c5c76735
JL
8794 /* We would like the object in memory. If it is a constant, we can
8795 have it be statically allocated into memory. For a non-constant,
8796 we need to allocate some memory and store the value into it. */
896102d0
RK
8797
8798 if (CONSTANT_P (op0))
8799 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8800 op0);
682ba3a6 8801 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
df6018fd 8802 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
c1853da7 8803 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
896102d0 8804 {
6c7d86ec
RK
8805 /* If the operand is a SAVE_EXPR, we can deal with this by
8806 forcing the SAVE_EXPR into memory. */
8807 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8808 {
f29a2bd1
MM
8809 put_var_into_stack (TREE_OPERAND (exp, 0),
8810 /*rescan=*/true);
6c7d86ec
RK
8811 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8812 }
df6018fd 8813 else
6c7d86ec
RK
8814 {
8815 /* If this object is in a register, it can't be BLKmode. */
8816 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
19f90fad 8817 rtx memloc = assign_temp (inner_type, 1, 1, 1);
6c7d86ec
RK
8818
8819 if (GET_CODE (op0) == PARALLEL)
8820 /* Handle calls that pass values in multiple
8821 non-contiguous locations. The Irix 6 ABI has examples
8822 of this. */
6e985040 8823 emit_group_store (memloc, op0, inner_type,
6c7d86ec
RK
8824 int_size_in_bytes (inner_type));
8825 else
8826 emit_move_insn (memloc, op0);
0fb7aeda 8827
6c7d86ec
RK
8828 op0 = memloc;
8829 }
896102d0
RK
8830 }
8831
bbf6f052
RK
8832 if (GET_CODE (op0) != MEM)
8833 abort ();
3a94c984 8834
34e81b5a 8835 mark_temp_addr_taken (op0);
bbf6f052 8836 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88f63c77 8837 {
34e81b5a 8838 op0 = XEXP (op0, 0);
5ae6cd0d 8839 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
34e81b5a 8840 op0 = convert_memory_address (ptr_mode, op0);
34e81b5a 8841 return op0;
88f63c77 8842 }
987c71d9 8843
c952ff4b
RK
8844 /* If OP0 is not aligned at least as much as the type requires, we
8845 need to make a temporary, copy OP0 to it, and take the address of
8846 the temporary. We want to use the alignment of the type, not of
8847 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8848 the test for BLKmode means that can't happen. The test for
8849 BLKmode is because we never make mis-aligned MEMs with
8850 non-BLKmode.
8851
8852 We don't need to do this at all if the machine doesn't have
8853 strict alignment. */
8854 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8855 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
ed239f5a
RK
8856 > MEM_ALIGN (op0))
8857 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
a06ef755
RK
8858 {
8859 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bdaa131b 8860 rtx new;
a06ef755 8861
c3d32120
RK
8862 if (TYPE_ALIGN_OK (inner_type))
8863 abort ();
8864
bdaa131b
JM
8865 if (TREE_ADDRESSABLE (inner_type))
8866 {
8867 /* We can't make a bitwise copy of this object, so fail. */
8868 error ("cannot take the address of an unaligned member");
8869 return const0_rtx;
8870 }
8871
8872 new = assign_stack_temp_for_type
8873 (TYPE_MODE (inner_type),
8874 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8875 : int_size_in_bytes (inner_type),
8876 1, build_qualified_type (inner_type,
8877 (TYPE_QUALS (inner_type)
8878 | TYPE_QUAL_CONST)));
8879
44bb111a 8880 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8403445a
AM
8881 (modifier == EXPAND_STACK_PARM
8882 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
bdaa131b 8883
a06ef755
RK
8884 op0 = new;
8885 }
8886
bbf6f052
RK
8887 op0 = force_operand (XEXP (op0, 0), target);
8888 }
987c71d9 8889
05c8e58b
HPN
8890 if (flag_force_addr
8891 && GET_CODE (op0) != REG
8892 && modifier != EXPAND_CONST_ADDRESS
8893 && modifier != EXPAND_INITIALIZER
8894 && modifier != EXPAND_SUM)
987c71d9
RK
8895 op0 = force_reg (Pmode, op0);
8896
dc6d66b3
RK
8897 if (GET_CODE (op0) == REG
8898 && ! REG_USERVAR_P (op0))
bdb429a5 8899 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
987c71d9 8900
5ae6cd0d 8901 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
9fcfcce7 8902 op0 = convert_memory_address (ptr_mode, op0);
88f63c77 8903
bbf6f052
RK
8904 return op0;
8905
8906 case ENTRY_VALUE_EXPR:
8907 abort ();
8908
7308a047
RS
8909 /* COMPLEX type for Extended Pascal & Fortran */
8910 case COMPLEX_EXPR:
8911 {
8912 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 8913 rtx insns;
7308a047
RS
8914
8915 /* Get the rtx code of the operands. */
8916 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8917 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8918
8919 if (! target)
8920 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8921
6551fa4d 8922 start_sequence ();
7308a047
RS
8923
8924 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
8925 emit_move_insn (gen_realpart (mode, target), op0);
8926 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 8927
6551fa4d
JW
8928 insns = get_insns ();
8929 end_sequence ();
8930
7308a047 8931 /* Complex construction should appear as a single unit. */
6551fa4d
JW
8932 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8933 each with a separate pseudo as destination.
8934 It's not correct for flow to treat them as a unit. */
6d6e61ce 8935 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8936 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8937 else
2f937369 8938 emit_insn (insns);
7308a047
RS
8939
8940 return target;
8941 }
8942
8943 case REALPART_EXPR:
2d7050fd
RS
8944 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8945 return gen_realpart (mode, op0);
3a94c984 8946
7308a047 8947 case IMAGPART_EXPR:
2d7050fd
RS
8948 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8949 return gen_imagpart (mode, op0);
7308a047
RS
8950
8951 case CONJ_EXPR:
8952 {
62acb978 8953 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 8954 rtx imag_t;
6551fa4d 8955 rtx insns;
3a94c984
KH
8956
8957 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7308a047
RS
8958
8959 if (! target)
d6a5ac33 8960 target = gen_reg_rtx (mode);
3a94c984 8961
6551fa4d 8962 start_sequence ();
7308a047
RS
8963
8964 /* Store the realpart and the negated imagpart to target. */
62acb978
RK
8965 emit_move_insn (gen_realpart (partmode, target),
8966 gen_realpart (partmode, op0));
7308a047 8967
62acb978 8968 imag_t = gen_imagpart (partmode, target);
91ce572a 8969 temp = expand_unop (partmode,
0fb7aeda
KH
8970 ! unsignedp && flag_trapv
8971 && (GET_MODE_CLASS(partmode) == MODE_INT)
8972 ? negv_optab : neg_optab,
3a94c984 8973 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
8974 if (temp != imag_t)
8975 emit_move_insn (imag_t, temp);
8976
6551fa4d
JW
8977 insns = get_insns ();
8978 end_sequence ();
8979
3a94c984 8980 /* Conjugate should appear as a single unit.
d6a5ac33 8981 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
8982 each with a separate pseudo as destination.
8983 It's not correct for flow to treat them as a unit. */
6d6e61ce 8984 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8985 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8986 else
2f937369 8987 emit_insn (insns);
7308a047
RS
8988
8989 return target;
8990 }
8991
e976b8b2
MS
8992 case TRY_CATCH_EXPR:
8993 {
8994 tree handler = TREE_OPERAND (exp, 1);
8995
8996 expand_eh_region_start ();
8997
8998 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8999
52a11cbf 9000 expand_eh_region_end_cleanup (handler);
e976b8b2
MS
9001
9002 return op0;
9003 }
9004
b335b813
PB
9005 case TRY_FINALLY_EXPR:
9006 {
9007 tree try_block = TREE_OPERAND (exp, 0);
9008 tree finally_block = TREE_OPERAND (exp, 1);
b335b813 9009
8ad8135a 9010 if (!optimize || unsafe_for_reeval (finally_block) > 1)
8943a0b4
RH
9011 {
9012 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9013 is not sufficient, so we cannot expand the block twice.
9014 So we play games with GOTO_SUBROUTINE_EXPR to let us
9015 expand the thing only once. */
8ad8135a
RH
9016 /* When not optimizing, we go ahead with this form since
9017 (1) user breakpoints operate more predictably without
9018 code duplication, and
9019 (2) we're not running any of the global optimizers
9020 that would explode in time/space with the highly
9021 connected CFG created by the indirect branching. */
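	      /* The shape of the code laid out below is roughly

		     <try block, with its cleanup jumping to "finally">
		     goto done;
		   finally:
		     <finally block>
		     goto *return_link;
		   done:

		 so the finally block is emitted only once.  */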
8943a0b4
RH
9022
9023 rtx finally_label = gen_label_rtx ();
9024 rtx done_label = gen_label_rtx ();
9025 rtx return_link = gen_reg_rtx (Pmode);
9026 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9027 (tree) finally_label, (tree) return_link);
9028 TREE_SIDE_EFFECTS (cleanup) = 1;
9029
9030 /* Start a new binding layer that will keep track of all cleanup
9031 actions to be performed. */
9032 expand_start_bindings (2);
9033 target_temp_slot_level = temp_slot_level;
9034
9035 expand_decl_cleanup (NULL_TREE, cleanup);
9036 op0 = expand_expr (try_block, target, tmode, modifier);
9037
9038 preserve_temp_slots (op0);
9039 expand_end_bindings (NULL_TREE, 0, 0);
9040 emit_jump (done_label);
9041 emit_label (finally_label);
9042 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9043 emit_indirect_jump (return_link);
9044 emit_label (done_label);
9045 }
9046 else
9047 {
9048 expand_start_bindings (2);
9049 target_temp_slot_level = temp_slot_level;
b335b813 9050
8943a0b4
RH
9051 expand_decl_cleanup (NULL_TREE, finally_block);
9052 op0 = expand_expr (try_block, target, tmode, modifier);
b335b813 9053
8943a0b4
RH
9054 preserve_temp_slots (op0);
9055 expand_end_bindings (NULL_TREE, 0, 0);
9056 }
b335b813 9057
b335b813
PB
9058 return op0;
9059 }
9060
3a94c984 9061 case GOTO_SUBROUTINE_EXPR:
b335b813
PB
9062 {
9063 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9064 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9065 rtx return_address = gen_label_rtx ();
3a94c984
KH
9066 emit_move_insn (return_link,
9067 gen_rtx_LABEL_REF (Pmode, return_address));
b335b813
PB
9068 emit_jump (subr);
9069 emit_label (return_address);
9070 return const0_rtx;
9071 }
9072
d3707adb
RH
9073 case VA_ARG_EXPR:
9074 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9075
52a11cbf 9076 case EXC_PTR_EXPR:
86c99549 9077 return get_exception_pointer (cfun);
52a11cbf 9078
67231816
RH
9079 case FDESC_EXPR:
9080 /* Function descriptors are not valid except for as
9081 initialization constants, and should not be expanded. */
9082 abort ();
9083
bbf6f052 9084 default:
c9d892a8 9085 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
bbf6f052
RK
9086 }
9087
9088 /* Here to do an ordinary binary operator, generating an instruction
9089 from the optab already placed in `this_optab'. */
9090 binop:
eb698c58
RS
9091 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9092 subtarget, &op0, &op1, 0);
bbf6f052 9093 binop2:
8403445a
AM
9094 if (modifier == EXPAND_STACK_PARM)
9095 target = 0;
bbf6f052
RK
9096 temp = expand_binop (mode, this_optab, op0, op1, target,
9097 unsignedp, OPTAB_LIB_WIDEN);
9098 if (temp == 0)
9099 abort ();
9100 return temp;
9101}
b93a436e 9102\f
1ce7f3c2
RK
9103/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9104 when applied to the address of EXP produces an address known to be
9105 aligned more than BIGGEST_ALIGNMENT. */
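/* A sketch of the form recognized below: OFFSET is essentially
   (- (T) &EXP) & C, possibly with conversions wrapped around each piece,
   where C + 1 is a power of 2 and C is larger than BIGGEST_ALIGNMENT.  */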
9106
9107static int
502b8322 9108is_aligning_offset (tree offset, tree exp)
1ce7f3c2
RK
9109{
9110 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9111 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9112 || TREE_CODE (offset) == NOP_EXPR
9113 || TREE_CODE (offset) == CONVERT_EXPR
9114 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9115 offset = TREE_OPERAND (offset, 0);
9116
9117 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9118 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9119 if (TREE_CODE (offset) != BIT_AND_EXPR
9120 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9121 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9122 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9123 return 0;
9124
9125 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9126 It must be NEGATE_EXPR. Then strip any more conversions. */
9127 offset = TREE_OPERAND (offset, 0);
9128 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9129 || TREE_CODE (offset) == NOP_EXPR
9130 || TREE_CODE (offset) == CONVERT_EXPR)
9131 offset = TREE_OPERAND (offset, 0);
9132
9133 if (TREE_CODE (offset) != NEGATE_EXPR)
9134 return 0;
9135
9136 offset = TREE_OPERAND (offset, 0);
9137 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9138 || TREE_CODE (offset) == NOP_EXPR
9139 || TREE_CODE (offset) == CONVERT_EXPR)
9140 offset = TREE_OPERAND (offset, 0);
9141
9142 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9143 whose type is the same as EXP. */
9144 return (TREE_CODE (offset) == ADDR_EXPR
9145 && (TREE_OPERAND (offset, 0) == exp
9146 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9147 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9148 == TREE_TYPE (exp)))));
9149}
9150\f
e0a2f705 9151/* Return the tree node if ARG corresponds to a string constant, or zero
cc2902df 9152 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
fed3cef0
RK
9153 in bytes within the string that ARG is accessing. The type of the
9154 offset will be `sizetype'. */
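/* Illustrative examples (added, not from the original): for ARG equal
   to &"hello" this returns the STRING_CST "hello" with *PTR_OFFSET set
   to zero; for an ARG of the form &"hello" + 2 it returns the same
   STRING_CST with *PTR_OFFSET set to 2.  */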
b93a436e 9155
28f4ec01 9156tree
502b8322 9157string_constant (tree arg, tree *ptr_offset)
b93a436e
JL
9158{
9159 STRIP_NOPS (arg);
9160
9161 if (TREE_CODE (arg) == ADDR_EXPR
9162 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9163 {
fed3cef0 9164 *ptr_offset = size_zero_node;
b93a436e
JL
9165 return TREE_OPERAND (arg, 0);
9166 }
9167 else if (TREE_CODE (arg) == PLUS_EXPR)
9168 {
9169 tree arg0 = TREE_OPERAND (arg, 0);
9170 tree arg1 = TREE_OPERAND (arg, 1);
9171
9172 STRIP_NOPS (arg0);
9173 STRIP_NOPS (arg1);
9174
9175 if (TREE_CODE (arg0) == ADDR_EXPR
9176 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
bbf6f052 9177 {
fed3cef0 9178 *ptr_offset = convert (sizetype, arg1);
b93a436e 9179 return TREE_OPERAND (arg0, 0);
bbf6f052 9180 }
b93a436e
JL
9181 else if (TREE_CODE (arg1) == ADDR_EXPR
9182 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
bbf6f052 9183 {
fed3cef0 9184 *ptr_offset = convert (sizetype, arg0);
b93a436e 9185 return TREE_OPERAND (arg1, 0);
bbf6f052 9186 }
b93a436e 9187 }
ca695ac9 9188
b93a436e
JL
9189 return 0;
9190}
ca695ac9 9191\f
b93a436e
JL
9192/* Expand code for a post- or pre- increment or decrement
9193 and return the RTX for the result.
9194 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
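/* Added sketch of the intent: for a post-increment such as i++ the
   result is the old value of the operand, so a copy may have to be
   saved before the add is queued; for a pre-increment such as ++i the
   incremented lvalue itself is the result.  */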
1499e0a8 9195
b93a436e 9196static rtx
502b8322 9197expand_increment (tree exp, int post, int ignore)
ca695ac9 9198{
b3694847
SS
9199 rtx op0, op1;
9200 rtx temp, value;
9201 tree incremented = TREE_OPERAND (exp, 0);
b93a436e
JL
9202 optab this_optab = add_optab;
9203 int icode;
9204 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9205 int op0_is_copy = 0;
9206 int single_insn = 0;
9207 /* 1 means we can't store into OP0 directly,
9208 because it is a subreg narrower than a word,
9209 and we don't dare clobber the rest of the word. */
9210 int bad_subreg = 0;
1499e0a8 9211
b93a436e
JL
9212 /* Stabilize any component ref that might need to be
9213 evaluated more than once below. */
9214 if (!post
9215 || TREE_CODE (incremented) == BIT_FIELD_REF
9216 || (TREE_CODE (incremented) == COMPONENT_REF
9217 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9218 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9219 incremented = stabilize_reference (incremented);
9220 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9221 ones into save exprs so that they don't accidentally get evaluated
9222 more than once by the code below. */
9223 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9224 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9225 incremented = save_expr (incremented);
e9a25f70 9226
b93a436e
JL
9227 /* Compute the operands as RTX.
9228 Note whether OP0 is the actual lvalue or a copy of it:
9229 I believe it is a copy iff it is a register or subreg
6d2f8887 9230 and insns were generated in computing it. */
e9a25f70 9231
b93a436e 9232 temp = get_last_insn ();
37a08a29 9233 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
e9a25f70 9234
b93a436e
JL
9235 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9236 in place but instead must do sign- or zero-extension during assignment,
9237 so we copy it into a new register and let the code below use it as
9238 a copy.
e9a25f70 9239
b93a436e
JL
 9240 Note that we can safely modify this SUBREG since it is known not to be
9241 shared (it was made by the expand_expr call above). */
9242
9243 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9244 {
9245 if (post)
9246 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9247 else
9248 bad_subreg = 1;
9249 }
9250 else if (GET_CODE (op0) == SUBREG
9251 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9252 {
9253 /* We cannot increment this SUBREG in place. If we are
9254 post-incrementing, get a copy of the old value. Otherwise,
9255 just mark that we cannot increment in place. */
9256 if (post)
9257 op0 = copy_to_reg (op0);
9258 else
9259 bad_subreg = 1;
e9a25f70
JL
9260 }
9261
b93a436e
JL
9262 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9263 && temp != get_last_insn ());
37a08a29 9264 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
1499e0a8 9265
b93a436e
JL
9266 /* Decide whether incrementing or decrementing. */
9267 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9268 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9269 this_optab = sub_optab;
9270
9271 /* Convert decrement by a constant into a negative increment. */
9272 if (this_optab == sub_optab
9273 && GET_CODE (op1) == CONST_INT)
ca695ac9 9274 {
3a94c984 9275 op1 = GEN_INT (-INTVAL (op1));
b93a436e 9276 this_optab = add_optab;
ca695ac9 9277 }
1499e0a8 9278
91ce572a 9279 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
505ddab6 9280 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
91ce572a 9281
b93a436e
JL
9282 /* For a preincrement, see if we can do this with a single instruction. */
9283 if (!post)
9284 {
9285 icode = (int) this_optab->handlers[(int) mode].insn_code;
9286 if (icode != (int) CODE_FOR_nothing
9287 /* Make sure that OP0 is valid for operands 0 and 1
9288 of the insn we want to queue. */
a995e389
RH
9289 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9290 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9291 && (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e
JL
9292 single_insn = 1;
9293 }
bbf6f052 9294
b93a436e
JL
9295 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9296 then we cannot just increment OP0. We must therefore contrive to
9297 increment the original value. Then, for postincrement, we can return
9298 OP0 since it is a copy of the old value. For preincrement, expand here
9299 unless we can do it with a single insn.
bbf6f052 9300
b93a436e
JL
9301 Likewise if storing directly into OP0 would clobber high bits
9302 we need to preserve (bad_subreg). */
9303 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 9304 {
b93a436e
JL
9305 /* This is the easiest way to increment the value wherever it is.
9306 Problems with multiple evaluation of INCREMENTED are prevented
9307 because either (1) it is a component_ref or preincrement,
9308 in which case it was stabilized above, or (2) it is an array_ref
9309 with constant index in an array in a register, which is
9310 safe to reevaluate. */
9311 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9312 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9313 ? MINUS_EXPR : PLUS_EXPR),
9314 TREE_TYPE (exp),
9315 incremented,
9316 TREE_OPERAND (exp, 1));
a358cee0 9317
b93a436e
JL
9318 while (TREE_CODE (incremented) == NOP_EXPR
9319 || TREE_CODE (incremented) == CONVERT_EXPR)
9320 {
9321 newexp = convert (TREE_TYPE (incremented), newexp);
9322 incremented = TREE_OPERAND (incremented, 0);
9323 }
bbf6f052 9324
b90f141a 9325 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
b93a436e
JL
9326 return post ? op0 : temp;
9327 }
bbf6f052 9328
b93a436e
JL
9329 if (post)
9330 {
9331 /* We have a true reference to the value in OP0.
9332 If there is an insn to add or subtract in this mode, queue it.
9333 Queueing the increment insn avoids the register shuffling
9334 that often results if we must increment now and first save
9335 the old value for subsequent use. */
bbf6f052 9336
b93a436e
JL
9337#if 0 /* Turned off to avoid making extra insn for indexed memref. */
9338 op0 = stabilize (op0);
9339#endif
41dfd40c 9340
b93a436e
JL
9341 icode = (int) this_optab->handlers[(int) mode].insn_code;
9342 if (icode != (int) CODE_FOR_nothing
9343 /* Make sure that OP0 is valid for operands 0 and 1
9344 of the insn we want to queue. */
a995e389
RH
9345 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9346 && (*insn_data[icode].operand[1].predicate) (op0, mode))
b93a436e 9347 {
a995e389 9348 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9349 op1 = force_reg (mode, op1);
bbf6f052 9350
b93a436e
JL
9351 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9352 }
9353 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9354 {
9355 rtx addr = (general_operand (XEXP (op0, 0), mode)
9356 ? force_reg (Pmode, XEXP (op0, 0))
9357 : copy_to_reg (XEXP (op0, 0)));
9358 rtx temp, result;
ca695ac9 9359
792760b9 9360 op0 = replace_equiv_address (op0, addr);
b93a436e 9361 temp = force_reg (GET_MODE (op0), op0);
a995e389 9362 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9363 op1 = force_reg (mode, op1);
ca695ac9 9364
b93a436e
JL
9365 /* The increment queue is LIFO, thus we have to `queue'
9366 the instructions in reverse order. */
9367 enqueue_insn (op0, gen_move_insn (op0, temp));
9368 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9369 return result;
bbf6f052
RK
9370 }
9371 }
ca695ac9 9372
b93a436e
JL
9373 /* Preincrement, or we can't increment with one simple insn. */
9374 if (post)
9375 /* Save a copy of the value before inc or dec, to return it later. */
9376 temp = value = copy_to_reg (op0);
9377 else
9378 /* Arrange to return the incremented value. */
9379 /* Copy the rtx because expand_binop will protect from the queue,
9380 and the results of that would be invalid for us to return
9381 if our caller does emit_queue before using our result. */
9382 temp = copy_rtx (value = op0);
bbf6f052 9383
b93a436e 9384 /* Increment however we can. */
37a08a29 9385 op1 = expand_binop (mode, this_optab, value, op1, op0,
b93a436e 9386 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
37a08a29 9387
b93a436e
JL
9388 /* Make sure the value is stored into OP0. */
9389 if (op1 != op0)
9390 emit_move_insn (op0, op1);
5718612f 9391
b93a436e
JL
9392 return temp;
9393}
9394\f
b93a436e
JL
9395/* Generate code to calculate EXP using a store-flag instruction
9396 and return an rtx for the result. EXP is either a comparison
9397 or a TRUTH_NOT_EXPR whose operand is a comparison.
ca695ac9 9398
b93a436e 9399 If TARGET is nonzero, store the result there if convenient.
ca695ac9 9400
cc2902df 9401 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
b93a436e 9402 cheap.
ca695ac9 9403
b93a436e
JL
9404 Return zero if there is no suitable set-flag instruction
9405 available on this machine.
ca695ac9 9406
b93a436e
JL
9407 Once expand_expr has been called on the arguments of the comparison,
9408 we are committed to doing the store flag, since it is not safe to
9409 re-evaluate the expression. We emit the store-flag insn by calling
9410 emit_store_flag, but only expand the arguments if we have a reason
9411 to believe that emit_store_flag will be successful. If we think that
9412 it will, but it isn't, we have to simulate the store-flag with a
9413 set/jump/set sequence. */
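/* Illustrative fallback (added sketch, not the exact RTL): for
   x = (a < b), a target with an scc pattern can set x to 0 or 1
   directly; otherwise this function emits the equivalent of

       x = 1; if (a < b) goto L; x = 0; L:;

   which is the set/jump/set sequence mentioned above.  */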
ca695ac9 9414
b93a436e 9415static rtx
502b8322 9416do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
b93a436e
JL
9417{
9418 enum rtx_code code;
9419 tree arg0, arg1, type;
9420 tree tem;
9421 enum machine_mode operand_mode;
9422 int invert = 0;
9423 int unsignedp;
9424 rtx op0, op1;
9425 enum insn_code icode;
9426 rtx subtarget = target;
381127e8 9427 rtx result, label;
ca695ac9 9428
b93a436e
JL
9429 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9430 result at the end. We can't simply invert the test since it would
9431 have already been inverted if it were valid. This case occurs for
9432 some floating-point comparisons. */
ca695ac9 9433
b93a436e
JL
9434 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9435 invert = 1, exp = TREE_OPERAND (exp, 0);
ca695ac9 9436
b93a436e
JL
9437 arg0 = TREE_OPERAND (exp, 0);
9438 arg1 = TREE_OPERAND (exp, 1);
5129d2ce
AH
9439
9440 /* Don't crash if the comparison was erroneous. */
9441 if (arg0 == error_mark_node || arg1 == error_mark_node)
9442 return const0_rtx;
9443
b93a436e
JL
9444 type = TREE_TYPE (arg0);
9445 operand_mode = TYPE_MODE (type);
9446 unsignedp = TREE_UNSIGNED (type);
ca695ac9 9447
b93a436e
JL
9448 /* We won't bother with BLKmode store-flag operations because it would mean
9449 passing a lot of information to emit_store_flag. */
9450 if (operand_mode == BLKmode)
9451 return 0;
ca695ac9 9452
b93a436e
JL
9453 /* We won't bother with store-flag operations involving function pointers
9454 when function pointers must be canonicalized before comparisons. */
9455#ifdef HAVE_canonicalize_funcptr_for_compare
9456 if (HAVE_canonicalize_funcptr_for_compare
9457 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9458 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9459 == FUNCTION_TYPE))
9460 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9461 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9462 == FUNCTION_TYPE))))
9463 return 0;
ca695ac9
JB
9464#endif
9465
b93a436e
JL
9466 STRIP_NOPS (arg0);
9467 STRIP_NOPS (arg1);
ca695ac9 9468
b93a436e
JL
9469 /* Get the rtx comparison code to use. We know that EXP is a comparison
9470 operation of some type. Some comparisons against 1 and -1 can be
9471 converted to comparisons with zero. Do so here so that the tests
9472 below will be aware that we have a comparison with zero. These
9473 tests will not catch constants in the first operand, but constants
9474 are rarely passed as the first operand. */
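/* For example (added): a signed x < 1 becomes x <= 0 and a signed
   x > -1 becomes x >= 0, so the zero-comparison special cases below
   can apply.  */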
ca695ac9 9475
b93a436e
JL
9476 switch (TREE_CODE (exp))
9477 {
9478 case EQ_EXPR:
9479 code = EQ;
bbf6f052 9480 break;
b93a436e
JL
9481 case NE_EXPR:
9482 code = NE;
bbf6f052 9483 break;
b93a436e
JL
9484 case LT_EXPR:
9485 if (integer_onep (arg1))
9486 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9487 else
9488 code = unsignedp ? LTU : LT;
ca695ac9 9489 break;
b93a436e
JL
9490 case LE_EXPR:
9491 if (! unsignedp && integer_all_onesp (arg1))
9492 arg1 = integer_zero_node, code = LT;
9493 else
9494 code = unsignedp ? LEU : LE;
ca695ac9 9495 break;
b93a436e
JL
9496 case GT_EXPR:
9497 if (! unsignedp && integer_all_onesp (arg1))
9498 arg1 = integer_zero_node, code = GE;
9499 else
9500 code = unsignedp ? GTU : GT;
9501 break;
9502 case GE_EXPR:
9503 if (integer_onep (arg1))
9504 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9505 else
9506 code = unsignedp ? GEU : GE;
ca695ac9 9507 break;
1eb8759b
RH
9508
9509 case UNORDERED_EXPR:
9510 code = UNORDERED;
9511 break;
9512 case ORDERED_EXPR:
9513 code = ORDERED;
9514 break;
9515 case UNLT_EXPR:
9516 code = UNLT;
9517 break;
9518 case UNLE_EXPR:
9519 code = UNLE;
9520 break;
9521 case UNGT_EXPR:
9522 code = UNGT;
9523 break;
9524 case UNGE_EXPR:
9525 code = UNGE;
9526 break;
9527 case UNEQ_EXPR:
9528 code = UNEQ;
9529 break;
1eb8759b 9530
ca695ac9 9531 default:
b93a436e 9532 abort ();
bbf6f052 9533 }
bbf6f052 9534
b93a436e
JL
9535 /* Put a constant second. */
9536 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9537 {
9538 tem = arg0; arg0 = arg1; arg1 = tem;
9539 code = swap_condition (code);
ca695ac9 9540 }
bbf6f052 9541
b93a436e
JL
9542 /* If this is an equality or inequality test of a single bit, we can
9543 do this by shifting the bit being tested to the low-order bit and
9544 masking the result with the constant 1. If the condition was EQ,
9545 we xor it with 1. This does not require an scc insn and is faster
7960bf22
JL
9546 than an scc insn even if we have it.
9547
9548 The code to make this transformation was moved into fold_single_bit_test,
9549 so we just call into the folder and expand its result. */
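/* For instance (added example): (x & 8) != 0 can be rewritten as
   (x >> 3) & 1, and (x & 8) == 0 as ((x >> 3) & 1) ^ 1; that rewrite
   is what fold_single_bit_test performs before we expand its result.  */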
d39985fa 9550
b93a436e
JL
9551 if ((code == NE || code == EQ)
9552 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9553 && integer_pow2p (TREE_OPERAND (arg0, 1)))
60cd4dae
JL
9554 {
9555 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
9556 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
450b1728 9557 arg0, arg1, type),
60cd4dae
JL
9558 target, VOIDmode, EXPAND_NORMAL);
9559 }
bbf6f052 9560
b93a436e 9561 /* Now see if we are likely to be able to do this. Return if not. */
1eb8759b 9562 if (! can_compare_p (code, operand_mode, ccp_store_flag))
b93a436e 9563 return 0;
1eb8759b 9564
b93a436e
JL
9565 icode = setcc_gen_code[(int) code];
9566 if (icode == CODE_FOR_nothing
a995e389 9567 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
ca695ac9 9568 {
b93a436e
JL
9569 /* We can only do this if it is one of the special cases that
9570 can be handled without an scc insn. */
9571 if ((code == LT && integer_zerop (arg1))
9572 || (! only_cheap && code == GE && integer_zerop (arg1)))
9573 ;
9574 else if (BRANCH_COST >= 0
9575 && ! only_cheap && (code == NE || code == EQ)
9576 && TREE_CODE (type) != REAL_TYPE
9577 && ((abs_optab->handlers[(int) operand_mode].insn_code
9578 != CODE_FOR_nothing)
9579 || (ffs_optab->handlers[(int) operand_mode].insn_code
9580 != CODE_FOR_nothing)))
9581 ;
9582 else
9583 return 0;
ca695ac9 9584 }
3a94c984 9585
296b4ed9 9586 if (! get_subtarget (target)
e3be1116 9587 || GET_MODE (subtarget) != operand_mode)
b93a436e
JL
9588 subtarget = 0;
9589
eb698c58 9590 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
b93a436e
JL
9591
9592 if (target == 0)
9593 target = gen_reg_rtx (mode);
9594
9595 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
 9596 because, if emit_store_flag does anything, it will succeed and
9597 OP0 and OP1 will not be used subsequently. */
ca695ac9 9598
b93a436e
JL
9599 result = emit_store_flag (target, code,
9600 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9601 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9602 operand_mode, unsignedp, 1);
ca695ac9 9603
b93a436e
JL
9604 if (result)
9605 {
9606 if (invert)
9607 result = expand_binop (mode, xor_optab, result, const1_rtx,
9608 result, 0, OPTAB_LIB_WIDEN);
9609 return result;
ca695ac9 9610 }
bbf6f052 9611
b93a436e
JL
9612 /* If this failed, we have to do this with set/compare/jump/set code. */
9613 if (GET_CODE (target) != REG
9614 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9615 target = gen_reg_rtx (GET_MODE (target));
9616
9617 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9618 result = compare_from_rtx (op0, op1, code, unsignedp,
a06ef755 9619 operand_mode, NULL_RTX);
b93a436e
JL
9620 if (GET_CODE (result) == CONST_INT)
9621 return (((result == const0_rtx && ! invert)
9622 || (result != const0_rtx && invert))
9623 ? const0_rtx : const1_rtx);
ca695ac9 9624
8f08e8c0
JL
9625 /* The code of RESULT may not match CODE if compare_from_rtx
9626 decided to swap its operands and reverse the original code.
9627
9628 We know that compare_from_rtx returns either a CONST_INT or
9629 a new comparison code, so it is safe to just extract the
9630 code from RESULT. */
9631 code = GET_CODE (result);
9632
b93a436e
JL
9633 label = gen_label_rtx ();
9634 if (bcc_gen_fctn[(int) code] == 0)
9635 abort ();
0f41302f 9636
b93a436e
JL
9637 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9638 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9639 emit_label (label);
bbf6f052 9640
b93a436e 9641 return target;
ca695ac9 9642}
b93a436e 9643\f
b93a436e 9644
ad82abb8
ZW
9645/* Stubs in case we haven't got a casesi insn. */
9646#ifndef HAVE_casesi
9647# define HAVE_casesi 0
9648# define gen_casesi(a, b, c, d, e) (0)
9649# define CODE_FOR_casesi CODE_FOR_nothing
9650#endif
9651
9652/* If the machine does not have a case insn that compares the bounds,
9653 this means extra overhead for dispatch tables, which raises the
9654 threshold for using them. */
9655#ifndef CASE_VALUES_THRESHOLD
9656#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9657#endif /* CASE_VALUES_THRESHOLD */
9658
9659unsigned int
502b8322 9660case_values_threshold (void)
ad82abb8
ZW
9661{
9662 return CASE_VALUES_THRESHOLD;
9663}
9664
9665/* Attempt to generate a casesi instruction. Returns 1 if successful,
9666 0 otherwise (i.e. if there is no casesi instruction). */
9667int
502b8322
AJ
9668try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9669 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
ad82abb8
ZW
9670{
9671 enum machine_mode index_mode = SImode;
9672 int index_bits = GET_MODE_BITSIZE (index_mode);
9673 rtx op1, op2, index;
9674 enum machine_mode op_mode;
9675
9676 if (! HAVE_casesi)
9677 return 0;
9678
9679 /* Convert the index to SImode. */
9680 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9681 {
9682 enum machine_mode omode = TYPE_MODE (index_type);
9683 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9684
9685 /* We must handle the endpoints in the original mode. */
9686 index_expr = build (MINUS_EXPR, index_type,
9687 index_expr, minval);
9688 minval = integer_zero_node;
9689 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9690 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
a06ef755 9691 omode, 1, default_label);
ad82abb8
ZW
9692 /* Now we can safely truncate. */
9693 index = convert_to_mode (index_mode, index, 0);
9694 }
9695 else
9696 {
9697 if (TYPE_MODE (index_type) != index_mode)
9698 {
b0c48229
NB
9699 index_expr = convert ((*lang_hooks.types.type_for_size)
9700 (index_bits, 0), index_expr);
ad82abb8
ZW
9701 index_type = TREE_TYPE (index_expr);
9702 }
9703
9704 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9705 }
9706 emit_queue ();
9707 index = protect_from_queue (index, 0);
9708 do_pending_stack_adjust ();
9709
9710 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9711 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9712 (index, op_mode))
9713 index = copy_to_mode_reg (op_mode, index);
e87b4f3f 9714
ad82abb8
ZW
9715 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9716
9717 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9718 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9719 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
9720 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9721 (op1, op_mode))
9722 op1 = copy_to_mode_reg (op_mode, op1);
9723
9724 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9725
9726 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9727 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9728 op2, TREE_UNSIGNED (TREE_TYPE (range)));
9729 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9730 (op2, op_mode))
9731 op2 = copy_to_mode_reg (op_mode, op2);
9732
9733 emit_jump_insn (gen_casesi (index, op1, op2,
9734 table_label, default_label));
9735 return 1;
9736}
9737
9738/* Attempt to generate a tablejump instruction; same concept. */
9739#ifndef HAVE_tablejump
9740#define HAVE_tablejump 0
9741#define gen_tablejump(x, y) (0)
9742#endif
9743
9744/* Subroutine of the next function.
9745
9746 INDEX is the value being switched on, with the lowest value
b93a436e
JL
9747 in the table already subtracted.
9748 MODE is its expected mode (needed if INDEX is constant).
9749 RANGE is the length of the jump table.
9750 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
88d3b7f0 9751
b93a436e
JL
9752 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9753 index value is out of range. */
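/* Rough picture of the emitted dispatch (added sketch, not the exact
   RTL):

       if ((unsigned) index > range) goto default_label;
       goto *table_label[index];

   The single unsigned comparison also rejects values below the
   original minimum, because the caller has already subtracted the
   lower bound from INDEX.  */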
0f41302f 9754
ad82abb8 9755static void
502b8322
AJ
9756do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9757 rtx default_label)
ca695ac9 9758{
b3694847 9759 rtx temp, vector;
88d3b7f0 9760
74f6d071
JH
9761 if (INTVAL (range) > cfun->max_jumptable_ents)
9762 cfun->max_jumptable_ents = INTVAL (range);
1877be45 9763
b93a436e
JL
9764 /* Do an unsigned comparison (in the proper mode) between the index
9765 expression and the value which represents the length of the range.
9766 Since we just finished subtracting the lower bound of the range
9767 from the index expression, this comparison allows us to simultaneously
9768 check that the original index expression value is both greater than
9769 or equal to the minimum value of the range and less than or equal to
9770 the maximum value of the range. */
709f5be1 9771
c5d5d461 9772 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
a06ef755 9773 default_label);
bbf6f052 9774
b93a436e
JL
9775 /* If index is in range, it must fit in Pmode.
9776 Convert to Pmode so we can index with it. */
9777 if (mode != Pmode)
9778 index = convert_to_mode (Pmode, index, 1);
bbf6f052 9779
b93a436e
JL
 9780 /* Don't let a MEM slip through, because then INDEX that comes
9781 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9782 and break_out_memory_refs will go to work on it and mess it up. */
9783#ifdef PIC_CASE_VECTOR_ADDRESS
9784 if (flag_pic && GET_CODE (index) != REG)
9785 index = copy_to_mode_reg (Pmode, index);
9786#endif
ca695ac9 9787
b93a436e
JL
9788 /* If flag_force_addr were to affect this address
9789 it could interfere with the tricky assumptions made
9790 about addresses that contain label-refs,
9791 which may be valid only very near the tablejump itself. */
9792 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9793 GET_MODE_SIZE, because this indicates how large insns are. The other
9794 uses should all be Pmode, because they are addresses. This code
9795 could fail if addresses and insns are not the same size. */
9796 index = gen_rtx_PLUS (Pmode,
9797 gen_rtx_MULT (Pmode, index,
9798 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9799 gen_rtx_LABEL_REF (Pmode, table_label));
9800#ifdef PIC_CASE_VECTOR_ADDRESS
9801 if (flag_pic)
9802 index = PIC_CASE_VECTOR_ADDRESS (index);
9803 else
bbf6f052 9804#endif
b93a436e
JL
9805 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9806 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9807 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9808 RTX_UNCHANGING_P (vector) = 1;
4da2eb6b 9809 MEM_NOTRAP_P (vector) = 1;
b93a436e
JL
9810 convert_move (temp, vector, 0);
9811
9812 emit_jump_insn (gen_tablejump (temp, table_label));
9813
9814 /* If we are generating PIC code or if the table is PC-relative, the
9815 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9816 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9817 emit_barrier ();
bbf6f052 9818}
b93a436e 9819
ad82abb8 9820int
502b8322
AJ
9821try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9822 rtx table_label, rtx default_label)
ad82abb8
ZW
9823{
9824 rtx index;
9825
9826 if (! HAVE_tablejump)
9827 return 0;
9828
9829 index_expr = fold (build (MINUS_EXPR, index_type,
9830 convert (index_type, index_expr),
9831 convert (index_type, minval)));
9832 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9833 emit_queue ();
9834 index = protect_from_queue (index, 0);
9835 do_pending_stack_adjust ();
9836
9837 do_tablejump (index, TYPE_MODE (index_type),
9838 convert_modes (TYPE_MODE (index_type),
9839 TYPE_MODE (TREE_TYPE (range)),
9840 expand_expr (range, NULL_RTX,
9841 VOIDmode, 0),
9842 TREE_UNSIGNED (TREE_TYPE (range))),
9843 table_label, default_label);
9844 return 1;
9845}
e2500fed 9846
cb2a532e
AH
9847/* Nonzero if the mode is a valid vector mode for this architecture.
9848 This returns nonzero even if there is no hardware support for the
9849 vector mode, but we can emulate with narrower modes. */
9850
9851int
502b8322 9852vector_mode_valid_p (enum machine_mode mode)
cb2a532e
AH
9853{
9854 enum mode_class class = GET_MODE_CLASS (mode);
9855 enum machine_mode innermode;
9856
9857 /* Doh! What's going on? */
9858 if (class != MODE_VECTOR_INT
9859 && class != MODE_VECTOR_FLOAT)
9860 return 0;
9861
9862 /* Hardware support. Woo hoo! */
9863 if (VECTOR_MODE_SUPPORTED_P (mode))
9864 return 1;
9865
9866 innermode = GET_MODE_INNER (mode);
9867
 9868 /* We should probably return 1 if requesting V4DI when we have no DI
 9869 but do have V2DI, but that case is probably very unlikely. */
9870
9871 /* If we have support for the inner mode, we can safely emulate it.
 9872 We may not have V2DI, but we can emulate it with a pair of DIs. */
9873 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9874}
9875
d744e06e
AH
9876/* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9877static rtx
502b8322 9878const_vector_from_tree (tree exp)
d744e06e
AH
9879{
9880 rtvec v;
9881 int units, i;
9882 tree link, elt;
9883 enum machine_mode inner, mode;
9884
9885 mode = TYPE_MODE (TREE_TYPE (exp));
9886
9887 if (is_zeros_p (exp))
9888 return CONST0_RTX (mode);
9889
9890 units = GET_MODE_NUNITS (mode);
9891 inner = GET_MODE_INNER (mode);
9892
9893 v = rtvec_alloc (units);
9894
9895 link = TREE_VECTOR_CST_ELTS (exp);
9896 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9897 {
9898 elt = TREE_VALUE (link);
9899
9900 if (TREE_CODE (elt) == REAL_CST)
9901 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9902 inner);
9903 else
9904 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9905 TREE_INT_CST_HIGH (elt),
9906 inner);
9907 }
9908
5f6c070d
AH
9909 /* Initialize remaining elements to 0. */
9910 for (; i < units; ++i)
9911 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9912
d744e06e
AH
9913 return gen_rtx_raw_CONST_VECTOR (mode, v);
9914}
9915
e2500fed 9916#include "gt-expr.h"