/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};

static rtx enqueue_insn		PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
static tree emit_block_move_libcall_fn PARAMS ((int));
static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
static tree clear_storage_libcall_fn PARAMS ((int));
static rtx compress_float_constant PARAMS ((rtx, rtx));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor	PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int, tree,
					 int));
static rtx var_rtx		PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
static int is_aligning_offset	PARAMS ((tree, tree));
static rtx expand_increment	PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
#ifdef PUSH_ROUNDING
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
#endif
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

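/* Worked example (illustrative only; the numbers assume UNITS_PER_WORD
   of 4 and the default MOVE_RATIO of 15, both of which are
   target-specific): a word-aligned copy of 16 bytes costs four SImode
   moves, so MOVE_BY_PIECES_P (16, 32) holds and the copy is expanded
   inline, while a 256-byte copy would cost 64 moves and is left to a
   movstr pattern or the memcpy libcall instead.  */
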
/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	case CODE_LABEL:
	case BARRIER:
	case NOTE:
	  QUEUED_INSN (p) = body;
	  emit_insn (body);
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      pending_chain = QUEUED_NEXT (p);
    }
}
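
/* Illustrative sketch only (never compiled): how a caller elsewhere in
   the compiler is expected to use the queue API above when expanding a
   post-increment.  VAR and TARGET are hypothetical pseudos.  */
#if 0
  {
    rtx var = ...;			/* Pseudo holding the value of V.  */
    rtx target = ...;			/* Where the result should go.  */

    /* Queue "V = V + 1"; Q stands for the pre-increment value of V.  */
    rtx q = enqueue_insn (var, gen_add2_insn (var, const1_rtx));

    /* Before putting Q into an insn, convert it back to ordinary RTL.  */
    emit_move_insn (target, protect_from_queue (q, 0));

    /* Flush all queued increments at the next sequence point.  */
    emit_queue ();
  }
#endif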
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.   This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
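
/* Worked example (illustrative only): converting the QImode constant
   (const_int -1) to HImode with UNSIGNEDP set must yield 0x00ff, not
   0xffff.  The widening CONST_INT path above first masks VAL down to
   the old mode's width (val &= 0xff, giving 255) and, since UNSIGNEDP
   is set, skips the sign-extension step before calling gen_int_mode.  */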
\f
/* This macro is used to determine the largest unit size that
   move_by_pieces can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

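/* For instance (illustrative numbers, not a statement about any real
   target): with a hypothetical MOVE_MAX of 16, a host with 64-bit
   HOST_WIDE_INT gives STORE_MAX_PIECES = MIN (16, 2 * 8) = 16, while
   a 32-bit HOST_WIDE_INT gives MIN (16, 2 * 4) = 8, since an immediate
   constant wider than two host words cannot be represented.  */
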
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
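
/* Worked example (illustrative only; assumes MOVE_MAX_PIECES of 4 and
   a word-aligned block): copying LEN == 11 bytes proceeds largest mode
   first, giving two SImode moves (8 bytes, 3 left), one HImode move
   (1 left), and one QImode move, four move insns in total.  This is
   the same greedy walk move_by_pieces_ninsns below uses to cost a
   copy: 11/4 = 2, 3/2 = 1, 1/1 = 1.  */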

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, method)
     rtx x, y, size;
     enum block_op_methods method;
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();
  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
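
/* Illustrative sketch only (never compiled): a typical call from
   elsewhere in the compiler.  DEST and SRC are hypothetical BLKmode
   MEMs already passed through the usual expansion machinery.  */
#if 0
  {
    rtx dest = ...;			/* BLKmode MEM, destination.  */
    rtx src = ...;			/* BLKmode MEM, source.  */

    /* Copy 32 bytes; a normal copy may use move_by_pieces, a movstr
       pattern, or the memcpy libcall, whichever the heuristics pick.  */
    emit_block_move (dest, src, GEN_INT (32), BLOCK_OP_NORMAL);
  }
#endif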

/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm ()
{
  if (PUSH_ARGS)
    return true;
  else
    {
      /* Check to see whether memcpy takes all register arguments.  */
      static enum {
	takes_regs_uninit, takes_regs_no, takes_regs_yes
      } takes_regs = takes_regs_uninit;

      switch (takes_regs)
	{
	case takes_regs_uninit:
	  {
	    CUMULATIVE_ARGS args_so_far;
	    tree fn, arg;

	    fn = emit_block_move_libcall_fn (false);
	    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);

	    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
	    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
	      {
		enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
		rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
		if (!tmp || !REG_P (tmp))
		  goto fail_takes_regs;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
		if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
						NULL_TREE, 1))
		  goto fail_takes_regs;
#endif
		FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
	      }
	  }
	  takes_regs = takes_regs_yes;
	  /* FALLTHRU */

	case takes_regs_yes:
	  return true;

	fail_takes_regs:
	  takes_regs = takes_regs_no;
	  /* FALLTHRU */
	case takes_regs_no:
	  return false;

	default:
	  abort ();
	}
    }
}
1819
4ca79136
RH
1820/* A subroutine of emit_block_move. Expand a movstr pattern;
1821 return true if successful. */
3ef1eef4 1822
4ca79136
RH
1823static bool
1824emit_block_move_via_movstr (x, y, size, align)
1825 rtx x, y, size;
1826 unsigned int align;
1827{
1828 /* Try the most limited insn first, because there's no point
1829 including more than one in the machine description unless
1830 the more limited one has some advantage. */
266007a7 1831
4ca79136
RH
1832 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1833 enum machine_mode mode;
266007a7 1834
4ca79136
RH
1835 /* Since this is a move insn, we don't care about volatility. */
1836 volatile_ok = 1;
1837
1838 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1839 mode = GET_MODE_WIDER_MODE (mode))
1840 {
1841 enum insn_code code = movstr_optab[(int) mode];
1842 insn_operand_predicate_fn pred;
1843
1844 if (code != CODE_FOR_nothing
1845 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1846 here because if SIZE is less than the mode mask, as it is
1847 returned by the macro, it will definitely be less than the
1848 actual mode mask. */
1849 && ((GET_CODE (size) == CONST_INT
1850 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1851 <= (GET_MODE_MASK (mode) >> 1)))
1852 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1853 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1854 || (*pred) (x, BLKmode))
1855 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1856 || (*pred) (y, BLKmode))
1857 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1858 || (*pred) (opalign, VOIDmode)))
1859 {
1860 rtx op2;
1861 rtx last = get_last_insn ();
1862 rtx pat;
1863
1864 op2 = convert_to_mode (mode, size, 1);
1865 pred = insn_data[(int) code].operand[2].predicate;
1866 if (pred != 0 && ! (*pred) (op2, mode))
1867 op2 = copy_to_mode_reg (mode, op2);
1868
1869 /* ??? When called via emit_block_move_for_call, it'd be
1870 nice if there were some way to inform the backend, so
1871 that it doesn't fail the expansion because it thinks
1872 emitting the libcall would be more efficient. */
1873
1874 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1875 if (pat)
1876 {
1877 emit_insn (pat);
1878 volatile_ok = 0;
1879 return true;
bbf6f052 1880 }
4ca79136
RH
1881 else
1882 delete_insns_since (last);
bbf6f052 1883 }
4ca79136 1884 }
bbf6f052 1885
4ca79136
RH
1886 volatile_ok = 0;
1887 return false;
1888}
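/* For reference, the movstrM patterns tried above take their operands in
   the order passed to GEN_FCN: operand 0 is the destination BLKmode MEM,
   operand 1 the source BLKmode MEM, operand 2 the byte count in mode M,
   and operand 3 the shared alignment in bytes as a CONST_INT. */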
3ef1eef4 1889
4ca79136
RH
1890/* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1891 Return the return value from memcpy, 0 otherwise. */
4bc973ae 1892
4ca79136
RH
1893static rtx
1894emit_block_move_via_libcall (dst, src, size)
1895 rtx dst, src, size;
1896{
1897 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1898 enum machine_mode size_mode;
1899 rtx retval;
4bc973ae 1900
4ca79136 1901 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
4bc973ae 1902
4ca79136
RH
1903 It is unsafe to save the value generated by protect_from_queue
1904 and reuse it later. Consider what happens if emit_queue is
1905 called before the return value from protect_from_queue is used.
4bc973ae 1906
4ca79136
RH
1907 Expansion of the CALL_EXPR below will call emit_queue before
1908 we are finished emitting RTL for argument setup. So if we are
1909 not careful we could get the wrong value for an argument.
4bc973ae 1910
4ca79136
RH
1911 To avoid this problem we go ahead and emit code to copy X, Y &
1912 SIZE into new pseudos. We can then place those new pseudos
1913 into an RTL_EXPR and use them later, even after a call to
1914 emit_queue.
4bc973ae 1915
4ca79136
RH
1916 Note this is not strictly needed for library calls since they
1917 do not call emit_queue before loading their arguments. However,
1918 we may need to have library calls call emit_queue in the future
1919 since failing to do so could cause problems for targets which
1920 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1921
1922 dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1923 src = copy_to_mode_reg (Pmode, XEXP (src, 0));
1924
1925 if (TARGET_MEM_FUNCTIONS)
1926 size_mode = TYPE_MODE (sizetype);
1927 else
1928 size_mode = TYPE_MODE (unsigned_type_node);
1929 size = convert_to_mode (size_mode, size, 1);
1930 size = copy_to_mode_reg (size_mode, size);
1931
1932 /* It is incorrect to use the libcall calling conventions to call
1933 memcpy in this context. This could be a user call to memcpy and
1934 the user may wish to examine the return value from memcpy. For
1935 targets where libcalls and normal calls have different conventions
1936 for returning pointers, we could end up generating incorrect code.
1937
1938 For convenience, we generate the call to bcopy this way as well. */
1939
1940 dst_tree = make_tree (ptr_type_node, dst);
1941 src_tree = make_tree (ptr_type_node, src);
1942 if (TARGET_MEM_FUNCTIONS)
1943 size_tree = make_tree (sizetype, size);
1944 else
1945 size_tree = make_tree (unsigned_type_node, size);
1946
1947 fn = emit_block_move_libcall_fn (true);
1948 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1949 if (TARGET_MEM_FUNCTIONS)
1950 {
1951 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1952 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1953 }
1954 else
1955 {
1956 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1957 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1958 }
1959
1960 /* Now we have to build up the CALL_EXPR itself. */
1961 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1962 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1963 call_expr, arg_list, NULL_TREE);
1964 TREE_SIDE_EFFECTS (call_expr) = 1;
1965
1966 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1967
1968 /* If we are initializing a readonly value, show the above call
1969 clobbered it. Otherwise, a load from it may erroneously be
1970 hoisted from a loop. */
1971 if (RTX_UNCHANGING_P (dst))
1972 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
1973
1974 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
1975}
52cf7115 1976
4ca79136
RH
1977/* A subroutine of emit_block_move_via_libcall. Create the tree node
1978 for the function we use for block copies. The first time FOR_CALL
1979 is true, we call assemble_external. */
52cf7115 1980
4ca79136
RH
1981static GTY(()) tree block_move_fn;
1982
1983static tree
1984emit_block_move_libcall_fn (for_call)
1985 int for_call;
1986{
1987 static bool emitted_extern;
1988 tree fn = block_move_fn, args;
52cf7115 1989
4ca79136
RH
1990 if (!fn)
1991 {
1992 if (TARGET_MEM_FUNCTIONS)
52cf7115 1993 {
4ca79136
RH
1994 fn = get_identifier ("memcpy");
1995 args = build_function_type_list (ptr_type_node, ptr_type_node,
1996 const_ptr_type_node, sizetype,
1997 NULL_TREE);
1998 }
1999 else
2000 {
2001 fn = get_identifier ("bcopy");
2002 args = build_function_type_list (void_type_node, const_ptr_type_node,
2003 ptr_type_node, unsigned_type_node,
2004 NULL_TREE);
52cf7115
JL
2005 }
2006
4ca79136
RH
2007 fn = build_decl (FUNCTION_DECL, fn, args);
2008 DECL_EXTERNAL (fn) = 1;
2009 TREE_PUBLIC (fn) = 1;
2010 DECL_ARTIFICIAL (fn) = 1;
2011 TREE_NOTHROW (fn) = 1;
66c60e67 2012
4ca79136 2013 block_move_fn = fn;
bbf6f052 2014 }
e9a25f70 2015
4ca79136
RH
2016 if (for_call && !emitted_extern)
2017 {
2018 emitted_extern = true;
2019 make_decl_rtl (fn, NULL);
2020 assemble_external (fn);
2021 }
2022
2023 return fn;
bbf6f052 2024}
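/* For reference, the FUNCTION_DECL built above corresponds to one of
   these C prototypes, depending on TARGET_MEM_FUNCTIONS:

	void *memcpy (void *dst, const void *src, size_t n);
	void bcopy (const void *src, void *dst, unsigned n);  */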
44bb111a
RH
2025
2026/* A subroutine of emit_block_move. Copy the data via an explicit
2027 loop. This is used only when libcalls are forbidden. */
2028/* ??? It'd be nice to copy in hunks larger than QImode. */
2029
2030static void
2031emit_block_move_via_loop (x, y, size, align)
2032 rtx x, y, size;
2033 unsigned int align ATTRIBUTE_UNUSED;
2034{
2035 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2036 enum machine_mode iter_mode;
2037
2038 iter_mode = GET_MODE (size);
2039 if (iter_mode == VOIDmode)
2040 iter_mode = word_mode;
2041
2042 top_label = gen_label_rtx ();
2043 cmp_label = gen_label_rtx ();
2044 iter = gen_reg_rtx (iter_mode);
2045
2046 emit_move_insn (iter, const0_rtx);
2047
2048 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2049 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2050 do_pending_stack_adjust ();
2051
2052 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2053
2054 emit_jump (cmp_label);
2055 emit_label (top_label);
2056
2057 tmp = convert_modes (Pmode, iter_mode, iter, true);
2058 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2059 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2060 x = change_address (x, QImode, x_addr);
2061 y = change_address (y, QImode, y_addr);
2062
2063 emit_move_insn (x, y);
2064
2065 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2066 true, OPTAB_LIB_WIDEN);
2067 if (tmp != iter)
2068 emit_move_insn (iter, tmp);
2069
2070 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2071 emit_label (cmp_label);
2072
2073 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2074 true, top_label);
2075
2076 emit_note (NULL, NOTE_INSN_LOOP_END);
2077}
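/* The RTL emitted above corresponds to this C sketch (the iterator
   actually lives in word_mode when SIZE has VOIDmode):

	iter = 0;
	goto cmp;
     top:
	((char *) x)[iter] = ((char *) y)[iter];
	iter += 1;
     cmp:
	if (iter < size)
	  goto top;
*/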
bbf6f052
RK
2078\f
2079/* Copy all or part of a value X into registers starting at REGNO.
2080 The number of registers to be filled is NREGS. */
2081
2082void
2083move_block_to_reg (regno, x, nregs, mode)
2084 int regno;
2085 rtx x;
2086 int nregs;
2087 enum machine_mode mode;
2088{
2089 int i;
381127e8 2090#ifdef HAVE_load_multiple
3a94c984 2091 rtx pat;
381127e8
RL
2092 rtx last;
2093#endif
bbf6f052 2094
72bb9717
RK
2095 if (nregs == 0)
2096 return;
2097
bbf6f052
RK
2098 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2099 x = validize_mem (force_const_mem (mode, x));
2100
2101 /* See if the machine can do this with a load multiple insn. */
2102#ifdef HAVE_load_multiple
c3a02afe 2103 if (HAVE_load_multiple)
bbf6f052 2104 {
c3a02afe 2105 last = get_last_insn ();
38a448ca 2106 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
c3a02afe
RK
2107 GEN_INT (nregs));
2108 if (pat)
2109 {
2110 emit_insn (pat);
2111 return;
2112 }
2113 else
2114 delete_insns_since (last);
bbf6f052 2115 }
bbf6f052
RK
2116#endif
2117
2118 for (i = 0; i < nregs; i++)
38a448ca 2119 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
bbf6f052
RK
2120 operand_subword_force (x, i, mode));
2121}
2122
2123/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
0040593d
JW
2124 The number of registers to be filled is NREGS. SIZE indicates the number
2125 of bytes in the object X. */
2126
bbf6f052 2127void
0040593d 2128move_block_from_reg (regno, x, nregs, size)
bbf6f052
RK
2129 int regno;
2130 rtx x;
2131 int nregs;
0040593d 2132 int size;
bbf6f052
RK
2133{
2134 int i;
381127e8 2135#ifdef HAVE_store_multiple
3a94c984 2136 rtx pat;
381127e8
RL
2137 rtx last;
2138#endif
58a32c5c 2139 enum machine_mode mode;
bbf6f052 2140
2954d7db
RK
2141 if (nregs == 0)
2142 return;
2143
58a32c5c
DE
2144 /* If SIZE is that of a mode no bigger than a word, just use that
2145 mode's store operation. */
2146 if (size <= UNITS_PER_WORD
0d7839da
SE
2147 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
2148 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
58a32c5c 2149 {
792760b9 2150 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
58a32c5c
DE
2151 return;
2152 }
3a94c984 2153
0040593d 2154 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
58a32c5c
DE
2155 to the left before storing to memory. Note that the previous test
2156 doesn't handle all cases (e.g. SIZE == 3). */
0d7839da
SE
2157 if (size < UNITS_PER_WORD
2158 && BYTES_BIG_ENDIAN
2159 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
0040593d
JW
2160 {
2161 rtx tem = operand_subword (x, 0, 1, BLKmode);
2162 rtx shift;
2163
2164 if (tem == 0)
2165 abort ();
2166
2167 shift = expand_shift (LSHIFT_EXPR, word_mode,
38a448ca 2168 gen_rtx_REG (word_mode, regno),
0040593d
JW
2169 build_int_2 ((UNITS_PER_WORD - size)
2170 * BITS_PER_UNIT, 0), NULL_RTX, 0);
2171 emit_move_insn (tem, shift);
2172 return;
2173 }
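  /* For instance, with UNITS_PER_WORD == 4 and SIZE == 3 on a
     BYTES_BIG_ENDIAN target, the three meaningful bytes sit at the
     low-order end of the register, so the shift above moves them left
     by (4 - 3) * BITS_PER_UNIT == 8 bits before the word is stored. */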
2174
bbf6f052
RK
2175 /* See if the machine can do this with a store multiple insn. */
2176#ifdef HAVE_store_multiple
c3a02afe 2177 if (HAVE_store_multiple)
bbf6f052 2178 {
c3a02afe 2179 last = get_last_insn ();
38a448ca 2180 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
c3a02afe
RK
2181 GEN_INT (nregs));
2182 if (pat)
2183 {
2184 emit_insn (pat);
2185 return;
2186 }
2187 else
2188 delete_insns_since (last);
bbf6f052 2189 }
bbf6f052
RK
2190#endif
2191
2192 for (i = 0; i < nregs; i++)
2193 {
2194 rtx tem = operand_subword (x, i, 1, BLKmode);
2195
2196 if (tem == 0)
2197 abort ();
2198
38a448ca 2199 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
bbf6f052
RK
2200 }
2201}
2202
aac5cc16
RH
2203/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2204 registers represented by a PARALLEL. SSIZE represents the total size of
04050c69 2205 block SRC in bytes, or -1 if not known. */
d6a7951f 2206/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
aac5cc16
RH
2207 the balance will be in what would be the low-order memory addresses, i.e.
2208 left justified for big endian, right justified for little endian. This
2209 happens to be true for the targets currently using this support. If this
2210 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2211 would be needed. */
fffa9c1d
JW
2212
2213void
04050c69 2214emit_group_load (dst, orig_src, ssize)
aac5cc16 2215 rtx dst, orig_src;
729a2125 2216 int ssize;
fffa9c1d 2217{
aac5cc16
RH
2218 rtx *tmps, src;
2219 int start, i;
fffa9c1d 2220
aac5cc16 2221 if (GET_CODE (dst) != PARALLEL)
fffa9c1d
JW
2222 abort ();
2223
2224 /* Check for a NULL entry, used to indicate that the parameter goes
2225 both on the stack and in registers. */
aac5cc16
RH
2226 if (XEXP (XVECEXP (dst, 0, 0), 0))
2227 start = 0;
fffa9c1d 2228 else
aac5cc16
RH
2229 start = 1;
2230
3a94c984 2231 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
aac5cc16 2232
aac5cc16
RH
2233 /* Process the pieces. */
2234 for (i = start; i < XVECLEN (dst, 0); i++)
2235 {
2236 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
770ae6cc
RK
2237 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2238 unsigned int bytelen = GET_MODE_SIZE (mode);
aac5cc16
RH
2239 int shift = 0;
2240
2241 /* Handle trailing fragments that run over the size of the struct. */
8752c357 2242 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
aac5cc16
RH
2243 {
2244 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2245 bytelen = ssize - bytepos;
2246 if (bytelen <= 0)
729a2125 2247 abort ();
aac5cc16
RH
2248 }
2249
f3ce87a9
DE
2250 /* If we won't be loading directly from memory, protect the real source
2251 from strange tricks we might play; but make sure that the source can
2252 be loaded directly into the destination. */
2253 src = orig_src;
2254 if (GET_CODE (orig_src) != MEM
2255 && (!CONSTANT_P (orig_src)
2256 || (GET_MODE (orig_src) != mode
2257 && GET_MODE (orig_src) != VOIDmode)))
2258 {
2259 if (GET_MODE (orig_src) == VOIDmode)
2260 src = gen_reg_rtx (mode);
2261 else
2262 src = gen_reg_rtx (GET_MODE (orig_src));
04050c69 2263
f3ce87a9
DE
2264 emit_move_insn (src, orig_src);
2265 }
2266
aac5cc16
RH
2267 /* Optimize the access just a bit. */
2268 if (GET_CODE (src) == MEM
04050c69 2269 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
729a2125 2270 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16
RH
2271 && bytelen == GET_MODE_SIZE (mode))
2272 {
2273 tmps[i] = gen_reg_rtx (mode);
f4ef873c 2274 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
fffa9c1d 2275 }
7c4a6db0
JW
2276 else if (GET_CODE (src) == CONCAT)
2277 {
015b1ad1
JDA
2278 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2279 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2280
2281 if ((bytepos == 0 && bytelen == slen0)
2282 || (bytepos != 0 && bytepos + bytelen <= slen))
cbb92744 2283 {
015b1ad1
JDA
2284 /* The following assumes that the concatenated objects all
2285 have the same size. In this case, a simple calculation
2286 can be used to determine the object and the bit field
2287 to be extracted. */
2288 tmps[i] = XEXP (src, bytepos / slen0);
cbb92744
JJ
2289 if (! CONSTANT_P (tmps[i])
2290 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2291 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
015b1ad1
JDA
2292 (bytepos % slen0) * BITS_PER_UNIT,
2293 1, NULL_RTX, mode, mode, ssize);
cbb92744 2294 }
58f69841
JH
2295 else if (bytepos == 0)
2296 {
015b1ad1 2297 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
58f69841 2298 emit_move_insn (mem, src);
04050c69 2299 tmps[i] = adjust_address (mem, mode, 0);
58f69841 2300 }
7c4a6db0
JW
2301 else
2302 abort ();
2303 }
f3ce87a9 2304 else if (CONSTANT_P (src)
2ee5437b
RH
2305 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2306 tmps[i] = src;
fffa9c1d 2307 else
19caa751
RK
2308 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2309 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
04050c69 2310 mode, mode, ssize);
fffa9c1d 2311
aac5cc16 2312 if (BYTES_BIG_ENDIAN && shift)
19caa751
RK
2313 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2314 tmps[i], 0, OPTAB_WIDEN);
fffa9c1d 2315 }
19caa751 2316
3a94c984 2317 emit_queue ();
aac5cc16
RH
2318
2319 /* Copy the extracted pieces into the proper (probable) hard regs. */
2320 for (i = start; i < XVECLEN (dst, 0); i++)
2321 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
fffa9c1d
JW
2322}
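/* Example: a sketch of the kind of PARALLEL DST this function expects.
   The hard register numbers, modes and offsets are illustrative only
   (a 16-byte value arriving in two DImode registers):

	rtx dst
	  = gen_rtx_PARALLEL (VOIDmode,
			      gen_rtvec (2,
					 gen_rtx_EXPR_LIST (VOIDmode,
							    gen_rtx_REG (DImode, 4),
							    const0_rtx),
					 gen_rtx_EXPR_LIST (VOIDmode,
							    gen_rtx_REG (DImode, 5),
							    GEN_INT (8))));
	emit_group_load (dst, src, 16);

   Each EXPR_LIST pairs a destination register with the byte offset of
   the piece it receives; SRC is a 16-byte BLKmode MEM here. */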
2323
aac5cc16
RH
2324/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2325 registers represented by a PARALLEL. SSIZE represents the total size of
04050c69 2326 block DST, or -1 if not known. */
fffa9c1d
JW
2327
2328void
04050c69 2329emit_group_store (orig_dst, src, ssize)
aac5cc16 2330 rtx orig_dst, src;
729a2125 2331 int ssize;
fffa9c1d 2332{
aac5cc16
RH
2333 rtx *tmps, dst;
2334 int start, i;
fffa9c1d 2335
aac5cc16 2336 if (GET_CODE (src) != PARALLEL)
fffa9c1d
JW
2337 abort ();
2338
2339 /* Check for a NULL entry, used to indicate that the parameter goes
2340 both on the stack and in registers. */
aac5cc16
RH
2341 if (XEXP (XVECEXP (src, 0, 0), 0))
2342 start = 0;
fffa9c1d 2343 else
aac5cc16
RH
2344 start = 1;
2345
3a94c984 2346 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
fffa9c1d 2347
aac5cc16
RH
2348 /* Copy the (probable) hard regs into pseudos. */
2349 for (i = start; i < XVECLEN (src, 0); i++)
fffa9c1d 2350 {
aac5cc16
RH
2351 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2352 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2353 emit_move_insn (tmps[i], reg);
2354 }
3a94c984 2355 emit_queue ();
fffa9c1d 2356
aac5cc16
RH
2357 /* If we won't be storing directly into memory, protect the real destination
2358 from strange tricks we might play. */
2359 dst = orig_dst;
10a9f2be
JW
2360 if (GET_CODE (dst) == PARALLEL)
2361 {
2362 rtx temp;
2363
2364 /* We can get a PARALLEL dst if there is a conditional expression in
2365 a return statement. In that case, the dst and src are the same,
2366 so no action is necessary. */
2367 if (rtx_equal_p (dst, src))
2368 return;
2369
2370 /* It is unclear if we can ever reach here, but we may as well handle
2371 it. Allocate a temporary, and split this into a store/load to/from
2372 the temporary. */
2373
2374 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
04050c69
RK
2375 emit_group_store (temp, src, ssize);
2376 emit_group_load (dst, temp, ssize);
10a9f2be
JW
2377 return;
2378 }
75897075 2379 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
aac5cc16
RH
2380 {
2381 dst = gen_reg_rtx (GET_MODE (orig_dst));
2382 /* Make life a bit easier for combine. */
2383 emit_move_insn (dst, const0_rtx);
2384 }
aac5cc16
RH
2385
2386 /* Process the pieces. */
2387 for (i = start; i < XVECLEN (src, 0); i++)
2388 {
770ae6cc 2389 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
aac5cc16 2390 enum machine_mode mode = GET_MODE (tmps[i]);
770ae6cc 2391 unsigned int bytelen = GET_MODE_SIZE (mode);
6ddae612 2392 rtx dest = dst;
aac5cc16
RH
2393
2394 /* Handle trailing fragments that run over the size of the struct. */
8752c357 2395 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
71bc0330 2396 {
aac5cc16
RH
2397 if (BYTES_BIG_ENDIAN)
2398 {
2399 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2400 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2401 tmps[i], 0, OPTAB_WIDEN);
2402 }
2403 bytelen = ssize - bytepos;
71bc0330 2404 }
fffa9c1d 2405
6ddae612
JJ
2406 if (GET_CODE (dst) == CONCAT)
2407 {
2408 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2409 dest = XEXP (dst, 0);
2410 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2411 {
2412 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2413 dest = XEXP (dst, 1);
2414 }
2415 else
2416 abort ();
2417 }
2418
aac5cc16 2419 /* Optimize the access just a bit. */
6ddae612
JJ
2420 if (GET_CODE (dest) == MEM
2421 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
729a2125 2422 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16 2423 && bytelen == GET_MODE_SIZE (mode))
6ddae612 2424 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
aac5cc16 2425 else
6ddae612 2426 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
04050c69 2427 mode, tmps[i], ssize);
fffa9c1d 2428 }
729a2125 2429
3a94c984 2430 emit_queue ();
aac5cc16
RH
2431
2432 /* Copy from the pseudo into the (probable) hard reg. */
2433 if (GET_CODE (dst) == REG)
2434 emit_move_insn (orig_dst, dst);
fffa9c1d
JW
2435}
2436
c36fce9a
GRK
2437/* Generate code to copy a BLKmode object of TYPE out of a
2438 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2439 is null, a stack temporary is created. TGTBLK is returned.
2440
2441 The primary purpose of this routine is to handle functions
2442 that return BLKmode structures in registers. Some machines
2443 (the PA for example) want to return all small structures
3a94c984 2444 in registers regardless of the structure's alignment. */
c36fce9a
GRK
2445
2446rtx
19caa751 2447copy_blkmode_from_reg (tgtblk, srcreg, type)
c36fce9a
GRK
2448 rtx tgtblk;
2449 rtx srcreg;
2450 tree type;
2451{
19caa751
RK
2452 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2453 rtx src = NULL, dst = NULL;
2454 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2455 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2456
2457 if (tgtblk == 0)
2458 {
1da68f56
RK
2459 tgtblk = assign_temp (build_qualified_type (type,
2460 (TYPE_QUALS (type)
2461 | TYPE_QUAL_CONST)),
2462 0, 1, 1);
19caa751
RK
2463 preserve_temp_slots (tgtblk);
2464 }
3a94c984 2465
1ed1b4fb
RK
2466 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2467 into a new pseudo which is a full word.
0d7839da 2468
1ed1b4fb
RK
2469 If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
 2470 the wrong part of the register gets copied, so we fake a type conversion
2471 in place. */
19caa751
RK
2472 if (GET_MODE (srcreg) != BLKmode
2473 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
1ed1b4fb
RK
2474 {
2475 if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
2476 srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
2477 else
2478 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2479 }
19caa751
RK
2480
2481 /* Structures whose size is not a multiple of a word are aligned
2482 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2483 machine, this means we must skip the empty high order bytes when
2484 calculating the bit offset. */
0d7839da
SE
2485 if (BYTES_BIG_ENDIAN
2486 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
2487 && bytes % UNITS_PER_WORD)
19caa751
RK
2488 big_endian_correction
2489 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2490
 2491 /* Copy the structure BITSIZE bits at a time.
3a94c984 2492
19caa751
RK
2493 We could probably emit more efficient code for machines which do not use
2494 strict alignment, but it doesn't seem worth the effort at the current
2495 time. */
2496 for (bitpos = 0, xbitpos = big_endian_correction;
2497 bitpos < bytes * BITS_PER_UNIT;
2498 bitpos += bitsize, xbitpos += bitsize)
2499 {
3a94c984 2500 /* We need a new source operand each time xbitpos is on a
19caa751
RK
2501 word boundary and when xbitpos == big_endian_correction
2502 (the first time through). */
2503 if (xbitpos % BITS_PER_WORD == 0
2504 || xbitpos == big_endian_correction)
b47f8cfc
JH
2505 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2506 GET_MODE (srcreg));
19caa751
RK
2507
2508 /* We need a new destination operand each time bitpos is on
2509 a word boundary. */
2510 if (bitpos % BITS_PER_WORD == 0)
2511 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
3a94c984 2512
19caa751
RK
2513 /* Use xbitpos for the source extraction (right justified) and
 2514 bitpos for the destination store (left justified). */
2515 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2516 extract_bit_field (src, bitsize,
2517 xbitpos % BITS_PER_WORD, 1,
2518 NULL_RTX, word_mode, word_mode,
04050c69
RK
2519 BITS_PER_WORD),
2520 BITS_PER_WORD);
19caa751
RK
2521 }
2522
2523 return tgtblk;
c36fce9a
GRK
2524}
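/* Example (a sketch): pulling a BLKmode return value out of its return
   register after a call, letting this function allocate the temporary.
   SRCREG and TYPE are assumed to come from the surrounding call
   expansion:

	rtx blk = copy_blkmode_from_reg (NULL_RTX, srcreg, type);

   BLK is then a BLKmode MEM holding the bytes of the structure. */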
2525
94b25f81
RK
2526/* Add a USE expression for REG to the (possibly empty) list pointed
2527 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
2528
2529void
b3f8cf4a
RK
2530use_reg (call_fusage, reg)
2531 rtx *call_fusage, reg;
2532{
0304dfbb
DE
2533 if (GET_CODE (reg) != REG
2534 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3a94c984 2535 abort ();
b3f8cf4a
RK
2536
2537 *call_fusage
38a448ca
RH
2538 = gen_rtx_EXPR_LIST (VOIDmode,
2539 gen_rtx_USE (VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
2540}
2541
94b25f81
RK
2542/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2543 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
2544
2545void
0304dfbb
DE
2546use_regs (call_fusage, regno, nregs)
2547 rtx *call_fusage;
bbf6f052
RK
2548 int regno;
2549 int nregs;
2550{
0304dfbb 2551 int i;
bbf6f052 2552
0304dfbb
DE
2553 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2554 abort ();
2555
2556 for (i = 0; i < nregs; i++)
e50126e8 2557 use_reg (call_fusage, regno_reg_rtx[regno + i]);
bbf6f052 2558}
fffa9c1d
JW
2559
2560/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2561 PARALLEL REGS. This is for calls that pass values in multiple
2562 non-contiguous locations. The Irix 6 ABI has examples of this. */
2563
2564void
2565use_group_regs (call_fusage, regs)
2566 rtx *call_fusage;
2567 rtx regs;
2568{
2569 int i;
2570
6bd35f86
DE
2571 for (i = 0; i < XVECLEN (regs, 0); i++)
2572 {
2573 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 2574
6bd35f86
DE
2575 /* A NULL entry means the parameter goes both on the stack and in
2576 registers. This can also be a MEM for targets that pass values
2577 partially on the stack and partially in registers. */
e9a25f70 2578 if (reg != 0 && GET_CODE (reg) == REG)
6bd35f86
DE
2579 use_reg (call_fusage, reg);
2580 }
fffa9c1d 2581}
bbf6f052 2582\f
57814e5e 2583
cf5124f6
RS
2584/* Determine whether the LEN bytes generated by CONSTFUN can be
2585 stored to memory using several move instructions. CONSTFUNDATA is
2586 a pointer which will be passed as argument in every CONSTFUN call.
 2587 ALIGN is the maximum alignment we can assume. Return nonzero if a
2588 call to store_by_pieces should succeed. */
2589
57814e5e
JJ
2590int
2591can_store_by_pieces (len, constfun, constfundata, align)
2592 unsigned HOST_WIDE_INT len;
2593 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2594 PTR constfundata;
2595 unsigned int align;
2596{
98166639 2597 unsigned HOST_WIDE_INT max_size, l;
57814e5e
JJ
2598 HOST_WIDE_INT offset = 0;
2599 enum machine_mode mode, tmode;
2600 enum insn_code icode;
2601 int reverse;
2602 rtx cst;
2603
2604 if (! MOVE_BY_PIECES_P (len, align))
2605 return 0;
2606
2607 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2608 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2609 align = MOVE_MAX * BITS_PER_UNIT;
2610
2611 /* We would first store what we can in the largest integer mode, then go to
2612 successively smaller modes. */
2613
2614 for (reverse = 0;
2615 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2616 reverse++)
2617 {
2618 l = len;
2619 mode = VOIDmode;
cf5124f6 2620 max_size = STORE_MAX_PIECES + 1;
57814e5e
JJ
2621 while (max_size > 1)
2622 {
2623 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2624 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2625 if (GET_MODE_SIZE (tmode) < max_size)
2626 mode = tmode;
2627
2628 if (mode == VOIDmode)
2629 break;
2630
2631 icode = mov_optab->handlers[(int) mode].insn_code;
2632 if (icode != CODE_FOR_nothing
2633 && align >= GET_MODE_ALIGNMENT (mode))
2634 {
2635 unsigned int size = GET_MODE_SIZE (mode);
2636
2637 while (l >= size)
2638 {
2639 if (reverse)
2640 offset -= size;
2641
2642 cst = (*constfun) (constfundata, offset, mode);
2643 if (!LEGITIMATE_CONSTANT_P (cst))
2644 return 0;
2645
2646 if (!reverse)
2647 offset += size;
2648
2649 l -= size;
2650 }
2651 }
2652
2653 max_size = GET_MODE_SIZE (mode);
2654 }
2655
2656 /* The code above should have handled everything. */
2657 if (l != 0)
2658 abort ();
2659 }
2660
2661 return 1;
2662}
2663
2664/* Generate several move instructions to store LEN bytes generated by
2665 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2666 pointer which will be passed as argument in every CONSTFUN call.
 2667 ALIGN is the maximum alignment we can assume. */
2668
2669void
2670store_by_pieces (to, len, constfun, constfundata, align)
2671 rtx to;
2672 unsigned HOST_WIDE_INT len;
2673 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2674 PTR constfundata;
2675 unsigned int align;
2676{
2677 struct store_by_pieces data;
2678
2679 if (! MOVE_BY_PIECES_P (len, align))
2680 abort ();
2681 to = protect_from_queue (to, 1);
2682 data.constfun = constfun;
2683 data.constfundata = constfundata;
2684 data.len = len;
2685 data.to = to;
2686 store_by_pieces_1 (&data, align);
2687}
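/* A typical guarded use of the two routines above (a sketch of the
   intended calling pattern; CONSTFUN and DATA stand for whatever
   callback and cookie the caller uses to generate the constant bytes):

	if (can_store_by_pieces (len, constfun, data, align))
	  store_by_pieces (dest, len, constfun, data, align);

   and otherwise fall back to a library call or an explicit loop. */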
2688
19caa751
RK
2689/* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2690 rtx with BLKmode). The caller must pass TO through protect_from_queue
 2691 before calling. ALIGN is the maximum alignment we can assume. */
9de08200
RK
2692
2693static void
2694clear_by_pieces (to, len, align)
2695 rtx to;
3bdf5ad1 2696 unsigned HOST_WIDE_INT len;
729a2125 2697 unsigned int align;
9de08200 2698{
57814e5e
JJ
2699 struct store_by_pieces data;
2700
2701 data.constfun = clear_by_pieces_1;
df4ae160 2702 data.constfundata = NULL;
57814e5e
JJ
2703 data.len = len;
2704 data.to = to;
2705 store_by_pieces_1 (&data, align);
2706}
2707
2708/* Callback routine for clear_by_pieces.
2709 Return const0_rtx unconditionally. */
2710
2711static rtx
2712clear_by_pieces_1 (data, offset, mode)
2713 PTR data ATTRIBUTE_UNUSED;
2714 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2715 enum machine_mode mode ATTRIBUTE_UNUSED;
2716{
2717 return const0_rtx;
2718}
2719
2720/* Subroutine of clear_by_pieces and store_by_pieces.
2721 Generate several move instructions to store LEN bytes of block TO. (A MEM
2722 rtx with BLKmode). The caller must pass TO through protect_from_queue
 2723 before calling. ALIGN is the maximum alignment we can assume. */
2724
2725static void
2726store_by_pieces_1 (data, align)
2727 struct store_by_pieces *data;
2728 unsigned int align;
2729{
2730 rtx to_addr = XEXP (data->to, 0);
cf5124f6 2731 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
fbe1758d
AM
2732 enum machine_mode mode = VOIDmode, tmode;
2733 enum insn_code icode;
9de08200 2734
57814e5e
JJ
2735 data->offset = 0;
2736 data->to_addr = to_addr;
2737 data->autinc_to
9de08200
RK
2738 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2739 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2740
57814e5e
JJ
2741 data->explicit_inc_to = 0;
2742 data->reverse
9de08200 2743 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
57814e5e
JJ
2744 if (data->reverse)
2745 data->offset = data->len;
9de08200 2746
57814e5e 2747 /* If storing requires more than two move insns,
9de08200
RK
2748 copy addresses to registers (to make displacements shorter)
2749 and use post-increment if available. */
57814e5e
JJ
2750 if (!data->autinc_to
2751 && move_by_pieces_ninsns (data->len, align) > 2)
9de08200 2752 {
3a94c984 2753 /* Determine the main mode we'll be using. */
fbe1758d
AM
2754 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2755 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2756 if (GET_MODE_SIZE (tmode) < max_size)
2757 mode = tmode;
2758
57814e5e 2759 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
9de08200 2760 {
57814e5e
JJ
2761 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2762 data->autinc_to = 1;
2763 data->explicit_inc_to = -1;
9de08200 2764 }
3bdf5ad1 2765
57814e5e
JJ
2766 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2767 && ! data->autinc_to)
9de08200 2768 {
57814e5e
JJ
2769 data->to_addr = copy_addr_to_reg (to_addr);
2770 data->autinc_to = 1;
2771 data->explicit_inc_to = 1;
9de08200 2772 }
3bdf5ad1 2773
57814e5e
JJ
2774 if ( !data->autinc_to && CONSTANT_P (to_addr))
2775 data->to_addr = copy_addr_to_reg (to_addr);
9de08200
RK
2776 }
2777
e1565e65 2778 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
19caa751 2779 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
bdb429a5 2780 align = MOVE_MAX * BITS_PER_UNIT;
9de08200 2781
57814e5e 2782 /* First store what we can in the largest integer mode, then go to
9de08200
RK
2783 successively smaller modes. */
2784
2785 while (max_size > 1)
2786 {
9de08200
RK
2787 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2788 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2789 if (GET_MODE_SIZE (tmode) < max_size)
2790 mode = tmode;
2791
2792 if (mode == VOIDmode)
2793 break;
2794
2795 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 2796 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
57814e5e 2797 store_by_pieces_2 (GEN_FCN (icode), mode, data);
9de08200
RK
2798
2799 max_size = GET_MODE_SIZE (mode);
2800 }
2801
2802 /* The code above should have handled everything. */
57814e5e 2803 if (data->len != 0)
9de08200
RK
2804 abort ();
2805}
2806
57814e5e 2807/* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
9de08200
RK
2808 with move instructions for mode MODE. GENFUN is the gen_... function
2809 to make a move insn for that mode. DATA has all the other info. */
2810
2811static void
57814e5e 2812store_by_pieces_2 (genfun, mode, data)
711d877c 2813 rtx (*genfun) PARAMS ((rtx, ...));
9de08200 2814 enum machine_mode mode;
57814e5e 2815 struct store_by_pieces *data;
9de08200 2816{
3bdf5ad1 2817 unsigned int size = GET_MODE_SIZE (mode);
57814e5e 2818 rtx to1, cst;
9de08200
RK
2819
2820 while (data->len >= size)
2821 {
3bdf5ad1
RK
2822 if (data->reverse)
2823 data->offset -= size;
9de08200 2824
3bdf5ad1 2825 if (data->autinc_to)
630036c6
JJ
2826 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2827 data->offset);
3a94c984 2828 else
f4ef873c 2829 to1 = adjust_address (data->to, mode, data->offset);
9de08200 2830
940da324 2831 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
57814e5e
JJ
2832 emit_insn (gen_add2_insn (data->to_addr,
2833 GEN_INT (-(HOST_WIDE_INT) size)));
9de08200 2834
57814e5e
JJ
2835 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2836 emit_insn ((*genfun) (to1, cst));
3bdf5ad1 2837
940da324 2838 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
9de08200 2839 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
9de08200 2840
3bdf5ad1
RK
2841 if (! data->reverse)
2842 data->offset += size;
9de08200
RK
2843
2844 data->len -= size;
2845 }
2846}
2847\f
19caa751 2848/* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
8ac61af7 2849 its length in bytes. */
e9a25f70
JL
2850
2851rtx
8ac61af7 2852clear_storage (object, size)
bbf6f052 2853 rtx object;
4c08eef0 2854 rtx size;
bbf6f052 2855{
e9a25f70 2856 rtx retval = 0;
8ac61af7
RK
2857 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2858 : GET_MODE_ALIGNMENT (GET_MODE (object)));
e9a25f70 2859
fcf1b822
RK
2860 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2861 just move a zero. Otherwise, do this a piece at a time. */
69ef87e2 2862 if (GET_MODE (object) != BLKmode
fcf1b822 2863 && GET_CODE (size) == CONST_INT
4ca79136 2864 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
fcf1b822
RK
2865 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2866 else
bbf6f052 2867 {
9de08200
RK
2868 object = protect_from_queue (object, 1);
2869 size = protect_from_queue (size, 0);
2870
2871 if (GET_CODE (size) == CONST_INT
78762e3b 2872 && CLEAR_BY_PIECES_P (INTVAL (size), align))
9de08200 2873 clear_by_pieces (object, INTVAL (size), align);
4ca79136
RH
2874 else if (clear_storage_via_clrstr (object, size, align))
2875 ;
9de08200 2876 else
4ca79136
RH
2877 retval = clear_storage_via_libcall (object, size);
2878 }
2879
2880 return retval;
2881}
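/* Example (a sketch): zeroing a 32-byte BLKmode object, where OBJ is a
   BLKmode MEM prepared by the caller:

	rtx ret = clear_storage (obj, GEN_INT (32));

   As with emit_block_move, RET is the value returned by memset when a
   library call was emitted on a TARGET_MEM_FUNCTIONS target, else 0. */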
2882
2883/* A subroutine of clear_storage. Expand a clrstr pattern;
2884 return true if successful. */
2885
2886static bool
2887clear_storage_via_clrstr (object, size, align)
2888 rtx object, size;
2889 unsigned int align;
2890{
2891 /* Try the most limited insn first, because there's no point
2892 including more than one in the machine description unless
2893 the more limited one has some advantage. */
2894
2895 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2896 enum machine_mode mode;
2897
2898 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2899 mode = GET_MODE_WIDER_MODE (mode))
2900 {
2901 enum insn_code code = clrstr_optab[(int) mode];
2902 insn_operand_predicate_fn pred;
2903
2904 if (code != CODE_FOR_nothing
2905 /* We don't need MODE to be narrower than
2906 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2907 the mode mask, as it is returned by the macro, it will
2908 definitely be less than the actual mode mask. */
2909 && ((GET_CODE (size) == CONST_INT
2910 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2911 <= (GET_MODE_MASK (mode) >> 1)))
2912 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2913 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2914 || (*pred) (object, BLKmode))
2915 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2916 || (*pred) (opalign, VOIDmode)))
9de08200 2917 {
4ca79136
RH
2918 rtx op1;
2919 rtx last = get_last_insn ();
2920 rtx pat;
9de08200 2921
4ca79136
RH
2922 op1 = convert_to_mode (mode, size, 1);
2923 pred = insn_data[(int) code].operand[1].predicate;
2924 if (pred != 0 && ! (*pred) (op1, mode))
2925 op1 = copy_to_mode_reg (mode, op1);
9de08200 2926
4ca79136
RH
2927 pat = GEN_FCN ((int) code) (object, op1, opalign);
2928 if (pat)
9de08200 2929 {
4ca79136
RH
2930 emit_insn (pat);
2931 return true;
2932 }
2933 else
2934 delete_insns_since (last);
2935 }
2936 }
9de08200 2937
4ca79136
RH
2938 return false;
2939}
9de08200 2940
4ca79136
RH
2941/* A subroutine of clear_storage. Expand a call to memset or bzero.
2942 Return the return value of memset, 0 otherwise. */
9de08200 2943
4ca79136
RH
2944static rtx
2945clear_storage_via_libcall (object, size)
2946 rtx object, size;
2947{
2948 tree call_expr, arg_list, fn, object_tree, size_tree;
2949 enum machine_mode size_mode;
2950 rtx retval;
9de08200 2951
4ca79136 2952 /* OBJECT or SIZE may have been passed through protect_from_queue.
52cf7115 2953
4ca79136
RH
2954 It is unsafe to save the value generated by protect_from_queue
2955 and reuse it later. Consider what happens if emit_queue is
2956 called before the return value from protect_from_queue is used.
52cf7115 2957
4ca79136
RH
2958 Expansion of the CALL_EXPR below will call emit_queue before
2959 we are finished emitting RTL for argument setup. So if we are
2960 not careful we could get the wrong value for an argument.
52cf7115 2961
4ca79136
RH
2962 To avoid this problem we go ahead and emit code to copy OBJECT
2963 and SIZE into new pseudos. We can then place those new pseudos
2964 into an RTL_EXPR and use them later, even after a call to
2965 emit_queue.
52cf7115 2966
4ca79136
RH
2967 Note this is not strictly needed for library calls since they
2968 do not call emit_queue before loading their arguments. However,
2969 we may need to have library calls call emit_queue in the future
2970 since failing to do so could cause problems for targets which
2971 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
52cf7115 2972
4ca79136 2973 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
52cf7115 2974
4ca79136
RH
2975 if (TARGET_MEM_FUNCTIONS)
2976 size_mode = TYPE_MODE (sizetype);
2977 else
2978 size_mode = TYPE_MODE (unsigned_type_node);
2979 size = convert_to_mode (size_mode, size, 1);
2980 size = copy_to_mode_reg (size_mode, size);
52cf7115 2981
4ca79136
RH
2982 /* It is incorrect to use the libcall calling conventions to call
2983 memset in this context. This could be a user call to memset and
2984 the user may wish to examine the return value from memset. For
2985 targets where libcalls and normal calls have different conventions
2986 for returning pointers, we could end up generating incorrect code.
4bc973ae 2987
4ca79136 2988 For convenience, we generate the call to bzero this way as well. */
4bc973ae 2989
4ca79136
RH
2990 object_tree = make_tree (ptr_type_node, object);
2991 if (TARGET_MEM_FUNCTIONS)
2992 size_tree = make_tree (sizetype, size);
2993 else
2994 size_tree = make_tree (unsigned_type_node, size);
2995
2996 fn = clear_storage_libcall_fn (true);
2997 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2998 if (TARGET_MEM_FUNCTIONS)
2999 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3000 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3001
3002 /* Now we have to build up the CALL_EXPR itself. */
3003 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3004 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3005 call_expr, arg_list, NULL_TREE);
3006 TREE_SIDE_EFFECTS (call_expr) = 1;
3007
3008 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3009
3010 /* If we are initializing a readonly value, show the above call
3011 clobbered it. Otherwise, a load from it may erroneously be
3012 hoisted from a loop. */
3013 if (RTX_UNCHANGING_P (object))
3014 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3015
3016 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3017}
3018
3019/* A subroutine of clear_storage_via_libcall. Create the tree node
3020 for the function we use for block clears. The first time FOR_CALL
3021 is true, we call assemble_external. */
3022
3023static GTY(()) tree block_clear_fn;
66c60e67 3024
4ca79136
RH
3025static tree
3026clear_storage_libcall_fn (for_call)
3027 int for_call;
3028{
3029 static bool emitted_extern;
3030 tree fn = block_clear_fn, args;
3031
3032 if (!fn)
3033 {
3034 if (TARGET_MEM_FUNCTIONS)
3035 {
3036 fn = get_identifier ("memset");
3037 args = build_function_type_list (ptr_type_node, ptr_type_node,
3038 integer_type_node, sizetype,
3039 NULL_TREE);
3040 }
3041 else
3042 {
3043 fn = get_identifier ("bzero");
3044 args = build_function_type_list (void_type_node, ptr_type_node,
3045 unsigned_type_node, NULL_TREE);
9de08200 3046 }
4ca79136
RH
3047
3048 fn = build_decl (FUNCTION_DECL, fn, args);
3049 DECL_EXTERNAL (fn) = 1;
3050 TREE_PUBLIC (fn) = 1;
3051 DECL_ARTIFICIAL (fn) = 1;
3052 TREE_NOTHROW (fn) = 1;
3053
3054 block_clear_fn = fn;
bbf6f052 3055 }
e9a25f70 3056
4ca79136
RH
3057 if (for_call && !emitted_extern)
3058 {
3059 emitted_extern = true;
3060 make_decl_rtl (fn, NULL);
3061 assemble_external (fn);
3062 }
bbf6f052 3063
4ca79136
RH
3064 return fn;
3065}
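/* For reference, the FUNCTION_DECL built above corresponds to one of
   these C prototypes, depending on TARGET_MEM_FUNCTIONS:

	void *memset (void *s, int c, size_t n);
	void bzero (void *s, unsigned n);  */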
3066\f
bbf6f052
RK
3067/* Generate code to copy Y into X.
3068 Both Y and X must have the same mode, except that
3069 Y can be a constant with VOIDmode.
3070 This mode cannot be BLKmode; use emit_block_move for that.
3071
3072 Return the last instruction emitted. */
3073
3074rtx
3075emit_move_insn (x, y)
3076 rtx x, y;
3077{
3078 enum machine_mode mode = GET_MODE (x);
de1b33dd
AO
3079 rtx y_cst = NULL_RTX;
3080 rtx last_insn;
bbf6f052
RK
3081
3082 x = protect_from_queue (x, 1);
3083 y = protect_from_queue (y, 0);
3084
3085 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3086 abort ();
3087
ee5332b8
RH
3088 /* Never force constant_p_rtx to memory. */
3089 if (GET_CODE (y) == CONSTANT_P_RTX)
3090 ;
51286de6 3091 else if (CONSTANT_P (y))
de1b33dd 3092 {
51286de6
RH
3093 if (optimize
3094 && FLOAT_MODE_P (GET_MODE (x))
3095 && (last_insn = compress_float_constant (x, y)))
3096 return last_insn;
3097
3098 if (!LEGITIMATE_CONSTANT_P (y))
3099 {
3100 y_cst = y;
3101 y = force_const_mem (mode, y);
3102 }
de1b33dd 3103 }
bbf6f052
RK
3104
3105 /* If X or Y are memory references, verify that their addresses are valid
3106 for the machine. */
3107 if (GET_CODE (x) == MEM
3108 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3109 && ! push_operand (x, GET_MODE (x)))
3110 || (flag_force_addr
3111 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
792760b9 3112 x = validize_mem (x);
bbf6f052
RK
3113
3114 if (GET_CODE (y) == MEM
3115 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3116 || (flag_force_addr
3117 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
792760b9 3118 y = validize_mem (y);
bbf6f052
RK
3119
3120 if (mode == BLKmode)
3121 abort ();
3122
de1b33dd
AO
3123 last_insn = emit_move_insn_1 (x, y);
3124
3125 if (y_cst && GET_CODE (x) == REG)
3d238248 3126 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
de1b33dd
AO
3127
3128 return last_insn;
261c4230
RS
3129}
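/* Example (a sketch): loading an immediate into a fresh pseudo; SImode
   is illustrative:

	rtx reg = gen_reg_rtx (SImode);
	emit_move_insn (reg, GEN_INT (42));
*/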
3130
3131/* Low level part of emit_move_insn.
3132 Called just like emit_move_insn, but assumes X and Y
3133 are basically valid. */
3134
3135rtx
3136emit_move_insn_1 (x, y)
3137 rtx x, y;
3138{
3139 enum machine_mode mode = GET_MODE (x);
3140 enum machine_mode submode;
3141 enum mode_class class = GET_MODE_CLASS (mode);
261c4230 3142
dbbbbf3b 3143 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3a94c984 3144 abort ();
76bbe028 3145
bbf6f052
RK
3146 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3147 return
3148 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3149
89742723 3150 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 3151 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
d0c76654
RK
3152 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
3153 * BITS_PER_UNIT),
3154 (class == MODE_COMPLEX_INT
3155 ? MODE_INT : MODE_FLOAT),
3156 0))
7308a047
RS
3157 && (mov_optab->handlers[(int) submode].insn_code
3158 != CODE_FOR_nothing))
3159 {
3160 /* Don't split destination if it is a stack push. */
3161 int stack = push_operand (x, GET_MODE (x));
7308a047 3162
79ce92d7 3163#ifdef PUSH_ROUNDING
1a06f5fe
JH
 3164 /* In case we output to the stack, but the size is smaller than what
 3165 the machine can push exactly, we need to use move instructions. */
3166 if (stack
bb93b973
RK
3167 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3168 != GET_MODE_SIZE (submode)))
1a06f5fe
JH
3169 {
3170 rtx temp;
bb93b973 3171 HOST_WIDE_INT offset1, offset2;
1a06f5fe
JH
3172
3173 /* Do not use anti_adjust_stack, since we don't want to update
3174 stack_pointer_delta. */
3175 temp = expand_binop (Pmode,
3176#ifdef STACK_GROWS_DOWNWARD
3177 sub_optab,
3178#else
3179 add_optab,
3180#endif
3181 stack_pointer_rtx,
3182 GEN_INT
bb93b973
RK
3183 (PUSH_ROUNDING
3184 (GET_MODE_SIZE (GET_MODE (x)))),
3185 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3186
1a06f5fe
JH
3187 if (temp != stack_pointer_rtx)
3188 emit_move_insn (stack_pointer_rtx, temp);
bb93b973 3189
1a06f5fe
JH
3190#ifdef STACK_GROWS_DOWNWARD
3191 offset1 = 0;
3192 offset2 = GET_MODE_SIZE (submode);
3193#else
3194 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3195 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3196 + GET_MODE_SIZE (submode));
3197#endif
bb93b973 3198
1a06f5fe
JH
3199 emit_move_insn (change_address (x, submode,
3200 gen_rtx_PLUS (Pmode,
3201 stack_pointer_rtx,
3202 GEN_INT (offset1))),
3203 gen_realpart (submode, y));
3204 emit_move_insn (change_address (x, submode,
3205 gen_rtx_PLUS (Pmode,
3206 stack_pointer_rtx,
3207 GEN_INT (offset2))),
3208 gen_imagpart (submode, y));
3209 }
e9c0bd54 3210 else
79ce92d7 3211#endif
7308a047
RS
3212 /* If this is a stack, push the highpart first, so it
3213 will be in the argument order.
3214
3215 In that case, change_address is used only to convert
3216 the mode, not to change the address. */
e9c0bd54 3217 if (stack)
c937357e 3218 {
e33c0d66
RS
3219 /* Note that the real part always precedes the imag part in memory
 3220 regardless of the machine's endianness. */
c937357e
RS
3221#ifdef STACK_GROWS_DOWNWARD
3222 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3bdf5ad1 3223 (gen_rtx_MEM (submode, XEXP (x, 0)),
e33c0d66 3224 gen_imagpart (submode, y)));
c937357e 3225 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3bdf5ad1 3226 (gen_rtx_MEM (submode, XEXP (x, 0)),
e33c0d66 3227 gen_realpart (submode, y)));
c937357e
RS
3228#else
3229 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3bdf5ad1 3230 (gen_rtx_MEM (submode, XEXP (x, 0)),
e33c0d66 3231 gen_realpart (submode, y)));
c937357e 3232 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3bdf5ad1 3233 (gen_rtx_MEM (submode, XEXP (x, 0)),
e33c0d66 3234 gen_imagpart (submode, y)));
c937357e
RS
3235#endif
3236 }
3237 else
3238 {
235ae7be
DM
3239 rtx realpart_x, realpart_y;
3240 rtx imagpart_x, imagpart_y;
3241
405f63da
MM
3242 /* If this is a complex value with each part being smaller than a
3243 word, the usual calling sequence will likely pack the pieces into
3244 a single register. Unfortunately, SUBREG of hard registers only
3245 deals in terms of words, so we have a problem converting input
3246 arguments to the CONCAT of two registers that is used elsewhere
3247 for complex values. If this is before reload, we can copy it into
3248 memory and reload. FIXME, we should see about using extract and
3249 insert on integer registers, but complex short and complex char
3250 variables should be rarely used. */
3a94c984 3251 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
405f63da
MM
3252 && (reload_in_progress | reload_completed) == 0)
3253 {
bb93b973
RK
3254 int packed_dest_p
3255 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3256 int packed_src_p
3257 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
405f63da
MM
3258
3259 if (packed_dest_p || packed_src_p)
3260 {
3261 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3262 ? MODE_FLOAT : MODE_INT);
3263
1da68f56
RK
3264 enum machine_mode reg_mode
3265 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
405f63da
MM
3266
3267 if (reg_mode != BLKmode)
3268 {
3269 rtx mem = assign_stack_temp (reg_mode,
3270 GET_MODE_SIZE (mode), 0);
f4ef873c 3271 rtx cmem = adjust_address (mem, mode, 0);
405f63da 3272
1da68f56
RK
3273 cfun->cannot_inline
3274 = N_("function using short complex types cannot be inline");
405f63da
MM
3275
3276 if (packed_dest_p)
3277 {
3278 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
bb93b973 3279
405f63da
MM
3280 emit_move_insn_1 (cmem, y);
3281 return emit_move_insn_1 (sreg, mem);
3282 }
3283 else
3284 {
3285 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
bb93b973 3286
405f63da
MM
3287 emit_move_insn_1 (mem, sreg);
3288 return emit_move_insn_1 (x, cmem);
3289 }
3290 }
3291 }
3292 }
3293
235ae7be
DM
3294 realpart_x = gen_realpart (submode, x);
3295 realpart_y = gen_realpart (submode, y);
3296 imagpart_x = gen_imagpart (submode, x);
3297 imagpart_y = gen_imagpart (submode, y);
3298
3299 /* Show the output dies here. This is necessary for SUBREGs
3300 of pseudos since we cannot track their lifetimes correctly;
c14c6529
RH
3301 hard regs shouldn't appear here except as return values.
3302 We never want to emit such a clobber after reload. */
3303 if (x != y
235ae7be
DM
3304 && ! (reload_in_progress || reload_completed)
3305 && (GET_CODE (realpart_x) == SUBREG
3306 || GET_CODE (imagpart_x) == SUBREG))
bb93b973 3307 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2638126a 3308
c937357e 3309 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
235ae7be 3310 (realpart_x, realpart_y));
c937357e 3311 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
235ae7be 3312 (imagpart_x, imagpart_y));
c937357e 3313 }
7308a047 3314
7a1ab50a 3315 return get_last_insn ();
7308a047
RS
3316 }
3317
cffa2189
R
3318 /* This will handle any multi-word or full-word mode that lacks a move_insn
3319 pattern. However, you will get better code if you define such patterns,
bbf6f052 3320 even if they must turn into multiple assembler instructions. */
cffa2189 3321 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
bbf6f052
RK
3322 {
3323 rtx last_insn = 0;
3ef1eef4 3324 rtx seq, inner;
235ae7be 3325 int need_clobber;
bb93b973 3326 int i;
3a94c984 3327
a98c9f1a
RK
3328#ifdef PUSH_ROUNDING
3329
3330 /* If X is a push on the stack, do the push now and replace
3331 X with a reference to the stack pointer. */
3332 if (push_operand (x, GET_MODE (x)))
3333 {
918a6124
GK
3334 rtx temp;
3335 enum rtx_code code;
0fb7aeda 3336
918a6124
GK
3337 /* Do not use anti_adjust_stack, since we don't want to update
3338 stack_pointer_delta. */
3339 temp = expand_binop (Pmode,
3340#ifdef STACK_GROWS_DOWNWARD
3341 sub_optab,
3342#else
3343 add_optab,
3344#endif
3345 stack_pointer_rtx,
3346 GEN_INT
bb93b973
RK
3347 (PUSH_ROUNDING
3348 (GET_MODE_SIZE (GET_MODE (x)))),
a426c92e 3349 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
bb93b973 3350
0fb7aeda
KH
3351 if (temp != stack_pointer_rtx)
3352 emit_move_insn (stack_pointer_rtx, temp);
918a6124
GK
3353
3354 code = GET_CODE (XEXP (x, 0));
bb93b973 3355
918a6124
GK
3356 /* Just hope that small offsets off SP are OK. */
3357 if (code == POST_INC)
0fb7aeda 3358 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
bb93b973
RK
3359 GEN_INT (-((HOST_WIDE_INT)
3360 GET_MODE_SIZE (GET_MODE (x)))));
918a6124 3361 else if (code == POST_DEC)
0fb7aeda 3362 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
918a6124
GK
3363 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3364 else
3365 temp = stack_pointer_rtx;
3366
3367 x = change_address (x, VOIDmode, temp);
a98c9f1a
RK
3368 }
3369#endif
3a94c984 3370
3ef1eef4
RK
3371 /* If we are in reload, see if either operand is a MEM whose address
3372 is scheduled for replacement. */
3373 if (reload_in_progress && GET_CODE (x) == MEM
3374 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
f1ec5147 3375 x = replace_equiv_address_nv (x, inner);
3ef1eef4
RK
3376 if (reload_in_progress && GET_CODE (y) == MEM
3377 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
f1ec5147 3378 y = replace_equiv_address_nv (y, inner);
3ef1eef4 3379
235ae7be 3380 start_sequence ();
15a7a8ec 3381
235ae7be 3382 need_clobber = 0;
bbf6f052 3383 for (i = 0;
3a94c984 3384 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
bbf6f052
RK
3385 i++)
3386 {
3387 rtx xpart = operand_subword (x, i, 1, mode);
3388 rtx ypart = operand_subword (y, i, 1, mode);
3389
3390 /* If we can't get a part of Y, put Y into memory if it is a
3391 constant. Otherwise, force it into a register. If we still
3392 can't get a part of Y, abort. */
3393 if (ypart == 0 && CONSTANT_P (y))
3394 {
3395 y = force_const_mem (mode, y);
3396 ypart = operand_subword (y, i, 1, mode);
3397 }
3398 else if (ypart == 0)
3399 ypart = operand_subword_force (y, i, mode);
3400
3401 if (xpart == 0 || ypart == 0)
3402 abort ();
3403
235ae7be
DM
3404 need_clobber |= (GET_CODE (xpart) == SUBREG);
3405
bbf6f052
RK
3406 last_insn = emit_move_insn (xpart, ypart);
3407 }
6551fa4d 3408
2f937369 3409 seq = get_insns ();
235ae7be
DM
3410 end_sequence ();
3411
3412 /* Show the output dies here. This is necessary for SUBREGs
3413 of pseudos since we cannot track their lifetimes correctly;
3414 hard regs shouldn't appear here except as return values.
3415 We never want to emit such a clobber after reload. */
3416 if (x != y
3417 && ! (reload_in_progress || reload_completed)
3418 && need_clobber != 0)
bb93b973 3419 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
235ae7be
DM
3420
3421 emit_insn (seq);
3422
bbf6f052
RK
3423 return last_insn;
3424 }
3425 else
3426 abort ();
3427}
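/* Editor's note: a minimal self-contained sketch, not part of expr.c, of the
   word-count arithmetic driving the multi-word fallback above.  The fallback
   emits one subword move per word, ceil (mode_size / UNITS_PER_WORD) in all;
   the 4-byte word size here is hypothetical, purely for the example.  */
#if 0
#include <stdio.h>

#define EXAMPLE_UNITS_PER_WORD 4	/* hypothetical word size in bytes */

static int
subwords_needed (int mode_size)
{
  /* The same rounding-up division as the loop bound above.  */
  return (mode_size + (EXAMPLE_UNITS_PER_WORD - 1)) / EXAMPLE_UNITS_PER_WORD;
}

int
main (void)
{
  printf ("%d\n", subwords_needed (10));	/* a 10-byte mode: prints 3 */
  return 0;
}
#endif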
51286de6
RH
3428
3429/* If Y is representable exactly in a narrower mode, and the target can
3430 perform the extension directly from constant or memory, then emit the
3431 move as an extension. */
3432
3433static rtx
3434compress_float_constant (x, y)
3435 rtx x, y;
3436{
3437 enum machine_mode dstmode = GET_MODE (x);
3438 enum machine_mode orig_srcmode = GET_MODE (y);
3439 enum machine_mode srcmode;
3440 REAL_VALUE_TYPE r;
3441
3442 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3443
3444 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3445 srcmode != orig_srcmode;
3446 srcmode = GET_MODE_WIDER_MODE (srcmode))
3447 {
3448 enum insn_code ic;
3449 rtx trunc_y, last_insn;
3450
3451 /* Skip if the target can't extend this way. */
3452 ic = can_extend_p (dstmode, srcmode, 0);
3453 if (ic == CODE_FOR_nothing)
3454 continue;
3455
3456 /* Skip if the narrowed value isn't exact. */
3457 if (! exact_real_truncate (srcmode, &r))
3458 continue;
3459
3460 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3461
3462 if (LEGITIMATE_CONSTANT_P (trunc_y))
3463 {
3464 /* Skip if the target needs extra instructions to perform
3465 the extension. */
3466 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3467 continue;
3468 }
3469 else if (float_extend_from_mem[dstmode][srcmode])
3470 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3471 else
3472 continue;
3473
3474 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3475 last_insn = get_last_insn ();
3476
3477 if (GET_CODE (x) == REG)
3478 REG_NOTES (last_insn)
3479 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3480
3481 return last_insn;
3482 }
3483
3484 return NULL_RTX;
3485}
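/* Editor's note: a host-arithmetic sketch, not part of expr.c, of the
   "narrow only if exact" test in compress_float_constant, with host
   float/double standing in for REAL_VALUE_TYPE and exact_real_truncate.
   1.5 survives a round trip through float, so a DFmode load of 1.5 may be
   emitted as an SFmode constant plus FLOAT_EXTEND; 0.1 does not survive,
   so the loop keeps looking and eventually gives up.  */
#if 0
#include <stdio.h>

static int
narrows_exactly (double d)
{
  float f = (float) d;		/* truncate to the narrower mode */
  return (double) f == d;	/* exact iff extending recovers D */
}

int
main (void)
{
  printf ("%d %d\n", narrows_exactly (1.5), narrows_exactly (0.1));
  /* prints "1 0" */
  return 0;
}
#endif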
bbf6f052
RK
3486\f
3487/* Pushing data onto the stack. */
3488
3489/* Push a block of length SIZE (perhaps variable)
3490 and return an rtx to address the beginning of the block.
3491 Note that it is not possible for the value returned to be a QUEUED.
3492 The value may be virtual_outgoing_args_rtx.
3493
3494 EXTRA is the number of bytes of padding to push in addition to SIZE.
3495 BELOW nonzero means this padding comes at low addresses;
3496 otherwise, the padding comes at high addresses. */
3497
3498rtx
3499push_block (size, extra, below)
3500 rtx size;
3501 int extra, below;
3502{
b3694847 3503 rtx temp;
88f63c77
RK
3504
3505 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
3506 if (CONSTANT_P (size))
3507 anti_adjust_stack (plus_constant (size, extra));
3508 else if (GET_CODE (size) == REG && extra == 0)
3509 anti_adjust_stack (size);
3510 else
3511 {
ce48579b 3512 temp = copy_to_mode_reg (Pmode, size);
bbf6f052 3513 if (extra != 0)
906c4e36 3514 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
3515 temp, 0, OPTAB_LIB_WIDEN);
3516 anti_adjust_stack (temp);
3517 }
3518
f73ad30e 3519#ifndef STACK_GROWS_DOWNWARD
f73ad30e 3520 if (0)
f73ad30e
JH
3521#else
3522 if (1)
bbf6f052 3523#endif
f73ad30e 3524 {
f73ad30e
JH
3525 temp = virtual_outgoing_args_rtx;
3526 if (extra != 0 && below)
3527 temp = plus_constant (temp, extra);
3528 }
3529 else
3530 {
3531 if (GET_CODE (size) == CONST_INT)
3532 temp = plus_constant (virtual_outgoing_args_rtx,
3a94c984 3533 -INTVAL (size) - (below ? 0 : extra));
f73ad30e
JH
3534 else if (extra != 0 && !below)
3535 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3bdf5ad1 3536 negate_rtx (Pmode, plus_constant (size, extra)));
f73ad30e
JH
3537 else
3538 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3539 negate_rtx (Pmode, size));
3540 }
bbf6f052
RK
3541
3542 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3543}
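/* Editor's note: a self-contained sketch, not part of expr.c, of where
   push_block leaves the returned address relative to the new block when
   SIZE is constant.  The stack pointer always moves by SIZE + EXTRA; the
   value returned is virtual_outgoing_args_rtx plus the offset computed
   below.  */
#if 0
static int
block_offset (int size, int extra, int below, int stack_grows_down)
{
  if (stack_grows_down)
    /* The block starts at the outgoing-args area; step over the
       padding only when it lies below the block.  */
    return below ? extra : 0;
  else
    /* Upward-growing stack: step back over the block, and over the
       padding too when it lies above the block.  */
    return -size - (below ? 0 : extra);
}
#endif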
3544
21d93687
RK
3545#ifdef PUSH_ROUNDING
3546
566aa174 3547/* Emit single push insn. */
21d93687 3548
566aa174
JH
3549static void
3550emit_single_push_insn (mode, x, type)
3551 rtx x;
3552 enum machine_mode mode;
3553 tree type;
3554{
566aa174 3555 rtx dest_addr;
918a6124 3556 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
566aa174 3557 rtx dest;
371b8fc0
JH
3558 enum insn_code icode;
3559 insn_operand_predicate_fn pred;
566aa174 3560
371b8fc0
JH
3561 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
 3562 /* If there is a push pattern, use it. Otherwise try the old way of
 3563 throwing a MEM that represents the push operation at the move expander. */
3564 icode = push_optab->handlers[(int) mode].insn_code;
3565 if (icode != CODE_FOR_nothing)
3566 {
3567 if (((pred = insn_data[(int) icode].operand[0].predicate)
505ddab6 3568 && !((*pred) (x, mode))))
371b8fc0
JH
3569 x = force_reg (mode, x);
3570 emit_insn (GEN_FCN (icode) (x));
3571 return;
3572 }
566aa174
JH
3573 if (GET_MODE_SIZE (mode) == rounded_size)
3574 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3575 else
3576 {
3577#ifdef STACK_GROWS_DOWNWARD
3578 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
505ddab6 3579 GEN_INT (-(HOST_WIDE_INT) rounded_size));
566aa174
JH
3580#else
3581 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3582 GEN_INT (rounded_size));
3583#endif
3584 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3585 }
3586
3587 dest = gen_rtx_MEM (mode, dest_addr);
3588
566aa174
JH
3589 if (type != 0)
3590 {
3591 set_mem_attributes (dest, type, 1);
c3d32120
RK
3592
3593 if (flag_optimize_sibling_calls)
3594 /* Function incoming arguments may overlap with sibling call
3595 outgoing arguments and we cannot allow reordering of reads
3596 from function arguments with stores to outgoing arguments
3597 of sibling calls. */
3598 set_mem_alias_set (dest, 0);
566aa174
JH
3599 }
3600 emit_move_insn (dest, x);
566aa174 3601}
21d93687 3602#endif
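/* Editor's note: a sketch, not part of expr.c, of why the PRE_MODIFY path
   in emit_single_push_insn exists.  With a hypothetical PUSH_ROUNDING that
   pads pushes out to 4-byte slots, pushing a 1-byte value must still move
   sp by 4 bytes, so the plain PRE_DEC/PRE_INC address (which steps by the
   mode size) is only usable when no rounding occurs.  */
#if 0
#define EXAMPLE_PUSH_ROUNDING(BYTES) (((BYTES) + 3) & ~3)	/* hypothetical */

static int
needs_pre_modify (int mode_size)
{
  /* Mirrors the GET_MODE_SIZE (mode) == rounded_size test above.  */
  return EXAMPLE_PUSH_ROUNDING (mode_size) != mode_size;	/* 1 for size 1 */
}
#endif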
566aa174 3603
bbf6f052
RK
3604/* Generate code to push X onto the stack, assuming it has mode MODE and
3605 type TYPE.
3606 MODE is redundant except when X is a CONST_INT (since they don't
3607 carry mode info).
3608 SIZE is an rtx for the size of data to be copied (in bytes),
3609 needed only if X is BLKmode.
3610
f1eaaf73 3611 ALIGN (in bits) is maximum alignment we can assume.
bbf6f052 3612
cd048831
RK
3613 If PARTIAL and REG are both nonzero, then copy that many of the first
3614 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
3615 The amount of space pushed is decreased by PARTIAL words,
3616 rounded *down* to a multiple of PARM_BOUNDARY.
3617 REG must be a hard register in this case.
cd048831
RK
 3618 If REG is zero but PARTIAL is not, take all other actions for an
3619 argument partially in registers, but do not actually load any
3620 registers.
bbf6f052
RK
3621
3622 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 3623 This is ignored if an argument block has already been allocated.
bbf6f052
RK
3624
3625 On a machine that lacks real push insns, ARGS_ADDR is the address of
3626 the bottom of the argument block for this call. We use indexing off there
 3627 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3628 argument block has not been preallocated.
3629
e5e809f4
JL
3630 ARGS_SO_FAR is the size of args previously pushed for this call.
3631
3632 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3633 for arguments passed in registers. If nonzero, it will be the number
3634 of bytes required. */
bbf6f052
RK
3635
3636void
3637emit_push_insn (x, mode, type, size, align, partial, reg, extra,
4fc026cd 3638 args_addr, args_so_far, reg_parm_stack_space,
0fb7aeda 3639 alignment_pad)
b3694847 3640 rtx x;
bbf6f052
RK
3641 enum machine_mode mode;
3642 tree type;
3643 rtx size;
729a2125 3644 unsigned int align;
bbf6f052
RK
3645 int partial;
3646 rtx reg;
3647 int extra;
3648 rtx args_addr;
3649 rtx args_so_far;
e5e809f4 3650 int reg_parm_stack_space;
4fc026cd 3651 rtx alignment_pad;
bbf6f052
RK
3652{
3653 rtx xinner;
3654 enum direction stack_direction
3655#ifdef STACK_GROWS_DOWNWARD
3656 = downward;
3657#else
3658 = upward;
3659#endif
3660
3661 /* Decide where to pad the argument: `downward' for below,
3662 `upward' for above, or `none' for don't pad it.
3663 Default is below for small data on big-endian machines; else above. */
3664 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3665
0fb7aeda 3666 /* Invert direction if stack is post-decrement.
9e0e11bf
GK
3667 FIXME: why? */
3668 if (STACK_PUSH_CODE == POST_DEC)
bbf6f052
RK
3669 if (where_pad != none)
3670 where_pad = (where_pad == downward ? upward : downward);
3671
3672 xinner = x = protect_from_queue (x, 0);
3673
3674 if (mode == BLKmode)
3675 {
3676 /* Copy a block into the stack, entirely or partially. */
3677
b3694847 3678 rtx temp;
bbf6f052
RK
3679 int used = partial * UNITS_PER_WORD;
3680 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3681 int skip;
3a94c984 3682
bbf6f052
RK
3683 if (size == 0)
3684 abort ();
3685
3686 used -= offset;
3687
3688 /* USED is now the # of bytes we need not copy to the stack
3689 because registers will take care of them. */
3690
3691 if (partial != 0)
f4ef873c 3692 xinner = adjust_address (xinner, BLKmode, used);
bbf6f052
RK
3693
3694 /* If the partial register-part of the arg counts in its stack size,
3695 skip the part of stack space corresponding to the registers.
3696 Otherwise, start copying to the beginning of the stack space,
3697 by setting SKIP to 0. */
e5e809f4 3698 skip = (reg_parm_stack_space == 0) ? 0 : used;
bbf6f052
RK
3699
3700#ifdef PUSH_ROUNDING
3701 /* Do it with several push insns if that doesn't take lots of insns
3702 and if there is no difficulty with push insns that skip bytes
3703 on the stack for alignment purposes. */
3704 if (args_addr == 0
f73ad30e 3705 && PUSH_ARGS
bbf6f052
RK
3706 && GET_CODE (size) == CONST_INT
3707 && skip == 0
15914757 3708 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
bbf6f052
RK
3709 /* Here we avoid the case of a structure whose weak alignment
3710 forces many pushes of a small amount of data,
3711 and such small pushes do rounding that causes trouble. */
e1565e65 3712 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
19caa751 3713 || align >= BIGGEST_ALIGNMENT
f1eaaf73
DE
3714 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3715 == (align / BITS_PER_UNIT)))
bbf6f052
RK
3716 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3717 {
3718 /* Push padding now if padding above and stack grows down,
3719 or if padding below and stack grows up.
3720 But if space already allocated, this has already been done. */
3721 if (extra && args_addr == 0
3722 && where_pad != none && where_pad != stack_direction)
906c4e36 3723 anti_adjust_stack (GEN_INT (extra));
bbf6f052 3724
566aa174 3725 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
bbf6f052
RK
3726 }
3727 else
3a94c984 3728#endif /* PUSH_ROUNDING */
bbf6f052 3729 {
7ab923cc
JJ
3730 rtx target;
3731
bbf6f052
RK
3732 /* Otherwise make space on the stack and copy the data
3733 to the address of that space. */
3734
3735 /* Deduct words put into registers from the size we must copy. */
3736 if (partial != 0)
3737 {
3738 if (GET_CODE (size) == CONST_INT)
906c4e36 3739 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
3740 else
3741 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
3742 GEN_INT (used), NULL_RTX, 0,
3743 OPTAB_LIB_WIDEN);
bbf6f052
RK
3744 }
3745
3746 /* Get the address of the stack space.
3747 In this case, we do not deal with EXTRA separately.
3748 A single stack adjust will do. */
3749 if (! args_addr)
3750 {
3751 temp = push_block (size, extra, where_pad == downward);
3752 extra = 0;
3753 }
3754 else if (GET_CODE (args_so_far) == CONST_INT)
3755 temp = memory_address (BLKmode,
3756 plus_constant (args_addr,
3757 skip + INTVAL (args_so_far)));
3758 else
3759 temp = memory_address (BLKmode,
38a448ca
RH
3760 plus_constant (gen_rtx_PLUS (Pmode,
3761 args_addr,
3762 args_so_far),
bbf6f052 3763 skip));
4ca79136
RH
3764
3765 if (!ACCUMULATE_OUTGOING_ARGS)
3766 {
3767 /* If the source is referenced relative to the stack pointer,
3768 copy it to another register to stabilize it. We do not need
3769 to do this if we know that we won't be changing sp. */
3770
3771 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3772 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3773 temp = copy_to_reg (temp);
3774 }
3775
3a94c984 3776 target = gen_rtx_MEM (BLKmode, temp);
7ab923cc 3777
3a94c984
KH
3778 if (type != 0)
3779 {
3780 set_mem_attributes (target, type, 1);
3781 /* Function incoming arguments may overlap with sibling call
3782 outgoing arguments and we cannot allow reordering of reads
3783 from function arguments with stores to outgoing arguments
3784 of sibling calls. */
ba4828e0 3785 set_mem_alias_set (target, 0);
3a94c984 3786 }
4ca79136 3787
44bb111a
RH
3788 /* ALIGN may well be better aligned than TYPE, e.g. due to
3789 PARM_BOUNDARY. Assume the caller isn't lying. */
3790 set_mem_align (target, align);
4ca79136 3791
44bb111a 3792 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
bbf6f052
RK
3793 }
3794 }
3795 else if (partial > 0)
3796 {
3797 /* Scalar partly in registers. */
3798
3799 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3800 int i;
3801 int not_stack;
3802 /* # words of start of argument
3803 that we must make space for but need not store. */
3804 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3805 int args_offset = INTVAL (args_so_far);
3806 int skip;
3807
3808 /* Push padding now if padding above and stack grows down,
3809 or if padding below and stack grows up.
3810 But if space already allocated, this has already been done. */
3811 if (extra && args_addr == 0
3812 && where_pad != none && where_pad != stack_direction)
906c4e36 3813 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3814
3815 /* If we make space by pushing it, we might as well push
3816 the real data. Otherwise, we can leave OFFSET nonzero
3817 and leave the space uninitialized. */
3818 if (args_addr == 0)
3819 offset = 0;
3820
3821 /* Now NOT_STACK gets the number of words that we don't need to
3822 allocate on the stack. */
3823 not_stack = partial - offset;
3824
3825 /* If the partial register-part of the arg counts in its stack size,
3826 skip the part of stack space corresponding to the registers.
3827 Otherwise, start copying to the beginning of the stack space,
3828 by setting SKIP to 0. */
e5e809f4 3829 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
bbf6f052
RK
3830
3831 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3832 x = validize_mem (force_const_mem (mode, x));
3833
3834 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3835 SUBREGs of such registers are not allowed. */
3836 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3837 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3838 x = copy_to_reg (x);
3839
3840 /* Loop over all the words allocated on the stack for this arg. */
3841 /* We can do it by words, because any scalar bigger than a word
3842 has a size a multiple of a word. */
3843#ifndef PUSH_ARGS_REVERSED
3844 for (i = not_stack; i < size; i++)
3845#else
3846 for (i = size - 1; i >= not_stack; i--)
3847#endif
3848 if (i >= not_stack + offset)
3849 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
3850 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3851 0, args_addr,
3852 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4 3853 * UNITS_PER_WORD)),
4fc026cd 3854 reg_parm_stack_space, alignment_pad);
bbf6f052
RK
3855 }
3856 else
3857 {
3858 rtx addr;
921b3427 3859 rtx target = NULL_RTX;
3bdf5ad1 3860 rtx dest;
bbf6f052
RK
3861
3862 /* Push padding now if padding above and stack grows down,
3863 or if padding below and stack grows up.
3864 But if space already allocated, this has already been done. */
3865 if (extra && args_addr == 0
3866 && where_pad != none && where_pad != stack_direction)
906c4e36 3867 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3868
3869#ifdef PUSH_ROUNDING
f73ad30e 3870 if (args_addr == 0 && PUSH_ARGS)
566aa174 3871 emit_single_push_insn (mode, x, type);
bbf6f052
RK
3872 else
3873#endif
921b3427
RK
3874 {
3875 if (GET_CODE (args_so_far) == CONST_INT)
3876 addr
3877 = memory_address (mode,
3a94c984 3878 plus_constant (args_addr,
921b3427 3879 INTVAL (args_so_far)));
3a94c984 3880 else
38a448ca
RH
3881 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3882 args_so_far));
921b3427 3883 target = addr;
566aa174
JH
3884 dest = gen_rtx_MEM (mode, addr);
3885 if (type != 0)
3886 {
3887 set_mem_attributes (dest, type, 1);
3888 /* Function incoming arguments may overlap with sibling call
3889 outgoing arguments and we cannot allow reordering of reads
3890 from function arguments with stores to outgoing arguments
3891 of sibling calls. */
ba4828e0 3892 set_mem_alias_set (dest, 0);
566aa174 3893 }
bbf6f052 3894
566aa174 3895 emit_move_insn (dest, x);
566aa174 3896 }
bbf6f052
RK
3897 }
3898
bbf6f052
RK
3899 /* If part should go in registers, copy that part
3900 into the appropriate registers. Do this now, at the end,
3901 since mem-to-mem copies above may do function calls. */
cd048831 3902 if (partial > 0 && reg != 0)
fffa9c1d
JW
3903 {
3904 /* Handle calls that pass values in multiple non-contiguous locations.
3905 The Irix 6 ABI has examples of this. */
3906 if (GET_CODE (reg) == PARALLEL)
04050c69 3907 emit_group_load (reg, x, -1); /* ??? size? */
fffa9c1d
JW
3908 else
3909 move_block_to_reg (REGNO (reg), x, partial, mode);
3910 }
bbf6f052
RK
3911
3912 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 3913 anti_adjust_stack (GEN_INT (extra));
3a94c984 3914
3ea2292a 3915 if (alignment_pad && args_addr == 0)
4fc026cd 3916 anti_adjust_stack (alignment_pad);
bbf6f052
RK
3917}
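/* Editor's note: a self-contained sketch, not part of expr.c, of the word
   bookkeeping in emit_push_insn's "scalar partly in registers" case, with
   hypothetical numbers.  Words below NOT_STACK live only in registers;
   the next OFFSET words are register words whose stack slots are counted
   but left uninitialized; the remaining words are pushed.  */
#if 0
#include <stdio.h>

static void
classify_words (int size, int partial, int offset)
{
  int not_stack = partial - offset;	/* as in emit_push_insn */
  int i;

  for (i = 0; i < size; i++)
    {
      if (i < not_stack)
	printf ("word %d: register only, no stack slot\n", i);
      else if (i < not_stack + offset)
	printf ("word %d: in a register; stack slot left uninitialized\n", i);
      else
	printf ("word %d: pushed onto the stack\n", i);
    }
}

int
main (void)
{
  /* A 4-word scalar with 2 register words, 1 of which also counts
     against the preallocated stack space.  */
  classify_words (4, 2, 1);
  return 0;
}
#endif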
3918\f
296b4ed9
RK
3919/* Return X if X can be used as a subtarget in a sequence of arithmetic
3920 operations. */
3921
3922static rtx
3923get_subtarget (x)
3924 rtx x;
3925{
3926 return ((x == 0
3927 /* Only registers can be subtargets. */
3928 || GET_CODE (x) != REG
3929 /* If the register is readonly, it can't be set more than once. */
3930 || RTX_UNCHANGING_P (x)
3931 /* Don't use hard regs to avoid extending their life. */
3932 || REGNO (x) < FIRST_PSEUDO_REGISTER
3933 /* Avoid subtargets inside loops,
3934 since they hide some invariant expressions. */
3935 || preserve_subexpressions_p ())
3936 ? 0 : x);
3937}
3938
bbf6f052
RK
3939/* Expand an assignment that stores the value of FROM into TO.
3940 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
3941 (This may contain a QUEUED rtx;
3942 if the value is constant, this rtx is a constant.)
3943 Otherwise, the returned value is NULL_RTX.
bbf6f052
RK
3944
3945 SUGGEST_REG is no longer actually used.
 3946 It used to mean: copy the value through a register
3947 and return that register, if that is possible.
709f5be1 3948 We now use WANT_VALUE to decide whether to do this. */
bbf6f052
RK
3949
3950rtx
3951expand_assignment (to, from, want_value, suggest_reg)
3952 tree to, from;
3953 int want_value;
c5c76735 3954 int suggest_reg ATTRIBUTE_UNUSED;
bbf6f052 3955{
b3694847 3956 rtx to_rtx = 0;
bbf6f052
RK
3957 rtx result;
3958
3959 /* Don't crash if the lhs of the assignment was erroneous. */
3960
3961 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
3962 {
3963 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3964 return want_value ? result : NULL_RTX;
3965 }
bbf6f052
RK
3966
3967 /* Assignment of a structure component needs special treatment
3968 if the structure component's rtx is not simply a MEM.
6be58303
JW
3969 Assignment of an array element at a constant index, and assignment of
3970 an array element in an unaligned packed structure field, has the same
3971 problem. */
bbf6f052 3972
08293add 3973 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
b4e3fabb 3974 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
bbf6f052
RK
3975 {
3976 enum machine_mode mode1;
770ae6cc 3977 HOST_WIDE_INT bitsize, bitpos;
a06ef755 3978 rtx orig_to_rtx;
7bb0943f 3979 tree offset;
bbf6f052
RK
3980 int unsignedp;
3981 int volatilep = 0;
0088fcb1
RK
3982 tree tem;
3983
3984 push_temp_slots ();
839c4796 3985 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
a06ef755 3986 &unsignedp, &volatilep);
bbf6f052
RK
3987
3988 /* If we are going to use store_bit_field and extract_bit_field,
3989 make sure to_rtx will be safe for multiple use. */
3990
3991 if (mode1 == VOIDmode && want_value)
3992 tem = stabilize_reference (tem);
3993
1ed1b4fb
RK
3994 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3995
7bb0943f
RS
3996 if (offset != 0)
3997 {
e3c8ea67 3998 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7bb0943f
RS
3999
4000 if (GET_CODE (to_rtx) != MEM)
4001 abort ();
bd070e1a 4002
bd070e1a 4003#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672
RK
4004 if (GET_MODE (offset_rtx) != Pmode)
4005 offset_rtx = convert_memory_address (Pmode, offset_rtx);
fa06ab5c
RK
4006#else
4007 if (GET_MODE (offset_rtx) != ptr_mode)
4008 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 4009#endif
bd070e1a 4010
9a7b9f4f
JL
 4011 /* A constant address in TO_RTX can have VOIDmode; we must not try
 4012 to call force_reg in that case, so avoid it. */
89752202
HB
4013 if (GET_CODE (to_rtx) == MEM
4014 && GET_MODE (to_rtx) == BLKmode
9a7b9f4f 4015 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
a06ef755 4016 && bitsize > 0
3a94c984 4017 && (bitpos % bitsize) == 0
89752202 4018 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 4019 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
89752202 4020 {
e3c8ea67 4021 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
4022 bitpos = 0;
4023 }
4024
0d4903b8 4025 to_rtx = offset_address (to_rtx, offset_rtx,
818c0c94
RH
4026 highest_pow2_factor_for_type (TREE_TYPE (to),
4027 offset));
7bb0943f 4028 }
c5c76735 4029
998d7deb
RH
4030 if (GET_CODE (to_rtx) == MEM)
4031 {
998d7deb
RH
4032 /* If the field is at offset zero, we could have been given the
4033 DECL_RTX of the parent struct. Don't munge it. */
4034 to_rtx = shallow_copy_rtx (to_rtx);
4035
6f1087be 4036 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
998d7deb 4037 }
effbcc6a 4038
a06ef755
RK
4039 /* Deal with volatile and readonly fields. The former is only done
4040 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4041 if (volatilep && GET_CODE (to_rtx) == MEM)
4042 {
4043 if (to_rtx == orig_to_rtx)
4044 to_rtx = copy_rtx (to_rtx);
4045 MEM_VOLATILE_P (to_rtx) = 1;
bbf6f052
RK
4046 }
4047
956d6950
JL
4048 if (TREE_CODE (to) == COMPONENT_REF
4049 && TREE_READONLY (TREE_OPERAND (to, 1)))
4050 {
a06ef755 4051 if (to_rtx == orig_to_rtx)
956d6950 4052 to_rtx = copy_rtx (to_rtx);
956d6950
JL
4053 RTX_UNCHANGING_P (to_rtx) = 1;
4054 }
4055
a84b4898 4056 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
a06ef755
RK
4057 {
4058 if (to_rtx == orig_to_rtx)
4059 to_rtx = copy_rtx (to_rtx);
4060 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4061 }
4062
a06ef755
RK
4063 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4064 (want_value
4065 /* Spurious cast for HPUX compiler. */
4066 ? ((enum machine_mode)
4067 TYPE_MODE (TREE_TYPE (to)))
4068 : VOIDmode),
4069 unsignedp, TREE_TYPE (tem), get_alias_set (to));
a69beca1 4070
a06ef755
RK
4071 preserve_temp_slots (result);
4072 free_temp_slots ();
4073 pop_temp_slots ();
a69beca1 4074
a06ef755
RK
4075 /* If the value is meaningful, convert RESULT to the proper mode.
4076 Otherwise, return nothing. */
4077 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4078 TYPE_MODE (TREE_TYPE (from)),
4079 result,
4080 TREE_UNSIGNED (TREE_TYPE (to)))
4081 : NULL_RTX);
bbf6f052
RK
4082 }
4083
cd1db108
RS
4084 /* If the rhs is a function call and its value is not an aggregate,
4085 call the function before we start to compute the lhs.
4086 This is needed for correct code for cases such as
4087 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
4088 requires loading up part of an address in a separate insn.
4089
1858863b
JW
4090 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4091 since it might be a promoted variable where the zero- or sign- extension
4092 needs to be done. Handling this in the normal way is safe because no
4093 computation is done before the call. */
1ad87b63 4094 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
b35cd3c1 4095 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1858863b
JW
4096 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4097 && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 4098 {
0088fcb1
RK
4099 rtx value;
4100
4101 push_temp_slots ();
4102 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 4103 if (to_rtx == 0)
37a08a29 4104 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
aaf87c45 4105
fffa9c1d
JW
4106 /* Handle calls that return values in multiple non-contiguous locations.
4107 The Irix 6 ABI has examples of this. */
4108 if (GET_CODE (to_rtx) == PARALLEL)
04050c69 4109 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
fffa9c1d 4110 else if (GET_MODE (to_rtx) == BLKmode)
44bb111a 4111 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
aaf87c45 4112 else
6419e5b0
DT
4113 {
4114#ifdef POINTERS_EXTEND_UNSIGNED
0d4903b8
RK
4115 if (POINTER_TYPE_P (TREE_TYPE (to))
4116 && GET_MODE (to_rtx) != GET_MODE (value))
6419e5b0
DT
4117 value = convert_memory_address (GET_MODE (to_rtx), value);
4118#endif
4119 emit_move_insn (to_rtx, value);
4120 }
cd1db108
RS
4121 preserve_temp_slots (to_rtx);
4122 free_temp_slots ();
0088fcb1 4123 pop_temp_slots ();
709f5be1 4124 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
4125 }
4126
bbf6f052
RK
4127 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4128 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4129
4130 if (to_rtx == 0)
37a08a29 4131 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
bbf6f052 4132
86d38d25 4133 /* Don't move directly into a return register. */
14a774a9
RK
4134 if (TREE_CODE (to) == RESULT_DECL
4135 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
86d38d25 4136 {
0088fcb1
RK
4137 rtx temp;
4138
4139 push_temp_slots ();
4140 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
14a774a9
RK
4141
4142 if (GET_CODE (to_rtx) == PARALLEL)
04050c69 4143 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
14a774a9
RK
4144 else
4145 emit_move_insn (to_rtx, temp);
4146
86d38d25
RS
4147 preserve_temp_slots (to_rtx);
4148 free_temp_slots ();
0088fcb1 4149 pop_temp_slots ();
709f5be1 4150 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
4151 }
4152
bbf6f052
RK
4153 /* In case we are returning the contents of an object which overlaps
4154 the place the value is being stored, use a safe function when copying
4155 a value through a pointer into a structure value return block. */
4156 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4157 && current_function_returns_struct
4158 && !current_function_returns_pcc_struct)
4159 {
0088fcb1
RK
4160 rtx from_rtx, size;
4161
4162 push_temp_slots ();
33a20d10 4163 size = expr_size (from);
37a08a29 4164 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
bbf6f052 4165
4ca79136
RH
4166 if (TARGET_MEM_FUNCTIONS)
4167 emit_library_call (memmove_libfunc, LCT_NORMAL,
4168 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4169 XEXP (from_rtx, 0), Pmode,
4170 convert_to_mode (TYPE_MODE (sizetype),
4171 size, TREE_UNSIGNED (sizetype)),
4172 TYPE_MODE (sizetype));
4173 else
4174 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4175 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4176 XEXP (to_rtx, 0), Pmode,
4177 convert_to_mode (TYPE_MODE (integer_type_node),
4178 size,
4179 TREE_UNSIGNED (integer_type_node)),
4180 TYPE_MODE (integer_type_node));
bbf6f052
RK
4181
4182 preserve_temp_slots (to_rtx);
4183 free_temp_slots ();
0088fcb1 4184 pop_temp_slots ();
709f5be1 4185 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
4186 }
4187
4188 /* Compute FROM and store the value in the rtx we got. */
4189
0088fcb1 4190 push_temp_slots ();
bbf6f052
RK
4191 result = store_expr (from, to_rtx, want_value);
4192 preserve_temp_slots (result);
4193 free_temp_slots ();
0088fcb1 4194 pop_temp_slots ();
709f5be1 4195 return want_value ? result : NULL_RTX;
bbf6f052
RK
4196}
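/* Editor's note: a sketch, not part of expr.c, of the bitpos/offset split
   that expand_assignment receives from get_inner_reference above.  A field
   reference decomposes into a variable byte offset plus a constant bit
   position; when the bit position is suitably aligned, the MEM is simply
   readjusted (the adjust_address call above) instead of going through
   store_bit_field.  BITS_PER_UNIT is taken as 8 here for illustration.  */
#if 0
static long
field_byte_address (long base, long byte_offset, long bitpos)
{
  /* Mirrors adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT).  */
  return base + byte_offset + bitpos / 8;
}
#endif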
4197
4198/* Generate code for computing expression EXP,
4199 and storing the value into TARGET.
bbf6f052
RK
4200 TARGET may contain a QUEUED rtx.
4201
709f5be1
RS
4202 If WANT_VALUE is nonzero, return a copy of the value
4203 not in TARGET, so that we can be sure to use the proper
4204 value in a containing expression even if TARGET has something
4205 else stored in it. If possible, we copy the value through a pseudo
4206 and return that pseudo. Or, if the value is constant, we try to
4207 return the constant. In some cases, we return a pseudo
4208 copied *from* TARGET.
4209
4210 If the mode is BLKmode then we may return TARGET itself.
 4211 It turns out that in BLKmode it doesn't cause a problem,
4212 because C has no operators that could combine two different
4213 assignments into the same BLKmode object with different values
4214 with no sequence point. Will other languages need this to
4215 be more thorough?
4216
4217 If WANT_VALUE is 0, we return NULL, to make sure
4218 to catch quickly any cases where the caller uses the value
4219 and fails to set WANT_VALUE. */
bbf6f052
RK
4220
4221rtx
709f5be1 4222store_expr (exp, target, want_value)
b3694847
SS
4223 tree exp;
4224 rtx target;
709f5be1 4225 int want_value;
bbf6f052 4226{
b3694847 4227 rtx temp;
bbf6f052 4228 int dont_return_target = 0;
e5408e52 4229 int dont_store_target = 0;
bbf6f052
RK
4230
4231 if (TREE_CODE (exp) == COMPOUND_EXPR)
4232 {
4233 /* Perform first part of compound expression, then assign from second
4234 part. */
4235 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4236 emit_queue ();
709f5be1 4237 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
4238 }
4239 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4240 {
4241 /* For conditional expression, get safe form of the target. Then
4242 test the condition, doing the appropriate assignment on either
4243 side. This avoids the creation of unnecessary temporaries.
4244 For non-BLKmode, it is more efficient not to do this. */
4245
4246 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4247
4248 emit_queue ();
4249 target = protect_from_queue (target, 1);
4250
dabf8373 4251 do_pending_stack_adjust ();
bbf6f052
RK
4252 NO_DEFER_POP;
4253 jumpifnot (TREE_OPERAND (exp, 0), lab1);
956d6950 4254 start_cleanup_deferral ();
709f5be1 4255 store_expr (TREE_OPERAND (exp, 1), target, 0);
956d6950 4256 end_cleanup_deferral ();
bbf6f052
RK
4257 emit_queue ();
4258 emit_jump_insn (gen_jump (lab2));
4259 emit_barrier ();
4260 emit_label (lab1);
956d6950 4261 start_cleanup_deferral ();
709f5be1 4262 store_expr (TREE_OPERAND (exp, 2), target, 0);
956d6950 4263 end_cleanup_deferral ();
bbf6f052
RK
4264 emit_queue ();
4265 emit_label (lab2);
4266 OK_DEFER_POP;
a3a58acc 4267
709f5be1 4268 return want_value ? target : NULL_RTX;
bbf6f052 4269 }
bbf6f052 4270 else if (queued_subexp_p (target))
709f5be1
RS
4271 /* If target contains a postincrement, let's not risk
4272 using it as the place to generate the rhs. */
bbf6f052
RK
4273 {
4274 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4275 {
4276 /* Expand EXP into a new pseudo. */
4277 temp = gen_reg_rtx (GET_MODE (target));
4278 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4279 }
4280 else
906c4e36 4281 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
709f5be1
RS
4282
4283 /* If target is volatile, ANSI requires accessing the value
4284 *from* the target, if it is accessed. So make that happen.
4285 In no case return the target itself. */
4286 if (! MEM_VOLATILE_P (target) && want_value)
4287 dont_return_target = 1;
bbf6f052 4288 }
12f06d17
CH
4289 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4290 && GET_MODE (target) != BLKmode)
4291 /* If target is in memory and caller wants value in a register instead,
4292 arrange that. Pass TARGET as target for expand_expr so that,
4293 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4294 We know expand_expr will not use the target in that case.
4295 Don't do this if TARGET is volatile because we are supposed
4296 to write it and then read it. */
4297 {
1da93fe0 4298 temp = expand_expr (exp, target, GET_MODE (target), 0);
12f06d17 4299 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
e5408e52
JJ
4300 {
4301 /* If TEMP is already in the desired TARGET, only copy it from
4302 memory and don't store it there again. */
4303 if (temp == target
4304 || (rtx_equal_p (temp, target)
4305 && ! side_effects_p (temp) && ! side_effects_p (target)))
4306 dont_store_target = 1;
4307 temp = copy_to_reg (temp);
4308 }
12f06d17
CH
4309 dont_return_target = 1;
4310 }
1499e0a8
RK
4311 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
 4312 /* If this is a scalar in a register that is stored in a wider mode
4313 than the declared mode, compute the result into its declared mode
4314 and then convert to the wider mode. Our value is the computed
4315 expression. */
4316 {
b76b08ef
RK
4317 rtx inner_target = 0;
4318
5a32d038 4319 /* If we don't want a value, we can do the conversion inside EXP,
f635a84d
RK
4320 which will often result in some optimizations. Do the conversion
4321 in two steps: first change the signedness, if needed, then
ab6c58f1
RK
4322 the extend. But don't do this if the type of EXP is a subtype
4323 of something else since then the conversion might involve
4324 more than just converting modes. */
4325 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4326 && TREE_TYPE (TREE_TYPE (exp)) == 0)
f635a84d
RK
4327 {
4328 if (TREE_UNSIGNED (TREE_TYPE (exp))
4329 != SUBREG_PROMOTED_UNSIGNED_P (target))
ceef8ce4
NB
4330 exp = convert
4331 ((*lang_hooks.types.signed_or_unsigned_type)
4332 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
f635a84d 4333
b0c48229
NB
4334 exp = convert ((*lang_hooks.types.type_for_mode)
4335 (GET_MODE (SUBREG_REG (target)),
4336 SUBREG_PROMOTED_UNSIGNED_P (target)),
f635a84d 4337 exp);
b76b08ef
RK
4338
4339 inner_target = SUBREG_REG (target);
f635a84d 4340 }
3a94c984 4341
b76b08ef 4342 temp = expand_expr (exp, inner_target, VOIDmode, 0);
b258707c 4343
766f36c7 4344 /* If TEMP is a volatile MEM and we want a result value, make
f29369b9
RK
4345 the access now so it gets done only once. Likewise if
4346 it contains TARGET. */
4347 if (GET_CODE (temp) == MEM && want_value
4348 && (MEM_VOLATILE_P (temp)
4349 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
766f36c7
RK
4350 temp = copy_to_reg (temp);
4351
b258707c
RS
4352 /* If TEMP is a VOIDmode constant, use convert_modes to make
4353 sure that we properly convert it. */
4354 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
1f1b0541
RH
4355 {
4356 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4357 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4358 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4359 GET_MODE (target), temp,
4360 SUBREG_PROMOTED_UNSIGNED_P (target));
4361 }
b258707c 4362
1499e0a8
RK
4363 convert_move (SUBREG_REG (target), temp,
4364 SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
4365
4366 /* If we promoted a constant, change the mode back down to match
4367 target. Otherwise, the caller might get confused by a result whose
4368 mode is larger than expected. */
4369
b3ca30df 4370 if (want_value && GET_MODE (temp) != GET_MODE (target))
3dbecef9 4371 {
b3ca30df
JJ
4372 if (GET_MODE (temp) != VOIDmode)
4373 {
4374 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4375 SUBREG_PROMOTED_VAR_P (temp) = 1;
0fb7aeda 4376 SUBREG_PROMOTED_UNSIGNED_SET (temp,
7879b81e 4377 SUBREG_PROMOTED_UNSIGNED_P (target));
b3ca30df
JJ
4378 }
4379 else
4380 temp = convert_modes (GET_MODE (target),
4381 GET_MODE (SUBREG_REG (target)),
4382 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
4383 }
4384
709f5be1 4385 return want_value ? temp : NULL_RTX;
1499e0a8 4386 }
bbf6f052
RK
4387 else
4388 {
4389 temp = expand_expr (exp, target, GET_MODE (target), 0);
766f36c7 4390 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
4391 If TARGET is a volatile mem ref, either return TARGET
4392 or return a reg copied *from* TARGET; ANSI requires this.
4393
4394 Otherwise, if TEMP is not TARGET, return TEMP
4395 if it is constant (for efficiency),
4396 or if we really want the correct value. */
bbf6f052
RK
4397 if (!(target && GET_CODE (target) == REG
4398 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1 4399 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
effbcc6a 4400 && ! rtx_equal_p (temp, target)
709f5be1 4401 && (CONSTANT_P (temp) || want_value))
bbf6f052
RK
4402 dont_return_target = 1;
4403 }
4404
b258707c
RS
4405 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4406 the same as that of TARGET, adjust the constant. This is needed, for
4407 example, in case it is a CONST_DOUBLE and we want only a word-sized
4408 value. */
4409 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 4410 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
4411 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4412 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4413 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4414
bbf6f052 4415 /* If value was not generated in the target, store it there.
37a08a29
RK
4416 Convert the value to TARGET's type first if necessary.
4417 If TEMP and TARGET compare equal according to rtx_equal_p, but
f3f2255a
R
4418 one or both of them are volatile memory refs, we have to distinguish
4419 two cases:
4420 - expand_expr has used TARGET. In this case, we must not generate
4421 another copy. This can be detected by TARGET being equal according
4422 to == .
4423 - expand_expr has not used TARGET - that means that the source just
4424 happens to have the same RTX form. Since temp will have been created
4425 by expand_expr, it will compare unequal according to == .
4426 We must generate a copy in this case, to reach the correct number
4427 of volatile memory references. */
bbf6f052 4428
6036acbb 4429 if ((! rtx_equal_p (temp, target)
f3f2255a
R
4430 || (temp != target && (side_effects_p (temp)
4431 || side_effects_p (target))))
e5408e52 4432 && TREE_CODE (exp) != ERROR_MARK
a9772b60
JJ
4433 && ! dont_store_target
4434 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
 4435 but TARGET is not a valid memory reference, TEMP will differ
4436 from TARGET although it is really the same location. */
4437 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
e56fc090
HPN
4438 || target != DECL_RTL_IF_SET (exp))
4439 /* If there's nothing to copy, don't bother. Don't call expr_size
 4440 unless necessary, because some front ends' (C++) expr_size hook
4441 aborts on objects that are not supposed to be bit-copied or
4442 bit-initialized. */
4443 && expr_size (exp) != const0_rtx)
bbf6f052
RK
4444 {
4445 target = protect_from_queue (target, 1);
4446 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 4447 && GET_MODE (temp) != VOIDmode)
bbf6f052
RK
4448 {
4449 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4450 if (dont_return_target)
4451 {
4452 /* In this case, we will return TEMP,
4453 so make sure it has the proper mode.
4454 But don't forget to store the value into TARGET. */
4455 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4456 emit_move_insn (target, temp);
4457 }
4458 else
4459 convert_move (target, temp, unsignedp);
4460 }
4461
4462 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4463 {
c24ae149
RK
4464 /* Handle copying a string constant into an array. The string
4465 constant may be shorter than the array. So copy just the string's
4466 actual length, and clear the rest. First get the size of the data
4467 type of the string, which is actually the size of the target. */
4468 rtx size = expr_size (exp);
bbf6f052 4469
e87b4f3f
RS
4470 if (GET_CODE (size) == CONST_INT
4471 && INTVAL (size) < TREE_STRING_LENGTH (exp))
44bb111a 4472 emit_block_move (target, temp, size, BLOCK_OP_NORMAL);
e87b4f3f 4473 else
bbf6f052 4474 {
e87b4f3f
RS
4475 /* Compute the size of the data to copy from the string. */
4476 tree copy_size
c03b7665 4477 = size_binop (MIN_EXPR,
b50d17a1 4478 make_tree (sizetype, size),
fed3cef0 4479 size_int (TREE_STRING_LENGTH (exp)));
906c4e36
RK
4480 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4481 VOIDmode, 0);
e87b4f3f
RS
4482 rtx label = 0;
4483
4484 /* Copy that much. */
c24ae149 4485 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
44bb111a 4486 emit_block_move (target, temp, copy_size_rtx, BLOCK_OP_NORMAL);
e87b4f3f 4487
88f63c77
RK
4488 /* Figure out how much is left in TARGET that we have to clear.
4489 Do all calculations in ptr_mode. */
e87b4f3f
RS
4490 if (GET_CODE (copy_size_rtx) == CONST_INT)
4491 {
c24ae149
RK
4492 size = plus_constant (size, -INTVAL (copy_size_rtx));
4493 target = adjust_address (target, BLKmode,
4494 INTVAL (copy_size_rtx));
e87b4f3f
RS
4495 }
4496 else
4497 {
fa06ab5c 4498 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
906c4e36
RK
4499 copy_size_rtx, NULL_RTX, 0,
4500 OPTAB_LIB_WIDEN);
e87b4f3f 4501
c24ae149
RK
4502#ifdef POINTERS_EXTEND_UNSIGNED
4503 if (GET_MODE (copy_size_rtx) != Pmode)
4504 copy_size_rtx = convert_memory_address (Pmode,
4505 copy_size_rtx);
4506#endif
4507
4508 target = offset_address (target, copy_size_rtx,
4509 highest_pow2_factor (copy_size));
e87b4f3f 4510 label = gen_label_rtx ();
c5d5d461 4511 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
a06ef755 4512 GET_MODE (size), 0, label);
e87b4f3f
RS
4513 }
4514
4515 if (size != const0_rtx)
37a08a29 4516 clear_storage (target, size);
22619c3f 4517
e87b4f3f
RS
4518 if (label)
4519 emit_label (label);
bbf6f052
RK
4520 }
4521 }
fffa9c1d
JW
4522 /* Handle calls that return values in multiple non-contiguous locations.
4523 The Irix 6 ABI has examples of this. */
4524 else if (GET_CODE (target) == PARALLEL)
04050c69 4525 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
bbf6f052 4526 else if (GET_MODE (temp) == BLKmode)
44bb111a 4527 emit_block_move (target, temp, expr_size (exp), BLOCK_OP_NORMAL);
bbf6f052
RK
4528 else
4529 emit_move_insn (target, temp);
4530 }
709f5be1 4531
766f36c7
RK
4532 /* If we don't want a value, return NULL_RTX. */
4533 if (! want_value)
4534 return NULL_RTX;
4535
4536 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4537 ??? The latter test doesn't seem to make sense. */
4538 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 4539 return temp;
766f36c7
RK
4540
4541 /* Return TARGET itself if it is a hard register. */
4542 else if (want_value && GET_MODE (target) != BLKmode
4543 && ! (GET_CODE (target) == REG
4544 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 4545 return copy_to_reg (target);
3a94c984 4546
766f36c7 4547 else
709f5be1 4548 return target;
bbf6f052
RK
4549}
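/* Editor's note: a host-memory sketch, not part of expr.c, of store_expr's
   STRING_CST case above: copy MIN (string length, target size) bytes and
   zero whatever is left, mirroring emit_block_move followed by
   clear_storage.  */
#if 0
#include <string.h>

static void
store_string_sketch (char *target, size_t target_size,
		     const char *str, size_t str_len)
{
  size_t copy = str_len < target_size ? str_len : target_size;

  memcpy (target, str, copy);			/* the emit_block_move part */
  if (copy < target_size)
    memset (target + copy, 0, target_size - copy);  /* the clear_storage part */
}
#endif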
4550\f
9de08200
RK
4551/* Return 1 if EXP just contains zeros. */
4552
4553static int
4554is_zeros_p (exp)
4555 tree exp;
4556{
4557 tree elt;
4558
4559 switch (TREE_CODE (exp))
4560 {
4561 case CONVERT_EXPR:
4562 case NOP_EXPR:
4563 case NON_LVALUE_EXPR:
ed239f5a 4564 case VIEW_CONVERT_EXPR:
9de08200
RK
4565 return is_zeros_p (TREE_OPERAND (exp, 0));
4566
4567 case INTEGER_CST:
05bccae2 4568 return integer_zerop (exp);
9de08200
RK
4569
4570 case COMPLEX_CST:
4571 return
4572 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4573
4574 case REAL_CST:
41c9120b 4575 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
9de08200 4576
69ef87e2
AH
4577 case VECTOR_CST:
4578 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4579 elt = TREE_CHAIN (elt))
4580 if (!is_zeros_p (TREE_VALUE (elt)))
4581 return 0;
4582
4583 return 1;
4584
9de08200 4585 case CONSTRUCTOR:
e1a43f73
PB
4586 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4587 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
9de08200
RK
4588 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4589 if (! is_zeros_p (TREE_VALUE (elt)))
4590 return 0;
4591
4592 return 1;
3a94c984 4593
e9a25f70
JL
4594 default:
4595 return 0;
9de08200 4596 }
9de08200
RK
4597}
4598
4599/* Return 1 if EXP contains mostly (3/4) zeros. */
4600
4601static int
4602mostly_zeros_p (exp)
4603 tree exp;
4604{
9de08200
RK
4605 if (TREE_CODE (exp) == CONSTRUCTOR)
4606 {
e1a43f73
PB
4607 int elts = 0, zeros = 0;
4608 tree elt = CONSTRUCTOR_ELTS (exp);
4609 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4610 {
4611 /* If there are no ranges of true bits, it is all zero. */
4612 return elt == NULL_TREE;
4613 }
4614 for (; elt; elt = TREE_CHAIN (elt))
4615 {
4616 /* We do not handle the case where the index is a RANGE_EXPR,
4617 so the statistic will be somewhat inaccurate.
4618 We do make a more accurate count in store_constructor itself,
4619 so since this function is only used for nested array elements,
0f41302f 4620 this should be close enough. */
e1a43f73
PB
4621 if (mostly_zeros_p (TREE_VALUE (elt)))
4622 zeros++;
4623 elts++;
4624 }
9de08200
RK
4625
4626 return 4 * zeros >= 3 * elts;
4627 }
4628
4629 return is_zeros_p (exp);
4630}
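/* Editor's note (illustration, not original source): the 3/4 density test
   above is phrased as 4 * zeros >= 3 * elts to stay in integer arithmetic.
   For example, 6 zeros among 8 elements passes (24 >= 24), while 5 among 8
   does not (20 < 24).  */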
4631\f
e1a43f73
PB
4632/* Helper function for store_constructor.
4633 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4634 TYPE is the type of the CONSTRUCTOR, not the element type.
04050c69 4635 CLEARED is as for store_constructor.
23cb1766 4636 ALIAS_SET is the alias set to use for any stores.
23ccec44
JW
4637
4638 This provides a recursive shortcut back to store_constructor when it isn't
4639 necessary to go through store_field. This is so that we can pass through
4640 the cleared field to let store_constructor know that we may not have to
4641 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
4642
4643static void
04050c69
RK
4644store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4645 alias_set)
e1a43f73 4646 rtx target;
770ae6cc
RK
4647 unsigned HOST_WIDE_INT bitsize;
4648 HOST_WIDE_INT bitpos;
e1a43f73
PB
4649 enum machine_mode mode;
4650 tree exp, type;
4651 int cleared;
23cb1766 4652 int alias_set;
e1a43f73
PB
4653{
4654 if (TREE_CODE (exp) == CONSTRUCTOR
23ccec44 4655 && bitpos % BITS_PER_UNIT == 0
cc2902df 4656 /* If we have a nonzero bitpos for a register target, then we just
23ccec44
JW
4657 let store_field do the bitfield handling. This is unlikely to
4658 generate unnecessary clear instructions anyways. */
4659 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 4660 {
61cb205c
RK
4661 if (GET_CODE (target) == MEM)
4662 target
4663 = adjust_address (target,
4664 GET_MODE (target) == BLKmode
4665 || 0 != (bitpos
4666 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4667 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
23cb1766 4668
e0339ef7 4669
04050c69 4670 /* Update the alias set, if required. */
10b76d73
RK
4671 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4672 && MEM_ALIAS_SET (target) != 0)
70072ed9
RK
4673 {
4674 target = copy_rtx (target);
4675 set_mem_alias_set (target, alias_set);
4676 }
e0339ef7 4677
04050c69 4678 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
e1a43f73
PB
4679 }
4680 else
a06ef755
RK
4681 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4682 alias_set);
e1a43f73
PB
4683}
4684
bbf6f052 4685/* Store the value of constructor EXP into the rtx TARGET.
04050c69
RK
4686 TARGET is either a REG or a MEM; we know it cannot conflict, since
4687 safe_from_p has been called.
b7010412
RK
 4688 CLEARED is true if TARGET is known to have been zeroed.
4689 SIZE is the number of bytes of TARGET we are allowed to modify: this
4690 may not be the same as the size of EXP if we are assigning to a field
4691 which has been packed to exclude padding bits. */
bbf6f052
RK
4692
4693static void
04050c69 4694store_constructor (exp, target, cleared, size)
bbf6f052
RK
4695 tree exp;
4696 rtx target;
e1a43f73 4697 int cleared;
13eb1f7f 4698 HOST_WIDE_INT size;
bbf6f052 4699{
4af3895e 4700 tree type = TREE_TYPE (exp);
a5efcd63 4701#ifdef WORD_REGISTER_OPERATIONS
13eb1f7f 4702 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
a5efcd63 4703#endif
4af3895e 4704
e44842fe
RK
4705 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4706 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052 4707 {
b3694847 4708 tree elt;
bbf6f052 4709
04050c69 4710 /* We either clear the aggregate or indicate the value is dead. */
dd1db5ec
RK
4711 if ((TREE_CODE (type) == UNION_TYPE
4712 || TREE_CODE (type) == QUAL_UNION_TYPE)
04050c69
RK
4713 && ! cleared
4714 && ! CONSTRUCTOR_ELTS (exp))
4715 /* If the constructor is empty, clear the union. */
a59f8640 4716 {
04050c69
RK
4717 clear_storage (target, expr_size (exp));
4718 cleared = 1;
a59f8640 4719 }
4af3895e
JVA
4720
4721 /* If we are building a static constructor into a register,
4722 set the initial value as zero so we can fold the value into
67225c15
RK
4723 a constant. But if more than one register is involved,
4724 this probably loses. */
04050c69 4725 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
67225c15 4726 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200 4727 {
04050c69 4728 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
9de08200
RK
4729 cleared = 1;
4730 }
4731
4732 /* If the constructor has fewer fields than the structure
4733 or if we are initializing the structure to mostly zeros,
0d97bf4c 4734 clear the whole structure first. Don't do this if TARGET is a
fcf1b822
RK
4735 register whose mode size isn't equal to SIZE since clear_storage
4736 can't handle this case. */
04050c69 4737 else if (! cleared && size > 0
9376fcd6 4738 && ((list_length (CONSTRUCTOR_ELTS (exp))
c3b247b4 4739 != fields_length (type))
fcf1b822
RK
4740 || mostly_zeros_p (exp))
4741 && (GET_CODE (target) != REG
04050c69
RK
4742 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4743 == size)))
9de08200 4744 {
04050c69 4745 clear_storage (target, GEN_INT (size));
9de08200
RK
4746 cleared = 1;
4747 }
04050c69
RK
4748
4749 if (! cleared)
38a448ca 4750 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4751
4752 /* Store each element of the constructor into
4753 the corresponding field of TARGET. */
4754
4755 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4756 {
b3694847 4757 tree field = TREE_PURPOSE (elt);
34c73909 4758 tree value = TREE_VALUE (elt);
b3694847 4759 enum machine_mode mode;
770ae6cc
RK
4760 HOST_WIDE_INT bitsize;
4761 HOST_WIDE_INT bitpos = 0;
bbf6f052 4762 int unsignedp;
770ae6cc 4763 tree offset;
b50d17a1 4764 rtx to_rtx = target;
bbf6f052 4765
f32fd778
RS
4766 /* Just ignore missing fields.
4767 We cleared the whole structure, above,
4768 if any fields are missing. */
4769 if (field == 0)
4770 continue;
4771
8b6000fc 4772 if (cleared && is_zeros_p (value))
e1a43f73 4773 continue;
9de08200 4774
770ae6cc
RK
4775 if (host_integerp (DECL_SIZE (field), 1))
4776 bitsize = tree_low_cst (DECL_SIZE (field), 1);
14a774a9
RK
4777 else
4778 bitsize = -1;
4779
bbf6f052
RK
4780 unsignedp = TREE_UNSIGNED (field);
4781 mode = DECL_MODE (field);
4782 if (DECL_BIT_FIELD (field))
4783 mode = VOIDmode;
4784
770ae6cc
RK
4785 offset = DECL_FIELD_OFFSET (field);
4786 if (host_integerp (offset, 0)
4787 && host_integerp (bit_position (field), 0))
4788 {
4789 bitpos = int_bit_position (field);
4790 offset = 0;
4791 }
b50d17a1 4792 else
770ae6cc 4793 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
3a94c984 4794
b50d17a1
RK
4795 if (offset)
4796 {
4797 rtx offset_rtx;
4798
4799 if (contains_placeholder_p (offset))
7fa96708 4800 offset = build (WITH_RECORD_EXPR, sizetype,
956d6950 4801 offset, make_tree (TREE_TYPE (exp), target));
bbf6f052 4802
b50d17a1
RK
4803 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4804 if (GET_CODE (to_rtx) != MEM)
4805 abort ();
4806
bd070e1a 4807#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672
RK
4808 if (GET_MODE (offset_rtx) != Pmode)
4809 offset_rtx = convert_memory_address (Pmode, offset_rtx);
fa06ab5c
RK
4810#else
4811 if (GET_MODE (offset_rtx) != ptr_mode)
4812 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 4813#endif
bd070e1a 4814
0d4903b8
RK
4815 to_rtx = offset_address (to_rtx, offset_rtx,
4816 highest_pow2_factor (offset));
b50d17a1 4817 }
c5c76735 4818
cf04eb80
RK
4819 if (TREE_READONLY (field))
4820 {
9151b3bf 4821 if (GET_CODE (to_rtx) == MEM)
effbcc6a
RK
4822 to_rtx = copy_rtx (to_rtx);
4823
cf04eb80
RK
4824 RTX_UNCHANGING_P (to_rtx) = 1;
4825 }
4826
34c73909
R
4827#ifdef WORD_REGISTER_OPERATIONS
4828 /* If this initializes a field that is smaller than a word, at the
4829 start of a word, try to widen it to a full word.
4830 This special case allows us to output C++ member function
4831 initializations in a form that the optimizers can understand. */
770ae6cc 4832 if (GET_CODE (target) == REG
34c73909
R
4833 && bitsize < BITS_PER_WORD
4834 && bitpos % BITS_PER_WORD == 0
4835 && GET_MODE_CLASS (mode) == MODE_INT
4836 && TREE_CODE (value) == INTEGER_CST
13eb1f7f
RK
4837 && exp_size >= 0
4838 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
34c73909
R
4839 {
4840 tree type = TREE_TYPE (value);
04050c69 4841
34c73909
R
4842 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4843 {
b0c48229
NB
4844 type = (*lang_hooks.types.type_for_size)
4845 (BITS_PER_WORD, TREE_UNSIGNED (type));
34c73909
R
4846 value = convert (type, value);
4847 }
04050c69 4848
34c73909
R
4849 if (BYTES_BIG_ENDIAN)
4850 value
4851 = fold (build (LSHIFT_EXPR, type, value,
4852 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4853 bitsize = BITS_PER_WORD;
4854 mode = word_mode;
4855 }
4856#endif
10b76d73
RK
4857
4858 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4859 && DECL_NONADDRESSABLE_P (field))
4860 {
4861 to_rtx = copy_rtx (to_rtx);
4862 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4863 }
4864
c5c76735 4865 store_constructor_field (to_rtx, bitsize, bitpos, mode,
8b6000fc 4866 value, type, cleared,
10b76d73 4867 get_alias_set (TREE_TYPE (field)));
bbf6f052
RK
4868 }
4869 }
e6834654
SS
4870 else if (TREE_CODE (type) == ARRAY_TYPE
4871 || TREE_CODE (type) == VECTOR_TYPE)
bbf6f052 4872 {
b3694847
SS
4873 tree elt;
4874 int i;
e1a43f73 4875 int need_to_clear;
4af3895e 4876 tree domain = TYPE_DOMAIN (type);
4af3895e 4877 tree elttype = TREE_TYPE (type);
e6834654 4878 int const_bounds_p;
ae0ed63a
JM
4879 HOST_WIDE_INT minelt = 0;
4880 HOST_WIDE_INT maxelt = 0;
85f3d674 4881
e6834654
SS
4882 /* Vectors are like arrays, but the domain is stored via an array
4883 type indirectly. */
4884 if (TREE_CODE (type) == VECTOR_TYPE)
4885 {
4886 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4887 the same field as TYPE_DOMAIN, we are not guaranteed that
4888 it always will. */
4889 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4890 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4891 }
4892
4893 const_bounds_p = (TYPE_MIN_VALUE (domain)
4894 && TYPE_MAX_VALUE (domain)
4895 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4896 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4897
85f3d674
RK
4898 /* If we have constant bounds for the range of the type, get them. */
4899 if (const_bounds_p)
4900 {
4901 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4902 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4903 }
bbf6f052 4904
e1a43f73 4905 /* If the constructor has fewer elements than the array,
38e01259 4906 clear the whole array first. Similarly if this is
e1a43f73
PB
 4907 a static constructor of a non-BLKmode object. */
4908 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4909 need_to_clear = 1;
4910 else
4911 {
4912 HOST_WIDE_INT count = 0, zero_count = 0;
85f3d674
RK
4913 need_to_clear = ! const_bounds_p;
4914
e1a43f73
PB
4915 /* This loop is a more accurate version of the loop in
4916 mostly_zeros_p (it handles RANGE_EXPR in an index).
4917 It is also needed to check for missing elements. */
4918 for (elt = CONSTRUCTOR_ELTS (exp);
85f3d674 4919 elt != NULL_TREE && ! need_to_clear;
df0faff1 4920 elt = TREE_CHAIN (elt))
e1a43f73
PB
4921 {
4922 tree index = TREE_PURPOSE (elt);
4923 HOST_WIDE_INT this_node_count;
19caa751 4924
e1a43f73
PB
4925 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4926 {
4927 tree lo_index = TREE_OPERAND (index, 0);
4928 tree hi_index = TREE_OPERAND (index, 1);
05bccae2 4929
19caa751
RK
4930 if (! host_integerp (lo_index, 1)
4931 || ! host_integerp (hi_index, 1))
e1a43f73
PB
4932 {
4933 need_to_clear = 1;
4934 break;
4935 }
19caa751
RK
4936
4937 this_node_count = (tree_low_cst (hi_index, 1)
4938 - tree_low_cst (lo_index, 1) + 1);
e1a43f73
PB
4939 }
4940 else
4941 this_node_count = 1;
85f3d674 4942
e1a43f73
PB
4943 count += this_node_count;
4944 if (mostly_zeros_p (TREE_VALUE (elt)))
4945 zero_count += this_node_count;
4946 }
85f3d674 4947
8e958f70 4948 /* Clear the entire array first if there are any missing elements,
0f41302f 4949 or if the incidence of zero elements is >= 75%. */
85f3d674
RK
4950 if (! need_to_clear
4951 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
e1a43f73
PB
4952 need_to_clear = 1;
4953 }
85f3d674 4954
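	/* Annotation, not original source: the heuristic above clears first
	   when 4 * zero_count >= 3 * count, i.e. when at least 75% of the
	   counted elements are zero.  With count == 10 and zero_count == 8,
	   32 >= 30 holds, so the whole array is cleared once and only the
	   two nonzero elements are stored individually.  */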
9376fcd6 4955 if (need_to_clear && size > 0)
9de08200
RK
4956 {
4957 if (! cleared)
725e58b1
RK
4958 {
4959 if (REG_P (target))
4960 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4961 else
4962 clear_storage (target, GEN_INT (size));
4963 }
9de08200
RK
4964 cleared = 1;
4965 }
df4556a3 4966 else if (REG_P (target))
bbf6f052 4967 /* Inform later passes that the old value is dead. */
38a448ca 4968 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4969
4970 /* Store each element of the constructor into
4971 the corresponding element of TARGET, determined
4972 by counting the elements. */
4973 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4974 elt;
4975 elt = TREE_CHAIN (elt), i++)
4976 {
b3694847 4977 enum machine_mode mode;
19caa751
RK
4978 HOST_WIDE_INT bitsize;
4979 HOST_WIDE_INT bitpos;
bbf6f052 4980 int unsignedp;
e1a43f73 4981 tree value = TREE_VALUE (elt);
03dc44a6
RS
4982 tree index = TREE_PURPOSE (elt);
4983 rtx xtarget = target;
bbf6f052 4984
e1a43f73
PB
4985 if (cleared && is_zeros_p (value))
4986 continue;
9de08200 4987
bbf6f052 4988 unsignedp = TREE_UNSIGNED (elttype);
14a774a9
RK
4989 mode = TYPE_MODE (elttype);
4990 if (mode == BLKmode)
19caa751
RK
4991 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4992 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4993 : -1);
14a774a9
RK
4994 else
4995 bitsize = GET_MODE_BITSIZE (mode);
bbf6f052 4996
e1a43f73
PB
4997 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4998 {
4999 tree lo_index = TREE_OPERAND (index, 0);
5000 tree hi_index = TREE_OPERAND (index, 1);
0d4903b8 5001 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
e1a43f73 5002 struct nesting *loop;
05c0b405
PB
5003 HOST_WIDE_INT lo, hi, count;
5004 tree position;
e1a43f73 5005
0f41302f 5006 /* If the range is constant and "small", unroll the loop. */
85f3d674
RK
5007 if (const_bounds_p
5008 && host_integerp (lo_index, 0)
19caa751
RK
5009 && host_integerp (hi_index, 0)
5010 && (lo = tree_low_cst (lo_index, 0),
5011 hi = tree_low_cst (hi_index, 0),
05c0b405
PB
5012 count = hi - lo + 1,
5013 (GET_CODE (target) != MEM
5014 || count <= 2
19caa751
RK
5015 || (host_integerp (TYPE_SIZE (elttype), 1)
5016 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5017 <= 40 * 8)))))
e1a43f73 5018 {
05c0b405
PB
5019 lo -= minelt; hi -= minelt;
5020 for (; lo <= hi; lo++)
e1a43f73 5021 {
19caa751 5022 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
10b76d73
RK
5023
5024 if (GET_CODE (target) == MEM
5025 && !MEM_KEEP_ALIAS_SET_P (target)
e6834654 5026 && TREE_CODE (type) == ARRAY_TYPE
10b76d73
RK
5027 && TYPE_NONALIASED_COMPONENT (type))
5028 {
5029 target = copy_rtx (target);
5030 MEM_KEEP_ALIAS_SET_P (target) = 1;
5031 }
5032
23cb1766 5033 store_constructor_field
04050c69
RK
5034 (target, bitsize, bitpos, mode, value, type, cleared,
5035 get_alias_set (elttype));
e1a43f73
PB
5036 }
5037 }
5038 else
5039 {
5040 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5041 loop_top = gen_label_rtx ();
5042 loop_end = gen_label_rtx ();
5043
5044 unsignedp = TREE_UNSIGNED (domain);
5045
5046 index = build_decl (VAR_DECL, NULL_TREE, domain);
5047
19e7881c 5048 index_r
e1a43f73
PB
5049 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5050 &unsignedp, 0));
19e7881c 5051 SET_DECL_RTL (index, index_r);
e1a43f73
PB
5052 if (TREE_CODE (value) == SAVE_EXPR
5053 && SAVE_EXPR_RTL (value) == 0)
5054 {
0f41302f
MS
5055 /* Make sure value gets expanded once before the
5056 loop. */
e1a43f73
PB
5057 expand_expr (value, const0_rtx, VOIDmode, 0);
5058 emit_queue ();
5059 }
5060 store_expr (lo_index, index_r, 0);
5061 loop = expand_start_loop (0);
5062
0f41302f 5063 /* Assign value to element index. */
fed3cef0
RK
5064 position
5065 = convert (ssizetype,
5066 fold (build (MINUS_EXPR, TREE_TYPE (index),
5067 index, TYPE_MIN_VALUE (domain))));
5068 position = size_binop (MULT_EXPR, position,
5069 convert (ssizetype,
5070 TYPE_SIZE_UNIT (elttype)));
5071
e1a43f73 5072 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
0d4903b8
RK
5073 xtarget = offset_address (target, pos_rtx,
5074 highest_pow2_factor (position));
5075 xtarget = adjust_address (xtarget, mode, 0);
e1a43f73 5076 if (TREE_CODE (value) == CONSTRUCTOR)
04050c69 5077 store_constructor (value, xtarget, cleared,
b7010412 5078 bitsize / BITS_PER_UNIT);
e1a43f73
PB
5079 else
5080 store_expr (value, xtarget, 0);
5081
5082 expand_exit_loop_if_false (loop,
5083 build (LT_EXPR, integer_type_node,
5084 index, hi_index));
5085
5086 expand_increment (build (PREINCREMENT_EXPR,
5087 TREE_TYPE (index),
7b8b9722 5088 index, integer_one_node), 0, 0);
e1a43f73
PB
5089 expand_end_loop ();
5090 emit_label (loop_end);
e1a43f73
PB
5091 }
5092 }
19caa751
RK
5093 else if ((index != 0 && ! host_integerp (index, 0))
5094 || ! host_integerp (TYPE_SIZE (elttype), 1))
03dc44a6 5095 {
03dc44a6
RS
5096 tree position;
5097
5b6c44ff 5098 if (index == 0)
fed3cef0 5099 index = ssize_int (1);
5b6c44ff 5100
e1a43f73 5101 if (minelt)
fed3cef0
RK
5102 index = convert (ssizetype,
5103 fold (build (MINUS_EXPR, index,
5104 TYPE_MIN_VALUE (domain))));
19caa751 5105
fed3cef0
RK
5106 position = size_binop (MULT_EXPR, index,
5107 convert (ssizetype,
5108 TYPE_SIZE_UNIT (elttype)));
0d4903b8
RK
5109 xtarget = offset_address (target,
5110 expand_expr (position, 0, VOIDmode, 0),
5111 highest_pow2_factor (position));
5112 xtarget = adjust_address (xtarget, mode, 0);
e1a43f73 5113 store_expr (value, xtarget, 0);
03dc44a6
RS
5114 }
5115 else
5116 {
5117 if (index != 0)
19caa751
RK
5118 bitpos = ((tree_low_cst (index, 0) - minelt)
5119 * tree_low_cst (TYPE_SIZE (elttype), 1));
03dc44a6 5120 else
19caa751
RK
5121 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5122
10b76d73 5123 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
e6834654 5124 && TREE_CODE (type) == ARRAY_TYPE
10b76d73
RK
5125 && TYPE_NONALIASED_COMPONENT (type))
5126 {
5127 target = copy_rtx (target);
5128 MEM_KEEP_ALIAS_SET_P (target) = 1;
5129 }
5130
c5c76735 5131 store_constructor_field (target, bitsize, bitpos, mode, value,
04050c69 5132 type, cleared, get_alias_set (elttype));
23cb1766 5133
03dc44a6 5134 }
bbf6f052
RK
5135 }
5136 }
19caa751 5137
3a94c984 5138 /* Set constructor assignments. */
071a6595
PB
5139 else if (TREE_CODE (type) == SET_TYPE)
5140 {
e1a43f73 5141 tree elt = CONSTRUCTOR_ELTS (exp);
19caa751 5142 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
5143 tree domain = TYPE_DOMAIN (type);
5144 tree domain_min, domain_max, bitlength;
5145
9faa82d8 5146 /* The default implementation strategy is to extract the constant
071a6595
PB
5147 parts of the constructor, use that to initialize the target,
5148 and then "or" in whatever non-constant ranges we need in addition.
5149
5150 If a large set is all zero or all ones, it is
5151 probably better to set it using memset (if available) or bzero.
5152 Also, if a large set has just a single range, it may also be
 5153 better to first clear the whole set (using
0f41302f 5154 bzero/memset), and then set the bits we want. */
3a94c984 5155
0f41302f 5156 /* Check for all zeros. */
9376fcd6 5157 if (elt == NULL_TREE && size > 0)
071a6595 5158 {
e1a43f73 5159 if (!cleared)
8ac61af7 5160 clear_storage (target, GEN_INT (size));
071a6595
PB
5161 return;
5162 }
5163
071a6595
PB
5164 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5165 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5166 bitlength = size_binop (PLUS_EXPR,
fed3cef0
RK
5167 size_diffop (domain_max, domain_min),
5168 ssize_int (1));
071a6595 5169
19caa751 5170 nbits = tree_low_cst (bitlength, 1);
e1a43f73
PB
5171
5172 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5173 are "complicated" (more than one range), initialize (the
3a94c984 5174 constant parts) by copying from a constant. */
e1a43f73
PB
5175 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5176 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 5177 {
19caa751 5178 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
b4ee5a72 5179 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
0f41302f 5180 char *bit_buffer = (char *) alloca (nbits);
b4ee5a72 5181 HOST_WIDE_INT word = 0;
19caa751
RK
5182 unsigned int bit_pos = 0;
5183 unsigned int ibit = 0;
5184 unsigned int offset = 0; /* In bytes from beginning of set. */
5185
e1a43f73 5186 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 5187 for (;;)
071a6595 5188 {
b4ee5a72
PB
5189 if (bit_buffer[ibit])
5190 {
b09f3348 5191 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
5192 word |= (1 << (set_word_size - 1 - bit_pos));
5193 else
5194 word |= 1 << bit_pos;
5195 }
19caa751 5196
b4ee5a72
PB
5197 bit_pos++; ibit++;
5198 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 5199 {
e1a43f73
PB
5200 if (word != 0 || ! cleared)
5201 {
5202 rtx datum = GEN_INT (word);
5203 rtx to_rtx;
19caa751 5204
0f41302f
MS
5205 /* The assumption here is that it is safe to use
5206 XEXP if the set is multi-word, but not if
5207 it's single-word. */
e1a43f73 5208 if (GET_CODE (target) == MEM)
f4ef873c 5209 to_rtx = adjust_address (target, mode, offset);
3a94c984 5210 else if (offset == 0)
e1a43f73
PB
5211 to_rtx = target;
5212 else
5213 abort ();
5214 emit_move_insn (to_rtx, datum);
5215 }
19caa751 5216
b4ee5a72
PB
5217 if (ibit == nbits)
5218 break;
5219 word = 0;
5220 bit_pos = 0;
5221 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
5222 }
5223 }
071a6595 5224 }
e1a43f73 5225 else if (!cleared)
19caa751
RK
5226 /* Don't bother clearing storage if the set is all ones. */
5227 if (TREE_CHAIN (elt) != NULL_TREE
5228 || (TREE_PURPOSE (elt) == NULL_TREE
5229 ? nbits != 1
5230 : ( ! host_integerp (TREE_VALUE (elt), 0)
5231 || ! host_integerp (TREE_PURPOSE (elt), 0)
5232 || (tree_low_cst (TREE_VALUE (elt), 0)
5233 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5234 != (HOST_WIDE_INT) nbits))))
8ac61af7 5235 clear_storage (target, expr_size (exp));
3a94c984 5236
e1a43f73 5237 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595 5238 {
3a94c984 5239 /* Start of range of element or NULL. */
071a6595 5240 tree startbit = TREE_PURPOSE (elt);
3a94c984 5241 /* End of range of element, or element value. */
071a6595
PB
5242 tree endbit = TREE_VALUE (elt);
5243 HOST_WIDE_INT startb, endb;
19caa751 5244 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
071a6595
PB
5245
5246 bitlength_rtx = expand_expr (bitlength,
19caa751 5247 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
071a6595 5248
3a94c984 5249 /* Handle non-range tuple element like [ expr ]. */
071a6595
PB
5250 if (startbit == NULL_TREE)
5251 {
5252 startbit = save_expr (endbit);
5253 endbit = startbit;
5254 }
19caa751 5255
071a6595
PB
5256 startbit = convert (sizetype, startbit);
5257 endbit = convert (sizetype, endbit);
5258 if (! integer_zerop (domain_min))
5259 {
5260 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5261 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5262 }
3a94c984 5263 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
071a6595 5264 EXPAND_CONST_ADDRESS);
3a94c984 5265 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
071a6595
PB
5266 EXPAND_CONST_ADDRESS);
5267
5268 if (REG_P (target))
5269 {
1da68f56
RK
5270 targetx
5271 = assign_temp
b0c48229
NB
5272 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5273 (GET_MODE (target), 0),
1da68f56
RK
5274 TYPE_QUAL_CONST)),
5275 0, 1, 1);
071a6595
PB
5276 emit_move_insn (targetx, target);
5277 }
19caa751 5278
071a6595
PB
5279 else if (GET_CODE (target) == MEM)
5280 targetx = target;
5281 else
5282 abort ();
5283
4ca79136
RH
5284 /* Optimization: If startbit and endbit are constants divisible
5285 by BITS_PER_UNIT, call memset instead. */
5286 if (TARGET_MEM_FUNCTIONS
5287 && TREE_CODE (startbit) == INTEGER_CST
071a6595
PB
5288 && TREE_CODE (endbit) == INTEGER_CST
5289 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 5290 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 5291 {
ebb1b59a 5292 emit_library_call (memset_libfunc, LCT_NORMAL,
071a6595 5293 VOIDmode, 3,
e1a43f73
PB
5294 plus_constant (XEXP (targetx, 0),
5295 startb / BITS_PER_UNIT),
071a6595 5296 Pmode,
3b6f75e2 5297 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 5298 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 5299 TYPE_MODE (sizetype));
071a6595
PB
5300 }
5301 else
19caa751 5302 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
ebb1b59a
BS
5303 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5304 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
19caa751
RK
5305 startbit_rtx, TYPE_MODE (sizetype),
5306 endbit_rtx, TYPE_MODE (sizetype));
5307
071a6595
PB
5308 if (REG_P (target))
5309 emit_move_insn (target, targetx);
5310 }
5311 }
bbf6f052
RK
5312
5313 else
5314 abort ();
5315}
5316
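The SET_TYPE word-packing loop above is easier to follow with concrete
bits.  Below is a minimal standalone model of it in plain C -- not GCC
internals -- where an 8-bit set word is assumed purely for the sketch
(GCC derives set_word_size from TYPE_ALIGN):

#include <stdio.h>

#define SET_WORD_SIZE 8	/* assumed for the sketch */

/* Pack NBITS flags into words the way the SET_TYPE loop does: bit 0 of
   the set lands in the high-order bit of the first word on a
   big-endian target, in the low-order bit otherwise; a word is flushed
   whenever it fills or the bits run out.  */
static void
pack_bits (const char *bit_buffer, int nbits, int big_endian)
{
  unsigned int word = 0;
  int bit_pos = 0, ibit = 0;

  for (;;)
    {
      if (bit_buffer[ibit])
	word |= big_endian
		? 1u << (SET_WORD_SIZE - 1 - bit_pos)
		: 1u << bit_pos;

      bit_pos++, ibit++;
      if (bit_pos >= SET_WORD_SIZE || ibit == nbits)
	{
	  printf ("%02x ", word);
	  if (ibit == nbits)
	    break;
	  word = 0;
	  bit_pos = 0;
	}
    }
  putchar ('\n');
}

int
main (void)
{
  static const char bits[10] = { 1, 1, 0, 0, 0, 0, 0, 0, 1, 1 };
  pack_bits (bits, 10, 1);	/* prints "c0 c0": big-endian bit order */
  pack_bits (bits, 10, 0);	/* prints "03 03" */
  return 0;
}

The only target dependence is the bit order within a word, which is
exactly what the BYTES_BIG_ENDIAN test in the loop above selects.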
5317/* Store the value of EXP (an expression tree)
5318 into a subfield of TARGET which has mode MODE and occupies
5319 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5320 If MODE is VOIDmode, it means that we are storing into a bit-field.
5321
5322 If VALUE_MODE is VOIDmode, return nothing in particular.
5323 UNSIGNEDP is not used in this case.
5324
5325 Otherwise, return an rtx for the value stored. This rtx
5326 has mode VALUE_MODE if that is convenient to do.
5327 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5328
a06ef755 5329 TYPE is the type of the underlying object.
ece32014
MM
5330
5331 ALIAS_SET is the alias set for the destination. This value will
5332 (in general) be different from that for TARGET, since TARGET is a
5333 reference to the containing structure. */
bbf6f052
RK
5334
5335static rtx
a06ef755
RK
5336store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5337 alias_set)
bbf6f052 5338 rtx target;
770ae6cc
RK
5339 HOST_WIDE_INT bitsize;
5340 HOST_WIDE_INT bitpos;
bbf6f052
RK
5341 enum machine_mode mode;
5342 tree exp;
5343 enum machine_mode value_mode;
5344 int unsignedp;
a06ef755 5345 tree type;
ece32014 5346 int alias_set;
bbf6f052 5347{
906c4e36 5348 HOST_WIDE_INT width_mask = 0;
bbf6f052 5349
e9a25f70
JL
5350 if (TREE_CODE (exp) == ERROR_MARK)
5351 return const0_rtx;
5352
2be6a7e9
RK
5353 /* If we have nothing to store, do nothing unless the expression has
5354 side-effects. */
5355 if (bitsize == 0)
5356 return expand_expr (exp, const0_rtx, VOIDmode, 0);
a06ef755 5357 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
906c4e36 5358 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
5359
5360 /* If we are storing into an unaligned field of an aligned union that is
5361 in a register, we may have the mode of TARGET being an integer mode but
5362 MODE == BLKmode. In that case, get an aligned object whose size and
5363 alignment are the same as TARGET and store TARGET into it (we can avoid
5364 the store if the field being stored is the entire width of TARGET). Then
5365 call ourselves recursively to store the field into a BLKmode version of
5366 that object. Finally, load from the object into TARGET. This is not
5367 very efficient in general, but should only be slightly more expensive
5368 than the otherwise-required unaligned accesses. Perhaps this can be
5369 cleaned up later. */
5370
5371 if (mode == BLKmode
5372 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5373 {
1da68f56
RK
5374 rtx object
5375 = assign_temp
a06ef755 5376 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
1da68f56 5377 0, 1, 1);
c4e59f51 5378 rtx blk_object = adjust_address (object, BLKmode, 0);
bbf6f052 5379
8752c357 5380 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
bbf6f052
RK
5381 emit_move_insn (object, target);
5382
a06ef755
RK
5383 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5384 alias_set);
bbf6f052
RK
5385
5386 emit_move_insn (target, object);
5387
a06ef755 5388 /* We want to return the BLKmode version of the data. */
46093b97 5389 return blk_object;
bbf6f052 5390 }
c3b247b4
JM
5391
5392 if (GET_CODE (target) == CONCAT)
5393 {
5394 /* We're storing into a struct containing a single __complex. */
5395
5396 if (bitpos != 0)
5397 abort ();
5398 return store_expr (exp, target, 0);
5399 }
bbf6f052
RK
5400
5401 /* If the structure is in a register or if the component
5402 is a bit field, we cannot use addressing to access it.
5403 Use bit-field techniques or SUBREG to store in it. */
5404
4fa52007 5405 if (mode == VOIDmode
6ab06cbb
JW
5406 || (mode != BLKmode && ! direct_store[(int) mode]
5407 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5408 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4fa52007 5409 || GET_CODE (target) == REG
c980ac49 5410 || GET_CODE (target) == SUBREG
ccc98036
RS
5411 /* If the field isn't aligned enough to store as an ordinary memref,
5412 store it as a bit field. */
04050c69
RK
5413 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5414 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
14a774a9 5415 || bitpos % GET_MODE_ALIGNMENT (mode)))
14a774a9
RK
5416 /* If the RHS and field are a constant size and the size of the
5417 RHS isn't the same size as the bitfield, we must use bitfield
5418 operations. */
05bccae2
RK
5419 || (bitsize >= 0
5420 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5421 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
bbf6f052 5422 {
906c4e36 5423 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 5424
ef19912d
RK
5425 /* If BITSIZE is narrower than the size of the type of EXP
5426 we will be narrowing TEMP. Normally, what's wanted are the
 5427 low-order bits. However, if EXP's type is a record and this is a
 5428 big-endian machine, we want the upper BITSIZE bits. */
5429 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
65a07688 5430 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
ef19912d
RK
5431 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5432 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5433 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5434 - bitsize),
5435 temp, 1);
5436
bbd6cf73
RK
5437 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5438 MODE. */
5439 if (mode != VOIDmode && mode != BLKmode
5440 && mode != TYPE_MODE (TREE_TYPE (exp)))
5441 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5442
a281e72d
RK
5443 /* If the modes of TARGET and TEMP are both BLKmode, both
5444 must be in memory and BITPOS must be aligned on a byte
5445 boundary. If so, we simply do a block copy. */
5446 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5447 {
5448 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5449 || bitpos % BITS_PER_UNIT != 0)
5450 abort ();
5451
f4ef873c 5452 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
a281e72d 5453 emit_block_move (target, temp,
a06ef755 5454 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a
RH
5455 / BITS_PER_UNIT),
5456 BLOCK_OP_NORMAL);
a281e72d
RK
5457
5458 return value_mode == VOIDmode ? const0_rtx : target;
5459 }
5460
bbf6f052 5461 /* Store the value in the bitfield. */
a06ef755
RK
5462 store_bit_field (target, bitsize, bitpos, mode, temp,
5463 int_size_in_bytes (type));
5464
bbf6f052
RK
5465 if (value_mode != VOIDmode)
5466 {
04050c69
RK
5467 /* The caller wants an rtx for the value.
5468 If possible, avoid refetching from the bitfield itself. */
bbf6f052
RK
5469 if (width_mask != 0
5470 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 5471 {
9074de27 5472 tree count;
5c4d7cfb 5473 enum machine_mode tmode;
86a2c12a 5474
5c4d7cfb 5475 tmode = GET_MODE (temp);
86a2c12a
RS
5476 if (tmode == VOIDmode)
5477 tmode = value_mode;
22273300
JJ
5478
5479 if (unsignedp)
5480 return expand_and (tmode, temp,
2496c7bd 5481 gen_int_mode (width_mask, tmode),
22273300
JJ
5482 NULL_RTX);
5483
5c4d7cfb
RS
5484 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5485 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5486 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5487 }
04050c69 5488
bbf6f052 5489 return extract_bit_field (target, bitsize, bitpos, unsignedp,
04050c69 5490 NULL_RTX, value_mode, VOIDmode,
a06ef755 5491 int_size_in_bytes (type));
bbf6f052
RK
5492 }
5493 return const0_rtx;
5494 }
5495 else
5496 {
5497 rtx addr = XEXP (target, 0);
a06ef755 5498 rtx to_rtx = target;
bbf6f052
RK
5499
5500 /* If a value is wanted, it must be the lhs;
5501 so make the address stable for multiple use. */
5502
5503 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5504 && ! CONSTANT_ADDRESS_P (addr)
5505 /* A frame-pointer reference is already stable. */
5506 && ! (GET_CODE (addr) == PLUS
5507 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5508 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5509 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
a06ef755 5510 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
bbf6f052
RK
5511
5512 /* Now build a reference to just the desired component. */
5513
a06ef755
RK
5514 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5515
5516 if (to_rtx == target)
5517 to_rtx = copy_rtx (to_rtx);
792760b9 5518
c6df88cb 5519 MEM_SET_IN_STRUCT_P (to_rtx, 1);
10b76d73 5520 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
a06ef755 5521 set_mem_alias_set (to_rtx, alias_set);
bbf6f052
RK
5522
5523 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5524 }
5525}
5526\f
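A quick way to see what the value-refetch path at the end of
store_field computes: when the caller wants the stored value back and
width_mask is usable, the field is reduced to BITSIZE bits either by an
AND with width_mask (unsigned) or by a left/right shift pair (signed).
A standalone sketch in plain C -- not GCC internals -- assuming a
32-bit value and the usual arithmetic right shift of negatives:

#include <stdio.h>

#define VALUE_BITS 32	/* assumed width for the sketch */

/* The LSHIFT_EXPR/RSHIFT_EXPR pair: move the field's top bit into the
   sign position, then arithmetic-shift back down, duplicating the
   sign bit.  */
static int
refetch_signed (int word, int bitsize)
{
  int count = VALUE_BITS - bitsize;
  return (int) ((unsigned int) word << count) >> count;
}

/* The expand_and path: AND with width_mask == (1 << bitsize) - 1.  */
static unsigned int
refetch_unsigned (unsigned int word, int bitsize)
{
  return word & ((1u << bitsize) - 1);
}

int
main (void)
{
  printf ("%d\n", refetch_signed (0x1f, 5));	/* -1: all five bits set */
  printf ("%u\n", refetch_unsigned (0x1f, 5));	/* 31 */
  return 0;
}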
5527/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
b4e3fabb
RK
5528 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5529 codes and find the ultimate containing object, which we return.
bbf6f052
RK
5530
5531 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5532 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
5533 If the position of the field is variable, we store a tree
5534 giving the variable offset (in units) in *POFFSET.
5535 This offset is in addition to the bit position.
5536 If the position is not variable, we store 0 in *POFFSET.
bbf6f052
RK
5537
5538 If any of the extraction expressions is volatile,
5539 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5540
5541 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5542 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
5543 is redundant.
5544
5545 If the field describes a variable-sized object, *PMODE is set to
5546 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
6d2f8887 5547 this case, but the address of the object can be found. */
bbf6f052
RK
5548
5549tree
4969d05d 5550get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
a06ef755 5551 punsignedp, pvolatilep)
bbf6f052 5552 tree exp;
770ae6cc
RK
5553 HOST_WIDE_INT *pbitsize;
5554 HOST_WIDE_INT *pbitpos;
7bb0943f 5555 tree *poffset;
bbf6f052
RK
5556 enum machine_mode *pmode;
5557 int *punsignedp;
5558 int *pvolatilep;
5559{
5560 tree size_tree = 0;
5561 enum machine_mode mode = VOIDmode;
fed3cef0 5562 tree offset = size_zero_node;
770ae6cc 5563 tree bit_offset = bitsize_zero_node;
738cc472 5564 tree placeholder_ptr = 0;
770ae6cc 5565 tree tem;
bbf6f052 5566
770ae6cc
RK
5567 /* First get the mode, signedness, and size. We do this from just the
5568 outermost expression. */
bbf6f052
RK
5569 if (TREE_CODE (exp) == COMPONENT_REF)
5570 {
5571 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5572 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5573 mode = DECL_MODE (TREE_OPERAND (exp, 1));
770ae6cc 5574
bbf6f052
RK
5575 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5576 }
5577 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5578 {
5579 size_tree = TREE_OPERAND (exp, 1);
5580 *punsignedp = TREE_UNSIGNED (exp);
5581 }
5582 else
5583 {
5584 mode = TYPE_MODE (TREE_TYPE (exp));
770ae6cc
RK
5585 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5586
ab87f8c8
JL
5587 if (mode == BLKmode)
5588 size_tree = TYPE_SIZE (TREE_TYPE (exp));
770ae6cc
RK
5589 else
5590 *pbitsize = GET_MODE_BITSIZE (mode);
bbf6f052 5591 }
3a94c984 5592
770ae6cc 5593 if (size_tree != 0)
bbf6f052 5594 {
770ae6cc 5595 if (! host_integerp (size_tree, 1))
e7c33f54
RK
5596 mode = BLKmode, *pbitsize = -1;
5597 else
770ae6cc 5598 *pbitsize = tree_low_cst (size_tree, 1);
bbf6f052
RK
5599 }
5600
5601 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5602 and find the ultimate containing object. */
bbf6f052
RK
5603 while (1)
5604 {
770ae6cc
RK
5605 if (TREE_CODE (exp) == BIT_FIELD_REF)
5606 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5607 else if (TREE_CODE (exp) == COMPONENT_REF)
bbf6f052 5608 {
770ae6cc
RK
5609 tree field = TREE_OPERAND (exp, 1);
5610 tree this_offset = DECL_FIELD_OFFSET (field);
bbf6f052 5611
e7f3c83f
RK
5612 /* If this field hasn't been filled in yet, don't go
5613 past it. This should only happen when folding expressions
5614 made during type construction. */
770ae6cc 5615 if (this_offset == 0)
e7f3c83f 5616 break;
770ae6cc
RK
5617 else if (! TREE_CONSTANT (this_offset)
5618 && contains_placeholder_p (this_offset))
5619 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
e7f3c83f 5620
7156dead 5621 offset = size_binop (PLUS_EXPR, offset, this_offset);
770ae6cc
RK
5622 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5623 DECL_FIELD_BIT_OFFSET (field));
e6d8c385 5624
a06ef755 5625 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
bbf6f052 5626 }
7156dead 5627
b4e3fabb
RK
5628 else if (TREE_CODE (exp) == ARRAY_REF
5629 || TREE_CODE (exp) == ARRAY_RANGE_REF)
bbf6f052 5630 {
742920c7 5631 tree index = TREE_OPERAND (exp, 1);
b4e3fabb
RK
5632 tree array = TREE_OPERAND (exp, 0);
5633 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
770ae6cc 5634 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
b4e3fabb 5635 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
742920c7 5636
770ae6cc
RK
5637 /* We assume all arrays have sizes that are a multiple of a byte.
5638 First subtract the lower bound, if any, in the type of the
5639 index, then convert to sizetype and multiply by the size of the
5640 array element. */
5641 if (low_bound != 0 && ! integer_zerop (low_bound))
5642 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5643 index, low_bound));
f8dac6eb 5644
7156dead
RK
5645 /* If the index has a self-referential type, pass it to a
 5646 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5647 component to one. */
770ae6cc
RK
5648 if (! TREE_CONSTANT (index)
5649 && contains_placeholder_p (index))
5650 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
7156dead
RK
5651 if (! TREE_CONSTANT (unit_size)
5652 && contains_placeholder_p (unit_size))
b4e3fabb 5653 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
742920c7 5654
770ae6cc
RK
5655 offset = size_binop (PLUS_EXPR, offset,
5656 size_binop (MULT_EXPR,
5657 convert (sizetype, index),
7156dead 5658 unit_size));
bbf6f052 5659 }
7156dead 5660
738cc472
RK
5661 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5662 {
70072ed9
RK
5663 tree new = find_placeholder (exp, &placeholder_ptr);
5664
5665 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5666 We might have been called from tree optimization where we
5667 haven't set up an object yet. */
5668 if (new == 0)
5669 break;
5670 else
5671 exp = new;
5672
738cc472
RK
5673 continue;
5674 }
bbf6f052 5675 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
ed239f5a 5676 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
bbf6f052
RK
5677 && ! ((TREE_CODE (exp) == NOP_EXPR
5678 || TREE_CODE (exp) == CONVERT_EXPR)
5679 && (TYPE_MODE (TREE_TYPE (exp))
5680 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5681 break;
7bb0943f
RS
5682
5683 /* If any reference in the chain is volatile, the effect is volatile. */
5684 if (TREE_THIS_VOLATILE (exp))
5685 *pvolatilep = 1;
839c4796 5686
bbf6f052
RK
5687 exp = TREE_OPERAND (exp, 0);
5688 }
5689
770ae6cc
RK
5690 /* If OFFSET is constant, see if we can return the whole thing as a
5691 constant bit position. Otherwise, split it up. */
5692 if (host_integerp (offset, 0)
5693 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5694 bitsize_unit_node))
5695 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5696 && host_integerp (tem, 0))
5697 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5698 else
5699 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
b50d17a1 5700
bbf6f052 5701 *pmode = mode;
bbf6f052
RK
5702 return exp;
5703}
921b3427 5704
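/* Annotation, not original source: a concrete decomposition.  For a
   COMPONENT_REF s.b where the field is declared "int b : 3" at bit
   offset 8 within s, get_inner_reference returns the reference to s
   itself and sets *pbitsize = 3, *pbitpos = 8, *poffset = 0 (the
   position is constant) and *pmode = VOIDmode, since a bit-field has
   no mode that can address it directly.  */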
ed239f5a
RK
5705/* Return 1 if T is an expression that get_inner_reference handles. */
5706
5707int
5708handled_component_p (t)
5709 tree t;
5710{
5711 switch (TREE_CODE (t))
5712 {
5713 case BIT_FIELD_REF:
5714 case COMPONENT_REF:
5715 case ARRAY_REF:
5716 case ARRAY_RANGE_REF:
5717 case NON_LVALUE_EXPR:
5718 case VIEW_CONVERT_EXPR:
5719 return 1;
5720
5721 case NOP_EXPR:
5722 case CONVERT_EXPR:
5723 return (TYPE_MODE (TREE_TYPE (t))
5724 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5725
5726 default:
5727 return 0;
5728 }
5729}
bbf6f052 5730\f
3fe44edd
RK
5731/* Given an rtx VALUE that may contain additions and multiplications, return
5732 an equivalent value that just refers to a register, memory, or constant.
5733 This is done by generating instructions to perform the arithmetic and
5734 returning a pseudo-register containing the value.
c45a13a6
RK
5735
5736 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
5737
5738rtx
5739force_operand (value, target)
5740 rtx value, target;
5741{
8a28dbcc 5742 rtx op1, op2;
bbf6f052 5743 /* Use subtarget as the target for operand 0 of a binary operation. */
b3694847 5744 rtx subtarget = get_subtarget (target);
8a28dbcc 5745 enum rtx_code code = GET_CODE (value);
bbf6f052 5746
8b015896 5747 /* Check for a PIC address load. */
8a28dbcc 5748 if ((code == PLUS || code == MINUS)
8b015896
RH
5749 && XEXP (value, 0) == pic_offset_table_rtx
5750 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5751 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5752 || GET_CODE (XEXP (value, 1)) == CONST))
5753 {
5754 if (!subtarget)
5755 subtarget = gen_reg_rtx (GET_MODE (value));
5756 emit_move_insn (subtarget, value);
5757 return subtarget;
5758 }
5759
8a28dbcc 5760 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
bbf6f052 5761 {
8a28dbcc
JH
5762 if (!target)
5763 target = gen_reg_rtx (GET_MODE (value));
ce0f3925 5764 convert_move (target, force_operand (XEXP (value, 0), NULL),
8a28dbcc
JH
5765 code == ZERO_EXTEND);
5766 return target;
bbf6f052
RK
5767 }
5768
8a28dbcc 5769 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
bbf6f052
RK
5770 {
5771 op2 = XEXP (value, 1);
8a28dbcc 5772 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
bbf6f052 5773 subtarget = 0;
8a28dbcc 5774 if (code == MINUS && GET_CODE (op2) == CONST_INT)
bbf6f052 5775 {
8a28dbcc 5776 code = PLUS;
bbf6f052
RK
5777 op2 = negate_rtx (GET_MODE (value), op2);
5778 }
5779
5780 /* Check for an addition with OP2 a constant integer and our first
8a28dbcc
JH
5781 operand a PLUS of a virtual register and something else. In that
5782 case, we want to emit the sum of the virtual register and the
5783 constant first and then add the other value. This allows virtual
5784 register instantiation to simply modify the constant rather than
5785 creating another one around this addition. */
5786 if (code == PLUS && GET_CODE (op2) == CONST_INT
bbf6f052
RK
5787 && GET_CODE (XEXP (value, 0)) == PLUS
5788 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5789 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5790 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5791 {
8a28dbcc
JH
5792 rtx temp = expand_simple_binop (GET_MODE (value), code,
5793 XEXP (XEXP (value, 0), 0), op2,
5794 subtarget, 0, OPTAB_LIB_WIDEN);
5795 return expand_simple_binop (GET_MODE (value), code, temp,
5796 force_operand (XEXP (XEXP (value,
5797 0), 1), 0),
5798 target, 0, OPTAB_LIB_WIDEN);
bbf6f052 5799 }
3a94c984 5800
8a28dbcc
JH
5801 op1 = force_operand (XEXP (value, 0), subtarget);
5802 op2 = force_operand (op2, NULL_RTX);
5803 switch (code)
5804 {
5805 case MULT:
5806 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5807 case DIV:
5808 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5809 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5810 target, 1, OPTAB_LIB_WIDEN);
5811 else
5812 return expand_divmod (0,
5813 FLOAT_MODE_P (GET_MODE (value))
5814 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5815 GET_MODE (value), op1, op2, target, 0);
5816 break;
5817 case MOD:
5818 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5819 target, 0);
5820 break;
5821 case UDIV:
5822 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5823 target, 1);
5824 break;
5825 case UMOD:
5826 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5827 target, 1);
5828 break;
5829 case ASHIFTRT:
5830 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5831 target, 0, OPTAB_LIB_WIDEN);
5832 break;
5833 default:
5834 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5835 target, 1, OPTAB_LIB_WIDEN);
5836 }
5837 }
5838 if (GET_RTX_CLASS (code) == '1')
5839 {
5840 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5841 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
bbf6f052 5842 }
34e81b5a
RK
5843
5844#ifdef INSN_SCHEDULING
 5845 /* On machines that have insn scheduling, we want all memory references to be
 5846 explicit, so we need to deal with paradoxical SUBREGs of MEMs. */
5847 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5848 && (GET_MODE_SIZE (GET_MODE (value))
5849 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5850 value
5851 = simplify_gen_subreg (GET_MODE (value),
5852 force_reg (GET_MODE (SUBREG_REG (value)),
5853 force_operand (SUBREG_REG (value),
5854 NULL_RTX)),
5855 GET_MODE (SUBREG_REG (value)),
5856 SUBREG_BYTE (value));
5857#endif
5858
bbf6f052
RK
5859 return value;
5860}
5861\f
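/* Annotation, not original source: given VALUE == (plus (mult (reg N)
   (const_int 4)) (const_int 8)), force_operand recursively forces both
   operands, emits the multiply and the add, and returns a pseudo
   holding the sum.  The virtual-register special case above instead
   adds the constant to the virtual register first, so instantiation
   later has a single constant to adjust.  */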
bbf6f052 5862/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
5863 EXP can reference X, which is being modified. TOP_P is nonzero if this
5864 call is going to be used to determine whether we need a temporary
ff439b5f
CB
5865 for EXP, as opposed to a recursive call to this function.
5866
5867 It is always safe for this routine to return zero since it merely
5868 searches for optimization opportunities. */
bbf6f052 5869
8f17b5c5 5870int
e5e809f4 5871safe_from_p (x, exp, top_p)
bbf6f052
RK
5872 rtx x;
5873 tree exp;
e5e809f4 5874 int top_p;
bbf6f052
RK
5875{
5876 rtx exp_rtl = 0;
5877 int i, nops;
1da68f56 5878 static tree save_expr_list;
bbf6f052 5879
6676e72f
RK
5880 if (x == 0
5881 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
5882 have no way of allocating temporaries of variable size
5883 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5884 So we assume here that something at a higher level has prevented a
f4510f37 5885 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4 5886 do this when X is BLKmode and when we are at the top level. */
d0f062fb 5887 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
f4510f37 5888 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
5889 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5890 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5891 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5892 != INTEGER_CST)
1da68f56
RK
5893 && GET_MODE (x) == BLKmode)
5894 /* If X is in the outgoing argument area, it is always safe. */
5895 || (GET_CODE (x) == MEM
5896 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5897 || (GET_CODE (XEXP (x, 0)) == PLUS
5898 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
bbf6f052
RK
5899 return 1;
5900
5901 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5902 find the underlying pseudo. */
5903 if (GET_CODE (x) == SUBREG)
5904 {
5905 x = SUBREG_REG (x);
5906 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5907 return 0;
5908 }
5909
1da68f56
RK
5910 /* A SAVE_EXPR might appear many times in the expression passed to the
5911 top-level safe_from_p call, and if it has a complex subexpression,
5912 examining it multiple times could result in a combinatorial explosion.
5913 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5914 with optimization took about 28 minutes to compile -- even though it was
5915 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5916 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5917 we have processed. Note that the only test of top_p was above. */
5918
5919 if (top_p)
5920 {
5921 int rtn;
5922 tree t;
5923
5924 save_expr_list = 0;
5925
5926 rtn = safe_from_p (x, exp, 0);
5927
5928 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5929 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5930
5931 return rtn;
5932 }
bbf6f052 5933
1da68f56 5934 /* Now look at our tree code and possibly recurse. */
bbf6f052
RK
5935 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5936 {
5937 case 'd':
a9772b60 5938 exp_rtl = DECL_RTL_IF_SET (exp);
bbf6f052
RK
5939 break;
5940
5941 case 'c':
5942 return 1;
5943
5944 case 'x':
5945 if (TREE_CODE (exp) == TREE_LIST)
f32fd778 5946 return ((TREE_VALUE (exp) == 0
e5e809f4 5947 || safe_from_p (x, TREE_VALUE (exp), 0))
bbf6f052 5948 && (TREE_CHAIN (exp) == 0
e5e809f4 5949 || safe_from_p (x, TREE_CHAIN (exp), 0)));
ff439b5f
CB
5950 else if (TREE_CODE (exp) == ERROR_MARK)
5951 return 1; /* An already-visited SAVE_EXPR? */
bbf6f052
RK
5952 else
5953 return 0;
5954
5955 case '1':
e5e809f4 5956 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052
RK
5957
5958 case '2':
5959 case '<':
e5e809f4
JL
5960 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5961 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
bbf6f052
RK
5962
5963 case 'e':
5964 case 'r':
5965 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5966 the expression. If it is set, we conflict iff we are that rtx or
5967 both are in memory. Otherwise, we check all operands of the
5968 expression recursively. */
5969
5970 switch (TREE_CODE (exp))
5971 {
5972 case ADDR_EXPR:
70072ed9
RK
5973 /* If the operand is static or we are static, we can't conflict.
5974 Likewise if we don't conflict with the operand at all. */
5975 if (staticp (TREE_OPERAND (exp, 0))
5976 || TREE_STATIC (exp)
5977 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5978 return 1;
5979
5980 /* Otherwise, the only way this can conflict is if we are taking
 5981 the address of a DECL whose address is part of X, which is
5982 very rare. */
5983 exp = TREE_OPERAND (exp, 0);
5984 if (DECL_P (exp))
5985 {
5986 if (!DECL_RTL_SET_P (exp)
5987 || GET_CODE (DECL_RTL (exp)) != MEM)
5988 return 0;
5989 else
5990 exp_rtl = XEXP (DECL_RTL (exp), 0);
5991 }
5992 break;
bbf6f052
RK
5993
5994 case INDIRECT_REF:
1da68f56
RK
5995 if (GET_CODE (x) == MEM
5996 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5997 get_alias_set (exp)))
bbf6f052
RK
5998 return 0;
5999 break;
6000
6001 case CALL_EXPR:
f9808f81
MM
6002 /* Assume that the call will clobber all hard registers and
6003 all of memory. */
6004 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6005 || GET_CODE (x) == MEM)
6006 return 0;
bbf6f052
RK
6007 break;
6008
6009 case RTL_EXPR:
3bb5826a
RK
6010 /* If a sequence exists, we would have to scan every instruction
6011 in the sequence to see if it was safe. This is probably not
6012 worthwhile. */
6013 if (RTL_EXPR_SEQUENCE (exp))
bbf6f052
RK
6014 return 0;
6015
3bb5826a 6016 exp_rtl = RTL_EXPR_RTL (exp);
bbf6f052
RK
6017 break;
6018
6019 case WITH_CLEANUP_EXPR:
6ad7895a 6020 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
bbf6f052
RK
6021 break;
6022
5dab5552 6023 case CLEANUP_POINT_EXPR:
e5e809f4 6024 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5dab5552 6025
bbf6f052
RK
6026 case SAVE_EXPR:
6027 exp_rtl = SAVE_EXPR_RTL (exp);
ff439b5f
CB
6028 if (exp_rtl)
6029 break;
6030
1da68f56
RK
6031 /* If we've already scanned this, don't do it again. Otherwise,
6032 show we've scanned it and record for clearing the flag if we're
6033 going on. */
6034 if (TREE_PRIVATE (exp))
6035 return 1;
ff439b5f 6036
1da68f56
RK
6037 TREE_PRIVATE (exp) = 1;
6038 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
ff59bfe6 6039 {
1da68f56
RK
6040 TREE_PRIVATE (exp) = 0;
6041 return 0;
ff59bfe6 6042 }
1da68f56
RK
6043
6044 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
ff439b5f 6045 return 1;
bbf6f052 6046
8129842c
RS
6047 case BIND_EXPR:
6048 /* The only operand we look at is operand 1. The rest aren't
6049 part of the expression. */
e5e809f4 6050 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
8129842c 6051
bbf6f052 6052 case METHOD_CALL_EXPR:
4fe9b91c 6053 /* This takes an rtx argument, but shouldn't appear here. */
bbf6f052 6054 abort ();
3a94c984 6055
e9a25f70
JL
6056 default:
6057 break;
bbf6f052
RK
6058 }
6059
6060 /* If we have an rtx, we do not need to scan our operands. */
6061 if (exp_rtl)
6062 break;
6063
8f17b5c5 6064 nops = first_rtl_op (TREE_CODE (exp));
bbf6f052
RK
6065 for (i = 0; i < nops; i++)
6066 if (TREE_OPERAND (exp, i) != 0
e5e809f4 6067 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052 6068 return 0;
8f17b5c5
MM
6069
6070 /* If this is a language-specific tree code, it may require
6071 special handling. */
dbbbbf3b
JDA
6072 if ((unsigned int) TREE_CODE (exp)
6073 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
ac79cd5a 6074 && !(*lang_hooks.safe_from_p) (x, exp))
8f17b5c5 6075 return 0;
bbf6f052
RK
6076 }
6077
6078 /* If we have an rtl, find any enclosed object. Then see if we conflict
6079 with it. */
6080 if (exp_rtl)
6081 {
6082 if (GET_CODE (exp_rtl) == SUBREG)
6083 {
6084 exp_rtl = SUBREG_REG (exp_rtl);
6085 if (GET_CODE (exp_rtl) == REG
6086 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6087 return 0;
6088 }
6089
6090 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
1da68f56 6091 are memory and they conflict. */
bbf6f052
RK
6092 return ! (rtx_equal_p (x, exp_rtl)
6093 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
21117a17 6094 && true_dependence (exp_rtl, VOIDmode, x,
1da68f56 6095 rtx_addr_varies_p)));
bbf6f052
RK
6096 }
6097
6098 /* If we reach here, it is safe. */
6099 return 1;
6100}
6101
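/* Annotation, not original source: the typical caller of safe_from_p
   is deciding whether TARGET may double as scratch space while
   expanding EXP.  When expanding a = b + f(a), for example, f(a) must
   not be computed into a's rtx; safe_from_p returning 0 there makes
   the expander use a temporary instead.  As the comment above notes,
   returning 0 is always the conservative answer.  */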
01c8a7c8
RK
6102/* Subroutine of expand_expr: return rtx if EXP is a
6103 variable or parameter; else return 0. */
6104
6105static rtx
6106var_rtx (exp)
6107 tree exp;
6108{
6109 STRIP_NOPS (exp);
6110 switch (TREE_CODE (exp))
6111 {
6112 case PARM_DECL:
6113 case VAR_DECL:
6114 return DECL_RTL (exp);
6115 default:
6116 return 0;
6117 }
6118}
dbecbbe4
JL
6119
6120#ifdef MAX_INTEGER_COMPUTATION_MODE
400500c4 6121
dbecbbe4
JL
6122void
6123check_max_integer_computation_mode (exp)
3a94c984 6124 tree exp;
dbecbbe4 6125{
5f652c07 6126 enum tree_code code;
dbecbbe4
JL
6127 enum machine_mode mode;
6128
5f652c07
JM
6129 /* Strip any NOPs that don't change the mode. */
6130 STRIP_NOPS (exp);
6131 code = TREE_CODE (exp);
6132
71bca506
JL
6133 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6134 if (code == NOP_EXPR
6135 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6136 return;
6137
dbecbbe4
JL
6138 /* First check the type of the overall operation. We need only look at
6139 unary, binary and relational operations. */
6140 if (TREE_CODE_CLASS (code) == '1'
6141 || TREE_CODE_CLASS (code) == '2'
6142 || TREE_CODE_CLASS (code) == '<')
6143 {
6144 mode = TYPE_MODE (TREE_TYPE (exp));
6145 if (GET_MODE_CLASS (mode) == MODE_INT
6146 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6147 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
6148 }
6149
6150 /* Check operand of a unary op. */
6151 if (TREE_CODE_CLASS (code) == '1')
6152 {
6153 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6154 if (GET_MODE_CLASS (mode) == MODE_INT
6155 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6156 internal_error ("unsupported wide integer operation");
dbecbbe4 6157 }
3a94c984 6158
dbecbbe4
JL
6159 /* Check operands of a binary/comparison op. */
6160 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6161 {
6162 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6163 if (GET_MODE_CLASS (mode) == MODE_INT
6164 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6165 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
6166
6167 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6168 if (GET_MODE_CLASS (mode) == MODE_INT
6169 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6170 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
6171 }
6172}
6173#endif
14a774a9 6174\f
0d4903b8
RK
6175/* Return the highest power of two that EXP is known to be a multiple of.
6176 This is used in updating alignment of MEMs in array references. */
6177
6178static HOST_WIDE_INT
6179highest_pow2_factor (exp)
6180 tree exp;
6181{
6182 HOST_WIDE_INT c0, c1;
6183
6184 switch (TREE_CODE (exp))
6185 {
6186 case INTEGER_CST:
e0f1be5c
JJ
6187 /* We can find the lowest bit that's a one. If the low
6188 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6189 We need to handle this case since we can find it in a COND_EXPR,
 6190 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6191 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
3a531a8b 6192 later ICE. */
e0f1be5c 6193 if (TREE_CONSTANT_OVERFLOW (exp))
1ed1b4fb 6194 return BIGGEST_ALIGNMENT;
e0f1be5c 6195 else
0d4903b8 6196 {
e0f1be5c
JJ
6197 /* Note: tree_low_cst is intentionally not used here,
6198 we don't care about the upper bits. */
6199 c0 = TREE_INT_CST_LOW (exp);
6200 c0 &= -c0;
6201 return c0 ? c0 : BIGGEST_ALIGNMENT;
0d4903b8
RK
6202 }
6203 break;
6204
65a07688 6205 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
0d4903b8
RK
6206 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6207 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6208 return MIN (c0, c1);
6209
6210 case MULT_EXPR:
6211 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6212 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6213 return c0 * c1;
6214
6215 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6216 case CEIL_DIV_EXPR:
65a07688
RK
6217 if (integer_pow2p (TREE_OPERAND (exp, 1))
6218 && host_integerp (TREE_OPERAND (exp, 1), 1))
6219 {
6220 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6221 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6222 return MAX (1, c0 / c1);
6223 }
6224 break;
0d4903b8
RK
6225
6226 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
65a07688 6227 case SAVE_EXPR: case WITH_RECORD_EXPR:
0d4903b8
RK
6228 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6229
65a07688
RK
6230 case COMPOUND_EXPR:
6231 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6232
0d4903b8
RK
6233 case COND_EXPR:
6234 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6235 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6236 return MIN (c0, c1);
6237
6238 default:
6239 break;
6240 }
6241
6242 return 1;
6243}
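The recursion above amounts to tracking known low-order zero bits: the
INTEGER_CST case isolates the lowest set bit with c & -c, MULT
multiplies the factors, and PLUS/MINUS take the MIN.  A standalone
model of one evaluation in plain C -- not GCC internals -- where the
BIGGEST_ALIGNMENT fallback is taken as 1 << 28 purely for the sketch:

#include <stdio.h>

#define BIGGEST_ALIGNMENT_SKETCH (1L << 28)	/* assumed fallback */

/* The INTEGER_CST case above: c & -c isolates the lowest set bit;
   a zero low part falls back to the maximum alignment.  */
static long
pow2_of_const (long c)
{
  long low = c & -c;
  return low ? low : BIGGEST_ALIGNMENT_SKETCH;
}

int
main (void)
{
  /* For (n * 24) + 40 with nothing known about n (factor 1):
     the MULT case gives 1 * pow2(24) == 8 and the PLUS case gives
     MIN (8, pow2(40)) == 8, so a MEM addressed by the expression
     is known to be 8-byte aligned.  */
  long mult = 1 * pow2_of_const (24);
  long plus = mult < pow2_of_const (40) ? mult : pow2_of_const (40);
  printf ("%ld\n", plus);	/* prints 8 */
  return 0;
}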
818c0c94
RH
6244
6245/* Similar, except that it is known that the expression must be a multiple
6246 of the alignment of TYPE. */
6247
6248static HOST_WIDE_INT
6249highest_pow2_factor_for_type (type, exp)
6250 tree type;
6251 tree exp;
6252{
6253 HOST_WIDE_INT type_align, factor;
6254
6255 factor = highest_pow2_factor (exp);
6256 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6257 return MAX (factor, type_align);
6258}
0d4903b8 6259\f
f47e9b4e
RK
6260/* Return an object on the placeholder list that matches EXP, a
6261 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
738cc472 6262 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
70072ed9
RK
 6263 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
 6264 points to a starting position in the
738cc472
RK
 6265 placeholder list (zero means the start of the list), and a pointer to
 6266 the list entry at which the object is found is stored back through it. */
f47e9b4e
RK
6267
6268tree
6269find_placeholder (exp, plist)
6270 tree exp;
6271 tree *plist;
6272{
6273 tree type = TREE_TYPE (exp);
6274 tree placeholder_expr;
6275
738cc472
RK
6276 for (placeholder_expr
6277 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6278 placeholder_expr != 0;
f47e9b4e
RK
6279 placeholder_expr = TREE_CHAIN (placeholder_expr))
6280 {
6281 tree need_type = TYPE_MAIN_VARIANT (type);
6282 tree elt;
6283
6284 /* Find the outermost reference that is of the type we want. If none,
6285 see if any object has a type that is a pointer to the type we
6286 want. */
6287 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6288 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6289 || TREE_CODE (elt) == COND_EXPR)
6290 ? TREE_OPERAND (elt, 1)
6291 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6292 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6293 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6294 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6295 ? TREE_OPERAND (elt, 0) : 0))
6296 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6297 {
6298 if (plist)
6299 *plist = placeholder_expr;
6300 return elt;
6301 }
6302
6303 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6304 elt
6305 = ((TREE_CODE (elt) == COMPOUND_EXPR
6306 || TREE_CODE (elt) == COND_EXPR)
6307 ? TREE_OPERAND (elt, 1)
6308 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6309 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6310 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6311 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6312 ? TREE_OPERAND (elt, 0) : 0))
6313 if (POINTER_TYPE_P (TREE_TYPE (elt))
6314 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6315 == need_type))
6316 {
6317 if (plist)
6318 *plist = placeholder_expr;
6319 return build1 (INDIRECT_REF, need_type, elt);
6320 }
6321 }
6322
70072ed9 6323 return 0;
f47e9b4e
RK
6324}
6325\f
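/* Annotation, not original source: PLACEHOLDER_EXPRs arise from
   self-referential types, e.g. an Ada record whose field positions
   depend on the object's own discriminants.  WITH_RECORD_EXPR (via
   the placeholder_list maintained during expansion) supplies
   candidate objects; find_placeholder picks the first whose type
   matches, and builds an INDIRECT_REF when only a pointer to the
   needed type is on the list.  */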
bbf6f052
RK
6326/* expand_expr: generate code for computing expression EXP.
6327 An rtx for the computed value is returned. The value is never null.
6328 In the case of a void EXP, const0_rtx is returned.
6329
6330 The value may be stored in TARGET if TARGET is nonzero.
6331 TARGET is just a suggestion; callers must assume that
6332 the rtx returned may not be the same as TARGET.
6333
6334 If TARGET is CONST0_RTX, it means that the value will be ignored.
6335
6336 If TMODE is not VOIDmode, it suggests generating the
6337 result in mode TMODE. But this is done only when convenient.
 6338 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6339 TMODE is just a suggestion; callers must assume that
6340 the rtx returned may not have mode TMODE.
6341
d6a5ac33
RK
6342 Note that TARGET may have neither TMODE nor MODE. In that case, it
6343 probably will not be used.
bbf6f052
RK
6344
6345 If MODIFIER is EXPAND_SUM then when EXP is an addition
6346 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6347 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6348 products as above, or REG or MEM, or constant.
6349 Ordinarily in such cases we would output mul or add instructions
6350 and then return a pseudo reg containing the sum.
6351
6352 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6353 it also marks a label as absolutely required (it can't be dead).
26fcb35a 6354 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
6355 This is used for outputting expressions used in initializers.
6356
6357 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6358 with a constant address even if that address is not normally legitimate.
6359 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
bbf6f052
RK
6360
6361rtx
6362expand_expr (exp, target, tmode, modifier)
b3694847 6363 tree exp;
bbf6f052
RK
6364 rtx target;
6365 enum machine_mode tmode;
6366 enum expand_modifier modifier;
6367{
b3694847 6368 rtx op0, op1, temp;
bbf6f052
RK
6369 tree type = TREE_TYPE (exp);
6370 int unsignedp = TREE_UNSIGNED (type);
b3694847
SS
6371 enum machine_mode mode;
6372 enum tree_code code = TREE_CODE (exp);
bbf6f052 6373 optab this_optab;
68557e14
ML
6374 rtx subtarget, original_target;
6375 int ignore;
bbf6f052
RK
6376 tree context;
6377
3a94c984 6378 /* Handle ERROR_MARK before anybody tries to access its type. */
85f3d674 6379 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
68557e14
ML
6380 {
6381 op0 = CONST0_RTX (tmode);
6382 if (op0 != 0)
6383 return op0;
6384 return const0_rtx;
6385 }
6386
6387 mode = TYPE_MODE (type);
6388 /* Use subtarget as the target for operand 0 of a binary operation. */
296b4ed9 6389 subtarget = get_subtarget (target);
68557e14
ML
6390 original_target = target;
6391 ignore = (target == const0_rtx
6392 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6393 || code == CONVERT_EXPR || code == REFERENCE_EXPR
ac79cd5a 6394 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
68557e14
ML
6395 && TREE_CODE (type) == VOID_TYPE));

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
        return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
         don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
          && TREE_CODE (exp) != FUNCTION_DECL
          && mode != VOIDmode && mode != BLKmode
          && modifier != EXPAND_CONST_ADDRESS)
        {
          temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
          if (GET_CODE (temp) == MEM)
            temp = copy_to_reg (temp);
          return const0_rtx;
        }

      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
          || code == INDIRECT_REF || code == BUFFER_REF)
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                            modifier);

      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
               || code == ARRAY_REF || code == ARRAY_RANGE_REF)
        {
          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
          return const0_rtx;
        }
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
               && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        /* If the second operand has no side effects, just evaluate
           the first.  */
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                            modifier);
      else if (code == BIT_FIELD_REF)
        {
          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
          expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
          return const0_rtx;
        }

      target = 0;
    }

#ifdef MAX_INTEGER_COMPUTATION_MODE
  /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
     will handle it.  Do we really need to check this stuff at all?  */

  if (target
      && GET_MODE (target) != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != RTL_EXPR)
    {
      enum machine_mode mode = GET_MODE (target);

      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        internal_error ("unsupported wide integer operation");
    }

  if (tmode != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != RTL_EXPR
      && GET_MODE_CLASS (tmode) == MODE_INT
      && tmode > MAX_INTEGER_COMPUTATION_MODE)
    internal_error ("unsupported wide integer operation");

  check_max_integer_computation_mode (exp);
#endif

  /* If we will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  An exception is a CONSTRUCTOR into a multi-word
     MEM: that's much more likely to be most efficient into the MEM.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
      && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
    target = subtarget;

  switch (code)
    {
    case LABEL_DECL:
      {
        tree function = decl_function_context (exp);
        /* Handle using a label in a containing function.  */
        if (function != current_function_decl
            && function != inline_function_decl && function != 0)
          {
            struct function *p = find_function_data (function);
            p->expr->x_forced_labels
              = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
                                   p->expr->x_forced_labels);
          }
        else
          {
            if (modifier == EXPAND_INITIALIZER)
              forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
                                                 label_rtx (exp),
                                                 forced_labels);
          }

        temp = gen_rtx_MEM (FUNCTION_MODE,
                            gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
        if (function != current_function_decl
            && function != inline_function_decl && function != 0)
          LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
        return temp;
      }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
        {
          error_with_decl (exp, "prior parameter's size depends on `%s'");
          return CONST0_RTX (mode);
        }

      /* ... fall through ...  */

    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
         but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
          && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
        {
          rtx value = DECL_RTL_IF_SET (exp);

          layout_decl (exp, 0);

          /* If the RTL was already set, update its mode and memory
             attributes.  */
          if (value != 0)
            {
              PUT_MODE (value, DECL_MODE (exp));
              SET_DECL_RTL (exp, 0);
              set_mem_attributes (value, exp, 1);
              SET_DECL_RTL (exp, value);
            }
        }

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
        abort ();

      /* Ensure the variable is marked as used even if it doesn't go through
         a parser.  If it hasn't been used yet, write out an external
         definition.  */
      if (! TREE_USED (exp))
        {
          assemble_external (exp);
          TREE_USED (exp) = 1;
        }

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
          && context != inline_function_decl
          /* If var is static, we don't need a static chain to access it.  */
          && ! (GET_CODE (DECL_RTL (exp)) == MEM
                && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
        {
          rtx addr;

          /* Mark as non-local and addressable.  */
          DECL_NONLOCAL (exp) = 1;
          if (DECL_NO_STATIC_CHAIN (current_function_decl))
            abort ();
          (*lang_hooks.mark_addressable) (exp);
          if (GET_CODE (DECL_RTL (exp)) != MEM)
            abort ();
          addr = XEXP (DECL_RTL (exp), 0);
          if (GET_CODE (addr) == MEM)
            addr
              = replace_equiv_address (addr,
                                       fix_lexical_addr (XEXP (addr, 0), exp));
          else
            addr = fix_lexical_addr (addr, exp);

          temp = replace_equiv_address (DECL_RTL (exp), addr);
        }

      /* This is the case of an array whose size is to be determined
         from its initializer, while the initializer is still being parsed.
         See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
               && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
        temp = validize_mem (DECL_RTL (exp));

      /* If DECL_RTL is memory, we are in the normal case and either
         the address is not valid or it is not a register and -fforce-addr
         is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
               && modifier != EXPAND_CONST_ADDRESS
               && modifier != EXPAND_SUM
               && modifier != EXPAND_INITIALIZER
               && (! memory_address_p (DECL_MODE (exp),
                                       XEXP (DECL_RTL (exp), 0))
                   || (flag_force_addr
                       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
        temp = replace_equiv_address (DECL_RTL (exp),
                                      copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
         if the address is a register.  */
      if (temp != 0)
        {
          if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
            mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

          return temp;
        }

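      /* A promoted DECL_RTL arises on targets whose PROMOTE_MODE keeps
         narrow variables in wider registers: e.g. a QImode variable may
         live in an SImode register.  The SUBREG created below records,
         via SUBREG_PROMOTED_VAR_P and the signedness flag, that the
         register already holds a correctly extended value, so redundant
         extensions can be skipped later.  */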
      /* If the mode of DECL_RTL does not match that of the decl, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
          && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
        {
          /* Get the signedness used for this variable.  Ensure we get the
             same mode we got when the variable was declared.  */
          if (GET_MODE (DECL_RTL (exp))
              != promote_mode (type, DECL_MODE (exp), &unsignedp,
                               (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
            abort ();

          temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
          return temp;
        }

      return DECL_RTL (exp);

    case INTEGER_CST:
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
                                 TREE_INT_CST_HIGH (exp), mode);

      /* ??? If overflow is set, fold will have done an incomplete job,
         which can result in (plus xx (const_int 0)), which can get
         simplified by validate_replace_rtx during virtual register
         instantiation, which can result in unrecognizable insns.
         Avoid this by forcing all overflows into registers.  */
      if (TREE_CONSTANT_OVERFLOW (exp)
          && modifier != EXPAND_INITIALIZER)
        temp = force_reg (mode, temp);

      return temp;

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
         which will be turned into memory by reload if necessary.

         We used to force a register so that loop.c could see it.  But
         this does not allow gen_* patterns to perform optimizations with
         the constants.  It also produces two insns in cases like "x = 1.0;".
         On most machines, floating-point constants are not permitted in
         many insns, so we'd end up copying it to a register in any case.

         Now, we do the copying in expand_binop, if appropriate.  */
      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
                                           TYPE_MODE (TREE_TYPE (exp)));

    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
        output_constant_def (exp, 1);

      /* TREE_CST_RTL probably contains a constant address.
         On RISC machines where a constant address isn't valid,
         make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
          && modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM
          && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
              || (flag_force_addr
                  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
        return replace_equiv_address (TREE_CST_RTL (exp),
                                      copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);

    case EXPR_WITH_FILE_LOCATION:
      {
        rtx to_return;
        const char *saved_input_filename = input_filename;
        int saved_lineno = lineno;
        input_filename = EXPR_WFL_FILENAME (exp);
        lineno = EXPR_WFL_LINENO (exp);
        if (EXPR_WFL_EMIT_LINE_NOTE (exp))
          emit_line_note (input_filename, lineno);
        /* Possibly avoid switching back and forth here.  */
        to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
        input_filename = saved_input_filename;
        lineno = saved_lineno;
        return to_return;
      }
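
      /* A SAVE_EXPR is expanded only once: the first expansion computes
         the value into SAVE_EXPR_RTL, and every later expansion of the
         same node simply returns that RTL, so side effects in the
         operand happen exactly once.  */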
    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* If this SAVE_EXPR was at global context, assume we are an
         initialization function and move it into our context.  */
      if (context == 0)
        SAVE_EXPR_CONTEXT (exp) = current_function_decl;

      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
        context = 0;

      /* If this is non-local, handle it.  */
      if (context)
        {
          /* The following call just exists to abort if the context is
             not of a containing function.  */
          find_function_data (context);

          temp = SAVE_EXPR_RTL (exp);
          if (temp && GET_CODE (temp) == REG)
            {
              put_var_into_stack (exp);
              temp = SAVE_EXPR_RTL (exp);
            }
          if (temp == 0 || GET_CODE (temp) != MEM)
            abort ();
          return
            replace_equiv_address (temp,
                                   fix_lexical_addr (XEXP (temp, 0), exp));
        }
      if (SAVE_EXPR_RTL (exp) == 0)
        {
          if (mode == VOIDmode)
            temp = const0_rtx;
          else
            temp = assign_temp (build_qualified_type (type,
                                                      (TYPE_QUALS (type)
                                                       | TYPE_QUAL_CONST)),
                                3, 0, 0);

          SAVE_EXPR_RTL (exp) = temp;
          if (!optimize && GET_CODE (temp) == REG)
            save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
                                                save_expr_regs);

          /* If the mode of TEMP does not match that of the expression, it
             must be a promoted value.  We pass store_expr a SUBREG of the
             wanted mode but mark it so that we know that it was already
             extended.  Note that `unsignedp' was modified above in
             this case.  */

          if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
            {
              temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
              SUBREG_PROMOTED_VAR_P (temp) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
            }

          if (temp == const0_rtx)
            expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
          else
            store_expr (TREE_OPERAND (exp, 0), temp, 0);

          TREE_USED (exp) = 1;
        }

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
          && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
        {
          /* Compute the signedness and make the proper SUBREG.  */
          promote_mode (type, mode, &unsignedp, 0);
          temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
          return temp;
        }

      return SAVE_EXPR_RTL (exp);

    case UNSAVE_EXPR:
      {
        rtx temp;
        temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
        TREE_OPERAND (exp, 0)
          = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
        return temp;
      }

    case PLACEHOLDER_EXPR:
      {
        tree old_list = placeholder_list;
        tree placeholder_expr = 0;

        exp = find_placeholder (exp, &placeholder_expr);
        if (exp == 0)
          abort ();

        placeholder_list = TREE_CHAIN (placeholder_expr);
        temp = expand_expr (exp, original_target, tmode, modifier);
        placeholder_list = old_list;
        return temp;
      }

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
         and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
                                    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
                            modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;
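
      /* These two codes cooperate: WITH_RECORD_EXPR pushes its record
         object on placeholder_list while its first operand is expanded,
         and find_placeholder resolves any PLACEHOLDER_EXPR inside that
         operand against the list.  This is how size expressions of
         fields in variable-sized records refer back to the record.  */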

    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
        expand_goto (TREE_OPERAND (exp, 0));
      else
        expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL,
                                 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;

    case LABELED_BLOCK_EXPR:
      if (LABELED_BLOCK_BODY (exp))
        expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
      /* Should perhaps use expand_label, but this is simpler and safer.  */
      do_pending_stack_adjust ();
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;

    case EXIT_BLOCK_EXPR:
      if (EXIT_BLOCK_RETURN (exp))
        sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;

    case BIND_EXPR:
      {
        tree vars = TREE_OPERAND (exp, 0);
        int vars_need_expansion = 0;

        /* Need to open a binding contour here because
           if there are any cleanups they must be contained here.  */
        expand_start_bindings (2);

        /* Mark the corresponding BLOCK for output in its proper place.  */
        if (TREE_OPERAND (exp, 2) != 0
            && ! TREE_USED (TREE_OPERAND (exp, 2)))
          (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));

        /* If VARS have not yet been expanded, expand them now.  */
        while (vars)
          {
            if (!DECL_RTL_SET_P (vars))
              {
                vars_need_expansion = 1;
                expand_decl (vars);
              }
            expand_decl_init (vars);
            vars = TREE_CHAIN (vars);
          }

        temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);

        expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

        return temp;
      }

    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
        {
          if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
            abort ();
          emit_insn (RTL_EXPR_SEQUENCE (exp));
          RTL_EXPR_SEQUENCE (exp) = const0_rtx;
        }
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);
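
      /* A CONSTRUCTOR is expanded one of three ways: if the value is
         ignored, just evaluate the elements for side effects; if it is
         static or a constant initializer, emit it into the constant pool
         via output_constant_def; otherwise build it up field by field
         with store_constructor, into TARGET or a fresh temporary.  */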
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
         subexpressions.  */
      if (ignore)
        {
          tree elt;

          for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
            expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);

          return const0_rtx;
        }

      /* All elts simple constants => refer to a constant in memory.  But
         if this is a non-BLKmode mode, let it store a field at a time
         since that should make a CONST_INT or CONST_DOUBLE when we
         fold.  Likewise, if we have a target we can use, it is best to
         store directly into the target unless the type is large enough
         that memcpy will be used.  If we are making an initializer and
         all operands are constant, put it in memory as well.

         FIXME: Avoid trying to fill vector constructors piecemeal.
         Output them with output_constant_def below unless we're sure
         they're zeros.  This should go away when vector initializers
         are treated like VECTOR_CST instead of arrays.  */
      else if ((TREE_STATIC (exp)
                && ((mode == BLKmode
                     && ! (target != 0 && safe_from_p (target, exp, 1)))
                    || TREE_ADDRESSABLE (exp)
                    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
                        && (! MOVE_BY_PIECES_P
                            (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
                             TYPE_ALIGN (type)))
                        && ((TREE_CODE (type) == VECTOR_TYPE
                             && !is_zeros_p (exp))
                            || ! mostly_zeros_p (exp)))))
               || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
        {
          rtx constructor = output_constant_def (exp, 1);

          if (modifier != EXPAND_CONST_ADDRESS
              && modifier != EXPAND_INITIALIZER
              && modifier != EXPAND_SUM)
            constructor = validize_mem (constructor);

          return constructor;
        }
      else
        {
          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (target == 0 || ! safe_from_p (target, exp, 1)
              || GET_CODE (target) == PARALLEL)
            target
              = assign_temp (build_qualified_type (type,
                                                   (TYPE_QUALS (type)
                                                    | (TREE_READONLY (exp)
                                                       * TYPE_QUAL_CONST))),
                             0, TREE_ADDRESSABLE (exp), 1);

          store_constructor (exp, target, 0, int_expr_size (exp));
          return target;
        }

    case INDIRECT_REF:
      {
        tree exp1 = TREE_OPERAND (exp, 0);
        tree index;
        tree string = string_constant (exp1, &index);

        /* Try to optimize reads from const strings.  */
        if (string
            && TREE_CODE (string) == STRING_CST
            && TREE_CODE (index) == INTEGER_CST
            && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
            && GET_MODE_CLASS (mode) == MODE_INT
            && GET_MODE_SIZE (mode) == 1
            && modifier != EXPAND_WRITE)
          return gen_int_mode (TREE_STRING_POINTER (string)
                               [TREE_INT_CST_LOW (index)], mode);

        op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
        op0 = memory_address (mode, op0);
        temp = gen_rtx_MEM (mode, op0);
        set_mem_attributes (temp, exp, 0);

        /* If we are writing to this object and its type is a record with
           readonly fields, we must mark it as readonly so it will
           conflict with readonly references to those fields.  */
        if (modifier == EXPAND_WRITE && readonly_fields_p (type))
          RTX_UNCHANGING_P (temp) = 1;

        return temp;
      }

    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
        abort ();

      {
        tree array = TREE_OPERAND (exp, 0);
        tree domain = TYPE_DOMAIN (TREE_TYPE (array));
        tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
        tree index = convert (sizetype, TREE_OPERAND (exp, 1));
        HOST_WIDE_INT i;

        /* Optimize the special-case of a zero lower bound.

           We convert the low_bound to sizetype to avoid some problems
           with constant folding.  (E.g. suppose the lower bound is 1,
           and its mode is QI.  Without the conversion, (ARRAY
           +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
           +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */

        if (! integer_zerop (low_bound))
          index = size_diffop (index, convert (sizetype, low_bound));

        /* Fold an expression like: "foo"[2].
           This is not done in fold so it won't happen inside &.
           Don't fold if this is for wide characters since it's too
           difficult to do correctly and this is a very rare case.  */

        if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
            && TREE_CODE (array) == STRING_CST
            && TREE_CODE (index) == INTEGER_CST
            && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
            && GET_MODE_CLASS (mode) == MODE_INT
            && GET_MODE_SIZE (mode) == 1)
          return gen_int_mode (TREE_STRING_POINTER (array)
                               [TREE_INT_CST_LOW (index)], mode);

        /* If this is a constant index into a constant array,
           just get the value from the array.  Handle both the cases when
           we have an explicit constructor and when our operand is a variable
           that was declared const.  */

        if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
            && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
            && TREE_CODE (index) == INTEGER_CST
            && 0 > compare_tree_int (index,
                                     list_length (CONSTRUCTOR_ELTS
                                                  (TREE_OPERAND (exp, 0)))))
          {
            tree elem;

            for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
                 i = TREE_INT_CST_LOW (index);
                 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
              ;

            if (elem)
              return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
                                  modifier);
          }

        else if (optimize >= 1
                 && modifier != EXPAND_CONST_ADDRESS
                 && modifier != EXPAND_INITIALIZER
                 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
                 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
                 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
          {
            if (TREE_CODE (index) == INTEGER_CST)
              {
                tree init = DECL_INITIAL (array);

                if (TREE_CODE (init) == CONSTRUCTOR)
                  {
                    tree elem;

                    for (elem = CONSTRUCTOR_ELTS (init);
                         (elem
                          && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
                         elem = TREE_CHAIN (elem))
                      ;

                    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
                      return expand_expr (fold (TREE_VALUE (elem)), target,
                                          tmode, modifier);
                  }
                else if (TREE_CODE (init) == STRING_CST
                         && 0 > compare_tree_int (index,
                                                  TREE_STRING_LENGTH (init)))
                  {
                    tree type = TREE_TYPE (TREE_TYPE (init));
                    enum machine_mode mode = TYPE_MODE (type);

                    if (GET_MODE_CLASS (mode) == MODE_INT
                        && GET_MODE_SIZE (mode) == 1)
                      return gen_int_mode (TREE_STRING_POINTER (init)
                                           [TREE_INT_CST_LOW (index)], mode);
                  }
              }
          }
      }
      /* Fall through.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
         appropriate field if it is present.  Don't do this if we have
         already written the data since we want to refer to that copy
         and varasm.c assumes that's what we'll do.  */
      if (code == COMPONENT_REF
          && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
          && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
        {
          tree elt;

          for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
               elt = TREE_CHAIN (elt))
            if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
                /* We can normally use the value of the field in the
                   CONSTRUCTOR.  However, if this is a bitfield in
                   an integral mode that we can fit in a HOST_WIDE_INT,
                   we must mask only the number of bits in the bitfield,
                   since this is done implicitly by the constructor.  If
                   the bitfield does not meet either of those conditions,
                   we can't do this optimization.  */
                && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
                    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
                         == MODE_INT)
                        && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
                            <= HOST_BITS_PER_WIDE_INT))))
              {
                op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
                if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
                  {
                    HOST_WIDE_INT bitsize
                      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
                    enum machine_mode imode
                      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));

                    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
                      {
                        op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
                        op0 = expand_and (imode, op0, op1, target);
                      }
                    else
                      {
                        tree count
                          = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
                                         0);

                        op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                        op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                      }
                  }

                return op0;
              }
        }
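
      /* The masking above reproduces what storing into the constructor
         would have done implicitly: e.g. a 3-bit unsigned bitfield is
         ANDed with 7, while a signed one is shifted left and back right
         arithmetically so the value comes out sign-extended.  */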

      {
        enum machine_mode mode1;
        HOST_WIDE_INT bitsize, bitpos;
        tree offset;
        int volatilep = 0;
        tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                        &mode1, &unsignedp, &volatilep);
        rtx orig_op0;

        /* If we got back the original object, something is wrong.  Perhaps
           we are evaluating an expression too early.  In any event, don't
           infinitely recurse.  */
        if (tem == exp)
          abort ();

        /* If TEM's type is a union of variable size, pass TARGET to the inner
           computation, since it will need a temporary and TARGET is known
           to be usable as one.  This occurs in unchecked conversion in Ada.  */

        orig_op0 = op0
          = expand_expr (tem,
                         (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
                          && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
                              != INTEGER_CST)
                          ? target : NULL_RTX),
                         VOIDmode,
                         (modifier == EXPAND_INITIALIZER
                          || modifier == EXPAND_CONST_ADDRESS)
                         ? modifier : EXPAND_NORMAL);
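
        /* At this point get_inner_reference has split the reference into
           the innermost object TEM plus a constant bit position BITPOS,
           a width BITSIZE, and possibly a variable byte offset OFFSET:
           e.g. for s.f with F placed 4 bytes into S, TEM is S, BITPOS
           is 32 and OFFSET is 0.  */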

        /* If this is a constant, put it into a register if it is a
           legitimate constant and OFFSET is 0 and memory if it isn't.  */
        if (CONSTANT_P (op0))
          {
            enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
            if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
                && offset == 0)
              op0 = force_reg (mode, op0);
            else
              op0 = validize_mem (force_const_mem (mode, op0));
          }

        if (offset != 0)
          {
            rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);

            /* If this object is in a register, put it into memory.
               This case can't occur in C, but can in Ada if we have
               unchecked conversion of an expression from a scalar type to
               an array or record type.  */
            if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
                || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
              {
                /* If the operand is a SAVE_EXPR, we can deal with this by
                   forcing the SAVE_EXPR into memory.  */
                if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
                  {
                    put_var_into_stack (TREE_OPERAND (exp, 0));
                    op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
                  }
                else
                  {
                    tree nt
                      = build_qualified_type (TREE_TYPE (tem),
                                              (TYPE_QUALS (TREE_TYPE (tem))
                                               | TYPE_QUAL_CONST));
                    rtx memloc = assign_temp (nt, 1, 1, 1);

                    emit_move_insn (memloc, op0);
                    op0 = memloc;
                  }
              }

            if (GET_CODE (op0) != MEM)
              abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
            if (GET_MODE (offset_rtx) != Pmode)
              offset_rtx = convert_memory_address (Pmode, offset_rtx);
#else
            if (GET_MODE (offset_rtx) != ptr_mode)
              offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

            /* A constant address in OP0 can have VOIDmode, we must not try
               to call force_reg for that case.  Avoid that case.  */
            if (GET_CODE (op0) == MEM
                && GET_MODE (op0) == BLKmode
                && GET_MODE (XEXP (op0, 0)) != VOIDmode
                && bitsize != 0
                && (bitpos % bitsize) == 0
                && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
                && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
              {
                op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
                bitpos = 0;
              }

            op0 = offset_address (op0, offset_rtx,
                                  highest_pow2_factor (offset));
          }

        /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
           record its alignment as BIGGEST_ALIGNMENT.  */
        if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
            && is_aligning_offset (offset, tem))
          set_mem_align (op0, BIGGEST_ALIGNMENT);

        /* Don't forget about volatility even if this is a bitfield.  */
        if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
          {
            if (op0 == orig_op0)
              op0 = copy_rtx (op0);

            MEM_VOLATILE_P (op0) = 1;
          }

        /* The following code doesn't handle CONCAT.
           Assume only bitpos == 0 can be used for CONCAT, due to
           one-element arrays having the same mode as their element.  */
        if (GET_CODE (op0) == CONCAT)
          {
            if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
              abort ();
            return op0;
          }
7323
ccc98036
RS
7324 /* In cases where an aligned union has an unaligned object
7325 as a field, we might be extracting a BLKmode value from
7326 an integer-mode (e.g., SImode) object. Handle this case
7327 by doing the extract into an object as wide as the field
7328 (which we know to be the width of a basic mode), then
cb5fa0f8 7329 storing into memory, and changing the mode to BLKmode. */
bbf6f052 7330 if (mode1 == VOIDmode
ccc98036 7331 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
cb5fa0f8
RK
7332 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7333 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10c2a453
RK
7334 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7335 && modifier != EXPAND_CONST_ADDRESS
7336 && modifier != EXPAND_INITIALIZER)
cb5fa0f8
RK
7337 /* If the field isn't aligned enough to fetch as a memref,
7338 fetch it as a bit field. */
7339 || (mode1 != BLKmode
38b3baae 7340 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
cb5fa0f8
RK
7341 && ((TYPE_ALIGN (TREE_TYPE (tem))
7342 < GET_MODE_ALIGNMENT (mode))
7343 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7344 /* If the type and the field are a constant size and the
7345 size of the type isn't the same size as the bitfield,
7346 we must use bitfield operations. */
7347 || (bitsize >= 0
7348 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7349 == INTEGER_CST)
7350 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
a06ef755 7351 bitsize)))
bbf6f052 7352 {
bbf6f052
RK
7353 enum machine_mode ext_mode = mode;
7354
14a774a9
RK
7355 if (ext_mode == BLKmode
7356 && ! (target != 0 && GET_CODE (op0) == MEM
7357 && GET_CODE (target) == MEM
7358 && bitpos % BITS_PER_UNIT == 0))
bbf6f052
RK
7359 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7360
7361 if (ext_mode == BLKmode)
a281e72d
RK
7362 {
7363 /* In this case, BITPOS must start at a byte boundary and
7364 TARGET, if specified, must be a MEM. */
7365 if (GET_CODE (op0) != MEM
7366 || (target != 0 && GET_CODE (target) != MEM)
7367 || bitpos % BITS_PER_UNIT != 0)
7368 abort ();
7369
f4ef873c 7370 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
a281e72d
RK
7371 if (target == 0)
7372 target = assign_temp (type, 0, 1, 1);
7373
7374 emit_block_move (target, op0,
a06ef755 7375 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a
RH
7376 / BITS_PER_UNIT),
7377 BLOCK_OP_NORMAL);
3a94c984 7378
a281e72d
RK
7379 return target;
7380 }
bbf6f052 7381
dc6d66b3
RK
7382 op0 = validize_mem (op0);
7383
7384 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
04050c69 7385 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3
RK
7386
7387 op0 = extract_bit_field (op0, bitsize, bitpos,
bbf6f052 7388 unsignedp, target, ext_mode, ext_mode,
bbf6f052 7389 int_size_in_bytes (TREE_TYPE (tem)));
ef19912d
RK
7390
7391 /* If the result is a record type and BITSIZE is narrower than
7392 the mode of OP0, an integral mode, and this is a big endian
7393 machine, we must put the field into the high-order bits. */
7394 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7395 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
65a07688 7396 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
ef19912d
RK
7397 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7398 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7399 - bitsize),
7400 op0, 1);
7401
bbf6f052
RK
7402 if (mode == BLKmode)
7403 {
c3d32120 7404 rtx new = assign_temp (build_qualified_type
b0c48229
NB
7405 ((*lang_hooks.types.type_for_mode)
7406 (ext_mode, 0),
c3d32120 7407 TYPE_QUAL_CONST), 0, 1, 1);
bbf6f052
RK
7408
7409 emit_move_insn (new, op0);
7410 op0 = copy_rtx (new);
7411 PUT_MODE (op0, BLKmode);
c3d32120 7412 set_mem_attributes (op0, exp, 1);
bbf6f052
RK
7413 }
7414
7415 return op0;
7416 }

        /* If the result is BLKmode, use that to access the object
           now as well.  */
        if (mode == BLKmode)
          mode1 = BLKmode;

        /* Get a reference to just this component.  */
        if (modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
        else
          op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

        if (op0 == orig_op0)
          op0 = copy_rtx (op0);

        set_mem_attributes (op0, exp, 0);
        if (GET_CODE (XEXP (op0, 0)) == REG)
          mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

        MEM_VOLATILE_P (op0) |= volatilep;
        if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
            || modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_INITIALIZER)
          return op0;
        else if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        convert_move (target, op0, unsignedp);
        return target;
      }

    case VTABLE_REF:
      {
        rtx insn, before = get_last_insn (), vtbl_ref;

        /* Evaluate the interior expression.  */
        subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
                                 tmode, modifier);

        /* Get or create an instruction off which to hang a note.  */
        if (REG_P (subtarget))
          {
            target = subtarget;
            insn = get_last_insn ();
            if (insn == before)
              abort ();
            if (! INSN_P (insn))
              insn = prev_nonnote_insn (insn);
          }
        else
          {
            target = gen_reg_rtx (GET_MODE (subtarget));
            insn = emit_move_insn (target, subtarget);
          }

        /* Collect the data for the note.  */
        vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
        vtbl_ref = plus_constant (vtbl_ref,
                                  tree_low_cst (TREE_OPERAND (exp, 2), 0));
        /* Discard the initial CONST that was added.  */
        vtbl_ref = XEXP (vtbl_ref, 0);

        REG_NOTES (insn)
          = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));

        return target;
      }

      /* Intended for a reference to a buffer of a file-object in Pascal.
         But it's not certain that a special tree code will really be
         necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
        /* Pascal set IN expression.

           Algorithm:
               rlo       = set_low - (set_low%bits_per_word);
               the_word  = set [ (index - rlo)/bits_per_word ];
               bit_index = index % bits_per_word;
               bitmask   = 1 << bit_index;
               return !!(the_word & bitmask);  */

        tree set = TREE_OPERAND (exp, 0);
        tree index = TREE_OPERAND (exp, 1);
        int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
        tree set_type = TREE_TYPE (set);
        tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
        tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
        rtx index_val = expand_expr (index, 0, VOIDmode, 0);
        rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
        rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
        rtx setval = expand_expr (set, 0, VOIDmode, 0);
        rtx setaddr = XEXP (setval, 0);
        enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
        rtx rlow;
        rtx diff, quo, rem, addr, bit, result;

        /* If domain is empty, answer is no.  Likewise if index is constant
           and out of bounds.  */
        if (((TREE_CODE (set_high_bound) == INTEGER_CST
              && TREE_CODE (set_low_bound) == INTEGER_CST
              && tree_int_cst_lt (set_high_bound, set_low_bound))
             || (TREE_CODE (index) == INTEGER_CST
                 && TREE_CODE (set_low_bound) == INTEGER_CST
                 && tree_int_cst_lt (index, set_low_bound))
             || (TREE_CODE (set_high_bound) == INTEGER_CST
                 && TREE_CODE (index) == INTEGER_CST
                 && tree_int_cst_lt (set_high_bound, index))))
          return const0_rtx;

        if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        /* If we get here, we have to generate the code for both cases
           (in range and out of range).  */

        op0 = gen_label_rtx ();
        op1 = gen_label_rtx ();

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (lo_r) == CONST_INT))
          emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
                                   GET_MODE (index_val), iunsignedp, op1);

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (hi_r) == CONST_INT))
          emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
                                   GET_MODE (index_val), iunsignedp, op1);

        /* Calculate the element number of bit zero in the first word
           of the set.  */
        if (GET_CODE (lo_r) == CONST_INT)
          rlow = GEN_INT (INTVAL (lo_r)
                          & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
        else
          rlow = expand_binop (index_mode, and_optab, lo_r,
                               GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
                               NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

        diff = expand_binop (index_mode, sub_optab, index_val, rlow,
                             NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

        quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
        rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);

        addr = memory_address (byte_mode,
                               expand_binop (index_mode, add_optab, diff,
                                             setaddr, NULL_RTX, iunsignedp,
                                             OPTAB_LIB_WIDEN));

        /* Extract the bit we want to examine.  */
        bit = expand_shift (RSHIFT_EXPR, byte_mode,
                            gen_rtx_MEM (byte_mode, addr),
                            make_tree (TREE_TYPE (index), rem),
                            NULL_RTX, 1);
        result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
                               GET_MODE (target) == byte_mode ? target : 0,
                               1, OPTAB_LIB_WIDEN);

        if (result != target)
          convert_move (target, result, 1);

        /* Output the code to handle the out-of-range case.  */
        emit_jump (op0);
        emit_label (op1);
        emit_move_insn (target, const0_rtx);
        emit_label (op0);
        return target;
      }

    case WITH_CLEANUP_EXPR:
      if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
        {
          WITH_CLEANUP_EXPR_RTL (exp)
            = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
          expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
                                  CLEANUP_EH_ONLY (exp));

          /* That's it for this cleanup.  */
          TREE_OPERAND (exp, 1) = 0;
        }
      return WITH_CLEANUP_EXPR_RTL (exp);

    case CLEANUP_POINT_EXPR:
      {
        /* Start a new binding layer that will keep track of all cleanup
           actions to be performed.  */
        expand_start_bindings (2);

        target_temp_slot_level = temp_slot_level;

        op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
        /* If we're going to use this value, load it up now.  */
        if (! ignore)
          op0 = force_not_mem (op0);
        preserve_temp_slots (op0);
        expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              == FUNCTION_DECL)
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
        {
          if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              == BUILT_IN_FRONTEND)
            return (*lang_hooks.expand_expr)
              (exp, original_target, tmode, modifier);
          else
            return expand_builtin (exp, target, subtarget, tmode, ignore);
        }

      return expand_call (exp, target, ignore);

    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
        return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
        {
          tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

          /* If both input and output are BLKmode, this conversion isn't doing
             anything except possibly changing memory attributes.  */
          if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
            {
              rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
                                        modifier);

              result = copy_rtx (result);
              set_mem_attributes (result, exp, 0);
              return result;
            }

          if (target == 0)
            target = assign_temp (type, 0, 1, 1);

          if (GET_CODE (target) == MEM)
            /* Store data into beginning of memory target.  */
            store_expr (TREE_OPERAND (exp, 0),
                        adjust_address (target, TYPE_MODE (valtype), 0), 0);

          else if (GET_CODE (target) == REG)
            /* Store this field into a union of the proper type.  */
            store_field (target,
                         MIN ((int_size_in_bytes (TREE_TYPE
                                                  (TREE_OPERAND (exp, 0)))
                               * BITS_PER_UNIT),
                              (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
                         0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
                         VOIDmode, 0, type, 0);
          else
            abort ();

          /* Return the entire union.  */
          return target;
        }

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
                             modifier);

          /* If the signedness of the conversion differs and OP0 is
             a promoted SUBREG, clear that indication since we now
             have to do the proper extension.  */
          if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
              && GET_CODE (op0) == SUBREG)
            SUBREG_PROMOTED_VAR_P (op0) = 0;

          return op0;
        }

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
      if (GET_MODE (op0) == mode)
        return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
        {
          tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
          enum machine_mode inner_mode = TYPE_MODE (inner_type);

          if (modifier == EXPAND_INITIALIZER)
            return simplify_gen_subreg (mode, op0, inner_mode,
                                        subreg_lowpart_offset (mode,
                                                               inner_mode));
          else
            return convert_modes (mode, inner_mode, op0,
                                  TREE_UNSIGNED (inner_type));
        }

      if (modifier == EXPAND_INITIALIZER)
        return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (target == 0)
        return
          convert_to_mode (mode, op0,
                           TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
        convert_move (target, op0,
                      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case VIEW_CONVERT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);

      /* If the input and output modes are both the same, we are done.
         Otherwise, if neither mode is BLKmode and both are within a word, we
         can use gen_lowpart.  If neither is true, make sure the operand is
         in memory and convert the MEM to the new mode.  */
      if (TYPE_MODE (type) == GET_MODE (op0))
        ;
      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
               && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
               && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
        op0 = gen_lowpart (TYPE_MODE (type), op0);
      else if (GET_CODE (op0) != MEM)
        {
          /* If the operand is not a MEM, force it into memory.  Since we
             are going to be changing the mode of the MEM, don't call
             force_const_mem for constants because we don't allow pool
             constants to change mode.  */
          tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

          if (TREE_ADDRESSABLE (exp))
            abort ();

          if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
            target
              = assign_stack_temp_for_type
                (TYPE_MODE (inner_type),
                 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

          emit_move_insn (target, op0);
          op0 = target;
        }

      /* At this point, OP0 is in the correct mode.  If the output type is such
         that the operand is known to be aligned, indicate that it is.
         Otherwise, we need only be concerned about alignment for non-BLKmode
         results.  */
      if (GET_CODE (op0) == MEM)
        {
          op0 = copy_rtx (op0);

          if (TYPE_ALIGN_OK (type))
            set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
          else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
                   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
            {
              tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
              HOST_WIDE_INT temp_size
                = MAX (int_size_in_bytes (inner_type),
                       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
              rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
                                                    temp_size, 0, type);
              rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);

              if (TREE_ADDRESSABLE (exp))
                abort ();

              if (GET_MODE (op0) == BLKmode)
                emit_block_move (new_with_op0_mode, op0,
                                 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
                                 BLOCK_OP_NORMAL);
              else
                emit_move_insn (new_with_op0_mode, op0);

              op0 = new;
            }

          op0 = adjust_address (op0, TYPE_MODE (type), 0);
        }

      return op0;

    case PLUS_EXPR:
      /* We come here from MINUS_EXPR when the second operand is a
         constant.  */
    plus_expr:
      this_optab = ! unsignedp && flag_trapv
                   && (GET_MODE_CLASS (mode) == MODE_INT)
                   ? addv_optab : add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
         something else, make sure we add the register to the constant and
         then to the other thing.  This case can occur during strength
         reduction and doing it this way will produce better code if the
         frame pointer or argument pointer is eliminated.

         fold-const.c will ensure that the constant is always in the inner
         PLUS_EXPR, so the only case we need to do anything about is if
         sp, ap, or fp is our second argument, in which case we must swap
         the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
          && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
        {
          tree t = TREE_OPERAND (exp, 1);

          TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
          TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
        }
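
      /* That is, (X + CST) + FP becomes (FP + CST) + X, so FP + CST can
         later be folded into a single offset address.  */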

      /* If the result is to be ptr_mode and we are adding an integer to
         something, we might be forming a constant.  So try to use
         plus_constant.  If it produces a sum and we can't accept it,
         use force_operand.  This allows P = &ARR[const] to generate
         efficient code on machines where a SYMBOL_REF is not a valid
         address.

         If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
          || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
        {
          if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
            {
              rtx constant_part;

              op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
                                 EXPAND_SUM);
              /* Use immed_double_const to ensure that the constant is
                 truncated according to the mode of OP1, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
              constant_part
                = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
                                      (HOST_WIDE_INT) 0,
                                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
              op1 = plus_constant (op1, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op1 = force_operand (op1, target);
              return op1;
            }

          else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
                   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
                   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
            {
              rtx constant_part;

              op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                                 (modifier == EXPAND_INITIALIZER
                                  ? EXPAND_INITIALIZER : EXPAND_SUM));
              if (! CONSTANT_P (op0))
                {
                  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                     VOIDmode, modifier);
                  /* Don't go to both_summands if modifier
                     says it's not right to return a PLUS.  */
                  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                    goto binop2;
                  goto both_summands;
                }
              /* Use immed_double_const to ensure that the constant is
                 truncated according to the mode of OP1, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
              constant_part
                = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
                                      (HOST_WIDE_INT) 0,
                                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
              op0 = plus_constant (op0, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op0 = force_operand (op0, target);
              return op0;
            }
        }
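
      /* E.g. on a 64-bit host targeting a 32-bit machine, the SImode
         constant 0xffffffff must be represented as (const_int -1);
         immed_double_const performs exactly that truncation and sign
         extension before plus_constant uses the value.  */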

      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != ptr_mode)
        goto binop;

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);

    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
          && CONSTANT_P (XEXP (op0, 1)))
        {
          temp = op0;
          op0 = op1;
          op1 = temp;
        }
      /* If adding to a sum including a constant,
         associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
          && CONSTANT_P (XEXP (op1, 1)))
        {
          rtx constant_term = const0_rtx;

          temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
          if (temp != 0)
            op0 = temp;
          /* Ensure that MULT comes first if there is one.  */
          else if (GET_CODE (op0) == MULT)
            op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
          else
            op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);

          /* Let's also eliminate constants from op0 if possible.  */
          op0 = eliminate_constant_term (op0, &constant_term);

          /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
             their sum should be a constant.  Form it into OP1, since the
             result we want will then be OP0 + OP1.  */

          temp = simplify_binary_operation (PLUS, mode, constant_term,
                                            XEXP (op1, 1));
          if (temp != 0)
            op1 = temp;
          else
            op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
        }

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
        temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
bbf6f052
RK
7965
7966 case MINUS_EXPR:
ea87523e
RK
7967 /* For initializers, we are allowed to return a MINUS of two
7968 symbolic constants. Here we handle all cases when both operands
7969 are constant. */
bbf6f052
RK
7970 /* Handle difference of two symbolic constants,
7971 for the sake of an initializer. */
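/* One well-known instance is a difference of label addresses
   (a GNU C extension used by threaded interpreters), e.g.

       static const long off = &&lab2 - &&lab1;

   which expands to a MINUS of two symbolic addresses for the
   assembler to resolve rather than a run-time subtraction.  */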
7972 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7973 && really_constant_p (TREE_OPERAND (exp, 0))
7974 && really_constant_p (TREE_OPERAND (exp, 1)))
7975 {
37a08a29
RK
7976 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7977 modifier);
7978 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7979 modifier);
ea87523e 7980
ea87523e
RK
7981 /* If the last operand is a CONST_INT, use plus_constant of
7982 the negated constant. Else make the MINUS. */
7983 if (GET_CODE (op1) == CONST_INT)
7984 return plus_constant (op0, - INTVAL (op1));
7985 else
38a448ca 7986 return gen_rtx_MINUS (mode, op0, op1);
bbf6f052
RK
7987 }
7988 /* Convert A - const to A + (-const). */
7989 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7990 {
ae431183
RK
7991 tree negated = fold (build1 (NEGATE_EXPR, type,
7992 TREE_OPERAND (exp, 1)));
7993
ae431183 7994 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6fbfac92
JM
7995 /* If we can't negate the constant in TYPE, leave it alone and
7996 expand_binop will negate it for us. We used to try to do it
7997 here in the signed version of TYPE, but that doesn't work
7998 on POINTER_TYPEs. */;
ae431183
RK
7999 else
8000 {
8001 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
8002 goto plus_expr;
8003 }
bbf6f052 8004 }
91ce572a
CC
8005 this_optab = ! unsignedp && flag_trapv
8006 && (GET_MODE_CLASS(mode) == MODE_INT)
8007 ? subv_optab : sub_optab;
bbf6f052
RK
8008 goto binop;
8009
8010 case MULT_EXPR:
bbf6f052
RK
8011 /* If first operand is constant, swap them.
8012 Thus the following special case checks need only
8013 check the second operand. */
8014 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8015 {
b3694847 8016 tree t1 = TREE_OPERAND (exp, 0);
bbf6f052
RK
8017 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8018 TREE_OPERAND (exp, 1) = t1;
8019 }
8020
8021 /* Attempt to return something suitable for generating an
8022 indexed address, for machines that support that. */
8023
88f63c77 8024 if (modifier == EXPAND_SUM && mode == ptr_mode
3b40e71b 8025 && host_integerp (TREE_OPERAND (exp, 1), 0))
bbf6f052 8026 {
921b3427
RK
8027 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8028 EXPAND_SUM);
bbf6f052 8029
3b40e71b
RH
8030 /* If we knew for certain that this is arithmetic for an array
8031 reference, and we knew the bounds of the array, then we could
8032 apply the distributive law across (PLUS X C) for constant C.
8033 Without such knowledge, we risk overflowing the computation
8034 when both X and C are large, but X+C isn't. */
8035 /* ??? Could perhaps special-case EXP being unsigned and C being
8036 positive. In that case we are certain that X+C is no smaller
8037 than X and so the transformed expression will overflow iff the
8038 original would have. */
bbf6f052
RK
8039
8040 if (GET_CODE (op0) != REG)
906c4e36 8041 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
8042 if (GET_CODE (op0) != REG)
8043 op0 = copy_to_mode_reg (mode, op0);
8044
c5c76735
JL
8045 return
8046 gen_rtx_MULT (mode, op0,
3b40e71b 8047 GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
bbf6f052
RK
8048 }
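/* Sketch: for an address like p + i with 4-byte elements, returning
   (mult (reg i) (const_int 4)) under EXPAND_SUM lets the caller fold
   the product into an indexed address such as
   (plus (reg p) (mult (reg i) (const_int 4)))
   on machines with base + index * scale addressing.  */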
8049
e5e809f4 8050 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
8051 subtarget = 0;
8052
8053 /* Check for multiplying things that have been extended
8054 from a narrower type. If this machine supports multiplying
8055 in that narrower type with a result in the desired type,
8056 do it that way, and avoid the explicit type-conversion. */
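/* The classic instance (types are illustrative):

       int a, b;
       long long r = (long long) a * b;

   where both operands are NOP_EXPRs widening from int; a single
   widening 32x32->64 multiply then replaces the conversions and
   the full 64-bit multiply.  */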
8057 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8058 && TREE_CODE (type) == INTEGER_TYPE
8059 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8060 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8061 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8062 && int_fits_type_p (TREE_OPERAND (exp, 1),
8063 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8064 /* Don't use a widening multiply if a shift will do. */
8065 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 8066 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
8067 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8068 ||
8069 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8070 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8071 ==
8072 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8073 /* If both operands are extended, they must either both
8074 be zero-extended or both be sign-extended. */
8075 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8076 ==
8077 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8078 {
8079 enum machine_mode innermode
8080 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
b10af0c8
TG
8081 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8082 ? smul_widen_optab : umul_widen_optab);
bbf6f052
RK
8083 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8084 ? umul_widen_optab : smul_widen_optab);
b10af0c8 8085 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 8086 {
b10af0c8
TG
8087 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8088 {
8089 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8090 NULL_RTX, VOIDmode, 0);
8091 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8092 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8093 VOIDmode, 0);
8094 else
8095 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8096 NULL_RTX, VOIDmode, 0);
8097 goto binop2;
8098 }
8099 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8100 && innermode == word_mode)
8101 {
8102 rtx htem;
8103 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8104 NULL_RTX, VOIDmode, 0);
8105 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8c118062
GK
8106 op1 = convert_modes (innermode, mode,
8107 expand_expr (TREE_OPERAND (exp, 1),
8108 NULL_RTX, VOIDmode, 0),
8109 unsignedp);
b10af0c8
TG
8110 else
8111 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8112 NULL_RTX, VOIDmode, 0);
8113 temp = expand_binop (mode, other_optab, op0, op1, target,
8114 unsignedp, OPTAB_LIB_WIDEN);
8115 htem = expand_mult_highpart_adjust (innermode,
8116 gen_highpart (innermode, temp),
8117 op0, op1,
8118 gen_highpart (innermode, temp),
8119 unsignedp);
8120 emit_move_insn (gen_highpart (innermode, temp), htem);
8121 return temp;
8122 }
bbf6f052
RK
8123 }
8124 }
8125 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 8126 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8127 return expand_mult (mode, op0, op1, target, unsignedp);
8128
8129 case TRUNC_DIV_EXPR:
8130 case FLOOR_DIV_EXPR:
8131 case CEIL_DIV_EXPR:
8132 case ROUND_DIV_EXPR:
8133 case EXACT_DIV_EXPR:
e5e809f4 8134 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
8135 subtarget = 0;
8136 /* Possible optimization: compute the dividend with EXPAND_SUM
8137 then if the divisor is constant can optimize the case
8138 where some terms of the dividend have coeffs divisible by it. */
8139 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 8140 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8141 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8142
8143 case RDIV_EXPR:
b7e9703c
JH
8144 /* Emit a/b as a*(1/b). Later CSE may manage to share the reciprocal,
8145 saving an expensive divide. If not, combine will rebuild the
8146 original computation. */
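/* Sketch of the intended win, under -funsafe-math-optimizations:

       q1 = x / d;  q2 = y / d;

   both expand as a multiplication by (1/d), so CSE can share one
   reciprocal; when nothing is shared, combine turns a * (1/b)
   back into a / b.  */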
8147 if (flag_unsafe_math_optimizations && optimize && !optimize_size
ed7d44bc 8148 && TREE_CODE (type) == REAL_TYPE
b7e9703c
JH
8149 && !real_onep (TREE_OPERAND (exp, 0)))
8150 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8151 build (RDIV_EXPR, type,
8152 build_real (type, dconst1),
8153 TREE_OPERAND (exp, 1))),
8154 target, tmode, unsignedp);
ef89d648 8155 this_optab = sdiv_optab;
bbf6f052
RK
8156 goto binop;
8157
8158 case TRUNC_MOD_EXPR:
8159 case FLOOR_MOD_EXPR:
8160 case CEIL_MOD_EXPR:
8161 case ROUND_MOD_EXPR:
e5e809f4 8162 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
8163 subtarget = 0;
8164 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 8165 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8166 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8167
8168 case FIX_ROUND_EXPR:
8169 case FIX_FLOOR_EXPR:
8170 case FIX_CEIL_EXPR:
8171 abort (); /* Not used for C. */
8172
8173 case FIX_TRUNC_EXPR:
906c4e36 8174 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8175 if (target == 0)
8176 target = gen_reg_rtx (mode);
8177 expand_fix (target, op0, unsignedp);
8178 return target;
8179
8180 case FLOAT_EXPR:
906c4e36 8181 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8182 if (target == 0)
8183 target = gen_reg_rtx (mode);
8184 /* expand_float can't figure out what to do if FROM has VOIDmode.
8185 So give it the correct mode. With -O, cse will optimize this. */
8186 if (GET_MODE (op0) == VOIDmode)
8187 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8188 op0);
8189 expand_float (target, op0,
8190 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8191 return target;
8192
8193 case NEGATE_EXPR:
5b22bee8 8194 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
91ce572a 8195 temp = expand_unop (mode,
0fb7aeda
KH
8196 ! unsignedp && flag_trapv
8197 && (GET_MODE_CLASS(mode) == MODE_INT)
8198 ? negv_optab : neg_optab, op0, target, 0);
bbf6f052
RK
8199 if (temp == 0)
8200 abort ();
8201 return temp;
8202
8203 case ABS_EXPR:
8204 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8205
2d7050fd 8206 /* Handle complex values specially. */
d6a5ac33
RK
8207 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8208 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8209 return expand_complex_abs (mode, op0, target, unsignedp);
2d7050fd 8210
bbf6f052
RK
8211 /* Unsigned abs is simply the operand. Testing here means we don't
8212 risk generating incorrect code below. */
8213 if (TREE_UNSIGNED (type))
8214 return op0;
8215
91ce572a 8216 return expand_abs (mode, op0, target, unsignedp,
e5e809f4 8217 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
bbf6f052
RK
8218
8219 case MAX_EXPR:
8220 case MIN_EXPR:
8221 target = original_target;
e5e809f4 8222 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
fc155707 8223 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
d6a5ac33 8224 || GET_MODE (target) != mode
bbf6f052
RK
8225 || (GET_CODE (target) == REG
8226 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8227 target = gen_reg_rtx (mode);
906c4e36 8228 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8229 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8230
8231 /* First try to do it with a special MIN or MAX instruction.
8232 If that does not win, use a conditional jump to select the proper
8233 value. */
8234 this_optab = (TREE_UNSIGNED (type)
8235 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8236 : (code == MIN_EXPR ? smin_optab : smax_optab));
8237
8238 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8239 OPTAB_WIDEN);
8240 if (temp != 0)
8241 return temp;
8242
fa2981d8
JW
8243 /* At this point, a MEM target is no longer useful; we will get better
8244 code without it. */
3a94c984 8245
fa2981d8
JW
8246 if (GET_CODE (target) == MEM)
8247 target = gen_reg_rtx (mode);
8248
ee456b1c
RK
8249 if (target != op0)
8250 emit_move_insn (target, op0);
d6a5ac33 8251
bbf6f052 8252 op0 = gen_label_rtx ();
d6a5ac33 8253
f81497d9
RS
8254 /* If this mode is an integer too wide to compare properly,
8255 compare word by word. Rely on cse to optimize constant cases. */
1eb8759b
RH
8256 if (GET_MODE_CLASS (mode) == MODE_INT
8257 && ! can_compare_p (GE, mode, ccp_jump))
bbf6f052 8258 {
f81497d9 8259 if (code == MAX_EXPR)
d6a5ac33
RK
8260 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8261 target, op1, NULL_RTX, op0);
bbf6f052 8262 else
d6a5ac33
RK
8263 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8264 op1, target, NULL_RTX, op0);
bbf6f052 8265 }
f81497d9
RS
8266 else
8267 {
b30f05db
BS
8268 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8269 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
a06ef755 8270 unsignedp, mode, NULL_RTX, NULL_RTX,
b30f05db 8271 op0);
f81497d9 8272 }
b30f05db 8273 emit_move_insn (target, op1);
bbf6f052
RK
8274 emit_label (op0);
8275 return target;
8276
bbf6f052
RK
8277 case BIT_NOT_EXPR:
8278 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8279 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8280 if (temp == 0)
8281 abort ();
8282 return temp;
8283
8284 case FFS_EXPR:
8285 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8286 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8287 if (temp == 0)
8288 abort ();
8289 return temp;
8290
d6a5ac33
RK
8291 /* ??? Can optimize bitwise operations with one arg constant.
8292 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8293 and (a bitwise1 b) bitwise2 b (etc)
8294 but that is probably not worthwhile. */
8295
8296 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8297 boolean values when we want in all cases to compute both of them. In
8298 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8299 as actual zero-or-1 values and then bitwise anding. In cases where
8300 there cannot be any side effects, better code would be made by
8301 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8302 how to recognize those cases. */
8303
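/* For instance, a && b with side-effect-free operands may reach
   here as TRUTH_AND_EXPR: both operands are reduced to 0 or 1 and
   bitwise anded, with no branch, whereas the ANDIF form would
   branch around the evaluation of b; which wins depends on branch
   cost.  */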
bbf6f052
RK
8304 case TRUTH_AND_EXPR:
8305 case BIT_AND_EXPR:
8306 this_optab = and_optab;
8307 goto binop;
8308
bbf6f052
RK
8309 case TRUTH_OR_EXPR:
8310 case BIT_IOR_EXPR:
8311 this_optab = ior_optab;
8312 goto binop;
8313
874726a8 8314 case TRUTH_XOR_EXPR:
bbf6f052
RK
8315 case BIT_XOR_EXPR:
8316 this_optab = xor_optab;
8317 goto binop;
8318
8319 case LSHIFT_EXPR:
8320 case RSHIFT_EXPR:
8321 case LROTATE_EXPR:
8322 case RROTATE_EXPR:
e5e809f4 8323 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
8324 subtarget = 0;
8325 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8326 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8327 unsignedp);
8328
d6a5ac33
RK
8329 /* Could determine the answer when only additive constants differ. Also,
8330 the addition of one can be handled by changing the condition. */
bbf6f052
RK
8331 case LT_EXPR:
8332 case LE_EXPR:
8333 case GT_EXPR:
8334 case GE_EXPR:
8335 case EQ_EXPR:
8336 case NE_EXPR:
1eb8759b
RH
8337 case UNORDERED_EXPR:
8338 case ORDERED_EXPR:
8339 case UNLT_EXPR:
8340 case UNLE_EXPR:
8341 case UNGT_EXPR:
8342 case UNGE_EXPR:
8343 case UNEQ_EXPR:
bbf6f052
RK
8344 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8345 if (temp != 0)
8346 return temp;
d6a5ac33 8347
0f41302f 8348 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
bbf6f052
RK
8349 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8350 && original_target
8351 && GET_CODE (original_target) == REG
8352 && (GET_MODE (original_target)
8353 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8354 {
d6a5ac33
RK
8355 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8356 VOIDmode, 0);
8357
c0a3eeac
UW
8358 /* If temp is constant, we can just compute the result. */
8359 if (GET_CODE (temp) == CONST_INT)
8360 {
8361 if (INTVAL (temp) != 0)
8362 emit_move_insn (target, const1_rtx);
8363 else
8364 emit_move_insn (target, const0_rtx);
8365
8366 return target;
8367 }
8368
bbf6f052 8369 if (temp != original_target)
c0a3eeac
UW
8370 {
8371 enum machine_mode mode1 = GET_MODE (temp);
8372 if (mode1 == VOIDmode)
8373 mode1 = tmode != VOIDmode ? tmode : mode;
0fb7aeda 8374
c0a3eeac
UW
8375 temp = copy_to_mode_reg (mode1, temp);
8376 }
d6a5ac33 8377
bbf6f052 8378 op1 = gen_label_rtx ();
c5d5d461 8379 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
a06ef755 8380 GET_MODE (temp), unsignedp, op1);
bbf6f052
RK
8381 emit_move_insn (temp, const1_rtx);
8382 emit_label (op1);
8383 return temp;
8384 }
d6a5ac33 8385
bbf6f052
RK
8386 /* If no set-flag instruction, must generate a conditional
8387 store into a temporary variable. Drop through
8388 and handle this like && and ||. */
8389
8390 case TRUTH_ANDIF_EXPR:
8391 case TRUTH_ORIF_EXPR:
e44842fe 8392 if (! ignore
e5e809f4 8393 && (target == 0 || ! safe_from_p (target, exp, 1)
e44842fe
RK
8394 /* Make sure we don't have a hard reg (such as function's return
8395 value) live across basic blocks, if not optimizing. */
8396 || (!optimize && GET_CODE (target) == REG
8397 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 8398 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
e44842fe
RK
8399
8400 if (target)
8401 emit_clr_insn (target);
8402
bbf6f052
RK
8403 op1 = gen_label_rtx ();
8404 jumpifnot (exp, op1);
e44842fe
RK
8405
8406 if (target)
8407 emit_0_to_1_insn (target);
8408
bbf6f052 8409 emit_label (op1);
e44842fe 8410 return ignore ? const0_rtx : target;
bbf6f052
RK
8411
8412 case TRUTH_NOT_EXPR:
8413 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8414 /* The parser is careful to generate TRUTH_NOT_EXPR
8415 only with operands that are always zero or one. */
906c4e36 8416 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052
RK
8417 target, 1, OPTAB_LIB_WIDEN);
8418 if (temp == 0)
8419 abort ();
8420 return temp;
8421
8422 case COMPOUND_EXPR:
8423 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8424 emit_queue ();
8425 return expand_expr (TREE_OPERAND (exp, 1),
8426 (ignore ? const0_rtx : target),
8427 VOIDmode, 0);
8428
8429 case COND_EXPR:
ac01eace
RK
8430 /* If we would have a "singleton" (see below) were it not for a
8431 conversion in each arm, bring that conversion back out. */
8432 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8433 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8434 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8435 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8436 {
d6edb99e
ZW
8437 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8438 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8439
8440 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8441 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8442 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8443 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8444 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8445 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8446 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8447 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
ac01eace 8448 return expand_expr (build1 (NOP_EXPR, type,
d6edb99e 8449 build (COND_EXPR, TREE_TYPE (iftrue),
ac01eace 8450 TREE_OPERAND (exp, 0),
d6edb99e 8451 iftrue, iffalse)),
ac01eace
RK
8452 target, tmode, modifier);
8453 }
8454
bbf6f052
RK
8455 {
8456 /* Note that COND_EXPRs whose type is a structure or union
8457 are required to be constructed to contain assignments of
8458 a temporary variable, so that we can evaluate them here
8459 for side effect only. If type is void, we must do likewise. */
8460
8461 /* If an arm of the branch requires a cleanup,
8462 only that cleanup is performed. */
8463
8464 tree singleton = 0;
8465 tree binary_op = 0, unary_op = 0;
bbf6f052
RK
8466
8467 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8468 convert it to our mode, if necessary. */
8469 if (integer_onep (TREE_OPERAND (exp, 1))
8470 && integer_zerop (TREE_OPERAND (exp, 2))
8471 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8472 {
dd27116b
RK
8473 if (ignore)
8474 {
8475 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
37a08a29 8476 modifier);
dd27116b
RK
8477 return const0_rtx;
8478 }
8479
37a08a29 8480 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
bbf6f052
RK
8481 if (GET_MODE (op0) == mode)
8482 return op0;
d6a5ac33 8483
bbf6f052
RK
8484 if (target == 0)
8485 target = gen_reg_rtx (mode);
8486 convert_move (target, op0, unsignedp);
8487 return target;
8488 }
8489
ac01eace
RK
8490 /* Check for X ? A + B : A. If we have this, we can copy A to the
8491 output and conditionally add B. Similarly for unary operations.
8492 Don't do this if X has side-effects because those side effects
8493 might affect A or B and the "?" operation is a sequence point in
8494 ANSI. (operand_equal_p tests for side effects.) */
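/* Sketch of the singleton case:

       r = cond ? a + b : a;

   Here A is copied to the output unconditionally and B is added
   only when COND holds, so A is evaluated exactly once.  */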
bbf6f052
RK
8495
8496 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8497 && operand_equal_p (TREE_OPERAND (exp, 2),
8498 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8499 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8500 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8501 && operand_equal_p (TREE_OPERAND (exp, 1),
8502 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8503 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8504 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8505 && operand_equal_p (TREE_OPERAND (exp, 2),
8506 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8507 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8508 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8509 && operand_equal_p (TREE_OPERAND (exp, 1),
8510 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8511 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8512
01c8a7c8
RK
8513 /* If we are not to produce a result, we have no target. Otherwise,
8514 if a target was specified use it; it will not be used as an
3a94c984 8515 intermediate target unless it is safe. If no target, use a
01c8a7c8
RK
8516 temporary. */
8517
8518 if (ignore)
8519 temp = 0;
8520 else if (original_target
e5e809f4 8521 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
01c8a7c8
RK
8522 || (singleton && GET_CODE (original_target) == REG
8523 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8524 && original_target == var_rtx (singleton)))
8525 && GET_MODE (original_target) == mode
7c00d1fe
RK
8526#ifdef HAVE_conditional_move
8527 && (! can_conditionally_move_p (mode)
8528 || GET_CODE (original_target) == REG
8529 || TREE_ADDRESSABLE (type))
8530#endif
8125d7e9
BS
8531 && (GET_CODE (original_target) != MEM
8532 || TREE_ADDRESSABLE (type)))
01c8a7c8
RK
8533 temp = original_target;
8534 else if (TREE_ADDRESSABLE (type))
8535 abort ();
8536 else
8537 temp = assign_temp (type, 0, 0, 1);
8538
ac01eace
RK
8539 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8540 do the test of X as a store-flag operation, do this as
8541 A + ((X != 0) << log C). Similarly for other simple binary
8542 operators. Only do for C == 1 if BRANCH_COST is low. */
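/* Worked instance, with C == 4:

       r = x ? a + 4 : a;

   becomes r = a + ((x != 0) << 2), trading the branch for a
   store-flag and a shift.  */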
dd27116b 8543 if (temp && singleton && binary_op
bbf6f052
RK
8544 && (TREE_CODE (binary_op) == PLUS_EXPR
8545 || TREE_CODE (binary_op) == MINUS_EXPR
8546 || TREE_CODE (binary_op) == BIT_IOR_EXPR
9fbd9f58 8547 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
ac01eace
RK
8548 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8549 : integer_onep (TREE_OPERAND (binary_op, 1)))
bbf6f052
RK
8550 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8551 {
8552 rtx result;
91ce572a 8553 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
0fb7aeda
KH
8554 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8555 ? addv_optab : add_optab)
8556 : TREE_CODE (binary_op) == MINUS_EXPR
8557 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8558 ? subv_optab : sub_optab)
8559 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8560 : xor_optab);
bbf6f052
RK
8561
8562 /* If we had X ? A : A + 1, do this as A + (X == 0).
8563
8564 We have to invert the truth value here and then put it
8565 back later if do_store_flag fails. We cannot simply copy
8566 TREE_OPERAND (exp, 0) to another variable and modify that
8567 because invert_truthvalue can modify the tree pointed to
8568 by its argument. */
8569 if (singleton == TREE_OPERAND (exp, 1))
8570 TREE_OPERAND (exp, 0)
8571 = invert_truthvalue (TREE_OPERAND (exp, 0));
8572
8573 result = do_store_flag (TREE_OPERAND (exp, 0),
e5e809f4 8574 (safe_from_p (temp, singleton, 1)
906c4e36 8575 ? temp : NULL_RTX),
bbf6f052
RK
8576 mode, BRANCH_COST <= 1);
8577
ac01eace
RK
8578 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8579 result = expand_shift (LSHIFT_EXPR, mode, result,
8580 build_int_2 (tree_log2
8581 (TREE_OPERAND
8582 (binary_op, 1)),
8583 0),
e5e809f4 8584 (safe_from_p (temp, singleton, 1)
ac01eace
RK
8585 ? temp : NULL_RTX), 0);
8586
bbf6f052
RK
8587 if (result)
8588 {
906c4e36 8589 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8590 return expand_binop (mode, boptab, op1, result, temp,
8591 unsignedp, OPTAB_LIB_WIDEN);
8592 }
8593 else if (singleton == TREE_OPERAND (exp, 1))
8594 TREE_OPERAND (exp, 0)
8595 = invert_truthvalue (TREE_OPERAND (exp, 0));
8596 }
3a94c984 8597
dabf8373 8598 do_pending_stack_adjust ();
bbf6f052
RK
8599 NO_DEFER_POP;
8600 op0 = gen_label_rtx ();
8601
8602 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8603 {
8604 if (temp != 0)
8605 {
8606 /* If the target conflicts with the other operand of the
8607 binary op, we can't use it. Also, we can't use the target
8608 if it is a hard register, because evaluating the condition
8609 might clobber it. */
8610 if ((binary_op
e5e809f4 8611 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
bbf6f052
RK
8612 || (GET_CODE (temp) == REG
8613 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8614 temp = gen_reg_rtx (mode);
8615 store_expr (singleton, temp, 0);
8616 }
8617 else
906c4e36 8618 expand_expr (singleton,
2937cf87 8619 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8620 if (singleton == TREE_OPERAND (exp, 1))
8621 jumpif (TREE_OPERAND (exp, 0), op0);
8622 else
8623 jumpifnot (TREE_OPERAND (exp, 0), op0);
8624
956d6950 8625 start_cleanup_deferral ();
bbf6f052
RK
8626 if (binary_op && temp == 0)
8627 /* Just touch the other operand. */
8628 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 8629 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8630 else if (binary_op)
8631 store_expr (build (TREE_CODE (binary_op), type,
8632 make_tree (type, temp),
8633 TREE_OPERAND (binary_op, 1)),
8634 temp, 0);
8635 else
8636 store_expr (build1 (TREE_CODE (unary_op), type,
8637 make_tree (type, temp)),
8638 temp, 0);
8639 op1 = op0;
bbf6f052 8640 }
bbf6f052
RK
8641 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8642 comparison operator. If we have one of these cases, set the
8643 output to A, branch on A (cse will merge these two references),
8644 then set the output to FOO. */
8645 else if (temp
8646 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8647 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8648 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8649 TREE_OPERAND (exp, 1), 0)
e9a25f70
JL
8650 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8651 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
e5e809f4 8652 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
bbf6f052 8653 {
3a94c984
KH
8654 if (GET_CODE (temp) == REG
8655 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
bbf6f052
RK
8656 temp = gen_reg_rtx (mode);
8657 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8658 jumpif (TREE_OPERAND (exp, 0), op0);
5dab5552 8659
956d6950 8660 start_cleanup_deferral ();
bbf6f052
RK
8661 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8662 op1 = op0;
8663 }
8664 else if (temp
8665 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8666 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8667 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8668 TREE_OPERAND (exp, 2), 0)
e9a25f70
JL
8669 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8670 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
e5e809f4 8671 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
bbf6f052 8672 {
3a94c984
KH
8673 if (GET_CODE (temp) == REG
8674 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
bbf6f052
RK
8675 temp = gen_reg_rtx (mode);
8676 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8677 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 8678
956d6950 8679 start_cleanup_deferral ();
bbf6f052
RK
8680 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8681 op1 = op0;
8682 }
8683 else
8684 {
8685 op1 = gen_label_rtx ();
8686 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 8687
956d6950 8688 start_cleanup_deferral ();
3a94c984 8689
2ac84cfe 8690 /* One branch of the cond can be void, if it never returns. For
3a94c984 8691 example, A ? throw : E. */
2ac84cfe 8692 if (temp != 0
3a94c984 8693 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
bbf6f052
RK
8694 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8695 else
906c4e36
RK
8696 expand_expr (TREE_OPERAND (exp, 1),
8697 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
956d6950 8698 end_cleanup_deferral ();
bbf6f052
RK
8699 emit_queue ();
8700 emit_jump_insn (gen_jump (op1));
8701 emit_barrier ();
8702 emit_label (op0);
956d6950 8703 start_cleanup_deferral ();
2ac84cfe 8704 if (temp != 0
3a94c984 8705 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
bbf6f052
RK
8706 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8707 else
906c4e36
RK
8708 expand_expr (TREE_OPERAND (exp, 2),
8709 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8710 }
8711
956d6950 8712 end_cleanup_deferral ();
bbf6f052
RK
8713
8714 emit_queue ();
8715 emit_label (op1);
8716 OK_DEFER_POP;
5dab5552 8717
bbf6f052
RK
8718 return temp;
8719 }
8720
8721 case TARGET_EXPR:
8722 {
8723 /* Something needs to be initialized, but we didn't know
8724 where that thing was when building the tree. For example,
8725 it could be the return value of a function, or a parameter
8726 to a function which is laid out on the stack, or a temporary
8727 variable which must be passed by reference.
8728
8729 We guarantee that the expression will either be constructed
8730 or copied into our original target. */
8731
8732 tree slot = TREE_OPERAND (exp, 0);
2a888d4c 8733 tree cleanups = NULL_TREE;
5c062816 8734 tree exp1;
bbf6f052
RK
8735
8736 if (TREE_CODE (slot) != VAR_DECL)
8737 abort ();
8738
9c51f375
RK
8739 if (! ignore)
8740 target = original_target;
8741
6fbfac92
JM
8742 /* Set this here so that if we get a target that refers to a
8743 register variable that's already been used, put_reg_into_stack
3a94c984 8744 knows that it should fix up those uses. */
6fbfac92
JM
8745 TREE_USED (slot) = 1;
8746
bbf6f052
RK
8747 if (target == 0)
8748 {
19e7881c 8749 if (DECL_RTL_SET_P (slot))
ac993f4f
MS
8750 {
8751 target = DECL_RTL (slot);
5c062816 8752 /* If we have already expanded the slot, don't do
ac993f4f 8753 it again. (mrs) */
5c062816
MS
8754 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8755 return target;
ac993f4f 8756 }
bbf6f052
RK
8757 else
8758 {
e9a25f70 8759 target = assign_temp (type, 2, 0, 1);
bbf6f052
RK
8760 /* All temp slots at this level must not conflict. */
8761 preserve_temp_slots (target);
19e7881c 8762 SET_DECL_RTL (slot, target);
e9a25f70 8763 if (TREE_ADDRESSABLE (slot))
4361b41d 8764 put_var_into_stack (slot);
bbf6f052 8765
e287fd6e
RK
8766 /* Since SLOT is not known to the called function
8767 to belong to its stack frame, we must build an explicit
8768 cleanup. This case occurs when we must build up a reference
8769 to pass the reference as an argument. In this case,
8770 it is very likely that such a reference need not be
8771 built here. */
8772
8773 if (TREE_OPERAND (exp, 2) == 0)
c88770e9
NB
8774 TREE_OPERAND (exp, 2)
8775 = (*lang_hooks.maybe_build_cleanup) (slot);
2a888d4c 8776 cleanups = TREE_OPERAND (exp, 2);
e287fd6e 8777 }
bbf6f052
RK
8778 }
8779 else
8780 {
8781 /* This case does occur, when expanding a parameter which
8782 needs to be constructed on the stack. The target
8783 is the actual stack address that we want to initialize.
8784 The function we call will perform the cleanup in this case. */
8785
8c042b47
RS
8786 /* If we have already assigned it space, use that space,
8787 not target that we were passed in, as our target
8788 parameter is only a hint. */
19e7881c 8789 if (DECL_RTL_SET_P (slot))
3a94c984
KH
8790 {
8791 target = DECL_RTL (slot);
8792 /* If we have already expanded the slot, don't do
8c042b47 8793 it again. (mrs) */
3a94c984
KH
8794 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8795 return target;
8c042b47 8796 }
21002281
JW
8797 else
8798 {
19e7881c 8799 SET_DECL_RTL (slot, target);
21002281
JW
8800 /* If we must have an addressable slot, then make sure that
8801 the RTL that we just stored in slot is OK. */
8802 if (TREE_ADDRESSABLE (slot))
4361b41d 8803 put_var_into_stack (slot);
21002281 8804 }
bbf6f052
RK
8805 }
8806
4847c938 8807 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
5c062816
MS
8808 /* Mark it as expanded. */
8809 TREE_OPERAND (exp, 1) = NULL_TREE;
8810
41531e5b 8811 store_expr (exp1, target, 0);
61d6b1cc 8812
659e5a7a 8813 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
3a94c984 8814
41531e5b 8815 return target;
bbf6f052
RK
8816 }
8817
8818 case INIT_EXPR:
8819 {
8820 tree lhs = TREE_OPERAND (exp, 0);
8821 tree rhs = TREE_OPERAND (exp, 1);
bbf6f052
RK
8822
8823 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
bbf6f052
RK
8824 return temp;
8825 }
8826
8827 case MODIFY_EXPR:
8828 {
8829 /* If lhs is complex, expand calls in rhs before computing it.
6d0a3f67
NS
8830 That's so we don't compute a pointer and save it over a
8831 call. If lhs is simple, compute it first so we can give it
8832 as a target if the rhs is just a call. This avoids an
8833 extra temp and copy, and prevents a partial subsumption
8834 that makes bad code. Actually we could treat
8835 component_ref's of vars like vars. */
bbf6f052
RK
8836
8837 tree lhs = TREE_OPERAND (exp, 0);
8838 tree rhs = TREE_OPERAND (exp, 1);
bbf6f052
RK
8839
8840 temp = 0;
8841
bbf6f052
RK
8842 /* Check for |= or &= of a bitfield of size one into another bitfield
8843 of size 1. In this case, (unless we need the result of the
8844 assignment) we can do this more efficiently with a
8845 test followed by an assignment, if necessary.
8846
8847 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8848 things change so we do, this code should be enhanced to
8849 support it. */
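/* The pattern in question:

       struct { unsigned a : 1, b : 1; } s;
       ...  s.a |= s.b;  ...

   is expanded as a jump on s.b followed, only when needed, by a
   store of 1 into s.a, instead of a read-modify-write of the
   destination bit-field.  */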
8850 if (ignore
8851 && TREE_CODE (lhs) == COMPONENT_REF
8852 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8853 || TREE_CODE (rhs) == BIT_AND_EXPR)
8854 && TREE_OPERAND (rhs, 0) == lhs
8855 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
05bccae2
RK
8856 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8857 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
bbf6f052
RK
8858 {
8859 rtx label = gen_label_rtx ();
8860
8861 do_jump (TREE_OPERAND (rhs, 1),
8862 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8863 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8864 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8865 (TREE_CODE (rhs) == BIT_IOR_EXPR
8866 ? integer_one_node
8867 : integer_zero_node)),
8868 0, 0);
e7c33f54 8869 do_pending_stack_adjust ();
bbf6f052
RK
8870 emit_label (label);
8871 return const0_rtx;
8872 }
8873
bbf6f052 8874 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
0fb7aeda 8875
bbf6f052
RK
8876 return temp;
8877 }
8878
6e7f84a7
APB
8879 case RETURN_EXPR:
8880 if (!TREE_OPERAND (exp, 0))
8881 expand_null_return ();
8882 else
8883 expand_return (TREE_OPERAND (exp, 0));
8884 return const0_rtx;
8885
bbf6f052
RK
8886 case PREINCREMENT_EXPR:
8887 case PREDECREMENT_EXPR:
7b8b9722 8888 return expand_increment (exp, 0, ignore);
bbf6f052
RK
8889
8890 case POSTINCREMENT_EXPR:
8891 case POSTDECREMENT_EXPR:
8892 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 8893 return expand_increment (exp, ! ignore, ignore);
bbf6f052
RK
8894
8895 case ADDR_EXPR:
8896 /* Are we taking the address of a nested function? */
8897 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9 8898 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
e5e809f4
JL
8899 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8900 && ! TREE_STATIC (exp))
bbf6f052
RK
8901 {
8902 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8903 op0 = force_operand (op0, target);
8904 }
682ba3a6
RK
8905 /* If we are taking the address of something erroneous, just
8906 return a zero. */
8907 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8908 return const0_rtx;
d6b6783b
RK
8909 /* If we are taking the address of a constant and are at the
8910 top level, we have to use output_constant_def since we can't
8911 call force_const_mem at top level. */
8912 else if (cfun == 0
8913 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8914 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8915 == 'c')))
8916 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
bbf6f052
RK
8917 else
8918 {
e287fd6e
RK
8919 /* We make sure to pass const0_rtx down if we came in with
8920 ignore set, to avoid running the cleanups twice. */
8921 op0 = expand_expr (TREE_OPERAND (exp, 0),
8922 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
8923 (modifier == EXPAND_INITIALIZER
8924 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 8925
119af78a
RK
8926 /* If we are going to ignore the result, OP0 will have been set
8927 to const0_rtx, so just return it. Don't get confused and
8928 think we are taking the address of the constant. */
8929 if (ignore)
8930 return op0;
8931
73b7f58c
BS
8932 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8933 clever and return a REG when given a MEM. */
8934 op0 = protect_from_queue (op0, 1);
3539e816 8935
c5c76735
JL
8936 /* We would like the object in memory. If it is a constant, we can
8937 have it be statically allocated into memory. For a non-constant,
8938 we need to allocate some memory and store the value into it. */
896102d0
RK
8939
8940 if (CONSTANT_P (op0))
8941 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8942 op0);
682ba3a6 8943 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
df6018fd
JJ
8944 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8945 || GET_CODE (op0) == PARALLEL)
896102d0 8946 {
6c7d86ec
RK
8947 /* If the operand is a SAVE_EXPR, we can deal with this by
8948 forcing the SAVE_EXPR into memory. */
8949 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8950 {
8951 put_var_into_stack (TREE_OPERAND (exp, 0));
8952 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8953 }
df6018fd 8954 else
6c7d86ec
RK
8955 {
8956 /* If this object is in a register, it can't be BLKmode. */
8957 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
19f90fad 8958 rtx memloc = assign_temp (inner_type, 1, 1, 1);
6c7d86ec
RK
8959
8960 if (GET_CODE (op0) == PARALLEL)
8961 /* Handle calls that pass values in multiple
8962 non-contiguous locations. The Irix 6 ABI has examples
8963 of this. */
0fb7aeda 8964 emit_group_store (memloc, op0,
6c7d86ec
RK
8965 int_size_in_bytes (inner_type));
8966 else
8967 emit_move_insn (memloc, op0);
0fb7aeda 8968
6c7d86ec
RK
8969 op0 = memloc;
8970 }
896102d0
RK
8971 }
8972
bbf6f052
RK
8973 if (GET_CODE (op0) != MEM)
8974 abort ();
3a94c984 8975
34e81b5a 8976 mark_temp_addr_taken (op0);
bbf6f052 8977 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88f63c77 8978 {
34e81b5a 8979 op0 = XEXP (op0, 0);
88f63c77 8980#ifdef POINTERS_EXTEND_UNSIGNED
34e81b5a 8981 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
88f63c77 8982 && mode == ptr_mode)
34e81b5a 8983 op0 = convert_memory_address (ptr_mode, op0);
88f63c77 8984#endif
34e81b5a 8985 return op0;
88f63c77 8986 }
987c71d9 8987
c952ff4b
RK
8988 /* If OP0 is not aligned at least as much as the type requires, we
8989 need to make a temporary, copy OP0 to it, and take the address of
8990 the temporary. We want to use the alignment of the type, not of
8991 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8992 the test for BLKmode means that can't happen. The test for
8993 BLKmode is because we never make mis-aligned MEMs with
8994 non-BLKmode.
8995
8996 We don't need to do this at all if the machine doesn't have
8997 strict alignment. */
8998 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8999 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
ed239f5a
RK
9000 > MEM_ALIGN (op0))
9001 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
a06ef755
RK
9002 {
9003 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9004 rtx new
9005 = assign_stack_temp_for_type
9006 (TYPE_MODE (inner_type),
9007 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
e27cc84b 9008 : int_size_in_bytes (inner_type),
a06ef755
RK
9009 1, build_qualified_type (inner_type,
9010 (TYPE_QUALS (inner_type)
9011 | TYPE_QUAL_CONST)));
9012
c3d32120
RK
9013 if (TYPE_ALIGN_OK (inner_type))
9014 abort ();
9015
44bb111a
RH
9016 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9017 BLOCK_OP_NORMAL);
a06ef755
RK
9018 op0 = new;
9019 }
9020
bbf6f052
RK
9021 op0 = force_operand (XEXP (op0, 0), target);
9022 }
987c71d9 9023
05c8e58b
HPN
9024 if (flag_force_addr
9025 && GET_CODE (op0) != REG
9026 && modifier != EXPAND_CONST_ADDRESS
9027 && modifier != EXPAND_INITIALIZER
9028 && modifier != EXPAND_SUM)
987c71d9
RK
9029 op0 = force_reg (Pmode, op0);
9030
dc6d66b3
RK
9031 if (GET_CODE (op0) == REG
9032 && ! REG_USERVAR_P (op0))
bdb429a5 9033 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
987c71d9 9034
88f63c77
RK
9035#ifdef POINTERS_EXTEND_UNSIGNED
9036 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9037 && mode == ptr_mode)
9fcfcce7 9038 op0 = convert_memory_address (ptr_mode, op0);
88f63c77
RK
9039#endif
9040
bbf6f052
RK
9041 return op0;
9042
9043 case ENTRY_VALUE_EXPR:
9044 abort ();
9045
7308a047
RS
9046 /* COMPLEX type for Extended Pascal & Fortran */
9047 case COMPLEX_EXPR:
9048 {
9049 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 9050 rtx insns;
7308a047
RS
9051
9052 /* Get the rtx code of the operands. */
9053 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9054 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9055
9056 if (! target)
9057 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9058
6551fa4d 9059 start_sequence ();
7308a047
RS
9060
9061 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
9062 emit_move_insn (gen_realpart (mode, target), op0);
9063 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 9064
6551fa4d
JW
9065 insns = get_insns ();
9066 end_sequence ();
9067
7308a047 9068 /* Complex construction should appear as a single unit. */
6551fa4d
JW
9069 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9070 each with a separate pseudo as destination.
9071 It's not correct for flow to treat them as a unit. */
6d6e61ce 9072 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
9073 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9074 else
2f937369 9075 emit_insn (insns);
7308a047
RS
9076
9077 return target;
9078 }
9079
9080 case REALPART_EXPR:
2d7050fd
RS
9081 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9082 return gen_realpart (mode, op0);
3a94c984 9083
7308a047 9084 case IMAGPART_EXPR:
2d7050fd
RS
9085 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9086 return gen_imagpart (mode, op0);
7308a047
RS
9087
9088 case CONJ_EXPR:
9089 {
62acb978 9090 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 9091 rtx imag_t;
6551fa4d 9092 rtx insns;
3a94c984
KH
9093
9094 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7308a047
RS
9095
9096 if (! target)
d6a5ac33 9097 target = gen_reg_rtx (mode);
3a94c984 9098
6551fa4d 9099 start_sequence ();
7308a047
RS
9100
9101 /* Store the realpart and the negated imagpart to target. */
62acb978
RK
9102 emit_move_insn (gen_realpart (partmode, target),
9103 gen_realpart (partmode, op0));
7308a047 9104
62acb978 9105 imag_t = gen_imagpart (partmode, target);
91ce572a 9106 temp = expand_unop (partmode,
0fb7aeda
KH
9107 ! unsignedp && flag_trapv
9108 && (GET_MODE_CLASS(partmode) == MODE_INT)
9109 ? negv_optab : neg_optab,
3a94c984 9110 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
9111 if (temp != imag_t)
9112 emit_move_insn (imag_t, temp);
9113
6551fa4d
JW
9114 insns = get_insns ();
9115 end_sequence ();
9116
3a94c984 9117 /* Conjugate should appear as a single unit.
d6a5ac33 9118 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
9119 each with a separate pseudo as destination.
9120 It's not correct for flow to treat them as a unit. */
6d6e61ce 9121 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
9122 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9123 else
2f937369 9124 emit_insn (insns);
7308a047
RS
9125
9126 return target;
9127 }
9128
e976b8b2
MS
9129 case TRY_CATCH_EXPR:
9130 {
9131 tree handler = TREE_OPERAND (exp, 1);
9132
9133 expand_eh_region_start ();
9134
9135 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9136
52a11cbf 9137 expand_eh_region_end_cleanup (handler);
e976b8b2
MS
9138
9139 return op0;
9140 }
9141
b335b813
PB
9142 case TRY_FINALLY_EXPR:
9143 {
9144 tree try_block = TREE_OPERAND (exp, 0);
9145 tree finally_block = TREE_OPERAND (exp, 1);
b335b813 9146
8ad8135a 9147 if (!optimize || unsafe_for_reeval (finally_block) > 1)
8943a0b4
RH
9148 {
9149 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9150 is not sufficient, so we cannot expand the block twice.
9151 So we play games with GOTO_SUBROUTINE_EXPR to let us
9152 expand the thing only once. */
8ad8135a
RH
9153 /* When not optimizing, we go ahead with this form since
9154 (1) user breakpoints operate more predictably without
9155 code duplication, and
9156 (2) we're not running any of the global optimizers
9157 that would explode in time/space with the highly
9158 connected CFG created by the indirect branching. */
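/* Shape of what is emitted below (labels are illustrative):

         <try_block, with the cleanup live>
         goto done;
       finally:
         <finally_block>
         jump *return_link;
       done:

   so FINALLY_BLOCK is expanded once and entered like a subroutine
   from every exit path.  */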
8943a0b4
RH
9159
9160 rtx finally_label = gen_label_rtx ();
9161 rtx done_label = gen_label_rtx ();
9162 rtx return_link = gen_reg_rtx (Pmode);
9163 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9164 (tree) finally_label, (tree) return_link);
9165 TREE_SIDE_EFFECTS (cleanup) = 1;
9166
9167 /* Start a new binding layer that will keep track of all cleanup
9168 actions to be performed. */
9169 expand_start_bindings (2);
9170 target_temp_slot_level = temp_slot_level;
9171
9172 expand_decl_cleanup (NULL_TREE, cleanup);
9173 op0 = expand_expr (try_block, target, tmode, modifier);
9174
9175 preserve_temp_slots (op0);
9176 expand_end_bindings (NULL_TREE, 0, 0);
9177 emit_jump (done_label);
9178 emit_label (finally_label);
9179 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9180 emit_indirect_jump (return_link);
9181 emit_label (done_label);
9182 }
9183 else
9184 {
9185 expand_start_bindings (2);
9186 target_temp_slot_level = temp_slot_level;
b335b813 9187
8943a0b4
RH
9188 expand_decl_cleanup (NULL_TREE, finally_block);
9189 op0 = expand_expr (try_block, target, tmode, modifier);
b335b813 9190
8943a0b4
RH
9191 preserve_temp_slots (op0);
9192 expand_end_bindings (NULL_TREE, 0, 0);
9193 }
b335b813 9194
b335b813
PB
9195 return op0;
9196 }
9197
3a94c984 9198 case GOTO_SUBROUTINE_EXPR:
b335b813
PB
9199 {
9200 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9201 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9202 rtx return_address = gen_label_rtx ();
3a94c984
KH
9203 emit_move_insn (return_link,
9204 gen_rtx_LABEL_REF (Pmode, return_address));
b335b813
PB
9205 emit_jump (subr);
9206 emit_label (return_address);
9207 return const0_rtx;
9208 }
9209
d3707adb
RH
9210 case VA_ARG_EXPR:
9211 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9212
52a11cbf 9213 case EXC_PTR_EXPR:
86c99549 9214 return get_exception_pointer (cfun);
52a11cbf 9215
67231816
RH
9216 case FDESC_EXPR:
9217 /* Function descriptors are not valid except for as
9218 initialization constants, and should not be expanded. */
9219 abort ();
9220
bbf6f052 9221 default:
c9d892a8 9222 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
bbf6f052
RK
9223 }
9224
9225 /* Here to do an ordinary binary operator, generating an instruction
9226 from the optab already placed in `this_optab'. */
9227 binop:
e5e809f4 9228 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
9229 subtarget = 0;
9230 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 9231 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
9232 binop2:
9233 temp = expand_binop (mode, this_optab, op0, op1, target,
9234 unsignedp, OPTAB_LIB_WIDEN);
9235 if (temp == 0)
9236 abort ();
9237 return temp;
9238}
b93a436e 9239\f
1ce7f3c2
RK
9240/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9241 when applied to the address of EXP produces an address known to be
9242 aligned more than BIGGEST_ALIGNMENT. */
9243
9244static int
9245is_aligning_offset (offset, exp)
9246 tree offset;
9247 tree exp;
9248{
9249 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9250 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9251 || TREE_CODE (offset) == NOP_EXPR
9252 || TREE_CODE (offset) == CONVERT_EXPR
9253 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9254 offset = TREE_OPERAND (offset, 0);
9255
9256 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9257 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9258 if (TREE_CODE (offset) != BIT_AND_EXPR
9259 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9260 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9261 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9262 return 0;
9263
9264 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9265 It must be NEGATE_EXPR. Then strip any more conversions. */
9266 offset = TREE_OPERAND (offset, 0);
9267 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9268 || TREE_CODE (offset) == NOP_EXPR
9269 || TREE_CODE (offset) == CONVERT_EXPR)
9270 offset = TREE_OPERAND (offset, 0);
9271
9272 if (TREE_CODE (offset) != NEGATE_EXPR)
9273 return 0;
9274
9275 offset = TREE_OPERAND (offset, 0);
9276 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9277 || TREE_CODE (offset) == NOP_EXPR
9278 || TREE_CODE (offset) == CONVERT_EXPR)
9279 offset = TREE_OPERAND (offset, 0);
9280
9281 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9282 whose type is the same as EXP. */
9283 return (TREE_CODE (offset) == ADDR_EXPR
9284 && (TREE_OPERAND (offset, 0) == exp
9285 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9286 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9287 == TREE_TYPE (exp)))));
9288}
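/* The shape recognized above arises from alignment idioms such as
   (buf and ALIGN are illustrative; ALIGN is a power of 2 exceeding
   BIGGEST_ALIGNMENT):

       p = (char *) &buf + ((- (long) &buf) & (ALIGN - 1));

   adding ((-&buf) & (ALIGN - 1)) rounds the address up to the next
   multiple of ALIGN, which is the fact the caller wants to prove.  */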
9289\f
e0a2f705 9290/* Return the tree node if an ARG corresponds to a string constant or zero
cc2902df 9291 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
fed3cef0
RK
9292 in bytes within the string that ARG is accessing. The type of the
9293 offset will be `sizetype'. */
b93a436e 9294
28f4ec01 9295tree
b93a436e
JL
9296string_constant (arg, ptr_offset)
9297 tree arg;
9298 tree *ptr_offset;
9299{
9300 STRIP_NOPS (arg);
9301
9302 if (TREE_CODE (arg) == ADDR_EXPR
9303 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9304 {
fed3cef0 9305 *ptr_offset = size_zero_node;
b93a436e
JL
9306 return TREE_OPERAND (arg, 0);
9307 }
9308 else if (TREE_CODE (arg) == PLUS_EXPR)
9309 {
9310 tree arg0 = TREE_OPERAND (arg, 0);
9311 tree arg1 = TREE_OPERAND (arg, 1);
9312
9313 STRIP_NOPS (arg0);
9314 STRIP_NOPS (arg1);
9315
9316 if (TREE_CODE (arg0) == ADDR_EXPR
9317 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
bbf6f052 9318 {
fed3cef0 9319 *ptr_offset = convert (sizetype, arg1);
b93a436e 9320 return TREE_OPERAND (arg0, 0);
bbf6f052 9321 }
b93a436e
JL
9322 else if (TREE_CODE (arg1) == ADDR_EXPR
9323 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
bbf6f052 9324 {
fed3cef0 9325 *ptr_offset = convert (sizetype, arg0);
b93a436e 9326 return TREE_OPERAND (arg1, 0);
bbf6f052 9327 }
b93a436e 9328 }
ca695ac9 9329
b93a436e
JL
9330 return 0;
9331}
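/* Sketch of use: for an argument such as "hello" + 2 (a PLUS_EXPR
   of an ADDR_EXPR of the STRING_CST and 2), this returns the
   STRING_CST node and sets *ptr_offset to a sizetype 2, so callers
   such as the strlen expander can inspect the bytes directly.  */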
ca695ac9 9332\f
b93a436e
JL
9333/* Expand code for a post- or pre- increment or decrement
9334 and return the RTX for the result.
9335 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
1499e0a8 9336
b93a436e
JL
9337static rtx
9338expand_increment (exp, post, ignore)
b3694847 9339 tree exp;
b93a436e 9340 int post, ignore;
ca695ac9 9341{
b3694847
SS
9342 rtx op0, op1;
9343 rtx temp, value;
9344 tree incremented = TREE_OPERAND (exp, 0);
b93a436e
JL
9345 optab this_optab = add_optab;
9346 int icode;
9347 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9348 int op0_is_copy = 0;
9349 int single_insn = 0;
9350 /* 1 means we can't store into OP0 directly,
9351 because it is a subreg narrower than a word,
9352 and we don't dare clobber the rest of the word. */
9353 int bad_subreg = 0;
1499e0a8 9354
b93a436e
JL
9355 /* Stabilize any component ref that might need to be
9356 evaluated more than once below. */
9357 if (!post
9358 || TREE_CODE (incremented) == BIT_FIELD_REF
9359 || (TREE_CODE (incremented) == COMPONENT_REF
9360 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9361 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9362 incremented = stabilize_reference (incremented);
9363 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9364 ones into save exprs so that they don't accidentally get evaluated
9365 more than once by the code below. */
9366 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9367 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9368 incremented = save_expr (incremented);
e9a25f70 9369
b93a436e
JL
9370 /* Compute the operands as RTX.
9371 Note whether OP0 is the actual lvalue or a copy of it:
9372 I believe it is a copy iff it is a register or subreg
6d2f8887 9373 and insns were generated in computing it. */
e9a25f70 9374
b93a436e 9375 temp = get_last_insn ();
37a08a29 9376 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
e9a25f70 9377
b93a436e
JL
9378 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9379 in place but instead must do sign- or zero-extension during assignment,
9380 so we copy it into a new register and let the code below use it as
9381 a copy.
e9a25f70 9382
b93a436e
JL
9383 Note that we can safely modify this SUBREG since it is known not to be
9384 shared (it was made by the expand_expr call above). */
9385
9386 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9387 {
9388 if (post)
9389 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9390 else
9391 bad_subreg = 1;
9392 }
9393 else if (GET_CODE (op0) == SUBREG
9394 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9395 {
9396 /* We cannot increment this SUBREG in place. If we are
9397 post-incrementing, get a copy of the old value. Otherwise,
9398 just mark that we cannot increment in place. */
9399 if (post)
9400 op0 = copy_to_reg (op0);
9401 else
9402 bad_subreg = 1;
e9a25f70
JL
9403 }
9404
b93a436e
JL
9405 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9406 && temp != get_last_insn ());
37a08a29 9407 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
1499e0a8 9408
b93a436e
JL
9409 /* Decide whether incrementing or decrementing. */
9410 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9411 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9412 this_optab = sub_optab;
9413
9414 /* Convert decrement by a constant into a negative increment. */
9415 if (this_optab == sub_optab
9416 && GET_CODE (op1) == CONST_INT)
ca695ac9 9417 {
3a94c984 9418 op1 = GEN_INT (-INTVAL (op1));
b93a436e 9419 this_optab = add_optab;
ca695ac9 9420 }
1499e0a8 9421
91ce572a 9422 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
505ddab6 9423 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
91ce572a 9424
b93a436e
JL
9425 /* For a preincrement, see if we can do this with a single instruction. */
9426 if (!post)
9427 {
9428 icode = (int) this_optab->handlers[(int) mode].insn_code;
9429 if (icode != (int) CODE_FOR_nothing
9430 /* Make sure that OP0 is valid for operands 0 and 1
9431 of the insn we want to queue. */
a995e389
RH
9432 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9433 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9434 && (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e
JL
9435 single_insn = 1;
9436 }
bbf6f052 9437
b93a436e
JL
9438 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9439 then we cannot just increment OP0. We must therefore contrive to
9440 increment the original value. Then, for postincrement, we can return
9441 OP0 since it is a copy of the old value. For preincrement, expand here
9442 unless we can do it with a single insn.
bbf6f052 9443
b93a436e
JL
9444 Likewise if storing directly into OP0 would clobber high bits
9445 we need to preserve (bad_subreg). */
9446 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 9447 {
b93a436e
JL
9448 /* This is the easiest way to increment the value wherever it is.
9449 Problems with multiple evaluation of INCREMENTED are prevented
9450 because either (1) it is a component_ref or preincrement,
9451 in which case it was stabilized above, or (2) it is an array_ref
9452 with constant index in an array in a register, which is
9453 safe to reevaluate. */
9454 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9455 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9456 ? MINUS_EXPR : PLUS_EXPR),
9457 TREE_TYPE (exp),
9458 incremented,
9459 TREE_OPERAND (exp, 1));
a358cee0 9460
b93a436e
JL
9461 while (TREE_CODE (incremented) == NOP_EXPR
9462 || TREE_CODE (incremented) == CONVERT_EXPR)
9463 {
9464 newexp = convert (TREE_TYPE (incremented), newexp);
9465 incremented = TREE_OPERAND (incremented, 0);
9466 }
bbf6f052 9467
b93a436e
JL
9468 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9469 return post ? op0 : temp;
9470 }
bbf6f052 9471
b93a436e
JL
9472 if (post)
9473 {
9474 /* We have a true reference to the value in OP0.
9475 If there is an insn to add or subtract in this mode, queue it.
9476 Queueing the increment insn avoids the register shuffling
9477 that often results if we must increment now and first save
9478 the old value for subsequent use. */
bbf6f052 9479
b93a436e
JL
9480#if 0 /* Turned off to avoid making extra insn for indexed memref. */
9481 op0 = stabilize (op0);
9482#endif
41dfd40c 9483
b93a436e
JL
9484 icode = (int) this_optab->handlers[(int) mode].insn_code;
9485 if (icode != (int) CODE_FOR_nothing
9486 /* Make sure that OP0 is valid for operands 0 and 1
9487 of the insn we want to queue. */
a995e389
RH
9488 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9489 && (*insn_data[icode].operand[1].predicate) (op0, mode))
b93a436e 9490 {
a995e389 9491 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9492 op1 = force_reg (mode, op1);
bbf6f052 9493
b93a436e
JL
9494 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9495 }
9496 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9497 {
9498 rtx addr = (general_operand (XEXP (op0, 0), mode)
9499 ? force_reg (Pmode, XEXP (op0, 0))
9500 : copy_to_reg (XEXP (op0, 0)));
9501 rtx temp, result;
ca695ac9 9502
792760b9 9503 op0 = replace_equiv_address (op0, addr);
b93a436e 9504 temp = force_reg (GET_MODE (op0), op0);
a995e389 9505 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9506 op1 = force_reg (mode, op1);
ca695ac9 9507
b93a436e
JL
9508 /* The increment queue is LIFO, so we have to `queue'
9509 the instructions in reverse order. */
9510 enqueue_insn (op0, gen_move_insn (op0, temp));
9511 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9512 return result;
bbf6f052
RK
9513 }
9514 }
ca695ac9 9515
b93a436e
JL
9516 /* Preincrement, or we can't increment with one simple insn. */
9517 if (post)
9518 /* Save a copy of the value before inc or dec, to return it later. */
9519 temp = value = copy_to_reg (op0);
9520 else
9521 /* Arrange to return the incremented value. */
9522 /* Copy the rtx because expand_binop will protect from the queue,
9523 and the results of that would be invalid for us to return
9524 if our caller does emit_queue before using our result. */
9525 temp = copy_rtx (value = op0);
bbf6f052 9526
b93a436e 9527 /* Increment however we can. */
37a08a29 9528 op1 = expand_binop (mode, this_optab, value, op1, op0,
b93a436e 9529 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
37a08a29 9530
b93a436e
JL
9531 /* Make sure the value is stored into OP0. */
9532 if (op1 != op0)
9533 emit_move_insn (op0, op1);
5718612f 9534
b93a436e
JL
9535 return temp;
9536}
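/* A minimal sketch (plain C, illustrative only) of the pre/post
   distinction implemented above, where `lval' stands for the expanded
   OP0 and `n' for the increment:

     pre:   lval = lval + n;  result = lval;
     post:  result = lval;    lval = lval + n;

   The queued postincrement path above returns the old value in OP0
   and defers the add until emit_queue time.  */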
9537\f
b93a436e
JL
9538/* At the start of a function, record that we have no previously-pushed
9539 arguments waiting to be popped. */
bbf6f052 9540
b93a436e
JL
9541void
9542init_pending_stack_adjust ()
9543{
9544 pending_stack_adjust = 0;
9545}
bbf6f052 9546
b93a436e 9547/* When exiting from a function, if safe, clear out any pending stack adjust
060fbabf
JL
9548 so the adjustment won't get done.
9549
9550 Note, if the current function calls alloca, then it must have a
9551 frame pointer regardless of the value of flag_omit_frame_pointer. */
bbf6f052 9552
b93a436e
JL
9553void
9554clear_pending_stack_adjust ()
9555{
9556#ifdef EXIT_IGNORE_STACK
9557 if (optimize > 0
060fbabf
JL
9558 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9559 && EXIT_IGNORE_STACK
b93a436e
JL
9560 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9561 && ! flag_inline_functions)
1503a7ec
JH
9562 {
9563 stack_pointer_delta -= pending_stack_adjust,
9564 pending_stack_adjust = 0;
9565 }
b93a436e
JL
9566#endif
9567}
bbf6f052 9568
b93a436e
JL
9569/* Pop any previously-pushed arguments that have not been popped yet. */
9570
9571void
9572do_pending_stack_adjust ()
9573{
9574 if (inhibit_defer_pop == 0)
ca695ac9 9575 {
b93a436e
JL
9576 if (pending_stack_adjust != 0)
9577 adjust_stack (GEN_INT (pending_stack_adjust));
9578 pending_stack_adjust = 0;
bbf6f052 9579 }
bbf6f052
RK
9580}
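/* For instance (a sketch, assuming a target where call arguments are
   popped with an explicit stack adjustment): if three consecutive
   calls each leave 16 bytes of arguments to be popped and no adjust
   is forced in between, pending_stack_adjust accumulates to 48 and
   do_pending_stack_adjust emits one 48-byte adjust_stack instead of
   three 16-byte ones.  */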
9581\f
b93a436e 9582/* Expand conditional expressions. */
bbf6f052 9583
b93a436e
JL
9584/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9585 LABEL is an rtx of code CODE_LABEL, in this function and all the
9586 functions here. */
bbf6f052 9587
b93a436e
JL
9588void
9589jumpifnot (exp, label)
ca695ac9 9590 tree exp;
b93a436e 9591 rtx label;
bbf6f052 9592{
b93a436e
JL
9593 do_jump (exp, label, NULL_RTX);
9594}
bbf6f052 9595
b93a436e 9596/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
ca695ac9 9597
b93a436e
JL
9598void
9599jumpif (exp, label)
9600 tree exp;
9601 rtx label;
9602{
9603 do_jump (exp, NULL_RTX, label);
9604}
ca695ac9 9605
b93a436e
JL
9606/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9607 the result is zero, or IF_TRUE_LABEL if the result is one.
9608 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9609 meaning fall through in that case.
ca695ac9 9610
b93a436e
JL
9611 do_jump always does any pending stack adjust except when it does not
9612 actually perform a jump. An example where there is no jump
9613 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
ca695ac9 9614
b93a436e
JL
9615 This function is responsible for optimizing cases such as
9616 &&, || and comparison operators in EXP. */
5718612f 9617
b93a436e
JL
9618void
9619do_jump (exp, if_false_label, if_true_label)
9620 tree exp;
9621 rtx if_false_label, if_true_label;
9622{
b3694847 9623 enum tree_code code = TREE_CODE (exp);
b93a436e
JL
9624 /* Some cases need to create a label to jump to
9625 in order to properly fall through.
9626 These cases set DROP_THROUGH_LABEL nonzero. */
9627 rtx drop_through_label = 0;
9628 rtx temp;
b93a436e
JL
9629 int i;
9630 tree type;
9631 enum machine_mode mode;
ca695ac9 9632
dbecbbe4
JL
9633#ifdef MAX_INTEGER_COMPUTATION_MODE
9634 check_max_integer_computation_mode (exp);
9635#endif
9636
b93a436e 9637 emit_queue ();
ca695ac9 9638
b93a436e 9639 switch (code)
ca695ac9 9640 {
b93a436e 9641 case ERROR_MARK:
ca695ac9 9642 break;
bbf6f052 9643
b93a436e
JL
9644 case INTEGER_CST:
9645 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9646 if (temp)
9647 emit_jump (temp);
9648 break;
bbf6f052 9649
b93a436e
JL
9650#if 0
9651 /* This is not true with #pragma weak */
9652 case ADDR_EXPR:
9653 /* The address of something can never be zero. */
9654 if (if_true_label)
9655 emit_jump (if_true_label);
9656 break;
9657#endif
bbf6f052 9658
b93a436e
JL
9659 case NOP_EXPR:
9660 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9661 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
b4e3fabb
RK
9662 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9663 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
b93a436e
JL
9664 goto normal;
9665 case CONVERT_EXPR:
9666 /* If we are narrowing the operand, we have to do the compare in the
9667 narrower mode. */
9668 if ((TYPE_PRECISION (TREE_TYPE (exp))
9669 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9670 goto normal;
9671 case NON_LVALUE_EXPR:
9672 case REFERENCE_EXPR:
9673 case ABS_EXPR:
9674 case NEGATE_EXPR:
9675 case LROTATE_EXPR:
9676 case RROTATE_EXPR:
cc2902df 9677 /* These cannot change zero->nonzero or vice versa. */
b93a436e
JL
9678 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9679 break;
bbf6f052 9680
14a774a9
RK
9681 case WITH_RECORD_EXPR:
9682 /* Put the object on the placeholder list, recurse through our first
9683 operand, and pop the list. */
9684 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9685 placeholder_list);
9686 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9687 placeholder_list = TREE_CHAIN (placeholder_list);
9688 break;
9689
b93a436e
JL
9690#if 0
9691 /* This is never less insns than evaluating the PLUS_EXPR followed by
9692 a test and can be longer if the test is eliminated. */
9693 case PLUS_EXPR:
9694 /* Reduce to minus. */
9695 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9696 TREE_OPERAND (exp, 0),
9697 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9698 TREE_OPERAND (exp, 1))));
9699 /* Process as MINUS. */
ca695ac9 9700#endif
bbf6f052 9701
b93a436e 9702 case MINUS_EXPR:
cc2902df 9703 /* Nonzero iff operands of minus differ. */
b30f05db
BS
9704 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9705 TREE_OPERAND (exp, 0),
9706 TREE_OPERAND (exp, 1)),
9707 NE, NE, if_false_label, if_true_label);
b93a436e 9708 break;
bbf6f052 9709
b93a436e
JL
9710 case BIT_AND_EXPR:
9711 /* If we are AND'ing with a small constant, do this comparison in the
9712 smallest type that fits. If the machine doesn't have comparisons
9713 that small, it will be converted back to the wider comparison.
9714 This helps if we are testing the sign bit of a narrower object.
9715 combine can't do this for us because it can't know whether a
9716 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
bbf6f052 9717
b93a436e
JL
9718 if (! SLOW_BYTE_ACCESS
9719 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9720 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
05bccae2 9721 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
b93a436e 9722 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
b0c48229 9723 && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
b93a436e
JL
9724 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9725 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9726 != CODE_FOR_nothing))
9727 {
9728 do_jump (convert (type, exp), if_false_label, if_true_label);
9729 break;
9730 }
9731 goto normal;
bbf6f052 9732
b93a436e
JL
9733 case TRUTH_NOT_EXPR:
9734 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9735 break;
bbf6f052 9736
b93a436e
JL
9737 case TRUTH_ANDIF_EXPR:
9738 if (if_false_label == 0)
9739 if_false_label = drop_through_label = gen_label_rtx ();
9740 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9741 start_cleanup_deferral ();
9742 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9743 end_cleanup_deferral ();
9744 break;
bbf6f052 9745
b93a436e
JL
9746 case TRUTH_ORIF_EXPR:
9747 if (if_true_label == 0)
9748 if_true_label = drop_through_label = gen_label_rtx ();
9749 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9750 start_cleanup_deferral ();
9751 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9752 end_cleanup_deferral ();
9753 break;
bbf6f052 9754
b93a436e
JL
9755 case COMPOUND_EXPR:
9756 push_temp_slots ();
9757 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9758 preserve_temp_slots (NULL_RTX);
9759 free_temp_slots ();
9760 pop_temp_slots ();
9761 emit_queue ();
9762 do_pending_stack_adjust ();
9763 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9764 break;
bbf6f052 9765
b93a436e
JL
9766 case COMPONENT_REF:
9767 case BIT_FIELD_REF:
9768 case ARRAY_REF:
b4e3fabb 9769 case ARRAY_RANGE_REF:
b93a436e 9770 {
770ae6cc
RK
9771 HOST_WIDE_INT bitsize, bitpos;
9772 int unsignedp;
b93a436e
JL
9773 enum machine_mode mode;
9774 tree type;
9775 tree offset;
9776 int volatilep = 0;
bbf6f052 9777
b93a436e
JL
9778 /* Get description of this reference. We don't actually care
9779 about the underlying object here. */
19caa751 9780 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
a06ef755 9781 &unsignedp, &volatilep);
bbf6f052 9782
b0c48229 9783 type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
b93a436e
JL
9784 if (! SLOW_BYTE_ACCESS
9785 && type != 0 && bitsize >= 0
9786 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9787 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9788 != CODE_FOR_nothing))
9789 {
9790 do_jump (convert (type, exp), if_false_label, if_true_label);
9791 break;
9792 }
9793 goto normal;
9794 }
bbf6f052 9795
b93a436e
JL
9796 case COND_EXPR:
9797 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9798 if (integer_onep (TREE_OPERAND (exp, 1))
9799 && integer_zerop (TREE_OPERAND (exp, 2)))
9800 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
bbf6f052 9801
b93a436e
JL
9802 else if (integer_zerop (TREE_OPERAND (exp, 1))
9803 && integer_onep (TREE_OPERAND (exp, 2)))
9804 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
bbf6f052 9805
b93a436e
JL
9806 else
9807 {
b3694847 9808 rtx label1 = gen_label_rtx ();
b93a436e 9809 drop_through_label = gen_label_rtx ();
bbf6f052 9810
b93a436e 9811 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
bbf6f052 9812
b93a436e
JL
9813 start_cleanup_deferral ();
9814 /* Now the THEN-expression. */
9815 do_jump (TREE_OPERAND (exp, 1),
9816 if_false_label ? if_false_label : drop_through_label,
9817 if_true_label ? if_true_label : drop_through_label);
9818 /* In case the do_jump just above never jumps. */
9819 do_pending_stack_adjust ();
9820 emit_label (label1);
bbf6f052 9821
b93a436e
JL
9822 /* Now the ELSE-expression. */
9823 do_jump (TREE_OPERAND (exp, 2),
9824 if_false_label ? if_false_label : drop_through_label,
9825 if_true_label ? if_true_label : drop_through_label);
9826 end_cleanup_deferral ();
9827 }
9828 break;
bbf6f052 9829
b93a436e
JL
9830 case EQ_EXPR:
9831 {
9832 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 9833
9ec36da5
JL
9834 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9835 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8d62b411
AS
9836 {
9837 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9838 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9839 do_jump
9840 (fold
9841 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9842 fold (build (EQ_EXPR, TREE_TYPE (exp),
9843 fold (build1 (REALPART_EXPR,
9844 TREE_TYPE (inner_type),
9845 exp0)),
9846 fold (build1 (REALPART_EXPR,
9847 TREE_TYPE (inner_type),
9848 exp1)))),
9849 fold (build (EQ_EXPR, TREE_TYPE (exp),
9850 fold (build1 (IMAGPART_EXPR,
9851 TREE_TYPE (inner_type),
9852 exp0)),
9853 fold (build1 (IMAGPART_EXPR,
9854 TREE_TYPE (inner_type),
9855 exp1)))))),
9856 if_false_label, if_true_label);
9857 }
9ec36da5
JL
9858
9859 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9860 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9861
b93a436e 9862 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
1eb8759b 9863 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
b93a436e
JL
9864 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9865 else
b30f05db 9866 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
b93a436e
JL
9867 break;
9868 }
bbf6f052 9869
b93a436e
JL
9870 case NE_EXPR:
9871 {
9872 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 9873
9ec36da5
JL
9874 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9875 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8d62b411
AS
9876 {
9877 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9878 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9879 do_jump
9880 (fold
9881 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9882 fold (build (NE_EXPR, TREE_TYPE (exp),
9883 fold (build1 (REALPART_EXPR,
9884 TREE_TYPE (inner_type),
9885 exp0)),
9886 fold (build1 (REALPART_EXPR,
9887 TREE_TYPE (inner_type),
9888 exp1)))),
9889 fold (build (NE_EXPR, TREE_TYPE (exp),
9890 fold (build1 (IMAGPART_EXPR,
9891 TREE_TYPE (inner_type),
9892 exp0)),
9893 fold (build1 (IMAGPART_EXPR,
9894 TREE_TYPE (inner_type),
9895 exp1)))))),
9896 if_false_label, if_true_label);
9897 }
9ec36da5
JL
9898
9899 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9900 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9901
b93a436e 9902 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
1eb8759b 9903 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
b93a436e
JL
9904 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9905 else
b30f05db 9906 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
b93a436e
JL
9907 break;
9908 }
bbf6f052 9909
b93a436e 9910 case LT_EXPR:
1c0290ea
BS
9911 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9912 if (GET_MODE_CLASS (mode) == MODE_INT
1eb8759b 9913 && ! can_compare_p (LT, mode, ccp_jump))
b93a436e
JL
9914 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9915 else
b30f05db 9916 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
b93a436e 9917 break;
bbf6f052 9918
b93a436e 9919 case LE_EXPR:
1c0290ea
BS
9920 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9921 if (GET_MODE_CLASS (mode) == MODE_INT
1eb8759b 9922 && ! can_compare_p (LE, mode, ccp_jump))
b93a436e
JL
9923 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9924 else
b30f05db 9925 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
b93a436e 9926 break;
bbf6f052 9927
b93a436e 9928 case GT_EXPR:
1c0290ea
BS
9929 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9930 if (GET_MODE_CLASS (mode) == MODE_INT
1eb8759b 9931 && ! can_compare_p (GT, mode, ccp_jump))
b93a436e
JL
9932 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9933 else
b30f05db 9934 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
b93a436e 9935 break;
bbf6f052 9936
b93a436e 9937 case GE_EXPR:
1c0290ea
BS
9938 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9939 if (GET_MODE_CLASS (mode) == MODE_INT
1eb8759b 9940 && ! can_compare_p (GE, mode, ccp_jump))
b93a436e
JL
9941 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9942 else
b30f05db 9943 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
b93a436e 9944 break;
bbf6f052 9945
1eb8759b
RH
9946 case UNORDERED_EXPR:
9947 case ORDERED_EXPR:
9948 {
9949 enum rtx_code cmp, rcmp;
9950 int do_rev;
9951
9952 if (code == UNORDERED_EXPR)
9953 cmp = UNORDERED, rcmp = ORDERED;
9954 else
9955 cmp = ORDERED, rcmp = UNORDERED;
3a94c984 9956 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
1eb8759b
RH
9957
9958 do_rev = 0;
9959 if (! can_compare_p (cmp, mode, ccp_jump)
9960 && (can_compare_p (rcmp, mode, ccp_jump)
9961 /* If the target doesn't provide either UNORDERED or ORDERED
9962 comparisons, canonicalize on UNORDERED for the library. */
9963 || rcmp == UNORDERED))
9964 do_rev = 1;
9965
0fb7aeda 9966 if (! do_rev)
1eb8759b
RH
9967 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9968 else
9969 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9970 }
9971 break;
9972
9973 {
9974 enum rtx_code rcode1;
9975 enum tree_code tcode2;
9976
9977 case UNLT_EXPR:
9978 rcode1 = UNLT;
9979 tcode2 = LT_EXPR;
9980 goto unordered_bcc;
9981 case UNLE_EXPR:
9982 rcode1 = UNLE;
9983 tcode2 = LE_EXPR;
9984 goto unordered_bcc;
9985 case UNGT_EXPR:
9986 rcode1 = UNGT;
9987 tcode2 = GT_EXPR;
9988 goto unordered_bcc;
9989 case UNGE_EXPR:
9990 rcode1 = UNGE;
9991 tcode2 = GE_EXPR;
9992 goto unordered_bcc;
9993 case UNEQ_EXPR:
9994 rcode1 = UNEQ;
9995 tcode2 = EQ_EXPR;
9996 goto unordered_bcc;
7913f3d0 9997
1eb8759b 9998 unordered_bcc:
0fb7aeda 9999 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
1eb8759b
RH
10000 if (can_compare_p (rcode1, mode, ccp_jump))
10001 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
10002 if_true_label);
10003 else
10004 {
10005 tree op0 = save_expr (TREE_OPERAND (exp, 0));
10006 tree op1 = save_expr (TREE_OPERAND (exp, 1));
10007 tree cmp0, cmp1;
10008
3a94c984 10009 /* If the target doesn't support combined unordered
1eb8759b
RH
10010 compares, decompose into UNORDERED + comparison. */
10011 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
10012 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
10013 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
10014 do_jump (exp, if_false_label, if_true_label);
10015 }
10016 }
10017 break;
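/* A concrete case of the decomposition above (illustrative): if the
   target cannot branch on UNLT directly, the test for UNLT_EXPR is
   rewritten as `UNORDERED (x, y) || x < y', so a NaN in either
   operand makes the whole test true, matching the unordered-or-less
   relation of IEEE arithmetic.  */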
10018
5f2d6cfa
MM
10019 /* Special case:
10020 __builtin_expect (<test>, 0) and
10021 __builtin_expect (<test>, 1)
10022
10023 We need to do this here, so that <test> is not converted to an SCC
10024 operation on machines that use condition code registers and COMPARE
10025 like the PowerPC, and then the jump is done based on whether the SCC
10026 operation produced a 1 or 0. */
10027 case CALL_EXPR:
10028 /* Check for a built-in function. */
10029 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
10030 {
10031 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
10032 tree arglist = TREE_OPERAND (exp, 1);
10033
10034 if (TREE_CODE (fndecl) == FUNCTION_DECL
10035 && DECL_BUILT_IN (fndecl)
10036 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
10037 && arglist != NULL_TREE
10038 && TREE_CHAIN (arglist) != NULL_TREE)
10039 {
10040 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
10041 if_true_label);
10042
10043 if (seq != NULL_RTX)
10044 {
10045 emit_insn (seq);
10046 return;
10047 }
10048 }
10049 }
10050 /* fall through and generate the normal code. */
10051
b93a436e
JL
10052 default:
10053 normal:
10054 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10055#if 0
10056 /* This is not needed any more and causes poor code since it causes
10057 comparisons and tests from non-SI objects to have different code
10058 sequences. */
10059 /* Copy to register to avoid generating bad insns by cse
10060 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10061 if (!cse_not_expected && GET_CODE (temp) == MEM)
10062 temp = copy_to_reg (temp);
ca695ac9 10063#endif
b93a436e 10064 do_pending_stack_adjust ();
b30f05db
BS
10065 /* Do any postincrements in the expression that was tested. */
10066 emit_queue ();
10067
0fb7aeda 10068 if (GET_CODE (temp) == CONST_INT
998a298e
GK
10069 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
10070 || GET_CODE (temp) == LABEL_REF)
b30f05db
BS
10071 {
10072 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
10073 if (target)
10074 emit_jump (target);
10075 }
b93a436e 10076 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
1eb8759b 10077 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
b93a436e
JL
10078 /* Note swapping the labels gives us not-equal. */
10079 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10080 else if (GET_MODE (temp) != VOIDmode)
b30f05db
BS
10081 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
10082 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
a06ef755 10083 GET_MODE (temp), NULL_RTX,
b30f05db 10084 if_false_label, if_true_label);
b93a436e
JL
10085 else
10086 abort ();
10087 }
bbf6f052 10088
b93a436e
JL
10089 if (drop_through_label)
10090 {
10091 /* If do_jump produces code that might be jumped around,
10092 do any stack adjusts from that code, before the place
10093 where control merges in. */
10094 do_pending_stack_adjust ();
10095 emit_label (drop_through_label);
10096 }
bbf6f052 10097}
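/* A sketch of the code do_jump produces for short-circuited operands
   (pseudo-RTL, illustrative only).  For `if (a && b) T; else F;' the
   TRUTH_ANDIF_EXPR case emits roughly

       (test a) -> jump Lfalse if zero
       (test b) -> jump Lfalse if zero
       ... code for T ...
     Lfalse:
       ... code for F ...

   so no boolean value is ever materialized in a register; each
   operand feeds its own conditional jump.  */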
b93a436e
JL
10098\f
10099/* Given a comparison expression EXP for values too wide to be compared
10100 with one insn, test the comparison and jump to the appropriate label.
10101 The code of EXP is ignored; we always test GT if SWAP is 0,
10102 and LT if SWAP is 1. */
bbf6f052 10103
b93a436e
JL
10104static void
10105do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10106 tree exp;
10107 int swap;
10108 rtx if_false_label, if_true_label;
10109{
10110 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10111 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10112 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b93a436e 10113 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
bbf6f052 10114
b30f05db 10115 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
f81497d9
RS
10116}
10117
b93a436e
JL
10118/* Compare OP0 with OP1, a word at a time, in mode MODE.
10119 UNSIGNEDP says to do unsigned comparison.
10120 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
f81497d9 10121
b93a436e
JL
10122void
10123do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10124 enum machine_mode mode;
10125 int unsignedp;
10126 rtx op0, op1;
10127 rtx if_false_label, if_true_label;
f81497d9 10128{
b93a436e
JL
10129 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10130 rtx drop_through_label = 0;
10131 int i;
f81497d9 10132
b93a436e
JL
10133 if (! if_true_label || ! if_false_label)
10134 drop_through_label = gen_label_rtx ();
10135 if (! if_true_label)
10136 if_true_label = drop_through_label;
10137 if (! if_false_label)
10138 if_false_label = drop_through_label;
f81497d9 10139
b93a436e
JL
10140 /* Compare a word at a time, high order first. */
10141 for (i = 0; i < nwords; i++)
10142 {
b93a436e 10143 rtx op0_word, op1_word;
bbf6f052 10144
b93a436e
JL
10145 if (WORDS_BIG_ENDIAN)
10146 {
10147 op0_word = operand_subword_force (op0, i, mode);
10148 op1_word = operand_subword_force (op1, i, mode);
10149 }
10150 else
10151 {
10152 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10153 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10154 }
bbf6f052 10155
b93a436e 10156 /* All but the high-order word must be compared as unsigned. */
b30f05db 10157 do_compare_rtx_and_jump (op0_word, op1_word, GT,
a06ef755 10158 (unsignedp || i > 0), word_mode, NULL_RTX,
b30f05db 10159 NULL_RTX, if_true_label);
bbf6f052 10160
b93a436e 10161 /* Consider lower words only if these are equal. */
b30f05db 10162 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
a06ef755 10163 NULL_RTX, NULL_RTX, if_false_label);
b93a436e 10164 }
bbf6f052 10165
b93a436e
JL
10166 if (if_false_label)
10167 emit_jump (if_false_label);
10168 if (drop_through_label)
10169 emit_label (drop_through_label);
bbf6f052
RK
10170}
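#if 0
/* An equivalent plain-C sketch of the loop above (hypothetical helper,
   not compiler code).  It assumes the operands are already split into
   NWORDS unsigned words, highest-order word first, and does every
   comparison unsigned; the real loop compares the high-order word
   signed when UNSIGNEDP is zero.  */
static int
example_gt_by_parts (const unsigned long *op0, const unsigned long *op1,
		     int nwords)
{
  int i;

  for (i = 0; i < nwords; i++)
    {
      if (op0[i] > op1[i])	/* High-order words decide first.  */
	return 1;
      if (op0[i] != op1[i])	/* op0[i] < op1[i]: lower words are moot.  */
	return 0;
    }
  return 0;			/* All words equal, so not greater.  */
}
#endif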
10171
b93a436e
JL
10172/* Given an EQ_EXPR expression EXP for values too wide to be compared
10173 with one insn, test the comparison and jump to the appropriate label. */
bbf6f052 10174
b93a436e
JL
10175static void
10176do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10177 tree exp;
10178 rtx if_false_label, if_true_label;
bbf6f052 10179{
b93a436e
JL
10180 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10181 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10182 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10183 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10184 int i;
10185 rtx drop_through_label = 0;
bbf6f052 10186
b93a436e
JL
10187 if (! if_false_label)
10188 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 10189
b93a436e 10190 for (i = 0; i < nwords; i++)
b30f05db
BS
10191 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10192 operand_subword_force (op1, i, mode),
10193 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
a06ef755 10194 word_mode, NULL_RTX, if_false_label, NULL_RTX);
bbf6f052 10195
b93a436e
JL
10196 if (if_true_label)
10197 emit_jump (if_true_label);
10198 if (drop_through_label)
10199 emit_label (drop_through_label);
bbf6f052 10200}
b93a436e
JL
10201\f
10202/* Jump according to whether OP0 is 0.
10203 We assume that OP0 has an integer mode that is too wide
10204 for the available compare insns. */
bbf6f052 10205
f5963e61 10206void
b93a436e
JL
10207do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10208 rtx op0;
10209 rtx if_false_label, if_true_label;
ca695ac9 10210{
b93a436e
JL
10211 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10212 rtx part;
10213 int i;
10214 rtx drop_through_label = 0;
bbf6f052 10215
b93a436e
JL
10216 /* The fastest way of doing this comparison on almost any machine is to
10217 "or" all the words and compare the result. If all have to be loaded
10218 from memory and this is a very wide item, it's possible this may
10219 be slower, but that's highly unlikely. */
bbf6f052 10220
b93a436e
JL
10221 part = gen_reg_rtx (word_mode);
10222 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10223 for (i = 1; i < nwords && part != 0; i++)
10224 part = expand_binop (word_mode, ior_optab, part,
10225 operand_subword_force (op0, i, GET_MODE (op0)),
10226 part, 1, OPTAB_WIDEN);
bbf6f052 10227
b93a436e
JL
10228 if (part != 0)
10229 {
b30f05db 10230 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
a06ef755 10231 NULL_RTX, if_false_label, if_true_label);
bbf6f052 10232
b93a436e
JL
10233 return;
10234 }
bbf6f052 10235
b93a436e
JL
10236 /* If we couldn't do the "or" simply, do this with a series of compares. */
10237 if (! if_false_label)
10238 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 10239
b93a436e 10240 for (i = 0; i < nwords; i++)
b30f05db 10241 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
a06ef755 10242 const0_rtx, EQ, 1, word_mode, NULL_RTX,
b30f05db 10243 if_false_label, NULL_RTX);
bbf6f052 10244
b93a436e
JL
10245 if (if_true_label)
10246 emit_jump (if_true_label);
0f41302f 10247
b93a436e
JL
10248 if (drop_through_label)
10249 emit_label (drop_through_label);
bbf6f052 10250}
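#if 0
/* Plain-C sketch of the "or" trick above (hypothetical helper, not
   compiler code): a single final compare decides all NWORDS words.  */
static int
example_is_zero_by_parts (const unsigned long *w, int nwords)
{
  unsigned long acc = 0;
  int i;

  for (i = 0; i < nwords; i++)
    acc |= w[i];		/* OR all the words together.  */
  return acc == 0;		/* Zero iff every word was zero.  */
}
#endif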
b93a436e 10251\f
b30f05db 10252/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
b93a436e
JL
10253 (including code to compute the values to be compared)
10254 and set (CC0) according to the result.
b30f05db 10255 The decision as to signed or unsigned comparison must be made by the caller.
bbf6f052 10256
b93a436e 10257 We force a stack adjustment unless there are currently
b30f05db 10258 things pushed on the stack that aren't yet used.
ca695ac9 10259
b30f05db 10260 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
a06ef755 10261 compared. */
b30f05db
BS
10262
10263rtx
a06ef755 10264compare_from_rtx (op0, op1, code, unsignedp, mode, size)
b3694847 10265 rtx op0, op1;
b30f05db
BS
10266 enum rtx_code code;
10267 int unsignedp;
10268 enum machine_mode mode;
10269 rtx size;
b93a436e 10270{
129e0ee8 10271 enum rtx_code ucode;
b30f05db 10272 rtx tem;
76bbe028 10273
b30f05db
BS
10274 /* If one operand is constant, make it the second one. Only do this
10275 if the other operand is not constant as well. */
ca695ac9 10276
8c9864f3 10277 if (swap_commutative_operands_p (op0, op1))
bbf6f052 10278 {
b30f05db
BS
10279 tem = op0;
10280 op0 = op1;
10281 op1 = tem;
10282 code = swap_condition (code);
ca695ac9 10283 }
bbf6f052 10284
b30f05db 10285 if (flag_force_mem)
b93a436e 10286 {
b30f05db
BS
10287 op0 = force_not_mem (op0);
10288 op1 = force_not_mem (op1);
10289 }
bbf6f052 10290
b30f05db
BS
10291 do_pending_stack_adjust ();
10292
129e0ee8
RS
10293 ucode = unsignedp ? unsigned_condition (code) : code;
10294 if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
b30f05db
BS
10295 return tem;
10296
10297#if 0
10298 /* There's no need to do this now that combine.c can eliminate lots of
10299 sign extensions. This can be less efficient in certain cases on other
10300 machines. */
10301
10302 /* If this is a signed equality comparison, we can do it as an
10303 unsigned comparison since zero-extension is cheaper than sign
10304 extension and comparisons with zero are done as unsigned. This is
10305 the case even on machines that can do fast sign extension, since
10306 zero-extension is easier to combine with other operations than
10307 sign-extension is. If we are comparing against a constant, we must
10308 convert it to what it would look like unsigned. */
10309 if ((code == EQ || code == NE) && ! unsignedp
10310 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10311 {
10312 if (GET_CODE (op1) == CONST_INT
10313 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10314 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10315 unsignedp = 1;
b93a436e
JL
10316 }
10317#endif
3a94c984 10318
a06ef755 10319 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
0f41302f 10320
b2e426a0 10321#if HAVE_cc0
b30f05db 10322 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
b2e426a0
IS
10323#else
10324 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
10325#endif
ca695ac9 10326}
bbf6f052 10327
b30f05db 10328/* Like do_compare_and_jump but expects the values to compare as two rtx's.
b93a436e 10329 The decision as to signed or unsigned comparison must be made by the caller.
bbf6f052 10330
b93a436e 10331 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
a06ef755 10332 compared. */
ca695ac9 10333
b30f05db 10334void
a06ef755 10335do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
b30f05db 10336 if_false_label, if_true_label)
b3694847 10337 rtx op0, op1;
b93a436e
JL
10338 enum rtx_code code;
10339 int unsignedp;
10340 enum machine_mode mode;
10341 rtx size;
b30f05db 10342 rtx if_false_label, if_true_label;
bbf6f052 10343{
129e0ee8 10344 enum rtx_code ucode;
b93a436e 10345 rtx tem;
b30f05db
BS
10346 int dummy_true_label = 0;
10347
10348 /* Reverse the comparison if that is safe and we want to jump if it is
10349 false. */
10350 if (! if_true_label && ! FLOAT_MODE_P (mode))
10351 {
10352 if_true_label = if_false_label;
10353 if_false_label = 0;
10354 code = reverse_condition (code);
10355 }
bbf6f052 10356
b93a436e
JL
10357 /* If one operand is constant, make it the second one. Only do this
10358 if the other operand is not constant as well. */
e7c33f54 10359
8c9864f3 10360 if (swap_commutative_operands_p (op0, op1))
ca695ac9 10361 {
b93a436e
JL
10362 tem = op0;
10363 op0 = op1;
10364 op1 = tem;
10365 code = swap_condition (code);
10366 }
bbf6f052 10367
b93a436e
JL
10368 if (flag_force_mem)
10369 {
10370 op0 = force_not_mem (op0);
10371 op1 = force_not_mem (op1);
10372 }
bbf6f052 10373
b93a436e 10374 do_pending_stack_adjust ();
ca695ac9 10375
129e0ee8
RS
10376 ucode = unsignedp ? unsigned_condition (code) : code;
10377 if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
b30f05db
BS
10378 {
10379 if (tem == const_true_rtx)
10380 {
10381 if (if_true_label)
10382 emit_jump (if_true_label);
10383 }
10384 else
10385 {
10386 if (if_false_label)
10387 emit_jump (if_false_label);
10388 }
10389 return;
10390 }
ca695ac9 10391
b93a436e
JL
10392#if 0
10393 /* There's no need to do this now that combine.c can eliminate lots of
10394 sign extensions. This can be less efficient in certain cases on other
10395 machines. */
ca695ac9 10396
b93a436e
JL
10397 /* If this is a signed equality comparison, we can do it as an
10398 unsigned comparison since zero-extension is cheaper than sign
10399 extension and comparisons with zero are done as unsigned. This is
10400 the case even on machines that can do fast sign extension, since
10401 zero-extension is easier to combine with other operations than
10402 sign-extension is. If we are comparing against a constant, we must
10403 convert it to what it would look like unsigned. */
10404 if ((code == EQ || code == NE) && ! unsignedp
10405 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10406 {
10407 if (GET_CODE (op1) == CONST_INT
10408 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10409 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10410 unsignedp = 1;
10411 }
10412#endif
ca695ac9 10413
b30f05db
BS
10414 if (! if_true_label)
10415 {
10416 dummy_true_label = 1;
10417 if_true_label = gen_label_rtx ();
10418 }
10419
a06ef755 10420 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
b30f05db
BS
10421 if_true_label);
10422
10423 if (if_false_label)
10424 emit_jump (if_false_label);
10425 if (dummy_true_label)
10426 emit_label (if_true_label);
10427}
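/* Note on the reversal above (illustrative): when only IF_FALSE_LABEL
   is given, `jump to L if (x == y) is false' is emitted as `jump to L
   if (x != y)', saving a drop-through label and an extra jump.  The
   reversal is skipped for floating-point modes because, once NaNs are
   possible, `! (x < y)' is not the same as `x >= y'.  */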
10428
10429/* Generate code for a comparison expression EXP (including code to compute
10430 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10431 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10432 generated code will drop through.
10433 SIGNED_CODE should be the rtx operation for this comparison for
10434 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10435
10436 We force a stack adjustment unless there are currently
10437 things pushed on the stack that aren't yet used. */
10438
10439static void
10440do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10441 if_true_label)
b3694847 10442 tree exp;
b30f05db
BS
10443 enum rtx_code signed_code, unsigned_code;
10444 rtx if_false_label, if_true_label;
10445{
b3694847
SS
10446 rtx op0, op1;
10447 tree type;
10448 enum machine_mode mode;
b30f05db
BS
10449 int unsignedp;
10450 enum rtx_code code;
10451
10452 /* Don't crash if the comparison was erroneous. */
a06ef755 10453 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
b30f05db
BS
10454 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10455 return;
10456
a06ef755 10457 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6b16805e
JJ
10458 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10459 return;
10460
b30f05db
BS
10461 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10462 mode = TYPE_MODE (type);
6b16805e
JJ
10463 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10464 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10465 || (GET_MODE_BITSIZE (mode)
31a7659b
JDA
10466 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10467 1)))))))
6b16805e
JJ
10468 {
10469 /* op0 might have been replaced by promoted constant, in which
10470 case the type of second argument should be used. */
10471 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10472 mode = TYPE_MODE (type);
10473 }
b30f05db
BS
10474 unsignedp = TREE_UNSIGNED (type);
10475 code = unsignedp ? unsigned_code : signed_code;
10476
10477#ifdef HAVE_canonicalize_funcptr_for_compare
10478 /* If function pointers need to be "canonicalized" before they can
10479 be reliably compared, then canonicalize them. */
10480 if (HAVE_canonicalize_funcptr_for_compare
10481 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10482 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10483 == FUNCTION_TYPE))
10484 {
10485 rtx new_op0 = gen_reg_rtx (mode);
10486
10487 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10488 op0 = new_op0;
10489 }
10490
10491 if (HAVE_canonicalize_funcptr_for_compare
10492 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10493 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10494 == FUNCTION_TYPE))
10495 {
10496 rtx new_op1 = gen_reg_rtx (mode);
10497
10498 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10499 op1 = new_op1;
10500 }
10501#endif
10502
10503 /* Do any postincrements in the expression that was tested. */
10504 emit_queue ();
10505
10506 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10507 ((mode == BLKmode)
10508 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
b30f05db 10509 if_false_label, if_true_label);
b93a436e
JL
10510}
10511\f
10512/* Generate code to calculate EXP using a store-flag instruction
10513 and return an rtx for the result. EXP is either a comparison
10514 or a TRUTH_NOT_EXPR whose operand is a comparison.
ca695ac9 10515
b93a436e 10516 If TARGET is nonzero, store the result there if convenient.
ca695ac9 10517
cc2902df 10518 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
b93a436e 10519 cheap.
ca695ac9 10520
b93a436e
JL
10521 Return zero if there is no suitable set-flag instruction
10522 available on this machine.
ca695ac9 10523
b93a436e
JL
10524 Once expand_expr has been called on the arguments of the comparison,
10525 we are committed to doing the store flag, since it is not safe to
10526 re-evaluate the expression. We emit the store-flag insn by calling
10527 emit_store_flag, but only expand the arguments if we have a reason
10528 to believe that emit_store_flag will be successful. If we think that
10529 it will, but it isn't, we have to simulate the store-flag with a
10530 set/jump/set sequence. */
ca695ac9 10531
b93a436e
JL
10532static rtx
10533do_store_flag (exp, target, mode, only_cheap)
10534 tree exp;
10535 rtx target;
10536 enum machine_mode mode;
10537 int only_cheap;
10538{
10539 enum rtx_code code;
10540 tree arg0, arg1, type;
10541 tree tem;
10542 enum machine_mode operand_mode;
10543 int invert = 0;
10544 int unsignedp;
10545 rtx op0, op1;
10546 enum insn_code icode;
10547 rtx subtarget = target;
381127e8 10548 rtx result, label;
ca695ac9 10549
b93a436e
JL
10550 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10551 result at the end. We can't simply invert the test since it would
10552 have already been inverted if it were valid. This case occurs for
10553 some floating-point comparisons. */
ca695ac9 10554
b93a436e
JL
10555 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10556 invert = 1, exp = TREE_OPERAND (exp, 0);
ca695ac9 10557
b93a436e
JL
10558 arg0 = TREE_OPERAND (exp, 0);
10559 arg1 = TREE_OPERAND (exp, 1);
5129d2ce
AH
10560
10561 /* Don't crash if the comparison was erroneous. */
10562 if (arg0 == error_mark_node || arg1 == error_mark_node)
10563 return const0_rtx;
10564
b93a436e
JL
10565 type = TREE_TYPE (arg0);
10566 operand_mode = TYPE_MODE (type);
10567 unsignedp = TREE_UNSIGNED (type);
ca695ac9 10568
b93a436e
JL
10569 /* We won't bother with BLKmode store-flag operations because it would mean
10570 passing a lot of information to emit_store_flag. */
10571 if (operand_mode == BLKmode)
10572 return 0;
ca695ac9 10573
b93a436e
JL
10574 /* We won't bother with store-flag operations involving function pointers
10575 when function pointers must be canonicalized before comparisons. */
10576#ifdef HAVE_canonicalize_funcptr_for_compare
10577 if (HAVE_canonicalize_funcptr_for_compare
10578 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10579 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10580 == FUNCTION_TYPE))
10581 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10582 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10583 == FUNCTION_TYPE))))
10584 return 0;
ca695ac9
JB
10585#endif
10586
b93a436e
JL
10587 STRIP_NOPS (arg0);
10588 STRIP_NOPS (arg1);
ca695ac9 10589
b93a436e
JL
10590 /* Get the rtx comparison code to use. We know that EXP is a comparison
10591 operation of some type. Some comparisons against 1 and -1 can be
10592 converted to comparisons with zero. Do so here so that the tests
10593 below will be aware that we have a comparison with zero. These
10594 tests will not catch constants in the first operand, but constants
10595 are rarely passed as the first operand. */
ca695ac9 10596
b93a436e
JL
10597 switch (TREE_CODE (exp))
10598 {
10599 case EQ_EXPR:
10600 code = EQ;
bbf6f052 10601 break;
b93a436e
JL
10602 case NE_EXPR:
10603 code = NE;
bbf6f052 10604 break;
b93a436e
JL
10605 case LT_EXPR:
10606 if (integer_onep (arg1))
10607 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10608 else
10609 code = unsignedp ? LTU : LT;
ca695ac9 10610 break;
b93a436e
JL
10611 case LE_EXPR:
10612 if (! unsignedp && integer_all_onesp (arg1))
10613 arg1 = integer_zero_node, code = LT;
10614 else
10615 code = unsignedp ? LEU : LE;
ca695ac9 10616 break;
b93a436e
JL
10617 case GT_EXPR:
10618 if (! unsignedp && integer_all_onesp (arg1))
10619 arg1 = integer_zero_node, code = GE;
10620 else
10621 code = unsignedp ? GTU : GT;
10622 break;
10623 case GE_EXPR:
10624 if (integer_onep (arg1))
10625 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10626 else
10627 code = unsignedp ? GEU : GE;
ca695ac9 10628 break;
1eb8759b
RH
10629
10630 case UNORDERED_EXPR:
10631 code = UNORDERED;
10632 break;
10633 case ORDERED_EXPR:
10634 code = ORDERED;
10635 break;
10636 case UNLT_EXPR:
10637 code = UNLT;
10638 break;
10639 case UNLE_EXPR:
10640 code = UNLE;
10641 break;
10642 case UNGT_EXPR:
10643 code = UNGT;
10644 break;
10645 case UNGE_EXPR:
10646 code = UNGE;
10647 break;
10648 case UNEQ_EXPR:
10649 code = UNEQ;
10650 break;
1eb8759b 10651
ca695ac9 10652 default:
b93a436e 10653 abort ();
bbf6f052 10654 }
bbf6f052 10655
b93a436e
JL
10656 /* Put a constant second. */
10657 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10658 {
10659 tem = arg0; arg0 = arg1; arg1 = tem;
10660 code = swap_condition (code);
ca695ac9 10661 }
bbf6f052 10662
b93a436e
JL
10663 /* If this is an equality or inequality test of a single bit, we can
10664 do this by shifting the bit being tested to the low-order bit and
10665 masking the result with the constant 1. If the condition was EQ,
10666 we xor it with 1. This does not require an scc insn and is faster
10667 than an scc insn even if we have it. */
d39985fa 10668
b93a436e
JL
10669 if ((code == NE || code == EQ)
10670 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10671 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10672 {
10673 tree inner = TREE_OPERAND (arg0, 0);
10674 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10675 int ops_unsignedp;
bbf6f052 10676
b93a436e
JL
10677 /* If INNER is a right shift by a constant and it plus BITNUM does
10678 not overflow, adjust BITNUM and INNER. */
ca695ac9 10679
b93a436e
JL
10680 if (TREE_CODE (inner) == RSHIFT_EXPR
10681 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10682 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
05bccae2
RK
10683 && bitnum < TYPE_PRECISION (type)
10684 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10685 bitnum - TYPE_PRECISION (type)))
ca695ac9 10686 {
b93a436e
JL
10687 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10688 inner = TREE_OPERAND (inner, 0);
ca695ac9 10689 }
ca695ac9 10690
b93a436e
JL
10691 /* If we are going to be able to omit the AND below, we must do our
10692 operations as unsigned. If we must use the AND, we have a choice.
10693 Normally unsigned is faster, but for some machines signed is. */
10694 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10695#ifdef LOAD_EXTEND_OP
10696 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10697#else
10698 : 1
10699#endif
10700 );
bbf6f052 10701
296b4ed9 10702 if (! get_subtarget (subtarget)
a47fed55 10703 || GET_MODE (subtarget) != operand_mode
e5e809f4 10704 || ! safe_from_p (subtarget, inner, 1))
b93a436e 10705 subtarget = 0;
bbf6f052 10706
b93a436e 10707 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
bbf6f052 10708
b93a436e 10709 if (bitnum != 0)
681cb233 10710 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
b93a436e 10711 size_int (bitnum), subtarget, ops_unsignedp);
bbf6f052 10712
b93a436e
JL
10713 if (GET_MODE (op0) != mode)
10714 op0 = convert_to_mode (mode, op0, ops_unsignedp);
bbf6f052 10715
b93a436e
JL
10716 if ((code == EQ && ! invert) || (code == NE && invert))
10717 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10718 ops_unsignedp, OPTAB_LIB_WIDEN);
bbf6f052 10719
b93a436e
JL
10720 /* Put the AND last so it can combine with more things. */
10721 if (bitnum != TYPE_PRECISION (type) - 1)
22273300 10722 op0 = expand_and (mode, op0, const1_rtx, subtarget);
bbf6f052 10723
b93a436e
JL
10724 return op0;
10725 }
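  /* Worked example of the single-bit case above (illustrative): for
     `(x & 8) != 0' we emit `(x >> 3) & 1'; for `(x & 8) == 0' we emit
     `((x >> 3) & 1) ^ 1'; and when bit 3 happens to be the sign bit
     of the type, the final AND is omitted.  */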
bbf6f052 10726
b93a436e 10727 /* Now see if we are likely to be able to do this. Return if not. */
1eb8759b 10728 if (! can_compare_p (code, operand_mode, ccp_store_flag))
b93a436e 10729 return 0;
1eb8759b 10730
b93a436e
JL
10731 icode = setcc_gen_code[(int) code];
10732 if (icode == CODE_FOR_nothing
a995e389 10733 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
ca695ac9 10734 {
b93a436e
JL
10735 /* We can only do this if it is one of the special cases that
10736 can be handled without an scc insn. */
10737 if ((code == LT && integer_zerop (arg1))
10738 || (! only_cheap && code == GE && integer_zerop (arg1)))
10739 ;
10740 else if (BRANCH_COST >= 0
10741 && ! only_cheap && (code == NE || code == EQ)
10742 && TREE_CODE (type) != REAL_TYPE
10743 && ((abs_optab->handlers[(int) operand_mode].insn_code
10744 != CODE_FOR_nothing)
10745 || (ffs_optab->handlers[(int) operand_mode].insn_code
10746 != CODE_FOR_nothing)))
10747 ;
10748 else
10749 return 0;
ca695ac9 10750 }
3a94c984 10751
296b4ed9 10752 if (! get_subtarget (target)
a47fed55 10753 || GET_MODE (subtarget) != operand_mode
e5e809f4 10754 || ! safe_from_p (subtarget, arg1, 1))
b93a436e
JL
10755 subtarget = 0;
10756
10757 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10758 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10759
10760 if (target == 0)
10761 target = gen_reg_rtx (mode);
10762
10763 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10764 because, if emit_store_flag does anything, it will succeed and
10765 OP0 and OP1 will not be used subsequently. */
ca695ac9 10766
b93a436e
JL
10767 result = emit_store_flag (target, code,
10768 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10769 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10770 operand_mode, unsignedp, 1);
ca695ac9 10771
b93a436e
JL
10772 if (result)
10773 {
10774 if (invert)
10775 result = expand_binop (mode, xor_optab, result, const1_rtx,
10776 result, 0, OPTAB_LIB_WIDEN);
10777 return result;
ca695ac9 10778 }
bbf6f052 10779
b93a436e
JL
10780 /* If this failed, we have to do this with set/compare/jump/set code. */
10781 if (GET_CODE (target) != REG
10782 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10783 target = gen_reg_rtx (GET_MODE (target));
10784
10785 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10786 result = compare_from_rtx (op0, op1, code, unsignedp,
a06ef755 10787 operand_mode, NULL_RTX);
b93a436e
JL
10788 if (GET_CODE (result) == CONST_INT)
10789 return (((result == const0_rtx && ! invert)
10790 || (result != const0_rtx && invert))
10791 ? const0_rtx : const1_rtx);
ca695ac9 10792
8f08e8c0
JL
10793 /* The code of RESULT may not match CODE if compare_from_rtx
10794 decided to swap its operands and reverse the original code.
10795
10796 We know that compare_from_rtx returns either a CONST_INT or
10797 a new comparison code, so it is safe to just extract the
10798 code from RESULT. */
10799 code = GET_CODE (result);
10800
b93a436e
JL
10801 label = gen_label_rtx ();
10802 if (bcc_gen_fctn[(int) code] == 0)
10803 abort ();
0f41302f 10804
b93a436e
JL
10805 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10806 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10807 emit_label (label);
bbf6f052 10808
b93a436e 10809 return target;
ca695ac9 10810}
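/* Sketch of the set/jump/set fallback emitted just above (pseudo-asm,
   illustrative, shown for the non-inverted case):

	target = 1		; assume the comparison holds
	cmp	op0, op1
	bCC	L		; branch if it really does
	target = 0		; otherwise clear the flag
     L:

   This is also why TARGET must be a register not mentioned in OP0 or
   OP1: it is written before the comparison happens.  */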
b93a436e 10811\f
b93a436e 10812
ad82abb8
ZW
10813/* Stubs in case we haven't got a casesi insn. */
10814#ifndef HAVE_casesi
10815# define HAVE_casesi 0
10816# define gen_casesi(a, b, c, d, e) (0)
10817# define CODE_FOR_casesi CODE_FOR_nothing
10818#endif
10819
10820/* If the machine does not have a case insn that compares the bounds,
10821 this means extra overhead for dispatch tables, which raises the
10822 threshold for using them. */
10823#ifndef CASE_VALUES_THRESHOLD
10824#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10825#endif /* CASE_VALUES_THRESHOLD */
10826
10827unsigned int
10828case_values_threshold ()
10829{
10830 return CASE_VALUES_THRESHOLD;
10831}
10832
10833/* Attempt to generate a casesi instruction. Returns 1 if successful,
10834 0 otherwise (i.e. if there is no casesi instruction). */
10835int
10836try_casesi (index_type, index_expr, minval, range,
10837 table_label, default_label)
10838 tree index_type, index_expr, minval, range;
10839 rtx table_label ATTRIBUTE_UNUSED;
10840 rtx default_label;
10841{
10842 enum machine_mode index_mode = SImode;
10843 int index_bits = GET_MODE_BITSIZE (index_mode);
10844 rtx op1, op2, index;
10845 enum machine_mode op_mode;
10846
10847 if (! HAVE_casesi)
10848 return 0;
10849
10850 /* Convert the index to SImode. */
10851 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10852 {
10853 enum machine_mode omode = TYPE_MODE (index_type);
10854 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10855
10856 /* We must handle the endpoints in the original mode. */
10857 index_expr = build (MINUS_EXPR, index_type,
10858 index_expr, minval);
10859 minval = integer_zero_node;
10860 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10861 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
a06ef755 10862 omode, 1, default_label);
ad82abb8
ZW
10863 /* Now we can safely truncate. */
10864 index = convert_to_mode (index_mode, index, 0);
10865 }
10866 else
10867 {
10868 if (TYPE_MODE (index_type) != index_mode)
10869 {
b0c48229
NB
10870 index_expr = convert ((*lang_hooks.types.type_for_size)
10871 (index_bits, 0), index_expr);
ad82abb8
ZW
10872 index_type = TREE_TYPE (index_expr);
10873 }
10874
10875 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10876 }
10877 emit_queue ();
10878 index = protect_from_queue (index, 0);
10879 do_pending_stack_adjust ();
10880
10881 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10882 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10883 (index, op_mode))
10884 index = copy_to_mode_reg (op_mode, index);
e87b4f3f 10885
  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
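
/* Roughly speaking, the casesi insn emitted above behaves like

       if ((unsigned) (index - minval) > (unsigned) range)
         goto default_label;
       goto *table_label[index - minval];

   with the bounds check and table dispatch folded into one pattern
   whose exact expansion is up to the target.  */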

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);
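
  /* Concretely: for case values 10 .. 15, RANGE is 5 and INDEX holds
     the switch value minus 10.  An original value of 7 leaves
     (unsigned) -3 in INDEX, which compares greater than 5, so the GTU
     branch above catches too-small and too-large values alike.  */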

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
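
  /* The address built above is simply
         table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE),
     i.e. the INDEX'th entry of the dispatch table.  */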
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);
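
  /* TEMP now holds the selected table entry: an absolute address, or,
     for a PC-relative table, an offset that the target's tablejump
     pattern knows how to turn into a destination.  */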

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (index_type, index_expr, minval, range,
               table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

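  /* Bias INDEX by MINVAL up front, at the tree level, since
     do_tablejump expects the lowest case value already subtracted;
     folding here also lets a constant switch argument collapse to a
     constant table offset.  */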
  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}

/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (mode)
     enum machine_mode mode;
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Only vector modes are of interest here.  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* The target supports this vector mode directly in hardware.  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* Ideally we would also return 1 when, say, V4DI is requested and
     the target has no DI moves but does have V2DI; that situation is
     unlikely enough that we don't bother.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
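
/* Thus a caller may ask about, e.g., V2DImode even on a target with no
   vector unit at all: as long as plain DImode moves exist, the mode is
   reported valid and the operation can be open-coded piecewise.  */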

#include "gt-expr.h"