/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "timevar.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
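
/* Illustrative note: CONSTFUN returns the constant to be stored for a
   chunk, given the chunk's byte offset and its machine mode.  For
   example, clear_by_pieces (below) supplies a callback that simply
   returns const0_rtx for every chunk, which is how block clears reuse
   this machinery.  */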

static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static rtx clear_storage_via_libcall (rtx, rtx, bool);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, int);

static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);

static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
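
/* Illustrative note: emit_block_move, below, consults this predicate
   when the length is a compile-time constant, roughly

     if (GET_CODE (size) == CONST_INT
	 && MOVE_BY_PIECES_P (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align, 0);

   before falling back to a movmem pattern, a library call, or an
   explicit loop.  */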

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}

/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
bbf6f052
RK
342\f
343/* Copy data from FROM to TO, where the machine modes are not the same.
344 Both modes may be integer, or both may be floating.
345 UNSIGNEDP should be nonzero if FROM is an unsigned type.
346 This causes zero-extension instead of sign-extension. */
347
348void
502b8322 349convert_move (rtx to, rtx from, int unsignedp)
bbf6f052
RK
350{
351 enum machine_mode to_mode = GET_MODE (to);
352 enum machine_mode from_mode = GET_MODE (from);
353 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
354 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
355 enum insn_code code;
356 rtx libcall;
357
358 /* rtx code for making an equivalent value. */
37d0b254
SE
359 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
360 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
bbf6f052 361
bbf6f052 362
5b0264cb 363 gcc_assert (to_real == from_real);
bbf6f052 364
6de9cd9a
DN
365 /* If the source and destination are already the same, then there's
366 nothing to do. */
367 if (to == from)
368 return;
369
1499e0a8
RK
370 /* If FROM is a SUBREG that indicates that we have already done at least
371 the required extension, strip it. We don't handle such SUBREGs as
372 TO here. */
373
374 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
375 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
376 >= GET_MODE_SIZE (to_mode))
377 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
378 from = gen_lowpart (to_mode, from), from_mode = to_mode;
379
5b0264cb 380 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
1499e0a8 381
bbf6f052
RK
382 if (to_mode == from_mode
383 || (from_mode == VOIDmode && CONSTANT_P (from)))
384 {
385 emit_move_insn (to, from);
386 return;
387 }
388
0b4565c9
BS
389 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
390 {
5b0264cb 391 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
3a94c984 392
0b4565c9 393 if (VECTOR_MODE_P (to_mode))
bafe341a 394 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
0b4565c9 395 else
bafe341a 396 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
0b4565c9
BS
397
398 emit_move_insn (to, from);
399 return;
400 }
401
06765df1
R
402 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
403 {
404 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
405 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
406 return;
407 }
408
bbf6f052
RK
409 if (to_real)
410 {
642dfa8b 411 rtx value, insns;
85363ca0 412 convert_optab tab;
81d79e2c 413
5b0264cb
NS
414 gcc_assert (GET_MODE_PRECISION (from_mode)
415 != GET_MODE_PRECISION (to_mode));
416
e44846d6 417 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
85363ca0 418 tab = sext_optab;
85363ca0 419 else
5b0264cb 420 tab = trunc_optab;
2b01c326 421
85363ca0 422 /* Try converting directly if the insn is supported. */
2b01c326 423
85363ca0
ZW
424 code = tab->handlers[to_mode][from_mode].insn_code;
425 if (code != CODE_FOR_nothing)
b092b471 426 {
85363ca0
ZW
427 emit_unop_insn (code, to, from,
428 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
b092b471
JW
429 return;
430 }
b092b471 431
85363ca0
ZW
432 /* Otherwise use a libcall. */
433 libcall = tab->handlers[to_mode][from_mode].libfunc;
3a94c984 434
5b0264cb
NS
435 /* Is this conversion implemented yet? */
436 gcc_assert (libcall);
bbf6f052 437
642dfa8b 438 start_sequence ();
ebb1b59a 439 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
81d79e2c 440 1, from, from_mode);
642dfa8b
BS
441 insns = get_insns ();
442 end_sequence ();
450b1728
EC
443 emit_libcall_block (insns, to, value,
444 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
445 from)
446 : gen_rtx_FLOAT_EXTEND (to_mode, from));
bbf6f052
RK
447 return;
448 }
449
85363ca0
ZW
450 /* Handle pointer conversion. */ /* SPEE 900220. */
451 /* Targets are expected to provide conversion insns between PxImode and
452 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
453 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
454 {
455 enum machine_mode full_mode
456 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
457
5b0264cb
NS
458 gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
459 != CODE_FOR_nothing);
85363ca0
ZW
460
461 if (full_mode != from_mode)
462 from = convert_to_mode (full_mode, from, unsignedp);
463 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
464 to, from, UNKNOWN);
465 return;
466 }
467 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
468 {
d2348bd5 469 rtx new_from;
85363ca0
ZW
470 enum machine_mode full_mode
471 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
472
5b0264cb
NS
473 gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
474 != CODE_FOR_nothing);
85363ca0 475
85363ca0 476 if (to_mode == full_mode)
d2348bd5
DD
477 {
478 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
479 to, from, UNKNOWN);
480 return;
481 }
482
483 new_from = gen_reg_rtx (full_mode);
484 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
485 new_from, from, UNKNOWN);
85363ca0 486
a1105617 487 /* else proceed to integer conversions below. */
85363ca0 488 from_mode = full_mode;
d2348bd5 489 from = new_from;
85363ca0
ZW
490 }
491
bbf6f052
RK
492 /* Now both modes are integers. */
493
494 /* Handle expanding beyond a word. */
495 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
496 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
497 {
498 rtx insns;
499 rtx lowpart;
500 rtx fill_value;
501 rtx lowfrom;
502 int i;
503 enum machine_mode lowpart_mode;
504 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
505
506 /* Try converting directly if the insn is supported. */
507 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
508 != CODE_FOR_nothing)
509 {
cd1b4b44
RK
510 /* If FROM is a SUBREG, put it into a register. Do this
511 so that we always generate the same set of insns for
512 better cse'ing; if an intermediate assignment occurred,
513 we won't be doing the operation directly on the SUBREG. */
514 if (optimize > 0 && GET_CODE (from) == SUBREG)
515 from = force_reg (from_mode, from);
bbf6f052
RK
516 emit_unop_insn (code, to, from, equiv_code);
517 return;
518 }
519 /* Next, try converting via full word. */
520 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
521 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
522 != CODE_FOR_nothing))
523 {
f8cfc6aa 524 if (REG_P (to))
6a2d136b
EB
525 {
526 if (reg_overlap_mentioned_p (to, from))
527 from = force_reg (from_mode, from);
528 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
529 }
bbf6f052
RK
530 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
531 emit_unop_insn (code, to,
532 gen_lowpart (word_mode, to), equiv_code);
533 return;
534 }
535
536 /* No special multiword conversion insn; do it by hand. */
537 start_sequence ();
538
5c5033c3
RK
539 /* Since we will turn this into a no conflict block, we must ensure
540 that the source does not overlap the target. */
541
542 if (reg_overlap_mentioned_p (to, from))
543 from = force_reg (from_mode, from);
544
bbf6f052
RK
545 /* Get a copy of FROM widened to a word, if necessary. */
546 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
547 lowpart_mode = word_mode;
548 else
549 lowpart_mode = from_mode;
550
551 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
552
553 lowpart = gen_lowpart (lowpart_mode, to);
554 emit_move_insn (lowpart, lowfrom);
555
556 /* Compute the value to put in each remaining word. */
557 if (unsignedp)
558 fill_value = const0_rtx;
559 else
560 {
561#ifdef HAVE_slt
562 if (HAVE_slt
a995e389 563 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
bbf6f052
RK
564 && STORE_FLAG_VALUE == -1)
565 {
906c4e36 566 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
a06ef755 567 lowpart_mode, 0);
bbf6f052
RK
568 fill_value = gen_reg_rtx (word_mode);
569 emit_insn (gen_slt (fill_value));
570 }
571 else
572#endif
573 {
574 fill_value
575 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
576 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
906c4e36 577 NULL_RTX, 0);
bbf6f052
RK
578 fill_value = convert_to_mode (word_mode, fill_value, 1);
579 }
580 }
581
582 /* Fill the remaining words. */
583 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
584 {
585 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
586 rtx subword = operand_subword (to, index, 1, to_mode);
587
5b0264cb 588 gcc_assert (subword);
bbf6f052
RK
589
590 if (fill_value != subword)
591 emit_move_insn (subword, fill_value);
592 }
593
594 insns = get_insns ();
595 end_sequence ();
596
906c4e36 597 emit_no_conflict_block (insns, to, from, NULL_RTX,
38a448ca 598 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
bbf6f052
RK
599 return;
600 }
601
d3c64ee3
RS
602 /* Truncating multi-word to a word or less. */
603 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
604 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
bbf6f052 605 {
3c0cb5de 606 if (!((MEM_P (from)
431a6eca
JW
607 && ! MEM_VOLATILE_P (from)
608 && direct_load[(int) to_mode]
609 && ! mode_dependent_address_p (XEXP (from, 0)))
f8cfc6aa 610 || REG_P (from)
431a6eca
JW
611 || GET_CODE (from) == SUBREG))
612 from = force_reg (from_mode, from);
bbf6f052
RK
613 convert_move (to, gen_lowpart (word_mode, from), 0);
614 return;
615 }
616
bbf6f052
RK
617 /* Now follow all the conversions between integers
618 no more than a word long. */
619
620 /* For truncation, usually we can just refer to FROM in a narrower mode. */
621 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
622 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
d3c64ee3 623 GET_MODE_BITSIZE (from_mode)))
bbf6f052 624 {
3c0cb5de 625 if (!((MEM_P (from)
d3c64ee3
RS
626 && ! MEM_VOLATILE_P (from)
627 && direct_load[(int) to_mode]
628 && ! mode_dependent_address_p (XEXP (from, 0)))
f8cfc6aa 629 || REG_P (from)
d3c64ee3
RS
630 || GET_CODE (from) == SUBREG))
631 from = force_reg (from_mode, from);
f8cfc6aa 632 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
34aa3599
RK
633 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
634 from = copy_to_reg (from);
bbf6f052
RK
635 emit_move_insn (to, gen_lowpart (to_mode, from));
636 return;
637 }
638
d3c64ee3 639 /* Handle extension. */
bbf6f052
RK
640 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
641 {
642 /* Convert directly if that works. */
643 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
644 != CODE_FOR_nothing)
645 {
646 emit_unop_insn (code, to, from, equiv_code);
647 return;
648 }
649 else
650 {
651 enum machine_mode intermediate;
2b28d92e
NC
652 rtx tmp;
653 tree shift_amount;
bbf6f052
RK
654
655 /* Search for a mode to convert via. */
656 for (intermediate = from_mode; intermediate != VOIDmode;
657 intermediate = GET_MODE_WIDER_MODE (intermediate))
930b4e39
RK
658 if (((can_extend_p (to_mode, intermediate, unsignedp)
659 != CODE_FOR_nothing)
660 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
d60eaeff
JL
661 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
662 GET_MODE_BITSIZE (intermediate))))
bbf6f052
RK
663 && (can_extend_p (intermediate, from_mode, unsignedp)
664 != CODE_FOR_nothing))
665 {
666 convert_move (to, convert_to_mode (intermediate, from,
667 unsignedp), unsignedp);
668 return;
669 }
670
2b28d92e 671 /* No suitable intermediate mode.
3a94c984 672 Generate what we need with shifts. */
4a90aeeb
NS
673 shift_amount = build_int_cst (NULL_TREE,
674 GET_MODE_BITSIZE (to_mode)
7d60be94 675 - GET_MODE_BITSIZE (from_mode));
2b28d92e
NC
676 from = gen_lowpart (to_mode, force_reg (from_mode, from));
677 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
678 to, unsignedp);
3a94c984 679 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
2b28d92e
NC
680 to, unsignedp);
681 if (tmp != to)
682 emit_move_insn (to, tmp);
683 return;
bbf6f052
RK
684 }
685 }
686
3a94c984 687 /* Support special truncate insns for certain modes. */
85363ca0 688 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
bbf6f052 689 {
85363ca0
ZW
690 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
691 to, from, UNKNOWN);
b9bcad65
RK
692 return;
693 }
694
bbf6f052
RK
695 /* Handle truncation of volatile memrefs, and so on;
696 the things that couldn't be truncated directly,
85363ca0
ZW
697 and for which there was no special instruction.
698
699 ??? Code above formerly short-circuited this, for most integer
700 mode pairs, with a force_reg in from_mode followed by a recursive
701 call to this routine. Appears always to have been wrong. */
bbf6f052
RK
702 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
703 {
704 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
705 emit_move_insn (to, temp);
706 return;
707 }
708
709 /* Mode combination is not recognized. */
5b0264cb 710 gcc_unreachable ();
bbf6f052
RK
711}
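
/* Illustrative note: when no suitable extension instruction exists,
   widening e.g. an SImode value into a DImode destination on a 32-bit
   word target takes the multi-word path above: the low word is copied,
   and each remaining word is filled with zero (unsigned) or with
   copies of the sign bit produced by an arithmetic right shift
   (signed).  */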
712
713/* Return an rtx for a value that would result
714 from converting X to mode MODE.
715 Both X and MODE may be floating, or both integer.
716 UNSIGNEDP is nonzero if X is an unsigned value.
717 This can be done by referring to a part of X in place
ad76cef8 718 or by copying to a new temporary with conversion. */
bbf6f052
RK
719
720rtx
502b8322 721convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
5ffe63ed
RS
722{
723 return convert_modes (mode, VOIDmode, x, unsignedp);
724}
725
726/* Return an rtx for a value that would result
727 from converting X from mode OLDMODE to mode MODE.
728 Both modes may be floating, or both integer.
729 UNSIGNEDP is nonzero if X is an unsigned value.
730
731 This can be done by referring to a part of X in place
732 or by copying to a new temporary with conversion.
733
ad76cef8 734 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
5ffe63ed
RS
735
736rtx
502b8322 737convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
bbf6f052 738{
b3694847 739 rtx temp;
5ffe63ed 740
1499e0a8
RK
741 /* If FROM is a SUBREG that indicates that we have already done at least
742 the required extension, strip it. */
743
744 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
745 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
746 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
747 x = gen_lowpart (mode, x);
bbf6f052 748
64791b18
RK
749 if (GET_MODE (x) != VOIDmode)
750 oldmode = GET_MODE (x);
3a94c984 751
5ffe63ed 752 if (mode == oldmode)
bbf6f052
RK
753 return x;
754
755 /* There is one case that we must handle specially: If we are converting
906c4e36 756 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
bbf6f052
RK
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */
760
761 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
906c4e36 762 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
bbf6f052 763 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
96ff8a16
ILT
764 {
765 HOST_WIDE_INT val = INTVAL (x);
766
767 if (oldmode != VOIDmode
768 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
769 {
770 int width = GET_MODE_BITSIZE (oldmode);
771
772 /* We need to zero extend VAL. */
773 val &= ((HOST_WIDE_INT) 1 << width) - 1;
774 }
775
776 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
777 }
bbf6f052
RK
778
779 /* We can do this with a gen_lowpart if both desired and current modes
780 are integer, and this is either a constant integer, a register, or a
ba2e110c
RK
781 non-volatile MEM. Except for the constant case where MODE is no
782 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
bbf6f052 783
ba2e110c
RK
784 if ((GET_CODE (x) == CONST_INT
785 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 786 || (GET_MODE_CLASS (mode) == MODE_INT
5ffe63ed 787 && GET_MODE_CLASS (oldmode) == MODE_INT
bbf6f052 788 && (GET_CODE (x) == CONST_DOUBLE
5ffe63ed 789 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
3c0cb5de 790 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
d57c66da 791 && direct_load[(int) mode])
f8cfc6aa 792 || (REG_P (x)
006c9f4a
SE
793 && (! HARD_REGISTER_P (x)
794 || HARD_REGNO_MODE_OK (REGNO (x), mode))
2bf29316
JW
795 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
796 GET_MODE_BITSIZE (GET_MODE (x)))))))))
ba2e110c
RK
797 {
798 /* ?? If we don't know OLDMODE, we have to assume here that
799 X does not need sign- or zero-extension. This may not be
800 the case, but it's the best we can do. */
801 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
802 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
803 {
804 HOST_WIDE_INT val = INTVAL (x);
805 int width = GET_MODE_BITSIZE (oldmode);
806
807 /* We must sign or zero-extend in this case. Start by
808 zero-extending, then sign extend if we need to. */
809 val &= ((HOST_WIDE_INT) 1 << width) - 1;
810 if (! unsignedp
811 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
812 val |= (HOST_WIDE_INT) (-1) << width;
813
2496c7bd 814 return gen_int_mode (val, mode);
ba2e110c
RK
815 }
816
817 return gen_lowpart (mode, x);
818 }
bbf6f052 819
ebe75517
JH
  /* Converting from an integer constant into MODE is always equivalent to a
     subreg operation.  */
822 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
823 {
5b0264cb 824 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
ebe75517
JH
825 return simplify_gen_subreg (mode, x, oldmode, 0);
826 }
827
bbf6f052
RK
828 temp = gen_reg_rtx (mode);
829 convert_move (temp, x, unsignedp);
830 return temp;
831}
832\f
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}

21d93687 851/* Generate several move instructions to copy LEN bytes from block FROM to
ad76cef8 852 block TO. (These are MEM rtx's with BLKmode).
566aa174 853
21d93687
RK
854 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
855 used to push FROM to the stack.
566aa174 856
8fd3cf4e 857 ALIGN is maximum stack alignment we can assume.
bbf6f052 858
8fd3cf4e
JJ
859 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
860 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
861 stpcpy. */
862
863rtx
502b8322
AJ
864move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
865 unsigned int align, int endp)
bbf6f052
RK
866{
867 struct move_by_pieces data;
566aa174 868 rtx to_addr, from_addr = XEXP (from, 0);
770ae6cc 869 unsigned int max_size = MOVE_MAX_PIECES + 1;
fbe1758d
AM
870 enum machine_mode mode = VOIDmode, tmode;
871 enum insn_code icode;
bbf6f052 872
f26aca6d
DD
873 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
874
bbf6f052 875 data.offset = 0;
bbf6f052 876 data.from_addr = from_addr;
566aa174
JH
877 if (to)
878 {
879 to_addr = XEXP (to, 0);
880 data.to = to;
881 data.autinc_to
882 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
883 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
884 data.reverse
885 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
886 }
887 else
888 {
889 to_addr = NULL_RTX;
890 data.to = NULL_RTX;
891 data.autinc_to = 1;
892#ifdef STACK_GROWS_DOWNWARD
893 data.reverse = 1;
894#else
895 data.reverse = 0;
896#endif
897 }
898 data.to_addr = to_addr;
bbf6f052 899 data.from = from;
bbf6f052
RK
900 data.autinc_from
901 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
902 || GET_CODE (from_addr) == POST_INC
903 || GET_CODE (from_addr) == POST_DEC);
904
905 data.explicit_inc_from = 0;
906 data.explicit_inc_to = 0;
bbf6f052
RK
907 if (data.reverse) data.offset = len;
908 data.len = len;
909
910 /* If copying requires more than two move insns,
911 copy addresses to registers (to make displacements shorter)
912 and use post-increment if available. */
913 if (!(data.autinc_from && data.autinc_to)
45d78e7f 914 && move_by_pieces_ninsns (len, align, max_size) > 2)
bbf6f052 915 {
3a94c984 916 /* Find the mode of the largest move... */
fbe1758d
AM
917 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
918 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
919 if (GET_MODE_SIZE (tmode) < max_size)
920 mode = tmode;
921
922 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
bbf6f052
RK
923 {
924 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
925 data.autinc_from = 1;
926 data.explicit_inc_from = -1;
927 }
fbe1758d 928 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
bbf6f052
RK
929 {
930 data.from_addr = copy_addr_to_reg (from_addr);
931 data.autinc_from = 1;
932 data.explicit_inc_from = 1;
933 }
bbf6f052
RK
934 if (!data.autinc_from && CONSTANT_P (from_addr))
935 data.from_addr = copy_addr_to_reg (from_addr);
fbe1758d 936 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
bbf6f052
RK
937 {
938 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
939 data.autinc_to = 1;
940 data.explicit_inc_to = -1;
941 }
fbe1758d 942 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
bbf6f052
RK
943 {
944 data.to_addr = copy_addr_to_reg (to_addr);
945 data.autinc_to = 1;
946 data.explicit_inc_to = 1;
947 }
bbf6f052
RK
948 if (!data.autinc_to && CONSTANT_P (to_addr))
949 data.to_addr = copy_addr_to_reg (to_addr);
950 }
951
f64d6991
DE
952 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
953 if (align >= GET_MODE_ALIGNMENT (tmode))
954 align = GET_MODE_ALIGNMENT (tmode);
955 else
956 {
957 enum machine_mode xmode;
958
959 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
960 tmode != VOIDmode;
961 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
962 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
963 || SLOW_UNALIGNED_ACCESS (tmode, align))
964 break;
965
966 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
967 }
bbf6f052
RK
968
969 /* First move what we can in the largest integer mode, then go to
970 successively smaller modes. */
971
972 while (max_size > 1)
973 {
e7c33f54
RK
974 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
975 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
976 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
977 mode = tmode;
978
979 if (mode == VOIDmode)
980 break;
981
982 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 983 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
bbf6f052
RK
984 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
985
986 max_size = GET_MODE_SIZE (mode);
987 }
988
989 /* The code above should have handled everything. */
5b0264cb 990 gcc_assert (!data.len);
8fd3cf4e
JJ
991
992 if (endp)
993 {
994 rtx to1;
995
5b0264cb 996 gcc_assert (!data.reverse);
8fd3cf4e
JJ
997 if (data.autinc_to)
998 {
999 if (endp == 2)
1000 {
1001 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1002 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1003 else
1004 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1005 -1));
1006 }
1007 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1008 data.offset);
1009 }
1010 else
1011 {
1012 if (endp == 2)
1013 --data.offset;
1014 to1 = adjust_address (data.to, QImode, data.offset);
1015 }
1016 return to1;
1017 }
1018 else
1019 return data.to;
bbf6f052
RK
1020}
1021
1022/* Return number of insns required to move L bytes by pieces.
f1eaaf73 1023 ALIGN (in bits) is maximum alignment we can assume. */
bbf6f052 1024
3bdf5ad1 1025static unsigned HOST_WIDE_INT
45d78e7f
JJ
1026move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1027 unsigned int max_size)
bbf6f052 1028{
3bdf5ad1 1029 unsigned HOST_WIDE_INT n_insns = 0;
f64d6991 1030 enum machine_mode tmode;
bbf6f052 1031
f64d6991
DE
1032 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1033 if (align >= GET_MODE_ALIGNMENT (tmode))
1034 align = GET_MODE_ALIGNMENT (tmode);
1035 else
1036 {
1037 enum machine_mode tmode, xmode;
1038
1039 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1040 tmode != VOIDmode;
1041 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1042 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1043 || SLOW_UNALIGNED_ACCESS (tmode, align))
1044 break;
1045
1046 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1047 }
bbf6f052
RK
1048
1049 while (max_size > 1)
1050 {
f64d6991 1051 enum machine_mode mode = VOIDmode;
bbf6f052
RK
1052 enum insn_code icode;
1053
e7c33f54
RK
1054 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1055 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1056 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
1057 mode = tmode;
1058
1059 if (mode == VOIDmode)
1060 break;
1061
1062 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 1063 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
bbf6f052
RK
1064 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1065
1066 max_size = GET_MODE_SIZE (mode);
1067 }
1068
5b0264cb 1069 gcc_assert (!l);
bbf6f052
RK
1070 return n_insns;
1071}
1072
1073/* Subroutine of move_by_pieces. Move as many bytes as appropriate
1074 with move instructions for mode MODE. GENFUN is the gen_... function
1075 to make a move insn for that mode. DATA has all the other info. */
1076
1077static void
502b8322
AJ
1078move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1079 struct move_by_pieces *data)
bbf6f052 1080{
3bdf5ad1 1081 unsigned int size = GET_MODE_SIZE (mode);
ae0ed63a 1082 rtx to1 = NULL_RTX, from1;
bbf6f052
RK
1083
1084 while (data->len >= size)
1085 {
3bdf5ad1
RK
1086 if (data->reverse)
1087 data->offset -= size;
1088
566aa174 1089 if (data->to)
3bdf5ad1 1090 {
566aa174 1091 if (data->autinc_to)
630036c6
JJ
1092 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1093 data->offset);
566aa174 1094 else
f4ef873c 1095 to1 = adjust_address (data->to, mode, data->offset);
3bdf5ad1 1096 }
3bdf5ad1
RK
1097
1098 if (data->autinc_from)
630036c6
JJ
1099 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1100 data->offset);
3bdf5ad1 1101 else
f4ef873c 1102 from1 = adjust_address (data->from, mode, data->offset);
bbf6f052 1103
940da324 1104 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
3d709fd3
RH
1105 emit_insn (gen_add2_insn (data->to_addr,
1106 GEN_INT (-(HOST_WIDE_INT)size)));
940da324 1107 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
3d709fd3
RH
1108 emit_insn (gen_add2_insn (data->from_addr,
1109 GEN_INT (-(HOST_WIDE_INT)size)));
bbf6f052 1110
566aa174
JH
1111 if (data->to)
1112 emit_insn ((*genfun) (to1, from1));
1113 else
21d93687
RK
1114 {
1115#ifdef PUSH_ROUNDING
1116 emit_single_push_insn (mode, from1, NULL);
1117#else
5b0264cb 1118 gcc_unreachable ();
21d93687
RK
1119#endif
1120 }
3bdf5ad1 1121
940da324 1122 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
906c4e36 1123 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
940da324 1124 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
906c4e36 1125 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
bbf6f052 1126
3bdf5ad1
RK
1127 if (! data->reverse)
1128 data->offset += size;
bbf6f052
RK
1129
1130 data->len -= size;
1131 }
1132}
1133\f
4ca79136
RH
1134/* Emit code to move a block Y to a block X. This may be done with
1135 string-move instructions, with multiple scalar move instructions,
1136 or with a library call.
bbf6f052 1137
4ca79136 1138 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
bbf6f052 1139 SIZE is an rtx that says how long they are.
19caa751 1140 ALIGN is the maximum alignment we can assume they have.
44bb111a 1141 METHOD describes what kind of copy this is, and what mechanisms may be used.
bbf6f052 1142
e9a25f70
JL
1143 Return the address of the new block, if memcpy is called and returns it,
1144 0 otherwise. */
1145
1146rtx
502b8322 1147emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
bbf6f052 1148{
44bb111a 1149 bool may_use_call;
e9a25f70 1150 rtx retval = 0;
44bb111a
RH
1151 unsigned int align;
1152
1153 switch (method)
1154 {
1155 case BLOCK_OP_NORMAL:
8148fe65 1156 case BLOCK_OP_TAILCALL:
44bb111a
RH
1157 may_use_call = true;
1158 break;
1159
1160 case BLOCK_OP_CALL_PARM:
1161 may_use_call = block_move_libcall_safe_for_call_parm ();
1162
1163 /* Make inhibit_defer_pop nonzero around the library call
1164 to force it to pop the arguments right away. */
1165 NO_DEFER_POP;
1166 break;
1167
1168 case BLOCK_OP_NO_LIBCALL:
1169 may_use_call = false;
1170 break;
1171
1172 default:
5b0264cb 1173 gcc_unreachable ();
44bb111a
RH
1174 }
1175
1176 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
e9a25f70 1177
5b0264cb
NS
1178 gcc_assert (MEM_P (x));
1179 gcc_assert (MEM_P (y));
1180 gcc_assert (size);
bbf6f052 1181
82c82743
RH
1182 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1183 block copy is more efficient for other large modes, e.g. DCmode. */
1184 x = adjust_address (x, BLKmode, 0);
1185 y = adjust_address (y, BLKmode, 0);
1186
cb38fd88
RH
1187 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1188 can be incorrect is coming from __builtin_memcpy. */
1189 if (GET_CODE (size) == CONST_INT)
1190 {
6972c506
JJ
1191 if (INTVAL (size) == 0)
1192 return 0;
1193
cb38fd88
RH
1194 x = shallow_copy_rtx (x);
1195 y = shallow_copy_rtx (y);
1196 set_mem_size (x, size);
1197 set_mem_size (y, size);
1198 }
1199
fbe1758d 1200 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
8fd3cf4e 1201 move_by_pieces (x, y, INTVAL (size), align, 0);
70128ad9 1202 else if (emit_block_move_via_movmem (x, y, size, align))
4ca79136 1203 ;
44bb111a 1204 else if (may_use_call)
8148fe65
JJ
1205 retval = emit_block_move_via_libcall (x, y, size,
1206 method == BLOCK_OP_TAILCALL);
44bb111a
RH
1207 else
1208 emit_block_move_via_loop (x, y, size, align);
1209
1210 if (method == BLOCK_OP_CALL_PARM)
1211 OK_DEFER_POP;
266007a7 1212
4ca79136
RH
1213 return retval;
1214}
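
/* Illustrative usage sketch (hypothetical caller): copying SIZE bytes
   between two BLKmode MEMs with the default strategy selection:

     rtx dst_mem, src_mem;	/* BLKmode MEMs set up by the caller.  */
     emit_block_move (dst_mem, src_mem, GEN_INT (size), BLOCK_OP_NORMAL);

   BLOCK_OP_CALL_PARM is used instead when the copy feeds an outgoing
   argument of a call, so that a memcpy libcall is emitted only when it
   cannot clobber parameters already placed on the stack.  */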
266007a7 1215
502b8322 1216/* A subroutine of emit_block_move. Returns true if calling the
44bb111a
RH
1217 block move libcall will not clobber any parameters which may have
1218 already been placed on the stack. */
1219
1220static bool
502b8322 1221block_move_libcall_safe_for_call_parm (void)
44bb111a 1222{
a357a6d4 1223 /* If arguments are pushed on the stack, then they're safe. */
44bb111a
RH
1224 if (PUSH_ARGS)
1225 return true;
44bb111a 1226
450b1728 1227 /* If registers go on the stack anyway, any argument is sure to clobber
a357a6d4
GK
1228 an outgoing argument. */
1229#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1230 {
1231 tree fn = emit_block_move_libcall_fn (false);
1232 (void) fn;
1233 if (REG_PARM_STACK_SPACE (fn) != 0)
1234 return false;
1235 }
44bb111a 1236#endif
44bb111a 1237
a357a6d4
GK
1238 /* If any argument goes in memory, then it might clobber an outgoing
1239 argument. */
1240 {
1241 CUMULATIVE_ARGS args_so_far;
1242 tree fn, arg;
450b1728 1243
a357a6d4 1244 fn = emit_block_move_libcall_fn (false);
0f6937fe 1245 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
450b1728 1246
a357a6d4
GK
1247 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1248 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1249 {
1250 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1251 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1252 if (!tmp || !REG_P (tmp))
44bb111a 1253 return false;
78a52f11 1254 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
a357a6d4 1255 return false;
a357a6d4
GK
1256 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1257 }
1258 }
1259 return true;
44bb111a
RH
1260}
1261
70128ad9 1262/* A subroutine of emit_block_move. Expand a movmem pattern;
4ca79136 1263 return true if successful. */
3ef1eef4 1264
4ca79136 1265static bool
70128ad9 1266emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
4ca79136 1267{
4ca79136 1268 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
a5e9c810 1269 int save_volatile_ok = volatile_ok;
4ca79136 1270 enum machine_mode mode;
266007a7 1271
4ca79136
RH
1272 /* Since this is a move insn, we don't care about volatility. */
1273 volatile_ok = 1;
1274
ee960939
OH
1275 /* Try the most limited insn first, because there's no point
1276 including more than one in the machine description unless
1277 the more limited one has some advantage. */
1278
4ca79136
RH
1279 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1280 mode = GET_MODE_WIDER_MODE (mode))
1281 {
70128ad9 1282 enum insn_code code = movmem_optab[(int) mode];
4ca79136
RH
1283 insn_operand_predicate_fn pred;
1284
1285 if (code != CODE_FOR_nothing
1286 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1287 here because if SIZE is less than the mode mask, as it is
1288 returned by the macro, it will definitely be less than the
1289 actual mode mask. */
1290 && ((GET_CODE (size) == CONST_INT
1291 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1292 <= (GET_MODE_MASK (mode) >> 1)))
1293 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1294 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1295 || (*pred) (x, BLKmode))
1296 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1297 || (*pred) (y, BLKmode))
1298 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1299 || (*pred) (opalign, VOIDmode)))
1300 {
1301 rtx op2;
1302 rtx last = get_last_insn ();
1303 rtx pat;
1304
1305 op2 = convert_to_mode (mode, size, 1);
1306 pred = insn_data[(int) code].operand[2].predicate;
1307 if (pred != 0 && ! (*pred) (op2, mode))
1308 op2 = copy_to_mode_reg (mode, op2);
1309
1310 /* ??? When called via emit_block_move_for_call, it'd be
1311 nice if there were some way to inform the backend, so
1312 that it doesn't fail the expansion because it thinks
1313 emitting the libcall would be more efficient. */
1314
1315 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1316 if (pat)
1317 {
1318 emit_insn (pat);
a5e9c810 1319 volatile_ok = save_volatile_ok;
4ca79136 1320 return true;
bbf6f052 1321 }
4ca79136
RH
1322 else
1323 delete_insns_since (last);
bbf6f052 1324 }
4ca79136 1325 }
bbf6f052 1326
a5e9c810 1327 volatile_ok = save_volatile_ok;
4ca79136
RH
1328 return false;
1329}
3ef1eef4 1330
8f99553f 1331/* A subroutine of emit_block_move. Expand a call to memcpy.
4ca79136 1332 Return the return value from memcpy, 0 otherwise. */
4bc973ae 1333
4ca79136 1334static rtx
8148fe65 1335emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
4ca79136 1336{
ee960939 1337 rtx dst_addr, src_addr;
4ca79136
RH
1338 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1339 enum machine_mode size_mode;
1340 rtx retval;
4bc973ae 1341
ad76cef8
PB
1342 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1343 pseudos. We can then place those new pseudos into a VAR_DECL and
1344 use them later. */
ee960939
OH
1345
1346 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1347 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
4ca79136 1348
ee960939
OH
1349 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1350 src_addr = convert_memory_address (ptr_mode, src_addr);
ee960939
OH
1351
1352 dst_tree = make_tree (ptr_type_node, dst_addr);
1353 src_tree = make_tree (ptr_type_node, src_addr);
4ca79136 1354
8f99553f 1355 size_mode = TYPE_MODE (sizetype);
ee960939 1356
4ca79136
RH
1357 size = convert_to_mode (size_mode, size, 1);
1358 size = copy_to_mode_reg (size_mode, size);
1359
1360 /* It is incorrect to use the libcall calling conventions to call
1361 memcpy in this context. This could be a user call to memcpy and
1362 the user may wish to examine the return value from memcpy. For
1363 targets where libcalls and normal calls have different conventions
8f99553f 1364 for returning pointers, we could end up generating incorrect code. */
4ca79136 1365
8f99553f 1366 size_tree = make_tree (sizetype, size);
4ca79136
RH
1367
1368 fn = emit_block_move_libcall_fn (true);
1369 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
8f99553f
JM
1370 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1371 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
4ca79136
RH
1372
1373 /* Now we have to build up the CALL_EXPR itself. */
1374 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3244e67d
RS
1375 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1376 call_expr, arg_list, NULL_TREE);
8148fe65 1377 CALL_EXPR_TAILCALL (call_expr) = tailcall;
4ca79136
RH
1378
1379 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1380
8f99553f 1381 return retval;
4ca79136 1382}
52cf7115 1383
4ca79136
RH
1384/* A subroutine of emit_block_move_via_libcall. Create the tree node
1385 for the function we use for block copies. The first time FOR_CALL
1386 is true, we call assemble_external. */
52cf7115 1387
4ca79136
RH
1388static GTY(()) tree block_move_fn;
1389
9661b15f 1390void
502b8322 1391init_block_move_fn (const char *asmspec)
4ca79136 1392{
9661b15f 1393 if (!block_move_fn)
4ca79136 1394 {
8fd3cf4e 1395 tree args, fn;
9661b15f 1396
8f99553f
JM
1397 fn = get_identifier ("memcpy");
1398 args = build_function_type_list (ptr_type_node, ptr_type_node,
1399 const_ptr_type_node, sizetype,
1400 NULL_TREE);
52cf7115 1401
4ca79136
RH
1402 fn = build_decl (FUNCTION_DECL, fn, args);
1403 DECL_EXTERNAL (fn) = 1;
1404 TREE_PUBLIC (fn) = 1;
1405 DECL_ARTIFICIAL (fn) = 1;
1406 TREE_NOTHROW (fn) = 1;
66c60e67 1407
4ca79136 1408 block_move_fn = fn;
bbf6f052 1409 }
e9a25f70 1410
9661b15f 1411 if (asmspec)
0e6df31e 1412 set_user_assembler_name (block_move_fn, asmspec);
9661b15f
JJ
1413}
1414
1415static tree
502b8322 1416emit_block_move_libcall_fn (int for_call)
9661b15f
JJ
1417{
1418 static bool emitted_extern;
1419
1420 if (!block_move_fn)
1421 init_block_move_fn (NULL);
1422
4ca79136
RH
1423 if (for_call && !emitted_extern)
1424 {
1425 emitted_extern = true;
0e6df31e 1426 make_decl_rtl (block_move_fn);
9661b15f 1427 assemble_external (block_move_fn);
4ca79136
RH
1428 }
1429
9661b15f 1430 return block_move_fn;
bbf6f052 1431}
44bb111a
RH
1432
1433/* A subroutine of emit_block_move. Copy the data via an explicit
1434 loop. This is used only when libcalls are forbidden. */
1435/* ??? It'd be nice to copy in hunks larger than QImode. */
1436
1437static void
502b8322
AJ
1438emit_block_move_via_loop (rtx x, rtx y, rtx size,
1439 unsigned int align ATTRIBUTE_UNUSED)
44bb111a
RH
1440{
1441 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1442 enum machine_mode iter_mode;
1443
1444 iter_mode = GET_MODE (size);
1445 if (iter_mode == VOIDmode)
1446 iter_mode = word_mode;
1447
1448 top_label = gen_label_rtx ();
1449 cmp_label = gen_label_rtx ();
1450 iter = gen_reg_rtx (iter_mode);
1451
1452 emit_move_insn (iter, const0_rtx);
1453
1454 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1455 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1456 do_pending_stack_adjust ();
1457
44bb111a
RH
1458 emit_jump (cmp_label);
1459 emit_label (top_label);
1460
1461 tmp = convert_modes (Pmode, iter_mode, iter, true);
1462 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1463 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1464 x = change_address (x, QImode, x_addr);
1465 y = change_address (y, QImode, y_addr);
1466
1467 emit_move_insn (x, y);
1468
1469 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1470 true, OPTAB_LIB_WIDEN);
1471 if (tmp != iter)
1472 emit_move_insn (iter, tmp);
1473
44bb111a
RH
1474 emit_label (cmp_label);
1475
1476 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1477 true, top_label);
44bb111a 1478}
bbf6f052
RK
1479\f
1480/* Copy all or part of a value X into registers starting at REGNO.
1481 The number of registers to be filled is NREGS. */
1482
1483void
502b8322 1484move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
bbf6f052
RK
1485{
1486 int i;
381127e8 1487#ifdef HAVE_load_multiple
3a94c984 1488 rtx pat;
381127e8
RL
1489 rtx last;
1490#endif
bbf6f052 1491
72bb9717
RK
1492 if (nregs == 0)
1493 return;
1494
bbf6f052
RK
1495 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1496 x = validize_mem (force_const_mem (mode, x));
1497
1498 /* See if the machine can do this with a load multiple insn. */
1499#ifdef HAVE_load_multiple
c3a02afe 1500 if (HAVE_load_multiple)
bbf6f052 1501 {
c3a02afe 1502 last = get_last_insn ();
38a448ca 1503 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
c3a02afe
RK
1504 GEN_INT (nregs));
1505 if (pat)
1506 {
1507 emit_insn (pat);
1508 return;
1509 }
1510 else
1511 delete_insns_since (last);
bbf6f052 1512 }
bbf6f052
RK
1513#endif
1514
1515 for (i = 0; i < nregs; i++)
38a448ca 1516 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
bbf6f052
RK
1517 operand_subword_force (x, i, mode));
1518}
1519
1520/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
c6b97fac 1521 The number of registers to be filled is NREGS. */
0040593d 1522
bbf6f052 1523void
502b8322 1524move_block_from_reg (int regno, rtx x, int nregs)
bbf6f052
RK
1525{
1526 int i;
bbf6f052 1527
2954d7db
RK
1528 if (nregs == 0)
1529 return;
1530
bbf6f052
RK
1531 /* See if the machine can do this with a store multiple insn. */
1532#ifdef HAVE_store_multiple
c3a02afe 1533 if (HAVE_store_multiple)
bbf6f052 1534 {
c6b97fac
AM
1535 rtx last = get_last_insn ();
1536 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1537 GEN_INT (nregs));
c3a02afe
RK
1538 if (pat)
1539 {
1540 emit_insn (pat);
1541 return;
1542 }
1543 else
1544 delete_insns_since (last);
bbf6f052 1545 }
bbf6f052
RK
1546#endif
1547
1548 for (i = 0; i < nregs; i++)
1549 {
1550 rtx tem = operand_subword (x, i, 1, BLKmode);
1551
5b0264cb 1552 gcc_assert (tem);
bbf6f052 1553
38a448ca 1554 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
bbf6f052
RK
1555 }
1556}
1557
084a1106
JDA
1558/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1559 ORIG, where ORIG is a non-consecutive group of registers represented by
1560 a PARALLEL. The clone is identical to the original except in that the
1561 original set of registers is replaced by a new set of pseudo registers.
1562 The new set has the same modes as the original set. */
1563
1564rtx
502b8322 1565gen_group_rtx (rtx orig)
084a1106
JDA
1566{
1567 int i, length;
1568 rtx *tmps;
1569
5b0264cb 1570 gcc_assert (GET_CODE (orig) == PARALLEL);
084a1106
JDA
1571
1572 length = XVECLEN (orig, 0);
703ad42b 1573 tmps = alloca (sizeof (rtx) * length);
084a1106
JDA
1574
1575 /* Skip a NULL entry in first slot. */
1576 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1577
1578 if (i)
1579 tmps[0] = 0;
1580
1581 for (; i < length; i++)
1582 {
1583 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1584 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1585
1586 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1587 }
1588
1589 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1590}
1591
27e29549
RH
1592/* A subroutine of emit_group_load. Arguments as for emit_group_load,
1593 except that values are placed in TMPS[i], and must later be moved
daa956d0 1594 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
fffa9c1d 1595
27e29549
RH
1596static void
1597emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
fffa9c1d 1598{
27e29549 1599 rtx src;
aac5cc16 1600 int start, i;
7ef7000b 1601 enum machine_mode m = GET_MODE (orig_src);
fffa9c1d 1602
5b0264cb 1603 gcc_assert (GET_CODE (dst) == PARALLEL);
fffa9c1d 1604
f2978871
AM
1605 if (m != VOIDmode
1606 && !SCALAR_INT_MODE_P (m)
1607 && !MEM_P (orig_src)
1608 && GET_CODE (orig_src) != CONCAT)
782fa603
AH
1609 {
1610 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1611 if (imode == BLKmode)
1612 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1613 else
1614 src = gen_reg_rtx (imode);
1615 if (imode != BLKmode)
1616 src = gen_lowpart (GET_MODE (orig_src), src);
1617 emit_move_insn (src, orig_src);
1618 /* ...and back again. */
1619 if (imode != BLKmode)
1620 src = gen_lowpart (imode, src);
27e29549 1621 emit_group_load_1 (tmps, dst, src, type, ssize);
782fa603
AH
1622 return;
1623 }
1624
fffa9c1d
JW
1625 /* Check for a NULL entry, used to indicate that the parameter goes
1626 both on the stack and in registers. */
aac5cc16
RH
1627 if (XEXP (XVECEXP (dst, 0, 0), 0))
1628 start = 0;
fffa9c1d 1629 else
aac5cc16
RH
1630 start = 1;
1631
aac5cc16
RH
1632 /* Process the pieces. */
1633 for (i = start; i < XVECLEN (dst, 0); i++)
1634 {
1635 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
770ae6cc
RK
1636 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1637 unsigned int bytelen = GET_MODE_SIZE (mode);
aac5cc16
RH
1638 int shift = 0;
1639
1640 /* Handle trailing fragments that run over the size of the struct. */
8752c357 1641 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
aac5cc16 1642 {
6e985040
AM
1643 /* Arrange to shift the fragment to where it belongs.
1644 extract_bit_field loads to the lsb of the reg. */
1645 if (
1646#ifdef BLOCK_REG_PADDING
1647 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1648 == (BYTES_BIG_ENDIAN ? upward : downward)
1649#else
1650 BYTES_BIG_ENDIAN
1651#endif
1652 )
1653 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
aac5cc16 1654 bytelen = ssize - bytepos;
5b0264cb 1655 gcc_assert (bytelen > 0);
aac5cc16
RH
1656 }
1657
f3ce87a9
DE
1658 /* If we won't be loading directly from memory, protect the real source
1659 from strange tricks we might play; but make sure that the source can
1660 be loaded directly into the destination. */
1661 src = orig_src;
3c0cb5de 1662 if (!MEM_P (orig_src)
f3ce87a9
DE
1663 && (!CONSTANT_P (orig_src)
1664 || (GET_MODE (orig_src) != mode
1665 && GET_MODE (orig_src) != VOIDmode)))
1666 {
1667 if (GET_MODE (orig_src) == VOIDmode)
1668 src = gen_reg_rtx (mode);
1669 else
1670 src = gen_reg_rtx (GET_MODE (orig_src));
04050c69 1671
f3ce87a9
DE
1672 emit_move_insn (src, orig_src);
1673 }
1674
aac5cc16 1675 /* Optimize the access just a bit. */
3c0cb5de 1676 if (MEM_P (src)
6e985040
AM
1677 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1678 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
729a2125 1679 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16
RH
1680 && bytelen == GET_MODE_SIZE (mode))
1681 {
1682 tmps[i] = gen_reg_rtx (mode);
f4ef873c 1683 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
fffa9c1d 1684 }
d20b1190
EB
1685 else if (COMPLEX_MODE_P (mode)
1686 && GET_MODE (src) == mode
1687 && bytelen == GET_MODE_SIZE (mode))
1688 /* Let emit_move_complex do the bulk of the work. */
1689 tmps[i] = src;
7c4a6db0
JW
1690 else if (GET_CODE (src) == CONCAT)
1691 {
015b1ad1
JDA
1692 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1693 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1694
1695 if ((bytepos == 0 && bytelen == slen0)
1696 || (bytepos != 0 && bytepos + bytelen <= slen))
cbb92744 1697 {
015b1ad1
JDA
1698 /* The following assumes that the concatenated objects all
1699 have the same size. In this case, a simple calculation
1700 can be used to determine the object and the bit field
1701 to be extracted. */
1702 tmps[i] = XEXP (src, bytepos / slen0);
cbb92744 1703 if (! CONSTANT_P (tmps[i])
f8cfc6aa 1704 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
cbb92744 1705 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
015b1ad1 1706 (bytepos % slen0) * BITS_PER_UNIT,
b3520980 1707 1, NULL_RTX, mode, mode);
cbb92744 1708 }
5b0264cb 1709 else
58f69841 1710 {
5b0264cb 1711 rtx mem;
f58c00e3 1712
5b0264cb
NS
1713 gcc_assert (!bytepos);
1714 mem = assign_stack_temp (GET_MODE (src), slen, 0);
58f69841 1715 emit_move_insn (mem, src);
f58c00e3
EB
1716 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1717 0, 1, NULL_RTX, mode, mode);
58f69841 1718 }
7c4a6db0 1719 }
9c0631a7
AH
1720 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1721 SIMD register, which is currently broken. Until we get GCC
1722 to emit proper RTL for these cases, let's dump to memory. */
1723 else if (VECTOR_MODE_P (GET_MODE (dst))
f8cfc6aa 1724 && REG_P (src))
9c0631a7
AH
1725 {
1726 int slen = GET_MODE_SIZE (GET_MODE (src));
1727 rtx mem;
1728
1729 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1730 emit_move_insn (mem, src);
1731 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1732 }
d3a16cbd
FJ
1733 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1734 && XVECLEN (dst, 0) > 1)
1735 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
f3ce87a9 1736 else if (CONSTANT_P (src)
f8cfc6aa 1737 || (REG_P (src) && GET_MODE (src) == mode))
2ee5437b 1738 tmps[i] = src;
fffa9c1d 1739 else
19caa751
RK
1740 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1741 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
b3520980 1742 mode, mode);
fffa9c1d 1743
6e985040 1744 if (shift)
09b52670 1745 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
7d60be94 1746 build_int_cst (NULL_TREE, shift), tmps[i], 0);
fffa9c1d 1747 }
27e29549
RH
1748}
1749
1750/* Emit code to move a block SRC of type TYPE to a block DST,
1751 where DST is non-consecutive registers represented by a PARALLEL.
1752 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1753 if not known. */
1754
1755void
1756emit_group_load (rtx dst, rtx src, tree type, int ssize)
1757{
1758 rtx *tmps;
1759 int i;
1760
1761 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1762 emit_group_load_1 (tmps, dst, src, type, ssize);
19caa751 1763
aac5cc16 1764 /* Copy the extracted pieces into the proper (probable) hard regs. */
27e29549
RH
1765 for (i = 0; i < XVECLEN (dst, 0); i++)
1766 {
1767 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1768 if (d == NULL)
1769 continue;
1770 emit_move_insn (d, tmps[i]);
1771 }
1772}
1773
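/* Illustrative sketch (editorial example, not part of GCC's expr.c):
   building the register/offset PARALLEL that emit_group_load expects and
   loading a 16-byte block into it.  The hard register numbers 4 and 5,
   DImode and the 16-byte size are assumptions made only for this
   example.  */

static void
example_group_load (rtx mem, tree type)
{
  rtx dst
    = gen_rtx_PARALLEL (BLKmode,
			gen_rtvec (2,
				   gen_rtx_EXPR_LIST (VOIDmode,
						      gen_rtx_REG (DImode, 4),
						      GEN_INT (0)),
				   gen_rtx_EXPR_LIST (VOIDmode,
						      gen_rtx_REG (DImode, 5),
						      GEN_INT (8))));

  /* The last argument is SSIZE, the total size of the block in bytes.  */
  emit_group_load (dst, mem, type, 16);
}
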
1774/* Similar, but load SRC into new pseudos in a format that looks like
1775 PARALLEL. This can later be fed to emit_group_move to get things
1776 in the right place. */
1777
1778rtx
1779emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1780{
1781 rtvec vec;
1782 int i;
1783
1784 vec = rtvec_alloc (XVECLEN (parallel, 0));
1785 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1786
1787 /* Convert the vector to look just like the original PARALLEL, except
1788 with the computed values. */
1789 for (i = 0; i < XVECLEN (parallel, 0); i++)
1790 {
1791 rtx e = XVECEXP (parallel, 0, i);
1792 rtx d = XEXP (e, 0);
1793
1794 if (d)
1795 {
1796 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1797 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1798 }
1799 RTVEC_ELT (vec, i) = e;
1800 }
1801
1802 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
fffa9c1d
JW
1803}
1804
084a1106
JDA
1805/* Emit code to move a block SRC to block DST, where SRC and DST are
1806 non-consecutive groups of registers, each represented by a PARALLEL. */
1807
1808void
502b8322 1809emit_group_move (rtx dst, rtx src)
084a1106
JDA
1810{
1811 int i;
1812
5b0264cb
NS
1813 gcc_assert (GET_CODE (src) == PARALLEL
1814 && GET_CODE (dst) == PARALLEL
1815 && XVECLEN (src, 0) == XVECLEN (dst, 0));
084a1106
JDA
1816
1817 /* Skip first entry if NULL. */
1818 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1819 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1820 XEXP (XVECEXP (src, 0, i), 0));
1821}
1822
27e29549
RH
1823/* Move a group of registers represented by a PARALLEL into pseudos. */
1824
1825rtx
1826emit_group_move_into_temps (rtx src)
1827{
1828 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1829 int i;
1830
1831 for (i = 0; i < XVECLEN (src, 0); i++)
1832 {
1833 rtx e = XVECEXP (src, 0, i);
1834 rtx d = XEXP (e, 0);
1835
1836 if (d)
1837 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1838 RTVEC_ELT (vec, i) = e;
1839 }
1840
1841 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1842}
1843
6e985040
AM
1844/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1845 where SRC is non-consecutive registers represented by a PARALLEL.
1846 SSIZE represents the total size of block ORIG_DST, or -1 if not
1847 known. */
fffa9c1d
JW
1848
1849void
6e985040 1850emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
fffa9c1d 1851{
aac5cc16
RH
1852 rtx *tmps, dst;
1853 int start, i;
7ef7000b 1854 enum machine_mode m = GET_MODE (orig_dst);
fffa9c1d 1855
5b0264cb 1856 gcc_assert (GET_CODE (src) == PARALLEL);
fffa9c1d 1857
0da34ce4
RH
1858 if (!SCALAR_INT_MODE_P (m)
1859 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
782fa603
AH
1860 {
1861 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1862 if (imode == BLKmode)
1863 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1864 else
1865 dst = gen_reg_rtx (imode);
1866 emit_group_store (dst, src, type, ssize);
1867 if (imode != BLKmode)
1868 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1869 emit_move_insn (orig_dst, dst);
1870 return;
1871 }
1872
fffa9c1d
JW
1873 /* Check for a NULL entry, used to indicate that the parameter goes
1874 both on the stack and in registers. */
aac5cc16
RH
1875 if (XEXP (XVECEXP (src, 0, 0), 0))
1876 start = 0;
fffa9c1d 1877 else
aac5cc16
RH
1878 start = 1;
1879
703ad42b 1880 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
fffa9c1d 1881
aac5cc16
RH
1882 /* Copy the (probable) hard regs into pseudos. */
1883 for (i = start; i < XVECLEN (src, 0); i++)
fffa9c1d 1884 {
aac5cc16
RH
1885 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1886 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1887 emit_move_insn (tmps[i], reg);
1888 }
fffa9c1d 1889
aac5cc16
RH
1890 /* If we won't be storing directly into memory, protect the real destination
1891 from strange tricks we might play. */
1892 dst = orig_dst;
10a9f2be
JW
1893 if (GET_CODE (dst) == PARALLEL)
1894 {
1895 rtx temp;
1896
1897 /* We can get a PARALLEL dst if there is a conditional expression in
1898 a return statement. In that case, the dst and src are the same,
1899 so no action is necessary. */
1900 if (rtx_equal_p (dst, src))
1901 return;
1902
1903 /* It is unclear if we can ever reach here, but we may as well handle
1904 it. Allocate a temporary, and split this into a store/load to/from
1905 the temporary. */
1906
1907 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
6e985040
AM
1908 emit_group_store (temp, src, type, ssize);
1909 emit_group_load (dst, temp, type, ssize);
10a9f2be
JW
1910 return;
1911 }
3c0cb5de 1912 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
aac5cc16
RH
1913 {
1914 dst = gen_reg_rtx (GET_MODE (orig_dst));
1915 /* Make life a bit easier for combine. */
8ae91fc0 1916 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
aac5cc16 1917 }
aac5cc16
RH
1918
1919 /* Process the pieces. */
1920 for (i = start; i < XVECLEN (src, 0); i++)
1921 {
770ae6cc 1922 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
aac5cc16 1923 enum machine_mode mode = GET_MODE (tmps[i]);
770ae6cc 1924 unsigned int bytelen = GET_MODE_SIZE (mode);
6ddae612 1925 rtx dest = dst;
aac5cc16
RH
1926
1927 /* Handle trailing fragments that run over the size of the struct. */
8752c357 1928 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
71bc0330 1929 {
6e985040
AM
1930 /* store_bit_field always takes its value from the lsb.
1931 Move the fragment to the lsb if it's not already there. */
1932 if (
1933#ifdef BLOCK_REG_PADDING
1934 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1935 == (BYTES_BIG_ENDIAN ? upward : downward)
1936#else
1937 BYTES_BIG_ENDIAN
1938#endif
1939 )
aac5cc16
RH
1940 {
1941 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
09b52670 1942 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
7d60be94
NS
1943 build_int_cst (NULL_TREE, shift),
1944 tmps[i], 0);
aac5cc16
RH
1945 }
1946 bytelen = ssize - bytepos;
71bc0330 1947 }
fffa9c1d 1948
6ddae612
JJ
1949 if (GET_CODE (dst) == CONCAT)
1950 {
1951 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1952 dest = XEXP (dst, 0);
1953 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1954 {
1955 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1956 dest = XEXP (dst, 1);
1957 }
5b0264cb 1958 else
0d446150 1959 {
5b0264cb 1960 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
0d446150
JH
1961 dest = assign_stack_temp (GET_MODE (dest),
1962 GET_MODE_SIZE (GET_MODE (dest)), 0);
1963 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
1964 tmps[i]);
1965 dst = dest;
1966 break;
1967 }
6ddae612
JJ
1968 }
1969
aac5cc16 1970 /* Optimize the access just a bit. */
3c0cb5de 1971 if (MEM_P (dest)
6e985040
AM
1972 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
1973 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
729a2125 1974 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16 1975 && bytelen == GET_MODE_SIZE (mode))
6ddae612 1976 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
aac5cc16 1977 else
6ddae612 1978 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
b3520980 1979 mode, tmps[i]);
fffa9c1d 1980 }
729a2125 1981
aac5cc16 1982 /* Copy from the pseudo into the (probable) hard reg. */
0d446150 1983 if (orig_dst != dst)
aac5cc16 1984 emit_move_insn (orig_dst, dst);
fffa9c1d
JW
1985}
1986
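/* Illustrative sketch (editorial example, not part of GCC's expr.c): the
   store-side dual of the emit_group_load example above -- spill a value
   described by a two-register PARALLEL back into a memory block.  The
   register numbers, DImode and the 16-byte size are assumptions made only
   for this example.  */

static void
example_group_store (rtx mem, tree type)
{
  rtx src
    = gen_rtx_PARALLEL (BLKmode,
			gen_rtvec (2,
				   gen_rtx_EXPR_LIST (VOIDmode,
						      gen_rtx_REG (DImode, 4),
						      GEN_INT (0)),
				   gen_rtx_EXPR_LIST (VOIDmode,
						      gen_rtx_REG (DImode, 5),
						      GEN_INT (8))));

  emit_group_store (mem, src, type, 16);
}
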
c36fce9a
GRK
1987/* Generate code to copy a BLKmode object of TYPE out of a
1988 set of registers starting with SRCREG into TGTBLK. If TGTBLK
1989 is null, a stack temporary is created. TGTBLK is returned.
1990
c988af2b
RS
1991 The purpose of this routine is to handle functions that return
1992 BLKmode structures in registers. Some machines (the PA for example)
1993 want to return all small structures in registers regardless of the
1994 structure's alignment. */
c36fce9a
GRK
1995
1996rtx
502b8322 1997copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
c36fce9a 1998{
19caa751
RK
1999 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2000 rtx src = NULL, dst = NULL;
2001 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
c988af2b 2002 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
19caa751
RK
2003
2004 if (tgtblk == 0)
2005 {
1da68f56
RK
2006 tgtblk = assign_temp (build_qualified_type (type,
2007 (TYPE_QUALS (type)
2008 | TYPE_QUAL_CONST)),
2009 0, 1, 1);
19caa751
RK
2010 preserve_temp_slots (tgtblk);
2011 }
3a94c984 2012
1ed1b4fb 2013 /* This code assumes srcreg is at least a full word. If it isn't, copy it
9ac3e73b 2014 into a new pseudo which is a full word. */
0d7839da 2015
19caa751
RK
2016 if (GET_MODE (srcreg) != BLKmode
2017 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
8df83eae 2018 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
19caa751 2019
c988af2b
RS
2020 /* If the structure doesn't take up a whole number of words, see whether
2021 SRCREG is padded on the left or on the right. If it's on the left,
2022 set PADDING_CORRECTION to the number of bits to skip.
2023
2024 In most ABIs, the structure will be returned at the least significant end of
2025 the register, which translates to right padding on little-endian
2026 targets and left padding on big-endian targets. The opposite
2027 holds if the structure is returned at the most significant
2028 end of the register. */
2029 if (bytes % UNITS_PER_WORD != 0
2030 && (targetm.calls.return_in_msb (type)
2031 ? !BYTES_BIG_ENDIAN
2032 : BYTES_BIG_ENDIAN))
2033 padding_correction
19caa751
RK
2034 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2035
2036 /* Copy the structure BITSIZE bits at a time.
3a94c984 2037
19caa751
RK
2038 We could probably emit more efficient code for machines which do not use
2039 strict alignment, but it doesn't seem worth the effort at the current
2040 time. */
c988af2b 2041 for (bitpos = 0, xbitpos = padding_correction;
19caa751
RK
2042 bitpos < bytes * BITS_PER_UNIT;
2043 bitpos += bitsize, xbitpos += bitsize)
2044 {
3a94c984 2045 /* We need a new source operand each time xbitpos is on a
c988af2b 2046 word boundary and when xbitpos == padding_correction
19caa751
RK
2047 (the first time through). */
2048 if (xbitpos % BITS_PER_WORD == 0
c988af2b 2049 || xbitpos == padding_correction)
b47f8cfc
JH
2050 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2051 GET_MODE (srcreg));
19caa751
RK
2052
2053 /* We need a new destination operand each time bitpos is on
2054 a word boundary. */
2055 if (bitpos % BITS_PER_WORD == 0)
2056 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
3a94c984 2057
19caa751
RK
2058 /* Use xbitpos for the source extraction (right justified) and
2059 bitpos for the destination store (left justified). */
2060 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2061 extract_bit_field (src, bitsize,
2062 xbitpos % BITS_PER_WORD, 1,
b3520980 2063 NULL_RTX, word_mode, word_mode));
19caa751
RK
2064 }
2065
2066 return tgtblk;
c36fce9a
GRK
2067}
2068
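/* Illustrative sketch (editorial example, not part of GCC's expr.c): unpack
   a small structure that a callee returned in a register into a stack
   temporary.  Passing a null TGTBLK asks copy_blkmode_from_reg to create
   the temporary; RETVAL_REG and RET_TYPE are assumed to come from the
   caller and exist only for this example.  */

static rtx
example_unpack_blk_return (rtx retval_reg, tree ret_type)
{
  return copy_blkmode_from_reg (NULL_RTX, retval_reg, ret_type);
}
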
94b25f81
RK
2069/* Add a USE expression for REG to the (possibly empty) list pointed
2070 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
2071
2072void
502b8322 2073use_reg (rtx *call_fusage, rtx reg)
b3f8cf4a 2074{
5b0264cb
NS
2075 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2076
b3f8cf4a 2077 *call_fusage
38a448ca
RH
2078 = gen_rtx_EXPR_LIST (VOIDmode,
2079 gen_rtx_USE (VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
2080}
2081
94b25f81
RK
2082/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2083 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
2084
2085void
502b8322 2086use_regs (rtx *call_fusage, int regno, int nregs)
bbf6f052 2087{
0304dfbb 2088 int i;
bbf6f052 2089
5b0264cb 2090 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
0304dfbb
DE
2091
2092 for (i = 0; i < nregs; i++)
e50126e8 2093 use_reg (call_fusage, regno_reg_rtx[regno + i]);
bbf6f052 2094}
fffa9c1d
JW
2095
2096/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2097 PARALLEL REGS. This is for calls that pass values in multiple
2098 non-contiguous locations. The Irix 6 ABI has examples of this. */
2099
2100void
502b8322 2101use_group_regs (rtx *call_fusage, rtx regs)
fffa9c1d
JW
2102{
2103 int i;
2104
6bd35f86
DE
2105 for (i = 0; i < XVECLEN (regs, 0); i++)
2106 {
2107 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 2108
6bd35f86
DE
2109 /* A NULL entry means the parameter goes both on the stack and in
2110 registers. This can also be a MEM for targets that pass values
2111 partially on the stack and partially in registers. */
f8cfc6aa 2112 if (reg != 0 && REG_P (reg))
6bd35f86
DE
2113 use_reg (call_fusage, reg);
2114 }
fffa9c1d 2115}
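
/* Illustrative sketch (editorial example, not part of GCC's expr.c):
   building the CALL_INSN_FUNCTION_USAGE list for a call that reads three
   argument registers.  The hard register numbers and SImode are
   assumptions made only for this example.  */

static rtx
example_build_call_fusage (void)
{
  rtx call_fusage = NULL_RTX;

  /* One register passed on its own, then two consecutive registers.  */
  use_reg (&call_fusage, gen_rtx_REG (SImode, 4));
  use_regs (&call_fusage, 5, 2);

  return call_fusage;
}
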
bbf6f052 2116\f
57814e5e 2117
cf5124f6
RS
2118/* Determine whether the LEN bytes generated by CONSTFUN can be
2119 stored to memory using several move instructions. CONSTFUNDATA is
2120 a pointer which will be passed as argument in every CONSTFUN call.
2121 ALIGN is maximum alignment we can assume. Return nonzero if a
2122 call to store_by_pieces should succeed. */
2123
57814e5e 2124int
502b8322
AJ
2125can_store_by_pieces (unsigned HOST_WIDE_INT len,
2126 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2127 void *constfundata, unsigned int align)
57814e5e 2128{
45d78e7f
JJ
2129 unsigned HOST_WIDE_INT l;
2130 unsigned int max_size;
57814e5e
JJ
2131 HOST_WIDE_INT offset = 0;
2132 enum machine_mode mode, tmode;
2133 enum insn_code icode;
2134 int reverse;
2135 rtx cst;
2136
2c430630
RS
2137 if (len == 0)
2138 return 1;
2139
4977bab6 2140 if (! STORE_BY_PIECES_P (len, align))
57814e5e
JJ
2141 return 0;
2142
f64d6991
DE
2143 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2144 if (align >= GET_MODE_ALIGNMENT (tmode))
2145 align = GET_MODE_ALIGNMENT (tmode);
2146 else
2147 {
2148 enum machine_mode xmode;
2149
2150 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2151 tmode != VOIDmode;
2152 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2153 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2154 || SLOW_UNALIGNED_ACCESS (tmode, align))
2155 break;
2156
2157 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2158 }
57814e5e
JJ
2159
2160 /* We would first store what we can in the largest integer mode, then go to
2161 successively smaller modes. */
2162
2163 for (reverse = 0;
2164 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2165 reverse++)
2166 {
2167 l = len;
2168 mode = VOIDmode;
cf5124f6 2169 max_size = STORE_MAX_PIECES + 1;
57814e5e
JJ
2170 while (max_size > 1)
2171 {
2172 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2173 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2174 if (GET_MODE_SIZE (tmode) < max_size)
2175 mode = tmode;
2176
2177 if (mode == VOIDmode)
2178 break;
2179
2180 icode = mov_optab->handlers[(int) mode].insn_code;
2181 if (icode != CODE_FOR_nothing
2182 && align >= GET_MODE_ALIGNMENT (mode))
2183 {
2184 unsigned int size = GET_MODE_SIZE (mode);
2185
2186 while (l >= size)
2187 {
2188 if (reverse)
2189 offset -= size;
2190
2191 cst = (*constfun) (constfundata, offset, mode);
2192 if (!LEGITIMATE_CONSTANT_P (cst))
2193 return 0;
2194
2195 if (!reverse)
2196 offset += size;
2197
2198 l -= size;
2199 }
2200 }
2201
2202 max_size = GET_MODE_SIZE (mode);
2203 }
2204
2205 /* The code above should have handled everything. */
5b0264cb 2206 gcc_assert (!l);
57814e5e
JJ
2207 }
2208
2209 return 1;
2210}
2211
2212/* Generate several move instructions to store LEN bytes generated by
2213 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2214 pointer which will be passed as argument in every CONSTFUN call.
8fd3cf4e
JJ
2215 ALIGN is maximum alignment we can assume.
2216 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2217 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2218 stpcpy. */
57814e5e 2219
8fd3cf4e 2220rtx
502b8322
AJ
2221store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2222 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2223 void *constfundata, unsigned int align, int endp)
57814e5e
JJ
2224{
2225 struct store_by_pieces data;
2226
2c430630
RS
2227 if (len == 0)
2228 {
5b0264cb 2229 gcc_assert (endp != 2);
2c430630
RS
2230 return to;
2231 }
2232
5b0264cb 2233 gcc_assert (STORE_BY_PIECES_P (len, align));
57814e5e
JJ
2234 data.constfun = constfun;
2235 data.constfundata = constfundata;
2236 data.len = len;
2237 data.to = to;
2238 store_by_pieces_1 (&data, align);
8fd3cf4e
JJ
2239 if (endp)
2240 {
2241 rtx to1;
2242
5b0264cb 2243 gcc_assert (!data.reverse);
8fd3cf4e
JJ
2244 if (data.autinc_to)
2245 {
2246 if (endp == 2)
2247 {
2248 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2249 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2250 else
2251 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2252 -1));
2253 }
2254 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2255 data.offset);
2256 }
2257 else
2258 {
2259 if (endp == 2)
2260 --data.offset;
2261 to1 = adjust_address (data.to, QImode, data.offset);
2262 }
2263 return to1;
2264 }
2265 else
2266 return data.to;
57814e5e
JJ
2267}
2268
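/* Illustrative sketch (editorial example, not part of GCC's expr.c): the
   CONSTFUN protocol used by can_store_by_pieces and store_by_pieces.  The
   callback returns the constant piece of MODE that lives at byte OFFSET;
   this one yields zero for every piece, mirroring clear_by_pieces_1 below.
   The 16-byte length and 32-bit alignment are assumptions made only for
   this example.  */

static rtx
example_piece_value (void *data ATTRIBUTE_UNUSED,
		     HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		     enum machine_mode mode)
{
  return CONST0_RTX (mode);
}

static void
example_zero_16_bytes (rtx to)
{
  if (can_store_by_pieces (16, example_piece_value, NULL, 32))
    store_by_pieces (to, 16, example_piece_value, NULL, 32, 0);
}
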
19caa751 2269/* Generate several move instructions to clear LEN bytes of block TO. (A MEM
ad76cef8 2270 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
9de08200
RK
2271
2272static void
342e2b74 2273clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
9de08200 2274{
57814e5e
JJ
2275 struct store_by_pieces data;
2276
2c430630
RS
2277 if (len == 0)
2278 return;
2279
57814e5e 2280 data.constfun = clear_by_pieces_1;
df4ae160 2281 data.constfundata = NULL;
57814e5e
JJ
2282 data.len = len;
2283 data.to = to;
2284 store_by_pieces_1 (&data, align);
2285}
2286
2287/* Callback routine for clear_by_pieces.
2288 Return const0_rtx unconditionally. */
2289
2290static rtx
502b8322
AJ
2291clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2292 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2293 enum machine_mode mode ATTRIBUTE_UNUSED)
57814e5e
JJ
2294{
2295 return const0_rtx;
2296}
2297
2298/* Subroutine of clear_by_pieces and store_by_pieces.
2299 Generate several move instructions to store LEN bytes of block TO. (A MEM
ad76cef8 2300 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
57814e5e
JJ
2301
2302static void
502b8322
AJ
2303store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2304 unsigned int align ATTRIBUTE_UNUSED)
57814e5e
JJ
2305{
2306 rtx to_addr = XEXP (data->to, 0);
45d78e7f 2307 unsigned int max_size = STORE_MAX_PIECES + 1;
fbe1758d
AM
2308 enum machine_mode mode = VOIDmode, tmode;
2309 enum insn_code icode;
9de08200 2310
57814e5e
JJ
2311 data->offset = 0;
2312 data->to_addr = to_addr;
2313 data->autinc_to
9de08200
RK
2314 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2315 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2316
57814e5e
JJ
2317 data->explicit_inc_to = 0;
2318 data->reverse
9de08200 2319 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
57814e5e
JJ
2320 if (data->reverse)
2321 data->offset = data->len;
9de08200 2322
57814e5e 2323 /* If storing requires more than two move insns,
9de08200
RK
2324 copy addresses to registers (to make displacements shorter)
2325 and use post-increment if available. */
57814e5e 2326 if (!data->autinc_to
45d78e7f 2327 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
9de08200 2328 {
3a94c984 2329 /* Determine the main mode we'll be using. */
fbe1758d
AM
2330 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2331 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2332 if (GET_MODE_SIZE (tmode) < max_size)
2333 mode = tmode;
2334
57814e5e 2335 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
9de08200 2336 {
57814e5e
JJ
2337 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2338 data->autinc_to = 1;
2339 data->explicit_inc_to = -1;
9de08200 2340 }
3bdf5ad1 2341
57814e5e
JJ
2342 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2343 && ! data->autinc_to)
9de08200 2344 {
57814e5e
JJ
2345 data->to_addr = copy_addr_to_reg (to_addr);
2346 data->autinc_to = 1;
2347 data->explicit_inc_to = 1;
9de08200 2348 }
3bdf5ad1 2349
57814e5e
JJ
2350 if ( !data->autinc_to && CONSTANT_P (to_addr))
2351 data->to_addr = copy_addr_to_reg (to_addr);
9de08200
RK
2352 }
2353
f64d6991
DE
2354 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2355 if (align >= GET_MODE_ALIGNMENT (tmode))
2356 align = GET_MODE_ALIGNMENT (tmode);
2357 else
2358 {
2359 enum machine_mode xmode;
2360
2361 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2362 tmode != VOIDmode;
2363 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2364 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2365 || SLOW_UNALIGNED_ACCESS (tmode, align))
2366 break;
2367
2368 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2369 }
9de08200 2370
57814e5e 2371 /* First store what we can in the largest integer mode, then go to
9de08200
RK
2372 successively smaller modes. */
2373
2374 while (max_size > 1)
2375 {
9de08200
RK
2376 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2377 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2378 if (GET_MODE_SIZE (tmode) < max_size)
2379 mode = tmode;
2380
2381 if (mode == VOIDmode)
2382 break;
2383
2384 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 2385 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
57814e5e 2386 store_by_pieces_2 (GEN_FCN (icode), mode, data);
9de08200
RK
2387
2388 max_size = GET_MODE_SIZE (mode);
2389 }
2390
2391 /* The code above should have handled everything. */
5b0264cb 2392 gcc_assert (!data->len);
9de08200
RK
2393}
2394
57814e5e 2395/* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
9de08200
RK
2396 with move instructions for mode MODE. GENFUN is the gen_... function
2397 to make a move insn for that mode. DATA has all the other info. */
2398
2399static void
502b8322
AJ
2400store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2401 struct store_by_pieces *data)
9de08200 2402{
3bdf5ad1 2403 unsigned int size = GET_MODE_SIZE (mode);
57814e5e 2404 rtx to1, cst;
9de08200
RK
2405
2406 while (data->len >= size)
2407 {
3bdf5ad1
RK
2408 if (data->reverse)
2409 data->offset -= size;
9de08200 2410
3bdf5ad1 2411 if (data->autinc_to)
630036c6
JJ
2412 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2413 data->offset);
3a94c984 2414 else
f4ef873c 2415 to1 = adjust_address (data->to, mode, data->offset);
9de08200 2416
940da324 2417 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
57814e5e
JJ
2418 emit_insn (gen_add2_insn (data->to_addr,
2419 GEN_INT (-(HOST_WIDE_INT) size)));
9de08200 2420
57814e5e
JJ
2421 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2422 emit_insn ((*genfun) (to1, cst));
3bdf5ad1 2423
940da324 2424 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
9de08200 2425 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
9de08200 2426
3bdf5ad1
RK
2427 if (! data->reverse)
2428 data->offset += size;
9de08200
RK
2429
2430 data->len -= size;
2431 }
2432}
2433\f
19caa751 2434/* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
8ac61af7 2435 its length in bytes. */
e9a25f70
JL
2436
2437rtx
8148fe65 2438clear_storage (rtx object, rtx size, enum block_op_methods method)
bbf6f052 2439{
57aaef66
RH
2440 enum machine_mode mode = GET_MODE (object);
2441 unsigned int align;
e9a25f70 2442
8148fe65
JJ
2443 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2444
fcf1b822
RK
2445 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2446 just move a zero. Otherwise, do this a piece at a time. */
57aaef66 2447 if (mode != BLKmode
fcf1b822 2448 && GET_CODE (size) == CONST_INT
57aaef66 2449 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
bbf6f052 2450 {
57aaef66
RH
2451 rtx zero = CONST0_RTX (mode);
2452 if (zero != NULL)
2453 {
2454 emit_move_insn (object, zero);
2455 return NULL;
2456 }
2457
2458 if (COMPLEX_MODE_P (mode))
2459 {
2460 zero = CONST0_RTX (GET_MODE_INNER (mode));
2461 if (zero != NULL)
2462 {
2463 write_complex_part (object, zero, 0);
2464 write_complex_part (object, zero, 1);
2465 return NULL;
2466 }
2467 }
4ca79136
RH
2468 }
2469
57aaef66
RH
2470 if (size == const0_rtx)
2471 return NULL;
2472
2473 align = MEM_ALIGN (object);
2474
2475 if (GET_CODE (size) == CONST_INT
2476 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2477 clear_by_pieces (object, INTVAL (size), align);
57e84f18 2478 else if (set_storage_via_setmem (object, size, const0_rtx, align))
57aaef66
RH
2479 ;
2480 else
8148fe65
JJ
2481 return clear_storage_via_libcall (object, size,
2482 method == BLOCK_OP_TAILCALL);
57aaef66
RH
2483
2484 return NULL;
4ca79136
RH
2485}
2486
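/* Illustrative sketch (editorial example, not part of GCC's expr.c): zero a
   64-byte BLKmode stack temporary with clear_storage, which picks between
   a plain move, clear_by_pieces, a setmem pattern and a memset libcall.
   The 64-byte size is an assumption made only for this example.  */

static void
example_zero_stack_temp (void)
{
  rtx blk = assign_stack_temp (BLKmode, 64, 0);
  clear_storage (blk, GEN_INT (64), BLOCK_OP_NORMAL);
}
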
8f99553f 2487/* A subroutine of clear_storage. Expand a call to memset.
4ca79136 2488 Return the return value of memset, 0 otherwise. */
9de08200 2489
4ca79136 2490static rtx
8148fe65 2491clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
4ca79136
RH
2492{
2493 tree call_expr, arg_list, fn, object_tree, size_tree;
2494 enum machine_mode size_mode;
2495 rtx retval;
9de08200 2496
ad76cef8
PB
2497 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2498 place those pseudos into a VAR_DECL and use them later. */
52cf7115 2499
4ca79136 2500 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
52cf7115 2501
8f99553f 2502 size_mode = TYPE_MODE (sizetype);
4ca79136
RH
2503 size = convert_to_mode (size_mode, size, 1);
2504 size = copy_to_mode_reg (size_mode, size);
52cf7115 2505
4ca79136
RH
2506 /* It is incorrect to use the libcall calling conventions to call
2507 memset in this context. This could be a user call to memset and
2508 the user may wish to examine the return value from memset. For
2509 targets where libcalls and normal calls have different conventions
8f99553f 2510 for returning pointers, we could end up generating incorrect code. */
4bc973ae 2511
4ca79136 2512 object_tree = make_tree (ptr_type_node, object);
8f99553f 2513 size_tree = make_tree (sizetype, size);
4ca79136
RH
2514
2515 fn = clear_storage_libcall_fn (true);
2516 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
8f99553f 2517 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
4ca79136
RH
2518 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2519
2520 /* Now we have to build up the CALL_EXPR itself. */
2521 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3244e67d
RS
2522 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2523 call_expr, arg_list, NULL_TREE);
8148fe65 2524 CALL_EXPR_TAILCALL (call_expr) = tailcall;
4ca79136
RH
2525
2526 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2527
8f99553f 2528 return retval;
4ca79136
RH
2529}
2530
2531/* A subroutine of clear_storage_via_libcall. Create the tree node
2532 for the function we use for block clears. The first time FOR_CALL
2533 is true, we call assemble_external. */
2534
2535static GTY(()) tree block_clear_fn;
66c60e67 2536
9661b15f 2537void
502b8322 2538init_block_clear_fn (const char *asmspec)
4ca79136 2539{
9661b15f 2540 if (!block_clear_fn)
4ca79136 2541 {
9661b15f
JJ
2542 tree fn, args;
2543
8f99553f
JM
2544 fn = get_identifier ("memset");
2545 args = build_function_type_list (ptr_type_node, ptr_type_node,
2546 integer_type_node, sizetype,
2547 NULL_TREE);
4ca79136
RH
2548
2549 fn = build_decl (FUNCTION_DECL, fn, args);
2550 DECL_EXTERNAL (fn) = 1;
2551 TREE_PUBLIC (fn) = 1;
2552 DECL_ARTIFICIAL (fn) = 1;
2553 TREE_NOTHROW (fn) = 1;
2554
2555 block_clear_fn = fn;
bbf6f052 2556 }
e9a25f70 2557
9661b15f 2558 if (asmspec)
0e6df31e 2559 set_user_assembler_name (block_clear_fn, asmspec);
9661b15f
JJ
2560}
2561
2562static tree
502b8322 2563clear_storage_libcall_fn (int for_call)
9661b15f
JJ
2564{
2565 static bool emitted_extern;
2566
2567 if (!block_clear_fn)
2568 init_block_clear_fn (NULL);
2569
4ca79136
RH
2570 if (for_call && !emitted_extern)
2571 {
2572 emitted_extern = true;
0e6df31e 2573 make_decl_rtl (block_clear_fn);
9661b15f 2574 assemble_external (block_clear_fn);
4ca79136 2575 }
bbf6f052 2576
9661b15f 2577 return block_clear_fn;
4ca79136 2578}
57e84f18
AS
2579\f
2580/* Expand a setmem pattern; return true if successful. */
2581
2582bool
2583set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
2584{
2585 /* Try the most limited insn first, because there's no point
2586 including more than one in the machine description unless
2587 the more limited one has some advantage. */
2588
2589 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2590 enum machine_mode mode;
2591
2592 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2593 mode = GET_MODE_WIDER_MODE (mode))
2594 {
2595 enum insn_code code = setmem_optab[(int) mode];
2596 insn_operand_predicate_fn pred;
2597
2598 if (code != CODE_FOR_nothing
2599 /* We don't need MODE to be narrower than
2600 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2601 the mode mask, as it is returned by the macro, it will
2602 definitely be less than the actual mode mask. */
2603 && ((GET_CODE (size) == CONST_INT
2604 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2605 <= (GET_MODE_MASK (mode) >> 1)))
2606 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2607 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2608 || (*pred) (object, BLKmode))
2609 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2610 || (*pred) (opalign, VOIDmode)))
2611 {
9ed92901
AS
2612 rtx opsize, opchar;
2613 enum machine_mode char_mode;
57e84f18
AS
2614 rtx last = get_last_insn ();
2615 rtx pat;
2616
2617 opsize = convert_to_mode (mode, size, 1);
2618 pred = insn_data[(int) code].operand[1].predicate;
2619 if (pred != 0 && ! (*pred) (opsize, mode))
2620 opsize = copy_to_mode_reg (mode, opsize);
2621
9ed92901
AS
2622 opchar = val;
2623 char_mode = insn_data[(int) code].operand[2].mode;
2624 if (char_mode != VOIDmode)
2625 {
2626 opchar = convert_to_mode (char_mode, opchar, 1);
2627 pred = insn_data[(int) code].operand[2].predicate;
2628 if (pred != 0 && ! (*pred) (opchar, char_mode))
2629 opchar = copy_to_mode_reg (char_mode, opchar);
2630 }
57e84f18
AS
2631
2632 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2633 if (pat)
2634 {
2635 emit_insn (pat);
2636 return true;
2637 }
2638 else
2639 delete_insns_since (last);
2640 }
2641 }
2642
2643 return false;
2644}
2645
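/* Illustrative sketch (editorial example, not part of GCC's expr.c): an
   explicit call to set_storage_via_setmem fills OBJECT with a given byte
   when the target provides a setmem pattern; clear_storage above uses it
   the same way with a zero value.  The 0x5a fill byte and 64-byte size
   are assumptions made only for this example.  */

static void
example_fill_via_setmem (rtx object)
{
  if (!set_storage_via_setmem (object, GEN_INT (64), GEN_INT (0x5a),
			       MEM_ALIGN (object)))
    {
      /* No usable setmem pattern; a caller would have to fall back to
	 store_by_pieces or a libcall here.  */
    }
}
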
4ca79136 2646\f
1466e387
RH
2647/* Write to one of the components of the complex value CPLX. Write VAL to
2648 the real part if IMAG_P is false, and the imaginary part if its true. */
bbf6f052 2649
1466e387
RH
2650static void
2651write_complex_part (rtx cplx, rtx val, bool imag_p)
2652{
ddf4e03f
RH
2653 enum machine_mode cmode;
2654 enum machine_mode imode;
2655 unsigned ibitsize;
2656
1466e387 2657 if (GET_CODE (cplx) == CONCAT)
1466e387 2658 {
ddf4e03f
RH
2659 emit_move_insn (XEXP (cplx, imag_p), val);
2660 return;
2661 }
2662
2663 cmode = GET_MODE (cplx);
2664 imode = GET_MODE_INNER (cmode);
2665 ibitsize = GET_MODE_BITSIZE (imode);
bbf6f052 2666
7a31c801
DE
2667 /* For MEMs simplify_gen_subreg may generate an invalid new address
2668 because, e.g., the original address is considered mode-dependent
2669 by the target, which restricts simplify_subreg from invoking
2670 adjust_address_nv. Instead of preparing fallback support for an
2671 invalid address, we call adjust_address_nv directly. */
2672 if (MEM_P (cplx))
 {
2673 emit_move_insn (adjust_address_nv (cplx, imode,
2674 imag_p ? GET_MODE_SIZE (imode) : 0),
2675 val);
 return;
 }
2676
ddf4e03f
RH
2677 /* If the sub-object is at least word sized, then we know that subregging
2678 will work. This special case is important, since store_bit_field
2679 wants to operate on integer modes, and there's rarely an OImode to
2680 correspond to TCmode. */
36d7571c
EB
2681 if (ibitsize >= BITS_PER_WORD
2682 /* For hard regs we have exact predicates. Assume we can split
2683 the original object if it spans an even number of hard regs.
2684 This special case is important for SCmode on 64-bit platforms
2685 where the natural size of floating-point regs is 32-bit. */
2ca202e7 2686 || (REG_P (cplx)
36d7571c 2687 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
7a31c801 2688 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
ddf4e03f
RH
2689 {
2690 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2691 imag_p ? GET_MODE_SIZE (imode) : 0);
36d7571c
EB
2692 if (part)
2693 {
2694 emit_move_insn (part, val);
2695 return;
2696 }
2697 else
2698 /* simplify_gen_subreg may fail for sub-word MEMs. */
2699 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
1466e387 2700 }
36d7571c
EB
2701
2702 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
1466e387
RH
2703}
2704
2705/* Extract one of the components of the complex value CPLX. Extract the
2706 real part if IMAG_P is false, and the imaginary part if it's true. */
2707
2708static rtx
2709read_complex_part (rtx cplx, bool imag_p)
bbf6f052 2710{
1466e387
RH
2711 enum machine_mode cmode, imode;
2712 unsigned ibitsize;
bbf6f052 2713
1466e387
RH
2714 if (GET_CODE (cplx) == CONCAT)
2715 return XEXP (cplx, imag_p);
bbf6f052 2716
1466e387
RH
2717 cmode = GET_MODE (cplx);
2718 imode = GET_MODE_INNER (cmode);
2719 ibitsize = GET_MODE_BITSIZE (imode);
2720
2721 /* Special case reads from complex constants that got spilled to memory. */
2722 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
de1b33dd 2723 {
1466e387
RH
2724 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2725 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2726 {
2727 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2728 if (CONSTANT_CLASS_P (part))
2729 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2730 }
2731 }
51286de6 2732
7a31c801
DE
2733 /* For MEMs simplify_gen_subreg may generate an invalid new address
2734 because, e.g., the original address is considered mode-dependent
2735 by the target, which restricts simplify_subreg from invoking
2736 adjust_address_nv. Instead of preparing fallback support for an
2737 invalid address, we call adjust_address_nv directly. */
2738 if (MEM_P (cplx))
2739 return adjust_address_nv (cplx, imode,
2740 imag_p ? GET_MODE_SIZE (imode) : 0);
2741
ddf4e03f
RH
2742 /* If the sub-object is at least word sized, then we know that subregging
2743 will work. This special case is important, since extract_bit_field
2744 wants to operate on integer modes, and there's rarely an OImode to
2745 correspond to TCmode. */
36d7571c
EB
2746 if (ibitsize >= BITS_PER_WORD
2747 /* For hard regs we have exact predicates. Assume we can split
2748 the original object if it spans an even number of hard regs.
2749 This special case is important for SCmode on 64-bit platforms
2750 where the natural size of floating-point regs is 32-bit. */
2ca202e7 2751 || (REG_P (cplx)
36d7571c 2752 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
7a31c801 2753 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
ddf4e03f
RH
2754 {
2755 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2756 imag_p ? GET_MODE_SIZE (imode) : 0);
36d7571c
EB
2757 if (ret)
2758 return ret;
2759 else
2760 /* simplify_gen_subreg may fail for sub-word MEMs. */
2761 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
ddf4e03f
RH
2762 }
2763
1466e387
RH
2764 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2765 true, NULL_RTX, imode, imode);
2766}
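
/* Illustrative sketch (editorial example, not part of GCC's expr.c):
   conjugate a complex value in place using the two helpers above, i.e.
   negate its imaginary part.  The use of expand_unop with neg_optab is an
   assumption made only for this example.  */

static void
example_conjugate_in_place (rtx cplx)
{
  enum machine_mode imode = GET_MODE_INNER (GET_MODE (cplx));
  rtx imag = read_complex_part (cplx, true);

  imag = expand_unop (imode, neg_optab, imag, NULL_RTX, 0);
  write_complex_part (cplx, imag, true);
}
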
2767\f
539eaa3a 2768/* A subroutine of emit_move_insn_1. Yet another lowpart generator.
074e6d01 2769 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
539eaa3a
RH
2770 represented in NEW_MODE. If FORCE is true, this will never happen, as
2771 we'll force-create a SUBREG if needed. */
0c19a26f 2772
1466e387 2773static rtx
074e6d01 2774emit_move_change_mode (enum machine_mode new_mode,
539eaa3a 2775 enum machine_mode old_mode, rtx x, bool force)
1466e387 2776{
074e6d01 2777 rtx ret;
1466e387 2778
074e6d01 2779 if (reload_in_progress && MEM_P (x))
1466e387 2780 {
074e6d01
RH
2781 /* We can't use gen_lowpart here because it may call change_address
2782 which is not appropriate if we were called when a reload was in
2783 progress. We don't have to worry about changing the address since
2784 the size in bytes is supposed to be the same. Copy the MEM to
2785 change the mode and move any substitutions from the old MEM to
2786 the new one. */
1466e387 2787
074e6d01
RH
2788 ret = adjust_address_nv (x, new_mode, 0);
2789 copy_replacements (x, ret);
de1b33dd 2790 }
1466e387
RH
2791 else
2792 {
35fd3193 2793 /* Note that we do want simplify_subreg's behavior of validating
074e6d01
RH
2794 that the new mode is ok for a hard register. If we were to use
2795 simplify_gen_subreg, we would create the subreg, but would
2796 probably run into the target not being able to implement it. */
539eaa3a
RH
2797 /* Except, of course, when FORCE is true, when this is exactly what
2798 we want. Which is needed for CCmodes on some targets. */
2799 if (force)
2800 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2801 else
2802 ret = simplify_subreg (new_mode, x, old_mode, 0);
1466e387 2803 }
bbf6f052 2804
074e6d01
RH
2805 return ret;
2806}
2807
1466e387
RH
2808/* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2809 an integer mode of the same size as MODE. Returns the instruction
2810 emitted, or NULL if such a move could not be generated. */
bbf6f052 2811
1466e387
RH
2812static rtx
2813emit_move_via_integer (enum machine_mode mode, rtx x, rtx y)
2814{
2815 enum machine_mode imode;
2816 enum insn_code code;
bbf6f052 2817
1466e387
RH
2818 /* There must exist a mode of the exact size we require. */
2819 imode = int_mode_for_mode (mode);
2820 if (imode == BLKmode)
2821 return NULL_RTX;
de1b33dd 2822
1466e387
RH
2823 /* The target must support moves in this mode. */
2824 code = mov_optab->handlers[imode].insn_code;
2825 if (code == CODE_FOR_nothing)
2826 return NULL_RTX;
de1b33dd 2827
539eaa3a
RH
2828 x = emit_move_change_mode (imode, mode, x, false);
2829 if (x == NULL_RTX)
2830 return NULL_RTX;
2831 y = emit_move_change_mode (imode, mode, y, false);
2832 if (y == NULL_RTX)
2833 return NULL_RTX;
2834 return emit_insn (GEN_FCN (code) (x, y));
261c4230
RS
2835}
2836
1466e387
RH
2837/* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2838 Return an equivalent MEM that does not use an auto-increment. */
261c4230 2839
1466e387
RH
2840static rtx
2841emit_move_resolve_push (enum machine_mode mode, rtx x)
261c4230 2842{
1466e387
RH
2843 enum rtx_code code = GET_CODE (XEXP (x, 0));
2844 HOST_WIDE_INT adjust;
2845 rtx temp;
261c4230 2846
1466e387
RH
2847 adjust = GET_MODE_SIZE (mode);
2848#ifdef PUSH_ROUNDING
2849 adjust = PUSH_ROUNDING (adjust);
2850#endif
2851 if (code == PRE_DEC || code == POST_DEC)
2852 adjust = -adjust;
76bbe028 2853
1466e387
RH
2854 /* Do not use anti_adjust_stack, since we don't want to update
2855 stack_pointer_delta. */
2856 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2857 GEN_INT (adjust), stack_pointer_rtx,
2858 0, OPTAB_LIB_WIDEN);
2859 if (temp != stack_pointer_rtx)
2860 emit_move_insn (stack_pointer_rtx, temp);
bbf6f052 2861
1466e387 2862 switch (code)
7308a047 2863 {
1466e387
RH
2864 case PRE_INC:
2865 case PRE_DEC:
2866 temp = stack_pointer_rtx;
2867 break;
2868 case POST_INC:
2869 temp = plus_constant (stack_pointer_rtx, -GET_MODE_SIZE (mode));
2870 break;
2871 case POST_DEC:
2872 temp = plus_constant (stack_pointer_rtx, GET_MODE_SIZE (mode));
2873 break;
2874 default:
2875 gcc_unreachable ();
2876 }
7308a047 2877
1466e387
RH
2878 return replace_equiv_address (x, temp);
2879}
1a06f5fe 2880
1466e387
RH
2881/* A subroutine of emit_move_complex. Generate a move from Y into X.
2882 X is known to satisfy push_operand, and MODE is known to be complex.
2883 Returns the last instruction emitted. */
bb93b973 2884
1466e387
RH
2885static rtx
2886emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2887{
2888 enum machine_mode submode = GET_MODE_INNER (mode);
2889 bool imag_first;
bb93b973 2890
1466e387
RH
2891#ifdef PUSH_ROUNDING
2892 unsigned int submodesize = GET_MODE_SIZE (submode);
bb93b973 2893
1466e387
RH
2894 /* In case we output to the stack, but the size is smaller than the
2895 machine can push exactly, we need to use move instructions. */
2896 if (PUSH_ROUNDING (submodesize) != submodesize)
2897 {
2898 x = emit_move_resolve_push (mode, x);
2899 return emit_move_insn (x, y);
2900 }
79ce92d7 2901#endif
7308a047 2902
1466e387
RH
2903 /* Note that the real part always precedes the imag part in memory
2904 regardless of machine's endianness. */
2905 switch (GET_CODE (XEXP (x, 0)))
2906 {
2907 case PRE_DEC:
2908 case POST_DEC:
2909 imag_first = true;
2910 break;
2911 case PRE_INC:
2912 case POST_INC:
2913 imag_first = false;
2914 break;
2915 default:
2916 gcc_unreachable ();
2917 }
beb72684 2918
1466e387
RH
2919 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2920 read_complex_part (y, imag_first));
2921 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2922 read_complex_part (y, !imag_first));
2923}
405f63da 2924
1466e387
RH
2925/* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2926 MODE is known to be complex. Returns the last instruction emitted. */
beb72684 2927
1466e387
RH
2928static rtx
2929emit_move_complex (enum machine_mode mode, rtx x, rtx y)
2930{
2931 bool try_int;
405f63da 2932
1466e387
RH
2933 /* Need to take special care for pushes, to maintain proper ordering
2934 of the data, and possibly extra padding. */
2935 if (push_operand (x, mode))
2936 return emit_move_complex_push (mode, x, y);
7308a047 2937
1466e387
RH
2938 /* See if we can coerce the target into moving both values at once. */
2939
c6506442
DE
2940 /* Move floating point as parts. */
2941 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
2942 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
2943 try_int = false;
1466e387 2944 /* Not possible if the values are inherently not adjacent. */
c6506442 2945 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
1466e387
RH
2946 try_int = false;
2947 /* Is possible if both are registers (or subregs of registers). */
2948 else if (register_operand (x, mode) && register_operand (y, mode))
2949 try_int = true;
2950 /* If one of the operands is a memory, and alignment constraints
2951 are friendly enough, we may be able to do combined memory operations.
2952 We do not attempt this if Y is a constant because that combination is
2953 usually better with the by-parts thing below. */
2954 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
2955 && (!STRICT_ALIGNMENT
2956 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
2957 try_int = true;
2958 else
2959 try_int = false;
2960
2961 if (try_int)
a3600c71 2962 {
c6506442
DE
2963 rtx ret;
2964
2965 /* For memory to memory moves, optimal behavior can be had with the
2966 existing block move logic. */
2967 if (MEM_P (x) && MEM_P (y))
2968 {
2969 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
2970 BLOCK_OP_NO_LIBCALL);
2971 return get_last_insn ();
2972 }
2973
2974 ret = emit_move_via_integer (mode, x, y);
1466e387
RH
2975 if (ret)
2976 return ret;
2977 }
a3600c71 2978
1466e387
RH
2979 /* Show the output dies here. This is necessary for SUBREGs
2980 of pseudos since we cannot track their lifetimes correctly;
2981 hard regs shouldn't appear here except as return values. */
2982 if (!reload_completed && !reload_in_progress
2983 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
2984 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
a3600c71 2985
1466e387
RH
2986 write_complex_part (x, read_complex_part (y, false), false);
2987 write_complex_part (x, read_complex_part (y, true), true);
2988 return get_last_insn ();
2989}
a3600c71 2990
1466e387
RH
2991/* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2992 MODE is known to be MODE_CC. Returns the last instruction emitted. */
a3600c71 2993
1466e387
RH
2994static rtx
2995emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
2996{
2997 rtx ret;
a3600c71 2998
1466e387
RH
2999 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3000 if (mode != CCmode)
3001 {
3002 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3003 if (code != CODE_FOR_nothing)
539eaa3a
RH
3004 {
3005 x = emit_move_change_mode (CCmode, mode, x, true);
3006 y = emit_move_change_mode (CCmode, mode, y, true);
3007 return emit_insn (GEN_FCN (code) (x, y));
3008 }
1466e387
RH
3009 }
3010
3011 /* Otherwise, find the MODE_INT mode of the same width. */
3012 ret = emit_move_via_integer (mode, x, y);
3013 gcc_assert (ret != NULL);
3014 return ret;
3015}
3016
3017/* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3018 MODE is any multi-word or full-word mode that lacks a move_insn
3019 pattern. Note that you will get better code if you define such
3020 patterns, even if they must turn into multiple assembler instructions. */
3021
3022static rtx
3023emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3024{
3025 rtx last_insn = 0;
3026 rtx seq, inner;
3027 bool need_clobber;
3028 int i;
3029
3030 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3031
3032 /* If X is a push on the stack, do the push now and replace
3033 X with a reference to the stack pointer. */
3034 if (push_operand (x, mode))
3035 x = emit_move_resolve_push (mode, x);
3036
3037 /* If we are in reload, see if either operand is a MEM whose address
3038 is scheduled for replacement. */
3039 if (reload_in_progress && MEM_P (x)
3040 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3041 x = replace_equiv_address_nv (x, inner);
3042 if (reload_in_progress && MEM_P (y)
3043 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3044 y = replace_equiv_address_nv (y, inner);
3045
3046 start_sequence ();
3047
3048 need_clobber = false;
3049 for (i = 0;
3050 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3051 i++)
3052 {
3053 rtx xpart = operand_subword (x, i, 1, mode);
3054 rtx ypart = operand_subword (y, i, 1, mode);
3055
3056 /* If we can't get a part of Y, put Y into memory if it is a
535a42b1
NS
3057 constant. Otherwise, force it into a register. Then we must
3058 be able to get a part of Y. */
1466e387 3059 if (ypart == 0 && CONSTANT_P (y))
a3600c71 3060 {
1466e387
RH
3061 y = force_const_mem (mode, y);
3062 ypart = operand_subword (y, i, 1, mode);
a3600c71 3063 }
1466e387
RH
3064 else if (ypart == 0)
3065 ypart = operand_subword_force (y, i, mode);
3066
3067 gcc_assert (xpart && ypart);
3068
3069 need_clobber |= (GET_CODE (xpart) == SUBREG);
502b8322 3070
1466e387 3071 last_insn = emit_move_insn (xpart, ypart);
a3600c71
HPN
3072 }
3073
1466e387
RH
3074 seq = get_insns ();
3075 end_sequence ();
3076
3077 /* Show the output dies here. This is necessary for SUBREGs
3078 of pseudos since we cannot track their lifetimes correctly;
3079 hard regs shouldn't appear here except as return values.
3080 We never want to emit such a clobber after reload. */
3081 if (x != y
3082 && ! (reload_in_progress || reload_completed)
3083 && need_clobber != 0)
3084 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3085
3086 emit_insn (seq);
3087
3088 return last_insn;
3089}
3090
3091/* Low level part of emit_move_insn.
3092 Called just like emit_move_insn, but assumes X and Y
3093 are basically valid. */
3094
3095rtx
3096emit_move_insn_1 (rtx x, rtx y)
3097{
3098 enum machine_mode mode = GET_MODE (x);
3099 enum insn_code code;
3100
3101 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3102
3103 code = mov_optab->handlers[mode].insn_code;
3104 if (code != CODE_FOR_nothing)
3105 return emit_insn (GEN_FCN (code) (x, y));
3106
3107 /* Expand complex moves by moving real part and imag part. */
3108 if (COMPLEX_MODE_P (mode))
3109 return emit_move_complex (mode, x, y);
3110
3111 if (GET_MODE_CLASS (mode) == MODE_CC)
3112 return emit_move_ccmode (mode, x, y);
3113
5581fc91
RS
3114 /* Try using a move pattern for the corresponding integer mode. This is
3115 only safe when simplify_subreg can convert MODE constants into integer
3116 constants. At present, it can only do this reliably if the value
3117 fits within a HOST_WIDE_INT. */
1466e387 3118 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 3119 {
1466e387
RH
3120 rtx ret = emit_move_via_integer (mode, x, y);
3121 if (ret)
3122 return ret;
3123 }
0fb7aeda 3124
1466e387
RH
3125 return emit_move_multi_word (mode, x, y);
3126}
918a6124 3127
1466e387
RH
3128/* Generate code to copy Y into X.
3129 Both Y and X must have the same mode, except that
3130 Y can be a constant with VOIDmode.
3131 This mode cannot be BLKmode; use emit_block_move for that.
3a94c984 3132
1466e387 3133 Return the last instruction emitted. */
3ef1eef4 3134
1466e387
RH
3135rtx
3136emit_move_insn (rtx x, rtx y)
3137{
3138 enum machine_mode mode = GET_MODE (x);
3139 rtx y_cst = NULL_RTX;
3140 rtx last_insn, set;
15a7a8ec 3141
1466e387
RH
3142 gcc_assert (mode != BLKmode
3143 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
bbf6f052 3144
1466e387
RH
3145 if (CONSTANT_P (y))
3146 {
3147 if (optimize
3148 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3149 && (last_insn = compress_float_constant (x, y)))
3150 return last_insn;
bbf6f052 3151
1466e387 3152 y_cst = y;
bbf6f052 3153
1466e387
RH
3154 if (!LEGITIMATE_CONSTANT_P (y))
3155 {
3156 y = force_const_mem (mode, y);
235ae7be 3157
1466e387
RH
3158 /* If the target's cannot_force_const_mem prevented the spill,
3159 assume that the target's move expanders will also take care
3160 of the non-legitimate constant. */
3161 if (!y)
3162 y = y_cst;
bbf6f052 3163 }
1466e387 3164 }
6551fa4d 3165
1466e387
RH
3166 /* If X or Y are memory references, verify that their addresses are valid
3167 for the machine. */
3168 if (MEM_P (x)
3169 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3170 && ! push_operand (x, GET_MODE (x)))
3171 || (flag_force_addr
3172 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3173 x = validize_mem (x);
235ae7be 3174
1466e387
RH
3175 if (MEM_P (y)
3176 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3177 || (flag_force_addr
3178 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3179 y = validize_mem (y);
235ae7be 3180
1466e387 3181 gcc_assert (mode != BLKmode);
235ae7be 3182
1466e387
RH
3183 last_insn = emit_move_insn_1 (x, y);
3184
3185 if (y_cst && REG_P (x)
3186 && (set = single_set (last_insn)) != NULL_RTX
3187 && SET_DEST (set) == x
3188 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3189 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3190
3191 return last_insn;
bbf6f052 3192}
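/* Illustrative sketch, not part of the compiler proper: a typical caller
   loads a constant into a fresh pseudo and lets emit_move_insn legitimize
   it.  The name "tmp" below is hypothetical.

       rtx tmp = gen_reg_rtx (SImode);
       rtx last = emit_move_insn (tmp, GEN_INT (42));

   The mode is taken from TMP; if the constant is rejected by
   LEGITIMATE_CONSTANT_P it may be spilled to the constant pool first, and
   a REG_EQUAL note recording the original constant may be attached to the
   returned insn.  */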
51286de6
RH
3193
3194/* If Y is representable exactly in a narrower mode, and the target can
3195 perform the extension directly from constant or memory, then emit the
3196 move as an extension. */
3197
3198static rtx
502b8322 3199compress_float_constant (rtx x, rtx y)
51286de6
RH
3200{
3201 enum machine_mode dstmode = GET_MODE (x);
3202 enum machine_mode orig_srcmode = GET_MODE (y);
3203 enum machine_mode srcmode;
3204 REAL_VALUE_TYPE r;
e4541b7a 3205 int oldcost, newcost;
51286de6
RH
3206
3207 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3208
e4541b7a
DJ
3209 if (LEGITIMATE_CONSTANT_P (y))
3210 oldcost = rtx_cost (y, SET);
3211 else
3212 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3213
51286de6
RH
3214 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3215 srcmode != orig_srcmode;
3216 srcmode = GET_MODE_WIDER_MODE (srcmode))
3217 {
3218 enum insn_code ic;
3219 rtx trunc_y, last_insn;
3220
3221 /* Skip if the target can't extend this way. */
3222 ic = can_extend_p (dstmode, srcmode, 0);
3223 if (ic == CODE_FOR_nothing)
3224 continue;
3225
3226 /* Skip if the narrowed value isn't exact. */
3227 if (! exact_real_truncate (srcmode, &r))
3228 continue;
3229
3230 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3231
3232 if (LEGITIMATE_CONSTANT_P (trunc_y))
3233 {
3234 /* Skip if the target needs extra instructions to perform
3235 the extension. */
3236 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3237 continue;
e4541b7a
DJ
3238 /* This is valid, but may not be cheaper than the original. */
3239 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3240 if (oldcost < newcost)
3241 continue;
51286de6
RH
3242 }
3243 else if (float_extend_from_mem[dstmode][srcmode])
e4541b7a
DJ
3244 {
3245 trunc_y = force_const_mem (srcmode, trunc_y);
3246 /* This is valid, but may not be cheaper than the original. */
3247 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3248 if (oldcost < newcost)
3249 continue;
3250 trunc_y = validize_mem (trunc_y);
3251 }
51286de6
RH
3252 else
3253 continue;
e4541b7a 3254
51286de6
RH
3255 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3256 last_insn = get_last_insn ();
3257
f8cfc6aa 3258 if (REG_P (x))
0c19a26f 3259 set_unique_reg_note (last_insn, REG_EQUAL, y);
51286de6
RH
3260
3261 return last_insn;
3262 }
3263
3264 return NULL_RTX;
3265}
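/* Illustrative sketch, not part of the compiler proper: when expanding,
   say,

       double d = 1.0;

   the DFmode constant 1.0 is exactly representable in SFmode, so on a
   target whose extend pattern (e.g. extendsfdf2) accepts a constant or
   memory operand, compress_float_constant can emit the cheaper "load
   SFmode constant and extend" sequence instead of materializing the full
   DFmode constant.  Whether this wins is decided by comparing the rtx_cost
   of the two forms.  */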
bbf6f052
RK
3266\f
3267/* Pushing data onto the stack. */
3268
3269/* Push a block of length SIZE (perhaps variable)
3270 and return an rtx to address the beginning of the block.
bbf6f052
RK
3271 The value may be virtual_outgoing_args_rtx.
3272
3273 EXTRA is the number of bytes of padding to push in addition to SIZE.
3274 BELOW nonzero means this padding comes at low addresses;
3275 otherwise, the padding comes at high addresses. */
3276
3277rtx
502b8322 3278push_block (rtx size, int extra, int below)
bbf6f052 3279{
b3694847 3280 rtx temp;
88f63c77
RK
3281
3282 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
3283 if (CONSTANT_P (size))
3284 anti_adjust_stack (plus_constant (size, extra));
f8cfc6aa 3285 else if (REG_P (size) && extra == 0)
bbf6f052
RK
3286 anti_adjust_stack (size);
3287 else
3288 {
ce48579b 3289 temp = copy_to_mode_reg (Pmode, size);
bbf6f052 3290 if (extra != 0)
906c4e36 3291 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
3292 temp, 0, OPTAB_LIB_WIDEN);
3293 anti_adjust_stack (temp);
3294 }
3295
f73ad30e 3296#ifndef STACK_GROWS_DOWNWARD
f73ad30e 3297 if (0)
f73ad30e
JH
3298#else
3299 if (1)
bbf6f052 3300#endif
f73ad30e 3301 {
f73ad30e
JH
3302 temp = virtual_outgoing_args_rtx;
3303 if (extra != 0 && below)
3304 temp = plus_constant (temp, extra);
3305 }
3306 else
3307 {
3308 if (GET_CODE (size) == CONST_INT)
3309 temp = plus_constant (virtual_outgoing_args_rtx,
3a94c984 3310 -INTVAL (size) - (below ? 0 : extra));
f73ad30e
JH
3311 else if (extra != 0 && !below)
3312 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3bdf5ad1 3313 negate_rtx (Pmode, plus_constant (size, extra)));
f73ad30e
JH
3314 else
3315 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3316 negate_rtx (Pmode, size));
3317 }
bbf6f052
RK
3318
3319 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3320}
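/* Illustrative sketch, not part of the compiler proper: a caller that
   needs scratch space for a BLKmode argument of dynamic size might do

       rtx size = expand_expr (size_tree, NULL_RTX, VOIDmode, 0);
       rtx addr = push_block (size, 0, 0);
       rtx blk  = gen_rtx_MEM (BLKmode, addr);

   where "size_tree" is a hypothetical tree giving the byte count.
   push_block adjusts the stack pointer by SIZE (plus EXTRA) and returns an
   address for the start of the freshly allocated block.  */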
3321
21d93687
RK
3322#ifdef PUSH_ROUNDING
3323
566aa174 3324/* Emit single push insn. */
21d93687 3325
566aa174 3326static void
502b8322 3327emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
566aa174 3328{
566aa174 3329 rtx dest_addr;
918a6124 3330 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
566aa174 3331 rtx dest;
371b8fc0
JH
3332 enum insn_code icode;
3333 insn_operand_predicate_fn pred;
566aa174 3334
371b8fc0
JH
3335 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3336   /* If there is a push pattern, use it.  Otherwise try the old way of handing
3337      a MEM representing the push operation to the move expander.  */
3338 icode = push_optab->handlers[(int) mode].insn_code;
3339 if (icode != CODE_FOR_nothing)
3340 {
3341 if (((pred = insn_data[(int) icode].operand[0].predicate)
505ddab6 3342 && !((*pred) (x, mode))))
371b8fc0
JH
3343 x = force_reg (mode, x);
3344 emit_insn (GEN_FCN (icode) (x));
3345 return;
3346 }
566aa174
JH
3347 if (GET_MODE_SIZE (mode) == rounded_size)
3348 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
329d586f
KH
3349 /* If we are to pad downward, adjust the stack pointer first and
3350 then store X into the stack location using an offset. This is
3351 because emit_move_insn does not know how to pad; it does not have
3352 access to type. */
3353 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3354 {
3355 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3356 HOST_WIDE_INT offset;
3357
3358 emit_move_insn (stack_pointer_rtx,
3359 expand_binop (Pmode,
3360#ifdef STACK_GROWS_DOWNWARD
3361 sub_optab,
3362#else
3363 add_optab,
3364#endif
3365 stack_pointer_rtx,
3366 GEN_INT (rounded_size),
3367 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3368
3369 offset = (HOST_WIDE_INT) padding_size;
3370#ifdef STACK_GROWS_DOWNWARD
3371 if (STACK_PUSH_CODE == POST_DEC)
3372 /* We have already decremented the stack pointer, so get the
3373 previous value. */
3374 offset += (HOST_WIDE_INT) rounded_size;
3375#else
3376 if (STACK_PUSH_CODE == POST_INC)
3377 /* We have already incremented the stack pointer, so get the
3378 previous value. */
3379 offset -= (HOST_WIDE_INT) rounded_size;
3380#endif
3381 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3382 }
566aa174
JH
3383 else
3384 {
3385#ifdef STACK_GROWS_DOWNWARD
329d586f 3386 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
566aa174 3387 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
505ddab6 3388 GEN_INT (-(HOST_WIDE_INT) rounded_size));
566aa174 3389#else
329d586f 3390 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
566aa174
JH
3391 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3392 GEN_INT (rounded_size));
3393#endif
3394 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3395 }
3396
3397 dest = gen_rtx_MEM (mode, dest_addr);
3398
566aa174
JH
3399 if (type != 0)
3400 {
3401 set_mem_attributes (dest, type, 1);
c3d32120
RK
3402
3403 if (flag_optimize_sibling_calls)
3404 /* Function incoming arguments may overlap with sibling call
3405 outgoing arguments and we cannot allow reordering of reads
3406 from function arguments with stores to outgoing arguments
3407 of sibling calls. */
3408 set_mem_alias_set (dest, 0);
566aa174
JH
3409 }
3410 emit_move_insn (dest, x);
566aa174 3411}
21d93687 3412#endif
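/* Illustrative sketch, not part of the compiler proper: on a target that
   provides a push pattern for MODE, emit_single_push_insn reduces to
   roughly

       emit_insn (GEN_FCN (push_optab->handlers[(int) mode].insn_code) (x));

   after forcing X into a register if the pattern's operand predicate
   rejects it; otherwise it falls back to a move into a PRE_DEC/PRE_MODIFY
   MEM addressed off the stack pointer, as in the code above.  */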
566aa174 3413
bbf6f052
RK
3414/* Generate code to push X onto the stack, assuming it has mode MODE and
3415 type TYPE.
3416 MODE is redundant except when X is a CONST_INT (since they don't
3417 carry mode info).
3418 SIZE is an rtx for the size of data to be copied (in bytes),
3419 needed only if X is BLKmode.
3420
f1eaaf73 3421 ALIGN (in bits) is maximum alignment we can assume.
bbf6f052 3422
cd048831 3423 If PARTIAL and REG are both nonzero, then copy that many of the first
78a52f11
RH
3424 bytes of X into registers starting with REG, and push the rest of X.
3425 The amount of space pushed is decreased by PARTIAL bytes.
bbf6f052 3426 REG must be a hard register in this case.
cd048831
RK
3427   If REG is zero but PARTIAL is not, take all other actions for an
3428 argument partially in registers, but do not actually load any
3429 registers.
bbf6f052
RK
3430
3431 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 3432 This is ignored if an argument block has already been allocated.
bbf6f052
RK
3433
3434 On a machine that lacks real push insns, ARGS_ADDR is the address of
3435 the bottom of the argument block for this call. We use indexing off there
3436   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3437 argument block has not been preallocated.
3438
e5e809f4
JL
3439 ARGS_SO_FAR is the size of args previously pushed for this call.
3440
3441 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3442 for arguments passed in registers. If nonzero, it will be the number
3443 of bytes required. */
bbf6f052
RK
3444
3445void
502b8322
AJ
3446emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3447 unsigned int align, int partial, rtx reg, int extra,
3448 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3449 rtx alignment_pad)
bbf6f052
RK
3450{
3451 rtx xinner;
3452 enum direction stack_direction
3453#ifdef STACK_GROWS_DOWNWARD
3454 = downward;
3455#else
3456 = upward;
3457#endif
3458
3459 /* Decide where to pad the argument: `downward' for below,
3460 `upward' for above, or `none' for don't pad it.
3461 Default is below for small data on big-endian machines; else above. */
3462 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3463
0fb7aeda 3464 /* Invert direction if stack is post-decrement.
9e0e11bf
GK
3465 FIXME: why? */
3466 if (STACK_PUSH_CODE == POST_DEC)
bbf6f052
RK
3467 if (where_pad != none)
3468 where_pad = (where_pad == downward ? upward : downward);
3469
ad76cef8 3470 xinner = x;
bbf6f052
RK
3471
3472 if (mode == BLKmode)
3473 {
3474 /* Copy a block into the stack, entirely or partially. */
3475
b3694847 3476 rtx temp;
78a52f11 3477 int used;
531547e9 3478 int offset;
bbf6f052 3479 int skip;
3a94c984 3480
78a52f11
RH
3481 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3482 used = partial - offset;
531547e9 3483
5b0264cb 3484 gcc_assert (size);
bbf6f052 3485
bbf6f052
RK
3486 /* USED is now the # of bytes we need not copy to the stack
3487 because registers will take care of them. */
3488
3489 if (partial != 0)
f4ef873c 3490 xinner = adjust_address (xinner, BLKmode, used);
bbf6f052
RK
3491
3492 /* If the partial register-part of the arg counts in its stack size,
3493 skip the part of stack space corresponding to the registers.
3494 Otherwise, start copying to the beginning of the stack space,
3495 by setting SKIP to 0. */
e5e809f4 3496 skip = (reg_parm_stack_space == 0) ? 0 : used;
bbf6f052
RK
3497
3498#ifdef PUSH_ROUNDING
3499 /* Do it with several push insns if that doesn't take lots of insns
3500 and if there is no difficulty with push insns that skip bytes
3501 on the stack for alignment purposes. */
3502 if (args_addr == 0
f73ad30e 3503 && PUSH_ARGS
bbf6f052
RK
3504 && GET_CODE (size) == CONST_INT
3505 && skip == 0
f26aca6d 3506 && MEM_ALIGN (xinner) >= align
15914757 3507 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
bbf6f052
RK
3508 /* Here we avoid the case of a structure whose weak alignment
3509 forces many pushes of a small amount of data,
3510 and such small pushes do rounding that causes trouble. */
e1565e65 3511 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
19caa751 3512 || align >= BIGGEST_ALIGNMENT
f1eaaf73
DE
3513 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3514 == (align / BITS_PER_UNIT)))
bbf6f052
RK
3515 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3516 {
3517 /* Push padding now if padding above and stack grows down,
3518 or if padding below and stack grows up.
3519 But if space already allocated, this has already been done. */
3520 if (extra && args_addr == 0
3521 && where_pad != none && where_pad != stack_direction)
906c4e36 3522 anti_adjust_stack (GEN_INT (extra));
bbf6f052 3523
8fd3cf4e 3524 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
bbf6f052
RK
3525 }
3526 else
3a94c984 3527#endif /* PUSH_ROUNDING */
bbf6f052 3528 {
7ab923cc
JJ
3529 rtx target;
3530
bbf6f052
RK
3531 /* Otherwise make space on the stack and copy the data
3532 to the address of that space. */
3533
3534 /* Deduct words put into registers from the size we must copy. */
3535 if (partial != 0)
3536 {
3537 if (GET_CODE (size) == CONST_INT)
906c4e36 3538 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
3539 else
3540 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
3541 GEN_INT (used), NULL_RTX, 0,
3542 OPTAB_LIB_WIDEN);
bbf6f052
RK
3543 }
3544
3545 /* Get the address of the stack space.
3546 In this case, we do not deal with EXTRA separately.
3547 A single stack adjust will do. */
3548 if (! args_addr)
3549 {
3550 temp = push_block (size, extra, where_pad == downward);
3551 extra = 0;
3552 }
3553 else if (GET_CODE (args_so_far) == CONST_INT)
3554 temp = memory_address (BLKmode,
3555 plus_constant (args_addr,
3556 skip + INTVAL (args_so_far)));
3557 else
3558 temp = memory_address (BLKmode,
38a448ca
RH
3559 plus_constant (gen_rtx_PLUS (Pmode,
3560 args_addr,
3561 args_so_far),
bbf6f052 3562 skip));
4ca79136
RH
3563
3564 if (!ACCUMULATE_OUTGOING_ARGS)
3565 {
3566 /* If the source is referenced relative to the stack pointer,
3567 copy it to another register to stabilize it. We do not need
3568 to do this if we know that we won't be changing sp. */
3569
3570 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3571 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3572 temp = copy_to_reg (temp);
3573 }
3574
3a94c984 3575 target = gen_rtx_MEM (BLKmode, temp);
7ab923cc 3576
2bb16349
RH
3577 /* We do *not* set_mem_attributes here, because incoming arguments
3578 may overlap with sibling call outgoing arguments and we cannot
3579 allow reordering of reads from function arguments with stores
3580 to outgoing arguments of sibling calls. We do, however, want
3581 to record the alignment of the stack slot. */
44bb111a
RH
3582	  /* ALIGN may well be larger than the alignment of TYPE, e.g. due to
3583 PARM_BOUNDARY. Assume the caller isn't lying. */
3584 set_mem_align (target, align);
4ca79136 3585
44bb111a 3586 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
bbf6f052
RK
3587 }
3588 }
3589 else if (partial > 0)
3590 {
3591 /* Scalar partly in registers. */
3592
3593 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3594 int i;
3595 int not_stack;
78a52f11 3596 /* # bytes of start of argument
bbf6f052 3597 that we must make space for but need not store. */
ac7e839c 3598 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
bbf6f052
RK
3599 int args_offset = INTVAL (args_so_far);
3600 int skip;
3601
3602 /* Push padding now if padding above and stack grows down,
3603 or if padding below and stack grows up.
3604 But if space already allocated, this has already been done. */
3605 if (extra && args_addr == 0
3606 && where_pad != none && where_pad != stack_direction)
906c4e36 3607 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3608
3609 /* If we make space by pushing it, we might as well push
3610 the real data. Otherwise, we can leave OFFSET nonzero
3611 and leave the space uninitialized. */
3612 if (args_addr == 0)
3613 offset = 0;
3614
3615 /* Now NOT_STACK gets the number of words that we don't need to
ac7e839c 3616 allocate on the stack. Convert OFFSET to words too. */
78a52f11 3617 not_stack = (partial - offset) / UNITS_PER_WORD;
ac7e839c 3618 offset /= UNITS_PER_WORD;
bbf6f052
RK
3619
3620 /* If the partial register-part of the arg counts in its stack size,
3621 skip the part of stack space corresponding to the registers.
3622 Otherwise, start copying to the beginning of the stack space,
3623 by setting SKIP to 0. */
e5e809f4 3624 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
bbf6f052
RK
3625
3626 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3627 x = validize_mem (force_const_mem (mode, x));
3628
3629 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3630 SUBREGs of such registers are not allowed. */
f8cfc6aa 3631 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
bbf6f052
RK
3632 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3633 x = copy_to_reg (x);
3634
3635 /* Loop over all the words allocated on the stack for this arg. */
3636 /* We can do it by words, because any scalar bigger than a word
3637 has a size a multiple of a word. */
3638#ifndef PUSH_ARGS_REVERSED
3639 for (i = not_stack; i < size; i++)
3640#else
3641 for (i = size - 1; i >= not_stack; i--)
3642#endif
3643 if (i >= not_stack + offset)
3644 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
3645 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3646 0, args_addr,
3647 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4 3648 * UNITS_PER_WORD)),
4fc026cd 3649 reg_parm_stack_space, alignment_pad);
bbf6f052
RK
3650 }
3651 else
3652 {
3653 rtx addr;
3bdf5ad1 3654 rtx dest;
bbf6f052
RK
3655
3656 /* Push padding now if padding above and stack grows down,
3657 or if padding below and stack grows up.
3658 But if space already allocated, this has already been done. */
3659 if (extra && args_addr == 0
3660 && where_pad != none && where_pad != stack_direction)
906c4e36 3661 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3662
3663#ifdef PUSH_ROUNDING
f73ad30e 3664 if (args_addr == 0 && PUSH_ARGS)
566aa174 3665 emit_single_push_insn (mode, x, type);
bbf6f052
RK
3666 else
3667#endif
921b3427
RK
3668 {
3669 if (GET_CODE (args_so_far) == CONST_INT)
3670 addr
3671 = memory_address (mode,
3a94c984 3672 plus_constant (args_addr,
921b3427 3673 INTVAL (args_so_far)));
3a94c984 3674 else
38a448ca
RH
3675 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3676 args_so_far));
566aa174 3677 dest = gen_rtx_MEM (mode, addr);
2bb16349
RH
3678
3679 /* We do *not* set_mem_attributes here, because incoming arguments
3680 may overlap with sibling call outgoing arguments and we cannot
3681 allow reordering of reads from function arguments with stores
3682 to outgoing arguments of sibling calls. We do, however, want
3683 to record the alignment of the stack slot. */
3684	  /* ALIGN may well be larger than the alignment of TYPE, e.g. due to
3685 PARM_BOUNDARY. Assume the caller isn't lying. */
3686 set_mem_align (dest, align);
bbf6f052 3687
566aa174 3688 emit_move_insn (dest, x);
566aa174 3689 }
bbf6f052
RK
3690 }
3691
bbf6f052
RK
3692 /* If part should go in registers, copy that part
3693 into the appropriate registers. Do this now, at the end,
3694 since mem-to-mem copies above may do function calls. */
cd048831 3695 if (partial > 0 && reg != 0)
fffa9c1d
JW
3696 {
3697 /* Handle calls that pass values in multiple non-contiguous locations.
3698 The Irix 6 ABI has examples of this. */
3699 if (GET_CODE (reg) == PARALLEL)
6e985040 3700 emit_group_load (reg, x, type, -1);
fffa9c1d 3701 else
78a52f11
RH
3702 {
3703 gcc_assert (partial % UNITS_PER_WORD == 0);
3704 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3705 }
fffa9c1d 3706 }
bbf6f052
RK
3707
3708 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 3709 anti_adjust_stack (GEN_INT (extra));
3a94c984 3710
3ea2292a 3711 if (alignment_pad && args_addr == 0)
4fc026cd 3712 anti_adjust_stack (alignment_pad);
bbf6f052
RK
3713}
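/* Illustrative sketch, not part of the compiler proper: for a word-sized
   scalar argument pushed with no partial-register part, a call expander
   ends up doing roughly

       emit_push_insn (val, SImode, integer_type_node, NULL_RTX,
		       PARM_BOUNDARY, 0, NULL_RTX, 0, NULL_RTX,
		       const0_rtx, 0, NULL_RTX);

   where "val" is a hypothetical rtx holding the argument value.  With
   ARGS_ADDR == 0 and PUSH_ARGS this reduces to a single
   emit_single_push_insn; the BLKmode and partial-register cases above
   handle everything else.  */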
3714\f
296b4ed9
RK
3715/* Return X if X can be used as a subtarget in a sequence of arithmetic
3716 operations. */
3717
3718static rtx
502b8322 3719get_subtarget (rtx x)
296b4ed9 3720{
7c27e184
PB
3721 return (optimize
3722 || x == 0
296b4ed9 3723 /* Only registers can be subtargets. */
f8cfc6aa 3724 || !REG_P (x)
296b4ed9
RK
3725 /* Don't use hard regs to avoid extending their life. */
3726 || REGNO (x) < FIRST_PSEUDO_REGISTER
296b4ed9
RK
3727 ? 0 : x);
3728}
3729
8c1cfd5a
RH
3730/* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3731 FIELD is a bitfield. Returns true if the optimization was successful,
3732 and there's nothing else to do. */
3733
3734static bool
3735optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3736 unsigned HOST_WIDE_INT bitpos,
3737 enum machine_mode mode1, rtx str_rtx,
3738 tree to, tree src)
3739{
3740 enum machine_mode str_mode = GET_MODE (str_rtx);
3741 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3742 tree op0, op1;
3743 rtx value, result;
3744 optab binop;
3745
3746 if (mode1 != VOIDmode
3747 || bitsize >= BITS_PER_WORD
3748 || str_bitsize > BITS_PER_WORD
3749 || TREE_SIDE_EFFECTS (to)
3750 || TREE_THIS_VOLATILE (to))
3751 return false;
3752
3753 STRIP_NOPS (src);
3754 if (!BINARY_CLASS_P (src)
3755 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3756 return false;
3757
3758 op0 = TREE_OPERAND (src, 0);
3759 op1 = TREE_OPERAND (src, 1);
3760 STRIP_NOPS (op0);
3761
3762 if (!operand_equal_p (to, op0, 0))
3763 return false;
3764
3765 if (MEM_P (str_rtx))
3766 {
3767 unsigned HOST_WIDE_INT offset1;
3768
3769 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3770 str_mode = word_mode;
3771 str_mode = get_best_mode (bitsize, bitpos,
3772 MEM_ALIGN (str_rtx), str_mode, 0);
3773 if (str_mode == VOIDmode)
3774 return false;
3775 str_bitsize = GET_MODE_BITSIZE (str_mode);
3776
3777 offset1 = bitpos;
3778 bitpos %= str_bitsize;
3779 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3780 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3781 }
3782 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3783 return false;
3784
3785 /* If the bit field covers the whole REG/MEM, store_field
3786 will likely generate better code. */
3787 if (bitsize >= str_bitsize)
3788 return false;
3789
3790 /* We can't handle fields split across multiple entities. */
3791 if (bitpos + bitsize > str_bitsize)
3792 return false;
3793
3794 if (BYTES_BIG_ENDIAN)
3795 bitpos = str_bitsize - bitpos - bitsize;
3796
3797 switch (TREE_CODE (src))
3798 {
3799 case PLUS_EXPR:
3800 case MINUS_EXPR:
3801 /* For now, just optimize the case of the topmost bitfield
3802	 where we don't need to do any masking and also
3803	 1-bit bitfields where xor can be used.
3804 We might win by one instruction for the other bitfields
3805 too if insv/extv instructions aren't used, so that
3806 can be added later. */
3807 if (bitpos + bitsize != str_bitsize
3808 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3809 break;
3810
3811 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3812 value = convert_modes (str_mode,
3813 TYPE_MODE (TREE_TYPE (op1)), value,
3814 TYPE_UNSIGNED (TREE_TYPE (op1)));
3815
3816 /* We may be accessing data outside the field, which means
3817 we can alias adjacent data. */
3818 if (MEM_P (str_rtx))
3819 {
3820 str_rtx = shallow_copy_rtx (str_rtx);
3821 set_mem_alias_set (str_rtx, 0);
3822 set_mem_expr (str_rtx, 0);
3823 }
3824
3825 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3826 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3827 {
3828 value = expand_and (str_mode, value, const1_rtx, NULL);
3829 binop = xor_optab;
3830 }
3831 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3832 build_int_cst (NULL_TREE, bitpos),
3833 NULL_RTX, 1);
3834 result = expand_binop (str_mode, binop, str_rtx,
3835 value, str_rtx, 1, OPTAB_WIDEN);
3836 if (result != str_rtx)
3837 emit_move_insn (str_rtx, result);
3838 return true;
3839
92fb2d32
KH
3840 case BIT_IOR_EXPR:
3841 case BIT_XOR_EXPR:
3842 if (TREE_CODE (op1) != INTEGER_CST)
3843 break;
3844 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3845 value = convert_modes (GET_MODE (str_rtx),
3846 TYPE_MODE (TREE_TYPE (op1)), value,
3847 TYPE_UNSIGNED (TREE_TYPE (op1)));
3848
3849 /* We may be accessing data outside the field, which means
3850 we can alias adjacent data. */
3851 if (MEM_P (str_rtx))
3852 {
3853 str_rtx = shallow_copy_rtx (str_rtx);
3854 set_mem_alias_set (str_rtx, 0);
3855 set_mem_expr (str_rtx, 0);
3856 }
3857
3858 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
3859 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3860 {
3861 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
3862 - 1);
3863 value = expand_and (GET_MODE (str_rtx), value, mask,
3864 NULL_RTX);
3865 }
3866 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
3867 build_int_cst (NULL_TREE, bitpos),
3868 NULL_RTX, 1);
3869 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3870 value, str_rtx, 1, OPTAB_WIDEN);
3871 if (result != str_rtx)
3872 emit_move_insn (str_rtx, result);
3873 return true;
3874
8c1cfd5a
RH
3875 default:
3876 break;
3877 }
3878
3879 return false;
3880}
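/* Illustrative sketch, not part of the compiler proper: the transformation
   above turns a source-level update of a topmost bitfield such as

       struct { unsigned pad : 28, f : 4; } s;
       s.f += 1;

   (on a typical little-endian layout) into a plain add of (1 << bitpos) on
   the word containing the field: carries out of the topmost field fall off
   the end, so no mask is needed.  1-bit fields with a constant operand use
   xor instead.  */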
3881
3882
e836a5a2 3883/* Expand an assignment that stores the value of FROM into TO. */
bbf6f052 3884
e836a5a2
KH
3885void
3886expand_assignment (tree to, tree from)
bbf6f052 3887{
b3694847 3888 rtx to_rtx = 0;
bbf6f052
RK
3889 rtx result;
3890
3891 /* Don't crash if the lhs of the assignment was erroneous. */
3892
3893 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
3894 {
3895 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
e836a5a2 3896 return;
709f5be1 3897 }
bbf6f052
RK
3898
3899 /* Assignment of a structure component needs special treatment
3900 if the structure component's rtx is not simply a MEM.
6be58303
JW
3901 Assignment of an array element at a constant index, and assignment of
3902 an array element in an unaligned packed structure field, has the same
3903 problem. */
8c1cfd5a 3904 if (handled_component_p (to)
7c02ae17 3905 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
bbf6f052
RK
3906 {
3907 enum machine_mode mode1;
770ae6cc 3908 HOST_WIDE_INT bitsize, bitpos;
7bb0943f 3909 tree offset;
bbf6f052
RK
3910 int unsignedp;
3911 int volatilep = 0;
0088fcb1
RK
3912 tree tem;
3913
3914 push_temp_slots ();
839c4796 3915 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2614034e 3916 &unsignedp, &volatilep, true);
bbf6f052
RK
3917
3918 /* If we are going to use store_bit_field and extract_bit_field,
3919 make sure to_rtx will be safe for multiple use. */
3920
b258008a 3921 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
1ed1b4fb 3922
7bb0943f
RS
3923 if (offset != 0)
3924 {
e3c8ea67 3925 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7bb0943f 3926
5b0264cb 3927 gcc_assert (MEM_P (to_rtx));
bd070e1a 3928
bd070e1a 3929#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 3930 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 3931 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
3932#else
3933 if (GET_MODE (offset_rtx) != ptr_mode)
3934 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 3935#endif
bd070e1a 3936
9a7b9f4f
JL
3937 /* A constant address in TO_RTX can have VOIDmode, we must not try
3938 to call force_reg for that case. Avoid that case. */
3c0cb5de 3939 if (MEM_P (to_rtx)
89752202 3940 && GET_MODE (to_rtx) == BLKmode
9a7b9f4f 3941 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
a06ef755 3942 && bitsize > 0
3a94c984 3943 && (bitpos % bitsize) == 0
89752202 3944 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 3945 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
89752202 3946 {
e3c8ea67 3947 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
3948 bitpos = 0;
3949 }
3950
0d4903b8 3951 to_rtx = offset_address (to_rtx, offset_rtx,
d50a16c4
EB
3952 highest_pow2_factor_for_target (to,
3953 offset));
7bb0943f 3954 }
c5c76735 3955
8c1cfd5a
RH
3956 /* Handle expand_expr of a complex value returning a CONCAT. */
3957 if (GET_CODE (to_rtx) == CONCAT)
a06ef755 3958 {
0becc986
RH
3959 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
3960 {
3961 gcc_assert (bitpos == 0);
3962 result = store_expr (from, to_rtx, false);
3963 }
3964 else
3965 {
3966 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
3967 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
3968 }
bbf6f052 3969 }
8c1cfd5a 3970 else
df62f18a 3971 {
8c1cfd5a 3972 if (MEM_P (to_rtx))
b8b139c7 3973 {
8c1cfd5a
RH
3974 /* If the field is at offset zero, we could have been given the
3975 DECL_RTX of the parent struct. Don't munge it. */
3976 to_rtx = shallow_copy_rtx (to_rtx);
b8b139c7 3977
8c1cfd5a 3978 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
b8b139c7 3979
8c1cfd5a
RH
3980 /* Deal with volatile and readonly fields. The former is only
3981 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3982 if (volatilep)
3983 MEM_VOLATILE_P (to_rtx) = 1;
2039d7aa 3984 if (component_uses_parent_alias_set (to))
8c1cfd5a 3985 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
df62f18a 3986 }
60ba25bf 3987
8c1cfd5a
RH
3988 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
3989 to_rtx, to, from))
3990 result = NULL;
3991 else
3992 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3993 TREE_TYPE (tem), get_alias_set (to));
df62f18a
JJ
3994 }
3995
8c1cfd5a
RH
3996 if (result)
3997 preserve_temp_slots (result);
a06ef755
RK
3998 free_temp_slots ();
3999 pop_temp_slots ();
e836a5a2 4000 return;
bbf6f052
RK
4001 }
4002
cd1db108
RS
4003 /* If the rhs is a function call and its value is not an aggregate,
4004 call the function before we start to compute the lhs.
4005 This is needed for correct code for cases such as
4006 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
4007 requires loading up part of an address in a separate insn.
4008
1858863b
JW
4009 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4010 since it might be a promoted variable where the zero- or sign- extension
4011 needs to be done. Handling this in the normal way is safe because no
4012 computation is done before the call. */
61f71b34 4013 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
b35cd3c1 4014 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1858863b 4015 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
f8cfc6aa 4016 && REG_P (DECL_RTL (to))))
cd1db108 4017 {
0088fcb1
RK
4018 rtx value;
4019
4020 push_temp_slots ();
4021 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 4022 if (to_rtx == 0)
37a08a29 4023 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
aaf87c45 4024
fffa9c1d
JW
4025 /* Handle calls that return values in multiple non-contiguous locations.
4026 The Irix 6 ABI has examples of this. */
4027 if (GET_CODE (to_rtx) == PARALLEL)
6e985040
AM
4028 emit_group_load (to_rtx, value, TREE_TYPE (from),
4029 int_size_in_bytes (TREE_TYPE (from)));
fffa9c1d 4030 else if (GET_MODE (to_rtx) == BLKmode)
44bb111a 4031 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
aaf87c45 4032 else
6419e5b0 4033 {
5ae6cd0d 4034 if (POINTER_TYPE_P (TREE_TYPE (to)))
6419e5b0 4035 value = convert_memory_address (GET_MODE (to_rtx), value);
6419e5b0
DT
4036 emit_move_insn (to_rtx, value);
4037 }
cd1db108
RS
4038 preserve_temp_slots (to_rtx);
4039 free_temp_slots ();
0088fcb1 4040 pop_temp_slots ();
e836a5a2 4041 return;
cd1db108
RS
4042 }
4043
bbf6f052
RK
4044 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4045 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4046
4047 if (to_rtx == 0)
37a08a29 4048 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
bbf6f052 4049
86d38d25 4050 /* Don't move directly into a return register. */
14a774a9 4051 if (TREE_CODE (to) == RESULT_DECL
f8cfc6aa 4052 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
86d38d25 4053 {
0088fcb1
RK
4054 rtx temp;
4055
4056 push_temp_slots ();
4057 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
14a774a9
RK
4058
4059 if (GET_CODE (to_rtx) == PARALLEL)
6e985040
AM
4060 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4061 int_size_in_bytes (TREE_TYPE (from)));
14a774a9
RK
4062 else
4063 emit_move_insn (to_rtx, temp);
4064
86d38d25
RS
4065 preserve_temp_slots (to_rtx);
4066 free_temp_slots ();
0088fcb1 4067 pop_temp_slots ();
e836a5a2 4068 return;
86d38d25
RS
4069 }
4070
bbf6f052
RK
4071 /* In case we are returning the contents of an object which overlaps
4072 the place the value is being stored, use a safe function when copying
4073 a value through a pointer into a structure value return block. */
4074 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4075 && current_function_returns_struct
4076 && !current_function_returns_pcc_struct)
4077 {
0088fcb1
RK
4078 rtx from_rtx, size;
4079
4080 push_temp_slots ();
33a20d10 4081 size = expr_size (from);
37a08a29 4082 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
bbf6f052 4083
8f99553f
JM
4084 emit_library_call (memmove_libfunc, LCT_NORMAL,
4085 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4086 XEXP (from_rtx, 0), Pmode,
4087 convert_to_mode (TYPE_MODE (sizetype),
4088 size, TYPE_UNSIGNED (sizetype)),
4089 TYPE_MODE (sizetype));
bbf6f052
RK
4090
4091 preserve_temp_slots (to_rtx);
4092 free_temp_slots ();
0088fcb1 4093 pop_temp_slots ();
e836a5a2 4094 return;
bbf6f052
RK
4095 }
4096
4097 /* Compute FROM and store the value in the rtx we got. */
4098
0088fcb1 4099 push_temp_slots ();
e836a5a2 4100 result = store_expr (from, to_rtx, 0);
bbf6f052
RK
4101 preserve_temp_slots (result);
4102 free_temp_slots ();
0088fcb1 4103 pop_temp_slots ();
e836a5a2 4104 return;
bbf6f052
RK
4105}
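/* Illustrative sketch, not part of the compiler proper: expand_assignment
   is the entry point used when expanding a tree-level assignment
   (MODIFY_EXPR), roughly

       expand_assignment (TREE_OPERAND (stmt, 0), TREE_OPERAND (stmt, 1));

   for a hypothetical MODIFY_EXPR "stmt".  The lhs decides the strategy:
   component and array references go through store_field, calls whose
   value lands in registers are expanded before the lhs, and everything
   else falls through to store_expr below.  */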
4106
4107/* Generate code for computing expression EXP,
4108 and storing the value into TARGET.
bbf6f052 4109
709f5be1
RS
4110 If the mode is BLKmode then we may return TARGET itself.
4111   It turns out that in BLKmode it doesn't cause a problem,
4112   because C has no operators that could combine two different
4113 assignments into the same BLKmode object with different values
4114 with no sequence point. Will other languages need this to
4115 be more thorough?
4116
6f4fd16d 4117 If CALL_PARAM_P is nonzero, this is a store into a call param on the
8403445a 4118 stack, and block moves may need to be treated specially. */
bbf6f052
RK
4119
4120rtx
6f4fd16d 4121store_expr (tree exp, rtx target, int call_param_p)
bbf6f052 4122{
b3694847 4123 rtx temp;
0fab64a3 4124 rtx alt_rtl = NULL_RTX;
bbf6f052
RK
4125 int dont_return_target = 0;
4126
847311f4
AL
4127 if (VOID_TYPE_P (TREE_TYPE (exp)))
4128 {
4129 /* C++ can generate ?: expressions with a throw expression in one
4130 branch and an rvalue in the other. Here, we resolve attempts to
4d6922ee 4131 store the throw expression's nonexistent result. */
6f4fd16d 4132 gcc_assert (!call_param_p);
847311f4
AL
4133 expand_expr (exp, const0_rtx, VOIDmode, 0);
4134 return NULL_RTX;
4135 }
bbf6f052
RK
4136 if (TREE_CODE (exp) == COMPOUND_EXPR)
4137 {
4138 /* Perform first part of compound expression, then assign from second
4139 part. */
8403445a 4140 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6f4fd16d
KH
4141 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4142 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
bbf6f052
RK
4143 }
4144 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4145 {
4146 /* For conditional expression, get safe form of the target. Then
4147 test the condition, doing the appropriate assignment on either
4148 side. This avoids the creation of unnecessary temporaries.
4149 For non-BLKmode, it is more efficient not to do this. */
4150
4151 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4152
dabf8373 4153 do_pending_stack_adjust ();
bbf6f052
RK
4154 NO_DEFER_POP;
4155 jumpifnot (TREE_OPERAND (exp, 0), lab1);
6f4fd16d 4156 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
bbf6f052
RK
4157 emit_jump_insn (gen_jump (lab2));
4158 emit_barrier ();
4159 emit_label (lab1);
6f4fd16d 4160 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
bbf6f052
RK
4161 emit_label (lab2);
4162 OK_DEFER_POP;
a3a58acc 4163
436d948e 4164 return NULL_RTX;
12f06d17 4165 }
1499e0a8 4166 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
09da1532 4167 /* If this is a scalar in a register that is stored in a wider mode
1499e0a8
RK
4168 than the declared mode, compute the result into its declared mode
4169 and then convert to the wider mode. Our value is the computed
4170 expression. */
4171 {
b76b08ef
RK
4172 rtx inner_target = 0;
4173
436d948e
KH
4174 /* We can do the conversion inside EXP, which will often result
4175 in some optimizations. Do the conversion in two steps: first
4176 change the signedness, if needed, then the extend. But don't
4177 do this if the type of EXP is a subtype of something else
4178 since then the conversion might involve more than just
4179 converting modes. */
4180 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
7e7d1b4b
RH
4181 && TREE_TYPE (TREE_TYPE (exp)) == 0
4182 && (!lang_hooks.reduce_bit_field_operations
4183 || (GET_MODE_PRECISION (GET_MODE (target))
4184 == TYPE_PRECISION (TREE_TYPE (exp)))))
f635a84d 4185 {
8df83eae 4186 if (TYPE_UNSIGNED (TREE_TYPE (exp))
f635a84d 4187 != SUBREG_PROMOTED_UNSIGNED_P (target))
ceef8ce4 4188 exp = convert
ae2bcd98 4189 (lang_hooks.types.signed_or_unsigned_type
ceef8ce4 4190 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
f635a84d 4191
ae2bcd98 4192 exp = convert (lang_hooks.types.type_for_mode
b0c48229
NB
4193 (GET_MODE (SUBREG_REG (target)),
4194 SUBREG_PROMOTED_UNSIGNED_P (target)),
f635a84d 4195 exp);
b76b08ef
RK
4196
4197 inner_target = SUBREG_REG (target);
f635a84d 4198 }
3a94c984 4199
8403445a 4200 temp = expand_expr (exp, inner_target, VOIDmode,
6f4fd16d 4201 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
b258707c
RS
4202
4203 /* If TEMP is a VOIDmode constant, use convert_modes to make
4204 sure that we properly convert it. */
4205 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
1f1b0541
RH
4206 {
4207 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4208 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4209 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4210 GET_MODE (target), temp,
4211 SUBREG_PROMOTED_UNSIGNED_P (target));
4212 }
b258707c 4213
1499e0a8
RK
4214 convert_move (SUBREG_REG (target), temp,
4215 SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9 4216
436d948e 4217 return NULL_RTX;
1499e0a8 4218 }
bbf6f052
RK
4219 else
4220 {
0fab64a3 4221 temp = expand_expr_real (exp, target, GET_MODE (target),
6f4fd16d 4222 (call_param_p
0fab64a3
MM
4223 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4224 &alt_rtl);
766f36c7 4225 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
4226 If TARGET is a volatile mem ref, either return TARGET
4227 or return a reg copied *from* TARGET; ANSI requires this.
4228
4229 Otherwise, if TEMP is not TARGET, return TEMP
4230 if it is constant (for efficiency),
4231 or if we really want the correct value. */
f8cfc6aa 4232 if (!(target && REG_P (target)
bbf6f052 4233 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3c0cb5de 4234 && !(MEM_P (target) && MEM_VOLATILE_P (target))
effbcc6a 4235 && ! rtx_equal_p (temp, target)
436d948e 4236 && CONSTANT_P (temp))
bbf6f052
RK
4237 dont_return_target = 1;
4238 }
4239
b258707c
RS
4240 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4241 the same as that of TARGET, adjust the constant. This is needed, for
4242 example, in case it is a CONST_DOUBLE and we want only a word-sized
4243 value. */
4244 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 4245 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
4246 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4247 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
8df83eae 4248 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
b258707c 4249
bbf6f052 4250 /* If value was not generated in the target, store it there.
1bbd65cd
EB
4251 Convert the value to TARGET's type first if necessary and emit the
4252 pending incrementations that have been queued when expanding EXP.
4253 Note that we cannot emit the whole queue blindly because this will
4254 effectively disable the POST_INC optimization later.
4255
37a08a29 4256 If TEMP and TARGET compare equal according to rtx_equal_p, but
f3f2255a
R
4257 one or both of them are volatile memory refs, we have to distinguish
4258 two cases:
4259 - expand_expr has used TARGET. In this case, we must not generate
4260 another copy. This can be detected by TARGET being equal according
4261 to == .
4262 - expand_expr has not used TARGET - that means that the source just
4263 happens to have the same RTX form. Since temp will have been created
4264 by expand_expr, it will compare unequal according to == .
4265 We must generate a copy in this case, to reach the correct number
4266 of volatile memory references. */
bbf6f052 4267
6036acbb 4268 if ((! rtx_equal_p (temp, target)
f3f2255a
R
4269 || (temp != target && (side_effects_p (temp)
4270 || side_effects_p (target))))
e5408e52 4271 && TREE_CODE (exp) != ERROR_MARK
9c5c5f2c
MM
4272 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4273 but TARGET is not valid memory reference, TEMP will differ
4274 from TARGET although it is really the same location. */
0fab64a3 4275 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
535a42b1
NS
4276 /* If there's nothing to copy, don't bother. Don't call
4277	 expr_size unless necessary, because the expr_size hook of some
4278	 front-ends (C++) must not be given objects that are not
4279 supposed to be bit-copied or bit-initialized. */
e56fc090 4280 && expr_size (exp) != const0_rtx)
bbf6f052 4281 {
bbf6f052 4282 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 4283 && GET_MODE (temp) != VOIDmode)
bbf6f052 4284 {
8df83eae 4285 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
bbf6f052
RK
4286 if (dont_return_target)
4287 {
4288 /* In this case, we will return TEMP,
4289 so make sure it has the proper mode.
4290 But don't forget to store the value into TARGET. */
4291 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4292 emit_move_insn (target, temp);
4293 }
4294 else
4295 convert_move (target, temp, unsignedp);
4296 }
4297
4298 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4299 {
c24ae149
RK
4300 /* Handle copying a string constant into an array. The string
4301 constant may be shorter than the array. So copy just the string's
4302 actual length, and clear the rest. First get the size of the data
4303 type of the string, which is actually the size of the target. */
4304 rtx size = expr_size (exp);
bbf6f052 4305
e87b4f3f
RS
4306 if (GET_CODE (size) == CONST_INT
4307 && INTVAL (size) < TREE_STRING_LENGTH (exp))
8403445a 4308 emit_block_move (target, temp, size,
6f4fd16d 4309 (call_param_p
8403445a 4310 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4311 else
bbf6f052 4312 {
e87b4f3f
RS
4313 /* Compute the size of the data to copy from the string. */
4314 tree copy_size
c03b7665 4315 = size_binop (MIN_EXPR,
b50d17a1 4316 make_tree (sizetype, size),
fed3cef0 4317 size_int (TREE_STRING_LENGTH (exp)));
8403445a
AM
4318 rtx copy_size_rtx
4319 = expand_expr (copy_size, NULL_RTX, VOIDmode,
6f4fd16d 4320 (call_param_p
8403445a 4321 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
e87b4f3f
RS
4322 rtx label = 0;
4323
4324 /* Copy that much. */
267b28bd 4325 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
8df83eae 4326 TYPE_UNSIGNED (sizetype));
8403445a 4327 emit_block_move (target, temp, copy_size_rtx,
6f4fd16d 4328 (call_param_p
8403445a 4329 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4330
88f63c77
RK
4331 /* Figure out how much is left in TARGET that we have to clear.
4332 Do all calculations in ptr_mode. */
e87b4f3f
RS
4333 if (GET_CODE (copy_size_rtx) == CONST_INT)
4334 {
c24ae149
RK
4335 size = plus_constant (size, -INTVAL (copy_size_rtx));
4336 target = adjust_address (target, BLKmode,
4337 INTVAL (copy_size_rtx));
e87b4f3f
RS
4338 }
4339 else
4340 {
fa06ab5c 4341 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
906c4e36
RK
4342 copy_size_rtx, NULL_RTX, 0,
4343 OPTAB_LIB_WIDEN);
e87b4f3f 4344
c24ae149
RK
4345#ifdef POINTERS_EXTEND_UNSIGNED
4346 if (GET_MODE (copy_size_rtx) != Pmode)
267b28bd 4347 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
8df83eae 4348 TYPE_UNSIGNED (sizetype));
c24ae149
RK
4349#endif
4350
4351 target = offset_address (target, copy_size_rtx,
4352 highest_pow2_factor (copy_size));
e87b4f3f 4353 label = gen_label_rtx ();
c5d5d461 4354 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
a06ef755 4355 GET_MODE (size), 0, label);
e87b4f3f
RS
4356 }
4357
4358 if (size != const0_rtx)
8148fe65 4359 clear_storage (target, size, BLOCK_OP_NORMAL);
22619c3f 4360
e87b4f3f
RS
4361 if (label)
4362 emit_label (label);
bbf6f052
RK
4363 }
4364 }
fffa9c1d
JW
4365 /* Handle calls that return values in multiple non-contiguous locations.
4366 The Irix 6 ABI has examples of this. */
4367 else if (GET_CODE (target) == PARALLEL)
6e985040
AM
4368 emit_group_load (target, temp, TREE_TYPE (exp),
4369 int_size_in_bytes (TREE_TYPE (exp)));
bbf6f052 4370 else if (GET_MODE (temp) == BLKmode)
8403445a 4371 emit_block_move (target, temp, expr_size (exp),
6f4fd16d 4372 (call_param_p
8403445a 4373 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
bbf6f052 4374 else
b0dccb00
RH
4375 {
4376 temp = force_operand (temp, target);
4377 if (temp != target)
4378 emit_move_insn (target, temp);
4379 }
bbf6f052 4380 }
709f5be1 4381
436d948e 4382 return NULL_RTX;
bbf6f052
RK
4383}
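/* Illustrative sketch, not part of the compiler proper: a caller that
   already has the destination rtx simply does

       store_expr (rhs_tree, target_rtx, 0);

   with CALL_PARAM_P zero for an ordinary store ("rhs_tree" and
   "target_rtx" are hypothetical names); expand_assignment above is the
   usual source of such calls.  A nonzero CALL_PARAM_P flags a store into
   an outgoing call argument so that block moves are emitted with
   BLOCK_OP_CALL_PARM.  */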
4384\f
6fa91b48
SB
4385/* Examine CTOR to discover:
4386 * how many scalar fields are set to nonzero values,
4387 and place it in *P_NZ_ELTS;
4388 * how many scalar fields are set to non-constant values,
4389 and place it in *P_NC_ELTS; and
4390 * how many scalar fields in total are in CTOR,
6f642f98
RH
4391 and place it in *P_ELT_COUNT.
4392 * if a type is a union, and the initializer from the constructor
4393 is not the largest element in the union, then set *p_must_clear. */
9de08200 4394
6de9cd9a
DN
4395static void
4396categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
6fa91b48 4397 HOST_WIDE_INT *p_nc_elts,
6f642f98
RH
4398 HOST_WIDE_INT *p_elt_count,
4399 bool *p_must_clear)
9de08200 4400{
6fa91b48 4401 HOST_WIDE_INT nz_elts, nc_elts, elt_count;
6de9cd9a 4402 tree list;
9de08200 4403
6de9cd9a
DN
4404 nz_elts = 0;
4405 nc_elts = 0;
6fa91b48 4406 elt_count = 0;
caf93cb0 4407
6de9cd9a 4408 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
9de08200 4409 {
6de9cd9a
DN
4410 tree value = TREE_VALUE (list);
4411 tree purpose = TREE_PURPOSE (list);
4412 HOST_WIDE_INT mult;
9de08200 4413
6de9cd9a
DN
4414 mult = 1;
4415 if (TREE_CODE (purpose) == RANGE_EXPR)
4416 {
4417 tree lo_index = TREE_OPERAND (purpose, 0);
4418 tree hi_index = TREE_OPERAND (purpose, 1);
9de08200 4419
6de9cd9a
DN
4420 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4421 mult = (tree_low_cst (hi_index, 1)
4422 - tree_low_cst (lo_index, 1) + 1);
4423 }
9de08200 4424
6de9cd9a
DN
4425 switch (TREE_CODE (value))
4426 {
4427 case CONSTRUCTOR:
4428 {
6f642f98
RH
4429 HOST_WIDE_INT nz = 0, nc = 0, ic = 0;
4430 categorize_ctor_elements_1 (value, &nz, &nc, &ic, p_must_clear);
6de9cd9a
DN
4431 nz_elts += mult * nz;
4432 nc_elts += mult * nc;
6f642f98 4433 elt_count += mult * ic;
6de9cd9a
DN
4434 }
4435 break;
9de08200 4436
6de9cd9a
DN
4437 case INTEGER_CST:
4438 case REAL_CST:
4439 if (!initializer_zerop (value))
4440 nz_elts += mult;
6fa91b48 4441 elt_count += mult;
6de9cd9a 4442 break;
97f8d136
RK
4443
4444 case STRING_CST:
4445 nz_elts += mult * TREE_STRING_LENGTH (value);
6fa91b48 4446 elt_count += mult * TREE_STRING_LENGTH (value);
97f8d136
RK
4447 break;
4448
6de9cd9a
DN
4449 case COMPLEX_CST:
4450 if (!initializer_zerop (TREE_REALPART (value)))
4451 nz_elts += mult;
4452 if (!initializer_zerop (TREE_IMAGPART (value)))
4453 nz_elts += mult;
6fa91b48 4454 elt_count += mult;
6de9cd9a 4455 break;
97f8d136 4456
6de9cd9a
DN
4457 case VECTOR_CST:
4458 {
4459 tree v;
4460 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
6fa91b48
SB
4461 {
4462 if (!initializer_zerop (TREE_VALUE (v)))
4463 nz_elts += mult;
4464 elt_count += mult;
4465 }
6de9cd9a
DN
4466 }
4467 break;
69ef87e2 4468
6de9cd9a
DN
4469 default:
4470 nz_elts += mult;
6fa91b48 4471 elt_count += mult;
6de9cd9a
DN
4472 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4473 nc_elts += mult;
4474 break;
4475 }
4476 }
69ef87e2 4477
6f642f98
RH
4478 if (!*p_must_clear
4479 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4480 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4481 {
4482 tree init_sub_type;
486e4326 4483 bool clear_this = true;
6f642f98 4484
6f642f98 4485 list = CONSTRUCTOR_ELTS (ctor);
486e4326 4486 if (list)
6f642f98 4487 {
486e4326
RH
4488 /* We don't expect more than one element of the union to be
4489 initialized. Not sure what we should do otherwise... */
4490 gcc_assert (TREE_CHAIN (list) == NULL);
4491
4492 init_sub_type = TREE_TYPE (TREE_VALUE (list));
4493
4494 /* ??? We could look at each element of the union, and find the
4495 largest element. Which would avoid comparing the size of the
4496 initialized element against any tail padding in the union.
4497 Doesn't seem worth the effort... */
4498 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4499 TYPE_SIZE (init_sub_type)) == 1)
4500 {
4501 /* And now we have to find out if the element itself is fully
4502 constructed. E.g. for union { struct { int a, b; } s; } u
4503 = { .s = { .a = 1 } }. */
4504 if (elt_count == count_type_elements (init_sub_type))
4505 clear_this = false;
4506 }
6f642f98 4507 }
486e4326
RH
4508
4509 *p_must_clear = clear_this;
6f642f98
RH
4510 }
4511
6de9cd9a
DN
4512 *p_nz_elts += nz_elts;
4513 *p_nc_elts += nc_elts;
6fa91b48 4514 *p_elt_count += elt_count;
6de9cd9a
DN
4515}
4516
4517void
4518categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
6fa91b48 4519 HOST_WIDE_INT *p_nc_elts,
6f642f98
RH
4520 HOST_WIDE_INT *p_elt_count,
4521 bool *p_must_clear)
6de9cd9a
DN
4522{
4523 *p_nz_elts = 0;
4524 *p_nc_elts = 0;
6fa91b48 4525 *p_elt_count = 0;
6f642f98
RH
4526 *p_must_clear = false;
4527 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts, p_elt_count,
4528 p_must_clear);
6de9cd9a
DN
4529}
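/* Illustrative sketch, not part of the compiler proper: given a
   hypothetical CONSTRUCTOR "ctor" whose element list is { 1, 2, 0, 0 }
   for an int array,

       HOST_WIDE_INT nz, nc, cnt;
       bool must_clear;
       categorize_ctor_elements (ctor, &nz, &nc, &cnt, &must_clear);

   yields nz == 2 (nonzero elements), nc == 0 (non-constant elements),
   cnt == 4 (elements present) and must_clear false (not a union).
   mostly_zeros_p below compares NZ against count_type_elements of the
   whole type to decide whether clearing the object first is cheaper.  */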
4530
4531/* Count the number of scalars in TYPE. Return -1 on overflow or
4532 variable-sized. */
4533
4534HOST_WIDE_INT
4535count_type_elements (tree type)
4536{
4537 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4538 switch (TREE_CODE (type))
4539 {
4540 case ARRAY_TYPE:
4541 {
4542 tree telts = array_type_nelts (type);
4543 if (telts && host_integerp (telts, 1))
4544 {
5377d5ba 4545 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
6de9cd9a
DN
4546 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4547 if (n == 0)
4548 return 0;
5377d5ba 4549 else if (max / n > m)
6de9cd9a
DN
4550 return n * m;
4551 }
4552 return -1;
4553 }
4554
4555 case RECORD_TYPE:
4556 {
4557 HOST_WIDE_INT n = 0, t;
4558 tree f;
4559
4560 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4561 if (TREE_CODE (f) == FIELD_DECL)
4562 {
4563 t = count_type_elements (TREE_TYPE (f));
4564 if (t < 0)
4565 return -1;
4566 n += t;
4567 }
4568
4569 return n;
4570 }
9de08200 4571
6de9cd9a
DN
4572 case UNION_TYPE:
4573 case QUAL_UNION_TYPE:
4574 {
4575 /* Ho hum. How in the world do we guess here? Clearly it isn't
4576 right to count the fields. Guess based on the number of words. */
4577 HOST_WIDE_INT n = int_size_in_bytes (type);
4578 if (n < 0)
4579 return -1;
4580 return n / UNITS_PER_WORD;
4581 }
4582
4583 case COMPLEX_TYPE:
4584 return 2;
4585
4586 case VECTOR_TYPE:
3a021db2 4587 return TYPE_VECTOR_SUBPARTS (type);
6de9cd9a
DN
4588
4589 case INTEGER_TYPE:
4590 case REAL_TYPE:
4591 case ENUMERAL_TYPE:
4592 case BOOLEAN_TYPE:
4593 case CHAR_TYPE:
4594 case POINTER_TYPE:
4595 case OFFSET_TYPE:
4596 case REFERENCE_TYPE:
9de08200 4597 return 1;
3a94c984 4598
6de9cd9a
DN
4599 case VOID_TYPE:
4600 case METHOD_TYPE:
6de9cd9a
DN
4601 case FUNCTION_TYPE:
4602 case LANG_TYPE:
e9a25f70 4603 default:
5b0264cb 4604 gcc_unreachable ();
9de08200 4605 }
9de08200
RK
4606}
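/* Illustrative sketch, not part of the compiler proper:
   count_type_elements walks the type, not an initializer, so for

       struct { int x; double d[3]; _Complex float c; } t;

   it returns 1 + 3 + 2 = 6 scalars.  It returns -1 for variable-length
   arrays or when the element count would overflow, and unions are only
   estimated, by their size in words.  */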
4607
4608/* Return 1 if EXP contains mostly (3/4) zeros. */
4609
e0ce7708 4610static int
502b8322 4611mostly_zeros_p (tree exp)
9de08200 4612{
9de08200 4613 if (TREE_CODE (exp) == CONSTRUCTOR)
caf93cb0 4614
9de08200 4615 {
6fa91b48 4616 HOST_WIDE_INT nz_elts, nc_elts, count, elts;
6f642f98
RH
4617 bool must_clear;
4618
4619 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4620 if (must_clear)
4621 return 1;
6de9cd9a 4622
6de9cd9a 4623 elts = count_type_elements (TREE_TYPE (exp));
9de08200 4624
6de9cd9a 4625 return nz_elts < elts / 4;
9de08200
RK
4626 }
4627
6de9cd9a 4628 return initializer_zerop (exp);
9de08200
RK
4629}
4630\f
e1a43f73
PB
4631/* Helper function for store_constructor.
4632 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4633 TYPE is the type of the CONSTRUCTOR, not the element type.
04050c69 4634 CLEARED is as for store_constructor.
23cb1766 4635 ALIAS_SET is the alias set to use for any stores.
23ccec44
JW
4636
4637 This provides a recursive shortcut back to store_constructor when it isn't
4638 necessary to go through store_field. This is so that we can pass through
4639 the cleared field to let store_constructor know that we may not have to
4640 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
4641
4642static void
502b8322
AJ
4643store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4644 HOST_WIDE_INT bitpos, enum machine_mode mode,
4645 tree exp, tree type, int cleared, int alias_set)
e1a43f73
PB
4646{
4647 if (TREE_CODE (exp) == CONSTRUCTOR
6c89c39a
RK
4648 /* We can only call store_constructor recursively if the size and
4649 bit position are on a byte boundary. */
23ccec44 4650 && bitpos % BITS_PER_UNIT == 0
6c89c39a 4651 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
cc2902df 4652 /* If we have a nonzero bitpos for a register target, then we just
23ccec44
JW
4653 let store_field do the bitfield handling. This is unlikely to
4654 generate unnecessary clear instructions anyways. */
3c0cb5de 4655 && (bitpos == 0 || MEM_P (target)))
e1a43f73 4656 {
3c0cb5de 4657 if (MEM_P (target))
61cb205c
RK
4658 target
4659 = adjust_address (target,
4660 GET_MODE (target) == BLKmode
4661 || 0 != (bitpos
4662 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4663 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
23cb1766 4664
e0339ef7 4665
04050c69 4666 /* Update the alias set, if required. */
3c0cb5de 4667 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
10b76d73 4668 && MEM_ALIAS_SET (target) != 0)
70072ed9
RK
4669 {
4670 target = copy_rtx (target);
4671 set_mem_alias_set (target, alias_set);
4672 }
e0339ef7 4673
dbb5c281 4674 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
e1a43f73
PB
4675 }
4676 else
f45bdcd0 4677 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
e1a43f73
PB
4678}
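/* Illustrative sketch, not part of the compiler proper: for a nested
   aggregate initializer such as

       struct { struct { int a, b; } in; int c; } v = { { 1, 2 }, 3 };

   the outer store_constructor call reaches store_constructor_field for the
   "in" member with a CONSTRUCTOR value; because that member starts on a
   byte boundary, it recurses directly into store_constructor instead of
   going through store_field, passing CLEARED through so the inner struct
   is not cleared a second time when the outer object already was.  */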
4679
bbf6f052 4680/* Store the value of constructor EXP into the rtx TARGET.
04050c69
RK
4681 TARGET is either a REG or a MEM; we know it cannot conflict, since
4682 safe_from_p has been called.
dbb5c281
RK
4683 CLEARED is true if TARGET is known to have been zero'd.
4684 SIZE is the number of bytes of TARGET we are allowed to modify: this
b7010412
RK
4685 may not be the same as the size of EXP if we are assigning to a field
4686 which has been packed to exclude padding bits. */
bbf6f052
RK
4687
4688static void
502b8322 4689store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
bbf6f052 4690{
4af3895e 4691 tree type = TREE_TYPE (exp);
a5efcd63 4692#ifdef WORD_REGISTER_OPERATIONS
13eb1f7f 4693 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
a5efcd63 4694#endif
4af3895e 4695
5b0264cb 4696 switch (TREE_CODE (type))
bbf6f052 4697 {
5b0264cb
NS
4698 case RECORD_TYPE:
4699 case UNION_TYPE:
4700 case QUAL_UNION_TYPE:
4701 {
4702 tree elt;
9de08200 4703
5b0264cb
NS
4704 /* If size is zero or the target is already cleared, do nothing. */
4705 if (size == 0 || cleared)
9de08200 4706 cleared = 1;
5b0264cb
NS
4707 /* We either clear the aggregate or indicate the value is dead. */
4708 else if ((TREE_CODE (type) == UNION_TYPE
4709 || TREE_CODE (type) == QUAL_UNION_TYPE)
4710 && ! CONSTRUCTOR_ELTS (exp))
4711 /* If the constructor is empty, clear the union. */
4712 {
8148fe65 4713 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5b0264cb
NS
4714 cleared = 1;
4715 }
bbf6f052 4716
5b0264cb
NS
4717 /* If we are building a static constructor into a register,
4718 set the initial value as zero so we can fold the value into
4719 a constant. But if more than one register is involved,
4720 this probably loses. */
4721 else if (REG_P (target) && TREE_STATIC (exp)
4722 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4723 {
4724 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4725 cleared = 1;
4726 }
3a94c984 4727
5b0264cb
NS
4728 /* If the constructor has fewer fields than the structure or
4729 if we are initializing the structure to mostly zeros, clear
4730 the whole structure first. Don't do this if TARGET is a
4731 register whose mode size isn't equal to SIZE since
4732 clear_storage can't handle this case. */
4733 else if (size > 0
4734 && ((list_length (CONSTRUCTOR_ELTS (exp))
4735 != fields_length (type))
4736 || mostly_zeros_p (exp))
4737 && (!REG_P (target)
4738 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4739 == size)))
4740 {
8148fe65 4741 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5b0264cb
NS
4742 cleared = 1;
4743 }
b50d17a1 4744
5b0264cb
NS
4745 if (! cleared)
4746 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052 4747
5b0264cb
NS
4748 /* Store each element of the constructor into the
4749 corresponding field of TARGET. */
b50d17a1 4750
5b0264cb
NS
4751 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4752 {
4753 tree field = TREE_PURPOSE (elt);
4754 tree value = TREE_VALUE (elt);
4755 enum machine_mode mode;
4756 HOST_WIDE_INT bitsize;
4757 HOST_WIDE_INT bitpos = 0;
4758 tree offset;
4759 rtx to_rtx = target;
4760
4761 /* Just ignore missing fields. We cleared the whole
4762 structure, above, if any fields are missing. */
4763 if (field == 0)
4764 continue;
4765
4766 if (cleared && initializer_zerop (value))
4767 continue;
4768
4769 if (host_integerp (DECL_SIZE (field), 1))
4770 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4771 else
4772 bitsize = -1;
4773
4774 mode = DECL_MODE (field);
4775 if (DECL_BIT_FIELD (field))
4776 mode = VOIDmode;
4777
4778 offset = DECL_FIELD_OFFSET (field);
4779 if (host_integerp (offset, 0)
4780 && host_integerp (bit_position (field), 0))
4781 {
4782 bitpos = int_bit_position (field);
4783 offset = 0;
4784 }
4785 else
4786 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4787
4788 if (offset)
4789 {
4790 rtx offset_rtx;
4791
4792 offset
4793 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4794 make_tree (TREE_TYPE (exp),
4795 target));
4796
4797 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4798 gcc_assert (MEM_P (to_rtx));
4799
bd070e1a 4800#ifdef POINTERS_EXTEND_UNSIGNED
5b0264cb
NS
4801 if (GET_MODE (offset_rtx) != Pmode)
4802 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c 4803#else
5b0264cb
NS
4804 if (GET_MODE (offset_rtx) != ptr_mode)
4805 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 4806#endif
bd070e1a 4807
5b0264cb
NS
4808 to_rtx = offset_address (to_rtx, offset_rtx,
4809 highest_pow2_factor (offset));
4810 }
c5c76735 4811
34c73909 4812#ifdef WORD_REGISTER_OPERATIONS
5b0264cb
NS
4813 /* If this initializes a field that is smaller than a
4814 word, at the start of a word, try to widen it to a full
4815 word. This special case allows us to output C++ member
4816 function initializations in a form that the optimizers
4817 can understand. */
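	  /* E.g. a 16-bit integer field at bit position 0 of a structure
	     held in a 32-bit register, initialized with the constant 5,
	     is widened below to a full-word store of 5 (shifted into the
	     high part when BYTES_BIG_ENDIAN) instead of a bit-field
	     insertion.  */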
4818 if (REG_P (target)
4819 && bitsize < BITS_PER_WORD
4820 && bitpos % BITS_PER_WORD == 0
4821 && GET_MODE_CLASS (mode) == MODE_INT
4822 && TREE_CODE (value) == INTEGER_CST
4823 && exp_size >= 0
4824 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4825 {
4826 tree type = TREE_TYPE (value);
4827
4828 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4829 {
4830 type = lang_hooks.types.type_for_size
4831 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4832 value = convert (type, value);
4833 }
4834
4835 if (BYTES_BIG_ENDIAN)
4836 value
4845b383
KH
4837 = fold_build2 (LSHIFT_EXPR, type, value,
4838 build_int_cst (NULL_TREE,
4839 BITS_PER_WORD - bitsize));
5b0264cb
NS
4840 bitsize = BITS_PER_WORD;
4841 mode = word_mode;
4842 }
34c73909 4843#endif
10b76d73 4844
5b0264cb
NS
4845 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4846 && DECL_NONADDRESSABLE_P (field))
4847 {
4848 to_rtx = copy_rtx (to_rtx);
4849 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4850 }
4851
4852 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4853 value, type, cleared,
4854 get_alias_set (TREE_TYPE (field)));
4855 }
4856 break;
4857 }
4858 case ARRAY_TYPE:
4859 {
4860 tree elt;
4861 int i;
4862 int need_to_clear;
4863 tree domain;
4864 tree elttype = TREE_TYPE (type);
4865 int const_bounds_p;
4866 HOST_WIDE_INT minelt = 0;
4867 HOST_WIDE_INT maxelt = 0;
4868
4869 domain = TYPE_DOMAIN (type);
4870 const_bounds_p = (TYPE_MIN_VALUE (domain)
4871 && TYPE_MAX_VALUE (domain)
4872 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4873 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4874
4875 /* If we have constant bounds for the range of the type, get them. */
4876 if (const_bounds_p)
4877 {
4878 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4879 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4880 }
3a021db2 4881
5b0264cb
NS
4882 /* If the constructor has fewer elements than the array, clear
4883	   the whole array first.  Similarly if this is a static
4884 constructor of a non-BLKmode object. */
4885 if (cleared)
4886 need_to_clear = 0;
4887 else if (REG_P (target) && TREE_STATIC (exp))
4888 need_to_clear = 1;
4889 else
4890 {
4891 HOST_WIDE_INT count = 0, zero_count = 0;
4892 need_to_clear = ! const_bounds_p;
4893
4894 /* This loop is a more accurate version of the loop in
4895 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4896 is also needed to check for missing elements. */
4897 for (elt = CONSTRUCTOR_ELTS (exp);
4898 elt != NULL_TREE && ! need_to_clear;
4899 elt = TREE_CHAIN (elt))
4900 {
4901 tree index = TREE_PURPOSE (elt);
4902 HOST_WIDE_INT this_node_count;
4903
4904 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4905 {
4906 tree lo_index = TREE_OPERAND (index, 0);
4907 tree hi_index = TREE_OPERAND (index, 1);
4908
4909 if (! host_integerp (lo_index, 1)
4910 || ! host_integerp (hi_index, 1))
4911 {
4912 need_to_clear = 1;
4913 break;
4914 }
4915
4916 this_node_count = (tree_low_cst (hi_index, 1)
4917 - tree_low_cst (lo_index, 1) + 1);
4918 }
4919 else
4920 this_node_count = 1;
4921
4922 count += this_node_count;
4923 if (mostly_zeros_p (TREE_VALUE (elt)))
4924 zero_count += this_node_count;
4925 }
4926
4927 /* Clear the entire array first if there are any missing
4928 elements, or if the incidence of zero elements is >=
4929 75%. */
4930 if (! need_to_clear
4931 && (count < maxelt - minelt + 1
4932 || 4 * zero_count >= 3 * count))
4933 need_to_clear = 1;
4934 }
4935
4936 if (need_to_clear && size > 0)
4937 {
4938 if (REG_P (target))
4939 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4940 else
8148fe65 4941 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5b0264cb
NS
4942 cleared = 1;
4943 }
3a021db2 4944
5b0264cb
NS
4945 if (!cleared && REG_P (target))
4946 /* Inform later passes that the old value is dead. */
4947 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3a021db2 4948
5b0264cb
NS
4949 /* Store each element of the constructor into the
4950 corresponding element of TARGET, determined by counting the
4951 elements. */
4952 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4953 elt;
4954 elt = TREE_CHAIN (elt), i++)
4955 {
4956 enum machine_mode mode;
4957 HOST_WIDE_INT bitsize;
4958 HOST_WIDE_INT bitpos;
4959 int unsignedp;
4960 tree value = TREE_VALUE (elt);
4961 tree index = TREE_PURPOSE (elt);
4962 rtx xtarget = target;
4963
4964 if (cleared && initializer_zerop (value))
4965 continue;
4966
4967 unsignedp = TYPE_UNSIGNED (elttype);
4968 mode = TYPE_MODE (elttype);
4969 if (mode == BLKmode)
4970 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4971 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4972 : -1);
4973 else
4974 bitsize = GET_MODE_BITSIZE (mode);
4975
4976 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4977 {
4978 tree lo_index = TREE_OPERAND (index, 0);
4979 tree hi_index = TREE_OPERAND (index, 1);
4980 rtx index_r, pos_rtx;
4981 HOST_WIDE_INT lo, hi, count;
4982 tree position;
4983
4984 /* If the range is constant and "small", unroll the loop. */
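		  /* E.g. a GNU range designator such as
		       int a[100] = { [0 ... 3] = 7 };
		     gives a RANGE_EXPR index; with constant bounds and a
		     small total size the element stores are emitted
		     inline, otherwise the else branch below builds a
		     runtime loop over the range.  */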
4985 if (const_bounds_p
4986 && host_integerp (lo_index, 0)
4987 && host_integerp (hi_index, 0)
4988 && (lo = tree_low_cst (lo_index, 0),
4989 hi = tree_low_cst (hi_index, 0),
4990 count = hi - lo + 1,
4991 (!MEM_P (target)
4992 || count <= 2
4993 || (host_integerp (TYPE_SIZE (elttype), 1)
4994 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4995 <= 40 * 8)))))
4996 {
4997 lo -= minelt; hi -= minelt;
4998 for (; lo <= hi; lo++)
4999 {
5000 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5001
5002 if (MEM_P (target)
5003 && !MEM_KEEP_ALIAS_SET_P (target)
5004 && TREE_CODE (type) == ARRAY_TYPE
5005 && TYPE_NONALIASED_COMPONENT (type))
5006 {
5007 target = copy_rtx (target);
5008 MEM_KEEP_ALIAS_SET_P (target) = 1;
5009 }
5010
5011 store_constructor_field
5012 (target, bitsize, bitpos, mode, value, type, cleared,
5013 get_alias_set (elttype));
5014 }
5015 }
5016 else
5017 {
5018 rtx loop_start = gen_label_rtx ();
5019 rtx loop_end = gen_label_rtx ();
5020 tree exit_cond;
5021
5022 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5023 unsignedp = TYPE_UNSIGNED (domain);
5024
5025 index = build_decl (VAR_DECL, NULL_TREE, domain);
5026
5027 index_r
5028 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5029 &unsignedp, 0));
5030 SET_DECL_RTL (index, index_r);
5031 store_expr (lo_index, index_r, 0);
5032
5033 /* Build the head of the loop. */
5034 do_pending_stack_adjust ();
5035 emit_label (loop_start);
5036
5037 /* Assign value to element index. */
5038 position
5039 = convert (ssizetype,
4845b383
KH
5040 fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5041 index, TYPE_MIN_VALUE (domain)));
5b0264cb
NS
5042 position = size_binop (MULT_EXPR, position,
5043 convert (ssizetype,
5044 TYPE_SIZE_UNIT (elttype)));
5045
5046 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5047 xtarget = offset_address (target, pos_rtx,
5048 highest_pow2_factor (position));
5049 xtarget = adjust_address (xtarget, mode, 0);
5050 if (TREE_CODE (value) == CONSTRUCTOR)
5051 store_constructor (value, xtarget, cleared,
5052 bitsize / BITS_PER_UNIT);
5053 else
5054 store_expr (value, xtarget, 0);
5055
5056 /* Generate a conditional jump to exit the loop. */
5057 exit_cond = build2 (LT_EXPR, integer_type_node,
5058 index, hi_index);
5059 jumpif (exit_cond, loop_end);
5060
5061 /* Update the loop counter, and jump to the head of
5062 the loop. */
5063 expand_assignment (index,
5064 build2 (PLUS_EXPR, TREE_TYPE (index),
e836a5a2 5065 index, integer_one_node));
5b0264cb
NS
5066
5067 emit_jump (loop_start);
5068
5069 /* Build the end of the loop. */
5070 emit_label (loop_end);
5071 }
5072 }
5073 else if ((index != 0 && ! host_integerp (index, 0))
5074 || ! host_integerp (TYPE_SIZE (elttype), 1))
5075 {
5076 tree position;
5077
5078 if (index == 0)
5079 index = ssize_int (1);
5080
5081 if (minelt)
5082 index = fold_convert (ssizetype,
4845b383
KH
5083 fold_build2 (MINUS_EXPR,
5084 TREE_TYPE (index),
5085 index,
5086 TYPE_MIN_VALUE (domain)));
5b0264cb
NS
5087
5088 position = size_binop (MULT_EXPR, index,
5089 convert (ssizetype,
5090 TYPE_SIZE_UNIT (elttype)));
5091 xtarget = offset_address (target,
5092 expand_expr (position, 0, VOIDmode, 0),
5093 highest_pow2_factor (position));
5094 xtarget = adjust_address (xtarget, mode, 0);
5095 store_expr (value, xtarget, 0);
5096 }
5097 else
5098 {
5099 if (index != 0)
5100 bitpos = ((tree_low_cst (index, 0) - minelt)
5101 * tree_low_cst (TYPE_SIZE (elttype), 1));
5102 else
5103 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5104
5105 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5106 && TREE_CODE (type) == ARRAY_TYPE
5107 && TYPE_NONALIASED_COMPONENT (type))
5108 {
5109 target = copy_rtx (target);
5110 MEM_KEEP_ALIAS_SET_P (target) = 1;
5111 }
5112 store_constructor_field (target, bitsize, bitpos, mode, value,
5113 type, cleared, get_alias_set (elttype));
5114 }
5115 }
5116 break;
5117 }
3a021db2 5118
5b0264cb
NS
5119 case VECTOR_TYPE:
5120 {
5121 tree elt;
5122 int i;
5123 int need_to_clear;
5124 int icode = 0;
5125 tree elttype = TREE_TYPE (type);
5126 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5127 enum machine_mode eltmode = TYPE_MODE (elttype);
5128 HOST_WIDE_INT bitsize;
5129 HOST_WIDE_INT bitpos;
201dd46b 5130 rtvec vector = NULL;
5b0264cb
NS
5131 unsigned n_elts;
5132
5133 gcc_assert (eltmode != BLKmode);
5134
5135 n_elts = TYPE_VECTOR_SUBPARTS (type);
5136 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5137 {
5138 enum machine_mode mode = GET_MODE (target);
5139
5140 icode = (int) vec_init_optab->handlers[mode].insn_code;
5141 if (icode != CODE_FOR_nothing)
5142 {
5143 unsigned int i;
5144
201dd46b 5145 vector = rtvec_alloc (n_elts);
5b0264cb 5146 for (i = 0; i < n_elts; i++)
201dd46b 5147 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5b0264cb
NS
5148 }
5149 }
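	/* When the target has a vec_init pattern for this mode, the
	   constructor elements are collected in the VECTOR rtvec just
	   allocated (pre-filled with zeros) and emitted as a single
	   PARALLEL at the end of this case, instead of as a sequence of
	   individual element stores.  */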
5150
5151 /* If the constructor has fewer elements than the vector,
5152	   clear the whole vector first.  Similarly if this is a static
5153 constructor of a non-BLKmode object. */
5154 if (cleared)
5155 need_to_clear = 0;
5156 else if (REG_P (target) && TREE_STATIC (exp))
5157 need_to_clear = 1;
5158 else
5159 {
5160 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5161
5162 for (elt = CONSTRUCTOR_ELTS (exp);
5163 elt != NULL_TREE;
5164 elt = TREE_CHAIN (elt))
5165 {
5166 int n_elts_here = tree_low_cst
5167 (int_const_binop (TRUNC_DIV_EXPR,
5168 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
5169 TYPE_SIZE (elttype), 0), 1);
5170
5171 count += n_elts_here;
5172 if (mostly_zeros_p (TREE_VALUE (elt)))
5173 zero_count += n_elts_here;
5174 }
3a021db2 5175
5b0264cb
NS
5176 /* Clear the entire vector first if there are any missing elements,
5177 or if the incidence of zero elements is >= 75%. */
5178 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5179 }
5180
5181 if (need_to_clear && size > 0 && !vector)
5182 {
5183 if (REG_P (target))
5184 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5185 else
8148fe65 5186 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5b0264cb
NS
5187 cleared = 1;
5188 }
5189
2ab1754e 5190 /* Inform later passes that the old value is dead. */
5b0264cb 5191 if (!cleared && REG_P (target))
2ab1754e 5192 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5b0264cb
NS
5193
5194 /* Store each element of the constructor into the corresponding
5195 element of TARGET, determined by counting the elements. */
5196 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5197 elt;
5198 elt = TREE_CHAIN (elt), i += bitsize / elt_size)
5199 {
5200 tree value = TREE_VALUE (elt);
5201 tree index = TREE_PURPOSE (elt);
5202 HOST_WIDE_INT eltpos;
5203
5204 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5205 if (cleared && initializer_zerop (value))
5206 continue;
5207
5208 if (index != 0)
5209 eltpos = tree_low_cst (index, 1);
5210 else
5211 eltpos = i;
5212
5213 if (vector)
5214 {
5215 /* Vector CONSTRUCTORs should only be built from smaller
5216 vectors in the case of BLKmode vectors. */
5217 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
201dd46b
RH
5218 RTVEC_ELT (vector, eltpos)
5219 = expand_expr (value, NULL_RTX, VOIDmode, 0);
5b0264cb
NS
5220 }
5221 else
5222 {
5223 enum machine_mode value_mode =
5224 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
3a021db2
PB
5225 ? TYPE_MODE (TREE_TYPE (value))
5226 : eltmode;
5b0264cb
NS
5227 bitpos = eltpos * elt_size;
5228 store_constructor_field (target, bitsize, bitpos,
5229 value_mode, value, type,
5230 cleared, get_alias_set (elttype));
5231 }
5232 }
5233
5234 if (vector)
5235 emit_insn (GEN_FCN (icode)
5236 (target,
201dd46b 5237 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5b0264cb
NS
5238 break;
5239 }
08f2586c 5240
5b0264cb
NS
5241 default:
5242 gcc_unreachable ();
071a6595 5243 }
bbf6f052
RK
5244}
5245
5246/* Store the value of EXP (an expression tree)
5247 into a subfield of TARGET which has mode MODE and occupies
5248 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5249 If MODE is VOIDmode, it means that we are storing into a bit-field.
5250
f45bdcd0
KH
5251 Always return const0_rtx unless we have something particular to
5252 return.
bbf6f052 5253
a06ef755 5254   TYPE is the type of the underlying object.
ece32014
MM
5255
5256 ALIAS_SET is the alias set for the destination. This value will
5257 (in general) be different from that for TARGET, since TARGET is a
5258 reference to the containing structure. */
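
/* For instance, an assignment to a bit-field member such as

     struct { unsigned f : 3; } s;
     s.f = 5;

   reaches this function with MODE == VOIDmode and BITSIZE == 3 and is
   carried out by store_bit_field below, whereas a normally aligned
   member is stored through a plain adjusted MEM via store_expr.  */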
bbf6f052
RK
5259
5260static rtx
502b8322 5261store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
f45bdcd0 5262 enum machine_mode mode, tree exp, tree type, int alias_set)
bbf6f052 5263{
906c4e36 5264 HOST_WIDE_INT width_mask = 0;
bbf6f052 5265
e9a25f70
JL
5266 if (TREE_CODE (exp) == ERROR_MARK)
5267 return const0_rtx;
5268
2be6a7e9
RK
5269 /* If we have nothing to store, do nothing unless the expression has
5270 side-effects. */
5271 if (bitsize == 0)
5272 return expand_expr (exp, const0_rtx, VOIDmode, 0);
6a87d634 5273 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
906c4e36 5274 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
5275
5276 /* If we are storing into an unaligned field of an aligned union that is
5277 in a register, we may have the mode of TARGET being an integer mode but
5278 MODE == BLKmode. In that case, get an aligned object whose size and
5279 alignment are the same as TARGET and store TARGET into it (we can avoid
5280 the store if the field being stored is the entire width of TARGET). Then
5281 call ourselves recursively to store the field into a BLKmode version of
5282 that object. Finally, load from the object into TARGET. This is not
5283 very efficient in general, but should only be slightly more expensive
5284 than the otherwise-required unaligned accesses. Perhaps this can be
85a43a2f
RK
5285 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5286 twice, once with emit_move_insn and once via store_field. */
bbf6f052
RK
5287
5288 if (mode == BLKmode
f8cfc6aa 5289 && (REG_P (target) || GET_CODE (target) == SUBREG))
bbf6f052 5290 {
85a43a2f 5291 rtx object = assign_temp (type, 0, 1, 1);
c4e59f51 5292 rtx blk_object = adjust_address (object, BLKmode, 0);
bbf6f052 5293
8752c357 5294 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
bbf6f052
RK
5295 emit_move_insn (object, target);
5296
f45bdcd0 5297 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
bbf6f052
RK
5298
5299 emit_move_insn (target, object);
5300
a06ef755 5301 /* We want to return the BLKmode version of the data. */
46093b97 5302 return blk_object;
bbf6f052 5303 }
c3b247b4
JM
5304
5305 if (GET_CODE (target) == CONCAT)
5306 {
5307 /* We're storing into a struct containing a single __complex. */
5308
5b0264cb 5309 gcc_assert (!bitpos);
f45bdcd0 5310 return store_expr (exp, target, 0);
c3b247b4 5311 }
bbf6f052
RK
5312
5313 /* If the structure is in a register or if the component
5314 is a bit field, we cannot use addressing to access it.
5315 Use bit-field techniques or SUBREG to store in it. */
5316
4fa52007 5317 if (mode == VOIDmode
6ab06cbb
JW
5318 || (mode != BLKmode && ! direct_store[(int) mode]
5319 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5320 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
f8cfc6aa 5321 || REG_P (target)
c980ac49 5322 || GET_CODE (target) == SUBREG
ccc98036
RS
5323 /* If the field isn't aligned enough to store as an ordinary memref,
5324 store it as a bit field. */
15b19a7d 5325 || (mode != BLKmode
9e5f281f
OH
5326 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5327 || bitpos % GET_MODE_ALIGNMENT (mode))
5328 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
502b8322 5329 || (bitpos % BITS_PER_UNIT != 0)))
14a774a9
RK
5330 /* If the RHS and field are a constant size and the size of the
5331 RHS isn't the same size as the bitfield, we must use bitfield
5332 operations. */
05bccae2
RK
5333 || (bitsize >= 0
5334 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5335 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
bbf6f052 5336 {
48cc8d3b
RH
5337 rtx temp;
5338
5339 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5340 implies a mask operation. If the precision is the same size as
5341 the field we're storing into, that mask is redundant. This is
5342 particularly common with bit field assignments generated by the
5343 C front end. */
8d740330
RH
5344 if (TREE_CODE (exp) == NOP_EXPR)
5345 {
5346 tree type = TREE_TYPE (exp);
5347 if (INTEGRAL_TYPE_P (type)
5348 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5349 && bitsize == TYPE_PRECISION (type))
5350 {
5351 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5352 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5353 exp = TREE_OPERAND (exp, 0);
5354 }
5355 }
48cc8d3b
RH
5356
5357 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 5358
ef19912d
RK
5359 /* If BITSIZE is narrower than the size of the type of EXP
5360 we will be narrowing TEMP. Normally, what's wanted are the
5361 low-order bits. However, if EXP's type is a record and this is
5362	     a big-endian machine, we want the upper BITSIZE bits.  */
5363 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
65a07688 5364 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
ef19912d
RK
5365 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5366 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5367 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5368 - bitsize),
c1853da7 5369 NULL_RTX, 1);
ef19912d 5370
bbd6cf73
RK
5371 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5372 MODE. */
5373 if (mode != VOIDmode && mode != BLKmode
5374 && mode != TYPE_MODE (TREE_TYPE (exp)))
5375 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5376
a281e72d
RK
5377 /* If the modes of TARGET and TEMP are both BLKmode, both
5378 must be in memory and BITPOS must be aligned on a byte
5379 boundary. If so, we simply do a block copy. */
5380 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5381 {
5b0264cb
NS
5382 gcc_assert (MEM_P (target) && MEM_P (temp)
5383 && !(bitpos % BITS_PER_UNIT));
a281e72d 5384
f4ef873c 5385 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
a281e72d 5386 emit_block_move (target, temp,
a06ef755 5387 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a
RH
5388 / BITS_PER_UNIT),
5389 BLOCK_OP_NORMAL);
a281e72d 5390
f45bdcd0 5391 return const0_rtx;
a281e72d
RK
5392 }
5393
bbf6f052 5394 /* Store the value in the bitfield. */
b3520980 5395 store_bit_field (target, bitsize, bitpos, mode, temp);
a06ef755 5396
bbf6f052
RK
5397 return const0_rtx;
5398 }
5399 else
5400 {
bbf6f052 5401 /* Now build a reference to just the desired component. */
f45bdcd0 5402 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
a06ef755
RK
5403
5404 if (to_rtx == target)
5405 to_rtx = copy_rtx (to_rtx);
792760b9 5406
c6df88cb 5407 MEM_SET_IN_STRUCT_P (to_rtx, 1);
10b76d73 5408 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
a06ef755 5409 set_mem_alias_set (to_rtx, alias_set);
bbf6f052 5410
f45bdcd0 5411 return store_expr (exp, to_rtx, 0);
bbf6f052
RK
5412 }
5413}
5414\f
5415/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
b4e3fabb
RK
5416 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5417 codes and find the ultimate containing object, which we return.
bbf6f052
RK
5418
5419 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5420 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
5421 If the position of the field is variable, we store a tree
5422 giving the variable offset (in units) in *POFFSET.
5423 This offset is in addition to the bit position.
5424 If the position is not variable, we store 0 in *POFFSET.
bbf6f052
RK
5425
5426 If any of the extraction expressions is volatile,
5427 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5428
5429 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5430 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
5431 is redundant.
5432
5433 If the field describes a variable-sized object, *PMODE is set to
5434 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2614034e
EB
5435 this case, but the address of the object can be found.
5436
5437 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5438 look through nodes that serve as markers of a greater alignment than
5439 the one that can be deduced from the expression. These nodes make it
5440 possible for front-ends to prevent temporaries from being created by
5441 the middle-end on alignment considerations. For that purpose, the
5442 normal operating mode at high-level is to always pass FALSE so that
5443 the ultimate containing object is really returned; moreover, the
5444 associated predicate handled_component_p will always return TRUE
5445 on these nodes, thus indicating that they are essentially handled
5446 by get_inner_reference. TRUE should only be passed when the caller
5447 is scanning the expression in order to build another representation
5448 and specifically knows how to handle these nodes; as such, this is
5449 the normal operating mode in the RTL expanders. */
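
/* For example, for a reference like p->s.a[i] with int elements and
   constant field positions, the returned tree is the innermost object
   *p, *PBITPOS holds the constant bit offset of the start of the array
   within it, *POFFSET holds the variable part (roughly i * sizeof (int)),
   and *PMODE and *PBITSIZE describe a single element.  */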
bbf6f052
RK
5450
5451tree
502b8322
AJ
5452get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5453 HOST_WIDE_INT *pbitpos, tree *poffset,
5454 enum machine_mode *pmode, int *punsignedp,
2614034e 5455 int *pvolatilep, bool keep_aligning)
bbf6f052
RK
5456{
5457 tree size_tree = 0;
5458 enum machine_mode mode = VOIDmode;
fed3cef0 5459 tree offset = size_zero_node;
770ae6cc 5460 tree bit_offset = bitsize_zero_node;
770ae6cc 5461 tree tem;
bbf6f052 5462
770ae6cc
RK
5463 /* First get the mode, signedness, and size. We do this from just the
5464 outermost expression. */
bbf6f052
RK
5465 if (TREE_CODE (exp) == COMPONENT_REF)
5466 {
5467 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5468 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5469 mode = DECL_MODE (TREE_OPERAND (exp, 1));
770ae6cc 5470
a150de29 5471 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
bbf6f052
RK
5472 }
5473 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5474 {
5475 size_tree = TREE_OPERAND (exp, 1);
a150de29 5476 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
bbf6f052
RK
5477 }
5478 else
5479 {
5480 mode = TYPE_MODE (TREE_TYPE (exp));
8df83eae 5481 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
770ae6cc 5482
ab87f8c8
JL
5483 if (mode == BLKmode)
5484 size_tree = TYPE_SIZE (TREE_TYPE (exp));
770ae6cc
RK
5485 else
5486 *pbitsize = GET_MODE_BITSIZE (mode);
bbf6f052 5487 }
3a94c984 5488
770ae6cc 5489 if (size_tree != 0)
bbf6f052 5490 {
770ae6cc 5491 if (! host_integerp (size_tree, 1))
e7c33f54
RK
5492 mode = BLKmode, *pbitsize = -1;
5493 else
770ae6cc 5494 *pbitsize = tree_low_cst (size_tree, 1);
bbf6f052
RK
5495 }
5496
5497 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5498 and find the ultimate containing object. */
bbf6f052
RK
5499 while (1)
5500 {
afe84921 5501 switch (TREE_CODE (exp))
bbf6f052 5502 {
afe84921
RH
5503 case BIT_FIELD_REF:
5504 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5505 TREE_OPERAND (exp, 2));
5506 break;
bbf6f052 5507
afe84921
RH
5508 case COMPONENT_REF:
5509 {
5510 tree field = TREE_OPERAND (exp, 1);
5511 tree this_offset = component_ref_field_offset (exp);
e7f3c83f 5512
afe84921
RH
5513 /* If this field hasn't been filled in yet, don't go past it.
5514 This should only happen when folding expressions made during
5515 type construction. */
5516 if (this_offset == 0)
5517 break;
e6d8c385 5518
afe84921
RH
5519 offset = size_binop (PLUS_EXPR, offset, this_offset);
5520 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5521 DECL_FIELD_BIT_OFFSET (field));
7156dead 5522
afe84921
RH
5523 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5524 }
5525 break;
7156dead 5526
afe84921
RH
5527 case ARRAY_REF:
5528 case ARRAY_RANGE_REF:
5529 {
5530 tree index = TREE_OPERAND (exp, 1);
5531 tree low_bound = array_ref_low_bound (exp);
5532 tree unit_size = array_ref_element_size (exp);
5533
5534 /* We assume all arrays have sizes that are a multiple of a byte.
5535 First subtract the lower bound, if any, in the type of the
5536 index, then convert to sizetype and multiply by the size of
5537 the array element. */
5538 if (! integer_zerop (low_bound))
4845b383
KH
5539 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5540 index, low_bound);
afe84921
RH
5541
5542 offset = size_binop (PLUS_EXPR, offset,
5543 size_binop (MULT_EXPR,
5544 convert (sizetype, index),
5545 unit_size));
5546 }
5547 break;
5548
5549 case REALPART_EXPR:
afe84921
RH
5550 break;
5551
5552 case IMAGPART_EXPR:
9f25f0ad
RH
5553 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5554 bitsize_int (*pbitsize));
afe84921
RH
5555 break;
5556
afe84921 5557 case VIEW_CONVERT_EXPR:
2614034e
EB
5558 if (keep_aligning && STRICT_ALIGNMENT
5559 && (TYPE_ALIGN (TREE_TYPE (exp))
afe84921 5560 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
afe84921
RH
5561 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5562 < BIGGEST_ALIGNMENT)
5563 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5564 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5565 goto done;
5566 break;
5567
5568 default:
5569 goto done;
5570 }
7bb0943f
RS
5571
5572 /* If any reference in the chain is volatile, the effect is volatile. */
5573 if (TREE_THIS_VOLATILE (exp))
5574 *pvolatilep = 1;
839c4796 5575
bbf6f052
RK
5576 exp = TREE_OPERAND (exp, 0);
5577 }
afe84921 5578 done:
bbf6f052 5579
770ae6cc
RK
5580 /* If OFFSET is constant, see if we can return the whole thing as a
5581 constant bit position. Otherwise, split it up. */
5582 if (host_integerp (offset, 0)
5583 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5584 bitsize_unit_node))
5585 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5586 && host_integerp (tem, 0))
5587 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5588 else
5589 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
b50d17a1 5590
bbf6f052 5591 *pmode = mode;
bbf6f052
RK
5592 return exp;
5593}
921b3427 5594
44de5aeb
RK
5595/* Return a tree of sizetype representing the size, in bytes, of the element
5596 of EXP, an ARRAY_REF. */
5597
5598tree
5599array_ref_element_size (tree exp)
5600{
5601 tree aligned_size = TREE_OPERAND (exp, 3);
5602 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5603
5604 /* If a size was specified in the ARRAY_REF, it's the size measured
5605 in alignment units of the element type. So multiply by that value. */
5606 if (aligned_size)
bc482be4
RH
5607 {
5608 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5609 sizetype from another type of the same width and signedness. */
5610 if (TREE_TYPE (aligned_size) != sizetype)
5611 aligned_size = fold_convert (sizetype, aligned_size);
5612 return size_binop (MULT_EXPR, aligned_size,
a4e9ffe5 5613 size_int (TYPE_ALIGN_UNIT (elmt_type)));
bc482be4 5614 }
44de5aeb 5615
caf93cb0 5616 /* Otherwise, take the size from that of the element type. Substitute
44de5aeb
RK
5617 any PLACEHOLDER_EXPR that we have. */
5618 else
5619 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5620}
5621
5622/* Return a tree representing the lower bound of the array mentioned in
5623 EXP, an ARRAY_REF. */
5624
5625tree
5626array_ref_low_bound (tree exp)
5627{
5628 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5629
5630 /* If a lower bound is specified in EXP, use it. */
5631 if (TREE_OPERAND (exp, 2))
5632 return TREE_OPERAND (exp, 2);
5633
5634 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5635 substituting for a PLACEHOLDER_EXPR as needed. */
5636 if (domain_type && TYPE_MIN_VALUE (domain_type))
5637 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5638
5639 /* Otherwise, return a zero of the appropriate type. */
5212068f 5640 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
44de5aeb
RK
5641}
5642
a7e5372d
ZD
5643/* Return a tree representing the upper bound of the array mentioned in
5644 EXP, an ARRAY_REF. */
5645
5646tree
5647array_ref_up_bound (tree exp)
5648{
5649 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5650
5651 /* If there is a domain type and it has an upper bound, use it, substituting
5652 for a PLACEHOLDER_EXPR as needed. */
5653 if (domain_type && TYPE_MAX_VALUE (domain_type))
5654 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5655
5656 /* Otherwise fail. */
5657 return NULL_TREE;
5658}
5659
44de5aeb
RK
5660/* Return a tree representing the offset, in bytes, of the field referenced
5661 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5662
5663tree
5664component_ref_field_offset (tree exp)
5665{
5666 tree aligned_offset = TREE_OPERAND (exp, 2);
5667 tree field = TREE_OPERAND (exp, 1);
5668
5669 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5670 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5671 value. */
5672 if (aligned_offset)
bc482be4
RH
5673 {
5674 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5675 sizetype from another type of the same width and signedness. */
5676 if (TREE_TYPE (aligned_offset) != sizetype)
5677 aligned_offset = fold_convert (sizetype, aligned_offset);
5678 return size_binop (MULT_EXPR, aligned_offset,
5679 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5680 }
44de5aeb 5681
caf93cb0 5682 /* Otherwise, take the offset from that of the field. Substitute
44de5aeb
RK
5683 any PLACEHOLDER_EXPR that we have. */
5684 else
5685 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5686}
5687
ed239f5a
RK
5688/* Return 1 if T is an expression that get_inner_reference handles. */
5689
5690int
502b8322 5691handled_component_p (tree t)
ed239f5a
RK
5692{
5693 switch (TREE_CODE (t))
5694 {
5695 case BIT_FIELD_REF:
5696 case COMPONENT_REF:
5697 case ARRAY_REF:
5698 case ARRAY_RANGE_REF:
ed239f5a 5699 case VIEW_CONVERT_EXPR:
afe84921
RH
5700 case REALPART_EXPR:
5701 case IMAGPART_EXPR:
ed239f5a
RK
5702 return 1;
5703
ed239f5a
RK
5704 default:
5705 return 0;
5706 }
5707}
bbf6f052 5708\f
3fe44edd
RK
5709/* Given an rtx VALUE that may contain additions and multiplications, return
5710 an equivalent value that just refers to a register, memory, or constant.
5711 This is done by generating instructions to perform the arithmetic and
5712 returning a pseudo-register containing the value.
c45a13a6
RK
5713
5714 The returned value may be a REG, SUBREG, MEM or constant. */
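
/* For instance, a VALUE such as

     (plus:SI (mult:SI (reg:SI 100) (const_int 4)) (const_int 8))

   is not a general operand; this routine emits the multiply and the
   add and returns a pseudo register holding the result (constants,
   registers and memory references are returned unchanged).  */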
bbf6f052
RK
5715
5716rtx
502b8322 5717force_operand (rtx value, rtx target)
bbf6f052 5718{
8a28dbcc 5719 rtx op1, op2;
bbf6f052 5720 /* Use subtarget as the target for operand 0 of a binary operation. */
b3694847 5721 rtx subtarget = get_subtarget (target);
8a28dbcc 5722 enum rtx_code code = GET_CODE (value);
bbf6f052 5723
50654f6c
ZD
5724  /* Check for a subreg applied to an expression produced by the loop optimizer.  */
5725 if (code == SUBREG
f8cfc6aa 5726 && !REG_P (SUBREG_REG (value))
3c0cb5de 5727 && !MEM_P (SUBREG_REG (value)))
50654f6c
ZD
5728 {
5729 value = simplify_gen_subreg (GET_MODE (value),
5730 force_reg (GET_MODE (SUBREG_REG (value)),
5731 force_operand (SUBREG_REG (value),
5732 NULL_RTX)),
5733 GET_MODE (SUBREG_REG (value)),
5734 SUBREG_BYTE (value));
5735 code = GET_CODE (value);
5736 }
5737
8b015896 5738 /* Check for a PIC address load. */
8a28dbcc 5739 if ((code == PLUS || code == MINUS)
8b015896
RH
5740 && XEXP (value, 0) == pic_offset_table_rtx
5741 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5742 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5743 || GET_CODE (XEXP (value, 1)) == CONST))
5744 {
5745 if (!subtarget)
5746 subtarget = gen_reg_rtx (GET_MODE (value));
5747 emit_move_insn (subtarget, value);
5748 return subtarget;
5749 }
5750
8a28dbcc 5751 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
bbf6f052 5752 {
8a28dbcc
JH
5753 if (!target)
5754 target = gen_reg_rtx (GET_MODE (value));
ce0f3925 5755 convert_move (target, force_operand (XEXP (value, 0), NULL),
8a28dbcc
JH
5756 code == ZERO_EXTEND);
5757 return target;
bbf6f052
RK
5758 }
5759
ec8e098d 5760 if (ARITHMETIC_P (value))
bbf6f052
RK
5761 {
5762 op2 = XEXP (value, 1);
f8cfc6aa 5763 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
bbf6f052 5764 subtarget = 0;
8a28dbcc 5765 if (code == MINUS && GET_CODE (op2) == CONST_INT)
bbf6f052 5766 {
8a28dbcc 5767 code = PLUS;
bbf6f052
RK
5768 op2 = negate_rtx (GET_MODE (value), op2);
5769 }
5770
5771 /* Check for an addition with OP2 a constant integer and our first
8a28dbcc
JH
5772 operand a PLUS of a virtual register and something else. In that
5773 case, we want to emit the sum of the virtual register and the
5774 constant first and then add the other value. This allows virtual
5775 register instantiation to simply modify the constant rather than
5776 creating another one around this addition. */
5777 if (code == PLUS && GET_CODE (op2) == CONST_INT
bbf6f052 5778 && GET_CODE (XEXP (value, 0)) == PLUS
f8cfc6aa 5779 && REG_P (XEXP (XEXP (value, 0), 0))
bbf6f052
RK
5780 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5781 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5782 {
8a28dbcc
JH
5783 rtx temp = expand_simple_binop (GET_MODE (value), code,
5784 XEXP (XEXP (value, 0), 0), op2,
5785 subtarget, 0, OPTAB_LIB_WIDEN);
5786 return expand_simple_binop (GET_MODE (value), code, temp,
5787 force_operand (XEXP (XEXP (value,
5788 0), 1), 0),
5789 target, 0, OPTAB_LIB_WIDEN);
bbf6f052 5790 }
3a94c984 5791
8a28dbcc
JH
5792 op1 = force_operand (XEXP (value, 0), subtarget);
5793 op2 = force_operand (op2, NULL_RTX);
5794 switch (code)
5795 {
5796 case MULT:
5797 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5798 case DIV:
5799 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5800 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5801 target, 1, OPTAB_LIB_WIDEN);
5802 else
5803 return expand_divmod (0,
5804 FLOAT_MODE_P (GET_MODE (value))
5805 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5806 GET_MODE (value), op1, op2, target, 0);
5807 break;
5808 case MOD:
5809 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5810 target, 0);
5811 break;
5812 case UDIV:
5813 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5814 target, 1);
5815 break;
5816 case UMOD:
5817 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5818 target, 1);
5819 break;
5820 case ASHIFTRT:
5821 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5822 target, 0, OPTAB_LIB_WIDEN);
5823 break;
5824 default:
5825 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5826 target, 1, OPTAB_LIB_WIDEN);
5827 }
5828 }
ec8e098d 5829 if (UNARY_P (value))
8a28dbcc
JH
5830 {
5831 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5832 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
bbf6f052 5833 }
34e81b5a
RK
5834
5835#ifdef INSN_SCHEDULING
5836  /* On machines that have insn scheduling, we want all memory references to be
5837 explicit, so we need to deal with such paradoxical SUBREGs. */
3c0cb5de 5838 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
34e81b5a
RK
5839 && (GET_MODE_SIZE (GET_MODE (value))
5840 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5841 value
5842 = simplify_gen_subreg (GET_MODE (value),
5843 force_reg (GET_MODE (SUBREG_REG (value)),
5844 force_operand (SUBREG_REG (value),
5845 NULL_RTX)),
5846 GET_MODE (SUBREG_REG (value)),
5847 SUBREG_BYTE (value));
5848#endif
5849
bbf6f052
RK
5850 return value;
5851}
5852\f
bbf6f052 5853/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
5854 EXP can reference X, which is being modified. TOP_P is nonzero if this
5855 call is going to be used to determine whether we need a temporary
ff439b5f
CB
5856 for EXP, as opposed to a recursive call to this function.
5857
5858 It is always safe for this routine to return zero since it merely
5859 searches for optimization opportunities. */
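
/* E.g. when the right-hand side of an assignment mentions the object
   being stored to, as in x = x + 1 for a MEM x, the DECL_RTL found for
   x equals X and we return 0, so callers evaluate the right-hand side
   into a temporary or a fresh pseudo instead of directly into X.  */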
bbf6f052 5860
8f17b5c5 5861int
502b8322 5862safe_from_p (rtx x, tree exp, int top_p)
bbf6f052
RK
5863{
5864 rtx exp_rtl = 0;
5865 int i, nops;
5866
6676e72f
RK
5867 if (x == 0
5868 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
5869 have no way of allocating temporaries of variable size
5870 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5871 So we assume here that something at a higher level has prevented a
f4510f37 5872 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4 5873 do this when X is BLKmode and when we are at the top level. */
d0f062fb 5874 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
f4510f37 5875 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
5876 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5877 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5878 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5879 != INTEGER_CST)
1da68f56
RK
5880 && GET_MODE (x) == BLKmode)
5881 /* If X is in the outgoing argument area, it is always safe. */
3c0cb5de 5882 || (MEM_P (x)
1da68f56
RK
5883 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5884 || (GET_CODE (XEXP (x, 0)) == PLUS
5885 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
bbf6f052
RK
5886 return 1;
5887
5888 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5889 find the underlying pseudo. */
5890 if (GET_CODE (x) == SUBREG)
5891 {
5892 x = SUBREG_REG (x);
f8cfc6aa 5893 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
bbf6f052
RK
5894 return 0;
5895 }
5896
1da68f56 5897 /* Now look at our tree code and possibly recurse. */
bbf6f052
RK
5898 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5899 {
6615c446 5900 case tcc_declaration:
a9772b60 5901 exp_rtl = DECL_RTL_IF_SET (exp);
bbf6f052
RK
5902 break;
5903
6615c446 5904 case tcc_constant:
bbf6f052
RK
5905 return 1;
5906
6615c446 5907 case tcc_exceptional:
bbf6f052 5908 if (TREE_CODE (exp) == TREE_LIST)
f8d4be57
CE
5909 {
5910 while (1)
5911 {
5912 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5913 return 0;
5914 exp = TREE_CHAIN (exp);
5915 if (!exp)
5916 return 1;
5917 if (TREE_CODE (exp) != TREE_LIST)
5918 return safe_from_p (x, exp, 0);
5919 }
5920 }
ff439b5f
CB
5921 else if (TREE_CODE (exp) == ERROR_MARK)
5922 return 1; /* An already-visited SAVE_EXPR? */
bbf6f052
RK
5923 else
5924 return 0;
5925
6615c446 5926 case tcc_statement:
350fae66
RK
5927 /* The only case we look at here is the DECL_INITIAL inside a
5928 DECL_EXPR. */
5929 return (TREE_CODE (exp) != DECL_EXPR
5930 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5931 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5932 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5933
6615c446
JO
5934 case tcc_binary:
5935 case tcc_comparison:
f8d4be57
CE
5936 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5937 return 0;
5d3cc252 5938 /* Fall through. */
f8d4be57 5939
6615c446 5940 case tcc_unary:
f8d4be57 5941 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052 5942
6615c446
JO
5943 case tcc_expression:
5944 case tcc_reference:
bbf6f052
RK
5945 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5946 the expression. If it is set, we conflict iff we are that rtx or
5947 both are in memory. Otherwise, we check all operands of the
5948 expression recursively. */
5949
5950 switch (TREE_CODE (exp))
5951 {
5952 case ADDR_EXPR:
70072ed9
RK
5953 /* If the operand is static or we are static, we can't conflict.
5954 Likewise if we don't conflict with the operand at all. */
5955 if (staticp (TREE_OPERAND (exp, 0))
5956 || TREE_STATIC (exp)
5957 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5958 return 1;
5959
5960 /* Otherwise, the only way this can conflict is if we are taking
5961	     the address of a DECL whose address is part of X, which is
5962 very rare. */
5963 exp = TREE_OPERAND (exp, 0);
5964 if (DECL_P (exp))
5965 {
5966 if (!DECL_RTL_SET_P (exp)
3c0cb5de 5967 || !MEM_P (DECL_RTL (exp)))
70072ed9
RK
5968 return 0;
5969 else
5970 exp_rtl = XEXP (DECL_RTL (exp), 0);
5971 }
5972 break;
bbf6f052 5973
7ccf35ed
DN
5974 case MISALIGNED_INDIRECT_REF:
5975 case ALIGN_INDIRECT_REF:
bbf6f052 5976 case INDIRECT_REF:
3c0cb5de 5977 if (MEM_P (x)
1da68f56
RK
5978 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5979 get_alias_set (exp)))
bbf6f052
RK
5980 return 0;
5981 break;
5982
5983 case CALL_EXPR:
f9808f81
MM
5984 /* Assume that the call will clobber all hard registers and
5985 all of memory. */
f8cfc6aa 5986 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
3c0cb5de 5987 || MEM_P (x))
f9808f81 5988 return 0;
bbf6f052
RK
5989 break;
5990
bbf6f052 5991 case WITH_CLEANUP_EXPR:
5dab5552 5992 case CLEANUP_POINT_EXPR:
ac45df5d 5993 /* Lowered by gimplify.c. */
5b0264cb 5994 gcc_unreachable ();
ac45df5d 5995
bbf6f052 5996 case SAVE_EXPR:
82c82743 5997 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052 5998
e9a25f70
JL
5999 default:
6000 break;
bbf6f052
RK
6001 }
6002
6003 /* If we have an rtx, we do not need to scan our operands. */
6004 if (exp_rtl)
6005 break;
6006
54e4aedb 6007 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
bbf6f052
RK
6008 for (i = 0; i < nops; i++)
6009 if (TREE_OPERAND (exp, i) != 0
e5e809f4 6010 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052 6011 return 0;
8f17b5c5
MM
6012
6013 /* If this is a language-specific tree code, it may require
6014 special handling. */
dbbbbf3b
JDA
6015 if ((unsigned int) TREE_CODE (exp)
6016 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
ae2bcd98 6017 && !lang_hooks.safe_from_p (x, exp))
8f17b5c5 6018 return 0;
6615c446
JO
6019 break;
6020
6021 case tcc_type:
6022 /* Should never get a type here. */
6023 gcc_unreachable ();
bbf6f052
RK
6024 }
6025
6026 /* If we have an rtl, find any enclosed object. Then see if we conflict
6027 with it. */
6028 if (exp_rtl)
6029 {
6030 if (GET_CODE (exp_rtl) == SUBREG)
6031 {
6032 exp_rtl = SUBREG_REG (exp_rtl);
f8cfc6aa 6033 if (REG_P (exp_rtl)
bbf6f052
RK
6034 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6035 return 0;
6036 }
6037
6038 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
1da68f56 6039 are memory and they conflict. */
bbf6f052 6040 return ! (rtx_equal_p (x, exp_rtl)
3c0cb5de 6041 || (MEM_P (x) && MEM_P (exp_rtl)
21117a17 6042 && true_dependence (exp_rtl, VOIDmode, x,
1da68f56 6043 rtx_addr_varies_p)));
bbf6f052
RK
6044 }
6045
6046 /* If we reach here, it is safe. */
6047 return 1;
6048}
6049
14a774a9 6050\f
0d4903b8
RK
6051/* Return the highest power of two that EXP is known to be a multiple of.
6052 This is used in updating alignment of MEMs in array references. */
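
/* For example, for an offset expression such as i * 12 + 4 the result
   is 4: the MULT_EXPR contributes 1 * 4 (the largest power of two
   dividing 12) and the PLUS_EXPR takes the minimum of that and 4, so a
   MEM addressed through it may be assumed to be 4-byte aligned.  */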
6053
9ceca302 6054static unsigned HOST_WIDE_INT
502b8322 6055highest_pow2_factor (tree exp)
0d4903b8 6056{
9ceca302 6057 unsigned HOST_WIDE_INT c0, c1;
0d4903b8
RK
6058
6059 switch (TREE_CODE (exp))
6060 {
6061 case INTEGER_CST:
e0f1be5c
JJ
6062 /* We can find the lowest bit that's a one. If the low
6063 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6064 We need to handle this case since we can find it in a COND_EXPR,
a98ebe2e 6065 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
e0f1be5c 6066 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
3a531a8b 6067 later ICE. */
e0f1be5c 6068 if (TREE_CONSTANT_OVERFLOW (exp))
1ed1b4fb 6069 return BIGGEST_ALIGNMENT;
e0f1be5c 6070 else
0d4903b8 6071 {
e0f1be5c
JJ
6072 /* Note: tree_low_cst is intentionally not used here,
6073 we don't care about the upper bits. */
6074 c0 = TREE_INT_CST_LOW (exp);
6075 c0 &= -c0;
6076 return c0 ? c0 : BIGGEST_ALIGNMENT;
0d4903b8
RK
6077 }
6078 break;
6079
65a07688 6080 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
0d4903b8
RK
6081 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6082 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6083 return MIN (c0, c1);
6084
6085 case MULT_EXPR:
6086 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6087 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6088 return c0 * c1;
6089
6090 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6091 case CEIL_DIV_EXPR:
65a07688
RK
6092 if (integer_pow2p (TREE_OPERAND (exp, 1))
6093 && host_integerp (TREE_OPERAND (exp, 1), 1))
6094 {
6095 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6096 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6097 return MAX (1, c0 / c1);
6098 }
6099 break;
0d4903b8
RK
6100
6101 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6fce44af 6102 case SAVE_EXPR:
0d4903b8
RK
6103 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6104
65a07688
RK
6105 case COMPOUND_EXPR:
6106 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6107
0d4903b8
RK
6108 case COND_EXPR:
6109 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6110 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6111 return MIN (c0, c1);
6112
6113 default:
6114 break;
6115 }
6116
6117 return 1;
6118}
818c0c94 6119
d50a16c4
EB
6120/* Similar, except that the alignment requirements of TARGET are
6121 taken into account. Assume it is at least as aligned as its
6122 type, unless it is a COMPONENT_REF in which case the layout of
6123 the structure gives the alignment. */
818c0c94 6124
9ceca302 6125static unsigned HOST_WIDE_INT
d50a16c4 6126highest_pow2_factor_for_target (tree target, tree exp)
818c0c94 6127{
d50a16c4 6128 unsigned HOST_WIDE_INT target_align, factor;
818c0c94
RH
6129
6130 factor = highest_pow2_factor (exp);
d50a16c4 6131 if (TREE_CODE (target) == COMPONENT_REF)
a4e9ffe5 6132 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
d50a16c4 6133 else
a4e9ffe5 6134 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
d50a16c4 6135 return MAX (factor, target_align);
818c0c94 6136}
0d4903b8 6137\f
6de9cd9a
DN
6138/* Expands variable VAR. */
6139
6140void
6141expand_var (tree var)
6142{
6143 if (DECL_EXTERNAL (var))
6144 return;
6145
6146 if (TREE_STATIC (var))
6147 /* If this is an inlined copy of a static local variable,
6148 look up the original decl. */
6149 var = DECL_ORIGIN (var);
6150
6151 if (TREE_STATIC (var)
6152 ? !TREE_ASM_WRITTEN (var)
6153 : !DECL_RTL_SET_P (var))
6154 {
833b3afe 6155 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
1a186ec5 6156 /* Should be ignored. */;
673fda6b 6157 else if (lang_hooks.expand_decl (var))
6de9cd9a
DN
6158 /* OK. */;
6159 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6160 expand_decl (var);
6161 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
0e6df31e 6162 rest_of_decl_compilation (var, 0, 0);
6de9cd9a 6163 else
5b0264cb
NS
6164 /* No expansion needed. */
6165 gcc_assert (TREE_CODE (var) == TYPE_DECL
6166 || TREE_CODE (var) == CONST_DECL
6167 || TREE_CODE (var) == FUNCTION_DECL
6168 || TREE_CODE (var) == LABEL_DECL);
6de9cd9a
DN
6169 }
6170}
6171
eb698c58
RS
6172/* Subroutine of expand_expr. Expand the two operands of a binary
6173 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6174 The value may be stored in TARGET if TARGET is nonzero. The
6175 MODIFIER argument is as documented by expand_expr. */
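
/* For instance, when the two operands are literally the same tree, as
   in the multiplication x * x, the operand is expanded only once and
   OP1 is simply a copy_rtx of OP0, avoiding duplicate code.  */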
6176
6177static void
6178expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6179 enum expand_modifier modifier)
6180{
6181 if (! safe_from_p (target, exp1, 1))
6182 target = 0;
6183 if (operand_equal_p (exp0, exp1, 0))
6184 {
6185 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6186 *op1 = copy_rtx (*op0);
6187 }
6188 else
6189 {
c67e6e14
RS
6190 /* If we need to preserve evaluation order, copy exp0 into its own
6191 temporary variable so that it can't be clobbered by exp1. */
6192 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6193 exp0 = save_expr (exp0);
eb698c58
RS
6194 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6195 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6196 }
6197}
6198
f47e9b4e 6199\f
70bb498a 6200/* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6377bb9a
RH
6201 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
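
/* For instance, for the address &p->b this function recurses through
   the COMPONENT_REF: the INDIRECT_REF case re-expands p itself, and
   the field's position is then folded into the result, either with
   plus_constant for a constant byte offset or with an explicit
   addition for a variable offset.  */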
6202
6203static rtx
70bb498a
RH
6204expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6205 enum expand_modifier modifier)
6377bb9a
RH
6206{
6207 rtx result, subtarget;
6208 tree inner, offset;
6209 HOST_WIDE_INT bitsize, bitpos;
6210 int volatilep, unsignedp;
6211 enum machine_mode mode1;
6212
6213 /* If we are taking the address of a constant and are at the top level,
6214 we have to use output_constant_def since we can't call force_const_mem
6215 at top level. */
6216 /* ??? This should be considered a front-end bug. We should not be
6217 generating ADDR_EXPR of something that isn't an LVALUE. The only
6218 exception here is STRING_CST. */
6219 if (TREE_CODE (exp) == CONSTRUCTOR
6615c446 6220 || CONSTANT_CLASS_P (exp))
6377bb9a
RH
6221 return XEXP (output_constant_def (exp, 0), 0);
6222
6223 /* Everything must be something allowed by is_gimple_addressable. */
6224 switch (TREE_CODE (exp))
6225 {
6226 case INDIRECT_REF:
6227 /* This case will happen via recursion for &a->b. */
6228 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
6229
6230 case CONST_DECL:
6231 /* Recurse and make the output_constant_def clause above handle this. */
70bb498a 6232 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
b0b324b0 6233 tmode, modifier);
6377bb9a
RH
6234
6235 case REALPART_EXPR:
6236 /* The real part of the complex number is always first, therefore
6237 the address is the same as the address of the parent object. */
6238 offset = 0;
6239 bitpos = 0;
6240 inner = TREE_OPERAND (exp, 0);
6241 break;
6242
6243 case IMAGPART_EXPR:
6244 /* The imaginary part of the complex number is always second.
2a7e31df 6245 The expression is therefore always offset by the size of the
6377bb9a
RH
6246 scalar type. */
6247 offset = 0;
6248 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6249 inner = TREE_OPERAND (exp, 0);
6250 break;
6251
6252 default:
6253 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6254 expand_expr, as that can have various side effects; LABEL_DECLs for
6255 example, may not have their DECL_RTL set yet. Assume language
6256 specific tree nodes can be expanded in some interesting way. */
6257 if (DECL_P (exp)
6258 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6259 {
6260 result = expand_expr (exp, target, tmode,
6261 modifier == EXPAND_INITIALIZER
6262 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6263
6264 /* If the DECL isn't in memory, then the DECL wasn't properly
6265 marked TREE_ADDRESSABLE, which will be either a front-end
6266 or a tree optimizer bug. */
2ca202e7 6267 gcc_assert (MEM_P (result));
6377bb9a
RH
6268 result = XEXP (result, 0);
6269
6270 /* ??? Is this needed anymore? */
b0b324b0 6271 if (DECL_P (exp) && !TREE_USED (exp) == 0)
6377bb9a
RH
6272 {
6273 assemble_external (exp);
6274 TREE_USED (exp) = 1;
6275 }
6276
6277 if (modifier != EXPAND_INITIALIZER
6278 && modifier != EXPAND_CONST_ADDRESS)
6279 result = force_operand (result, target);
6280 return result;
6281 }
6282
2614034e
EB
6283 /* Pass FALSE as the last argument to get_inner_reference although
6284 we are expanding to RTL. The rationale is that we know how to
6285 handle "aligning nodes" here: we can just bypass them because
6286 they won't change the final object whose address will be returned
6287 (they actually exist only for that purpose). */
6377bb9a 6288 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2614034e 6289 &mode1, &unsignedp, &volatilep, false);
6377bb9a
RH
6290 break;
6291 }
6292
6293 /* We must have made progress. */
5b0264cb 6294 gcc_assert (inner != exp);
6377bb9a
RH
6295
6296 subtarget = offset || bitpos ? NULL_RTX : target;
70bb498a 6297 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6377bb9a 6298
6377bb9a
RH
6299 if (offset)
6300 {
6301 rtx tmp;
6302
6303 if (modifier != EXPAND_NORMAL)
6304 result = force_operand (result, NULL);
6305 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6306
b0b324b0
RH
6307 result = convert_memory_address (tmode, result);
6308 tmp = convert_memory_address (tmode, tmp);
6309
6377bb9a
RH
6310 if (modifier == EXPAND_SUM)
6311 result = gen_rtx_PLUS (tmode, result, tmp);
6312 else
6313 {
6314 subtarget = bitpos ? NULL_RTX : target;
6315 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6316 1, OPTAB_LIB_WIDEN);
6317 }
6318 }
6319
6320 if (bitpos)
6321 {
6322 /* Someone beforehand should have rejected taking the address
6323 of such an object. */
b0b324b0 6324 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6377bb9a
RH
6325
6326 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6327 if (modifier < EXPAND_SUM)
6328 result = force_operand (result, target);
6329 }
6330
6331 return result;
6332}
6333
70bb498a
RH
6334/* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6335 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6336
6337static rtx
6338expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6339 enum expand_modifier modifier)
6340{
6341 enum machine_mode rmode;
6342 rtx result;
6343
b0b324b0
RH
6344 /* Target mode of VOIDmode says "whatever's natural". */
6345 if (tmode == VOIDmode)
6346 tmode = TYPE_MODE (TREE_TYPE (exp));
6347
6348 /* We can get called with some Weird Things if the user does silliness
6349 like "(short) &a". In that case, convert_memory_address won't do
6350 the right thing, so ignore the given target mode. */
103b83ea 6351 if (tmode != Pmode && tmode != ptr_mode)
b0b324b0
RH
6352 tmode = Pmode;
6353
70bb498a
RH
6354 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6355 tmode, modifier);
6356
 6357 /* Despite expand_expr's claims concerning ignoring TMODE when not
b0b324b0
RH
6358 strictly convenient, stuff breaks if we don't honor it. Note
6359 that combined with the above, we only do this for pointer modes. */
70bb498a
RH
6360 rmode = GET_MODE (result);
6361 if (rmode == VOIDmode)
6362 rmode = tmode;
6363 if (rmode != tmode)
6364 result = convert_memory_address (tmode, result);
b0b324b0 6365
70bb498a
RH
6366 return result;
6367}
6368
6369
bbf6f052
RK
6370/* expand_expr: generate code for computing expression EXP.
6371 An rtx for the computed value is returned. The value is never null.
6372 In the case of a void EXP, const0_rtx is returned.
6373
6374 The value may be stored in TARGET if TARGET is nonzero.
6375 TARGET is just a suggestion; callers must assume that
6376 the rtx returned may not be the same as TARGET.
6377
6378 If TARGET is CONST0_RTX, it means that the value will be ignored.
6379
6380 If TMODE is not VOIDmode, it suggests generating the
6381 result in mode TMODE. But this is done only when convenient.
 6382 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6383 TMODE is just a suggestion; callers must assume that
6384 the rtx returned may not have mode TMODE.
6385
d6a5ac33
RK
6386 Note that TARGET may have neither TMODE nor MODE. In that case, it
6387 probably will not be used.
bbf6f052
RK
6388
6389 If MODIFIER is EXPAND_SUM then when EXP is an addition
6390 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6391 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6392 products as above, or REG or MEM, or constant.
6393 Ordinarily in such cases we would output mul or add instructions
6394 and then return a pseudo reg containing the sum.
6395
6396 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6397 it also marks a label as absolutely required (it can't be dead).
26fcb35a 6398 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
6399 This is used for outputting expressions used in initializers.
6400
6401 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6402 with a constant address even if that address is not normally legitimate.
8403445a
AM
6403 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6404
6405 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6406 a call parameter. Such targets require special care as we haven't yet
6407 marked TARGET so that it's safe from being trashed by libcalls. We
6408 don't want to use TARGET for anything but the final result;
 6409 intermediate values must go elsewhere. Additionally, calls to
caf93cb0 6410 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
0fab64a3
MM
6411
6412 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6413 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6414 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6415 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6416 recursively. */
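/* As an informal illustration only, not part of the interface proper: a
 caller that simply wants the value of EXP in whatever mode is natural,
 with no suggested target, would typically write

 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

 whereas code emitting static initializers passes EXPAND_INITIALIZER so
 that constant addresses are acceptable and extensions stay symbolic, as
 described above. */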
bbf6f052 6417
6de9cd9a
DN
6418static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6419 enum expand_modifier, rtx *);
6420
bbf6f052 6421rtx
0fab64a3
MM
6422expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6423 enum expand_modifier modifier, rtx *alt_rtl)
6de9cd9a
DN
6424{
6425 int rn = -1;
6426 rtx ret, last = NULL;
6427
6428 /* Handle ERROR_MARK before anybody tries to access its type. */
6429 if (TREE_CODE (exp) == ERROR_MARK
6430 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6431 {
6432 ret = CONST0_RTX (tmode);
6433 return ret ? ret : const0_rtx;
6434 }
6435
6436 if (flag_non_call_exceptions)
6437 {
6438 rn = lookup_stmt_eh_region (exp);
6439 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6440 if (rn >= 0)
6441 last = get_last_insn ();
6442 }
6443
6444 /* If this is an expression of some kind and it has an associated line
caf93cb0 6445 number, then emit the line number before expanding the expression.
6de9cd9a
DN
6446
6447 We need to save and restore the file and line information so that
6448 errors discovered during expansion are emitted with the right
caf93cb0 6449 information. It would be better if the diagnostic routines
6de9cd9a
DN
6450 used the file/line information embedded in the tree nodes rather
6451 than globals. */
6452 if (cfun && EXPR_HAS_LOCATION (exp))
6453 {
6454 location_t saved_location = input_location;
6455 input_location = EXPR_LOCATION (exp);
6456 emit_line_note (input_location);
caf93cb0 6457
6de9cd9a 6458 /* Record where the insns produced belong. */
1ea463a2 6459 record_block_change (TREE_BLOCK (exp));
6de9cd9a
DN
6460
6461 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6462
6463 input_location = saved_location;
6464 }
6465 else
6466 {
6467 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6468 }
6469
6470 /* If using non-call exceptions, mark all insns that may trap.
6471 expand_call() will mark CALL_INSNs before we get to this code,
6472 but it doesn't handle libcalls, and these may trap. */
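 /* An illustrative case: a division expanded through a libcall such as
 __divsi3 is emitted without a REG_EH_REGION note, yet under
 -fnon-call-exceptions it may throw, so the loop below attaches the
 note for region RN after the fact. */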
6473 if (rn >= 0)
caf93cb0 6474 {
6de9cd9a 6475 rtx insn;
caf93cb0 6476 for (insn = next_real_insn (last); insn;
6de9cd9a
DN
6477 insn = next_real_insn (insn))
6478 {
6479 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6480 /* If we want exceptions for non-call insns, any
6481 may_trap_p instruction may throw. */
6482 && GET_CODE (PATTERN (insn)) != CLOBBER
6483 && GET_CODE (PATTERN (insn)) != USE
4b4bf941 6484 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6de9cd9a
DN
6485 {
6486 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6487 REG_NOTES (insn));
6488 }
6489 }
6490 }
6491
6492 return ret;
6493}
6494
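/* Worker for expand_expr_real above. It takes the same arguments but
 assumes the wrapper has already handled ERROR_MARK, the source
 location, and the non-call-exception bookkeeping. */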
6495static rtx
6496expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6497 enum expand_modifier modifier, rtx *alt_rtl)
bbf6f052 6498{
b3694847 6499 rtx op0, op1, temp;
bbf6f052 6500 tree type = TREE_TYPE (exp);
8df83eae 6501 int unsignedp;
b3694847
SS
6502 enum machine_mode mode;
6503 enum tree_code code = TREE_CODE (exp);
bbf6f052 6504 optab this_optab;
68557e14
ML
6505 rtx subtarget, original_target;
6506 int ignore;
bbf6f052 6507 tree context;
bc15d0ef
JM
6508 bool reduce_bit_field = false;
6509#define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6510 ? reduce_to_bit_field_precision ((expr), \
6511 target, \
6512 type) \
6513 : (expr))
bbf6f052 6514
68557e14 6515 mode = TYPE_MODE (type);
8df83eae 6516 unsignedp = TYPE_UNSIGNED (type);
bc15d0ef
JM
6517 if (lang_hooks.reduce_bit_field_operations
6518 && TREE_CODE (type) == INTEGER_TYPE
6519 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6520 {
6521 /* An operation in what may be a bit-field type needs the
6522 result to be reduced to the precision of the bit-field type,
6523 which is narrower than that of the type's mode. */
6524 reduce_bit_field = true;
6525 if (modifier == EXPAND_STACK_PARM)
6526 target = 0;
6527 }
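 /* For example (purely illustrative), arithmetic on a type whose
 TYPE_PRECISION is 3 is still carried out in the mode of its container
 (say QImode); REDUCE_BIT_FIELD then masks or sign-extends the result
 back down to 3 bits via reduce_to_bit_field_precision. */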
8df83eae 6528
68557e14 6529 /* Use subtarget as the target for operand 0 of a binary operation. */
296b4ed9 6530 subtarget = get_subtarget (target);
68557e14
ML
6531 original_target = target;
6532 ignore = (target == const0_rtx
6533 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
3a18db48
AP
6534 || code == CONVERT_EXPR || code == COND_EXPR
6535 || code == VIEW_CONVERT_EXPR)
68557e14
ML
6536 && TREE_CODE (type) == VOID_TYPE));
6537
dd27116b
RK
6538 /* If we are going to ignore this result, we need only do something
6539 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
6540 is, short-circuit the most common cases here. Note that we must
6541 not call expand_expr with anything but const0_rtx in case this
6542 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 6543
dd27116b
RK
6544 if (ignore)
6545 {
6546 if (! TREE_SIDE_EFFECTS (exp))
6547 return const0_rtx;
6548
14a774a9
RK
6549 /* Ensure we reference a volatile object even if value is ignored, but
6550 don't do this if all we are doing is taking its address. */
dd27116b
RK
6551 if (TREE_THIS_VOLATILE (exp)
6552 && TREE_CODE (exp) != FUNCTION_DECL
14a774a9
RK
6553 && mode != VOIDmode && mode != BLKmode
6554 && modifier != EXPAND_CONST_ADDRESS)
dd27116b 6555 {
37a08a29 6556 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
3c0cb5de 6557 if (MEM_P (temp))
dd27116b
RK
6558 temp = copy_to_reg (temp);
6559 return const0_rtx;
6560 }
6561
6615c446
JO
6562 if (TREE_CODE_CLASS (code) == tcc_unary
6563 || code == COMPONENT_REF || code == INDIRECT_REF)
37a08a29
RK
6564 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6565 modifier);
6566
6615c446
JO
6567 else if (TREE_CODE_CLASS (code) == tcc_binary
6568 || TREE_CODE_CLASS (code) == tcc_comparison
b4e3fabb 6569 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
dd27116b 6570 {
37a08a29
RK
6571 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6572 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
dd27116b
RK
6573 return const0_rtx;
6574 }
14a774a9
RK
6575 else if (code == BIT_FIELD_REF)
6576 {
37a08a29
RK
6577 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6578 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6579 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
14a774a9
RK
6580 return const0_rtx;
6581 }
37a08a29 6582
90764a87 6583 target = 0;
dd27116b 6584 }
bbf6f052 6585
e44842fe
RK
 6586 /* If we will do cse, generate all results into pseudo registers
6587 since 1) that allows cse to find more things
6588 and 2) otherwise cse could produce an insn the machine
4977bab6
ZW
6589 cannot support. An exception is a CONSTRUCTOR into a multi-word
6590 MEM: that's much more likely to be most efficient into the MEM.
6591 Another is a CALL_EXPR which must return in memory. */
e44842fe 6592
bbf6f052 6593 if (! cse_not_expected && mode != BLKmode && target
f8cfc6aa 6594 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
4977bab6 6595 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
61f71b34 6596 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
8403445a 6597 target = 0;
bbf6f052 6598
bbf6f052
RK
6599 switch (code)
6600 {
6601 case LABEL_DECL:
b552441b
RS
6602 {
6603 tree function = decl_function_context (exp);
c5c76735 6604
6de9cd9a
DN
6605 temp = label_rtx (exp);
6606 temp = gen_rtx_LABEL_REF (Pmode, temp);
6607
d0977240 6608 if (function != current_function_decl
6de9cd9a
DN
6609 && function != 0)
6610 LABEL_REF_NONLOCAL_P (temp) = 1;
6611
6612 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
26fcb35a 6613 return temp;
b552441b 6614 }
bbf6f052 6615
8b11a64c
ZD
6616 case SSA_NAME:
6617 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6618 NULL);
6619
bbf6f052 6620 case PARM_DECL:
bbf6f052 6621 case VAR_DECL:
2dca20cd
RS
6622 /* If a static var's type was incomplete when the decl was written,
6623 but the type is complete now, lay out the decl now. */
ca06cfe6
RH
6624 if (DECL_SIZE (exp) == 0
6625 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
2dca20cd 6626 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
a46666a9 6627 layout_decl (exp, 0);
921b3427 6628
0f41302f 6629 /* ... fall through ... */
d6a5ac33 6630
2dca20cd 6631 case FUNCTION_DECL:
bbf6f052 6632 case RESULT_DECL:
5b0264cb 6633 gcc_assert (DECL_RTL (exp));
d6a5ac33 6634
e44842fe
RK
6635 /* Ensure variable marked as used even if it doesn't go through
6636 a parser. If it hasn't be used yet, write out an external
6637 definition. */
6638 if (! TREE_USED (exp))
6639 {
6640 assemble_external (exp);
6641 TREE_USED (exp) = 1;
6642 }
6643
dc6d66b3
RK
6644 /* Show we haven't gotten RTL for this yet. */
6645 temp = 0;
6646
ab8907ef
RH
6647 /* Variables inherited from containing functions should have
6648 been lowered by this point. */
bbf6f052 6649 context = decl_function_context (exp);
5b0264cb
NS
6650 gcc_assert (!context
6651 || context == current_function_decl
6652 || TREE_STATIC (exp)
6653 /* ??? C++ creates functions that are not TREE_STATIC. */
6654 || TREE_CODE (exp) == FUNCTION_DECL);
4af3895e 6655
bbf6f052
RK
6656 /* This is the case of an array whose size is to be determined
6657 from its initializer, while the initializer is still being parsed.
6658 See expand_decl. */
d6a5ac33 6659
5b0264cb 6660 if (MEM_P (DECL_RTL (exp))
f8cfc6aa 6661 && REG_P (XEXP (DECL_RTL (exp), 0)))
792760b9 6662 temp = validize_mem (DECL_RTL (exp));
d6a5ac33
RK
6663
6664 /* If DECL_RTL is memory, we are in the normal case and either
6665 the address is not valid or it is not a register and -fforce-addr
6666 is specified, get the address into a register. */
6667
3c0cb5de 6668 else if (MEM_P (DECL_RTL (exp))
dc6d66b3
RK
6669 && modifier != EXPAND_CONST_ADDRESS
6670 && modifier != EXPAND_SUM
6671 && modifier != EXPAND_INITIALIZER
6672 && (! memory_address_p (DECL_MODE (exp),
6673 XEXP (DECL_RTL (exp), 0))
6674 || (flag_force_addr
f8cfc6aa 6675 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
0fab64a3
MM
6676 {
6677 if (alt_rtl)
6678 *alt_rtl = DECL_RTL (exp);
6679 temp = replace_equiv_address (DECL_RTL (exp),
6680 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6681 }
1499e0a8 6682
dc6d66b3 6683 /* If we got something, return it. But first, set the alignment
04956a1a 6684 if the address is a register. */
dc6d66b3
RK
6685 if (temp != 0)
6686 {
3c0cb5de 6687 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
bdb429a5 6688 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
dc6d66b3
RK
6689
6690 return temp;
6691 }
6692
1499e0a8
RK
6693 /* If the mode of DECL_RTL does not match that of the decl, it
6694 must be a promoted value. We return a SUBREG of the wanted mode,
6695 but mark it so that we know that it was already extended. */
6696
f8cfc6aa 6697 if (REG_P (DECL_RTL (exp))
7254c5fa 6698 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
1499e0a8 6699 {
5b0264cb
NS
6700 enum machine_mode pmode;
6701
1499e0a8
RK
6702 /* Get the signedness used for this variable. Ensure we get the
6703 same mode we got when the variable was declared. */
5b0264cb
NS
6704 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6705 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
6706 gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
1499e0a8 6707
ddef6bc7 6708 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
1499e0a8 6709 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6710 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
1499e0a8
RK
6711 return temp;
6712 }
6713
bbf6f052
RK
6714 return DECL_RTL (exp);
6715
6716 case INTEGER_CST:
d8a50944 6717 temp = immed_double_const (TREE_INT_CST_LOW (exp),
05bccae2 6718 TREE_INT_CST_HIGH (exp), mode);
bbf6f052 6719
d8a50944
RH
6720 /* ??? If overflow is set, fold will have done an incomplete job,
6721 which can result in (plus xx (const_int 0)), which can get
6722 simplified by validate_replace_rtx during virtual register
6723 instantiation, which can result in unrecognizable insns.
6724 Avoid this by forcing all overflows into registers. */
c2e9dc85
RH
6725 if (TREE_CONSTANT_OVERFLOW (exp)
6726 && modifier != EXPAND_INITIALIZER)
d8a50944
RH
6727 temp = force_reg (mode, temp);
6728
6729 return temp;
6730
d744e06e 6731 case VECTOR_CST:
3a021db2
PB
6732 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6733 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6734 return const_vector_from_tree (exp);
caf93cb0 6735 else
3a021db2
PB
6736 return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
6737 TREE_VECTOR_CST_ELTS (exp)),
6738 ignore ? const0_rtx : target, tmode, modifier);
d744e06e 6739
bbf6f052 6740 case CONST_DECL:
8403445a 6741 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
bbf6f052
RK
6742
6743 case REAL_CST:
6744 /* If optimized, generate immediate CONST_DOUBLE
3a94c984
KH
6745 which will be turned into memory by reload if necessary.
6746
bbf6f052
RK
6747 We used to force a register so that loop.c could see it. But
6748 this does not allow gen_* patterns to perform optimizations with
6749 the constants. It also produces two insns in cases like "x = 1.0;".
6750 On most machines, floating-point constants are not permitted in
6751 many insns, so we'd end up copying it to a register in any case.
6752
6753 Now, we do the copying in expand_binop, if appropriate. */
5692c7bc
ZW
6754 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6755 TYPE_MODE (TREE_TYPE (exp)));
bbf6f052
RK
6756
6757 case COMPLEX_CST:
9ad58e09
RS
6758 /* Handle evaluating a complex constant in a CONCAT target. */
6759 if (original_target && GET_CODE (original_target) == CONCAT)
6760 {
6761 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6762 rtx rtarg, itarg;
6763
6764 rtarg = XEXP (original_target, 0);
6765 itarg = XEXP (original_target, 1);
6766
6767 /* Move the real and imaginary parts separately. */
6768 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6769 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6770
6771 if (op0 != rtarg)
6772 emit_move_insn (rtarg, op0);
6773 if (op1 != itarg)
6774 emit_move_insn (itarg, op1);
6775
6776 return original_target;
6777 }
6778
71c0e7fc 6779 /* ... fall through ... */
9ad58e09 6780
bbf6f052 6781 case STRING_CST:
afc6aaab 6782 temp = output_constant_def (exp, 1);
bbf6f052 6783
afc6aaab 6784 /* temp contains a constant address.
bbf6f052
RK
6785 On RISC machines where a constant address isn't valid,
6786 make some insns to get that address into a register. */
afc6aaab 6787 if (modifier != EXPAND_CONST_ADDRESS
bbf6f052
RK
6788 && modifier != EXPAND_INITIALIZER
6789 && modifier != EXPAND_SUM
afc6aaab
ZW
6790 && (! memory_address_p (mode, XEXP (temp, 0))
6791 || flag_force_addr))
6792 return replace_equiv_address (temp,
6793 copy_rtx (XEXP (temp, 0)));
6794 return temp;
bbf6f052
RK
6795
6796 case SAVE_EXPR:
82c82743
RH
6797 {
6798 tree val = TREE_OPERAND (exp, 0);
6799 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
e5e809f4 6800
7f5e6307 6801 if (!SAVE_EXPR_RESOLVED_P (exp))
82c82743
RH
6802 {
6803 /* We can indeed still hit this case, typically via builtin
6804 expanders calling save_expr immediately before expanding
6805 something. Assume this means that we only have to deal
6806 with non-BLKmode values. */
5b0264cb 6807 gcc_assert (GET_MODE (ret) != BLKmode);
1499e0a8 6808
82c82743
RH
6809 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6810 DECL_ARTIFICIAL (val) = 1;
7f5e6307 6811 DECL_IGNORED_P (val) = 1;
82c82743 6812 TREE_OPERAND (exp, 0) = val;
7f5e6307 6813 SAVE_EXPR_RESOLVED_P (exp) = 1;
1499e0a8 6814
82c82743
RH
6815 if (!CONSTANT_P (ret))
6816 ret = copy_to_reg (ret);
6817 SET_DECL_RTL (val, ret);
6818 }
1499e0a8 6819
82c82743
RH
6820 return ret;
6821 }
bbf6f052 6822
70e6ca43
APB
6823 case GOTO_EXPR:
6824 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6825 expand_goto (TREE_OPERAND (exp, 0));
6826 else
6827 expand_computed_goto (TREE_OPERAND (exp, 0));
6828 return const0_rtx;
6829
bbf6f052 6830 case CONSTRUCTOR:
dd27116b
RK
6831 /* If we don't need the result, just ensure we evaluate any
6832 subexpressions. */
6833 if (ignore)
6834 {
6835 tree elt;
37a08a29 6836
dd27116b 6837 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
37a08a29
RK
6838 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6839
dd27116b
RK
6840 return const0_rtx;
6841 }
3207b172 6842
4af3895e
JVA
6843 /* All elts simple constants => refer to a constant in memory. But
6844 if this is a non-BLKmode mode, let it store a field at a time
6845 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 6846 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
6847 store directly into the target unless the type is large enough
6848 that memcpy will be used. If we are making an initializer and
00182e1e
AH
6849 all operands are constant, put it in memory as well.
6850
6851 FIXME: Avoid trying to fill vector constructors piece-meal.
6852 Output them with output_constant_def below unless we're sure
6853 they're zeros. This should go away when vector initializers
6854 are treated like VECTOR_CST instead of arrays.
6855 */
dd27116b 6856 else if ((TREE_STATIC (exp)
3207b172 6857 && ((mode == BLKmode
e5e809f4 6858 && ! (target != 0 && safe_from_p (target, exp, 1)))
d720b9d1 6859 || TREE_ADDRESSABLE (exp)
19caa751 6860 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
3a94c984 6861 && (! MOVE_BY_PIECES_P
19caa751
RK
6862 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6863 TYPE_ALIGN (type)))
6de9cd9a 6864 && ! mostly_zeros_p (exp))))
f59700f9
RK
6865 || ((modifier == EXPAND_INITIALIZER
6866 || modifier == EXPAND_CONST_ADDRESS)
6867 && TREE_CONSTANT (exp)))
bbf6f052 6868 {
bd7cf17e 6869 rtx constructor = output_constant_def (exp, 1);
19caa751 6870
b552441b
RS
6871 if (modifier != EXPAND_CONST_ADDRESS
6872 && modifier != EXPAND_INITIALIZER
792760b9
RK
6873 && modifier != EXPAND_SUM)
6874 constructor = validize_mem (constructor);
6875
bbf6f052
RK
6876 return constructor;
6877 }
bbf6f052
RK
6878 else
6879 {
e9ac02a6
JW
6880 /* Handle calls that pass values in multiple non-contiguous
6881 locations. The Irix 6 ABI has examples of this. */
e5e809f4 6882 if (target == 0 || ! safe_from_p (target, exp, 1)
8403445a
AM
6883 || GET_CODE (target) == PARALLEL
6884 || modifier == EXPAND_STACK_PARM)
1da68f56
RK
6885 target
6886 = assign_temp (build_qualified_type (type,
6887 (TYPE_QUALS (type)
6888 | (TREE_READONLY (exp)
6889 * TYPE_QUAL_CONST))),
c24ae149 6890 0, TREE_ADDRESSABLE (exp), 1);
07604beb 6891
dbb5c281 6892 store_constructor (exp, target, 0, int_expr_size (exp));
bbf6f052
RK
6893 return target;
6894 }
6895
7ccf35ed
DN
6896 case MISALIGNED_INDIRECT_REF:
6897 case ALIGN_INDIRECT_REF:
bbf6f052
RK
6898 case INDIRECT_REF:
6899 {
6900 tree exp1 = TREE_OPERAND (exp, 0);
3a94c984 6901
6de9cd9a
DN
6902 if (modifier != EXPAND_WRITE)
6903 {
6904 tree t;
6905
6906 t = fold_read_from_constant_string (exp);
6907 if (t)
6908 return expand_expr (t, target, tmode, modifier);
6909 }
bbf6f052 6910
405f0da6
JW
6911 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6912 op0 = memory_address (mode, op0);
7ccf35ed
DN
6913
6914 if (code == ALIGN_INDIRECT_REF)
6915 {
6916 int align = TYPE_ALIGN_UNIT (type);
6917 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
6918 op0 = memory_address (mode, op0);
6919 }
6920
38a448ca 6921 temp = gen_rtx_MEM (mode, op0);
8b11a64c 6922
ac182688 6923 set_mem_attributes (temp, exp, 0);
1125706f 6924
1e0598e2
RH
6925 /* Resolve the misalignment now, so that we don't have to remember
6926 to resolve it later. Of course, this only works for reads. */
6927 /* ??? When we get around to supporting writes, we'll have to handle
6928 this in store_expr directly. The vectorizer isn't generating
6929 those yet, however. */
6930 if (code == MISALIGNED_INDIRECT_REF)
6931 {
6932 int icode;
6933 rtx reg, insn;
6934
6935 gcc_assert (modifier == EXPAND_NORMAL);
6936
6937 /* The vectorizer should have already checked the mode. */
6938 icode = movmisalign_optab->handlers[mode].insn_code;
6939 gcc_assert (icode != CODE_FOR_nothing);
6940
6941 /* We've already validated the memory, and we're creating a
6942 new pseudo destination. The predicates really can't fail. */
6943 reg = gen_reg_rtx (mode);
6944
6945 /* Nor can the insn generator. */
6946 insn = GEN_FCN (icode) (reg, temp);
6947 emit_insn (insn);
6948
6949 return reg;
6950 }
6951
8c8a8e34
JW
6952 return temp;
6953 }
bbf6f052 6954
ac182688
ZD
6955 case TARGET_MEM_REF:
6956 {
6957 struct mem_address addr;
6958
6959 get_address_description (exp, &addr);
6960 op0 = addr_for_mem_ref (&addr, true);
6961 op0 = memory_address (mode, op0);
6962 temp = gen_rtx_MEM (mode, op0);
6963 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
6964 }
6965 return temp;
6966
bbf6f052 6967 case ARRAY_REF:
6de9cd9a 6968
bbf6f052 6969 {
742920c7 6970 tree array = TREE_OPERAND (exp, 0);
45d8710e 6971 tree index = TREE_OPERAND (exp, 1);
742920c7 6972
742920c7 6973 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
6974 This is not done in fold so it won't happen inside &.
6975 Don't fold if this is for wide characters since it's too
6976 difficult to do correctly and this is a very rare case. */
742920c7 6977
017e1b43
RH
6978 if (modifier != EXPAND_CONST_ADDRESS
6979 && modifier != EXPAND_INITIALIZER
6de9cd9a
DN
6980 && modifier != EXPAND_MEMORY)
6981 {
6982 tree t = fold_read_from_constant_string (exp);
6983
6984 if (t)
6985 return expand_expr (t, target, tmode, modifier);
6986 }
bbf6f052 6987
742920c7
RK
6988 /* If this is a constant index into a constant array,
6989 just get the value from the array. Handle both the cases when
6990 we have an explicit constructor and when our operand is a variable
6991 that was declared const. */
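 /* A hypothetical illustration: given
 "static const int primes[] = {2, 3, 5, 7};", a read of primes[2]
 can be replaced by the constant 5 right here, provided the
 initializer is visible and the variable binds locally. */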
4af3895e 6992
017e1b43
RH
6993 if (modifier != EXPAND_CONST_ADDRESS
6994 && modifier != EXPAND_INITIALIZER
6995 && modifier != EXPAND_MEMORY
6996 && TREE_CODE (array) == CONSTRUCTOR
6997 && ! TREE_SIDE_EFFECTS (array)
45d8710e 6998 && TREE_CODE (index) == INTEGER_CST)
742920c7 6999 {
05bccae2
RK
7000 tree elem;
7001
45d8710e
RK
7002 for (elem = CONSTRUCTOR_ELTS (array);
7003 (elem && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7004 elem = TREE_CHAIN (elem))
05bccae2
RK
7005 ;
7006
45d8710e 7007 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
37a08a29
RK
7008 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7009 modifier);
742920c7 7010 }
3a94c984 7011
742920c7 7012 else if (optimize >= 1
cb5fa0f8
RK
7013 && modifier != EXPAND_CONST_ADDRESS
7014 && modifier != EXPAND_INITIALIZER
017e1b43 7015 && modifier != EXPAND_MEMORY
742920c7
RK
7016 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7017 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
beb0c2e0
RH
7018 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7019 && targetm.binds_local_p (array))
742920c7 7020 {
08293add 7021 if (TREE_CODE (index) == INTEGER_CST)
742920c7
RK
7022 {
7023 tree init = DECL_INITIAL (array);
7024
742920c7
RK
7025 if (TREE_CODE (init) == CONSTRUCTOR)
7026 {
665f2503 7027 tree elem;
742920c7 7028
05bccae2 7029 for (elem = CONSTRUCTOR_ELTS (init);
5cb1bea4
JM
7030 (elem
7031 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
05bccae2
RK
7032 elem = TREE_CHAIN (elem))
7033 ;
7034
c54b0a5e 7035 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
742920c7 7036 return expand_expr (fold (TREE_VALUE (elem)), target,
37a08a29 7037 tmode, modifier);
742920c7
RK
7038 }
7039 else if (TREE_CODE (init) == STRING_CST
05bccae2
RK
7040 && 0 > compare_tree_int (index,
7041 TREE_STRING_LENGTH (init)))
5c80f6e6
JJ
7042 {
7043 tree type = TREE_TYPE (TREE_TYPE (init));
7044 enum machine_mode mode = TYPE_MODE (type);
7045
7046 if (GET_MODE_CLASS (mode) == MODE_INT
7047 && GET_MODE_SIZE (mode) == 1)
21ef78aa
DE
7048 return gen_int_mode (TREE_STRING_POINTER (init)
7049 [TREE_INT_CST_LOW (index)], mode);
5c80f6e6 7050 }
742920c7
RK
7051 }
7052 }
7053 }
afc6aaab 7054 goto normal_inner_ref;
bbf6f052
RK
7055
7056 case COMPONENT_REF:
4af3895e 7057 /* If the operand is a CONSTRUCTOR, we can just extract the
afc6aaab
ZW
7058 appropriate field if it is present. */
7059 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4af3895e
JVA
7060 {
7061 tree elt;
7062
7063 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7064 elt = TREE_CHAIN (elt))
86b5812c
RK
7065 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7066 /* We can normally use the value of the field in the
7067 CONSTRUCTOR. However, if this is a bitfield in
7068 an integral mode that we can fit in a HOST_WIDE_INT,
7069 we must mask only the number of bits in the bitfield,
7070 since this is done implicitly by the constructor. If
7071 the bitfield does not meet either of those conditions,
7072 we can't do this optimization. */
7073 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7074 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7075 == MODE_INT)
7076 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7077 <= HOST_BITS_PER_WIDE_INT))))
7078 {
8403445a
AM
7079 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7080 && modifier == EXPAND_STACK_PARM)
7081 target = 0;
3a94c984 7082 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
86b5812c
RK
7083 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7084 {
9df2c88c
RK
7085 HOST_WIDE_INT bitsize
7086 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
22273300
JJ
7087 enum machine_mode imode
7088 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
86b5812c 7089
8df83eae 7090 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
86b5812c
RK
7091 {
7092 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
22273300 7093 op0 = expand_and (imode, op0, op1, target);
86b5812c
RK
7094 }
7095 else
7096 {
7097 tree count
4a90aeeb 7098 = build_int_cst (NULL_TREE,
7d60be94 7099 GET_MODE_BITSIZE (imode) - bitsize);
86b5812c
RK
7100
7101 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7102 target, 0);
7103 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7104 target, 0);
7105 }
7106 }
7107
7108 return op0;
7109 }
4af3895e 7110 }
afc6aaab 7111 goto normal_inner_ref;
4af3895e 7112
afc6aaab
ZW
7113 case BIT_FIELD_REF:
7114 case ARRAY_RANGE_REF:
7115 normal_inner_ref:
bbf6f052
RK
7116 {
7117 enum machine_mode mode1;
770ae6cc 7118 HOST_WIDE_INT bitsize, bitpos;
7bb0943f 7119 tree offset;
bbf6f052 7120 int volatilep = 0;
839c4796 7121 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2614034e 7122 &mode1, &unsignedp, &volatilep, true);
f47e9b4e 7123 rtx orig_op0;
bbf6f052 7124
e7f3c83f
RK
7125 /* If we got back the original object, something is wrong. Perhaps
7126 we are evaluating an expression too early. In any event, don't
7127 infinitely recurse. */
5b0264cb 7128 gcc_assert (tem != exp);
e7f3c83f 7129
3d27140a 7130 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
 7131 computation, since it will need a temporary and TARGET is known
 7132 to be adequate for that. This occurs in unchecked conversion in Ada. */
3a94c984 7133
f47e9b4e
RK
7134 orig_op0 = op0
7135 = expand_expr (tem,
7136 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7137 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7138 != INTEGER_CST)
8403445a 7139 && modifier != EXPAND_STACK_PARM
f47e9b4e
RK
7140 ? target : NULL_RTX),
7141 VOIDmode,
7142 (modifier == EXPAND_INITIALIZER
8403445a
AM
7143 || modifier == EXPAND_CONST_ADDRESS
7144 || modifier == EXPAND_STACK_PARM)
f47e9b4e 7145 ? modifier : EXPAND_NORMAL);
bbf6f052 7146
8c8a8e34 7147 /* If this is a constant, put it into a register if it is a
14a774a9 7148 legitimate constant and OFFSET is 0 and memory if it isn't. */
8c8a8e34
JW
7149 if (CONSTANT_P (op0))
7150 {
7151 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
14a774a9
RK
7152 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7153 && offset == 0)
8c8a8e34
JW
7154 op0 = force_reg (mode, op0);
7155 else
7156 op0 = validize_mem (force_const_mem (mode, op0));
7157 }
7158
8d2e5f72
RK
7159 /* Otherwise, if this object not in memory and we either have an
7160 offset or a BLKmode result, put it there. This case can't occur in
7161 C, but can in Ada if we have unchecked conversion of an expression
7162 from a scalar type to an array or record type or for an
7163 ARRAY_RANGE_REF whose type is BLKmode. */
3c0cb5de 7164 else if (!MEM_P (op0)
8d2e5f72
RK
7165 && (offset != 0
7166 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7167 {
82c82743
RH
7168 tree nt = build_qualified_type (TREE_TYPE (tem),
7169 (TYPE_QUALS (TREE_TYPE (tem))
7170 | TYPE_QUAL_CONST));
7171 rtx memloc = assign_temp (nt, 1, 1, 1);
450b1728 7172
82c82743
RH
7173 emit_move_insn (memloc, op0);
7174 op0 = memloc;
8d2e5f72
RK
7175 }
7176
7bb0943f
RS
7177 if (offset != 0)
7178 {
8403445a
AM
7179 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7180 EXPAND_SUM);
7bb0943f 7181
5b0264cb 7182 gcc_assert (MEM_P (op0));
2d48c13d 7183
2d48c13d 7184#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 7185 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 7186 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
7187#else
7188 if (GET_MODE (offset_rtx) != ptr_mode)
7189 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
2d48c13d
JL
7190#endif
7191
e82407b5
EB
7192 if (GET_MODE (op0) == BLKmode
7193 /* A constant address in OP0 can have VOIDmode, we must
7194 not try to call force_reg in that case. */
efd07ca7 7195 && GET_MODE (XEXP (op0, 0)) != VOIDmode
14a774a9 7196 && bitsize != 0
3a94c984 7197 && (bitpos % bitsize) == 0
89752202 7198 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 7199 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
89752202 7200 {
e3c8ea67 7201 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
7202 bitpos = 0;
7203 }
7204
0d4903b8
RK
7205 op0 = offset_address (op0, offset_rtx,
7206 highest_pow2_factor (offset));
7bb0943f
RS
7207 }
7208
1ce7f3c2
RK
7209 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7210 record its alignment as BIGGEST_ALIGNMENT. */
3c0cb5de 7211 if (MEM_P (op0) && bitpos == 0 && offset != 0
1ce7f3c2
RK
7212 && is_aligning_offset (offset, tem))
7213 set_mem_align (op0, BIGGEST_ALIGNMENT);
7214
bbf6f052 7215 /* Don't forget about volatility even if this is a bitfield. */
3c0cb5de 7216 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
bbf6f052 7217 {
f47e9b4e
RK
7218 if (op0 == orig_op0)
7219 op0 = copy_rtx (op0);
7220
bbf6f052
RK
7221 MEM_VOLATILE_P (op0) = 1;
7222 }
7223
010f87c4
JJ
7224 /* The following code doesn't handle CONCAT.
7225 Assume only bitpos == 0 can be used for CONCAT, due to
 7226 one-element arrays having the same mode as their element. */
7227 if (GET_CODE (op0) == CONCAT)
7228 {
5b0264cb
NS
7229 gcc_assert (bitpos == 0
7230 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
010f87c4
JJ
7231 return op0;
7232 }
7233
ccc98036
RS
7234 /* In cases where an aligned union has an unaligned object
7235 as a field, we might be extracting a BLKmode value from
7236 an integer-mode (e.g., SImode) object. Handle this case
7237 by doing the extract into an object as wide as the field
7238 (which we know to be the width of a basic mode), then
cb5fa0f8 7239 storing into memory, and changing the mode to BLKmode. */
bbf6f052 7240 if (mode1 == VOIDmode
f8cfc6aa 7241 || REG_P (op0) || GET_CODE (op0) == SUBREG
cb5fa0f8
RK
7242 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7243 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10c2a453
RK
7244 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7245 && modifier != EXPAND_CONST_ADDRESS
7246 && modifier != EXPAND_INITIALIZER)
cb5fa0f8
RK
7247 /* If the field isn't aligned enough to fetch as a memref,
7248 fetch it as a bit field. */
7249 || (mode1 != BLKmode
9e5f281f 7250 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
e82407b5 7251 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
3c0cb5de 7252 || (MEM_P (op0)
e82407b5
EB
7253 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7254 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
a8f3bf52
RK
7255 && ((modifier == EXPAND_CONST_ADDRESS
7256 || modifier == EXPAND_INITIALIZER)
7257 ? STRICT_ALIGNMENT
7258 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9e5f281f 7259 || (bitpos % BITS_PER_UNIT != 0)))
cb5fa0f8
RK
7260 /* If the type and the field are a constant size and the
7261 size of the type isn't the same size as the bitfield,
7262 we must use bitfield operations. */
7263 || (bitsize >= 0
dbe4d070
RH
7264 && TYPE_SIZE (TREE_TYPE (exp))
7265 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
cb5fa0f8 7266 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
a06ef755 7267 bitsize)))
bbf6f052 7268 {
bbf6f052
RK
7269 enum machine_mode ext_mode = mode;
7270
14a774a9 7271 if (ext_mode == BLKmode
3c0cb5de
JQ
7272 && ! (target != 0 && MEM_P (op0)
7273 && MEM_P (target)
14a774a9 7274 && bitpos % BITS_PER_UNIT == 0))
bbf6f052
RK
7275 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7276
7277 if (ext_mode == BLKmode)
a281e72d 7278 {
7a06d606
RK
7279 if (target == 0)
7280 target = assign_temp (type, 0, 1, 1);
7281
7282 if (bitsize == 0)
7283 return target;
7284
a281e72d
RK
7285 /* In this case, BITPOS must start at a byte boundary and
7286 TARGET, if specified, must be a MEM. */
5b0264cb
NS
7287 gcc_assert (MEM_P (op0)
7288 && (!target || MEM_P (target))
7289 && !(bitpos % BITS_PER_UNIT));
a281e72d 7290
7a06d606
RK
7291 emit_block_move (target,
7292 adjust_address (op0, VOIDmode,
7293 bitpos / BITS_PER_UNIT),
a06ef755 7294 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a 7295 / BITS_PER_UNIT),
8403445a
AM
7296 (modifier == EXPAND_STACK_PARM
7297 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
3a94c984 7298
a281e72d
RK
7299 return target;
7300 }
bbf6f052 7301
dc6d66b3
RK
7302 op0 = validize_mem (op0);
7303
3c0cb5de 7304 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
04050c69 7305 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7306
8403445a
AM
7307 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7308 (modifier == EXPAND_STACK_PARM
7309 ? NULL_RTX : target),
b3520980 7310 ext_mode, ext_mode);
ef19912d
RK
7311
7312 /* If the result is a record type and BITSIZE is narrower than
7313 the mode of OP0, an integral mode, and this is a big endian
7314 machine, we must put the field into the high-order bits. */
7315 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7316 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
65a07688 7317 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
ef19912d
RK
7318 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7319 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7320 - bitsize),
7321 op0, 1);
7322
daae8185
RK
7323 /* If the result type is BLKmode, store the data into a temporary
7324 of the appropriate type, but with the mode corresponding to the
7325 mode for the data we have (op0's mode). It's tempting to make
7326 this a constant type, since we know it's only being stored once,
7327 but that can cause problems if we are taking the address of this
7328 COMPONENT_REF because the MEM of any reference via that address
7329 will have flags corresponding to the type, which will not
7330 necessarily be constant. */
bbf6f052
RK
7331 if (mode == BLKmode)
7332 {
daae8185
RK
7333 rtx new
7334 = assign_stack_temp_for_type
7335 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
bbf6f052
RK
7336
7337 emit_move_insn (new, op0);
7338 op0 = copy_rtx (new);
7339 PUT_MODE (op0, BLKmode);
c3d32120 7340 set_mem_attributes (op0, exp, 1);
bbf6f052
RK
7341 }
7342
7343 return op0;
7344 }
7345
05019f83
RK
7346 /* If the result is BLKmode, use that to access the object
7347 now as well. */
7348 if (mode == BLKmode)
7349 mode1 = BLKmode;
7350
bbf6f052
RK
7351 /* Get a reference to just this component. */
7352 if (modifier == EXPAND_CONST_ADDRESS
7353 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
f1ec5147 7354 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
bbf6f052 7355 else
f4ef873c 7356 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
41472af8 7357
f47e9b4e
RK
7358 if (op0 == orig_op0)
7359 op0 = copy_rtx (op0);
7360
3bdf5ad1 7361 set_mem_attributes (op0, exp, 0);
f8cfc6aa 7362 if (REG_P (XEXP (op0, 0)))
a06ef755 7363 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7364
bbf6f052 7365 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 7366 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 7367 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 7368 || modifier == EXPAND_INITIALIZER)
bbf6f052 7369 return op0;
0d15e60c 7370 else if (target == 0)
bbf6f052 7371 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 7372
bbf6f052
RK
7373 convert_move (target, op0, unsignedp);
7374 return target;
7375 }
7376
0f59171d
RH
7377 case OBJ_TYPE_REF:
7378 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
4a8d0c9c 7379
bbf6f052
RK
7380 case CALL_EXPR:
7381 /* Check for a built-in function. */
7382 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
7383 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7384 == FUNCTION_DECL)
bbf6f052 7385 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
0fb7aeda 7386 {
c70eaeaf
KG
7387 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7388 == BUILT_IN_FRONTEND)
673fda6b
SB
7389 return lang_hooks.expand_expr (exp, original_target,
7390 tmode, modifier,
7391 alt_rtl);
c70eaeaf
KG
7392 else
7393 return expand_builtin (exp, target, subtarget, tmode, ignore);
7394 }
d6a5ac33 7395
8129842c 7396 return expand_call (exp, target, ignore);
bbf6f052
RK
7397
7398 case NON_LVALUE_EXPR:
7399 case NOP_EXPR:
7400 case CONVERT_EXPR:
4a53008b 7401 if (TREE_OPERAND (exp, 0) == error_mark_node)
a592f288 7402 return const0_rtx;
4a53008b 7403
bbf6f052
RK
7404 if (TREE_CODE (type) == UNION_TYPE)
7405 {
7406 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
14a774a9 7407
c3d32120
RK
7408 /* If both input and output are BLKmode, this conversion isn't doing
7409 anything except possibly changing memory attribute. */
7410 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7411 {
7412 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7413 modifier);
7414
7415 result = copy_rtx (result);
7416 set_mem_attributes (result, exp, 0);
7417 return result;
7418 }
14a774a9 7419
bbf6f052 7420 if (target == 0)
cf7cb67e
JH
7421 {
7422 if (TYPE_MODE (type) != BLKmode)
7423 target = gen_reg_rtx (TYPE_MODE (type));
7424 else
7425 target = assign_temp (type, 0, 1, 1);
7426 }
d6a5ac33 7427
3c0cb5de 7428 if (MEM_P (target))
bbf6f052
RK
7429 /* Store data into beginning of memory target. */
7430 store_expr (TREE_OPERAND (exp, 0),
8403445a 7431 adjust_address (target, TYPE_MODE (valtype), 0),
6f4fd16d 7432 modifier == EXPAND_STACK_PARM);
1499e0a8 7433
bbf6f052 7434 else
5b0264cb
NS
7435 {
7436 gcc_assert (REG_P (target));
7437
7438 /* Store this field into a union of the proper type. */
7439 store_field (target,
7440 MIN ((int_size_in_bytes (TREE_TYPE
7441 (TREE_OPERAND (exp, 0)))
7442 * BITS_PER_UNIT),
7443 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7444 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
f45bdcd0 7445 type, 0);
5b0264cb 7446 }
bbf6f052
RK
7447
7448 /* Return the entire union. */
7449 return target;
7450 }
d6a5ac33 7451
7f62854a
RK
7452 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7453 {
7454 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
37a08a29 7455 modifier);
7f62854a
RK
7456
7457 /* If the signedness of the conversion differs and OP0 is
7458 a promoted SUBREG, clear that indication since we now
7459 have to do the proper extension. */
8df83eae 7460 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7f62854a
RK
7461 && GET_CODE (op0) == SUBREG)
7462 SUBREG_PROMOTED_VAR_P (op0) = 0;
7463
bc15d0ef 7464 return REDUCE_BIT_FIELD (op0);
7f62854a
RK
7465 }
7466
fdf473ae 7467 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
12342f90 7468 if (GET_MODE (op0) == mode)
7acda552 7469 ;
12342f90 7470
d6a5ac33 7471 /* If OP0 is a constant, just convert it into the proper mode. */
7acda552 7472 else if (CONSTANT_P (op0))
fdf473ae
RH
7473 {
7474 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7475 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7476
0fb7aeda 7477 if (modifier == EXPAND_INITIALIZER)
7acda552
RK
7478 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7479 subreg_lowpart_offset (mode,
7480 inner_mode));
fdf473ae 7481 else
7acda552
RK
 7482 op0 = convert_modes (mode, inner_mode, op0,
7483 TYPE_UNSIGNED (inner_type));
fdf473ae 7484 }
12342f90 7485
7acda552
RK
7486 else if (modifier == EXPAND_INITIALIZER)
7487 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 7488
7acda552
RK
7489 else if (target == 0)
7490 op0 = convert_to_mode (mode, op0,
7491 TYPE_UNSIGNED (TREE_TYPE
7492 (TREE_OPERAND (exp, 0))));
bbf6f052 7493 else
7acda552
RK
7494 {
7495 convert_move (target, op0,
7496 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7497 op0 = target;
7498 }
7499
7500 return REDUCE_BIT_FIELD (op0);
bbf6f052 7501
ed239f5a 7502 case VIEW_CONVERT_EXPR:
37a08a29 7503 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
ed239f5a
RK
7504
7505 /* If the input and output modes are both the same, we are done.
13cf99ec
RK
7506 Otherwise, if neither mode is BLKmode and both are integral and within
7507 a word, we can use gen_lowpart. If neither is true, make sure the
7508 operand is in memory and convert the MEM to the new mode. */
ed239f5a
RK
7509 if (TYPE_MODE (type) == GET_MODE (op0))
7510 ;
7511 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
13cf99ec
RK
7512 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7513 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
ed239f5a
RK
7514 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7515 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7516 op0 = gen_lowpart (TYPE_MODE (type), op0);
3c0cb5de 7517 else if (!MEM_P (op0))
ed239f5a 7518 {
c11c10d8
RK
7519 /* If the operand is not a MEM, force it into memory. Since we
 7520 are going to be changing the mode of the MEM, don't call
7521 force_const_mem for constants because we don't allow pool
7522 constants to change mode. */
ed239f5a 7523 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
ed239f5a 7524
5b0264cb 7525 gcc_assert (!TREE_ADDRESSABLE (exp));
ed239f5a 7526
c11c10d8
RK
7527 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7528 target
7529 = assign_stack_temp_for_type
7530 (TYPE_MODE (inner_type),
7531 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
ed239f5a 7532
c11c10d8
RK
7533 emit_move_insn (target, op0);
7534 op0 = target;
ed239f5a
RK
7535 }
7536
c11c10d8
RK
7537 /* At this point, OP0 is in the correct mode. If the output type is such
7538 that the operand is known to be aligned, indicate that it is.
7539 Otherwise, we need only be concerned about alignment for non-BLKmode
7540 results. */
3c0cb5de 7541 if (MEM_P (op0))
ed239f5a
RK
7542 {
7543 op0 = copy_rtx (op0);
7544
ed239f5a
RK
7545 if (TYPE_ALIGN_OK (type))
7546 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7547 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7548 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7549 {
7550 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
65a07688
RK
7551 HOST_WIDE_INT temp_size
7552 = MAX (int_size_in_bytes (inner_type),
7553 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
ed239f5a
RK
7554 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7555 temp_size, 0, type);
c4e59f51 7556 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
ed239f5a 7557
5b0264cb 7558 gcc_assert (!TREE_ADDRESSABLE (exp));
c11c10d8 7559
ed239f5a
RK
7560 if (GET_MODE (op0) == BLKmode)
7561 emit_block_move (new_with_op0_mode, op0,
44bb111a 7562 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8403445a
AM
7563 (modifier == EXPAND_STACK_PARM
7564 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
ed239f5a
RK
7565 else
7566 emit_move_insn (new_with_op0_mode, op0);
7567
7568 op0 = new;
7569 }
0fb7aeda 7570
c4e59f51 7571 op0 = adjust_address (op0, TYPE_MODE (type), 0);
ed239f5a
RK
7572 }
7573
7574 return op0;
7575
bbf6f052 7576 case PLUS_EXPR:
4dfa0342 7577 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
bbf6f052
RK
7578 something else, make sure we add the register to the constant and
7579 then to the other thing. This case can occur during strength
7580 reduction and doing it this way will produce better code if the
7581 frame pointer or argument pointer is eliminated.
7582
7583 fold-const.c will ensure that the constant is always in the inner
7584 PLUS_EXPR, so the only case we need to do anything about is if
7585 sp, ap, or fp is our second argument, in which case we must swap
7586 the innermost first argument and our second argument. */
7587
7588 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7589 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4dfa0342
RH
7590 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7591 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7592 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7593 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
bbf6f052
RK
7594 {
7595 tree t = TREE_OPERAND (exp, 1);
7596
7597 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7598 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7599 }
7600
88f63c77 7601 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
7602 something, we might be forming a constant. So try to use
7603 plus_constant. If it produces a sum and we can't accept it,
7604 use force_operand. This allows P = &ARR[const] to generate
7605 efficient code on machines where a SYMBOL_REF is not a valid
7606 address.
7607
7608 If this is an EXPAND_SUM call, always return the sum. */
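 /* Illustration (not taken from the sources): for "p = &arr[5]" with
 4-byte elements, plus_constant can fold the address into a single
 (const (plus (symbol_ref "arr") (const_int 20))), which is then
 legitimized once rather than computed at run time. */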
c980ac49 7609 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
0fb7aeda 7610 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
bbf6f052 7611 {
8403445a
AM
7612 if (modifier == EXPAND_STACK_PARM)
7613 target = 0;
c980ac49
RS
7614 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7615 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7616 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7617 {
cbbc503e
JL
7618 rtx constant_part;
7619
c980ac49
RS
7620 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7621 EXPAND_SUM);
cbbc503e
JL
7622 /* Use immed_double_const to ensure that the constant is
7623 truncated according to the mode of OP1, then sign extended
7624 to a HOST_WIDE_INT. Using the constant directly can result
7625 in non-canonical RTL in a 64x32 cross compile. */
7626 constant_part
7627 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7628 (HOST_WIDE_INT) 0,
a5efcd63 7629 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7f401c74 7630 op1 = plus_constant (op1, INTVAL (constant_part));
c980ac49
RS
7631 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7632 op1 = force_operand (op1, target);
bc15d0ef 7633 return REDUCE_BIT_FIELD (op1);
c980ac49 7634 }
bbf6f052 7635
c980ac49
RS
7636 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7637 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7638 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7639 {
cbbc503e
JL
7640 rtx constant_part;
7641
c980ac49 7642 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
70d95bac
RH
7643 (modifier == EXPAND_INITIALIZER
7644 ? EXPAND_INITIALIZER : EXPAND_SUM));
c980ac49
RS
7645 if (! CONSTANT_P (op0))
7646 {
7647 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7648 VOIDmode, modifier);
f0e9957a
RS
7649 /* Return a PLUS if modifier says it's OK. */
7650 if (modifier == EXPAND_SUM
7651 || modifier == EXPAND_INITIALIZER)
7652 return simplify_gen_binary (PLUS, mode, op0, op1);
7653 goto binop2;
c980ac49 7654 }
cbbc503e
JL
7655 /* Use immed_double_const to ensure that the constant is
7656 truncated according to the mode of OP1, then sign extended
7657 to a HOST_WIDE_INT. Using the constant directly can result
7658 in non-canonical RTL in a 64x32 cross compile. */
7659 constant_part
7660 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7661 (HOST_WIDE_INT) 0,
2a94e396 7662 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7f401c74 7663 op0 = plus_constant (op0, INTVAL (constant_part));
c980ac49
RS
7664 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7665 op0 = force_operand (op0, target);
bc15d0ef 7666 return REDUCE_BIT_FIELD (op0);
c980ac49 7667 }
bbf6f052
RK
7668 }
7669
7670 /* No sense saving up arithmetic to be done
7671 if it's all in the wrong mode to form part of an address.
7672 And force_operand won't know whether to sign-extend or
7673 zero-extend. */
7674 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 7675 || mode != ptr_mode)
4ef7870a 7676 {
eb698c58
RS
7677 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7678 subtarget, &op0, &op1, 0);
6e7727eb
EB
7679 if (op0 == const0_rtx)
7680 return op1;
7681 if (op1 == const0_rtx)
7682 return op0;
4ef7870a
EB
7683 goto binop2;
7684 }
bbf6f052 7685
eb698c58
RS
7686 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7687 subtarget, &op0, &op1, modifier);
bc15d0ef 7688 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
bbf6f052
RK
7689
7690 case MINUS_EXPR:
ea87523e
RK
7691 /* For initializers, we are allowed to return a MINUS of two
7692 symbolic constants. Here we handle all cases when both operands
7693 are constant. */
bbf6f052
RK
7694 /* Handle difference of two symbolic constants,
7695 for the sake of an initializer. */
7696 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7697 && really_constant_p (TREE_OPERAND (exp, 0))
7698 && really_constant_p (TREE_OPERAND (exp, 1)))
7699 {
eb698c58
RS
7700 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7701 NULL_RTX, &op0, &op1, modifier);
ea87523e 7702
ea87523e
RK
7703 /* If the last operand is a CONST_INT, use plus_constant of
7704 the negated constant. Else make the MINUS. */
7705 if (GET_CODE (op1) == CONST_INT)
bc15d0ef 7706 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
ea87523e 7707 else
bc15d0ef 7708 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
bbf6f052 7709 }
ae431183 7710
1717e19e
UW
7711 /* No sense saving up arithmetic to be done
7712 if it's all in the wrong mode to form part of an address.
7713 And force_operand won't know whether to sign-extend or
7714 zero-extend. */
7715 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7716 || mode != ptr_mode)
7717 goto binop;
7718
eb698c58
RS
7719 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7720 subtarget, &op0, &op1, modifier);
1717e19e
UW
7721
7722 /* Convert A - const to A + (-const). */
7723 if (GET_CODE (op1) == CONST_INT)
7724 {
7725 op1 = negate_rtx (mode, op1);
bc15d0ef 7726 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
1717e19e
UW
7727 }
7728
7729 goto binop2;
bbf6f052
RK
7730
7731 case MULT_EXPR:
bbf6f052
RK
7732 /* If first operand is constant, swap them.
7733 Thus the following special case checks need only
7734 check the second operand. */
7735 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7736 {
b3694847 7737 tree t1 = TREE_OPERAND (exp, 0);
bbf6f052
RK
7738 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7739 TREE_OPERAND (exp, 1) = t1;
7740 }
7741
7742 /* Attempt to return something suitable for generating an
7743 indexed address, for machines that support that. */
7744
88f63c77 7745 if (modifier == EXPAND_SUM && mode == ptr_mode
3b40e71b 7746 && host_integerp (TREE_OPERAND (exp, 1), 0))
bbf6f052 7747 {
48a5f2fa
DJ
7748 tree exp1 = TREE_OPERAND (exp, 1);
7749
921b3427
RK
7750 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7751 EXPAND_SUM);
bbf6f052 7752
f8cfc6aa 7753 if (!REG_P (op0))
906c4e36 7754 op0 = force_operand (op0, NULL_RTX);
f8cfc6aa 7755 if (!REG_P (op0))
bbf6f052
RK
7756 op0 = copy_to_mode_reg (mode, op0);
7757
bc15d0ef 7758 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
48a5f2fa 7759 gen_int_mode (tree_low_cst (exp1, 0),
bc15d0ef 7760 TYPE_MODE (TREE_TYPE (exp1)))));
bbf6f052
RK
7761 }
7762
8403445a
AM
7763 if (modifier == EXPAND_STACK_PARM)
7764 target = 0;
7765
bbf6f052
RK
7766 /* Check for multiplying things that have been extended
7767 from a narrower type. If this machine supports multiplying
7768 in that narrower type with a result in the desired type,
7769 do it that way, and avoid the explicit type-conversion. */
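/* E.g. on a target where long long is twice as wide as int,
   (long long) a * (long long) b with int operands can use a 32x32->64
   widening multiply (smul_widen_optab / umul_widen_optab) instead of
   extending both operands and doing a full 64x64 multiply.  */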
7770 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7771 && TREE_CODE (type) == INTEGER_TYPE
7772 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7773 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7774 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7775 && int_fits_type_p (TREE_OPERAND (exp, 1),
7776 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7777 /* Don't use a widening multiply if a shift will do. */
7778 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 7779 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
7780 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7781 ||
7782 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8df83eae
RK
7783 && (TYPE_PRECISION (TREE_TYPE
7784 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7785 == TYPE_PRECISION (TREE_TYPE
7786 (TREE_OPERAND
7787 (TREE_OPERAND (exp, 0), 0))))
bbf6f052
RK
7788 /* If both operands are extended, they must either both
7789 be zero-extended or both be sign-extended. */
8df83eae
RK
7790 && (TYPE_UNSIGNED (TREE_TYPE
7791 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7792 == TYPE_UNSIGNED (TREE_TYPE
7793 (TREE_OPERAND
7794 (TREE_OPERAND (exp, 0), 0)))))))
bbf6f052 7795 {
888d65b5
RS
7796 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7797 enum machine_mode innermode = TYPE_MODE (op0type);
8df83eae 7798 bool zextend_p = TYPE_UNSIGNED (op0type);
888d65b5
RS
7799 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7800 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7801
d2348bd5 7802 if (mode == GET_MODE_2XWIDER_MODE (innermode))
bbf6f052 7803 {
b10af0c8
TG
7804 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7805 {
b10af0c8 7806 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
eb698c58
RS
7807 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7808 TREE_OPERAND (exp, 1),
7809 NULL_RTX, &op0, &op1, 0);
b10af0c8 7810 else
eb698c58
RS
7811 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7812 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7813 NULL_RTX, &op0, &op1, 0);
c4d70ce3 7814 goto binop3;
b10af0c8
TG
7815 }
7816 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7817 && innermode == word_mode)
7818 {
888d65b5 7819 rtx htem, hipart;
b10af0c8
TG
7820 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7821 NULL_RTX, VOIDmode, 0);
7822 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8c118062
GK
7823 op1 = convert_modes (innermode, mode,
7824 expand_expr (TREE_OPERAND (exp, 1),
7825 NULL_RTX, VOIDmode, 0),
7826 unsignedp);
b10af0c8
TG
7827 else
7828 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7829 NULL_RTX, VOIDmode, 0);
7830 temp = expand_binop (mode, other_optab, op0, op1, target,
7831 unsignedp, OPTAB_LIB_WIDEN);
888d65b5
RS
7832 hipart = gen_highpart (innermode, temp);
7833 htem = expand_mult_highpart_adjust (innermode, hipart,
7834 op0, op1, hipart,
7835 zextend_p);
7836 if (htem != hipart)
7837 emit_move_insn (hipart, htem);
bc15d0ef 7838 return REDUCE_BIT_FIELD (temp);
b10af0c8 7839 }
bbf6f052
RK
7840 }
7841 }
eb698c58
RS
7842 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7843 subtarget, &op0, &op1, 0);
bc15d0ef 7844 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
bbf6f052
RK
7845
7846 case TRUNC_DIV_EXPR:
7847 case FLOOR_DIV_EXPR:
7848 case CEIL_DIV_EXPR:
7849 case ROUND_DIV_EXPR:
7850 case EXACT_DIV_EXPR:
8403445a
AM
7851 if (modifier == EXPAND_STACK_PARM)
7852 target = 0;
bbf6f052
RK
7853 /* Possible optimization: compute the dividend with EXPAND_SUM;
7854 then, if the divisor is constant, we can optimize the case
7855 where some terms of the dividend have coefficients divisible by it. */
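/* E.g. for (x * 8 + y * 12) / 4, a dividend expanded with EXPAND_SUM
   would keep its terms visible, so terms whose coefficients are
   divisible by 4 could be divided symbolically.  This remains a note
   only; the code below does not do it.  */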
eb698c58
RS
7856 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7857 subtarget, &op0, &op1, 0);
bbf6f052
RK
7858 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7859
7860 case RDIV_EXPR:
bbf6f052
RK
7861 goto binop;
7862
7863 case TRUNC_MOD_EXPR:
7864 case FLOOR_MOD_EXPR:
7865 case CEIL_MOD_EXPR:
7866 case ROUND_MOD_EXPR:
8403445a
AM
7867 if (modifier == EXPAND_STACK_PARM)
7868 target = 0;
eb698c58
RS
7869 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7870 subtarget, &op0, &op1, 0);
bbf6f052
RK
7871 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7872
7873 case FIX_ROUND_EXPR:
7874 case FIX_FLOOR_EXPR:
7875 case FIX_CEIL_EXPR:
5b0264cb 7876 gcc_unreachable (); /* Not used for C. */
bbf6f052
RK
7877
7878 case FIX_TRUNC_EXPR:
906c4e36 7879 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8403445a 7880 if (target == 0 || modifier == EXPAND_STACK_PARM)
bbf6f052
RK
7881 target = gen_reg_rtx (mode);
7882 expand_fix (target, op0, unsignedp);
7883 return target;
7884
7885 case FLOAT_EXPR:
906c4e36 7886 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8403445a 7887 if (target == 0 || modifier == EXPAND_STACK_PARM)
bbf6f052
RK
7888 target = gen_reg_rtx (mode);
7889 /* expand_float can't figure out what to do if FROM has VOIDmode.
7890 So give it the correct mode. With -O, cse will optimize this. */
7891 if (GET_MODE (op0) == VOIDmode)
7892 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7893 op0);
7894 expand_float (target, op0,
8df83eae 7895 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
7896 return target;
7897
7898 case NEGATE_EXPR:
5b22bee8 7899 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
7900 if (modifier == EXPAND_STACK_PARM)
7901 target = 0;
91ce572a 7902 temp = expand_unop (mode,
c4d70ce3
PB
7903 optab_for_tree_code (NEGATE_EXPR, type),
7904 op0, target, 0);
5b0264cb 7905 gcc_assert (temp);
bc15d0ef 7906 return REDUCE_BIT_FIELD (temp);
bbf6f052
RK
7907
7908 case ABS_EXPR:
7909 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
7910 if (modifier == EXPAND_STACK_PARM)
7911 target = 0;
bbf6f052 7912
11017cc7 7913 /* ABS_EXPR is not valid for complex arguments. */
5b0264cb
NS
7914 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7915 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
2d7050fd 7916
bbf6f052
RK
7917 /* Unsigned abs is simply the operand. Testing here means we don't
7918 risk generating incorrect code below. */
8df83eae 7919 if (TYPE_UNSIGNED (type))
bbf6f052
RK
7920 return op0;
7921
91ce572a 7922 return expand_abs (mode, op0, target, unsignedp,
e5e809f4 7923 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
bbf6f052
RK
7924
7925 case MAX_EXPR:
7926 case MIN_EXPR:
7927 target = original_target;
8403445a
AM
7928 if (target == 0
7929 || modifier == EXPAND_STACK_PARM
3c0cb5de 7930 || (MEM_P (target) && MEM_VOLATILE_P (target))
d6a5ac33 7931 || GET_MODE (target) != mode
f8cfc6aa 7932 || (REG_P (target)
bbf6f052
RK
7933 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7934 target = gen_reg_rtx (mode);
eb698c58
RS
7935 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7936 target, &op0, &op1, 0);
bbf6f052
RK
7937
7938 /* First try to do it with a special MIN or MAX instruction.
7939 If that does not win, use a conditional jump to select the proper
7940 value. */
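/* When no such instruction exists, the fallback below is in effect
       target = op0;
       if (target >= op1) goto done;   (<= for MIN_EXPR)
       target = op1;
     done:
   using a conditional move where available, else a compare and branch.  */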
c4d70ce3 7941 this_optab = optab_for_tree_code (code, type);
bbf6f052
RK
7942 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7943 OPTAB_WIDEN);
7944 if (temp != 0)
7945 return temp;
7946
fa2981d8
JW
7947 /* At this point, a MEM target is no longer useful; we will get better
7948 code without it. */
3a94c984 7949
dbedefae 7950 if (! REG_P (target))
fa2981d8
JW
7951 target = gen_reg_rtx (mode);
7952
e3be1116
RS
7953 /* If op1 was placed in target, swap op0 and op1. */
7954 if (target != op0 && target == op1)
7955 {
927630a5 7956 temp = op0;
e3be1116 7957 op0 = op1;
927630a5 7958 op1 = temp;
e3be1116
RS
7959 }
7960
dbedefae
RS
7961 /* We generate better code and avoid problems with op1 mentioning
7962 target by forcing op1 into a pseudo if it isn't a constant. */
7963 if (! CONSTANT_P (op1))
7964 op1 = force_reg (mode, op1);
7965
927630a5
SB
7966#ifdef HAVE_conditional_move
7967 /* Use a conditional move if possible. */
7968 if (can_conditionally_move_p (mode))
7969 {
7970 enum rtx_code comparison_code;
7971 rtx insn;
7972
7973 if (code == MAX_EXPR)
7974 comparison_code = unsignedp ? GEU : GE;
7975 else
7976 comparison_code = unsignedp ? LEU : LE;
7977
7978 /* ??? Same problem as in expmed.c: emit_conditional_move
7979 forces a stack adjustment via compare_from_rtx, and we
7980 lose the stack adjustment if the sequence we are about
7981 to create is discarded. */
7982 do_pending_stack_adjust ();
7983
7984 start_sequence ();
7985
7986 /* Try to emit the conditional move. */
7987 insn = emit_conditional_move (target, comparison_code,
7988 op0, op1, mode,
7989 op0, op1, mode,
7990 unsignedp);
7991
7992 /* If we could do the conditional move, emit the sequence,
7993 and return. */
7994 if (insn)
7995 {
7996 rtx seq = get_insns ();
7997 end_sequence ();
7998 emit_insn (seq);
7999 return target;
8000 }
8001
8002 /* Otherwise discard the sequence and fall back to code with
8003 branches. */
8004 end_sequence ();
8005 }
8006#endif
ee456b1c
RK
8007 if (target != op0)
8008 emit_move_insn (target, op0);
d6a5ac33 8009
927630a5 8010 temp = gen_label_rtx ();
d6a5ac33 8011
f81497d9
RS
8012 /* If this mode is an integer too wide to compare properly,
8013 compare word by word. Rely on cse to optimize constant cases. */
1eb8759b
RH
8014 if (GET_MODE_CLASS (mode) == MODE_INT
8015 && ! can_compare_p (GE, mode, ccp_jump))
bbf6f052 8016 {
f81497d9 8017 if (code == MAX_EXPR)
288dc1ea 8018 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
927630a5 8019 NULL_RTX, temp);
bbf6f052 8020 else
288dc1ea 8021 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
927630a5 8022 NULL_RTX, temp);
bbf6f052 8023 }
f81497d9
RS
8024 else
8025 {
b30f05db 8026 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
927630a5 8027 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
f81497d9 8028 }
b30f05db 8029 emit_move_insn (target, op1);
927630a5 8030 emit_label (temp);
bbf6f052
RK
8031 return target;
8032
bbf6f052
RK
8033 case BIT_NOT_EXPR:
8034 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
8035 if (modifier == EXPAND_STACK_PARM)
8036 target = 0;
bbf6f052 8037 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5b0264cb 8038 gcc_assert (temp);
bbf6f052
RK
8039 return temp;
8040
d6a5ac33
RK
8041 /* ??? Can optimize bitwise operations with one arg constant.
8042 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8043 and (a bitwise1 b) bitwise2 b (etc)
8044 but that is probably not worthwhile. */
8045
8046 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8047 boolean values when we want in all cases to compute both of them. In
8048 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8049 as actual zero-or-1 values and then bitwise anding. In cases where
8050 there cannot be any side effects, better code would be made by
8051 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8052 how to recognize those cases. */
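/* E.g. for boolean operands, "a && b" (TRUTH_ANDIF_EXPR) may skip
   evaluating b entirely, whereas TRUTH_AND_EXPR evaluates both
   operands to 0 or 1 and simply ANDs them, which is what the
   fall-through to BIT_AND_EXPR below implements.  */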
8053
bbf6f052 8054 case TRUTH_AND_EXPR:
c4d70ce3 8055 code = BIT_AND_EXPR;
bbf6f052 8056 case BIT_AND_EXPR:
bbf6f052
RK
8057 goto binop;
8058
bbf6f052 8059 case TRUTH_OR_EXPR:
7efcb746 8060 code = BIT_IOR_EXPR;
bbf6f052 8061 case BIT_IOR_EXPR:
bbf6f052
RK
8062 goto binop;
8063
874726a8 8064 case TRUTH_XOR_EXPR:
c4d70ce3 8065 code = BIT_XOR_EXPR;
bbf6f052 8066 case BIT_XOR_EXPR:
bbf6f052
RK
8067 goto binop;
8068
8069 case LSHIFT_EXPR:
8070 case RSHIFT_EXPR:
8071 case LROTATE_EXPR:
8072 case RROTATE_EXPR:
e5e809f4 8073 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052 8074 subtarget = 0;
8403445a
AM
8075 if (modifier == EXPAND_STACK_PARM)
8076 target = 0;
bbf6f052
RK
8077 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8078 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8079 unsignedp);
8080
d6a5ac33
RK
8081 /* Could determine the answer when only additive constants differ. Also,
8082 the addition of one can be handled by changing the condition. */
bbf6f052
RK
8083 case LT_EXPR:
8084 case LE_EXPR:
8085 case GT_EXPR:
8086 case GE_EXPR:
8087 case EQ_EXPR:
8088 case NE_EXPR:
1eb8759b
RH
8089 case UNORDERED_EXPR:
8090 case ORDERED_EXPR:
8091 case UNLT_EXPR:
8092 case UNLE_EXPR:
8093 case UNGT_EXPR:
8094 case UNGE_EXPR:
8095 case UNEQ_EXPR:
d1a7edaf 8096 case LTGT_EXPR:
8403445a
AM
8097 temp = do_store_flag (exp,
8098 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8099 tmode != VOIDmode ? tmode : mode, 0);
bbf6f052
RK
8100 if (temp != 0)
8101 return temp;
d6a5ac33 8102
0f41302f 8103 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
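/* When the conditions below hold, the emitted sequence is roughly
       temp = foo;
       if (temp == 0) goto L;
       temp = 1;
     L:
   so temp ends up 0 or 1 without needing a store-flag instruction.  */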
bbf6f052
RK
8104 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8105 && original_target
f8cfc6aa 8106 && REG_P (original_target)
bbf6f052
RK
8107 && (GET_MODE (original_target)
8108 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8109 {
d6a5ac33
RK
8110 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8111 VOIDmode, 0);
8112
c0a3eeac
UW
8113 /* If temp is constant, we can just compute the result. */
8114 if (GET_CODE (temp) == CONST_INT)
8115 {
8116 if (INTVAL (temp) != 0)
8117 emit_move_insn (target, const1_rtx);
8118 else
8119 emit_move_insn (target, const0_rtx);
8120
8121 return target;
8122 }
8123
bbf6f052 8124 if (temp != original_target)
c0a3eeac
UW
8125 {
8126 enum machine_mode mode1 = GET_MODE (temp);
8127 if (mode1 == VOIDmode)
8128 mode1 = tmode != VOIDmode ? tmode : mode;
0fb7aeda 8129
c0a3eeac
UW
8130 temp = copy_to_mode_reg (mode1, temp);
8131 }
d6a5ac33 8132
bbf6f052 8133 op1 = gen_label_rtx ();
c5d5d461 8134 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
a06ef755 8135 GET_MODE (temp), unsignedp, op1);
bbf6f052
RK
8136 emit_move_insn (temp, const1_rtx);
8137 emit_label (op1);
8138 return temp;
8139 }
d6a5ac33 8140
25f3e06c
PB
8141 /* If no set-flag instruction, must generate a conditional store
8142 into a temporary variable. Drop through and handle this
8143 like && and ||. */
8144
8145 if (! ignore
8146 && (target == 0
8147 || modifier == EXPAND_STACK_PARM
8148 || ! safe_from_p (target, exp, 1)
8149 /* Make sure we don't have a hard reg (such as function's return
8150 value) live across basic blocks, if not optimizing. */
8151 || (!optimize && REG_P (target)
8152 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8153 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8154
8155 if (target)
8156 emit_move_insn (target, const0_rtx);
8157
8158 op1 = gen_label_rtx ();
8159 jumpifnot (exp, op1);
8160
8161 if (target)
8162 emit_move_insn (target, const1_rtx);
8163
8164 emit_label (op1);
8165 return ignore ? const0_rtx : target;
8166
bbf6f052 8167 case TRUTH_NOT_EXPR:
8403445a
AM
8168 if (modifier == EXPAND_STACK_PARM)
8169 target = 0;
bbf6f052
RK
8170 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8171 /* The parser is careful to generate TRUTH_NOT_EXPR
8172 only with operands that are always zero or one. */
906c4e36 8173 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052 8174 target, 1, OPTAB_LIB_WIDEN);
5b0264cb 8175 gcc_assert (temp);
bbf6f052
RK
8176 return temp;
8177
6de9cd9a
DN
8178 case STATEMENT_LIST:
8179 {
8180 tree_stmt_iterator iter;
8181
5b0264cb 8182 gcc_assert (ignore);
6de9cd9a
DN
8183
8184 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8185 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8186 }
8187 return const0_rtx;
8188
bbf6f052 8189 case COND_EXPR:
ba8081eb
KH
8190 /* A COND_EXPR with its type being VOID_TYPE represents a
8191 conditional jump and is handled in
8192 expand_gimple_cond_expr. */
8193 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
f676971a 8194
e5bacf32
PB
8195 /* Note that COND_EXPRs whose type is a structure or union
8196 are required to be constructed to contain assignments of
8197 a temporary variable, so that we can evaluate them here
8198 for side effect only. If type is void, we must do likewise. */
8199
5b0264cb
NS
8200 gcc_assert (!TREE_ADDRESSABLE (type)
8201 && !ignore
8202 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8203 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
f676971a 8204
e5bacf32
PB
8205 /* If we are not to produce a result, we have no target. Otherwise,
8206 if a target was specified use it; it will not be used as an
8207 intermediate target unless it is safe. If no target, use a
8208 temporary. */
f676971a 8209
e5bacf32
PB
8210 if (modifier != EXPAND_STACK_PARM
8211 && original_target
8212 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8213 && GET_MODE (original_target) == mode
7c00d1fe 8214#ifdef HAVE_conditional_move
e5bacf32
PB
8215 && (! can_conditionally_move_p (mode)
8216 || REG_P (original_target))
7c00d1fe 8217#endif
e5bacf32
PB
8218 && !MEM_P (original_target))
8219 temp = original_target;
8220 else
8221 temp = assign_temp (type, 0, 0, 1);
f676971a 8222
e5bacf32
PB
8223 do_pending_stack_adjust ();
8224 NO_DEFER_POP;
8225 op0 = gen_label_rtx ();
8226 op1 = gen_label_rtx ();
8227 jumpifnot (TREE_OPERAND (exp, 0), op0);
8228 store_expr (TREE_OPERAND (exp, 1), temp,
6f4fd16d 8229 modifier == EXPAND_STACK_PARM);
f676971a 8230
e5bacf32
PB
8231 emit_jump_insn (gen_jump (op1));
8232 emit_barrier ();
8233 emit_label (op0);
8234 store_expr (TREE_OPERAND (exp, 2), temp,
6f4fd16d 8235 modifier == EXPAND_STACK_PARM);
f676971a 8236
e5bacf32
PB
8237 emit_label (op1);
8238 OK_DEFER_POP;
8239 return temp;
f676971a 8240
7ce67fbe
DP
8241 case VEC_COND_EXPR:
8242 target = expand_vec_cond_expr (exp, target);
8243 return target;
8244
bbf6f052
RK
8245 case MODIFY_EXPR:
8246 {
bbf6f052
RK
8247 tree lhs = TREE_OPERAND (exp, 0);
8248 tree rhs = TREE_OPERAND (exp, 1);
bbf6f052 8249
df9af2bb
KH
8250 gcc_assert (ignore);
8251
bbf6f052
RK
8252 /* Check for |= or &= of a bitfield of size one into another bitfield
8253 of size 1. In this case, (unless we need the result of the
8254 assignment) we can do this more efficiently with a
8255 test followed by an assignment, if necessary.
8256
8257 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8258 things change so we do, this code should be enhanced to
8259 support it. */
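/* E.g. with one-bit bitfields, "s.x |= t.y" can be emitted as
       if (t.y) s.x = 1;
   and "s.x &= t.y" as
       if (! t.y) s.x = 0;
   avoiding a read-modify-write of the destination bitfield.  */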
df9af2bb 8260 if (TREE_CODE (lhs) == COMPONENT_REF
bbf6f052
RK
8261 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8262 || TREE_CODE (rhs) == BIT_AND_EXPR)
8263 && TREE_OPERAND (rhs, 0) == lhs
8264 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
05bccae2
RK
8265 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8266 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
bbf6f052
RK
8267 {
8268 rtx label = gen_label_rtx ();
8269
8270 do_jump (TREE_OPERAND (rhs, 1),
8271 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8272 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8273 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8274 (TREE_CODE (rhs) == BIT_IOR_EXPR
8275 ? integer_one_node
e836a5a2 8276 : integer_zero_node)));
e7c33f54 8277 do_pending_stack_adjust ();
bbf6f052
RK
8278 emit_label (label);
8279 return const0_rtx;
8280 }
8281
e836a5a2 8282 expand_assignment (lhs, rhs);
0fb7aeda 8283
7f8adc4e 8284 return const0_rtx;
bbf6f052
RK
8285 }
8286
6e7f84a7
APB
8287 case RETURN_EXPR:
8288 if (!TREE_OPERAND (exp, 0))
8289 expand_null_return ();
8290 else
8291 expand_return (TREE_OPERAND (exp, 0));
8292 return const0_rtx;
8293
bbf6f052 8294 case ADDR_EXPR:
70bb498a 8295 return expand_expr_addr_expr (exp, target, tmode, modifier);
bbf6f052 8296
7308a047 8297 case COMPLEX_EXPR:
1466e387
RH
8298 /* Get the rtx code of the operands. */
8299 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8300 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7308a047 8301
1466e387
RH
8302 if (!target)
8303 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6551fa4d 8304
1466e387
RH
8305 /* Move the real (op0) and imaginary (op1) parts to their location. */
8306 write_complex_part (target, op0, false);
8307 write_complex_part (target, op1, true);
7308a047 8308
1466e387 8309 return target;
7308a047
RS
8310
8311 case REALPART_EXPR:
2d7050fd 8312 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
1466e387 8313 return read_complex_part (op0, false);
3a94c984 8314
7308a047 8315 case IMAGPART_EXPR:
2d7050fd 8316 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
1466e387 8317 return read_complex_part (op0, true);
7308a047 8318
6de9cd9a
DN
8319 case RESX_EXPR:
8320 expand_resx_expr (exp);
8321 return const0_rtx;
8322
e976b8b2 8323 case TRY_CATCH_EXPR:
6de9cd9a 8324 case CATCH_EXPR:
6de9cd9a 8325 case EH_FILTER_EXPR:
b335b813 8326 case TRY_FINALLY_EXPR:
ac45df5d 8327 /* Lowered by tree-eh.c. */
5b0264cb 8328 gcc_unreachable ();
b335b813 8329
ac45df5d
RH
8330 case WITH_CLEANUP_EXPR:
8331 case CLEANUP_POINT_EXPR:
8332 case TARGET_EXPR:
165b54c3 8333 case CASE_LABEL_EXPR:
77c9db77 8334 case VA_ARG_EXPR:
caf93cb0 8335 case BIND_EXPR:
e5bacf32
PB
8336 case INIT_EXPR:
8337 case CONJ_EXPR:
8338 case COMPOUND_EXPR:
8339 case PREINCREMENT_EXPR:
8340 case PREDECREMENT_EXPR:
8341 case POSTINCREMENT_EXPR:
8342 case POSTDECREMENT_EXPR:
8343 case LOOP_EXPR:
8344 case EXIT_EXPR:
e5bacf32
PB
8345 case TRUTH_ANDIF_EXPR:
8346 case TRUTH_ORIF_EXPR:
ac45df5d 8347 /* Lowered by gimplify.c. */
5b0264cb 8348 gcc_unreachable ();
b335b813 8349
52a11cbf 8350 case EXC_PTR_EXPR:
86c99549 8351 return get_exception_pointer (cfun);
52a11cbf 8352
6de9cd9a
DN
8353 case FILTER_EXPR:
8354 return get_exception_filter (cfun);
8355
67231816
RH
8356 case FDESC_EXPR:
8357 /* Function descriptors are not valid except for as
8358 initialization constants, and should not be expanded. */
5b0264cb 8359 gcc_unreachable ();
67231816 8360
6de9cd9a 8361 case SWITCH_EXPR:
7efcb746 8362 expand_case (exp);
6de9cd9a
DN
8363 return const0_rtx;
8364
8365 case LABEL_EXPR:
8366 expand_label (TREE_OPERAND (exp, 0));
8367 return const0_rtx;
8368
6de9cd9a
DN
8369 case ASM_EXPR:
8370 expand_asm_expr (exp);
8371 return const0_rtx;
8372
d25cee4d
RH
8373 case WITH_SIZE_EXPR:
8374 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8375 have pulled out the size to use in whatever context it needed. */
8376 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8377 modifier, alt_rtl);
8378
7ccf35ed
DN
8379 case REALIGN_LOAD_EXPR:
8380 {
8381 tree oprnd0 = TREE_OPERAND (exp, 0);
8382 tree oprnd1 = TREE_OPERAND (exp, 1);
8383 tree oprnd2 = TREE_OPERAND (exp, 2);
8384 rtx op2;
8385
8386 this_optab = optab_for_tree_code (code, type);
8387 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8388 op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
8389 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8390 target, unsignedp);
535a42b1 8391 gcc_assert (temp);
7ccf35ed
DN
8392 return temp;
8393 }
8394
61d3cdbb
DN
8395 case REDUC_MAX_EXPR:
8396 case REDUC_MIN_EXPR:
8397 case REDUC_PLUS_EXPR:
8398 {
8399 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8400 this_optab = optab_for_tree_code (code, type);
8401 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8402 gcc_assert (temp);
8403 return temp;
8404 }
7ccf35ed 8405
a6b46ba2
DN
8406 case VEC_LSHIFT_EXPR:
8407 case VEC_RSHIFT_EXPR:
8408 {
8409 target = expand_vec_shift_expr (exp, target);
8410 return target;
8411 }
8412
bbf6f052 8413 default:
673fda6b
SB
8414 return lang_hooks.expand_expr (exp, original_target, tmode,
8415 modifier, alt_rtl);
bbf6f052
RK
8416 }
8417
c4d70ce3 8418 /* Here to do an ordinary binary operator. */
bbf6f052 8419 binop:
eb698c58
RS
8420 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8421 subtarget, &op0, &op1, 0);
bbf6f052 8422 binop2:
c4d70ce3
PB
8423 this_optab = optab_for_tree_code (code, type);
8424 binop3:
8403445a
AM
8425 if (modifier == EXPAND_STACK_PARM)
8426 target = 0;
bbf6f052
RK
8427 temp = expand_binop (mode, this_optab, op0, op1, target,
8428 unsignedp, OPTAB_LIB_WIDEN);
5b0264cb 8429 gcc_assert (temp);
bc15d0ef
JM
8430 return REDUCE_BIT_FIELD (temp);
8431}
8432#undef REDUCE_BIT_FIELD
8433\f
8434/* Subroutine of above: reduce EXP to the precision of TYPE (in the
8435 signedness of TYPE), possibly returning the result in TARGET. */
8436static rtx
8437reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8438{
8439 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8440 if (target && GET_MODE (target) != GET_MODE (exp))
8441 target = 0;
8442 if (TYPE_UNSIGNED (type))
8443 {
8444 rtx mask;
8445 if (prec < HOST_BITS_PER_WIDE_INT)
8446 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8447 GET_MODE (exp));
8448 else
8449 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8450 ((unsigned HOST_WIDE_INT) 1
8451 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8452 GET_MODE (exp));
8453 return expand_and (GET_MODE (exp), exp, mask, target);
8454 }
8455 else
8456 {
4a90aeeb 8457 tree count = build_int_cst (NULL_TREE,
7d60be94 8458 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
bc15d0ef
JM
8459 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8460 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8461 }
bbf6f052 8462}
b93a436e 8463\f
1ce7f3c2
RK
8464/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8465 when applied to the address of EXP produces an address known to be
8466 aligned more than BIGGEST_ALIGNMENT. */
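/* Such an offset typically comes from alignment code of the form
       offset = (- (some_integer_type) &exp) & (ALIGN - 1)
   i.e. a BIT_AND_EXPR of a NEGATE_EXPR of EXP's address with a mask
   of ALIGN - 1, where ALIGN exceeds BIGGEST_ALIGNMENT.  */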
8467
8468static int
502b8322 8469is_aligning_offset (tree offset, tree exp)
1ce7f3c2 8470{
6fce44af 8471 /* Strip off any conversions. */
1ce7f3c2
RK
8472 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8473 || TREE_CODE (offset) == NOP_EXPR
6fce44af 8474 || TREE_CODE (offset) == CONVERT_EXPR)
1ce7f3c2
RK
8475 offset = TREE_OPERAND (offset, 0);
8476
8477 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8478 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8479 if (TREE_CODE (offset) != BIT_AND_EXPR
8480 || !host_integerp (TREE_OPERAND (offset, 1), 1)
caf93cb0 8481 || compare_tree_int (TREE_OPERAND (offset, 1),
c0cfc691 8482 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
1ce7f3c2
RK
8483 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8484 return 0;
8485
8486 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8487 It must be NEGATE_EXPR. Then strip any more conversions. */
8488 offset = TREE_OPERAND (offset, 0);
8489 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8490 || TREE_CODE (offset) == NOP_EXPR
8491 || TREE_CODE (offset) == CONVERT_EXPR)
8492 offset = TREE_OPERAND (offset, 0);
8493
8494 if (TREE_CODE (offset) != NEGATE_EXPR)
8495 return 0;
8496
8497 offset = TREE_OPERAND (offset, 0);
8498 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8499 || TREE_CODE (offset) == NOP_EXPR
8500 || TREE_CODE (offset) == CONVERT_EXPR)
8501 offset = TREE_OPERAND (offset, 0);
8502
6fce44af
RK
8503 /* This must now be the address of EXP. */
8504 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
1ce7f3c2
RK
8505}
8506\f
e0a2f705 8507/* Return the tree node if an ARG corresponds to a string constant or zero
cc2902df 8508 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
fed3cef0
RK
8509 in bytes within the string that ARG is accessing. The type of the
8510 offset will be `sizetype'. */
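/* E.g. given the argument "hello" + 2 this returns the STRING_CST for
   "hello" and sets *PTR_OFFSET to a sizetype constant 2; VAR_DECLs
   whose DECL_INITIAL is a string literal are handled as well.  */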
b93a436e 8511
28f4ec01 8512tree
502b8322 8513string_constant (tree arg, tree *ptr_offset)
b93a436e 8514{
a45f71f5 8515 tree array, offset;
b93a436e
JL
8516 STRIP_NOPS (arg);
8517
a45f71f5 8518 if (TREE_CODE (arg) == ADDR_EXPR)
b93a436e 8519 {
a45f71f5
JJ
8520 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8521 {
8522 *ptr_offset = size_zero_node;
8523 return TREE_OPERAND (arg, 0);
8524 }
8525 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8526 {
8527 array = TREE_OPERAND (arg, 0);
8528 offset = size_zero_node;
8529 }
8530 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8531 {
8532 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8533 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8534 if (TREE_CODE (array) != STRING_CST
8535 && TREE_CODE (array) != VAR_DECL)
8536 return 0;
8537 }
8538 else
8539 return 0;
6de9cd9a 8540 }
b93a436e
JL
8541 else if (TREE_CODE (arg) == PLUS_EXPR)
8542 {
8543 tree arg0 = TREE_OPERAND (arg, 0);
8544 tree arg1 = TREE_OPERAND (arg, 1);
8545
8546 STRIP_NOPS (arg0);
8547 STRIP_NOPS (arg1);
8548
8549 if (TREE_CODE (arg0) == ADDR_EXPR
a45f71f5
JJ
8550 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8551 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
bbf6f052 8552 {
a45f71f5
JJ
8553 array = TREE_OPERAND (arg0, 0);
8554 offset = arg1;
bbf6f052 8555 }
b93a436e 8556 else if (TREE_CODE (arg1) == ADDR_EXPR
a45f71f5
JJ
8557 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8558 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
bbf6f052 8559 {
a45f71f5
JJ
8560 array = TREE_OPERAND (arg1, 0);
8561 offset = arg0;
bbf6f052 8562 }
a45f71f5
JJ
8563 else
8564 return 0;
8565 }
8566 else
8567 return 0;
8568
8569 if (TREE_CODE (array) == STRING_CST)
8570 {
8571 *ptr_offset = convert (sizetype, offset);
8572 return array;
8573 }
8574 else if (TREE_CODE (array) == VAR_DECL)
8575 {
8576 int length;
8577
8578 /* Variables initialized to string literals can be handled too. */
8579 if (DECL_INITIAL (array) == NULL_TREE
8580 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8581 return 0;
8582
8583 /* Only handle arrays that are read-only, non-volatile and bind locally. */
8584 if (! TREE_READONLY (array)
8585 || TREE_SIDE_EFFECTS (array)
8586 || ! targetm.binds_local_p (array))
8587 return 0;
8588
8589 /* Avoid const char foo[4] = "abcde"; */
8590 if (DECL_SIZE_UNIT (array) == NULL_TREE
8591 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8592 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8593 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8594 return 0;
8595
8596 /* If variable is bigger than the string literal, OFFSET must be constant
8597 and inside of the bounds of the string literal. */
8598 offset = convert (sizetype, offset);
8599 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
8600 && (! host_integerp (offset, 1)
8601 || compare_tree_int (offset, length) >= 0))
8602 return 0;
8603
8604 *ptr_offset = offset;
8605 return DECL_INITIAL (array);
b93a436e 8606 }
ca695ac9 8607
b93a436e
JL
8608 return 0;
8609}
ca695ac9 8610\f
b93a436e
JL
8611/* Generate code to calculate EXP using a store-flag instruction
8612 and return an rtx for the result. EXP is either a comparison
8613 or a TRUTH_NOT_EXPR whose operand is a comparison.
ca695ac9 8614
b93a436e 8615 If TARGET is nonzero, store the result there if convenient.
ca695ac9 8616
cc2902df 8617 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
b93a436e 8618 cheap.
ca695ac9 8619
b93a436e
JL
8620 Return zero if there is no suitable set-flag instruction
8621 available on this machine.
ca695ac9 8622
b93a436e
JL
8623 Once expand_expr has been called on the arguments of the comparison,
8624 we are committed to doing the store flag, since it is not safe to
8625 re-evaluate the expression. We emit the store-flag insn by calling
8626 emit_store_flag, but only expand the arguments if we have a reason
8627 to believe that emit_store_flag will be successful. If we think that
8628 it will, but it isn't, we have to simulate the store-flag with a
8629 set/jump/set sequence. */
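/* The set/jump/set fallback amounts to
       target = 1;
       if (op0 <cond> op1) goto L;
       target = 0;
     L:
   with the two constants swapped when the result must be inverted.  */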
ca695ac9 8630
b93a436e 8631static rtx
502b8322 8632do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
b93a436e
JL
8633{
8634 enum rtx_code code;
8635 tree arg0, arg1, type;
8636 tree tem;
8637 enum machine_mode operand_mode;
8638 int invert = 0;
8639 int unsignedp;
8640 rtx op0, op1;
8641 enum insn_code icode;
8642 rtx subtarget = target;
381127e8 8643 rtx result, label;
ca695ac9 8644
b93a436e
JL
8645 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8646 result at the end. We can't simply invert the test since it would
8647 have already been inverted if it were valid. This case occurs for
8648 some floating-point comparisons. */
ca695ac9 8649
b93a436e
JL
8650 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8651 invert = 1, exp = TREE_OPERAND (exp, 0);
ca695ac9 8652
b93a436e
JL
8653 arg0 = TREE_OPERAND (exp, 0);
8654 arg1 = TREE_OPERAND (exp, 1);
5129d2ce
AH
8655
8656 /* Don't crash if the comparison was erroneous. */
8657 if (arg0 == error_mark_node || arg1 == error_mark_node)
8658 return const0_rtx;
8659
b93a436e
JL
8660 type = TREE_TYPE (arg0);
8661 operand_mode = TYPE_MODE (type);
8df83eae 8662 unsignedp = TYPE_UNSIGNED (type);
ca695ac9 8663
b93a436e
JL
8664 /* We won't bother with BLKmode store-flag operations because it would mean
8665 passing a lot of information to emit_store_flag. */
8666 if (operand_mode == BLKmode)
8667 return 0;
ca695ac9 8668
b93a436e
JL
8669 /* We won't bother with store-flag operations involving function pointers
8670 when function pointers must be canonicalized before comparisons. */
8671#ifdef HAVE_canonicalize_funcptr_for_compare
8672 if (HAVE_canonicalize_funcptr_for_compare
8673 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8674 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8675 == FUNCTION_TYPE))
8676 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8677 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8678 == FUNCTION_TYPE))))
8679 return 0;
ca695ac9
JB
8680#endif
8681
b93a436e
JL
8682 STRIP_NOPS (arg0);
8683 STRIP_NOPS (arg1);
ca695ac9 8684
b93a436e
JL
8685 /* Get the rtx comparison code to use. We know that EXP is a comparison
8686 operation of some type. Some comparisons against 1 and -1 can be
8687 converted to comparisons with zero. Do so here so that the tests
8688 below will be aware that we have a comparison with zero. These
8689 tests will not catch constants in the first operand, but constants
8690 are rarely passed as the first operand. */
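/* E.g. "x < 1" becomes "x <= 0", "x >= 1" becomes "x > 0", and for
   signed operands "x > -1" becomes "x >= 0", so the zero-comparison
   special cases below can apply.  */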
ca695ac9 8691
b93a436e
JL
8692 switch (TREE_CODE (exp))
8693 {
8694 case EQ_EXPR:
8695 code = EQ;
bbf6f052 8696 break;
b93a436e
JL
8697 case NE_EXPR:
8698 code = NE;
bbf6f052 8699 break;
b93a436e
JL
8700 case LT_EXPR:
8701 if (integer_onep (arg1))
8702 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8703 else
8704 code = unsignedp ? LTU : LT;
ca695ac9 8705 break;
b93a436e
JL
8706 case LE_EXPR:
8707 if (! unsignedp && integer_all_onesp (arg1))
8708 arg1 = integer_zero_node, code = LT;
8709 else
8710 code = unsignedp ? LEU : LE;
ca695ac9 8711 break;
b93a436e
JL
8712 case GT_EXPR:
8713 if (! unsignedp && integer_all_onesp (arg1))
8714 arg1 = integer_zero_node, code = GE;
8715 else
8716 code = unsignedp ? GTU : GT;
8717 break;
8718 case GE_EXPR:
8719 if (integer_onep (arg1))
8720 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8721 else
8722 code = unsignedp ? GEU : GE;
ca695ac9 8723 break;
1eb8759b
RH
8724
8725 case UNORDERED_EXPR:
8726 code = UNORDERED;
8727 break;
8728 case ORDERED_EXPR:
8729 code = ORDERED;
8730 break;
8731 case UNLT_EXPR:
8732 code = UNLT;
8733 break;
8734 case UNLE_EXPR:
8735 code = UNLE;
8736 break;
8737 case UNGT_EXPR:
8738 code = UNGT;
8739 break;
8740 case UNGE_EXPR:
8741 code = UNGE;
8742 break;
8743 case UNEQ_EXPR:
8744 code = UNEQ;
8745 break;
d1a7edaf
PB
8746 case LTGT_EXPR:
8747 code = LTGT;
8748 break;
1eb8759b 8749
ca695ac9 8750 default:
5b0264cb 8751 gcc_unreachable ();
bbf6f052 8752 }
bbf6f052 8753
b93a436e
JL
8754 /* Put a constant second. */
8755 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8756 {
8757 tem = arg0; arg0 = arg1; arg1 = tem;
8758 code = swap_condition (code);
ca695ac9 8759 }
bbf6f052 8760
b93a436e
JL
8761 /* If this is an equality or inequality test of a single bit, we can
8762 do this by shifting the bit being tested to the low-order bit and
8763 masking the result with the constant 1. If the condition was EQ,
8764 we xor it with 1. This does not require an scc insn and is faster
7960bf22
JL
8765 than an scc insn even if we have it.
8766
8767 The code to make this transformation was moved into fold_single_bit_test,
8768 so we just call into the folder and expand its result. */
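/* E.g. "(x & 8) != 0" is rewritten by fold_single_bit_test into
   "(x >> 3) & 1", and "(x & 8) == 0" into "((x >> 3) & 1) ^ 1",
   neither of which needs an scc instruction.  */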
d39985fa 8769
b93a436e
JL
8770 if ((code == NE || code == EQ)
8771 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8772 && integer_pow2p (TREE_OPERAND (arg0, 1)))
60cd4dae 8773 {
ae2bcd98 8774 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
60cd4dae 8775 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
450b1728 8776 arg0, arg1, type),
60cd4dae
JL
8777 target, VOIDmode, EXPAND_NORMAL);
8778 }
bbf6f052 8779
b93a436e 8780 /* Now see if we are likely to be able to do this. Return if not. */
1eb8759b 8781 if (! can_compare_p (code, operand_mode, ccp_store_flag))
b93a436e 8782 return 0;
1eb8759b 8783
b93a436e
JL
8784 icode = setcc_gen_code[(int) code];
8785 if (icode == CODE_FOR_nothing
a995e389 8786 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
ca695ac9 8787 {
b93a436e
JL
8788 /* We can only do this if it is one of the special cases that
8789 can be handled without an scc insn. */
8790 if ((code == LT && integer_zerop (arg1))
8791 || (! only_cheap && code == GE && integer_zerop (arg1)))
8792 ;
08fd6d04 8793 else if (! only_cheap && (code == NE || code == EQ)
b93a436e
JL
8794 && TREE_CODE (type) != REAL_TYPE
8795 && ((abs_optab->handlers[(int) operand_mode].insn_code
8796 != CODE_FOR_nothing)
8797 || (ffs_optab->handlers[(int) operand_mode].insn_code
8798 != CODE_FOR_nothing)))
8799 ;
8800 else
8801 return 0;
ca695ac9 8802 }
3a94c984 8803
296b4ed9 8804 if (! get_subtarget (target)
e3be1116 8805 || GET_MODE (subtarget) != operand_mode)
b93a436e
JL
8806 subtarget = 0;
8807
eb698c58 8808 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
b93a436e
JL
8809
8810 if (target == 0)
8811 target = gen_reg_rtx (mode);
8812
ad76cef8 8813 result = emit_store_flag (target, code, op0, op1,
b93a436e 8814 operand_mode, unsignedp, 1);
ca695ac9 8815
b93a436e
JL
8816 if (result)
8817 {
8818 if (invert)
8819 result = expand_binop (mode, xor_optab, result, const1_rtx,
8820 result, 0, OPTAB_LIB_WIDEN);
8821 return result;
ca695ac9 8822 }
bbf6f052 8823
b93a436e 8824 /* If this failed, we have to do this with set/compare/jump/set code. */
f8cfc6aa 8825 if (!REG_P (target)
b93a436e
JL
8826 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8827 target = gen_reg_rtx (GET_MODE (target));
8828
8829 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8830 result = compare_from_rtx (op0, op1, code, unsignedp,
a06ef755 8831 operand_mode, NULL_RTX);
b93a436e
JL
8832 if (GET_CODE (result) == CONST_INT)
8833 return (((result == const0_rtx && ! invert)
8834 || (result != const0_rtx && invert))
8835 ? const0_rtx : const1_rtx);
ca695ac9 8836
8f08e8c0
JL
8837 /* The code of RESULT may not match CODE if compare_from_rtx
8838 decided to swap its operands and reverse the original code.
8839
8840 We know that compare_from_rtx returns either a CONST_INT or
8841 a new comparison code, so it is safe to just extract the
8842 code from RESULT. */
8843 code = GET_CODE (result);
8844
b93a436e 8845 label = gen_label_rtx ();
5b0264cb 8846 gcc_assert (bcc_gen_fctn[(int) code]);
0f41302f 8847
b93a436e
JL
8848 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
8849 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
8850 emit_label (label);
bbf6f052 8851
b93a436e 8852 return target;
ca695ac9 8853}
b93a436e 8854\f
b93a436e 8855
ad82abb8
ZW
8856/* Stubs in case we haven't got a casesi insn. */
8857#ifndef HAVE_casesi
8858# define HAVE_casesi 0
8859# define gen_casesi(a, b, c, d, e) (0)
8860# define CODE_FOR_casesi CODE_FOR_nothing
8861#endif
8862
8863/* If the machine does not have a case insn that compares the bounds,
8864 this means extra overhead for dispatch tables, which raises the
8865 threshold for using them. */
8866#ifndef CASE_VALUES_THRESHOLD
8867#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
8868#endif /* CASE_VALUES_THRESHOLD */
8869
8870unsigned int
502b8322 8871case_values_threshold (void)
ad82abb8
ZW
8872{
8873 return CASE_VALUES_THRESHOLD;
8874}
8875
8876/* Attempt to generate a casesi instruction. Returns 1 if successful,
8877 0 otherwise (i.e. if there is no casesi instruction). */
8878int
502b8322
AJ
8879try_casesi (tree index_type, tree index_expr, tree minval, tree range,
8880 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
ad82abb8
ZW
8881{
8882 enum machine_mode index_mode = SImode;
8883 int index_bits = GET_MODE_BITSIZE (index_mode);
8884 rtx op1, op2, index;
8885 enum machine_mode op_mode;
8886
8887 if (! HAVE_casesi)
8888 return 0;
8889
8890 /* Convert the index to SImode. */
8891 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
8892 {
8893 enum machine_mode omode = TYPE_MODE (index_type);
8894 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
8895
8896 /* We must handle the endpoints in the original mode. */
3244e67d
RS
8897 index_expr = build2 (MINUS_EXPR, index_type,
8898 index_expr, minval);
ad82abb8
ZW
8899 minval = integer_zero_node;
8900 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8901 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
a06ef755 8902 omode, 1, default_label);
ad82abb8
ZW
8903 /* Now we can safely truncate. */
8904 index = convert_to_mode (index_mode, index, 0);
8905 }
8906 else
8907 {
8908 if (TYPE_MODE (index_type) != index_mode)
8909 {
ae2bcd98 8910 index_expr = convert (lang_hooks.types.type_for_size
b0c48229 8911 (index_bits, 0), index_expr);
ad82abb8
ZW
8912 index_type = TREE_TYPE (index_expr);
8913 }
8914
8915 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8916 }
ad76cef8 8917
ad82abb8
ZW
8918 do_pending_stack_adjust ();
8919
8920 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
8921 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
8922 (index, op_mode))
8923 index = copy_to_mode_reg (op_mode, index);
e87b4f3f 8924
ad82abb8
ZW
8925 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
8926
8927 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
8928 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
8df83eae 8929 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
ad82abb8
ZW
8930 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
8931 (op1, op_mode))
8932 op1 = copy_to_mode_reg (op_mode, op1);
8933
8934 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
8935
8936 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
8937 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
8df83eae 8938 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
ad82abb8
ZW
8939 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
8940 (op2, op_mode))
8941 op2 = copy_to_mode_reg (op_mode, op2);
8942
8943 emit_jump_insn (gen_casesi (index, op1, op2,
8944 table_label, default_label));
8945 return 1;
8946}
8947
8948/* Attempt to generate a tablejump instruction; same concept. */
8949#ifndef HAVE_tablejump
8950#define HAVE_tablejump 0
8951#define gen_tablejump(x, y) (0)
8952#endif
8953
8954/* Subroutine of the next function.
8955
8956 INDEX is the value being switched on, with the lowest value
b93a436e
JL
8957 in the table already subtracted.
8958 MODE is its expected mode (needed if INDEX is constant).
8959 RANGE is the length of the jump table.
8960 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
88d3b7f0 8961
b93a436e
JL
8962 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
8963 index value is out of range. */
0f41302f 8964
ad82abb8 8965static void
502b8322
AJ
8966do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
8967 rtx default_label)
ca695ac9 8968{
b3694847 8969 rtx temp, vector;
88d3b7f0 8970
74f6d071
JH
8971 if (INTVAL (range) > cfun->max_jumptable_ents)
8972 cfun->max_jumptable_ents = INTVAL (range);
1877be45 8973
b93a436e
JL
8974 /* Do an unsigned comparison (in the proper mode) between the index
8975 expression and the value which represents the length of the range.
8976 Since we just finished subtracting the lower bound of the range
8977 from the index expression, this comparison allows us to simultaneously
8978 check that the original index expression value is both greater than
8979 or equal to the minimum value of the range and less than or equal to
8980 the maximum value of the range. */
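/* That is, with the lower bound already subtracted, the single test
       (unsigned) (index - min) > (unsigned) (max - min)
   branches to DEFAULT_LABEL both when the original index was below the
   minimum (the subtraction wrapped around) and when it was above the
   maximum.  */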
709f5be1 8981
c5d5d461 8982 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
a06ef755 8983 default_label);
bbf6f052 8984
b93a436e
JL
8985 /* If index is in range, it must fit in Pmode.
8986 Convert to Pmode so we can index with it. */
8987 if (mode != Pmode)
8988 index = convert_to_mode (Pmode, index, 1);
bbf6f052 8989
ba228239 8990 /* Don't let a MEM slip through, because then INDEX that comes
b93a436e
JL
8991 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
8992 and break_out_memory_refs will go to work on it and mess it up. */
8993#ifdef PIC_CASE_VECTOR_ADDRESS
f8cfc6aa 8994 if (flag_pic && !REG_P (index))
b93a436e
JL
8995 index = copy_to_mode_reg (Pmode, index);
8996#endif
ca695ac9 8997
b93a436e
JL
8998 /* If flag_force_addr were to affect this address
8999 it could interfere with the tricky assumptions made
9000 about addresses that contain label-refs,
9001 which may be valid only very near the tablejump itself. */
9002 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9003 GET_MODE_SIZE, because this indicates how large insns are. The other
9004 uses should all be Pmode, because they are addresses. This code
9005 could fail if addresses and insns are not the same size. */
9006 index = gen_rtx_PLUS (Pmode,
9007 gen_rtx_MULT (Pmode, index,
9008 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9009 gen_rtx_LABEL_REF (Pmode, table_label));
9010#ifdef PIC_CASE_VECTOR_ADDRESS
9011 if (flag_pic)
9012 index = PIC_CASE_VECTOR_ADDRESS (index);
9013 else
bbf6f052 9014#endif
b93a436e
JL
9015 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9016 temp = gen_reg_rtx (CASE_VECTOR_MODE);
542a8afa 9017 vector = gen_const_mem (CASE_VECTOR_MODE, index);
b93a436e
JL
9018 convert_move (temp, vector, 0);
9019
9020 emit_jump_insn (gen_tablejump (temp, table_label));
9021
9022 /* If we are generating PIC code or if the table is PC-relative, the
9023 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9024 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9025 emit_barrier ();
bbf6f052 9026}
b93a436e 9027
ad82abb8 9028int
502b8322
AJ
9029try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9030 rtx table_label, rtx default_label)
ad82abb8
ZW
9031{
9032 rtx index;
9033
9034 if (! HAVE_tablejump)
9035 return 0;
9036
4845b383
KH
9037 index_expr = fold_build2 (MINUS_EXPR, index_type,
9038 convert (index_type, index_expr),
9039 convert (index_type, minval));
ad82abb8 9040 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
ad82abb8
ZW
9041 do_pending_stack_adjust ();
9042
9043 do_tablejump (index, TYPE_MODE (index_type),
9044 convert_modes (TYPE_MODE (index_type),
9045 TYPE_MODE (TREE_TYPE (range)),
9046 expand_expr (range, NULL_RTX,
9047 VOIDmode, 0),
8df83eae 9048 TYPE_UNSIGNED (TREE_TYPE (range))),
ad82abb8
ZW
9049 table_label, default_label);
9050 return 1;
9051}
e2500fed 9052
cb2a532e
AH
9053/* Nonzero if the mode is a valid vector mode for this architecture.
9054 This returns nonzero even if there is no hardware support for the
9055 vector mode, but we can emulate with narrower modes. */
9056
9057int
502b8322 9058vector_mode_valid_p (enum machine_mode mode)
cb2a532e
AH
9059{
9060 enum mode_class class = GET_MODE_CLASS (mode);
9061 enum machine_mode innermode;
9062
9063 /* Doh! What's going on? */
9064 if (class != MODE_VECTOR_INT
9065 && class != MODE_VECTOR_FLOAT)
9066 return 0;
9067
9068 /* Hardware support. Woo hoo! */
f676971a 9069 if (targetm.vector_mode_supported_p (mode))
cb2a532e
AH
9070 return 1;
9071
9072 innermode = GET_MODE_INNER (mode);
9073
9074 /* We should probably return 1 if requesting V4DI and we have no DI,
9075 but do have V2DI; however, this case is probably very unlikely. */
9076
9077 /* If we have support for the inner mode, we can safely emulate it.
9078 We may not have V2DI, but we can emulate with a pair of DIs. */
6dd53648 9079 return targetm.scalar_mode_supported_p (innermode);
cb2a532e
AH
9080}
9081
d744e06e
AH
9082/* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9083static rtx
502b8322 9084const_vector_from_tree (tree exp)
d744e06e
AH
9085{
9086 rtvec v;
9087 int units, i;
9088 tree link, elt;
9089 enum machine_mode inner, mode;
9090
9091 mode = TYPE_MODE (TREE_TYPE (exp));
9092
6de9cd9a 9093 if (initializer_zerop (exp))
d744e06e
AH
9094 return CONST0_RTX (mode);
9095
9096 units = GET_MODE_NUNITS (mode);
9097 inner = GET_MODE_INNER (mode);
9098
9099 v = rtvec_alloc (units);
9100
9101 link = TREE_VECTOR_CST_ELTS (exp);
9102 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9103 {
9104 elt = TREE_VALUE (link);
9105
9106 if (TREE_CODE (elt) == REAL_CST)
9107 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9108 inner);
9109 else
9110 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9111 TREE_INT_CST_HIGH (elt),
9112 inner);
9113 }
9114
5f6c070d
AH
9115 /* Initialize remaining elements to 0. */
9116 for (; i < units; ++i)
9117 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9118
a73b091d 9119 return gen_rtx_CONST_VECTOR (mode, v);
d744e06e 9120}
e2500fed 9121#include "gt-expr.h"