/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "intl.h"
#include "tm_p.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Hook called by safe_from_p for language-specific tree codes.  It is
   up to the language front-end to install a hook if it has any such
   codes that safe_from_p needs to know about.  Since safe_from_p will
   recursively explore the TREE_OPERANDs of an expression, this hook
   should not reexamine those pieces.  This routine may recursively
   call safe_from_p; it should always pass `0' as the TOP_P
   parameter.  */
int (*lang_safe_from_p) PARAMS ((rtx, tree));

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Don't check memory usage, since code is being emitted to check memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the store (or
   clear) to be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};

extern struct obstack permanent_obstack;

static rtx get_push_address	PARAMS ((int));

static rtx enqueue_insn PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
				      struct move_by_pieces *));
static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
				      enum machine_mode));
static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
				     unsigned int));
static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
				       unsigned int));
static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
				       enum machine_mode,
				       struct store_by_pieces *));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, unsigned int, int,
					     int));
static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
				       HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int,
					 unsigned int, HOST_WIDE_INT, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static tree save_noncopied_parts PARAMS ((tree, tree));
static tree init_noncopied_parts PARAMS ((tree, tree));
static int fixed_type_p		PARAMS ((tree));
static rtx var_rtx		PARAMS ((tree));
static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
static rtx expand_increment	PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
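
/* Editorial worked example (not part of the original sources): assuming
   MOVE_MAX == 4, no target-specific MOVE_RATIO override, and sufficient
   alignment, a 32-byte copy costs move_by_pieces_ninsns (32, align) == 8
   word moves, so MOVE_BY_PIECES_P is true only when MOVE_RATIO exceeds 8;
   that holds for the default of 15 but not for the -Os value of 3, where
   a movstr pattern or library call is used instead.  */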

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;

  start_sequence ();

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}

void
free_expr_status (f)
     struct function *f;
{
  free (f->expr);
  f->expr = NULL;
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
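
/* Editorial usage sketch (not part of the original file; VAR and AMOUNT
   are hypothetical rtxen) showing how the queue primitives above fit
   together when expanding a post-increment:

     rtx q = enqueue_insn (var, gen_add2_insn (var, amount));
     rtx old = protect_from_queue (q, 0);
     ...
     emit_queue ();

   OLD ends up as a pseudo holding VAR's pre-increment value whether the
   increment was still pending or had already been emitted, so it is
   safe to put into an insn either way.  */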

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = gen_rtx_SUBREG (to_mode, from, 0);
      else
	to = gen_rtx_SUBREG (from_mode, to, 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
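
/* Editorial usage sketch (not part of the original sources): a typical
   caller widens a narrow value with an explicit signedness, e.g.

     rtx narrow = gen_reg_rtx (QImode);
     rtx wide = gen_reg_rtx (SImode);
     ...
     convert_move (wide, narrow, 1);

   which zero-extends NARROW into WIDE using a direct extend insn, an
   intermediate mode, or the shift sequence above, whichever the target
   supports.  */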

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (trunc_int_for_mode (val, mode));
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
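
/* Editorial note (not part of the original sources): convert_to_mode is
   the value-returning counterpart of convert_move, e.g.

     rtx wide = convert_to_mode (SImode, x, unsignedp);

   The result may share rtl with X (a gen_lowpart of it, or X itself when
   the modes already match), so a caller that needs a fresh register must
   copy the result explicitly.  */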

/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.

   When TO is NULL, emit_single_push_insn is used to push FROM
   onto the stack.

   ALIGN is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
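
/* Editorial usage sketch (DST and SRC are hypothetical BLKmode MEMs
   already passed through protect_from_queue):

     move_by_pieces (dst, src, 16, 32);

   copies 16 bytes assuming at most 32-bit alignment, using the widest
   integer modes whose mov pattern and alignment requirements allow.  */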

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
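
/* Editorial worked example: assuming MOVE_MAX == 4 and 32-bit alignment,
   move_by_pieces_ninsns (11, 32) counts 2 SImode moves (8 bytes), then
   1 HImode move (2 bytes), then 1 QImode move (1 byte): 4 insns for the
   11 bytes.  */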

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    {
	      to1 = replace_equiv_address (data->to, data->to_addr);
	      to1 = adjust_address (to1, mode, 0);
	    }
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	{
	  from1 = replace_equiv_address (data->from, data->from_addr);
	  from1 = adjust_address (from1, mode, 0);
	}
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	emit_single_push_insn (mode, from1, NULL);

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
1616\f
1617/* Emit code to move a block Y to a block X.
1618 This may be done with string-move instructions,
1619 with multiple scalar move instructions, or with a library call.
1620
1621 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1622 with mode BLKmode.
1623 SIZE is an rtx that says how long they are.
19caa751 1624 ALIGN is the maximum alignment we can assume they have.
bbf6f052 1625
e9a25f70
JL
1626 Return the address of the new block, if memcpy is called and returns it,
1627 0 otherwise. */
1628
1629rtx
bbf6f052
RK
1630emit_block_move (x, y, size, align)
1631 rtx x, y;
1632 rtx size;
729a2125 1633 unsigned int align;
bbf6f052 1634{
e9a25f70 1635 rtx retval = 0;
52cf7115
JL
1636#ifdef TARGET_MEM_FUNCTIONS
1637 static tree fn;
1638 tree call_expr, arg_list;
1639#endif
e9a25f70 1640
bbf6f052
RK
1641 if (GET_MODE (x) != BLKmode)
1642 abort ();
1643
1644 if (GET_MODE (y) != BLKmode)
1645 abort ();
1646
1647 x = protect_from_queue (x, 1);
1648 y = protect_from_queue (y, 0);
5d901c31 1649 size = protect_from_queue (size, 0);
bbf6f052
RK
1650
1651 if (GET_CODE (x) != MEM)
1652 abort ();
1653 if (GET_CODE (y) != MEM)
1654 abort ();
1655 if (size == 0)
1656 abort ();
1657
fbe1758d 1658 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
bbf6f052
RK
1659 move_by_pieces (x, y, INTVAL (size), align);
1660 else
1661 {
1662 /* Try the most limited insn first, because there's no point
1663 including more than one in the machine description unless
1664 the more limited one has some advantage. */
266007a7 1665
19caa751 1666 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
266007a7
RK
1667 enum machine_mode mode;
1668
3ef1eef4
RK
1669 /* Since this is a move insn, we don't care about volatility. */
1670 volatile_ok = 1;
1671
266007a7
RK
1672 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1673 mode = GET_MODE_WIDER_MODE (mode))
bbf6f052 1674 {
266007a7 1675 enum insn_code code = movstr_optab[(int) mode];
a995e389 1676 insn_operand_predicate_fn pred;
266007a7
RK
1677
1678 if (code != CODE_FOR_nothing
803090c4
RK
1679 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1680 here because if SIZE is less than the mode mask, as it is
8008b228 1681 returned by the macro, it will definitely be less than the
803090c4 1682 actual mode mask. */
8ca00751
RK
1683 && ((GET_CODE (size) == CONST_INT
1684 && ((unsigned HOST_WIDE_INT) INTVAL (size)
e5e809f4 1685 <= (GET_MODE_MASK (mode) >> 1)))
8ca00751 1686 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
a995e389
RH
1687 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1688 || (*pred) (x, BLKmode))
1689 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1690 || (*pred) (y, BLKmode))
1691 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1692 || (*pred) (opalign, VOIDmode)))
bbf6f052 1693 {
1ba1e2a8 1694 rtx op2;
266007a7
RK
1695 rtx last = get_last_insn ();
1696 rtx pat;
1697
1ba1e2a8 1698 op2 = convert_to_mode (mode, size, 1);
a995e389
RH
1699 pred = insn_data[(int) code].operand[2].predicate;
1700 if (pred != 0 && ! (*pred) (op2, mode))
266007a7
RK
1701 op2 = copy_to_mode_reg (mode, op2);
1702
1703 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1704 if (pat)
1705 {
1706 emit_insn (pat);
3ef1eef4 1707 volatile_ok = 0;
e9a25f70 1708 return 0;
266007a7
RK
1709 }
1710 else
1711 delete_insns_since (last);
bbf6f052
RK
1712 }
1713 }
bbf6f052 1714
3ef1eef4
RK
1715 volatile_ok = 0;
1716
4bc973ae
JL
1717 /* X, Y, or SIZE may have been passed through protect_from_queue.
1718
1719 It is unsafe to save the value generated by protect_from_queue
1720 and reuse it later. Consider what happens if emit_queue is
1721 called before the return value from protect_from_queue is used.
1722
1723 Expansion of the CALL_EXPR below will call emit_queue before
1724 we are finished emitting RTL for argument setup. So if we are
1725 not careful we could get the wrong value for an argument.
1726
1727 To avoid this problem we go ahead and emit code to copy X, Y &
1728 SIZE into new pseudos. We can then place those new pseudos
1729 into an RTL_EXPR and use them later, even after a call to
3a94c984 1730 emit_queue.
4bc973ae
JL
1731
1732 Note this is not strictly needed for library calls since they
1733 do not call emit_queue before loading their arguments. However,
1734 we may need to have library calls call emit_queue in the future
1735 since failing to do so could cause problems for targets which
1736 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1737 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1738 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1739
1740#ifdef TARGET_MEM_FUNCTIONS
1741 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1742#else
1743 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1744 TREE_UNSIGNED (integer_type_node));
f3dc586a 1745 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
4bc973ae
JL
1746#endif
1747
bbf6f052 1748#ifdef TARGET_MEM_FUNCTIONS
52cf7115
JL
1749 /* It is incorrect to use the libcall calling conventions to call
1750 memcpy in this context.
1751
1752 This could be a user call to memcpy and the user may wish to
1753 examine the return value from memcpy.
1754
1755 For targets where libcalls and normal calls have different conventions
3a94c984 1756 for returning pointers, we could end up generating incorrect code.
52cf7115
JL
1757
1758 So instead of using a libcall sequence we build up a suitable
1759 CALL_EXPR and expand the call in the normal fashion. */
1760 if (fn == NULL_TREE)
1761 {
1762 tree fntype;
1763
 1764	  /* This was copied from except.c; I don't know whether all of this is
 1765	     necessary in this context. */
1766 fn = get_identifier ("memcpy");
52cf7115
JL
1767 fntype = build_pointer_type (void_type_node);
1768 fntype = build_function_type (fntype, NULL_TREE);
1769 fn = build_decl (FUNCTION_DECL, fn, fntype);
3a94c984 1770 ggc_add_tree_root (&fn, 1);
52cf7115
JL
1771 DECL_EXTERNAL (fn) = 1;
1772 TREE_PUBLIC (fn) = 1;
1773 DECL_ARTIFICIAL (fn) = 1;
0d97bf4c 1774 TREE_NOTHROW (fn) = 1;
6496a589 1775 make_decl_rtl (fn, NULL);
52cf7115 1776 assemble_external (fn);
52cf7115
JL
1777 }
1778
3a94c984 1779 /* We need to make an argument list for the function call.
52cf7115
JL
1780
 1781	     memcpy has three arguments: the first two are void * addresses, and
 1782	     the last is a size_t byte count for the copy. */
1783 arg_list
1784 = build_tree_list (NULL_TREE,
4bc973ae 1785 make_tree (build_pointer_type (void_type_node), x));
52cf7115
JL
1786 TREE_CHAIN (arg_list)
1787 = build_tree_list (NULL_TREE,
4bc973ae 1788 make_tree (build_pointer_type (void_type_node), y));
52cf7115
JL
1789 TREE_CHAIN (TREE_CHAIN (arg_list))
1790 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1791 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1792
1793 /* Now we have to build up the CALL_EXPR itself. */
1794 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1795 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1796 call_expr, arg_list, NULL_TREE);
1797 TREE_SIDE_EFFECTS (call_expr) = 1;
1798
1799 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
bbf6f052 1800#else
ebb1b59a 1801 emit_library_call (bcopy_libfunc, LCT_NORMAL,
fe7bbd2a 1802 VOIDmode, 3, y, Pmode, x, Pmode,
3b6f75e2
JW
1803 convert_to_mode (TYPE_MODE (integer_type_node), size,
1804 TREE_UNSIGNED (integer_type_node)),
1805 TYPE_MODE (integer_type_node));
bbf6f052
RK
1806#endif
1807 }
e9a25f70
JL
1808
1809 return retval;
bbf6f052
RK
1810}
1811\f
1812/* Copy all or part of a value X into registers starting at REGNO.
1813 The number of registers to be filled is NREGS. */
1814
1815void
1816move_block_to_reg (regno, x, nregs, mode)
1817 int regno;
1818 rtx x;
1819 int nregs;
1820 enum machine_mode mode;
1821{
1822 int i;
381127e8 1823#ifdef HAVE_load_multiple
3a94c984 1824 rtx pat;
381127e8
RL
1825 rtx last;
1826#endif
bbf6f052 1827
72bb9717
RK
1828 if (nregs == 0)
1829 return;
1830
bbf6f052
RK
1831 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1832 x = validize_mem (force_const_mem (mode, x));
1833
1834 /* See if the machine can do this with a load multiple insn. */
1835#ifdef HAVE_load_multiple
c3a02afe 1836 if (HAVE_load_multiple)
bbf6f052 1837 {
c3a02afe 1838 last = get_last_insn ();
38a448ca 1839 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
c3a02afe
RK
1840 GEN_INT (nregs));
1841 if (pat)
1842 {
1843 emit_insn (pat);
1844 return;
1845 }
1846 else
1847 delete_insns_since (last);
bbf6f052 1848 }
bbf6f052
RK
1849#endif
1850
1851 for (i = 0; i < nregs; i++)
38a448ca 1852 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
bbf6f052
RK
1853 operand_subword_force (x, i, mode));
1854}
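/* A minimal usage sketch (hypothetical, not part of the original source):
   loading the first two words of an incoming BLKmode argument into
   consecutive hard registers.  The register number 4 and the helper name
   are invented for illustration.  */
#if 0
static void
example_load_block_arg (parm_mem)
     rtx parm_mem;			/* a BLKmode MEM */
{
  /* Fill hard regs 4 and 5 from the first two words of PARM_MEM.  */
  move_block_to_reg (4, parm_mem, 2, BLKmode);
}
#endif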
1855
1856/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
0040593d
JW
1857 The number of registers to be filled is NREGS. SIZE indicates the number
1858 of bytes in the object X. */
1859
bbf6f052 1860void
0040593d 1861move_block_from_reg (regno, x, nregs, size)
bbf6f052
RK
1862 int regno;
1863 rtx x;
1864 int nregs;
0040593d 1865 int size;
bbf6f052
RK
1866{
1867 int i;
381127e8 1868#ifdef HAVE_store_multiple
3a94c984 1869 rtx pat;
381127e8
RL
1870 rtx last;
1871#endif
58a32c5c 1872 enum machine_mode mode;
bbf6f052 1873
2954d7db
RK
1874 if (nregs == 0)
1875 return;
1876
58a32c5c
DE
1877 /* If SIZE is that of a mode no bigger than a word, just use that
1878 mode's store operation. */
1879 if (size <= UNITS_PER_WORD
1880 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1881 {
792760b9 1882 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
58a32c5c
DE
1883 return;
1884 }
3a94c984 1885
0040593d 1886 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
58a32c5c
DE
1887 to the left before storing to memory. Note that the previous test
1888 doesn't handle all cases (e.g. SIZE == 3). */
0040593d
JW
1889 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1890 {
1891 rtx tem = operand_subword (x, 0, 1, BLKmode);
1892 rtx shift;
1893
1894 if (tem == 0)
1895 abort ();
1896
1897 shift = expand_shift (LSHIFT_EXPR, word_mode,
38a448ca 1898 gen_rtx_REG (word_mode, regno),
0040593d
JW
1899 build_int_2 ((UNITS_PER_WORD - size)
1900 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1901 emit_move_insn (tem, shift);
1902 return;
1903 }
1904
bbf6f052
RK
1905 /* See if the machine can do this with a store multiple insn. */
1906#ifdef HAVE_store_multiple
c3a02afe 1907 if (HAVE_store_multiple)
bbf6f052 1908 {
c3a02afe 1909 last = get_last_insn ();
38a448ca 1910 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
c3a02afe
RK
1911 GEN_INT (nregs));
1912 if (pat)
1913 {
1914 emit_insn (pat);
1915 return;
1916 }
1917 else
1918 delete_insns_since (last);
bbf6f052 1919 }
bbf6f052
RK
1920#endif
1921
1922 for (i = 0; i < nregs; i++)
1923 {
1924 rtx tem = operand_subword (x, i, 1, BLKmode);
1925
1926 if (tem == 0)
1927 abort ();
1928
38a448ca 1929 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
bbf6f052
RK
1930 }
1931}
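/* A mirror-image sketch (hypothetical, names invented): spilling a 6-byte
   BLKmode value that arrived in two hard registers into a stack slot.
   SIZE == 6 is chosen deliberately: it is not a multiple of
   UNITS_PER_WORD, so on a big-endian target it exercises the
   left-justification shift above.  */
#if 0
static void
example_spill_block_value (slot)
     rtx slot;				/* a BLKmode MEM */
{
  move_block_from_reg (4, slot, 2, 6);
}
#endif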
1932
aac5cc16
RH
1933/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1934 registers represented by a PARALLEL. SSIZE represents the total size of
1935 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1936 SRC in bits. */
 1937/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1938 the balance will be in what would be the low-order memory addresses, i.e.
1939 left justified for big endian, right justified for little endian. This
1940 happens to be true for the targets currently using this support. If this
1941 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1942 would be needed. */
fffa9c1d
JW
1943
1944void
aac5cc16
RH
1945emit_group_load (dst, orig_src, ssize, align)
1946 rtx dst, orig_src;
729a2125
RK
1947 unsigned int align;
1948 int ssize;
fffa9c1d 1949{
aac5cc16
RH
1950 rtx *tmps, src;
1951 int start, i;
fffa9c1d 1952
aac5cc16 1953 if (GET_CODE (dst) != PARALLEL)
fffa9c1d
JW
1954 abort ();
1955
1956 /* Check for a NULL entry, used to indicate that the parameter goes
1957 both on the stack and in registers. */
aac5cc16
RH
1958 if (XEXP (XVECEXP (dst, 0, 0), 0))
1959 start = 0;
fffa9c1d 1960 else
aac5cc16
RH
1961 start = 1;
1962
3a94c984 1963 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
aac5cc16 1964
aac5cc16
RH
1965 /* Process the pieces. */
1966 for (i = start; i < XVECLEN (dst, 0); i++)
1967 {
1968 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
770ae6cc
RK
1969 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1970 unsigned int bytelen = GET_MODE_SIZE (mode);
aac5cc16
RH
1971 int shift = 0;
1972
1973 /* Handle trailing fragments that run over the size of the struct. */
8752c357 1974 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
aac5cc16
RH
1975 {
1976 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1977 bytelen = ssize - bytepos;
1978 if (bytelen <= 0)
729a2125 1979 abort ();
aac5cc16
RH
1980 }
1981
f3ce87a9
DE
1982 /* If we won't be loading directly from memory, protect the real source
1983 from strange tricks we might play; but make sure that the source can
1984 be loaded directly into the destination. */
1985 src = orig_src;
1986 if (GET_CODE (orig_src) != MEM
1987 && (!CONSTANT_P (orig_src)
1988 || (GET_MODE (orig_src) != mode
1989 && GET_MODE (orig_src) != VOIDmode)))
1990 {
1991 if (GET_MODE (orig_src) == VOIDmode)
1992 src = gen_reg_rtx (mode);
1993 else
1994 src = gen_reg_rtx (GET_MODE (orig_src));
1995 emit_move_insn (src, orig_src);
1996 }
1997
aac5cc16
RH
1998 /* Optimize the access just a bit. */
1999 if (GET_CODE (src) == MEM
19caa751 2000 && align >= GET_MODE_ALIGNMENT (mode)
729a2125 2001 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16
RH
2002 && bytelen == GET_MODE_SIZE (mode))
2003 {
2004 tmps[i] = gen_reg_rtx (mode);
f4ef873c 2005 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
fffa9c1d 2006 }
7c4a6db0
JW
2007 else if (GET_CODE (src) == CONCAT)
2008 {
2009 if (bytepos == 0
2010 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2011 tmps[i] = XEXP (src, 0);
8752c357 2012 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
7c4a6db0
JW
2013 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2014 tmps[i] = XEXP (src, 1);
2015 else
2016 abort ();
2017 }
f3ce87a9 2018 else if (CONSTANT_P (src)
2ee5437b
RH
2019 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2020 tmps[i] = src;
fffa9c1d 2021 else
19caa751
RK
2022 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2023 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2024 mode, mode, align, ssize);
fffa9c1d 2025
aac5cc16 2026 if (BYTES_BIG_ENDIAN && shift)
19caa751
RK
2027 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2028 tmps[i], 0, OPTAB_WIDEN);
fffa9c1d 2029 }
19caa751 2030
3a94c984 2031 emit_queue ();
aac5cc16
RH
2032
2033 /* Copy the extracted pieces into the proper (probable) hard regs. */
2034 for (i = start; i < XVECLEN (dst, 0); i++)
2035 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
fffa9c1d
JW
2036}
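/* A usage sketch (hypothetical, register numbers invented): building the
   kind of PARALLEL that FUNCTION_ARG produces on Irix 6-like ABIs -- two
   DImode registers holding bytes 0-7 and 8-15 of a value -- and loading
   it from a 16-byte MEM whose alignment is known to be 64 bits.  */
#if 0
static void
example_group_load (src)
     rtx src;				/* a BLKmode MEM, 16 bytes */
{
  rtx dst
    = gen_rtx_PARALLEL
      (BLKmode,
       gen_rtvec (2,
		  gen_rtx_EXPR_LIST (VOIDmode,
				     gen_rtx_REG (DImode, 4), const0_rtx),
		  gen_rtx_EXPR_LIST (VOIDmode,
				     gen_rtx_REG (DImode, 5), GEN_INT (8))));

  emit_group_load (dst, src, 16, 64);
}
#endif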
2037
aac5cc16
RH
2038/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2039 registers represented by a PARALLEL. SSIZE represents the total size of
 2040	   block DST, or -1 if not known.  ALIGN is the known alignment of DST
 2041	   in bits.  */
fffa9c1d
JW
2041
2042void
aac5cc16
RH
2043emit_group_store (orig_dst, src, ssize, align)
2044 rtx orig_dst, src;
729a2125
RK
2045 int ssize;
2046 unsigned int align;
fffa9c1d 2047{
aac5cc16
RH
2048 rtx *tmps, dst;
2049 int start, i;
fffa9c1d 2050
aac5cc16 2051 if (GET_CODE (src) != PARALLEL)
fffa9c1d
JW
2052 abort ();
2053
2054 /* Check for a NULL entry, used to indicate that the parameter goes
2055 both on the stack and in registers. */
aac5cc16
RH
2056 if (XEXP (XVECEXP (src, 0, 0), 0))
2057 start = 0;
fffa9c1d 2058 else
aac5cc16
RH
2059 start = 1;
2060
3a94c984 2061 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
fffa9c1d 2062
aac5cc16
RH
2063 /* Copy the (probable) hard regs into pseudos. */
2064 for (i = start; i < XVECLEN (src, 0); i++)
fffa9c1d 2065 {
aac5cc16
RH
2066 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2067 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2068 emit_move_insn (tmps[i], reg);
2069 }
3a94c984 2070 emit_queue ();
fffa9c1d 2071
aac5cc16
RH
2072 /* If we won't be storing directly into memory, protect the real destination
2073 from strange tricks we might play. */
2074 dst = orig_dst;
10a9f2be
JW
2075 if (GET_CODE (dst) == PARALLEL)
2076 {
2077 rtx temp;
2078
2079 /* We can get a PARALLEL dst if there is a conditional expression in
2080 a return statement. In that case, the dst and src are the same,
2081 so no action is necessary. */
2082 if (rtx_equal_p (dst, src))
2083 return;
2084
2085 /* It is unclear if we can ever reach here, but we may as well handle
2086 it. Allocate a temporary, and split this into a store/load to/from
2087 the temporary. */
2088
2089 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2090 emit_group_store (temp, src, ssize, align);
2091 emit_group_load (dst, temp, ssize, align);
2092 return;
2093 }
2094 else if (GET_CODE (dst) != MEM)
aac5cc16
RH
2095 {
2096 dst = gen_reg_rtx (GET_MODE (orig_dst));
2097 /* Make life a bit easier for combine. */
2098 emit_move_insn (dst, const0_rtx);
2099 }
aac5cc16
RH
2100
2101 /* Process the pieces. */
2102 for (i = start; i < XVECLEN (src, 0); i++)
2103 {
770ae6cc 2104 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
aac5cc16 2105 enum machine_mode mode = GET_MODE (tmps[i]);
770ae6cc 2106 unsigned int bytelen = GET_MODE_SIZE (mode);
aac5cc16
RH
2107
2108 /* Handle trailing fragments that run over the size of the struct. */
8752c357 2109 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
71bc0330 2110 {
aac5cc16
RH
2111 if (BYTES_BIG_ENDIAN)
2112 {
2113 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2114 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2115 tmps[i], 0, OPTAB_WIDEN);
2116 }
2117 bytelen = ssize - bytepos;
71bc0330 2118 }
fffa9c1d 2119
aac5cc16
RH
2120 /* Optimize the access just a bit. */
2121 if (GET_CODE (dst) == MEM
19caa751 2122 && align >= GET_MODE_ALIGNMENT (mode)
729a2125 2123 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16 2124 && bytelen == GET_MODE_SIZE (mode))
f4ef873c 2125 emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
aac5cc16 2126 else
729a2125 2127 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
f4ef873c 2128 mode, tmps[i], align, ssize);
fffa9c1d 2129 }
729a2125 2130
3a94c984 2131 emit_queue ();
aac5cc16
RH
2132
2133 /* Copy from the pseudo into the (probable) hard reg. */
2134 if (GET_CODE (dst) == REG)
2135 emit_move_insn (orig_dst, dst);
fffa9c1d
JW
2136}
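/* The inverse sketch (hypothetical): scattering a PARALLEL value, for
   instance a multi-register return value, into a BLKmode MEM.  The
   PARALLEL would be built as in the emit_group_load example above.  */
#if 0
static void
example_group_store (dst, src_parallel)
     rtx dst;				/* a BLKmode MEM, 16 bytes */
     rtx src_parallel;			/* a PARALLEL of (reg, offset) pairs */
{
  emit_group_store (dst, src_parallel, 16, 64);
}
#endif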
2137
c36fce9a
GRK
2138/* Generate code to copy a BLKmode object of TYPE out of a
2139 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2140 is null, a stack temporary is created. TGTBLK is returned.
2141
2142 The primary purpose of this routine is to handle functions
2143 that return BLKmode structures in registers. Some machines
2144 (the PA for example) want to return all small structures
3a94c984 2145 in registers regardless of the structure's alignment. */
c36fce9a
GRK
2146
2147rtx
19caa751 2148copy_blkmode_from_reg (tgtblk, srcreg, type)
c36fce9a
GRK
2149 rtx tgtblk;
2150 rtx srcreg;
2151 tree type;
2152{
19caa751
RK
2153 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2154 rtx src = NULL, dst = NULL;
2155 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2156 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2157
2158 if (tgtblk == 0)
2159 {
1da68f56
RK
2160 tgtblk = assign_temp (build_qualified_type (type,
2161 (TYPE_QUALS (type)
2162 | TYPE_QUAL_CONST)),
2163 0, 1, 1);
19caa751
RK
2164 preserve_temp_slots (tgtblk);
2165 }
3a94c984 2166
19caa751
RK
2167 /* This code assumes srcreg is at least a full word. If it isn't,
2168 copy it into a new pseudo which is a full word. */
2169 if (GET_MODE (srcreg) != BLKmode
2170 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2171 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2172
2173 /* Structures whose size is not a multiple of a word are aligned
2174 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2175 machine, this means we must skip the empty high order bytes when
2176 calculating the bit offset. */
2177 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2178 big_endian_correction
2179 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2180
 2181	  /* Copy the structure BITSIZE bits at a time.
3a94c984 2182
19caa751
RK
2183 We could probably emit more efficient code for machines which do not use
2184 strict alignment, but it doesn't seem worth the effort at the current
2185 time. */
2186 for (bitpos = 0, xbitpos = big_endian_correction;
2187 bitpos < bytes * BITS_PER_UNIT;
2188 bitpos += bitsize, xbitpos += bitsize)
2189 {
3a94c984 2190 /* We need a new source operand each time xbitpos is on a
19caa751
RK
2191 word boundary and when xbitpos == big_endian_correction
2192 (the first time through). */
2193 if (xbitpos % BITS_PER_WORD == 0
2194 || xbitpos == big_endian_correction)
b47f8cfc
JH
2195 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2196 GET_MODE (srcreg));
19caa751
RK
2197
2198 /* We need a new destination operand each time bitpos is on
2199 a word boundary. */
2200 if (bitpos % BITS_PER_WORD == 0)
2201 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
3a94c984 2202
19caa751
RK
2203 /* Use xbitpos for the source extraction (right justified) and
 2204	 bitpos for the destination store (left justified).  */
2205 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2206 extract_bit_field (src, bitsize,
2207 xbitpos % BITS_PER_WORD, 1,
2208 NULL_RTX, word_mode, word_mode,
2209 bitsize, BITS_PER_WORD),
2210 bitsize, BITS_PER_WORD);
2211 }
2212
2213 return tgtblk;
c36fce9a
GRK
2214}
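/* A usage sketch (hypothetical, names invented): copying a small struct
   return value out of its return register.  Passing 0 for TGTBLK makes
   the routine allocate a suitably aligned stack temporary itself, per
   the comment above.  */
#if 0
static rtx
example_copy_struct_return (return_reg, type)
     rtx return_reg;			/* where the ABI returned the value */
     tree type;				/* the struct type, e.g. 6 bytes */
{
  return copy_blkmode_from_reg (0, return_reg, type);
}
#endif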
2215
94b25f81
RK
2216/* Add a USE expression for REG to the (possibly empty) list pointed
2217 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
2218
2219void
b3f8cf4a
RK
2220use_reg (call_fusage, reg)
2221 rtx *call_fusage, reg;
2222{
0304dfbb
DE
2223 if (GET_CODE (reg) != REG
2224 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3a94c984 2225 abort ();
b3f8cf4a
RK
2226
2227 *call_fusage
38a448ca
RH
2228 = gen_rtx_EXPR_LIST (VOIDmode,
2229 gen_rtx_USE (VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
2230}
2231
94b25f81
RK
2232/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2233 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
2234
2235void
0304dfbb
DE
2236use_regs (call_fusage, regno, nregs)
2237 rtx *call_fusage;
bbf6f052
RK
2238 int regno;
2239 int nregs;
2240{
0304dfbb 2241 int i;
bbf6f052 2242
0304dfbb
DE
2243 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2244 abort ();
2245
2246 for (i = 0; i < nregs; i++)
38a448ca 2247 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
bbf6f052 2248}
fffa9c1d
JW
2249
2250/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2251 PARALLEL REGS. This is for calls that pass values in multiple
2252 non-contiguous locations. The Irix 6 ABI has examples of this. */
2253
2254void
2255use_group_regs (call_fusage, regs)
2256 rtx *call_fusage;
2257 rtx regs;
2258{
2259 int i;
2260
6bd35f86
DE
2261 for (i = 0; i < XVECLEN (regs, 0); i++)
2262 {
2263 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 2264
6bd35f86
DE
2265 /* A NULL entry means the parameter goes both on the stack and in
2266 registers. This can also be a MEM for targets that pass values
2267 partially on the stack and partially in registers. */
e9a25f70 2268 if (reg != 0 && GET_CODE (reg) == REG)
6bd35f86
DE
2269 use_reg (call_fusage, reg);
2270 }
fffa9c1d 2271}
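/* A usage sketch (hypothetical, register numbers invented): recording
   argument registers in the list that becomes CALL_INSN_FUNCTION_USAGE
   while expanding a call, so that data-flow analysis keeps hard regs 4
   and 5 live up to the call.  */
#if 0
static void
example_note_arg_regs (call_fusage)
     rtx *call_fusage;
{
  use_regs (call_fusage, 4, 2);
}
#endif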
bbf6f052 2272\f
57814e5e
JJ
2273
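/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call;
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */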
2274int
2275can_store_by_pieces (len, constfun, constfundata, align)
2276 unsigned HOST_WIDE_INT len;
2277 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2278 PTR constfundata;
2279 unsigned int align;
2280{
98166639 2281 unsigned HOST_WIDE_INT max_size, l;
57814e5e
JJ
2282 HOST_WIDE_INT offset = 0;
2283 enum machine_mode mode, tmode;
2284 enum insn_code icode;
2285 int reverse;
2286 rtx cst;
2287
2288 if (! MOVE_BY_PIECES_P (len, align))
2289 return 0;
2290
2291 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2292 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2293 align = MOVE_MAX * BITS_PER_UNIT;
2294
2295 /* We would first store what we can in the largest integer mode, then go to
2296 successively smaller modes. */
2297
2298 for (reverse = 0;
2299 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2300 reverse++)
2301 {
2302 l = len;
2303 mode = VOIDmode;
98166639 2304 max_size = MOVE_MAX_PIECES + 1;
57814e5e
JJ
2305 while (max_size > 1)
2306 {
2307 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2308 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2309 if (GET_MODE_SIZE (tmode) < max_size)
2310 mode = tmode;
2311
2312 if (mode == VOIDmode)
2313 break;
2314
2315 icode = mov_optab->handlers[(int) mode].insn_code;
2316 if (icode != CODE_FOR_nothing
2317 && align >= GET_MODE_ALIGNMENT (mode))
2318 {
2319 unsigned int size = GET_MODE_SIZE (mode);
2320
2321 while (l >= size)
2322 {
2323 if (reverse)
2324 offset -= size;
2325
2326 cst = (*constfun) (constfundata, offset, mode);
2327 if (!LEGITIMATE_CONSTANT_P (cst))
2328 return 0;
2329
2330 if (!reverse)
2331 offset += size;
2332
2333 l -= size;
2334 }
2335 }
2336
2337 max_size = GET_MODE_SIZE (mode);
2338 }
2339
2340 /* The code above should have handled everything. */
2341 if (l != 0)
2342 abort ();
2343 }
2344
2345 return 1;
2346}
2347
2348/* Generate several move instructions to store LEN bytes generated by
2349 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2350 pointer which will be passed as argument in every CONSTFUN call.
2351 ALIGN is maximum alignment we can assume. */
2352
2353void
2354store_by_pieces (to, len, constfun, constfundata, align)
2355 rtx to;
2356 unsigned HOST_WIDE_INT len;
2357 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2358 PTR constfundata;
2359 unsigned int align;
2360{
2361 struct store_by_pieces data;
2362
2363 if (! MOVE_BY_PIECES_P (len, align))
2364 abort ();
2365 to = protect_from_queue (to, 1);
2366 data.constfun = constfun;
2367 data.constfundata = constfundata;
2368 data.len = len;
2369 data.to = to;
2370 store_by_pieces_1 (&data, align);
2371}
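/* A worked sketch (hypothetical, helper names invented): expanding a
   fixed-size memset-like store through the constfun interface.
   example_constfun synthesizes, for whatever mode store_by_pieces picks,
   a CONST_INT all of whose bytes are 0x2a; it assumes the mode is no
   wider than a HOST_WIDE_INT, which holds for the integer modes
   MOVE_MAX_PIECES allows on typical hosts.  */
#if 0
static rtx
example_constfun (data, offset, mode)
     PTR data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  unsigned int i;
  unsigned HOST_WIDE_INT c = 0;

  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    c = (c << 8) | 0x2a;

  return GEN_INT (trunc_int_for_mode (c, mode));
}

static void
example_fill_with_star (to, len, align)
     rtx to;				/* a BLKmode MEM */
     unsigned HOST_WIDE_INT len;
     unsigned int align;		/* in bits */
{
  if (can_store_by_pieces (len, example_constfun, NULL, align))
    store_by_pieces (to, len, example_constfun, NULL, align);
}
#endif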
2372
19caa751
RK
2373/* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2374 rtx with BLKmode). The caller must pass TO through protect_from_queue
2375 before calling. ALIGN is maximum alignment we can assume. */
9de08200
RK
2376
2377static void
2378clear_by_pieces (to, len, align)
2379 rtx to;
3bdf5ad1 2380 unsigned HOST_WIDE_INT len;
729a2125 2381 unsigned int align;
9de08200 2382{
57814e5e
JJ
2383 struct store_by_pieces data;
2384
2385 data.constfun = clear_by_pieces_1;
df4ae160 2386 data.constfundata = NULL;
57814e5e
JJ
2387 data.len = len;
2388 data.to = to;
2389 store_by_pieces_1 (&data, align);
2390}
2391
2392/* Callback routine for clear_by_pieces.
2393 Return const0_rtx unconditionally. */
2394
2395static rtx
2396clear_by_pieces_1 (data, offset, mode)
2397 PTR data ATTRIBUTE_UNUSED;
2398 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2399 enum machine_mode mode ATTRIBUTE_UNUSED;
2400{
2401 return const0_rtx;
2402}
2403
2404/* Subroutine of clear_by_pieces and store_by_pieces.
2405 Generate several move instructions to store LEN bytes of block TO. (A MEM
2406 rtx with BLKmode). The caller must pass TO through protect_from_queue
2407 before calling. ALIGN is maximum alignment we can assume. */
2408
2409static void
2410store_by_pieces_1 (data, align)
2411 struct store_by_pieces *data;
2412 unsigned int align;
2413{
2414 rtx to_addr = XEXP (data->to, 0);
3bdf5ad1 2415 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
fbe1758d
AM
2416 enum machine_mode mode = VOIDmode, tmode;
2417 enum insn_code icode;
9de08200 2418
57814e5e
JJ
2419 data->offset = 0;
2420 data->to_addr = to_addr;
2421 data->autinc_to
9de08200
RK
2422 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2423 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2424
57814e5e
JJ
2425 data->explicit_inc_to = 0;
2426 data->reverse
9de08200 2427 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
57814e5e
JJ
2428 if (data->reverse)
2429 data->offset = data->len;
9de08200 2430
57814e5e 2431 /* If storing requires more than two move insns,
9de08200
RK
2432 copy addresses to registers (to make displacements shorter)
2433 and use post-increment if available. */
57814e5e
JJ
2434 if (!data->autinc_to
2435 && move_by_pieces_ninsns (data->len, align) > 2)
9de08200 2436 {
3a94c984 2437 /* Determine the main mode we'll be using. */
fbe1758d
AM
2438 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2439 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2440 if (GET_MODE_SIZE (tmode) < max_size)
2441 mode = tmode;
2442
57814e5e 2443 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
9de08200 2444 {
57814e5e
JJ
2445 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2446 data->autinc_to = 1;
2447 data->explicit_inc_to = -1;
9de08200 2448 }
3bdf5ad1 2449
57814e5e
JJ
2450 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2451 && ! data->autinc_to)
9de08200 2452 {
57814e5e
JJ
2453 data->to_addr = copy_addr_to_reg (to_addr);
2454 data->autinc_to = 1;
2455 data->explicit_inc_to = 1;
9de08200 2456 }
3bdf5ad1 2457
57814e5e
JJ
 2458	  if (!data->autinc_to && CONSTANT_P (to_addr))
2459 data->to_addr = copy_addr_to_reg (to_addr);
9de08200
RK
2460 }
2461
e1565e65 2462 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
19caa751 2463 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
bdb429a5 2464 align = MOVE_MAX * BITS_PER_UNIT;
9de08200 2465
57814e5e 2466 /* First store what we can in the largest integer mode, then go to
9de08200
RK
2467 successively smaller modes. */
2468
2469 while (max_size > 1)
2470 {
9de08200
RK
2471 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2472 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2473 if (GET_MODE_SIZE (tmode) < max_size)
2474 mode = tmode;
2475
2476 if (mode == VOIDmode)
2477 break;
2478
2479 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 2480 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
57814e5e 2481 store_by_pieces_2 (GEN_FCN (icode), mode, data);
9de08200
RK
2482
2483 max_size = GET_MODE_SIZE (mode);
2484 }
2485
2486 /* The code above should have handled everything. */
57814e5e 2487 if (data->len != 0)
9de08200
RK
2488 abort ();
2489}
2490
57814e5e 2491/* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
9de08200
RK
2492 with move instructions for mode MODE. GENFUN is the gen_... function
2493 to make a move insn for that mode. DATA has all the other info. */
2494
2495static void
57814e5e 2496store_by_pieces_2 (genfun, mode, data)
711d877c 2497 rtx (*genfun) PARAMS ((rtx, ...));
9de08200 2498 enum machine_mode mode;
57814e5e 2499 struct store_by_pieces *data;
9de08200 2500{
3bdf5ad1 2501 unsigned int size = GET_MODE_SIZE (mode);
57814e5e 2502 rtx to1, cst;
9de08200
RK
2503
2504 while (data->len >= size)
2505 {
3bdf5ad1
RK
2506 if (data->reverse)
2507 data->offset -= size;
9de08200 2508
3bdf5ad1
RK
2509 if (data->autinc_to)
2510 {
f1ec5147
RK
2511 to1 = replace_equiv_address (data->to, data->to_addr);
2512 to1 = adjust_address (to1, mode, 0);
3bdf5ad1 2513 }
3a94c984 2514 else
f4ef873c 2515 to1 = adjust_address (data->to, mode, data->offset);
9de08200 2516
940da324 2517 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
57814e5e
JJ
2518 emit_insn (gen_add2_insn (data->to_addr,
2519 GEN_INT (-(HOST_WIDE_INT) size)));
9de08200 2520
57814e5e
JJ
2521 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2522 emit_insn ((*genfun) (to1, cst));
3bdf5ad1 2523
940da324 2524 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
9de08200 2525 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
9de08200 2526
3bdf5ad1
RK
2527 if (! data->reverse)
2528 data->offset += size;
9de08200
RK
2529
2530 data->len -= size;
2531 }
2532}
2533\f
19caa751
RK
2534/* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
 2535	   its length in bytes and ALIGN is the maximum alignment we can assume
 2536	   it has.
bbf6f052 2536
e9a25f70
JL
2537 If we call a function that returns the length of the block, return it. */
2538
2539rtx
9de08200 2540clear_storage (object, size, align)
bbf6f052 2541 rtx object;
4c08eef0 2542 rtx size;
729a2125 2543 unsigned int align;
bbf6f052 2544{
52cf7115
JL
2545#ifdef TARGET_MEM_FUNCTIONS
2546 static tree fn;
2547 tree call_expr, arg_list;
2548#endif
e9a25f70
JL
2549 rtx retval = 0;
2550
fcf1b822
RK
2551 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2552 just move a zero. Otherwise, do this a piece at a time. */
2553 if (GET_MODE (object) != BLKmode
2554 && GET_CODE (size) == CONST_INT
8752c357 2555 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
fcf1b822
RK
2556 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2557 else
bbf6f052 2558 {
9de08200
RK
2559 object = protect_from_queue (object, 1);
2560 size = protect_from_queue (size, 0);
2561
2562 if (GET_CODE (size) == CONST_INT
fbe1758d 2563 && MOVE_BY_PIECES_P (INTVAL (size), align))
9de08200 2564 clear_by_pieces (object, INTVAL (size), align);
9de08200
RK
2565 else
2566 {
2567 /* Try the most limited insn first, because there's no point
2568 including more than one in the machine description unless
2569 the more limited one has some advantage. */
2570
19caa751 2571 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
9de08200
RK
2572 enum machine_mode mode;
2573
2574 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2575 mode = GET_MODE_WIDER_MODE (mode))
2576 {
2577 enum insn_code code = clrstr_optab[(int) mode];
a995e389 2578 insn_operand_predicate_fn pred;
9de08200
RK
2579
2580 if (code != CODE_FOR_nothing
2581 /* We don't need MODE to be narrower than
2582 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2583 the mode mask, as it is returned by the macro, it will
2584 definitely be less than the actual mode mask. */
2585 && ((GET_CODE (size) == CONST_INT
2586 && ((unsigned HOST_WIDE_INT) INTVAL (size)
e5e809f4 2587 <= (GET_MODE_MASK (mode) >> 1)))
9de08200 2588 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
a995e389
RH
2589 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2590 || (*pred) (object, BLKmode))
2591 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2592 || (*pred) (opalign, VOIDmode)))
9de08200
RK
2593 {
2594 rtx op1;
2595 rtx last = get_last_insn ();
2596 rtx pat;
2597
2598 op1 = convert_to_mode (mode, size, 1);
a995e389
RH
2599 pred = insn_data[(int) code].operand[1].predicate;
2600 if (pred != 0 && ! (*pred) (op1, mode))
9de08200
RK
2601 op1 = copy_to_mode_reg (mode, op1);
2602
2603 pat = GEN_FCN ((int) code) (object, op1, opalign);
2604 if (pat)
2605 {
2606 emit_insn (pat);
e9a25f70 2607 return 0;
9de08200
RK
2608 }
2609 else
2610 delete_insns_since (last);
2611 }
2612 }
2613
4bc973ae 2614 /* OBJECT or SIZE may have been passed through protect_from_queue.
9de08200 2615
4bc973ae
JL
2616 It is unsafe to save the value generated by protect_from_queue
2617 and reuse it later. Consider what happens if emit_queue is
2618 called before the return value from protect_from_queue is used.
52cf7115 2619
4bc973ae
JL
2620 Expansion of the CALL_EXPR below will call emit_queue before
2621 we are finished emitting RTL for argument setup. So if we are
2622 not careful we could get the wrong value for an argument.
52cf7115 2623
4bc973ae
JL
2624 To avoid this problem we go ahead and emit code to copy OBJECT
2625 and SIZE into new pseudos. We can then place those new pseudos
2626 into an RTL_EXPR and use them later, even after a call to
2627 emit_queue.
52cf7115 2628
4bc973ae
JL
2629 Note this is not strictly needed for library calls since they
2630 do not call emit_queue before loading their arguments. However,
2631 we may need to have library calls call emit_queue in the future
2632 since failing to do so could cause problems for targets which
2633 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2634 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
52cf7115 2635
4bc973ae
JL
2636#ifdef TARGET_MEM_FUNCTIONS
2637 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2638#else
2639 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2640 TREE_UNSIGNED (integer_type_node));
f3dc586a 2641 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
4bc973ae 2642#endif
52cf7115 2643
4bc973ae
JL
2644#ifdef TARGET_MEM_FUNCTIONS
2645 /* It is incorrect to use the libcall calling conventions to call
2646 memset in this context.
52cf7115 2647
4bc973ae
JL
2648 This could be a user call to memset and the user may wish to
2649 examine the return value from memset.
52cf7115 2650
4bc973ae
JL
2651 For targets where libcalls and normal calls have different
2652 conventions for returning pointers, we could end up generating
0d97bf4c 2653 incorrect code.
4bc973ae
JL
2654
2655 So instead of using a libcall sequence we build up a suitable
2656 CALL_EXPR and expand the call in the normal fashion. */
2657 if (fn == NULL_TREE)
2658 {
2659 tree fntype;
2660
 2661	      /* This was copied from except.c; I don't know whether all of this
 2662		 is necessary in this context. */
2663 fn = get_identifier ("memset");
4bc973ae
JL
2664 fntype = build_pointer_type (void_type_node);
2665 fntype = build_function_type (fntype, NULL_TREE);
2666 fn = build_decl (FUNCTION_DECL, fn, fntype);
d7db6646 2667 ggc_add_tree_root (&fn, 1);
4bc973ae
JL
2668 DECL_EXTERNAL (fn) = 1;
2669 TREE_PUBLIC (fn) = 1;
2670 DECL_ARTIFICIAL (fn) = 1;
0d97bf4c 2671 TREE_NOTHROW (fn) = 1;
6496a589 2672 make_decl_rtl (fn, NULL);
4bc973ae 2673 assemble_external (fn);
4bc973ae
JL
2674 }
2675
3a94c984 2676 /* We need to make an argument list for the function call.
4bc973ae
JL
2677
 2678	     memset has three arguments: the first is a void * address, the
 2679	     second an integer with the initialization value, and the last is a
 2680	     size_t byte count for the copy. */
2681 arg_list
2682 = build_tree_list (NULL_TREE,
2683 make_tree (build_pointer_type (void_type_node),
2684 object));
2685 TREE_CHAIN (arg_list)
2686 = build_tree_list (NULL_TREE,
3a94c984 2687 make_tree (integer_type_node, const0_rtx));
4bc973ae
JL
2688 TREE_CHAIN (TREE_CHAIN (arg_list))
2689 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2690 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2691
2692 /* Now we have to build up the CALL_EXPR itself. */
2693 call_expr = build1 (ADDR_EXPR,
2694 build_pointer_type (TREE_TYPE (fn)), fn);
2695 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2696 call_expr, arg_list, NULL_TREE);
2697 TREE_SIDE_EFFECTS (call_expr) = 1;
2698
2699 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
bbf6f052 2700#else
ebb1b59a 2701 emit_library_call (bzero_libfunc, LCT_NORMAL,
fe7bbd2a 2702 VOIDmode, 2, object, Pmode, size,
9de08200 2703 TYPE_MODE (integer_type_node));
bbf6f052 2704#endif
9de08200 2705 }
bbf6f052 2706 }
e9a25f70
JL
2707
2708 return retval;
bbf6f052
RK
2709}
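/* A usage sketch (hypothetical): zeroing a 32-byte BLKmode stack
   temporary that is known to be word-aligned.  Note that ALIGN is
   expressed in bits here.  */
#if 0
static void
example_zero_temp ()
{
  rtx slot = assign_stack_temp (BLKmode, 32, 0);

  clear_storage (slot, GEN_INT (32), BITS_PER_WORD);
}
#endif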
2710
2711/* Generate code to copy Y into X.
2712 Both Y and X must have the same mode, except that
2713 Y can be a constant with VOIDmode.
2714 This mode cannot be BLKmode; use emit_block_move for that.
2715
2716 Return the last instruction emitted. */
2717
2718rtx
2719emit_move_insn (x, y)
2720 rtx x, y;
2721{
2722 enum machine_mode mode = GET_MODE (x);
de1b33dd
AO
2723 rtx y_cst = NULL_RTX;
2724 rtx last_insn;
bbf6f052
RK
2725
2726 x = protect_from_queue (x, 1);
2727 y = protect_from_queue (y, 0);
2728
2729 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2730 abort ();
2731
ee5332b8
RH
2732 /* Never force constant_p_rtx to memory. */
2733 if (GET_CODE (y) == CONSTANT_P_RTX)
2734 ;
2735 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
de1b33dd
AO
2736 {
2737 y_cst = y;
2738 y = force_const_mem (mode, y);
2739 }
bbf6f052
RK
2740
2741 /* If X or Y are memory references, verify that their addresses are valid
2742 for the machine. */
2743 if (GET_CODE (x) == MEM
2744 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2745 && ! push_operand (x, GET_MODE (x)))
2746 || (flag_force_addr
2747 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
792760b9 2748 x = validize_mem (x);
bbf6f052
RK
2749
2750 if (GET_CODE (y) == MEM
2751 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2752 || (flag_force_addr
2753 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
792760b9 2754 y = validize_mem (y);
bbf6f052
RK
2755
2756 if (mode == BLKmode)
2757 abort ();
2758
de1b33dd
AO
2759 last_insn = emit_move_insn_1 (x, y);
2760
2761 if (y_cst && GET_CODE (x) == REG)
2762 REG_NOTES (last_insn)
2763 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
2764
2765 return last_insn;
261c4230
RS
2766}
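/* A usage sketch (hypothetical, name invented): the standard way to
   materialize a constant in a fresh SImode pseudo.  If the constant is
   not LEGITIMATE_CONSTANT_P, it is forced into the constant pool above,
   and a REG_EQUAL note is added so later passes still see the value.  */
#if 0
static rtx
example_load_constant (value)
     HOST_WIDE_INT value;
{
  rtx reg = gen_reg_rtx (SImode);

  emit_move_insn (reg, GEN_INT (trunc_int_for_mode (value, SImode)));
  return reg;
}
#endif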
2767
2768/* Low level part of emit_move_insn.
2769 Called just like emit_move_insn, but assumes X and Y
2770 are basically valid. */
2771
2772rtx
2773emit_move_insn_1 (x, y)
2774 rtx x, y;
2775{
2776 enum machine_mode mode = GET_MODE (x);
2777 enum machine_mode submode;
2778 enum mode_class class = GET_MODE_CLASS (mode);
770ae6cc 2779 unsigned int i;
261c4230 2780
dbbbbf3b 2781 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3a94c984 2782 abort ();
76bbe028 2783
bbf6f052
RK
2784 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2785 return
2786 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2787
89742723 2788 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 2789 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
d0c76654
RK
2790 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2791 * BITS_PER_UNIT),
2792 (class == MODE_COMPLEX_INT
2793 ? MODE_INT : MODE_FLOAT),
2794 0))
7308a047
RS
2795 && (mov_optab->handlers[(int) submode].insn_code
2796 != CODE_FOR_nothing))
2797 {
2798 /* Don't split destination if it is a stack push. */
2799 int stack = push_operand (x, GET_MODE (x));
7308a047 2800
79ce92d7 2801#ifdef PUSH_ROUNDING
1a06f5fe
JH
 2802	      /* In case we output to the stack, but the size is smaller than the
 2803		 machine can push exactly, we need to use move instructions.  */
2804 if (stack
2805 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2806 {
2807 rtx temp;
2808 int offset1, offset2;
2809
2810 /* Do not use anti_adjust_stack, since we don't want to update
2811 stack_pointer_delta. */
2812 temp = expand_binop (Pmode,
2813#ifdef STACK_GROWS_DOWNWARD
2814 sub_optab,
2815#else
2816 add_optab,
2817#endif
2818 stack_pointer_rtx,
2819 GEN_INT
2820 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2821 stack_pointer_rtx,
2822 0,
2823 OPTAB_LIB_WIDEN);
2824 if (temp != stack_pointer_rtx)
2825 emit_move_insn (stack_pointer_rtx, temp);
2826#ifdef STACK_GROWS_DOWNWARD
2827 offset1 = 0;
2828 offset2 = GET_MODE_SIZE (submode);
2829#else
2830 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2831 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2832 + GET_MODE_SIZE (submode));
2833#endif
2834 emit_move_insn (change_address (x, submode,
2835 gen_rtx_PLUS (Pmode,
2836 stack_pointer_rtx,
2837 GEN_INT (offset1))),
2838 gen_realpart (submode, y));
2839 emit_move_insn (change_address (x, submode,
2840 gen_rtx_PLUS (Pmode,
2841 stack_pointer_rtx,
2842 GEN_INT (offset2))),
2843 gen_imagpart (submode, y));
2844 }
e9c0bd54 2845 else
79ce92d7 2846#endif
7308a047
RS
2847 /* If this is a stack, push the highpart first, so it
2848 will be in the argument order.
2849
2850 In that case, change_address is used only to convert
2851 the mode, not to change the address. */
e9c0bd54 2852 if (stack)
c937357e 2853 {
e33c0d66
RS
2854 /* Note that the real part always precedes the imag part in memory
2855 regardless of machine's endianness. */
c937357e
RS
2856#ifdef STACK_GROWS_DOWNWARD
2857 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3bdf5ad1 2858 (gen_rtx_MEM (submode, XEXP (x, 0)),
e33c0d66 2859 gen_imagpart (submode, y)));
c937357e 2860 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3bdf5ad1 2861 (gen_rtx_MEM (submode, XEXP (x, 0)),
e33c0d66 2862 gen_realpart (submode, y)));
c937357e
RS
2863#else
2864 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3bdf5ad1 2865 (gen_rtx_MEM (submode, XEXP (x, 0)),
e33c0d66 2866 gen_realpart (submode, y)));
c937357e 2867 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3bdf5ad1 2868 (gen_rtx_MEM (submode, XEXP (x, 0)),
e33c0d66 2869 gen_imagpart (submode, y)));
c937357e
RS
2870#endif
2871 }
2872 else
2873 {
235ae7be
DM
2874 rtx realpart_x, realpart_y;
2875 rtx imagpart_x, imagpart_y;
2876
405f63da
MM
2877 /* If this is a complex value with each part being smaller than a
2878 word, the usual calling sequence will likely pack the pieces into
2879 a single register. Unfortunately, SUBREG of hard registers only
2880 deals in terms of words, so we have a problem converting input
2881 arguments to the CONCAT of two registers that is used elsewhere
2882 for complex values. If this is before reload, we can copy it into
2883 memory and reload. FIXME, we should see about using extract and
2884 insert on integer registers, but complex short and complex char
2885 variables should be rarely used. */
3a94c984 2886 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
405f63da
MM
2887 && (reload_in_progress | reload_completed) == 0)
2888 {
2889 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2890 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2891
2892 if (packed_dest_p || packed_src_p)
2893 {
2894 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2895 ? MODE_FLOAT : MODE_INT);
2896
1da68f56
RK
2897 enum machine_mode reg_mode
2898 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
405f63da
MM
2899
2900 if (reg_mode != BLKmode)
2901 {
2902 rtx mem = assign_stack_temp (reg_mode,
2903 GET_MODE_SIZE (mode), 0);
f4ef873c 2904 rtx cmem = adjust_address (mem, mode, 0);
405f63da 2905
1da68f56
RK
2906 cfun->cannot_inline
2907 = N_("function using short complex types cannot be inline");
405f63da
MM
2908
2909 if (packed_dest_p)
2910 {
2911 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2912 emit_move_insn_1 (cmem, y);
2913 return emit_move_insn_1 (sreg, mem);
2914 }
2915 else
2916 {
2917 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2918 emit_move_insn_1 (mem, sreg);
2919 return emit_move_insn_1 (x, cmem);
2920 }
2921 }
2922 }
2923 }
2924
235ae7be
DM
2925 realpart_x = gen_realpart (submode, x);
2926 realpart_y = gen_realpart (submode, y);
2927 imagpart_x = gen_imagpart (submode, x);
2928 imagpart_y = gen_imagpart (submode, y);
2929
2930 /* Show the output dies here. This is necessary for SUBREGs
2931 of pseudos since we cannot track their lifetimes correctly;
c14c6529
RH
2932 hard regs shouldn't appear here except as return values.
2933 We never want to emit such a clobber after reload. */
2934 if (x != y
235ae7be
DM
2935 && ! (reload_in_progress || reload_completed)
2936 && (GET_CODE (realpart_x) == SUBREG
2937 || GET_CODE (imagpart_x) == SUBREG))
b2e7e6fb 2938 {
c14c6529 2939 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
b2e7e6fb 2940 }
2638126a 2941
c937357e 2942 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
235ae7be 2943 (realpart_x, realpart_y));
c937357e 2944 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
235ae7be 2945 (imagpart_x, imagpart_y));
c937357e 2946 }
7308a047 2947
7a1ab50a 2948 return get_last_insn ();
7308a047
RS
2949 }
2950
bbf6f052
RK
2951 /* This will handle any multi-word mode that lacks a move_insn pattern.
2952 However, you will get better code if you define such patterns,
2953 even if they must turn into multiple assembler instructions. */
a4320483 2954 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
bbf6f052
RK
2955 {
2956 rtx last_insn = 0;
3ef1eef4 2957 rtx seq, inner;
235ae7be 2958 int need_clobber;
3a94c984 2959
a98c9f1a
RK
2960#ifdef PUSH_ROUNDING
2961
2962 /* If X is a push on the stack, do the push now and replace
2963 X with a reference to the stack pointer. */
2964 if (push_operand (x, GET_MODE (x)))
2965 {
918a6124
GK
2966 rtx temp;
2967 enum rtx_code code;
2968
2969 /* Do not use anti_adjust_stack, since we don't want to update
2970 stack_pointer_delta. */
2971 temp = expand_binop (Pmode,
2972#ifdef STACK_GROWS_DOWNWARD
2973 sub_optab,
2974#else
2975 add_optab,
2976#endif
2977 stack_pointer_rtx,
2978 GEN_INT
2979 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2980 stack_pointer_rtx,
2981 0,
2982 OPTAB_LIB_WIDEN);
2983 if (temp != stack_pointer_rtx)
2984 emit_move_insn (stack_pointer_rtx, temp);
2985
2986 code = GET_CODE (XEXP (x, 0));
2987 /* Just hope that small offsets off SP are OK. */
2988 if (code == POST_INC)
2989 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2990 GEN_INT (-(HOST_WIDE_INT)
2991 GET_MODE_SIZE (GET_MODE (x))));
2992 else if (code == POST_DEC)
2993 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2994 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2995 else
2996 temp = stack_pointer_rtx;
2997
2998 x = change_address (x, VOIDmode, temp);
a98c9f1a
RK
2999 }
3000#endif
3a94c984 3001
3ef1eef4
RK
3002 /* If we are in reload, see if either operand is a MEM whose address
3003 is scheduled for replacement. */
3004 if (reload_in_progress && GET_CODE (x) == MEM
3005 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
f1ec5147 3006 x = replace_equiv_address_nv (x, inner);
3ef1eef4
RK
3007 if (reload_in_progress && GET_CODE (y) == MEM
3008 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
f1ec5147 3009 y = replace_equiv_address_nv (y, inner);
3ef1eef4 3010
235ae7be 3011 start_sequence ();
15a7a8ec 3012
235ae7be 3013 need_clobber = 0;
bbf6f052 3014 for (i = 0;
3a94c984 3015 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
bbf6f052
RK
3016 i++)
3017 {
3018 rtx xpart = operand_subword (x, i, 1, mode);
3019 rtx ypart = operand_subword (y, i, 1, mode);
3020
3021 /* If we can't get a part of Y, put Y into memory if it is a
3022 constant. Otherwise, force it into a register. If we still
3023 can't get a part of Y, abort. */
3024 if (ypart == 0 && CONSTANT_P (y))
3025 {
3026 y = force_const_mem (mode, y);
3027 ypart = operand_subword (y, i, 1, mode);
3028 }
3029 else if (ypart == 0)
3030 ypart = operand_subword_force (y, i, mode);
3031
3032 if (xpart == 0 || ypart == 0)
3033 abort ();
3034
235ae7be
DM
3035 need_clobber |= (GET_CODE (xpart) == SUBREG);
3036
bbf6f052
RK
3037 last_insn = emit_move_insn (xpart, ypart);
3038 }
6551fa4d 3039
235ae7be
DM
3040 seq = gen_sequence ();
3041 end_sequence ();
3042
3043 /* Show the output dies here. This is necessary for SUBREGs
3044 of pseudos since we cannot track their lifetimes correctly;
3045 hard regs shouldn't appear here except as return values.
3046 We never want to emit such a clobber after reload. */
3047 if (x != y
3048 && ! (reload_in_progress || reload_completed)
3049 && need_clobber != 0)
3050 {
3051 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3052 }
3053
3054 emit_insn (seq);
3055
bbf6f052
RK
3056 return last_insn;
3057 }
3058 else
3059 abort ();
3060}
3061\f
3062/* Pushing data onto the stack. */
3063
3064/* Push a block of length SIZE (perhaps variable)
3065 and return an rtx to address the beginning of the block.
3066 Note that it is not possible for the value returned to be a QUEUED.
3067 The value may be virtual_outgoing_args_rtx.
3068
3069 EXTRA is the number of bytes of padding to push in addition to SIZE.
3070 BELOW nonzero means this padding comes at low addresses;
3071 otherwise, the padding comes at high addresses. */
3072
3073rtx
3074push_block (size, extra, below)
3075 rtx size;
3076 int extra, below;
3077{
3078 register rtx temp;
88f63c77
RK
3079
3080 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
3081 if (CONSTANT_P (size))
3082 anti_adjust_stack (plus_constant (size, extra));
3083 else if (GET_CODE (size) == REG && extra == 0)
3084 anti_adjust_stack (size);
3085 else
3086 {
ce48579b 3087 temp = copy_to_mode_reg (Pmode, size);
bbf6f052 3088 if (extra != 0)
906c4e36 3089 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
3090 temp, 0, OPTAB_LIB_WIDEN);
3091 anti_adjust_stack (temp);
3092 }
3093
f73ad30e
JH
3094#ifndef STACK_GROWS_DOWNWARD
3095#ifdef ARGS_GROW_DOWNWARD
3096 if (!ACCUMULATE_OUTGOING_ARGS)
bbf6f052 3097#else
f73ad30e
JH
3098 if (0)
3099#endif
3100#else
3101 if (1)
bbf6f052 3102#endif
f73ad30e 3103 {
f73ad30e
JH
3104 /* Return the lowest stack address when STACK or ARGS grow downward and
 3105	 we are not accumulating outgoing arguments (the c4x port uses such
3106 conventions). */
3107 temp = virtual_outgoing_args_rtx;
3108 if (extra != 0 && below)
3109 temp = plus_constant (temp, extra);
3110 }
3111 else
3112 {
3113 if (GET_CODE (size) == CONST_INT)
3114 temp = plus_constant (virtual_outgoing_args_rtx,
3a94c984 3115 -INTVAL (size) - (below ? 0 : extra));
f73ad30e
JH
3116 else if (extra != 0 && !below)
3117 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3bdf5ad1 3118 negate_rtx (Pmode, plus_constant (size, extra)));
f73ad30e
JH
3119 else
3120 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3121 negate_rtx (Pmode, size));
3122 }
bbf6f052
RK
3123
3124 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3125}
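/* A usage sketch (hypothetical, name invented): reserving a
   variable-sized block of outgoing argument space with 4 bytes of
   padding at low addresses (BELOW nonzero), in preparation for a block
   copy into the returned address.  */
#if 0
static rtx
example_reserve_arg_block (size)
     rtx size;				/* byte count, possibly non-constant */
{
  return push_block (size, 4, 1);
}
#endif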
3126
bbf6f052 3127
921b3427
RK
 3128/* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3129 block of SIZE bytes. */
3130
3131static rtx
3132get_push_address (size)
3a94c984 3133 int size;
921b3427
RK
3134{
3135 register rtx temp;
3136
3137 if (STACK_PUSH_CODE == POST_DEC)
38a448ca 3138 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
921b3427 3139 else if (STACK_PUSH_CODE == POST_INC)
38a448ca 3140 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
921b3427
RK
3141 else
3142 temp = stack_pointer_rtx;
3143
c85f7c16 3144 return copy_to_reg (temp);
921b3427
RK
3145}
3146
566aa174
JH
3147/* Emit single push insn. */
3148static void
3149emit_single_push_insn (mode, x, type)
3150 rtx x;
3151 enum machine_mode mode;
3152 tree type;
3153{
3154#ifdef PUSH_ROUNDING
3155 rtx dest_addr;
918a6124 3156 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
566aa174 3157 rtx dest;
371b8fc0
JH
3158 enum insn_code icode;
3159 insn_operand_predicate_fn pred;
566aa174 3160
371b8fc0
JH
3161 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
 3162  /* If there is a push pattern, use it.  Otherwise try the old way of
 3163     throwing a MEM representing the push operation to the move expander.  */
3164 icode = push_optab->handlers[(int) mode].insn_code;
3165 if (icode != CODE_FOR_nothing)
3166 {
3167 if (((pred = insn_data[(int) icode].operand[0].predicate)
3168 && !((*pred) (x, mode))))
3169 x = force_reg (mode, x);
3170 emit_insn (GEN_FCN (icode) (x));
3171 return;
3172 }
566aa174
JH
3173 if (GET_MODE_SIZE (mode) == rounded_size)
3174 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3175 else
3176 {
3177#ifdef STACK_GROWS_DOWNWARD
3178 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
918a6124 3179 GEN_INT (-(HOST_WIDE_INT)rounded_size));
566aa174
JH
3180#else
3181 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3182 GEN_INT (rounded_size));
3183#endif
3184 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3185 }
3186
3187 dest = gen_rtx_MEM (mode, dest_addr);
3188
566aa174
JH
3189 if (type != 0)
3190 {
3191 set_mem_attributes (dest, type, 1);
3192 /* Function incoming arguments may overlap with sibling call
3193 outgoing arguments and we cannot allow reordering of reads
3194 from function arguments with stores to outgoing arguments
3195 of sibling calls. */
ba4828e0 3196 set_mem_alias_set (dest, 0);
566aa174
JH
3197 }
3198 emit_move_insn (dest, x);
3199#else
 3200  abort ();
3201#endif
3202}
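/* A usage sketch (hypothetical, name invented): pushing a single SImode
   argument.  On targets with a push pattern or PUSH_ROUNDING this emits
   one push insn; elsewhere the caller must go through the args_addr path
   of emit_push_insn below.  */
#if 0
static void
example_push_arg (x, type)
     rtx x;				/* an SImode value */
     tree type;				/* tree type of the argument, or 0 */
{
  emit_single_push_insn (SImode, x, type);
}
#endif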
3203
bbf6f052
RK
3204/* Generate code to push X onto the stack, assuming it has mode MODE and
3205 type TYPE.
3206 MODE is redundant except when X is a CONST_INT (since they don't
3207 carry mode info).
3208 SIZE is an rtx for the size of data to be copied (in bytes),
3209 needed only if X is BLKmode.
3210
f1eaaf73 3211 ALIGN (in bits) is maximum alignment we can assume.
bbf6f052 3212
cd048831
RK
3213 If PARTIAL and REG are both nonzero, then copy that many of the first
3214 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
3215 The amount of space pushed is decreased by PARTIAL words,
3216 rounded *down* to a multiple of PARM_BOUNDARY.
3217 REG must be a hard register in this case.
cd048831
RK
 3218   If REG is zero but PARTIAL is not, take all other actions for an
3219 argument partially in registers, but do not actually load any
3220 registers.
bbf6f052
RK
3221
3222 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 3223 This is ignored if an argument block has already been allocated.
bbf6f052
RK
3224
3225 On a machine that lacks real push insns, ARGS_ADDR is the address of
3226 the bottom of the argument block for this call. We use indexing off there
 3227   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3228 argument block has not been preallocated.
3229
e5e809f4
JL
3230 ARGS_SO_FAR is the size of args previously pushed for this call.
3231
3232 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3233 for arguments passed in registers. If nonzero, it will be the number
3234 of bytes required. */
bbf6f052
RK
3235
3236void
3237emit_push_insn (x, mode, type, size, align, partial, reg, extra,
4fc026cd
CM
3238 args_addr, args_so_far, reg_parm_stack_space,
3239 alignment_pad)
bbf6f052
RK
3240 register rtx x;
3241 enum machine_mode mode;
3242 tree type;
3243 rtx size;
729a2125 3244 unsigned int align;
bbf6f052
RK
3245 int partial;
3246 rtx reg;
3247 int extra;
3248 rtx args_addr;
3249 rtx args_so_far;
e5e809f4 3250 int reg_parm_stack_space;
4fc026cd 3251 rtx alignment_pad;
bbf6f052
RK
3252{
3253 rtx xinner;
3254 enum direction stack_direction
3255#ifdef STACK_GROWS_DOWNWARD
3256 = downward;
3257#else
3258 = upward;
3259#endif
3260
3261 /* Decide where to pad the argument: `downward' for below,
3262 `upward' for above, or `none' for don't pad it.
3263 Default is below for small data on big-endian machines; else above. */
3264 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3265
3266 /* Invert direction if stack is post-update. */
3267 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3268 if (where_pad != none)
3269 where_pad = (where_pad == downward ? upward : downward);
3270
3271 xinner = x = protect_from_queue (x, 0);
3272
3273 if (mode == BLKmode)
3274 {
3275 /* Copy a block into the stack, entirely or partially. */
3276
3277 register rtx temp;
3278 int used = partial * UNITS_PER_WORD;
3279 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3280 int skip;
3a94c984 3281
bbf6f052
RK
3282 if (size == 0)
3283 abort ();
3284
3285 used -= offset;
3286
3287 /* USED is now the # of bytes we need not copy to the stack
3288 because registers will take care of them. */
3289
3290 if (partial != 0)
f4ef873c 3291 xinner = adjust_address (xinner, BLKmode, used);
bbf6f052
RK
3292
3293 /* If the partial register-part of the arg counts in its stack size,
3294 skip the part of stack space corresponding to the registers.
3295 Otherwise, start copying to the beginning of the stack space,
3296 by setting SKIP to 0. */
e5e809f4 3297 skip = (reg_parm_stack_space == 0) ? 0 : used;
bbf6f052
RK
3298
3299#ifdef PUSH_ROUNDING
3300 /* Do it with several push insns if that doesn't take lots of insns
3301 and if there is no difficulty with push insns that skip bytes
3302 on the stack for alignment purposes. */
3303 if (args_addr == 0
f73ad30e 3304 && PUSH_ARGS
bbf6f052
RK
3305 && GET_CODE (size) == CONST_INT
3306 && skip == 0
15914757 3307 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
bbf6f052
RK
3308 /* Here we avoid the case of a structure whose weak alignment
3309 forces many pushes of a small amount of data,
3310 and such small pushes do rounding that causes trouble. */
e1565e65 3311 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
19caa751 3312 || align >= BIGGEST_ALIGNMENT
f1eaaf73
DE
3313 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3314 == (align / BITS_PER_UNIT)))
bbf6f052
RK
3315 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3316 {
3317 /* Push padding now if padding above and stack grows down,
3318 or if padding below and stack grows up.
3319 But if space already allocated, this has already been done. */
3320 if (extra && args_addr == 0
3321 && where_pad != none && where_pad != stack_direction)
906c4e36 3322 anti_adjust_stack (GEN_INT (extra));
bbf6f052 3323
566aa174 3324 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
921b3427 3325
7d384cc0 3326 if (current_function_check_memory_usage && ! in_check_memory_usage)
921b3427
RK
3327 {
3328 rtx temp;
3a94c984 3329
956d6950 3330 in_check_memory_usage = 1;
3a94c984 3331 temp = get_push_address (INTVAL (size) - used);
c85f7c16 3332 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
ebb1b59a
BS
3333 emit_library_call (chkr_copy_bitmap_libfunc,
3334 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3335 Pmode, XEXP (xinner, 0), Pmode,
3a94c984 3336 GEN_INT (INTVAL (size) - used),
921b3427
RK
3337 TYPE_MODE (sizetype));
3338 else
ebb1b59a
BS
3339 emit_library_call (chkr_set_right_libfunc,
3340 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3341 Pmode, GEN_INT (INTVAL (size) - used),
921b3427 3342 TYPE_MODE (sizetype),
956d6950
JL
3343 GEN_INT (MEMORY_USE_RW),
3344 TYPE_MODE (integer_type_node));
3345 in_check_memory_usage = 0;
921b3427 3346 }
bbf6f052
RK
3347 }
      else
#endif /* PUSH_ROUNDING */
	{
	  rtx target;

	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx_PLUS (Pmode,
								args_addr,
								args_so_far),
						  skip));
	  if (current_function_check_memory_usage && ! in_check_memory_usage)
	    {
	      in_check_memory_usage = 1;
	      target = copy_to_reg (temp);
	      if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
		emit_library_call (chkr_copy_bitmap_libfunc,
				   LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
				   target, Pmode,
				   XEXP (xinner, 0), Pmode,
				   size, TYPE_MODE (sizetype));
	      else
		emit_library_call (chkr_set_right_libfunc,
				   LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
				   target, Pmode,
				   size, TYPE_MODE (sizetype),
				   GEN_INT (MEMORY_USE_RW),
				   TYPE_MODE (integer_type_node));
	      in_check_memory_usage = 0;
	    }

	  target = gen_rtx_MEM (BLKmode, temp);

	  if (type != 0)
	    {
	      set_mem_attributes (target, type, 1);
	      /* Function incoming arguments may overlap with sibling call
		 outgoing arguments and we cannot allow reordering of reads
		 from function arguments with stores to outgoing arguments
		 of sibling calls.  */
	      set_mem_alias_set (target, 0);
	    }

	  /* TEMP is the address of the block.  Copy the data there.  */
	  if (GET_CODE (size) == CONST_INT
	      && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
	    {
	      move_by_pieces (target, xinner, INTVAL (size), align);
	      goto ret;
	    }
	  else
	    {
	      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
	      enum machine_mode mode;

	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		{
		  enum insn_code code = movstr_optab[(int) mode];
		  insn_operand_predicate_fn pred;

		  if (code != CODE_FOR_nothing
		      && ((GET_CODE (size) == CONST_INT
			   && ((unsigned HOST_WIDE_INT) INTVAL (size)
			       <= (GET_MODE_MASK (mode) >> 1)))
			  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
		      && (!(pred = insn_data[(int) code].operand[0].predicate)
			  || ((*pred) (target, BLKmode)))
		      && (!(pred = insn_data[(int) code].operand[1].predicate)
			  || ((*pred) (xinner, BLKmode)))
		      && (!(pred = insn_data[(int) code].operand[3].predicate)
			  || ((*pred) (opalign, VOIDmode))))
		    {
		      rtx op2 = convert_to_mode (mode, size, 1);
		      rtx last = get_last_insn ();
		      rtx pat;

		      pred = insn_data[(int) code].operand[2].predicate;
		      if (pred != 0 && ! (*pred) (op2, mode))
			op2 = copy_to_mode_reg (mode, op2);

		      pat = GEN_FCN ((int) code) (target, xinner,
						  op2, opalign);
		      if (pat)
			{
			  emit_insn (pat);
			  goto ret;
			}
		      else
			delete_insns_since (last);
		    }
		}
	    }
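	  /* Illustrative note (not from the original source): targets that
	     define a block-move expander, e.g. a "movstrsi" pattern, are
	     found through movstr_optab above.  The loop tries successively
	     wider integer modes for the length operand, so a size that does
	     not fit a narrow mode can still use the expander; if no pattern
	     accepts the operands, control falls through to the memcpy/bcopy
	     library call below.  */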

	  if (!ACCUMULATE_OUTGOING_ARGS)
	    {
	      /* If the source is referenced relative to the stack pointer,
		 copy it to another register to stabilize it.  We do not need
		 to do this if we know that we won't be changing sp.  */

	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
		temp = copy_to_reg (temp);
	    }

	  /* Make inhibit_defer_pop nonzero around the library call
	     to force it to pop the bcopy-arguments right away.  */
	  NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memcpy_libfunc, LCT_NORMAL,
			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#else
	  emit_library_call (bcopy_libfunc, LCT_NORMAL,
			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
			     convert_to_mode (TYPE_MODE (integer_type_node),
					      size,
					      TREE_UNSIGNED (integer_type_node)),
			     TYPE_MODE (integer_type_node));
#endif
	  OK_DEFER_POP;
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)),
			  reg_parm_stack_space, alignment_pad);
    }
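  /* Illustrative example (assumed 32-bit target, not from the original
     source): a DImode scalar with PARTIAL == 1 has SIZE == 2 words and
     NOT_STACK == 1, so the loop above pushes only word 1; word 0 is moved
     into its register by the code at "ret:" below, after any
     memory-to-memory copies that could clobber it have been emitted.  */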
  else
    {
      rtx addr;
      rtx target = NULL_RTX;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
	emit_single_push_insn (mode, x, type);
      else
#endif
	{
	  if (GET_CODE (args_so_far) == CONST_INT)
	    addr
	      = memory_address (mode,
				plus_constant (args_addr,
					       INTVAL (args_so_far)));
	  else
	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
						       args_so_far));
	  target = addr;
	  dest = gen_rtx_MEM (mode, addr);
	  if (type != 0)
	    {
	      set_mem_attributes (dest, type, 1);
	      /* Function incoming arguments may overlap with sibling call
		 outgoing arguments and we cannot allow reordering of reads
		 from function arguments with stores to outgoing arguments
		 of sibling calls.  */
	      set_mem_alias_set (dest, 0);
	    }

	  emit_move_insn (dest, x);
	}

      if (current_function_check_memory_usage && ! in_check_memory_usage)
	{
	  in_check_memory_usage = 1;
	  if (target == 0)
	    target = get_push_address (GET_MODE_SIZE (mode));

	  if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
	    emit_library_call (chkr_copy_bitmap_libfunc,
			       LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
			       Pmode, XEXP (x, 0), Pmode,
			       GEN_INT (GET_MODE_SIZE (mode)),
			       TYPE_MODE (sizetype));
	  else
	    emit_library_call (chkr_set_right_libfunc,
			       LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
			       Pmode, GEN_INT (GET_MODE_SIZE (mode)),
			       TYPE_MODE (sizetype),
			       GEN_INT (MEMORY_USE_RW),
			       TYPE_MODE (integer_type_node));
	  in_check_memory_usage = 0;
	}
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x, -1, align);  /* ??? size?  */
      else
	move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
\f
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (x)
     rtx x;
{
  return ((x == 0
	   /* Only registers can be subtargets.  */
	   || GET_CODE (x) != REG
	   /* If the register is readonly, it can't be set more than once.  */
	   || RTX_UNCHANGING_P (x)
	   /* Don't use hard regs to avoid extending their life.  */
	   || REGNO (x) < FIRST_PSEUDO_REGISTER
	   /* Avoid subtargets inside loops,
	      since they hide some invariant expressions.  */
	   || preserve_subexpressions_p ())
	  ? 0 : x);
}

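/* Illustrative note (not from the original source): a caller expanding,
   say, A + B into TARGET can pass get_subtarget (TARGET) as the target for
   the subexpressions.  A pseudo register flows through unchanged and can
   hold intermediate results, while hard registers, RTX_UNCHANGING_P
   registers, and non-registers yield 0, forcing a fresh temporary.  */
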
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg ATTRIBUTE_UNUSED;
{
  register rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
      || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;
      unsigned int alignment;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &volatilep, &alignment);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
	tem = stabilize_reference (tem);

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
      if (offset != 0)
	{
	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	  if (GET_CODE (to_rtx) != MEM)
	    abort ();

	  if (GET_MODE (offset_rtx) != ptr_mode)
	    {
#ifdef POINTERS_EXTEND_UNSIGNED
	      offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
#else
	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
	    }

	  /* A constant address in TO_RTX can have VOIDmode, we must not try
	     to call force_reg for that case.  Avoid that case.  */
	  if (GET_CODE (to_rtx) == MEM
	      && GET_MODE (to_rtx) == BLKmode
	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
	      && bitsize
	      && (bitpos % bitsize) == 0
	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
	      && alignment == GET_MODE_ALIGNMENT (mode1))
	    {
	      rtx temp
		= adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);

	      if (GET_CODE (XEXP (temp, 0)) == REG)
		to_rtx = temp;
	      else
		to_rtx = (replace_equiv_address
			  (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
					      XEXP (temp, 0))));
	      bitpos = 0;
	    }

	  to_rtx = change_address (to_rtx, VOIDmode,
				   gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
						 force_reg (ptr_mode,
							    offset_rtx)));
	}

      if (volatilep)
	{
	  if (GET_CODE (to_rtx) == MEM)
	    {
	      /* When the offset is zero, to_rtx is the address of the
		 structure we are storing into, and hence may be shared.
		 We must make a new MEM before setting the volatile bit.  */
	      if (offset == 0)
		to_rtx = copy_rtx (to_rtx);

	      MEM_VOLATILE_P (to_rtx) = 1;
	    }
#if 0  /* This was turned off because, when a field is volatile
	  in an object which is not volatile, the object may be in a register,
	  and then we would abort over here.  */
	  else
	    abort ();
#endif
	}

      if (TREE_CODE (to) == COMPONENT_REF
	  && TREE_READONLY (TREE_OPERAND (to, 1)))
	{
	  if (offset == 0)
	    to_rtx = copy_rtx (to_rtx);

	  RTX_UNCHANGING_P (to_rtx) = 1;
	}

      /* Check the access.  */
      if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
	{
	  rtx to_addr;
	  int size;
	  int best_mode_size;
	  enum machine_mode best_mode;

	  best_mode = get_best_mode (bitsize, bitpos,
				     TYPE_ALIGN (TREE_TYPE (tem)),
				     mode1, volatilep);
	  if (best_mode == VOIDmode)
	    best_mode = QImode;

	  best_mode_size = GET_MODE_BITSIZE (best_mode);
	  to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
	  size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
	  size *= GET_MODE_SIZE (best_mode);

	  /* Check the access right of the pointer.  */
	  in_check_memory_usage = 1;
	  if (size)
	    emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
			       VOIDmode, 3, to_addr, Pmode,
			       GEN_INT (size), TYPE_MODE (sizetype),
			       GEN_INT (MEMORY_USE_WO),
			       TYPE_MODE (integer_type_node));
	  in_check_memory_usage = 0;
	}

      /* If this is a varying-length object, we must get the address of
	 the source and do an explicit block move.  */
      if (bitsize < 0)
	{
	  unsigned int from_align;
	  rtx from_rtx = expand_expr_unaligned (from, &from_align);
	  rtx inner_to_rtx
	    = adjust_address (to_rtx, BLKmode, bitpos / BITS_PER_UNIT);

	  emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
			   MIN (alignment, from_align));
	  free_temp_slots ();
	  pop_temp_slots ();
	  return to_rtx;
	}
      else
	{
	  result = store_field (to_rtx, bitsize, bitpos, mode1, from,
				(want_value
				 /* Spurious cast for HPUX compiler.  */
				 ? ((enum machine_mode)
				    TYPE_MODE (TREE_TYPE (to)))
				 : VOIDmode),
				unsignedp,
				alignment,
				int_size_in_bytes (TREE_TYPE (tem)),
				get_alias_set (to));

	  preserve_temp_slots (result);
	  free_temp_slots ();
	  pop_temp_slots ();

	  /* If the value is meaningful, convert RESULT to the proper mode.
	     Otherwise, return nothing.  */
	  return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
					      TYPE_MODE (TREE_TYPE (from)),
					      result,
					      TREE_UNSIGNED (TREE_TYPE (to)))
		  : NULL_RTX);
	}
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
	    && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
			 TYPE_ALIGN (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
	emit_block_move (to_rtx, value, expr_size (from),
			 TYPE_ALIGN (TREE_TYPE (from)));
      else
	{
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
	      || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
	    value = convert_memory_address (GET_MODE (to_rtx), value);
#endif
	  emit_move_insn (to_rtx, value);
	}
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    {
      to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
      if (GET_CODE (to_rtx) == MEM)
	set_mem_alias_set (to_rtx, get_alias_set (to));
    }

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);

      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
			 TYPE_ALIGN (TREE_TYPE (from)));
      else
	emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
			      EXPAND_MEMORY_USE_DONT);

      /* Copy the rights of the bitmap.  */
      if (current_function_check_memory_usage)
	emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
			   VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			   XEXP (from_rtx, 0), Pmode,
			   convert_to_mode (TYPE_MODE (sizetype),
					    size, TREE_UNSIGNED (sizetype)),
			   TYPE_MODE (sizetype));

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memmove_libfunc, LCT_NORMAL,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
			 XEXP (to_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node),
					  size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}
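
/* Illustrative note (not from the original source): a C assignment used
   for its value, as in "a = (b = c);", expands the inner assignment with
   WANT_VALUE nonzero and consumes the returned rtx; a plain statement
   "b = c;" passes WANT_VALUE == 0 and gets NULL_RTX back, so no extra
   copy of the stored value is produced.  */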

/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode this doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If WANT_VALUE is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.  */

rtx
store_expr (exp, target, want_value)
     register tree exp;
     register rtx target;
     int want_value;
{
  register rtx temp;
  int dont_return_target = 0;
  int dont_store_target = 0;

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 1), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 2), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;

      return want_value ? target : NULL_RTX;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
	{
	  /* Expand EXP into a new pseudo.  */
	  temp = gen_reg_rtx (GET_MODE (target));
	  temp = expand_expr (exp, temp, GET_MODE (target), 0);
	}
      else
	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);

      /* If target is volatile, ANSI requires accessing the value
	 *from* the target, if it is accessed.  So make that happen.
	 In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && want_value)
	dont_return_target = 1;
    }
  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
	   && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
	{
	  /* If TEMP is already in the desired TARGET, only copy it from
	     memory and don't store it there again.  */
	  if (temp == target
	      || (rtx_equal_p (temp, target)
		  && ! side_effects_p (temp) && ! side_effects_p (target)))
	    dont_store_target = 1;
	  temp = copy_to_reg (temp);
	}
      dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      /* If we don't want a value, we can do the conversion inside EXP,
	 which will often result in some optimizations.  Do the conversion
	 in two steps: first change the signedness, if needed, then
	 the extend.  But don't do this if the type of EXP is a subtype
	 of something else since then the conversion might involve
	 more than just converting modes.  */
      if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
	  && TREE_TYPE (TREE_TYPE (exp)) == 0)
	{
	  if (TREE_UNSIGNED (TREE_TYPE (exp))
	      != SUBREG_PROMOTED_UNSIGNED_P (target))
	    exp
	      = convert
		(signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
					  TREE_TYPE (exp)),
		 exp);

	  exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
					SUBREG_PROMOTED_UNSIGNED_P (target)),
			 exp);
	}

      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If TEMP is a volatile MEM and we want a result value, make
	 the access now so it gets done only once.  Likewise if
	 it contains TARGET.  */
      if (GET_CODE (temp) == MEM && want_value
	  && (MEM_VOLATILE_P (temp)
	      || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
	temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	temp = convert_modes (GET_MODE (SUBREG_REG (target)),
			      TYPE_MODE (TREE_TYPE (exp)), temp,
			      SUBREG_PROMOTED_UNSIGNED_P (target));

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));

      /* If we promoted a constant, change the mode back down to match
	 target.  Otherwise, the caller might get confused by a result whose
	 mode is larger than expected.  */

      if (want_value && GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp)
	    = SUBREG_PROMOTED_UNSIGNED_P (target);
	}

      return want_value ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* Return TARGET if it's a specified hardware register.
	 If TARGET is a volatile mem ref, either return TARGET
	 or return a reg copied *from* TARGET; ANSI requires this.

	 Otherwise, if TEMP is not TARGET, return TEMP
	 if it is constant (for efficiency),
	 or if we really want the correct value.  */
      if (!(target && GET_CODE (target) == REG
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  && ! rtx_equal_p (temp, target)
	  && (CONSTANT_P (temp) || want_value))
	dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));

  if (current_function_check_memory_usage
      && GET_CODE (target) == MEM
      && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
    {
      in_check_memory_usage = 1;
      if (GET_CODE (temp) == MEM)
	emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
			   VOIDmode, 3, XEXP (target, 0), Pmode,
			   XEXP (temp, 0), Pmode,
			   expr_size (exp), TYPE_MODE (sizetype));
      else
	emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
			   VOIDmode, 3, XEXP (target, 0), Pmode,
			   expr_size (exp), TYPE_MODE (sizetype),
			   GEN_INT (MEMORY_USE_WO),
			   TYPE_MODE (integer_type_node));
      in_check_memory_usage = 0;
    }

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.  */
  /* If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
			      || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      && ! dont_store_target)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.
	     The string constant may be shorter than the array.
	     So copy just the string's actual length, and clear the rest.  */
	  rtx size;
	  rtx addr;

	  /* Get the size of the data type of the string,
	     which is actually the size of the target.  */
	  size = expr_size (exp);
	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
	  else
	    {
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      make_tree (sizetype, size),
			      size_int (TREE_STRING_LENGTH (exp)));
	      unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
					       VOIDmode, 0);
	      rtx label = 0;

	      /* Copy that much.  */
	      emit_block_move (target, temp, copy_size_rtx,
			       TYPE_ALIGN (TREE_TYPE (exp)));

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in ptr_mode.  */

	      addr = XEXP (target, 0);
	      addr = convert_modes (ptr_mode, Pmode, addr, 1);

	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
		  size = plus_constant (size, -TREE_STRING_LENGTH (exp));
		  align = MIN (align,
			       (unsigned int) (BITS_PER_UNIT
					       * (INTVAL (copy_size_rtx)
						  & - INTVAL (copy_size_rtx))));
		}
	      else
		{
		  addr = force_reg (ptr_mode, addr);
		  addr = expand_binop (ptr_mode, add_optab, addr,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  size = expand_binop (ptr_mode, sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  align = BITS_PER_UNIT;
		  label = gen_label_rtx ();
		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
					   GET_MODE (size), 0, 0, label);
		}
	      align = MIN (align, expr_align (copy_size));

	      if (size != const0_rtx)
		{
		  rtx dest = gen_rtx_MEM (BLKmode, addr);

		  MEM_COPY_ATTRIBUTES (dest, target);

		  /* Be sure we can write on ADDR.  */
		  in_check_memory_usage = 1;
		  if (current_function_check_memory_usage)
		    emit_library_call (chkr_check_addr_libfunc,
				       LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
				       addr, Pmode,
				       size, TYPE_MODE (sizetype),
				       GEN_INT (MEMORY_USE_WO),
				       TYPE_MODE (integer_type_node));
		  in_check_memory_usage = 0;
		  clear_storage (dest, size, align);
		}

	      if (label)
		emit_label (label);
	    }
	}
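      /* Illustrative example (not from the original source): for
	   char buf[10] = "hi";
	 the STRING_CST supplies 3 bytes (including the terminating '\0')
	 while expr_size (exp) is 10, so the code above copies 3 bytes with
	 emit_block_move and clears the remaining 7 with clear_storage.  */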
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
			 TYPE_ALIGN (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 TYPE_ALIGN (TREE_TYPE (exp)));
      else
	emit_move_insn (target, temp);
    }

  /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if (want_value && GET_MODE (target) != BLKmode
	   && ! (GET_CODE (target) == REG
		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
\f
/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return integer_zerop (exp);

    case COMPLEX_CST:
      return
	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	if (! is_zeros_p (TREE_VALUE (elt)))
	  return 0;

      return 1;

    default:
      return 0;
    }
}

/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	{
	  /* If there are no ranges of true bits, it is all zero.  */
	  return elt == NULL_TREE;
	}
      for (; elt; elt = TREE_CHAIN (elt))
	{
	  /* We do not handle the case where the index is a RANGE_EXPR,
	     so the statistic will be somewhat inaccurate.
	     We do make a more accurate count in store_constructor itself,
	     so since this function is only used for nested array elements,
	     this should be close enough.  */
	  if (mostly_zeros_p (TREE_VALUE (elt)))
	    zeros++;
	  elts++;
	}

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}
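
/* Illustrative example (not from the original source): for the
   constructor { 0, 0, 0, 5 } we get elts == 4 and zeros == 3, and
   4 * 3 >= 3 * 4 holds, so the value counts as mostly zeros and callers
   will prefer to clear the whole object and store only the nonzero
   element.  */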
\f
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   ALIGN and CLEARED are as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (target, bitsize, bitpos,
			 mode, exp, type, align, cleared, alias_set)
     rtx target;
     unsigned HOST_WIDE_INT bitsize;
     HOST_WIDE_INT bitpos;
     enum machine_mode mode;
     tree exp, type;
     unsigned int align;
     int cleared;
     int alias_set;
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      && bitpos % BITS_PER_UNIT == 0
      /* If we have a non-zero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || GET_CODE (target) == MEM))
    {
      if (bitpos != 0)
	target
	  = adjust_address (target,
			    GET_MODE (target) == BLKmode
			    || 0 != (bitpos
				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);

      /* Show the alignment may no longer be what it was and update the alias
	 set, if required.  */
      if (bitpos != 0)
	align = MIN (align, (unsigned int) bitpos & - bitpos);
      if (GET_CODE (target) == MEM)
	set_mem_alias_set (target, alias_set);

      store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
		 int_size_in_bytes (type), alias_set);
}

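/* Illustrative example (not from the original source): initializing
     struct { struct { int x, y; } p; } v = { { 1, 2 } };
   reaches store_constructor_field with the inner CONSTRUCTOR at a byte
   boundary, so it recurses into store_constructor directly, carrying the
   CLEARED flag along so an already-zeroed outer object is not cleared a
   second time for the substructure.  */
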
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM.
   ALIGN is the maximum known alignment for TARGET.
   CLEARED is true if TARGET is known to have been zeroed.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */

static void
store_constructor (exp, target, align, cleared, size)
     tree exp;
     rtx target;
     unsigned int align;
     int cleared;
     HOST_WIDE_INT size;
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif

  /* We know our target cannot conflict, since safe_from_p has been called.  */
#if 0
  /* Don't try copying piece by piece into a hard register
     since that is vulnerable to being clobbered by EXP.
     Instead, construct in a pseudo register and then copy it all.  */
  if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
    {
      rtx temp = gen_reg_rtx (GET_MODE (target));
      store_constructor (exp, temp, align, cleared, size);
      emit_move_insn (target, temp);
      return;
    }
#endif

  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      register tree elt;

      /* Inform later passes that the whole union value is dead.  */
      if ((TREE_CODE (type) == UNION_TYPE
	   || TREE_CODE (type) == QUAL_UNION_TYPE)
	  && ! cleared)
	{
	  emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

	  /* If the constructor is empty, clear the union.  */
	  if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
	    clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
	}

      /* If we are building a static constructor into a register,
	 set the initial value as zero so we can fold the value into
	 a constant.  But if more than one register is involved,
	 this probably loses.  */
      else if (GET_CODE (target) == REG && TREE_STATIC (exp)
	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	{
	  if (! cleared)
	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));

	  cleared = 1;
	}

      /* If the constructor has fewer fields than the structure
	 or if we are initializing the structure to mostly zeros,
	 clear the whole structure first.  Don't do this if TARGET is a
	 register whose mode size isn't equal to SIZE since clear_storage
	 can't handle this case.  */
      else if (size > 0
	       && ((list_length (CONSTRUCTOR_ELTS (exp))
		    != fields_length (type))
		   || mostly_zeros_p (exp))
	       && (GET_CODE (target) != REG
		   || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
	{
	  if (! cleared)
	    clear_storage (target, GEN_INT (size), align);

	  cleared = 1;
	}
      else if (! cleared)
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	{
	  register tree field = TREE_PURPOSE (elt);
#ifdef WORD_REGISTER_OPERATIONS
	  tree value = TREE_VALUE (elt);
#endif
	  register enum machine_mode mode;
	  HOST_WIDE_INT bitsize;
	  HOST_WIDE_INT bitpos = 0;
	  int unsignedp;
	  tree offset;
	  rtx to_rtx = target;

	  /* Just ignore missing fields.
	     We cleared the whole structure, above,
	     if any fields are missing.  */
	  if (field == 0)
	    continue;

	  if (cleared && is_zeros_p (TREE_VALUE (elt)))
	    continue;

	  if (host_integerp (DECL_SIZE (field), 1))
	    bitsize = tree_low_cst (DECL_SIZE (field), 1);
	  else
	    bitsize = -1;

	  unsignedp = TREE_UNSIGNED (field);
	  mode = DECL_MODE (field);
	  if (DECL_BIT_FIELD (field))
	    mode = VOIDmode;

	  offset = DECL_FIELD_OFFSET (field);
	  if (host_integerp (offset, 0)
	      && host_integerp (bit_position (field), 0))
	    {
	      bitpos = int_bit_position (field);
	      offset = 0;
	    }
	  else
	    bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);

	  if (offset)
	    {
	      rtx offset_rtx;

	      if (contains_placeholder_p (offset))
		offset = build (WITH_RECORD_EXPR, sizetype,
				offset, make_tree (TREE_TYPE (exp), target));

	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
	      if (GET_CODE (to_rtx) != MEM)
		abort ();

	      if (GET_MODE (offset_rtx) != ptr_mode)
		{
#ifdef POINTERS_EXTEND_UNSIGNED
		  offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
#else
		  offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
		}

	      to_rtx
		= change_address (to_rtx, VOIDmode,
				  gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
						force_reg (ptr_mode,
							   offset_rtx)));
	      align = DECL_OFFSET_ALIGN (field);
	    }

	  if (TREE_READONLY (field))
	    {
	      if (GET_CODE (to_rtx) == MEM)
		to_rtx = copy_rtx (to_rtx);

	      RTX_UNCHANGING_P (to_rtx) = 1;
	    }

#ifdef WORD_REGISTER_OPERATIONS
	  /* If this initializes a field that is smaller than a word, at the
	     start of a word, try to widen it to a full word.
	     This special case allows us to output C++ member function
	     initializations in a form that the optimizers can understand.  */
	  if (GET_CODE (target) == REG
	      && bitsize < BITS_PER_WORD
	      && bitpos % BITS_PER_WORD == 0
	      && GET_MODE_CLASS (mode) == MODE_INT
	      && TREE_CODE (value) == INTEGER_CST
	      && exp_size >= 0
	      && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
	    {
	      tree type = TREE_TYPE (value);
	      if (TYPE_PRECISION (type) < BITS_PER_WORD)
		{
		  type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
		  value = convert (type, value);
		}
	      if (BYTES_BIG_ENDIAN)
		value
		  = fold (build (LSHIFT_EXPR, type, value,
				 build_int_2 (BITS_PER_WORD - bitsize, 0)));
	      bitsize = BITS_PER_WORD;
	      mode = word_mode;
	    }
#endif
	  store_constructor_field (to_rtx, bitsize, bitpos, mode,
				   TREE_VALUE (elt), type, align, cleared,
				   (DECL_NONADDRESSABLE_P (field)
				    && GET_CODE (to_rtx) == MEM)
				   ? MEM_ALIAS_SET (to_rtx)
				   : get_alias_set (TREE_TYPE (field)));
	}
    }
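  /* Illustrative note (assumes a WORD_REGISTER_OPERATIONS target; not from
     the original source): when a constructor held in a register sets a
     16-bit integer field at bit position 0 to the constant 5, the widening
     code above rewrites the store as a full word_mode store of 5 (shifted
     up on big-endian targets), which the rtl optimizers track more easily
     than a bit-field insertion.  */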
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      register tree elt;
      register int i;
      int need_to_clear;
      tree domain = TYPE_DOMAIN (type);
      tree elttype = TREE_TYPE (type);
      int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
			    && host_integerp (TYPE_MAX_VALUE (domain), 0));
      HOST_WIDE_INT minelt = 0;
      HOST_WIDE_INT maxelt = 0;

      /* If we have constant bounds for the range of the type, get them.  */
      if (const_bounds_p)
	{
	  minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
	  maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
	}

      /* If the constructor has fewer elements than the array,
	 clear the whole array first.  Similarly if this is a
	 static constructor of a non-BLKmode object.  */
      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
	need_to_clear = 1;
      else
	{
	  HOST_WIDE_INT count = 0, zero_count = 0;
	  need_to_clear = ! const_bounds_p;

	  /* This loop is a more accurate version of the loop in
	     mostly_zeros_p (it handles RANGE_EXPR in an index).
	     It is also needed to check for missing elements.  */
	  for (elt = CONSTRUCTOR_ELTS (exp);
	       elt != NULL_TREE && ! need_to_clear;
	       elt = TREE_CHAIN (elt))
	    {
	      tree index = TREE_PURPOSE (elt);
	      HOST_WIDE_INT this_node_count;

	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		{
		  tree lo_index = TREE_OPERAND (index, 0);
		  tree hi_index = TREE_OPERAND (index, 1);

		  if (! host_integerp (lo_index, 1)
		      || ! host_integerp (hi_index, 1))
		    {
		      need_to_clear = 1;
		      break;
		    }

		  this_node_count = (tree_low_cst (hi_index, 1)
				     - tree_low_cst (lo_index, 1) + 1);
		}
	      else
		this_node_count = 1;

	      count += this_node_count;
	      if (mostly_zeros_p (TREE_VALUE (elt)))
		zero_count += this_node_count;
	    }

	  /* Clear the entire array first if there are any missing elements,
	     or if the incidence of zero elements is >= 75%.  */
	  if (! need_to_clear
	      && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
	    need_to_clear = 1;
	}

      if (need_to_clear && size > 0)
	{
	  if (! cleared)
	    clear_storage (target, GEN_INT (size), align);

	  cleared = 1;
	}
      else
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
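
      /* Illustrative example (not from the original source): for
	   int a[8] = { 0, 0, 0, 0, 0, 0, 7, 7 };
	 the loop above finds count == 8 and zero_count == 6, and
	 4 * 6 >= 3 * 8 holds, so NEED_TO_CLEAR is set: the array is
	 cleared once and only the two nonzero elements are stored.  */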
bbf6f052
RK
4783
4784 /* Store each element of the constructor into
4785 the corresponding element of TARGET, determined
4786 by counting the elements. */
4787 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4788 elt;
4789 elt = TREE_CHAIN (elt), i++)
4790 {
4791 register enum machine_mode mode;
19caa751
RK
4792 HOST_WIDE_INT bitsize;
4793 HOST_WIDE_INT bitpos;
bbf6f052 4794 int unsignedp;
e1a43f73 4795 tree value = TREE_VALUE (elt);
729a2125 4796 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
03dc44a6
RS
4797 tree index = TREE_PURPOSE (elt);
4798 rtx xtarget = target;
bbf6f052 4799
e1a43f73
PB
4800 if (cleared && is_zeros_p (value))
4801 continue;
9de08200 4802
bbf6f052 4803 unsignedp = TREE_UNSIGNED (elttype);
14a774a9
RK
4804 mode = TYPE_MODE (elttype);
4805 if (mode == BLKmode)
19caa751
RK
4806 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4807 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4808 : -1);
14a774a9
RK
4809 else
4810 bitsize = GET_MODE_BITSIZE (mode);
bbf6f052 4811
e1a43f73
PB
4812 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4813 {
4814 tree lo_index = TREE_OPERAND (index, 0);
4815 tree hi_index = TREE_OPERAND (index, 1);
4816 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4817 struct nesting *loop;
05c0b405
PB
4818 HOST_WIDE_INT lo, hi, count;
4819 tree position;
e1a43f73 4820
0f41302f 4821 /* If the range is constant and "small", unroll the loop. */
	      if (const_bounds_p
		  && host_integerp (lo_index, 0)
		  && host_integerp (hi_index, 0)
		  && (lo = tree_low_cst (lo_index, 0),
		      hi = tree_low_cst (hi_index, 0),
		      count = hi - lo + 1,
		      (GET_CODE (target) != MEM
		       || count <= 2
		       || (host_integerp (TYPE_SIZE (elttype), 1)
			   && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
			       <= 40 * 8)))))
		{
		  lo -= minelt;  hi -= minelt;
		  for (; lo <= hi; lo++)
		    {
		      bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
		      store_constructor_field
			(target, bitsize, bitpos, mode, value, type, align,
			 cleared,
			 TYPE_NONALIASED_COMPONENT (type)
			 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
		    }
		}
	      else
		{
		  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
		  loop_top = gen_label_rtx ();
		  loop_end = gen_label_rtx ();

		  unsignedp = TREE_UNSIGNED (domain);

		  index = build_decl (VAR_DECL, NULL_TREE, domain);

		  index_r
		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
						 &unsignedp, 0));
		  SET_DECL_RTL (index, index_r);
		  if (TREE_CODE (value) == SAVE_EXPR
		      && SAVE_EXPR_RTL (value) == 0)
		    {
		      /* Make sure value gets expanded once before the
			 loop.  */
		      expand_expr (value, const0_rtx, VOIDmode, 0);
		      emit_queue ();
		    }
		  store_expr (lo_index, index_r, 0);
		  loop = expand_start_loop (0);

		  /* Assign value to element index.  */
		  position
		    = convert (ssizetype,
			       fold (build (MINUS_EXPR, TREE_TYPE (index),
					    index, TYPE_MIN_VALUE (domain))));
		  position = size_binop (MULT_EXPR, position,
					 convert (ssizetype,
						  TYPE_SIZE_UNIT (elttype)));

		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
		  addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
		  xtarget = change_address (target, mode, addr);
		  if (TREE_CODE (value) == CONSTRUCTOR)
		    store_constructor (value, xtarget, align, cleared,
				       bitsize / BITS_PER_UNIT);
		  else
		    store_expr (value, xtarget, 0);

		  expand_exit_loop_if_false (loop,
					     build (LT_EXPR, integer_type_node,
						    index, hi_index));

		  expand_increment (build (PREINCREMENT_EXPR,
					   TREE_TYPE (index),
					   index, integer_one_node), 0, 0);
		  expand_end_loop ();
		  emit_label (loop_end);
		}
	    }
	  else if ((index != 0 && ! host_integerp (index, 0))
		   || ! host_integerp (TYPE_SIZE (elttype), 1))
	    {
	      rtx pos_rtx, addr;
	      tree position;

	      if (index == 0)
		index = ssize_int (1);

	      if (minelt)
		index = convert (ssizetype,
				 fold (build (MINUS_EXPR, index,
					      TYPE_MIN_VALUE (domain))));

	      position = size_binop (MULT_EXPR, index,
				     convert (ssizetype,
					      TYPE_SIZE_UNIT (elttype)));
	      pos_rtx = expand_expr (position, 0, VOIDmode, 0);
	      addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
	      xtarget = change_address (target, mode, addr);
	      store_expr (value, xtarget, 0);
	    }
	  else
	    {
	      if (index != 0)
		bitpos = ((tree_low_cst (index, 0) - minelt)
			  * tree_low_cst (TYPE_SIZE (elttype), 1));
	      else
		bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));

	      store_constructor_field (target, bitsize, bitpos, mode, value,
				       type, align, cleared,
				       TYPE_NONALIASED_COMPONENT (type)
				       && GET_CODE (target) == MEM
				       ? MEM_ALIAS_SET (target)
				       : get_alias_set (elttype));
	    }
	}
    }

  /* Set constructor assignments.  */
  else if (TREE_CODE (type) == SET_TYPE)
    {
      tree elt = CONSTRUCTOR_ELTS (exp);
      unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
      tree domain = TYPE_DOMAIN (type);
      tree domain_min, domain_max, bitlength;

      /* The default implementation strategy is to extract the constant
	 parts of the constructor, use that to initialize the target,
	 and then "or" in whatever non-constant ranges we need in addition.

	 If a large set is all zero or all ones, it is
	 probably better to set it using memset (if available) or bzero.
	 Also, if a large set has just a single range, it may be better
	 to first clear the whole set (using bzero/memset) and then set
	 the bits we want.  */
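
      /* (Editor's illustration: for a hypothetical 64-bit Pascal-style
	 set initialized to { 1, 3..10 }, the constant bits fit within
	 two words, so the word-copy loop below emits them directly; a
	 non-constant range such as lo..hi would instead be "or"ed in
	 afterwards through the __setbits library call further down.)  */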

      /* Check for all zeros.  */
      if (elt == NULL_TREE && size > 0)
	{
	  if (!cleared)
	    clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
	  return;
	}

      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
      bitlength = size_binop (PLUS_EXPR,
			      size_diffop (domain_max, domain_min),
			      ssize_int (1));

      nbits = tree_low_cst (bitlength, 1);

      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
	 are "complicated" (more than one range), initialize (the
	 constant parts) by copying from a constant.  */
      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
	{
	  unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
	  char *bit_buffer = (char *) alloca (nbits);
	  HOST_WIDE_INT word = 0;
	  unsigned int bit_pos = 0;
	  unsigned int ibit = 0;
	  unsigned int offset = 0;  /* In bytes from beginning of set.  */

	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
	  for (;;)
	    {
	      if (bit_buffer[ibit])
		{
		  if (BYTES_BIG_ENDIAN)
		    word |= (1 << (set_word_size - 1 - bit_pos));
		  else
		    word |= 1 << bit_pos;
		}

	      bit_pos++;  ibit++;
	      if (bit_pos >= set_word_size || ibit == nbits)
		{
		  if (word != 0 || ! cleared)
		    {
		      rtx datum = GEN_INT (word);
		      rtx to_rtx;

		      /* The assumption here is that it is safe to use
			 XEXP if the set is multi-word, but not if
			 it's single-word.  */
		      if (GET_CODE (target) == MEM)
			to_rtx = adjust_address (target, mode, offset);
		      else if (offset == 0)
			to_rtx = target;
		      else
			abort ();
		      emit_move_insn (to_rtx, datum);
		    }

		  if (ibit == nbits)
		    break;
		  word = 0;
		  bit_pos = 0;
		  offset += set_word_size / BITS_PER_UNIT;
		}
	    }
	}
      else if (!cleared)
	/* Don't bother clearing storage if the set is all ones.  */
	if (TREE_CHAIN (elt) != NULL_TREE
	    || (TREE_PURPOSE (elt) == NULL_TREE
		? nbits != 1
		: ( ! host_integerp (TREE_VALUE (elt), 0)
		   || ! host_integerp (TREE_PURPOSE (elt), 0)
		   || (tree_low_cst (TREE_VALUE (elt), 0)
		       - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
		       != (HOST_WIDE_INT) nbits))))
	  clear_storage (target, expr_size (exp), TYPE_ALIGN (type));

      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
	{
	  /* Start of range of element or NULL.  */
	  tree startbit = TREE_PURPOSE (elt);
	  /* End of range of element, or element value.  */
	  tree endbit = TREE_VALUE (elt);
#ifdef TARGET_MEM_FUNCTIONS
	  HOST_WIDE_INT startb, endb;
#endif
	  rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;

	  bitlength_rtx = expand_expr (bitlength,
				       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);

	  /* Handle non-range tuple element like [ expr ].  */
	  if (startbit == NULL_TREE)
	    {
	      startbit = save_expr (endbit);
	      endbit = startbit;
	    }

	  startbit = convert (sizetype, startbit);
	  endbit = convert (sizetype, endbit);
	  if (! integer_zerop (domain_min))
	    {
	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
	    }
	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
				      EXPAND_CONST_ADDRESS);
	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
				    EXPAND_CONST_ADDRESS);

	  if (REG_P (target))
	    {
	      targetx
		= assign_temp
		  ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
					  TYPE_QUAL_CONST)),
		   0, 1, 1);
	      emit_move_insn (targetx, target);
	    }

	  else if (GET_CODE (target) == MEM)
	    targetx = target;
	  else
	    abort ();

#ifdef TARGET_MEM_FUNCTIONS
	  /* Optimization:  If startbit and endbit are
	     constants divisible by BITS_PER_UNIT,
	     call memset instead.  */
	  if (TREE_CODE (startbit) == INTEGER_CST
	      && TREE_CODE (endbit) == INTEGER_CST
	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
	    {
	      emit_library_call (memset_libfunc, LCT_NORMAL,
				 VOIDmode, 3,
				 plus_constant (XEXP (targetx, 0),
						startb / BITS_PER_UNIT),
				 Pmode,
				 constm1_rtx, TYPE_MODE (integer_type_node),
				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
				 TYPE_MODE (sizetype));
	    }
	  else
#endif
	    emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
			       LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
			       Pmode, bitlength_rtx, TYPE_MODE (sizetype),
			       startbit_rtx, TYPE_MODE (sizetype),
			       endbit_rtx, TYPE_MODE (sizetype));

	  if (REG_P (target))
	    emit_move_insn (target, targetx);
	}
    }

  else
    abort ();
}
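
/* Editor's illustration (not part of the original file): the GNU C
   range-designator initializer below is the kind of CONSTRUCTOR that
   reaches the RANGE_EXPR path in store_constructor above.  Assuming
   4-byte ints, ten elements are 10 * 32 = 320 bits, within the
   40 * 8 bit budget, so the element stores are unrolled; a larger or
   non-constant range would be emitted as an index-register loop.  */
#if 0
static int example_range_init[16] = { [0 ... 9] = 42 };
#endif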

/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   ALIGN is the alignment that TARGET is known to have.
   TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */
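
/* (Editor's note: for a C bit-field assignment such as `s.f = v' with
   `int f : 5' placed, say, 3 bits into S, store_field is reached with
   BITSIZE == 5, BITPOS == 3 and MODE == VOIDmode; the store itself is
   then done by store_bit_field.)  */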

static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode,
	     unsignedp, align, total_size, alias_set)
     rtx target;
     HOST_WIDE_INT bitsize;
     HOST_WIDE_INT bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     unsigned int align;
     HOST_WIDE_INT total_size;
     int alias_set;
{
  HOST_WIDE_INT width_mask = 0;

  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, 0);

  if (bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  */

  if (mode == BLKmode
      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
    {
      rtx object
	= assign_temp
	  (build_qualified_type (type_for_mode (GET_MODE (target), 0),
				 TYPE_QUAL_CONST),
	   0, 1, 1);
      rtx blk_object = copy_rtx (object);

      PUT_MODE (blk_object, BLKmode);

      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
	emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
		   align, total_size, alias_set);

      /* Even though we aren't returning target, we need to
	 give it the updated value.  */
      emit_move_insn (target, object);

      return blk_object;
    }

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */

      if (bitpos != 0)
	abort ();
      return store_expr (exp, target, 0);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || GET_CODE (target) == REG
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
	  && (align < GET_MODE_ALIGNMENT (mode)
	      || bitpos % GET_MODE_ALIGNMENT (mode)))
      || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
	  && (TYPE_ALIGN (TREE_TYPE (exp)) > align
	      || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
      /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (bitsize >= 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 a big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	  && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
				       - bitsize),
			     temp, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TARGET and TEMP are both BLKmode, both
	 must be in memory and BITPOS must be aligned on a byte
	 boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
	{
	  unsigned int exp_align = expr_align (exp);

	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
	      || bitpos % BITS_PER_UNIT != 0)
	    abort ();

	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);

	  /* Make sure that ALIGN is no stricter than the alignment of EXP.  */
	  align = MIN (exp_align, align);

	  /* Find an alignment that is consistent with the bit position.  */
	  while ((bitpos % align) != 0)
	    align >>= 1;

	  emit_block_move (target, temp,
			   bitsize == -1 ? expr_size (exp)
			   : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				      / BITS_PER_UNIT),
			   align);

	  return value_mode == VOIDmode ? const0_rtx : target;
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
      if (value_mode != VOIDmode)
	{
	  /* The caller wants an rtx for the value.
	     If possible, avoid refetching from the bitfield itself.  */
	  if (width_mask != 0
	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
	    {
	      tree count;
	      enum machine_mode tmode;

	      if (unsignedp)
		return expand_and (temp,
				   GEN_INT
				   (trunc_int_for_mode
				    (width_mask,
				     GET_MODE (temp) == VOIDmode
				     ? value_mode
				     : GET_MODE (temp))), NULL_RTX);
	      tmode = GET_MODE (temp);
	      if (tmode == VOIDmode)
		tmode = value_mode;
	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
	    }
	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
				    NULL_RTX, value_mode, 0, align,
				    total_size);
	}
      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx;

      /* If a value is wanted, it must be the lhs;
	 so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
	  && ! CONSTANT_ADDRESS_P (addr)
	  /* A frame-pointer reference is already stable.  */
	  && ! (GET_CODE (addr) == PLUS
		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
	target = replace_equiv_address (target, copy_to_reg (addr));

      /* Now build a reference to just the desired component.  */

      to_rtx = copy_rtx (adjust_address (target, mode,
					 bitpos / BITS_PER_UNIT));

      MEM_SET_IN_STRUCT_P (to_rtx, 1);

      /* If the address of the structure varies, then it might be on
	 the stack.  And, stack slots may be shared across scopes.
	 So, two different structures, of different types, can end up
	 at the same location.  We will give the structures alias set
	 zero; here we must be careful not to give non-zero alias sets
	 to their fields.  */
      set_mem_alias_set (to_rtx,
			 rtx_varies_p (addr, /*for_alias=*/0)
			 ? 0 : alias_set);

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
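
/* Editor's sketch (not part of the original file): the value-mode fixup
   above, restated in plain host C for a value already sitting in a
   HOST_WIDE_INT.  An unsigned caller gets the low BITSIZE bits masked
   (the expand_and path); a signed caller gets them re-extended with the
   same left/right shift pair emitted via expand_shift.  */
#if 0
static HOST_WIDE_INT
example_field_value (word, bitsize, unsignedp)
     HOST_WIDE_INT word;
     int bitsize, unsignedp;
{
  HOST_WIDE_INT width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  if (unsignedp)
    return word & width_mask;
  return ((word << (HOST_BITS_PER_WIDE_INT - bitsize))
	  >> (HOST_BITS_PER_WIDE_INT - bitsize));
}
#endif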
\f
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.
   We set *PALIGNMENT to the alignment of the address that will be
   computed.  This is the alignment of the thing we return if *POFFSET
   is zero, but can be more or less strictly aligned if *POFFSET is nonzero.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */

tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
		     punsignedp, pvolatilep, palignment)
     tree exp;
     HOST_WIDE_INT *pbitsize;
     HOST_WIDE_INT *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
     unsigned int *palignment;
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = size_zero_node;
  tree bit_offset = bitsize_zero_node;
  unsigned int alignment = BIGGEST_ALIGNMENT;
  tree tem;

  /* First get the mode, signedness, and size.  We do this from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));

      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	*pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! host_integerp (size_tree, 1))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_low_cst (size_tree, 1);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      if (TREE_CODE (exp) == BIT_FIELD_REF)
	bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
      else if (TREE_CODE (exp) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (exp, 1);
	  tree this_offset = DECL_FIELD_OFFSET (field);

	  /* If this field hasn't been filled in yet, don't go
	     past it.  This should only happen when folding expressions
	     made during type construction.  */
	  if (this_offset == 0)
	    break;
	  else if (! TREE_CONSTANT (this_offset)
		   && contains_placeholder_p (this_offset))
	    this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);

	  offset = size_binop (PLUS_EXPR, offset, this_offset);
	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
				   DECL_FIELD_BIT_OFFSET (field));

	  if (! host_integerp (offset, 0))
	    alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
	}

      else if (TREE_CODE (exp) == ARRAY_REF
	       || TREE_CODE (exp) == ARRAY_RANGE_REF)
	{
	  tree index = TREE_OPERAND (exp, 1);
	  tree array = TREE_OPERAND (exp, 0);
	  tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	  tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));

	  /* We assume all arrays have sizes that are a multiple of a byte.
	     First subtract the lower bound, if any, in the type of the
	     index, then convert to sizetype and multiply by the size of the
	     array element.  */
	  if (low_bound != 0 && ! integer_zerop (low_bound))
	    index = fold (build (MINUS_EXPR, TREE_TYPE (index),
				 index, low_bound));

	  /* If the index has a self-referential type, pass it to a
	     WITH_RECORD_EXPR; if the component size is, pass our
	     component to one.  */
	  if (! TREE_CONSTANT (index)
	      && contains_placeholder_p (index))
	    index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
	  if (! TREE_CONSTANT (unit_size)
	      && contains_placeholder_p (unit_size))
	    unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);

	  offset = size_binop (PLUS_EXPR, offset,
			       size_binop (MULT_EXPR,
					   convert (sizetype, index),
					   unit_size));
	}

      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
	       && ! ((TREE_CODE (exp) == NOP_EXPR
		      || TREE_CODE (exp) == CONVERT_EXPR)
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      /* If the offset is non-constant already, then we can't assume any
	 alignment more than the alignment here.  */
      if (! TREE_CONSTANT (offset))
	alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));

      exp = TREE_OPERAND (exp, 0);
    }

  if (DECL_P (exp))
    alignment = MIN (alignment, DECL_ALIGN (exp));
  else if (TREE_TYPE (exp) != 0)
    alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Otherwise, split it up.  */
  if (host_integerp (offset, 0)
      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
				 bitsize_unit_node))
      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
      && host_integerp (tem, 0))
    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
  else
    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;

  *pmode = mode;
  *palignment = alignment;
  return exp;
}
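
/* Editor's usage sketch (not part of the original file): a hypothetical
   caller unpacking a reference such as `s.a[i].b'.  get_inner_reference
   peels the COMPONENT_REFs and ARRAY_REF and returns the tree for `s',
   with the constant part of the displacement in BITPOS and any variable
   part (here, the one contributed by `i') in OFFSET.  */
#if 0
static tree
example_get_inner_reference (exp)
     tree exp;
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp = 0, volatilep = 0;
  unsigned int alignment;

  return get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			      &unsignedp, &volatilep, &alignment);
}
#endif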

/* Subroutine of expand_expr: compute memory_usage from modifier.  */

static enum memory_use_mode
get_memory_usage_from_modifier (modifier)
     enum expand_modifier modifier;
{
  switch (modifier)
    {
    case EXPAND_NORMAL:
    case EXPAND_SUM:
      return MEMORY_USE_RO;
      break;
    case EXPAND_MEMORY_USE_WO:
      return MEMORY_USE_WO;
      break;
    case EXPAND_MEMORY_USE_RW:
      return MEMORY_USE_RW;
      break;
    case EXPAND_MEMORY_USE_DONT:
      /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
	 MEMORY_USE_DONT, because they are modifiers to a call of
	 expand_expr in the ADDR_EXPR case of expand_expr.  */
    case EXPAND_CONST_ADDRESS:
    case EXPAND_INITIALIZER:
      return MEMORY_USE_DONT;
    case EXPAND_MEMORY_USE_BAD:
    default:
      abort ();
    }
}
\f
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (value, target)
     rtx value, target;
{
  register optab binoptab = 0;
  /* Use a temporary to force order of execution of calls to
     `force_operand'.  */
  rtx tmp;
  register rtx op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  register rtx subtarget = get_subtarget (target);

  /* Check for a PIC address load.  */
  if (flag_pic
      && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (GET_CODE (value) == PLUS)
    binoptab = add_optab;
  else if (GET_CODE (value) == MINUS)
    binoptab = sub_optab;
  else if (GET_CODE (value) == MULT)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_mult (GET_MODE (value), tmp,
			  force_operand (op2, NULL_RTX),
			  target, 1);
    }

  if (binoptab)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
	{
	  binoptab = add_optab;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_binop (GET_MODE (value), binoptab,
				   XEXP (XEXP (value, 0), 0), op2,
				   subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_binop (GET_MODE (value), binoptab, temp,
			       force_operand (XEXP (XEXP (value, 0), 1), 0),
			       target, 0, OPTAB_LIB_WIDEN);
	}

      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_binop (GET_MODE (value), binoptab, tmp,
			   force_operand (op2, NULL_RTX),
			   target, 0, OPTAB_LIB_WIDEN);
      /* We give UNSIGNEDP = 0 to expand_binop
	 because the only operations we are expanding here are signed ones.  */
    }
  return value;
}
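
/* Editor's usage sketch (not part of the original file): collapsing an
   address-arithmetic expression into a pseudo register.  */
#if 0
static rtx
example_force_operand ()
{
  rtx reg = gen_reg_rtx (Pmode);
  rtx sum = gen_rtx_PLUS (Pmode, reg, GEN_INT (4));

  /* Emits the addition and returns a pseudo holding reg + 4;
     constants and bare registers would be returned unchanged.  */
  return force_operand (sum, NULL_RTX);
}
#endif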
\f
/* Subroutine of expand_expr:
   save the non-copied parts (LIST) of an expr (LHS), and return a list
   which can restore these values to their previous values,
   should something modify their storage.  */

static tree
save_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
	rtx target
	  = assign_temp (build_qualified_type (part_type,
					       (TYPE_QUALS (part_type)
						| TYPE_QUAL_CONST)),
			 0, 1, 1);

	parts = tree_cons (to_be_saved,
			   build (RTL_EXPR, part_type, NULL_TREE,
				  (tree) validize_mem (target)),
			   parts);
	store_expr (TREE_PURPOSE (parts),
		    RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
      }
  return parts;
}

/* Subroutine of expand_expr:
   record the non-copied parts (LIST) of an expr (LHS), and return a list
   which specifies the initial values of these parts.  */

static tree
init_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
    else if (TREE_PURPOSE (tail))
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
	parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
      }
  return parts;
}

/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

int
safe_from_p (x, exp, top_p)
     rtx x;
     tree exp;
     int top_p;
{
  rtx exp_rtl = 0;
  int i, nops;
  static tree save_expr_list;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
		 != INTEGER_CST)
	  && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (GET_CODE (x) == MEM
	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* A SAVE_EXPR might appear many times in the expression passed to the
     top-level safe_from_p call, and if it has a complex subexpression,
     examining it multiple times could result in a combinatorial explosion.
     E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
     with optimization took about 28 minutes to compile -- even though it was
     only a few lines long.  So we mark each SAVE_EXPR we see with TREE_PRIVATE
     and turn that off when we are done.  We keep a list of the SAVE_EXPRs
     we have processed.  Note that the only test of top_p was above.  */

  if (top_p)
    {
      int rtn;
      tree t;

      save_expr_list = 0;

      rtn = safe_from_p (x, exp, 0);

      for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
	TREE_PRIVATE (TREE_PURPOSE (t)) = 0;

      return rtn;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
	return ((TREE_VALUE (exp) == 0
		 || safe_from_p (x, TREE_VALUE (exp), 0))
		&& (TREE_CHAIN (exp) == 0
		    || safe_from_p (x, TREE_CHAIN (exp), 0)));
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR?  */
      else
	return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
	      && safe_from_p (x, TREE_OPERAND (exp, 1), 0));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  return (staticp (TREE_OPERAND (exp, 0))
		  || TREE_STATIC (exp)
		  || safe_from_p (x, TREE_OPERAND (exp, 0), 0));

	case INDIRECT_REF:
	  if (GET_CODE (x) == MEM
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
	    return 0;
	  break;

	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || GET_CODE (x) == MEM)
	    return 0;
	  break;

	case RTL_EXPR:
	  /* If a sequence exists, we would have to scan every instruction
	     in the sequence to see if it was safe.  This is probably not
	     worthwhile.  */
	  if (RTL_EXPR_SEQUENCE (exp))
	    return 0;

	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case WITH_CLEANUP_EXPR:
	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case CLEANUP_POINT_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	case SAVE_EXPR:
	  exp_rtl = SAVE_EXPR_RTL (exp);
	  if (exp_rtl)
	    break;

	  /* If we've already scanned this, don't do it again.  Otherwise,
	     show we've scanned it and record for clearing the flag if we're
	     going on.  */
	  if (TREE_PRIVATE (exp))
	    return 1;

	  TREE_PRIVATE (exp) = 1;
	  if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    {
	      TREE_PRIVATE (exp) = 0;
	      return 0;
	    }

	  save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
	  return 1;

	case BIND_EXPR:
	  /* The only operand we look at is operand 1.  The rest aren't
	     part of the expression.  */
	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);

	case METHOD_CALL_EXPR:
	  /* This takes a rtx argument, but shouldn't appear here.  */
	  abort ();

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = first_rtl_op (TREE_CODE (exp));
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

      /* If this is a language-specific tree code, it may require
	 special handling.  */
      if ((unsigned int) TREE_CODE (exp)
	  >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
	  && lang_safe_from_p
	  && !(*lang_safe_from_p) (x, exp))
	return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (GET_CODE (exp_rtl) == REG
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
		    && true_dependence (exp_rtl, GET_MODE (x), x,
					rtx_addr_varies_p)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
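
/* (Editor's illustration: when expanding an assignment such as
   `*p = a + f (b)', the expander asks safe_from_p whether the RHS can
   be computed directly into the destination.  The CALL_EXPR case above
   answers no for any MEM target, since a call may clobber all of
   memory, so a temporary is used instead.)  */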

/* Subroutine of expand_expr: return nonzero iff EXP is an
   expression whose type is statically determinable.  */

static int
fixed_type_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == PARM_DECL
      || TREE_CODE (exp) == VAR_DECL
      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
      || TREE_CODE (exp) == COMPONENT_REF
      || TREE_CODE (exp) == ARRAY_REF)
    return 1;
  return 0;
}

/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

static rtx
var_rtx (exp)
     tree exp;
{
  STRIP_NOPS (exp);
  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
}

#ifdef MAX_INTEGER_COMPUTATION_MODE

void
check_max_integer_computation_mode (exp)
     tree exp;
{
  enum tree_code code;
  enum machine_mode mode;

  /* Strip any NOPs that don't change the mode.  */
  STRIP_NOPS (exp);
  code = TREE_CODE (exp);

  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
  if (code == NOP_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
    return;

  /* First check the type of the overall operation.  We need only look at
     unary, binary and relational operations.  */
  if (TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2'
      || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  /* Check operand of a unary op.  */
  if (TREE_CODE_CLASS (code) == '1')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  /* Check operands of a binary/comparison op.  */
  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");

      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }
}
#endif
\f
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural
   mode.  TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
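
/* (Editor's illustration: under EXPAND_SUM, expanding `p + 4' may
   simply return (plus (reg) (const_int 4)) without emitting an add
   insn, leaving the caller free to fold the sum into an addressing
   mode; under EXPAND_NORMAL the addition would be emitted and a pseudo
   register returned instead.)  */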

rtx
expand_expr (exp, target, tmode, modifier)
     register tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  register rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  register enum machine_mode mode;
  register enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;
  /* Used by check-memory-usage to make modifier read only.  */
  enum expand_modifier ro_modifier;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
    {
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
	return op0;
      return const0_rtx;
    }

  mode = TYPE_MODE (type);
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
		 || code == COND_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* Make a read-only version of the modifier.  */
  if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
      || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
    ro_modifier = modifier;
  else
    ro_modifier = EXPAND_NORMAL;

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
	  if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
	  || code == INDIRECT_REF || code == BUFFER_REF)
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
			    VOIDmode, ro_modifier);
      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
		       ro_modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
		       ro_modifier);
	  return const0_rtx;
	}
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
			    VOIDmode, ro_modifier);
      else if (code == BIT_FIELD_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
		       ro_modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
		       ro_modifier);
	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
		       ro_modifier);
	  return const0_rtx;
	}

      target = 0;
    }

#ifdef MAX_INTEGER_COMPUTATION_MODE
  /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
     will handle it.  Do we really need to check this stuff at all?  */

  if (target
      && GET_MODE (target) != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != RTL_EXPR)
    {
      enum machine_mode mode = GET_MODE (target);

      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  if (tmode != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != ARRAY_RANGE_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != RTL_EXPR
      && GET_MODE_CLASS (tmode) == MODE_INT
      && tmode > MAX_INTEGER_COMPUTATION_MODE)
    internal_error ("unsupported wide integer operation");

  check_max_integer_computation_mode (exp);
#endif

  /* If will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = subtarget;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);
	/* Handle using a label in a containing function.  */
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  {
	    struct function *p = find_function_data (function);
	    p->expr->x_forced_labels
	      = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
				   p->expr->x_forced_labels);
	  }
	else
	  {
	    if (modifier == EXPAND_INITIALIZER)
	      forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
						 label_rtx (exp),
						 forced_labels);
	  }

	temp = gen_rtx_MEM (FUNCTION_MODE,
			    gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
	return temp;
      }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return CONST0_RTX (mode);
	}

      /* ... fall through ...  */

    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	{
	  layout_decl (exp, 0);
	  PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
	}

      /* Although static-storage variables start off initialized, according to
	 ANSI C, a memcpy could overwrite them with uninitialized values.  So
	 we check them too.  This also lets us check for read-only variables
	 accessed via a non-const declaration, in case it won't be detected
	 any other way (e.g., in an embedded system or OS kernel without
	 memory protection).

	 Aggregates are not checked here; they're handled elsewhere.  */
      if (cfun && current_function_check_memory_usage
	  && code == VAR_DECL
	  && GET_CODE (DECL_RTL (exp)) == MEM
	  && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
	{
	  enum memory_use_mode memory_usage;
	  memory_usage = get_memory_usage_from_modifier (modifier);

	  in_check_memory_usage = 1;
	  if (memory_usage != MEMORY_USE_DONT)
	    emit_library_call (chkr_check_addr_libfunc,
			       LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
			       XEXP (DECL_RTL (exp), 0), Pmode,
			       GEN_INT (int_size_in_bytes (type)),
			       TYPE_MODE (sizetype),
			       GEN_INT (memory_usage),
			       TYPE_MODE (integer_type_node));
	  in_check_memory_usage = 0;
	}

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
	abort ();

      /* Ensure the variable is marked as used even if it doesn't go
	 through a parser.  If it hasn't been used yet, write out an
	 external definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
	  && context != inline_function_decl
	  /* If var is static, we don't need a static chain to access it.  */
	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
	{
	  rtx addr;

	  /* Mark as non-local and addressable.  */
	  DECL_NONLOCAL (exp) = 1;
	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
	    abort ();
	  mark_addressable (exp);
	  if (GET_CODE (DECL_RTL (exp)) != MEM)
	    abort ();
	  addr = XEXP (DECL_RTL (exp), 0);
	  if (GET_CODE (addr) == MEM)
	    addr
	      = replace_equiv_address (addr,
				       fix_lexical_addr (XEXP (addr, 0), exp));
	  else
	    addr = fix_lexical_addr (addr, exp);

	  temp = replace_equiv_address (DECL_RTL (exp), addr);
	}

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
	temp = validize_mem (DECL_RTL (exp));

      /* If DECL_RTL is memory, we are in the normal case and either
	 the address is not valid or it is not a register and -fforce-addr
	 is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && modifier != EXPAND_CONST_ADDRESS
	       && modifier != EXPAND_SUM
	       && modifier != EXPAND_INITIALIZER
	       && (! memory_address_p (DECL_MODE (exp),
				       XEXP (DECL_RTL (exp), 0))
		   || (flag_force_addr
		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
	temp = replace_equiv_address (DECL_RTL (exp),
				      copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

	  return temp;
	}

      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */
6388
6389 if (GET_CODE (DECL_RTL (exp)) == REG
6390 && GET_MODE (DECL_RTL (exp)) != mode)
6391 {
1499e0a8
RK
6392 /* Get the signedness used for this variable. Ensure we get the
6393 same mode we got when the variable was declared. */
78911e8b
RK
6394 if (GET_MODE (DECL_RTL (exp))
6395 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
1499e0a8
RK
6396 abort ();
6397
ddef6bc7 6398 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
1499e0a8
RK
6399 SUBREG_PROMOTED_VAR_P (temp) = 1;
6400 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6401 return temp;
6402 }
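
      /* Illustrative note, not in the original source: on a target whose
	 PROMOTE_MODE widens QImode or HImode locals to SImode, a `short'
	 variable can live in an SImode pseudo.  The SUBREG returned above,
	 with SUBREG_PROMOTED_VAR_P set, records that the wide register
	 already holds a correctly sign- or zero-extended value, so later
	 extensions can often be skipped.  */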

      return DECL_RTL (exp);

    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp), mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
			  EXPAND_MEMORY_USE_BAD);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);

    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
	output_constant_def (exp, 1);

      /* TREE_CST_RTL probably contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
	      || (flag_force_addr
		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
	return replace_equiv_address (TREE_CST_RTL (exp),
				      copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);

    case EXPR_WITH_FILE_LOCATION:
      {
	rtx to_return;
	const char *saved_input_filename = input_filename;
	int saved_lineno = lineno;
	input_filename = EXPR_WFL_FILENAME (exp);
	lineno = EXPR_WFL_LINENO (exp);
	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
	  emit_line_note (input_filename, lineno);
	/* Possibly avoid switching back and forth here.  */
	to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
	input_filename = saved_input_filename;
	lineno = saved_lineno;
	return to_return;
      }

    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* If this SAVE_EXPR was at global context, assume we are an
	 initialization function and move it into our context.  */
      if (context == 0)
	SAVE_EXPR_CONTEXT (exp) = current_function_decl;

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;

      /* If this is non-local, handle it.  */
      if (context)
	{
	  /* The following call just exists to abort if the context is
	     not of a containing function.  */
	  find_function_data (context);

	  temp = SAVE_EXPR_RTL (exp);
	  if (temp && GET_CODE (temp) == REG)
	    {
	      put_var_into_stack (exp);
	      temp = SAVE_EXPR_RTL (exp);
	    }
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
	  return
	    replace_equiv_address (temp,
				   fix_lexical_addr (XEXP (temp, 0), exp));
	}
      if (SAVE_EXPR_RTL (exp) == 0)
	{
	  if (mode == VOIDmode)
	    temp = const0_rtx;
	  else
	    temp = assign_temp (build_qualified_type (type,
						      (TYPE_QUALS (type)
						       | TYPE_QUAL_CONST)),
				3, 0, 0);

	  SAVE_EXPR_RTL (exp) = temp;
	  if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
						save_expr_regs);

	  /* If the mode of TEMP does not match that of the expression, it
	     must be a promoted value.  We pass store_expr a SUBREG of the
	     wanted mode but mark it so that we know that it was already
	     extended.  Note that `unsignedp' was modified above in
	     this case.  */

	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
	    {
	      temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
	      SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	    }

	  if (temp == const0_rtx)
	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			 EXPAND_MEMORY_USE_BAD);
	  else
	    store_expr (TREE_OPERAND (exp, 0), temp, 0);

	  TREE_USED (exp) = 1;
	}

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression,
	 it must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
	{
	  /* Compute the signedness and make the proper SUBREG.  */
	  promote_mode (type, mode, &unsignedp, 0);
	  temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return SAVE_EXPR_RTL (exp);

    case UNSAVE_EXPR:
      {
	rtx temp;
	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
	return temp;
      }

    case PLACEHOLDER_EXPR:
      {
	tree placeholder_expr;

	/* If there is an object on the head of the placeholder list,
	   see if any object in it is of type TYPE or a pointer to it.  For
	   further information, see tree.def.  */
	for (placeholder_expr = placeholder_list;
	     placeholder_expr != 0;
	     placeholder_expr = TREE_CHAIN (placeholder_expr))
	  {
	    tree need_type = TYPE_MAIN_VARIANT (type);
	    tree object = 0;
	    tree old_list = placeholder_list;
	    tree elt;

	    /* Find the outermost reference that is of the type we want.
	       If none, see if any object has a type that is a pointer to
	       the type we want.  */
	    for (elt = TREE_PURPOSE (placeholder_expr);
		 elt != 0 && object == 0;
		 elt
		 = ((TREE_CODE (elt) == COMPOUND_EXPR
		     || TREE_CODE (elt) == COND_EXPR)
		    ? TREE_OPERAND (elt, 1)
		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		    ? TREE_OPERAND (elt, 0) : 0))
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
		object = elt;

	    for (elt = TREE_PURPOSE (placeholder_expr);
		 elt != 0 && object == 0;
		 elt
		 = ((TREE_CODE (elt) == COMPOUND_EXPR
		     || TREE_CODE (elt) == COND_EXPR)
		    ? TREE_OPERAND (elt, 1)
		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		    ? TREE_OPERAND (elt, 0) : 0))
	      if (POINTER_TYPE_P (TREE_TYPE (elt))
		  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		      == need_type))
		object = build1 (INDIRECT_REF, need_type, elt);

	    if (object != 0)
	      {
		/* Expand this object skipping the list entries before
		   it was found in case it is also a PLACEHOLDER_EXPR.
		   In that case, we want to translate it using subsequent
		   entries.  */
		placeholder_list = TREE_CHAIN (placeholder_expr);
		temp = expand_expr (object, original_target, tmode,
				    ro_modifier);
		placeholder_list = old_list;
		return temp;
	      }
	  }
      }

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
	 and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
			    tmode, ro_modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;
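
      /* Illustrative note, not in the original source: the
	 PLACEHOLDER_EXPR / WITH_RECORD_EXPR pair supports self-referential
	 types, e.g. an Ada record whose field bounds depend on the record
	 object itself.  A size expression containing a PLACEHOLDER_EXPR is
	 wrapped in a WITH_RECORD_EXPR naming a particular object; expanding
	 the WITH_RECORD_EXPR pushes that object onto placeholder_list so
	 the PLACEHOLDER_EXPR code above can substitute it.  */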

    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
	expand_goto (TREE_OPERAND (exp, 0));
      else
	expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL,
				 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;

    case LABELED_BLOCK_EXPR:
      if (LABELED_BLOCK_BODY (exp))
	expand_expr_stmt (LABELED_BLOCK_BODY (exp));
      /* Should perhaps use expand_label, but this is simpler and safer.  */
      do_pending_stack_adjust ();
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;

    case EXIT_BLOCK_EXPR:
      if (EXIT_BLOCK_RETURN (exp))
	sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt (TREE_OPERAND (exp, 0));
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;

    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);
	int vars_need_expansion = 0;

	/* Need to open a binding contour here because
	   if there are any cleanups they must be contained here.  */
	expand_start_bindings (2);

	/* Mark the corresponding BLOCK for output in its proper place.  */
	if (TREE_OPERAND (exp, 2) != 0
	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
	  insert_block (TREE_OPERAND (exp, 2));

	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (!DECL_RTL_SET_P (vars))
	      {
		vars_need_expansion = 1;
		expand_decl (vars);
	      }
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }

	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);

	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

	return temp;
      }

    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
	{
	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
	    abort ();
	  emit_insns (RTL_EXPR_SEQUENCE (exp));
	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
	}
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);

    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;
	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
			 EXPAND_MEMORY_USE_BAD);
	  return const0_rtx;
	}

      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp, 1)))
		    || TREE_ADDRESSABLE (exp)
		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
			&& (! MOVE_BY_PIECES_P
			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
			     TYPE_ALIGN (type)))
			&& ! mostly_zeros_p (exp))))
	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
	{
	  rtx constructor = output_constant_def (exp, 1);

	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM)
	    constructor = validize_mem (constructor);

	  return constructor;
	}
      else
	{
	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (target == 0 || ! safe_from_p (target, exp, 1)
	      || GET_CODE (target) == PARALLEL)
	    target
	      = assign_temp (build_qualified_type (type,
						   (TYPE_QUALS (type)
						    | (TREE_READONLY (exp)
						       * TYPE_QUAL_CONST))),
			     TREE_ADDRESSABLE (exp), 1, 1);

	  store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
			     int_size_in_bytes (TREE_TYPE (exp)));
	  return target;
	}

    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);
	tree index;
	tree string = string_constant (exp1, &index);

	/* Try to optimize reads from const strings.  */
	if (string
	    && TREE_CODE (string) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1
	    && modifier != EXPAND_MEMORY_USE_WO)
	  return
	    GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
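
	/* Illustrative note, not in the original source: this catches a
	   read such as *("abc" + 1) and folds the whole load down to
	   GEN_INT ('b'), provided the index is a known, in-range constant
	   and the element is a single byte.  */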

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);

	if (cfun && current_function_check_memory_usage
	    && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
	  {
	    enum memory_use_mode memory_usage;
	    memory_usage = get_memory_usage_from_modifier (modifier);

	    if (memory_usage != MEMORY_USE_DONT)
	      {
		in_check_memory_usage = 1;
		emit_library_call (chkr_check_addr_libfunc,
				   LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
				   Pmode, GEN_INT (int_size_in_bytes (type)),
				   TYPE_MODE (sizetype),
				   GEN_INT (memory_usage),
				   TYPE_MODE (integer_type_node));
		in_check_memory_usage = 0;
	      }
	  }

	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);

	/* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
	   here, because, in C and C++, the fact that a location is accessed
	   through a pointer to const does not mean that the value there can
	   never change.  Languages where it can never change should
	   also set TREE_STATIC.  */
	RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);

	/* If we are writing to this object and its type is a record with
	   readonly fields, we must mark it as readonly so it will
	   conflict with readonly references to those fields.  */
	if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
	  RTX_UNCHANGING_P (temp) = 1;

	return temp;
      }

    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	abort ();

      {
	tree array = TREE_OPERAND (exp, 0);
	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	tree index = convert (sizetype, TREE_OPERAND (exp, 1));
	HOST_WIDE_INT i;

	/* Optimize the special case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1,
	   and its mode is QI.  Without the conversion, (ARRAY
	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */

	if (! integer_zerop (low_bound))
	  index = size_diffop (index, convert (sizetype, low_bound));

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
	    && TREE_CODE (array) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1)
	  return
	    GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
	    && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
	    && TREE_CODE (index) == INTEGER_CST
	    && 0 > compare_tree_int (index,
				     list_length (CONSTRUCTOR_ELTS
						  (TREE_OPERAND (exp, 0)))))
	  {
	    tree elem;

	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
		 i = TREE_INT_CST_LOW (index);
		 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
	      ;

	    if (elem)
	      return expand_expr (fold (TREE_VALUE (elem)), target,
				  tmode, ro_modifier);
	  }

	else if (optimize >= 1
		 && modifier != EXPAND_CONST_ADDRESS
		 && modifier != EXPAND_INITIALIZER
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);

		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    tree elem;

		    for (elem = CONSTRUCTOR_ELTS (init);
			 (elem
			  && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
			 elem = TREE_CHAIN (elem))
		      ;

		    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
		      return expand_expr (fold (TREE_VALUE (elem)), target,
					  tmode, ro_modifier);
		  }
		else if (TREE_CODE (init) == STRING_CST
			 && 0 > compare_tree_int (index,
						  TREE_STRING_LENGTH (init)))
		  {
		    tree type = TREE_TYPE (TREE_TYPE (init));
		    enum machine_mode mode = TYPE_MODE (type);

		    if (GET_MODE_CLASS (mode) == MODE_INT
			&& GET_MODE_SIZE (mode) == 1)
		      return (GEN_INT
			      (TREE_STRING_POINTER
			       (init)[TREE_INT_CST_LOW (index)]));
		  }
	      }
	  }
      }
      /* Fall through.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  Don't do this if we have
	 already written the data since we want to refer to that copy
	 and varasm.c assumes that's what we'll do.  */
      if (code == COMPONENT_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
	       elt = TREE_CHAIN (elt))
	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
			 == MODE_INT)
			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
		  {
		    HOST_WIDE_INT bitsize
		      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));

		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (op0, op1, target);
		      }
		    else
		      {
			enum machine_mode imode
			  = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
			tree count
			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
					 0);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
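
      /* Illustrative note, not in the original source: for an unsigned
	 bitfield of, say, 3 bits the mask built above is
	 ((HOST_WIDE_INT) 1 << 3) - 1 == 7, reproducing the truncation the
	 value would have undergone had the constructor been stored; for a
	 signed bitfield the left/right shift pair sign-extends instead.  */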

      {
	enum machine_mode mode1;
	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int volatilep = 0;
	unsigned int alignment;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep,
					&alignment);

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();

	/* If TEM's type is a union of variable size, pass TARGET to the
	   inner computation, since it will need a temporary and TARGET is
	   known to be suitable.  This occurs in unchecked conversion in
	   Ada.  */

	op0 = expand_expr (tem,
			   (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			    && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
				!= INTEGER_CST)
			    ? target : NULL_RTX),
			   VOIDmode,
			   (modifier == EXPAND_INITIALIZER
			    || modifier == EXPAND_CONST_ADDRESS)
			   ? modifier : EXPAND_NORMAL);

	/* If this is a constant, put it into a register if it is a
	   legitimate constant and OFFSET is 0 and memory if it isn't.  */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
		&& offset == 0)
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }

	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	    /* If this object is in a register, put it into memory.
	       This case can't occur in C, but can in Ada if we have
	       unchecked conversion of an expression from a scalar type to
	       an array or record type.  */
	    if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		|| GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
	      {
		/* If the operand is a SAVE_EXPR, we can deal with this by
		   forcing the SAVE_EXPR into memory.  */
		if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
		  {
		    put_var_into_stack (TREE_OPERAND (exp, 0));
		    op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
		  }
		else
		  {
		    tree nt
		      = build_qualified_type (TREE_TYPE (tem),
					      (TYPE_QUALS (TREE_TYPE (tem))
					       | TYPE_QUAL_CONST));
		    rtx memloc = assign_temp (nt, 1, 1, 1);

		    mark_temp_addr_taken (memloc);
		    emit_move_insn (memloc, op0);
		    op0 = memloc;
		  }
	      }

	    if (GET_CODE (op0) != MEM)
	      abort ();

	    if (GET_MODE (offset_rtx) != ptr_mode)
	      {
#ifdef POINTERS_EXTEND_UNSIGNED
		offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
#else
		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
	      }

	    /* A constant address in OP0 can have VOIDmode; we must not try
	       to call force_reg in that case, so avoid it.  */
	    if (GET_CODE (op0) == MEM
		&& GET_MODE (op0) == BLKmode
		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
		&& bitsize != 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& alignment == GET_MODE_ALIGNMENT (mode1))
	      {
		rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

		if (GET_CODE (XEXP (temp, 0)) == REG)
		  op0 = temp;
		else
		  op0 = (replace_equiv_address
			 (op0,
			  force_reg (GET_MODE (XEXP (temp, 0)),
				     XEXP (temp, 0))));
		bitpos = 0;
	      }

	    op0 = change_address (op0, VOIDmode,
				  gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
						force_reg (ptr_mode,
							   offset_rtx)));
	  }

	/* Don't forget about volatility even if this is a bitfield.  */
	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    op0 = copy_rtx (op0);
	    MEM_VOLATILE_P (op0) = 1;
	  }

	/* Check the access.  */
	if (cfun != 0 && current_function_check_memory_usage
	    && GET_CODE (op0) == MEM)
	  {
	    enum memory_use_mode memory_usage;
	    memory_usage = get_memory_usage_from_modifier (modifier);

	    if (memory_usage != MEMORY_USE_DONT)
	      {
		rtx to;
		int size;

		to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
		size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;

		/* Check the access right of the pointer.  */
		in_check_memory_usage = 1;
		if (size > BITS_PER_UNIT)
		  emit_library_call (chkr_check_addr_libfunc,
				     LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
				     Pmode, GEN_INT (size / BITS_PER_UNIT),
				     TYPE_MODE (sizetype),
				     GEN_INT (memory_usage),
				     TYPE_MODE (integer_type_node));
		in_check_memory_usage = 0;
	      }
	  }

	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.  */
	if (mode1 == VOIDmode
	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
		&& modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER)
	    /* If the field isn't aligned enough to fetch as a memref,
	       fetch it as a bit field.  */
	    || (mode1 != BLKmode
		&& SLOW_UNALIGNED_ACCESS (mode1, alignment)
		&& ((TYPE_ALIGN (TREE_TYPE (tem))
		     < GET_MODE_ALIGNMENT (mode))
		    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
	    /* If the type and the field are a constant size and the
	       size of the type isn't the same size as the bitfield,
	       we must use bitfield operations.  */
	    || (bitsize >= 0
		&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
		    == INTEGER_CST)
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
					  bitsize))
	    || (mode == BLKmode
		&& SLOW_UNALIGNED_ACCESS (mode, alignment)
		&& (TYPE_ALIGN (type) > alignment
		    || bitpos % TYPE_ALIGN (type) != 0)))
	  {
	    enum machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode
		&& ! (target != 0 && GET_CODE (op0) == MEM
		      && GET_CODE (target) == MEM
		      && bitpos % BITS_PER_UNIT == 0))
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		if (GET_CODE (op0) != MEM
		    || (target != 0 && GET_CODE (target) != MEM)
		    || bitpos % BITS_PER_UNIT != 0)
		  abort ();

		op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
		if (target == 0)
		  target = assign_temp (type, 0, 1, 1);

		emit_block_move (target, op0,
				 bitsize == -1 ? expr_size (exp)
				 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					    / BITS_PER_UNIT),
				 BITS_PER_UNIT);

		return target;
	      }

	    op0 = validize_mem (op0);

	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
	      mark_reg_pointer (XEXP (op0, 0), alignment);

	    op0 = extract_bit_field (op0, bitsize, bitpos,
				     unsignedp, target, ext_mode, ext_mode,
				     alignment,
				     int_size_in_bytes (TREE_TYPE (tem)));

	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					    - bitsize),
				  op0, 1);

	    if (mode == BLKmode)
	      {
		tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
						TYPE_QUAL_CONST);
		rtx new = assign_temp (nt, 0, 1, 1);

		emit_move_insn (new, op0);
		op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
	      }

	    return op0;
	  }

	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	else
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

	set_mem_attributes (op0, exp, 0);
	if (GET_CODE (XEXP (op0, 0)) == REG)
	  mark_reg_pointer (XEXP (op0, 0), alignment);

	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }

      /* Intended for a reference to a buffer of a file-object in Pascal.
	 But it's not certain that a special tree code will really be
	 necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
	/* Pascal set IN expression.

	   Algorithm:
	       rlo       = set_low - (set_low%bits_per_word);
	       the_word  = set [ (index - rlo)/bits_per_word ];
	       bit_index = index % bits_per_word;
	       bitmask   = 1 << bit_index;
	       return !!(the_word & bitmask);  */
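
	/* Illustrative walk-through, not in the original source: with
	   bits_per_word == 8, set_low == 10 and index == 21 this gives
	   rlo = 10 - 2 = 8, the_word = set[(21 - 8) / 8] = set[1],
	   bit_index = 21 % 8 = 5 and bitmask = 1 << 5, so the test picks
	   bit 5 out of the second byte of the set.  */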

	tree set = TREE_OPERAND (exp, 0);
	tree index = TREE_OPERAND (exp, 1);
	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
	tree set_type = TREE_TYPE (set);
	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
	rtx setval = expand_expr (set, 0, VOIDmode, 0);
	rtx setaddr = XEXP (setval, 0);
	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
	rtx rlow;
	rtx diff, quo, rem, addr, bit, result;

	/* If domain is empty, answer is no.  Likewise if index is constant
	   and out of bounds.  */
	if (((TREE_CODE (set_high_bound) == INTEGER_CST
	      && TREE_CODE (set_low_bound) == INTEGER_CST
	      && tree_int_cst_lt (set_high_bound, set_low_bound))
	     || (TREE_CODE (index) == INTEGER_CST
		 && TREE_CODE (set_low_bound) == INTEGER_CST
		 && tree_int_cst_lt (index, set_low_bound))
	     || (TREE_CODE (set_high_bound) == INTEGER_CST
		 && TREE_CODE (index) == INTEGER_CST
		 && tree_int_cst_lt (set_high_bound, index))))
	  return const0_rtx;

	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	/* If we get here, we have to generate the code for both cases
	   (in range and out of range).  */

	op0 = gen_label_rtx ();
	op1 = gen_label_rtx ();

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (lo_r) == CONST_INT))
	  {
	    emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
				     GET_MODE (index_val), iunsignedp, 0, op1);
	  }

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (hi_r) == CONST_INT))
	  {
	    emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
				     GET_MODE (index_val), iunsignedp, 0, op1);
	  }

	/* Calculate the element number of bit zero in the first word
	   of the set.  */
	if (GET_CODE (lo_r) == CONST_INT)
	  rlow = GEN_INT (INTVAL (lo_r)
			  & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
	else
	  rlow = expand_binop (index_mode, and_optab, lo_r,
			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);

	addr = memory_address (byte_mode,
			       expand_binop (index_mode, add_optab, diff,
					     setaddr, NULL_RTX, iunsignedp,
					     OPTAB_LIB_WIDEN));

	/* Extract the bit we want to examine.  */
	bit = expand_shift (RSHIFT_EXPR, byte_mode,
			    gen_rtx_MEM (byte_mode, addr),
			    make_tree (TREE_TYPE (index), rem),
			    NULL_RTX, 1);
	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
			       GET_MODE (target) == byte_mode ? target : 0,
			       1, OPTAB_LIB_WIDEN);

	if (result != target)
	  convert_move (target, result, 1);

	/* Output the code to handle the out-of-range case.  */
	emit_jump (op0);
	emit_label (op1);
	emit_move_insn (target, const0_rtx);
	emit_label (op0);
	return target;
      }

    case WITH_CLEANUP_EXPR:
      if (RTL_EXPR_RTL (exp) == 0)
	{
	  RTL_EXPR_RTL (exp)
	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
	  expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));

	  /* That's it for this cleanup.  */
	  TREE_OPERAND (exp, 2) = 0;
	}
      return RTL_EXPR_RTL (exp);

    case CLEANUP_POINT_EXPR:
      {
	/* Start a new binding layer that will keep track of all cleanup
	   actions to be performed.  */
	expand_start_bindings (2);

	target_temp_slot_level = temp_slot_level;

	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
	/* If we're going to use this value, load it up now.  */
	if (! ignore)
	  op0 = force_not_mem (op0);
	preserve_temp_slots (op0);
	expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	{
	  if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == BUILT_IN_FRONTEND)
	    return (*lang_expand_expr) (exp, original_target, tmode, modifier);
	  else
	    return expand_builtin (exp, target, subtarget, tmode, ignore);
	}

      return expand_call (exp, target, ignore);

    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

	  /* If both input and output are BLKmode, this conversion
	     isn't actually doing anything unless we need to make the
	     alignment stricter.  */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
	      && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
		  || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
	    return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
				modifier);

	  if (target == 0)
	    target = assign_temp (type, 0, 1, 1);

	  if (GET_CODE (target) == MEM)
	    /* Store data into beginning of memory target.  */
	    store_expr (TREE_OPERAND (exp, 0),
			adjust_address (target, TYPE_MODE (valtype), 0), 0);

	  else if (GET_CODE (target) == REG)
	    /* Store this field into a union of the proper type.  */
	    store_field (target,
			 MIN ((int_size_in_bytes (TREE_TYPE
						  (TREE_OPERAND (exp, 0)))
			       * BITS_PER_UNIT),
			      (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
			 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
			 VOIDmode, 0, BITS_PER_UNIT,
			 int_size_in_bytes (type), 0);
	  else
	    abort ();

	  /* Return the entire union.  */
	  return target;
	}

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     ro_modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return op0;
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
      if (GET_MODE (op0) == mode)
	return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
	return
	  convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
			 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));

      if (modifier == EXPAND_INITIALIZER)
	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (target == 0)
	return
	  convert_to_mode (mode, op0,
			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
	convert_move (target, op0,
		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case PLUS_EXPR:
      /* We come here from MINUS_EXPR when the second operand is a
	 constant.  */
    plus_expr:
      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS (mode) == MODE_INT)
		   ? addv_optab : add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}

      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
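
      /* Illustrative note, not in the original source: for P = &ARR[5]
	 with 4-byte elements the address tree is roughly
	 (PLUS_EXPR &ARR 20), and plus_constant can fold it to the single
	 address (symbol_ref ARR) + 20 instead of loading the SYMBOL_REF
	 into a register and adding 20 at run time.  */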
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      rtx constant_part;

	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
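	      /* Illustrative note, not in the original source: on a 64-bit
		 host targeting a 32-bit machine, the SImode constant
		 0xffffffff must become (const_int -1), not
		 (const_int 0xffffffff); immed_double_const performs that
		 truncation and sign extension.  */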
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
	      op1 = plus_constant (op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return op1;
	    }

	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      rtx constant_part;

	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
				 EXPAND_SUM);
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Don't go to both_summands if modifier
		     says it's not right to return a PLUS.  */
		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		    goto binop2;
		  goto both_summands;
		}
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	      op0 = plus_constant (op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return op0;
	    }
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);

    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
	  && CONSTANT_P (XEXP (op0, 1)))
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}
      /* If adding to a sum including a constant,
	 associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
	  && CONSTANT_P (XEXP (op1, 1)))
	{
	  rtx constant_term = const0_rtx;

	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
	  if (temp != 0)
	    op0 = temp;
	  /* Ensure that MULT comes first if there is one.  */
	  else if (GET_CODE (op0) == MULT)
	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
	  else
	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);

	  /* Let's also eliminate constants from op0 if possible.  */
	  op0 = eliminate_constant_term (op0, &constant_term);

	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
	     their sum should be a constant.  Form it into OP1, since the
	     result we want will then be OP0 + OP1.  */

	  temp = simplify_binary_operation (PLUS, mode, constant_term,
					    XEXP (op1, 1));
	  if (temp != 0)
	    op1 = temp;
	  else
	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
	}

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
	temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
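
      /* Illustrative note, not in the original source: the
	 canonicalization above turns a sum such as (x + 4) + (y * 8) into
	 (((y * 8) + x) + 4), with the MULT first and the constant last;
	 that is the shape address recognizers expect for
	 base + index*scale + displacement addressing modes.  */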

    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
				 VOIDmode, ro_modifier);
	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				 VOIDmode, ro_modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return plus_constant (op0, - INTVAL (op1));
	  else
	    return gen_rtx_MINUS (mode, op0, op1);
	}
      /* Convert A - const to A + (-const).  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	{
	  tree negated = fold (build1 (NEGATE_EXPR, type,
				       TREE_OPERAND (exp, 1)));

	  if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
	    /* If we can't negate the constant in TYPE, leave it alone and
	       expand_binop will negate it for us.  We used to try to do it
	       here in the signed version of TYPE, but that doesn't work
	       on POINTER_TYPEs.  */;
	  else
	    {
	      exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
	      goto plus_expr;
	    }
	}
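      /* Illustrative note, not in the original source: the rewrite above
	 turns e.g. "x - 5" into "x + (-5)" and jumps back to plus_expr,
	 so subtracting a constant reuses all of the PLUS_EXPR
	 address-forming machinery.  */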
      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS (mode) == MODE_INT)
		   ? subv_optab : sub_optab;
      goto binop;

    case MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  register tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

	  /* Apply distributive law if OP0 is x+c.  */
	  if (GET_CODE (op0) == PLUS
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
	    return
	      gen_rtx_PLUS
		(mode,
		 gen_rtx_MULT
		   (mode, XEXP (op0, 0),
		    GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
		 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
			  * INTVAL (XEXP (op0, 1))));

	  if (GET_CODE (op0) != REG)
	    op0 = force_operand (op0, NULL_RTX);
	  if (GET_CODE (op0) != REG)
	    op0 = copy_to_mode_reg (mode, op0);

	  return
	    gen_rtx_MULT (mode, op0,
			  GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
	}

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
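      /* Illustrative note, not in the original source: this is the
	 widening-multiply case, e.g. (int) s1 * (int) s2 with short
	 operands on a machine whose mulhisi-style pattern multiplies two
	 HImode values into an SImode result; the NOP_EXPR conversions are
	 peeled off and smul_widen_optab or umul_widen_optab is used
	 directly.  */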
7758 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7759 && TREE_CODE (type) == INTEGER_TYPE
7760 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7761 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7762 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7763 && int_fits_type_p (TREE_OPERAND (exp, 1),
7764 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7765 /* Don't use a widening multiply if a shift will do. */
7766 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 7767 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
7768 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7769 ||
7770 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7771 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7772 ==
7773 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7774 /* If both operands are extended, they must either both
7775 be zero-extended or both be sign-extended. */
7776 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7777 ==
7778 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7779 {
7780 enum machine_mode innermode
7781 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
b10af0c8
TG
7782 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7783 ? smul_widen_optab : umul_widen_optab);
bbf6f052
RK
7784 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7785 ? umul_widen_optab : smul_widen_optab);
b10af0c8 7786 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 7787 {
b10af0c8
TG
7788 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7789 {
7790 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7791 NULL_RTX, VOIDmode, 0);
7792 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7793 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7794 VOIDmode, 0);
7795 else
7796 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7797 NULL_RTX, VOIDmode, 0);
7798 goto binop2;
7799 }
7800 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7801 && innermode == word_mode)
7802 {
		  rtx htem;
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_expr (TREE_OPERAND (exp, 1),
						      NULL_RTX, VOIDmode, 0),
					 unsignedp);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  htem = expand_mult_highpart_adjust (innermode,
						      gen_highpart (innermode, temp),
						      op0, op1,
						      gen_highpart (innermode, temp),
						      unsignedp);
		  emit_move_insn (gen_highpart (innermode, temp), htem);
		  return temp;
		}
	    }
	}
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM;
	 then, if the divisor is constant, we can optimize the case
	 where some terms of the dividend have coefficients divisible
	 by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      /* Emit a/b as a*(1/b).  Later we may be able to CSE the reciprocal,
	 saving an expensive divide.  If not, combine will rebuild the
	 original computation.  */
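      /* For instance (an illustrative sketch, not from the original
	 comments): in "x / d; y / d;" both divisions become multiplies
	 by the same "1.0 / d", which CSE can then evaluate just once.  */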
      if (flag_unsafe_math_optimizations && optimize && !optimize_size
	  && !real_onep (TREE_OPERAND (exp, 0)))
	return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
				   build (RDIV_EXPR, type,
					  build_real (type, dconst1),
					  TREE_OPERAND (exp, 1))),
			    target, tmode, unsignedp);
      this_optab = flodiv_optab;
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);

    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode,
			  ! unsignedp && flag_trapv
			  && (GET_MODE_CLASS (mode) == MODE_INT)
			  ? negv_optab : neg_optab, op0, target, 0);
      if (temp == 0)
	abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));

    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (GET_CODE (target) == REG
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
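      /* E.g. (an illustrative sketch, not from the original comments):
	 "a < b ? a : b" can become a single instruction on a target with
	 a signed-min pattern; otherwise we fall through to the
	 compare-and-branch sequence below.  */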
      this_optab = (TREE_UNSIGNED (type)
		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
		    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (GET_CODE (target) == MEM)
	target = gen_reg_rtx (mode);

      if (target != op0)
	emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
	 compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GE, mode, ccp_jump))
	{
	  if (code == MAX_EXPR)
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  target, op1, NULL_RTX, op0);
	  else
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  op1, target, NULL_RTX, op0);
	}
      else
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
	  do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
				   unsignedp, mode, NULL_RTX, 0, NULL_RTX,
				   op0);
	}
      emit_move_insn (target, op1);
      emit_label (op0);
      return target;

    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */

    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);

      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
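      /* In other words (a sketch, not from the original comments): for
	 "r = (foo != 0)" we load foo into r, jump past a "move 1 into r"
	 when r is already zero, and so avoid a store-flag sequence.  */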
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && GET_CODE (original_target) == REG
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  if (temp != original_target)
	    temp = copy_to_reg (temp);

	  op1 = gen_label_rtx ();
	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				   GET_MODE (temp), unsignedp, 0, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}

      /* If no set-flag instruction, must generate a conditional
	 store into a temporary variable.  Drop through
	 and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0 || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && GET_CODE (target) == REG
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;

    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
			  (ignore ? const0_rtx : target),
			  VOIDmode, 0);

    case COND_EXPR:
      /* If we would have a "singleton" (see below) were it not for a
	 conversion in each arm, bring that conversion back out.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
	{
	  tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
	  tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

	  if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
	       && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
		  && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
	    return expand_expr (build1 (NOP_EXPR, type,
					build (COND_EXPR, TREE_TYPE (iftrue),
					       TREE_OPERAND (exp, 0),
					       iftrue, iffalse)),
				target, tmode, modifier);
	}

      {
	/* Note that COND_EXPRs whose type is a structure or union
	   are required to be constructed to contain assignments of
	   a temporary variable, so that we can evaluate them here
	   for side effect only.  If type is void, we must do likewise.  */

	/* If an arm of the branch requires a cleanup,
	   only that cleanup is performed.  */

	tree singleton = 0;
	tree binary_op = 0, unary_op = 0;

	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
	   convert it to our mode, if necessary.  */
	if (integer_onep (TREE_OPERAND (exp, 1))
	    && integer_zerop (TREE_OPERAND (exp, 2))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    if (ignore)
	      {
		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			     ro_modifier);
		return const0_rtx;
	      }

	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
	    if (GET_MODE (op0) == mode)
	      return op0;

	    if (target == 0)
	      target = gen_reg_rtx (mode);
	    convert_move (target, op0, unsignedp);
	    return target;
	  }

	/* Check for X ? A + B : A.  If we have this, we can copy A to the
	   output and conditionally add B.  Similarly for unary operations.
	   Don't do this if X has side-effects because those side effects
	   might affect A or B and the "?" operation is a sequence point in
	   ANSI.  (operand_equal_p tests for side effects.)  */

	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
	    && operand_equal_p (TREE_OPERAND (exp, 2),
				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 2),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);

	/* If we are not to produce a result, we have no target.  Otherwise,
	   if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */

	if (ignore)
	  temp = 0;
	else if (original_target
		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
		     || (singleton && GET_CODE (original_target) == REG
			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
			 && original_target == var_rtx (singleton)))
		 && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
		 && (! can_conditionally_move_p (mode)
		     || GET_CODE (original_target) == REG
		     || TREE_ADDRESSABLE (type))
#endif
		 && ! (GET_CODE (original_target) == MEM
		       && MEM_VOLATILE_P (original_target)))
	  temp = original_target;
	else if (TREE_ADDRESSABLE (type))
	  abort ();
	else
	  temp = assign_temp (type, 0, 0, 1);

	/* If we had X ? A + C : A, with C a constant power of 2, and we can
	   do the test of X as a store-flag operation, do this as
	   A + ((X != 0) << log C).  Similarly for other simple binary
	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
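	/* E.g. (an illustrative sketch, not from the original comments):
	   "x ? a + 4 : a" can become "a + ((x != 0) << 2)", trading a
	   branch for a store-flag and a shift.  */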
	if (temp && singleton && binary_op
	    && (TREE_CODE (binary_op) == PLUS_EXPR
		|| TREE_CODE (binary_op) == MINUS_EXPR
		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
		: integer_onep (TREE_OPERAND (binary_op, 1)))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    rtx result;
	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
			    ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
			       ? addv_optab : add_optab)
			    : TREE_CODE (binary_op) == MINUS_EXPR
			    ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
			       ? subv_optab : sub_optab)
			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
			    : xor_optab);

	    /* If we had X ? A : A + 1, do this as A + (X == 0).

	       We have to invert the truth value here and then put it
	       back later if do_store_flag fails.  We cannot simply copy
	       TREE_OPERAND (exp, 0) to another variable and modify that
	       because invert_truthvalue can modify the tree pointed to
	       by its argument.  */
	    if (singleton == TREE_OPERAND (exp, 1))
	      TREE_OPERAND (exp, 0)
		= invert_truthvalue (TREE_OPERAND (exp, 0));

	    result = do_store_flag (TREE_OPERAND (exp, 0),
				    (safe_from_p (temp, singleton, 1)
				     ? temp : NULL_RTX),
				    mode, BRANCH_COST <= 1);

	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
	      result = expand_shift (LSHIFT_EXPR, mode, result,
				     build_int_2 (tree_log2
						  (TREE_OPERAND
						   (binary_op, 1)),
						  0),
				     (safe_from_p (temp, singleton, 1)
				      ? temp : NULL_RTX), 0);

	    if (result)
	      {
		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
		return expand_binop (mode, boptab, op1, result, temp,
				     unsignedp, OPTAB_LIB_WIDEN);
	      }
	    else if (singleton == TREE_OPERAND (exp, 1))
	      TREE_OPERAND (exp, 0)
		= invert_truthvalue (TREE_OPERAND (exp, 0));
	  }

	do_pending_stack_adjust ();
	NO_DEFER_POP;
	op0 = gen_label_rtx ();

	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
	  {
	    if (temp != 0)
	      {
		/* If the target conflicts with the other operand of the
		   binary op, we can't use it.  Also, we can't use the target
		   if it is a hard register, because evaluating the condition
		   might clobber it.  */
		if ((binary_op
		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
		    || (GET_CODE (temp) == REG
			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
		  temp = gen_reg_rtx (mode);
		store_expr (singleton, temp, 0);
	      }
	    else
	      expand_expr (singleton,
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    if (singleton == TREE_OPERAND (exp, 1))
	      jumpif (TREE_OPERAND (exp, 0), op0);
	    else
	      jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    if (binary_op && temp == 0)
	      /* Just touch the other operand.  */
	      expand_expr (TREE_OPERAND (binary_op, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    else if (binary_op)
	      store_expr (build (TREE_CODE (binary_op), type,
				 make_tree (type, temp),
				 TREE_OPERAND (binary_op, 1)),
			  temp, 0);
	    else
	      store_expr (build1 (TREE_CODE (unary_op), type,
				  make_tree (type, temp)),
			  temp, 0);
	    op1 = op0;
	  }
	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
	   comparison operator.  If we have one of these cases, set the
	   output to A, branch on A (cse will merge these two references),
	   then set the output to FOO.  */
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 1), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
	  {
	    if (GET_CODE (temp) == REG
		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    jumpif (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    op1 = op0;
	  }
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 2), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
	  {
	    if (GET_CODE (temp) == REG
		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    op1 = op0;
	  }
	else
	  {
	    op1 = gen_label_rtx ();
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();

	    /* One branch of the cond can be void, if it never returns.  For
	       example A ? throw : E  */
	    if (temp != 0
		&& TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    end_cleanup_deferral ();
	    emit_queue ();
	    emit_jump_insn (gen_jump (op1));
	    emit_barrier ();
	    emit_label (op0);
	    start_cleanup_deferral ();
	    if (temp != 0
		&& TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 2),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	  }

	end_cleanup_deferral ();

	emit_queue ();
	emit_label (op1);
	OK_DEFER_POP;

	return temp;
      }

    case TARGET_EXPR:
      {
	/* Something needs to be initialized, but we didn't know
	   where that thing was when building the tree.  For example,
	   it could be the return value of a function, or a parameter
	   to a function which lays down in the stack, or a temporary
	   variable which must be passed by reference.

	   We guarantee that the expression will either be constructed
	   or copied into our original target.  */

	tree slot = TREE_OPERAND (exp, 0);
	tree cleanups = NULL_TREE;
	tree exp1;

	if (TREE_CODE (slot) != VAR_DECL)
	  abort ();

	if (! ignore)
	  target = original_target;

	/* Set this here so that if we get a target that refers to a
	   register variable that's already been used, put_reg_into_stack
	   knows that it should fix up those uses.  */
	TREE_USED (slot) = 1;

	if (target == 0)
	  {
	    if (DECL_RTL_SET_P (slot))
	      {
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		target = assign_temp (type, 2, 0, 1);
		/* All temp slots at this level must not conflict.  */
		preserve_temp_slots (target);
		SET_DECL_RTL (slot, target);
		if (TREE_ADDRESSABLE (slot))
		  put_var_into_stack (slot);

		/* Since SLOT is not known to the called function
		   to belong to its stack frame, we must build an explicit
		   cleanup.  This case occurs when we must build up a reference
		   to pass the reference as an argument.  In this case,
		   it is very likely that such a reference need not be
		   built here.  */

		if (TREE_OPERAND (exp, 2) == 0)
		  TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
		cleanups = TREE_OPERAND (exp, 2);
	      }
	  }
	else
	  {
	    /* This case does occur, when expanding a parameter which
	       needs to be constructed on the stack.  The target
	       is the actual stack address that we want to initialize.
	       The function we call will perform the cleanup in this case.  */

	    /* If we have already assigned it space, use that space,
	       not the target that we were passed in, as our target
	       parameter is only a hint.  */
	    if (DECL_RTL_SET_P (slot))
	      {
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		SET_DECL_RTL (slot, target);
		/* If we must have an addressable slot, then make sure that
		   the RTL that we just stored in slot is OK.  */
		if (TREE_ADDRESSABLE (slot))
		  put_var_into_stack (slot);
	      }
	  }

	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
	/* Mark it as expanded.  */
	TREE_OPERAND (exp, 1) = NULL_TREE;

	store_expr (exp1, target, 0);

	expand_decl_cleanup (NULL_TREE, cleanups);

	return target;
      }

    case INIT_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);
	tree noncopied_parts = 0;
	tree lhs_type = TREE_TYPE (lhs);

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
	  noncopied_parts
	    = init_noncopied_parts (stabilize_reference (lhs),
				    TYPE_NONCOPIED_PARTS (lhs_type));

	while (noncopied_parts != 0)
	  {
	    expand_assignment (TREE_VALUE (noncopied_parts),
			       TREE_PURPOSE (noncopied_parts), 0, 0);
	    noncopied_parts = TREE_CHAIN (noncopied_parts);
	  }
	return temp;
      }

    case MODIFY_EXPR:
      {
	/* If lhs is complex, expand calls in rhs before computing it.
	   That's so we don't compute a pointer and save it over a call.
	   If lhs is simple, compute it first so we can give it as a
	   target if the rhs is just a call.  This avoids an extra temp and copy
	   and that prevents a partial-subsumption which makes bad code.
	   Actually we could treat component_ref's of vars like vars.  */

	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);
	tree noncopied_parts = 0;
	tree lhs_type = TREE_TYPE (lhs);

	temp = 0;

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
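	/* Concretely (an illustrative sketch, not from the original
	   comments): for "s.a |= s.b" with one-bit fields and the result
	   unused, we jump past the store when s.b is zero and otherwise
	   store 1, instead of extracting, or-ing and re-inserting the
	   field.  */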
	if (ignore
	    && TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();

	    do_jump (TREE_OPERAND (rhs, 1),
		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR
					      ? integer_one_node
					      : integer_zero_node)),
			       0, 0);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
	    && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
	  noncopied_parts
	    = save_noncopied_parts (stabilize_reference (lhs),
				    TYPE_NONCOPIED_PARTS (lhs_type));

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	while (noncopied_parts != 0)
	  {
	    expand_assignment (TREE_PURPOSE (noncopied_parts),
			       TREE_VALUE (noncopied_parts), 0, 0);
	    noncopied_parts = TREE_CHAIN (noncopied_parts);
	  }
	return temp;
      }

    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
	expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0, ignore);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore, ignore);

    case ADDR_EXPR:
      /* If nonzero, TEMP will be set to the address of something that might
	 be a MEM corresponding to a stack slot.  */
      temp = 0;

      /* Are we taking the address of a nested function?  */
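      /* (Background, not from the original comments: the address of a
	 nested function must carry its static chain, so instead of the
	 bare function address we return the address of a trampoline, a
	 small runtime-generated stub that loads the chain and jumps to
	 the real code.)  */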
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0
	  && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
	  && ! TREE_STATIC (exp))
	{
	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
	  op0 = force_operand (op0, target);
	}
      /* If we are taking the address of something erroneous, just
	 return a zero.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
	return const0_rtx;
      else
	{
	  /* We make sure to pass const0_rtx down if we came in with
	     ignore set, to avoid doing the cleanups twice for something.  */
	  op0 = expand_expr (TREE_OPERAND (exp, 0),
			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
			     (modifier == EXPAND_INITIALIZER
			      ? modifier : EXPAND_CONST_ADDRESS));

	  /* If we are going to ignore the result, OP0 will have been set
	     to const0_rtx, so just return it.  Don't get confused and
	     think we are taking the address of the constant.  */
	  if (ignore)
	    return op0;

	  /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
	     clever and returns a REG when given a MEM.  */
	  op0 = protect_from_queue (op0, 1);

	  /* We would like the object in memory.  If it is a constant, we can
	     have it be statically allocated into memory.  For a non-constant,
	     we need to allocate some memory and store the value into it.  */

	  if (CONSTANT_P (op0))
	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				   op0);
	  else if (GET_CODE (op0) == MEM)
	    {
	      mark_temp_addr_taken (op0);
	      temp = XEXP (op0, 0);
	    }

	  else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
		   || GET_CODE (op0) == PARALLEL)
	    {
	      /* If this object is in a register, it must not
		 be BLKmode.  */
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      tree nt = build_qualified_type (inner_type,
					      (TYPE_QUALS (inner_type)
					       | TYPE_QUAL_CONST));
	      rtx memloc = assign_temp (nt, 1, 1, 1);

	      mark_temp_addr_taken (memloc);
	      if (GET_CODE (op0) == PARALLEL)
		/* Handle calls that pass values in multiple non-contiguous
		   locations.  The Irix 6 ABI has examples of this.  */
		emit_group_store (memloc, op0,
				  int_size_in_bytes (inner_type),
				  TYPE_ALIGN (inner_type));
	      else
		emit_move_insn (memloc, op0);
	      op0 = memloc;
	    }

	  if (GET_CODE (op0) != MEM)
	    abort ();

	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	    {
	      temp = XEXP (op0, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
	      if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
		  && mode == ptr_mode)
		temp = convert_memory_address (ptr_mode, temp);
#endif
	      return temp;
	    }

	  op0 = force_operand (XEXP (op0, 0), target);
	}

      if (flag_force_addr && GET_CODE (op0) != REG)
	op0 = force_reg (Pmode, op0);

      if (GET_CODE (op0) == REG
	  && ! REG_USERVAR_P (op0))
	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));

      /* If we might have had a temp slot, add an equivalent address
	 for it.  */
      if (temp != 0)
	update_temp_slot_address (temp, op0);

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
	  && mode == ptr_mode)
	op0 = convert_memory_address (ptr_mode, op0);
#endif

      return op0;

    case ENTRY_VALUE_EXPR:
      abort ();

      /* COMPLEX type for Extended Pascal & Fortran  */
    case COMPLEX_EXPR:
      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx insns;

	/* Get the rtx code of the operands.  */
	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	start_sequence ();

	/* Move the real (op0) and imaginary (op1) parts to their location.  */
	emit_move_insn (gen_realpart (mode, target), op0);
	emit_move_insn (gen_imagpart (mode, target), op1);

	insns = get_insns ();
	end_sequence ();

	/* Complex construction should appear as a single unit.  */
	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
	else
	  emit_insns (insns);

	return target;
      }

    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);

    case CONJ_EXPR:
      {
	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx imag_t;
	rtx insns;

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (mode);

	start_sequence ();

	/* Store the realpart and the negated imagpart to target.  */
	emit_move_insn (gen_realpart (partmode, target),
			gen_realpart (partmode, op0));

	imag_t = gen_imagpart (partmode, target);
	temp = expand_unop (partmode,
			    ! unsignedp && flag_trapv
			    && (GET_MODE_CLASS (partmode) == MODE_INT)
			    ? negv_optab : neg_optab,
			    gen_imagpart (partmode, op0), imag_t, 0);
	if (temp != imag_t)
	  emit_move_insn (imag_t, temp);

	insns = get_insns ();
	end_sequence ();

	/* Conjugate should appear as a single unit.
	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
	else
	  emit_insns (insns);

	return target;
      }

    case TRY_CATCH_EXPR:
      {
	tree handler = TREE_OPERAND (exp, 1);

	expand_eh_region_start ();

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	expand_eh_region_end_cleanup (handler);

	return op0;
      }

    case TRY_FINALLY_EXPR:
      {
	tree try_block = TREE_OPERAND (exp, 0);
	tree finally_block = TREE_OPERAND (exp, 1);
	rtx finally_label = gen_label_rtx ();
	rtx done_label = gen_label_rtx ();
	rtx return_link = gen_reg_rtx (Pmode);
	tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
			      (tree) finally_label, (tree) return_link);
	TREE_SIDE_EFFECTS (cleanup) = 1;
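	/* (Shape of the generated code, summarized here rather than in the
	   original comments: the try block falls through to a jump around
	   the finally code; the finally code also acts as a subroutine
	   that returns through RETURN_LINK, so cleanup paths reach it as
	   well.)  */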

	/* Start a new binding layer that will keep track of all cleanup
	   actions to be performed.  */
	expand_start_bindings (2);

	target_temp_slot_level = temp_slot_level;

	expand_decl_cleanup (NULL_TREE, cleanup);
	op0 = expand_expr (try_block, target, tmode, modifier);

	preserve_temp_slots (op0);
	expand_end_bindings (NULL_TREE, 0, 0);
	emit_jump (done_label);
	emit_label (finally_label);
	expand_expr (finally_block, const0_rtx, VOIDmode, 0);
	emit_indirect_jump (return_link);
	emit_label (done_label);
	return op0;
      }

    case GOTO_SUBROUTINE_EXPR:
      {
	rtx subr = (rtx) TREE_OPERAND (exp, 0);
	rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
	rtx return_address = gen_label_rtx ();
	emit_move_insn (return_link,
			gen_rtx_LABEL_REF (Pmode, return_address));
	emit_jump (subr);
	emit_label (return_address);
	return const0_rtx;
      }

    case VA_ARG_EXPR:
      return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);

    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);

    default:
      return (*lang_expand_expr) (exp, original_target, tmode, modifier);
    }

  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
    subtarget = 0;
  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
 binop2:
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}
\f
/* Similar to expand_expr, except that we don't specify a target, target
   mode, or modifier and we return the alignment of the inner type.  This is
   used in cases where it is not necessary to align the result to the
   alignment of its type as long as we know the alignment of the result, for
   example for comparisons of BLKmode values.  */
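/* (Clarifying note, not from the original source: "unaligned" means the
   caller can cope with a result aligned only to the *PALIGN value we
   return, which may be smaller than TYPE_ALIGN of EXP's type.)  */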

static rtx
expand_expr_unaligned (exp, palign)
     register tree exp;
     unsigned int *palign;
{
  register rtx op0;
  tree type = TREE_TYPE (exp);
  register enum machine_mode mode = TYPE_MODE (type);

  /* Default the alignment we return to that of the type.  */
  *palign = TYPE_ALIGN (type);

  /* The only case in which we do anything special is when the resulting
     mode is BLKmode.  */
  if (mode != BLKmode)
    return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
      /* Conversions between BLKmode values don't change the underlying
	 alignment or value.  */
      if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
	return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
      break;

    case ARRAY_REF:
      /* Much of the code for this case is copied directly from expand_expr.
	 We need to duplicate it here because we will do something different
	 in the fall-through case, so we need to handle the same exceptions
	 it does.  */
      {
	tree array = TREE_OPERAND (exp, 0);
	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	tree index = convert (sizetype, TREE_OPERAND (exp, 1));
	HOST_WIDE_INT i;

	if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	  abort ();

	/* Optimize the special-case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1,
	   and its mode is QI.  Without the conversion, (ARRAY
	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */

	if (! integer_zerop (low_bound))
	  index = size_diffop (index, convert (sizetype, low_bound));

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both cases: when
	   we have an explicit constructor, and when our operand is a
	   variable that was declared const.  */
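	/* For instance (illustrative, not from the original comments):
	   given "static const struct S tab[] = { ... };", the reference
	   tab[2] with a literal index can be expanded straight from the
	   third element of the initializer, with no load from TAB.  */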

	if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
	    && host_integerp (index, 0)
	    && 0 > compare_tree_int (index,
				     list_length (CONSTRUCTOR_ELTS
						  (TREE_OPERAND (exp, 0)))))
	  {
	    tree elem;

	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
		 i = tree_low_cst (index, 0);
		 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
	      ;

	    if (elem)
	      return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
	  }

	else if (optimize >= 1
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);

		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    tree elem;

		    for (elem = CONSTRUCTOR_ELTS (init);
			 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
			 elem = TREE_CHAIN (elem))
		      ;

		    if (elem)
		      return expand_expr_unaligned (fold (TREE_VALUE (elem)),
						    palign);
		  }
	      }
	  }
      }
      /* Fall through.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  Don't do this if we have
	 already written the data since we want to refer to that copy
	 and varasm.c assumes that's what we'll do.  */
      if (TREE_CODE (exp) == COMPONENT_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
	       elt = TREE_CHAIN (elt))
	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
	      /* Note that unlike the case in expand_expr, we know this is
		 BLKmode and hence not an integer.  */
	      return expand_expr_unaligned (TREE_VALUE (elt), palign);
	}

      {
	enum machine_mode mode1;
	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int volatilep = 0;
	unsigned int alignment;
	int unsignedp;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep,
					&alignment);

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();

	op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);

	/* If this is a constant, put it into a register if it is a
	   legitimate constant and OFFSET is 0 and memory if it isn't.  */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));

	    if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
		&& offset == 0)
	      op0 = force_reg (inner_mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (inner_mode, op0));
	  }

	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	    /* If this object is in a register, put it into memory.
	       This case can't occur in C, but can in Ada if we have
	       unchecked conversion of an expression from a scalar type to
	       an array or record type.  */
	    if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		|| GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
	      {
		tree nt = build_qualified_type (TREE_TYPE (tem),
						(TYPE_QUALS (TREE_TYPE (tem))
						 | TYPE_QUAL_CONST));
		rtx memloc = assign_temp (nt, 1, 1, 1);

		mark_temp_addr_taken (memloc);
		emit_move_insn (memloc, op0);
		op0 = memloc;
	      }

	    if (GET_CODE (op0) != MEM)
	      abort ();

	    if (GET_MODE (offset_rtx) != ptr_mode)
	      {
#ifdef POINTERS_EXTEND_UNSIGNED
		offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
#else
		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
	      }

	    op0 = change_address (op0, VOIDmode,
				  gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
						force_reg (ptr_mode,
							   offset_rtx)));
	  }

	/* Don't forget about volatility even if this is a bitfield.  */
	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    op0 = copy_rtx (op0);
	    MEM_VOLATILE_P (op0) = 1;
	  }

	/* Check the access.  */
	if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
	  {
	    rtx to;
	    int size;

	    to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
	    size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;

	    /* Check the access right of the pointer.  */
	    in_check_memory_usage = 1;
	    if (size > BITS_PER_UNIT)
	      emit_library_call (chkr_check_addr_libfunc,
				 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
				 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
				 TYPE_MODE (sizetype),
				 GEN_INT (MEMORY_USE_RO),
				 TYPE_MODE (integer_type_node));
	    in_check_memory_usage = 0;
	  }

	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.
	   If we ultimately want the address (EXPAND_CONST_ADDRESS or
	   EXPAND_INITIALIZER), then we must not copy to a temporary.  */
	if (mode1 == VOIDmode
	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
	    || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
		&& (TYPE_ALIGN (type) > alignment
		    || bitpos % TYPE_ALIGN (type) != 0)))
	  {
	    enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		/* In this case, BITPOS must start at a byte boundary.  */
		if (GET_CODE (op0) != MEM
		    || bitpos % BITS_PER_UNIT != 0)
		  abort ();

		op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
	      }
	    else
	      {
		tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
						TYPE_QUAL_CONST);
		rtx new = assign_temp (nt, 0, 1, 1);

		op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
					 unsignedp, NULL_RTX, ext_mode,
					 ext_mode, alignment,
					 int_size_in_bytes (TREE_TYPE (tem)));

		/* If the result is a record type and BITSIZE is narrower than
		   the mode of OP0, an integral mode, and this is a big endian
		   machine, we must put the field into the high-order bits.  */
		if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		    && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		    && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
		  op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				      size_int (GET_MODE_BITSIZE
						(GET_MODE (op0))
						- bitsize),
				      op0, 1);

		emit_move_insn (new, op0);
		op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
	      }
	  }
	else
	  /* Get a reference to just this component.  */
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

	set_mem_alias_set (op0, get_alias_set (exp));

	/* Adjust the alignment in case the bit position is not
	   a multiple of the alignment of the inner object.  */
	while (bitpos % alignment != 0)
	  alignment >>= 1;

	if (GET_CODE (XEXP (op0, 0)) == REG)
	  mark_reg_pointer (XEXP (op0, 0), alignment);

	MEM_IN_STRUCT_P (op0) = 1;
	MEM_VOLATILE_P (op0) |= volatilep;

	*palign = alignment;
	return op0;
      }

    default:
      break;

    }

  return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
}
\f
/* Return the tree node if ARG corresponds to a string constant or zero
   if it doesn't.  If we return non-zero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */
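/* For example (illustrative, not from the original comments): given the
   argument "hello" + 2, this returns the STRING_CST "hello" with
   *PTR_OFFSET set to 2; a plain "hello" yields an offset of zero.  */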

tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg1);
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg0);
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
\f
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */

static rtx
expand_increment (exp, post, ignore)
     register tree exp;
     int post, ignore;
{
  register rtx op0, op1;
  register rtx temp, value;
  register tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */
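  /* (Background, not from the original comments: SUBREG_PROMOTED_VAR_P
     marks a variable that lives in a register wider than its declared
     mode, so an in-place add would leave the extended upper bits stale.)  */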
9285
9286 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9287 {
9288 if (post)
9289 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9290 else
9291 bad_subreg = 1;
9292 }
9293 else if (GET_CODE (op0) == SUBREG
9294 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9295 {
9296 /* We cannot increment this SUBREG in place. If we are
9297 post-incrementing, get a copy of the old value. Otherwise,
9298 just mark that we cannot increment in place. */
9299 if (post)
9300 op0 = copy_to_reg (op0);
9301 else
9302 bad_subreg = 1;
e9a25f70
JL
9303 }
9304
b93a436e
JL
9305 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9306 && temp != get_last_insn ());
9307 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9308 EXPAND_MEMORY_USE_BAD);
1499e0a8 9309
b93a436e
JL
9310 /* Decide whether incrementing or decrementing. */
9311 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9312 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9313 this_optab = sub_optab;
9314
9315 /* Convert decrement by a constant into a negative increment. */
9316 if (this_optab == sub_optab
9317 && GET_CODE (op1) == CONST_INT)
ca695ac9 9318 {
3a94c984 9319 op1 = GEN_INT (-INTVAL (op1));
b93a436e 9320 this_optab = add_optab;
ca695ac9 9321 }
1499e0a8 9322
91ce572a
CC
9323 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9324 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9325
b93a436e
JL
9326 /* For a preincrement, see if we can do this with a single instruction. */
9327 if (!post)
9328 {
9329 icode = (int) this_optab->handlers[(int) mode].insn_code;
9330 if (icode != (int) CODE_FOR_nothing
9331 /* Make sure that OP0 is valid for operands 0 and 1
9332 of the insn we want to queue. */
a995e389
RH
9333 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9334 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9335 && (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e
JL
9336 single_insn = 1;
9337 }
bbf6f052 9338
b93a436e
JL
9339 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9340 then we cannot just increment OP0. We must therefore contrive to
9341 increment the original value. Then, for postincrement, we can return
9342 OP0 since it is a copy of the old value. For preincrement, expand here
9343 unless we can do it with a single insn.
bbf6f052 9344
b93a436e
JL
9345 Likewise if storing directly into OP0 would clobber high bits
9346 we need to preserve (bad_subreg). */
9347 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 9348 {
b93a436e
JL
9349 /* This is the easiest way to increment the value wherever it is.
9350 Problems with multiple evaluation of INCREMENTED are prevented
9351 because either (1) it is a component_ref or preincrement,
9352 in which case it was stabilized above, or (2) it is an array_ref
9353 with constant index in an array in a register, which is
9354 safe to reevaluate. */
9355 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9356 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9357 ? MINUS_EXPR : PLUS_EXPR),
9358 TREE_TYPE (exp),
9359 incremented,
9360 TREE_OPERAND (exp, 1));
a358cee0 9361
b93a436e
JL
9362 while (TREE_CODE (incremented) == NOP_EXPR
9363 || TREE_CODE (incremented) == CONVERT_EXPR)
9364 {
9365 newexp = convert (TREE_TYPE (incremented), newexp);
9366 incremented = TREE_OPERAND (incremented, 0);
9367 }
bbf6f052 9368
b93a436e
JL
 9369 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9370 return post ? op0 : temp;
9371 }
bbf6f052 9372
b93a436e
JL
9373 if (post)
9374 {
9375 /* We have a true reference to the value in OP0.
9376 If there is an insn to add or subtract in this mode, queue it.
9377 Queueing the increment insn avoids the register shuffling
9378 that often results if we must increment now and first save
9379 the old value for subsequent use. */
bbf6f052 9380
b93a436e
JL
9381#if 0 /* Turned off to avoid making extra insn for indexed memref. */
9382 op0 = stabilize (op0);
9383#endif
41dfd40c 9384
b93a436e
JL
9385 icode = (int) this_optab->handlers[(int) mode].insn_code;
9386 if (icode != (int) CODE_FOR_nothing
9387 /* Make sure that OP0 is valid for operands 0 and 1
9388 of the insn we want to queue. */
a995e389
RH
9389 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9390 && (*insn_data[icode].operand[1].predicate) (op0, mode))
b93a436e 9391 {
a995e389 9392 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9393 op1 = force_reg (mode, op1);
bbf6f052 9394
b93a436e
JL
9395 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9396 }
9397 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9398 {
9399 rtx addr = (general_operand (XEXP (op0, 0), mode)
9400 ? force_reg (Pmode, XEXP (op0, 0))
9401 : copy_to_reg (XEXP (op0, 0)));
9402 rtx temp, result;
ca695ac9 9403
792760b9 9404 op0 = replace_equiv_address (op0, addr);
b93a436e 9405 temp = force_reg (GET_MODE (op0), op0);
a995e389 9406 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9407 op1 = force_reg (mode, op1);
ca695ac9 9408
b93a436e
JL
9409 /* The increment queue is LIFO, thus we have to `queue'
9410 the instructions in reverse order. */
9411 enqueue_insn (op0, gen_move_insn (op0, temp));
9412 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9413 return result;
bbf6f052
RK
9414 }
9415 }
ca695ac9 9416
b93a436e
JL
9417 /* Preincrement, or we can't increment with one simple insn. */
9418 if (post)
9419 /* Save a copy of the value before inc or dec, to return it later. */
9420 temp = value = copy_to_reg (op0);
9421 else
9422 /* Arrange to return the incremented value. */
9423 /* Copy the rtx because expand_binop will protect from the queue,
9424 and the results of that would be invalid for us to return
9425 if our caller does emit_queue before using our result. */
9426 temp = copy_rtx (value = op0);
bbf6f052 9427
b93a436e
JL
9428 /* Increment however we can. */
9429 op1 = expand_binop (mode, this_optab, value, op1,
3a94c984 9430 current_function_check_memory_usage ? NULL_RTX : op0,
b93a436e
JL
9431 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9432 /* Make sure the value is stored into OP0. */
9433 if (op1 != op0)
9434 emit_move_insn (op0, op1);
5718612f 9435
b93a436e
JL
9436 return temp;
9437}
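
/* Editorial sketch, not part of GCC: the source-level semantics that
   expand_increment implements.  For a postincrement the old value is
   copied out first and becomes the result while the add itself may be
   queued; for a preincrement the updated value is the result.  The
   "sketch_" names are hypothetical.  */

static int
sketch_postincrement (lvalue)
     int *lvalue;
{
  int old = *lvalue;		/* temp = value = copy_to_reg (op0) */

  *lvalue += 1;			/* the (possibly queued) add */
  return old;			/* return the saved copy */
}

static int
sketch_preincrement (lvalue)
     int *lvalue;
{
  *lvalue += 1;			/* increment in place */
  return *lvalue;		/* arrange to return the new value */
}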
9438\f
b93a436e
JL
9439/* At the start of a function, record that we have no previously-pushed
9440 arguments waiting to be popped. */
bbf6f052 9441
b93a436e
JL
9442void
9443init_pending_stack_adjust ()
9444{
9445 pending_stack_adjust = 0;
9446}
bbf6f052 9447
b93a436e 9448/* When exiting from a function, if safe, clear out any pending stack adjust
060fbabf
JL
9449 so the adjustment won't get done.
9450
9451 Note, if the current function calls alloca, then it must have a
9452 frame pointer regardless of the value of flag_omit_frame_pointer. */
bbf6f052 9453
b93a436e
JL
9454void
9455clear_pending_stack_adjust ()
9456{
9457#ifdef EXIT_IGNORE_STACK
9458 if (optimize > 0
060fbabf
JL
9459 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9460 && EXIT_IGNORE_STACK
b93a436e
JL
9461 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9462 && ! flag_inline_functions)
1503a7ec
JH
9463 {
9464 stack_pointer_delta -= pending_stack_adjust,
9465 pending_stack_adjust = 0;
9466 }
b93a436e
JL
9467#endif
9468}
bbf6f052 9469
b93a436e
JL
9470/* Pop any previously-pushed arguments that have not been popped yet. */
9471
9472void
9473do_pending_stack_adjust ()
9474{
9475 if (inhibit_defer_pop == 0)
ca695ac9 9476 {
b93a436e
JL
9477 if (pending_stack_adjust != 0)
9478 adjust_stack (GEN_INT (pending_stack_adjust));
9479 pending_stack_adjust = 0;
bbf6f052 9480 }
bbf6f052
RK
9481}
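
/* Editorial sketch, not part of GCC: the effect of deferring stack
   pops.  Rather than popping argument space after every call, a byte
   count is accumulated and flushed with one adjustment, which is what
   do_pending_stack_adjust does via adjust_stack with
   pending_stack_adjust.  The "sketch_" names are hypothetical.  */

static int sketch_pending_bytes;

static void
sketch_note_args_popped (nbytes)
     int nbytes;
{
  sketch_pending_bytes += nbytes;	/* defer the pop */
}

static void
sketch_flush_pending ()
{
  if (sketch_pending_bytes != 0)
    {
      /* Emit a single "sp += sketch_pending_bytes" adjustment here
	 instead of one pop per call.  */
      sketch_pending_bytes = 0;
    }
}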
9482\f
b93a436e 9483/* Expand conditional expressions. */
bbf6f052 9484
b93a436e
JL
9485/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9486 LABEL is an rtx of code CODE_LABEL, in this function and all the
9487 functions here. */
bbf6f052 9488
b93a436e
JL
9489void
9490jumpifnot (exp, label)
ca695ac9 9491 tree exp;
b93a436e 9492 rtx label;
bbf6f052 9493{
b93a436e
JL
9494 do_jump (exp, label, NULL_RTX);
9495}
bbf6f052 9496
b93a436e 9497/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
ca695ac9 9498
b93a436e
JL
9499void
9500jumpif (exp, label)
9501 tree exp;
9502 rtx label;
9503{
9504 do_jump (exp, NULL_RTX, label);
9505}
ca695ac9 9506
b93a436e
JL
9507/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9508 the result is zero, or IF_TRUE_LABEL if the result is one.
9509 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9510 meaning fall through in that case.
ca695ac9 9511
b93a436e
JL
9512 do_jump always does any pending stack adjust except when it does not
9513 actually perform a jump. An example where there is no jump
9514 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
ca695ac9 9515
b93a436e
JL
9516 This function is responsible for optimizing cases such as
9517 &&, || and comparison operators in EXP. */
5718612f 9518
b93a436e
JL
9519void
9520do_jump (exp, if_false_label, if_true_label)
9521 tree exp;
9522 rtx if_false_label, if_true_label;
9523{
9524 register enum tree_code code = TREE_CODE (exp);
9525 /* Some cases need to create a label to jump to
9526 in order to properly fall through.
9527 These cases set DROP_THROUGH_LABEL nonzero. */
9528 rtx drop_through_label = 0;
9529 rtx temp;
b93a436e
JL
9530 int i;
9531 tree type;
9532 enum machine_mode mode;
ca695ac9 9533
dbecbbe4
JL
9534#ifdef MAX_INTEGER_COMPUTATION_MODE
9535 check_max_integer_computation_mode (exp);
9536#endif
9537
b93a436e 9538 emit_queue ();
ca695ac9 9539
b93a436e 9540 switch (code)
ca695ac9 9541 {
b93a436e 9542 case ERROR_MARK:
ca695ac9 9543 break;
bbf6f052 9544
b93a436e
JL
9545 case INTEGER_CST:
9546 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9547 if (temp)
9548 emit_jump (temp);
9549 break;
bbf6f052 9550
b93a436e
JL
9551#if 0
9552 /* This is not true with #pragma weak */
9553 case ADDR_EXPR:
9554 /* The address of something can never be zero. */
9555 if (if_true_label)
9556 emit_jump (if_true_label);
9557 break;
9558#endif
bbf6f052 9559
b93a436e
JL
9560 case NOP_EXPR:
9561 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9562 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
b4e3fabb
RK
9563 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9564 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
b93a436e
JL
9565 goto normal;
9566 case CONVERT_EXPR:
9567 /* If we are narrowing the operand, we have to do the compare in the
9568 narrower mode. */
9569 if ((TYPE_PRECISION (TREE_TYPE (exp))
9570 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9571 goto normal;
9572 case NON_LVALUE_EXPR:
9573 case REFERENCE_EXPR:
9574 case ABS_EXPR:
9575 case NEGATE_EXPR:
9576 case LROTATE_EXPR:
9577 case RROTATE_EXPR:
9578 /* These cannot change zero->non-zero or vice versa. */
9579 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9580 break;
bbf6f052 9581
14a774a9
RK
9582 case WITH_RECORD_EXPR:
9583 /* Put the object on the placeholder list, recurse through our first
9584 operand, and pop the list. */
9585 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9586 placeholder_list);
9587 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9588 placeholder_list = TREE_CHAIN (placeholder_list);
9589 break;
9590
b93a436e
JL
9591#if 0
 9592 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
9593 a test and can be longer if the test is eliminated. */
9594 case PLUS_EXPR:
9595 /* Reduce to minus. */
9596 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9597 TREE_OPERAND (exp, 0),
9598 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9599 TREE_OPERAND (exp, 1))));
9600 /* Process as MINUS. */
ca695ac9 9601#endif
bbf6f052 9602
b93a436e
JL
9603 case MINUS_EXPR:
9604 /* Non-zero iff operands of minus differ. */
b30f05db
BS
9605 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9606 TREE_OPERAND (exp, 0),
9607 TREE_OPERAND (exp, 1)),
9608 NE, NE, if_false_label, if_true_label);
b93a436e 9609 break;
bbf6f052 9610
b93a436e
JL
9611 case BIT_AND_EXPR:
9612 /* If we are AND'ing with a small constant, do this comparison in the
9613 smallest type that fits. If the machine doesn't have comparisons
9614 that small, it will be converted back to the wider comparison.
9615 This helps if we are testing the sign bit of a narrower object.
9616 combine can't do this for us because it can't know whether a
9617 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
bbf6f052 9618
b93a436e
JL
9619 if (! SLOW_BYTE_ACCESS
9620 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9621 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
05bccae2 9622 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
b93a436e
JL
9623 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9624 && (type = type_for_mode (mode, 1)) != 0
9625 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9626 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9627 != CODE_FOR_nothing))
9628 {
9629 do_jump (convert (type, exp), if_false_label, if_true_label);
9630 break;
9631 }
9632 goto normal;
bbf6f052 9633
b93a436e
JL
9634 case TRUTH_NOT_EXPR:
9635 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9636 break;
bbf6f052 9637
b93a436e
JL
9638 case TRUTH_ANDIF_EXPR:
9639 if (if_false_label == 0)
9640 if_false_label = drop_through_label = gen_label_rtx ();
9641 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9642 start_cleanup_deferral ();
9643 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9644 end_cleanup_deferral ();
9645 break;
bbf6f052 9646
b93a436e
JL
9647 case TRUTH_ORIF_EXPR:
9648 if (if_true_label == 0)
9649 if_true_label = drop_through_label = gen_label_rtx ();
9650 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9651 start_cleanup_deferral ();
9652 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9653 end_cleanup_deferral ();
9654 break;
bbf6f052 9655
b93a436e
JL
9656 case COMPOUND_EXPR:
9657 push_temp_slots ();
9658 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9659 preserve_temp_slots (NULL_RTX);
9660 free_temp_slots ();
9661 pop_temp_slots ();
9662 emit_queue ();
9663 do_pending_stack_adjust ();
9664 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9665 break;
bbf6f052 9666
b93a436e
JL
9667 case COMPONENT_REF:
9668 case BIT_FIELD_REF:
9669 case ARRAY_REF:
b4e3fabb 9670 case ARRAY_RANGE_REF:
b93a436e 9671 {
770ae6cc
RK
9672 HOST_WIDE_INT bitsize, bitpos;
9673 int unsignedp;
b93a436e
JL
9674 enum machine_mode mode;
9675 tree type;
9676 tree offset;
9677 int volatilep = 0;
729a2125 9678 unsigned int alignment;
bbf6f052 9679
b93a436e
JL
9680 /* Get description of this reference. We don't actually care
9681 about the underlying object here. */
19caa751
RK
9682 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9683 &unsignedp, &volatilep, &alignment);
bbf6f052 9684
b93a436e
JL
9685 type = type_for_size (bitsize, unsignedp);
9686 if (! SLOW_BYTE_ACCESS
9687 && type != 0 && bitsize >= 0
9688 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9689 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9690 != CODE_FOR_nothing))
9691 {
9692 do_jump (convert (type, exp), if_false_label, if_true_label);
9693 break;
9694 }
9695 goto normal;
9696 }
bbf6f052 9697
b93a436e
JL
9698 case COND_EXPR:
9699 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9700 if (integer_onep (TREE_OPERAND (exp, 1))
9701 && integer_zerop (TREE_OPERAND (exp, 2)))
9702 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
bbf6f052 9703
b93a436e
JL
9704 else if (integer_zerop (TREE_OPERAND (exp, 1))
9705 && integer_onep (TREE_OPERAND (exp, 2)))
9706 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
bbf6f052 9707
b93a436e
JL
9708 else
9709 {
9710 register rtx label1 = gen_label_rtx ();
9711 drop_through_label = gen_label_rtx ();
bbf6f052 9712
b93a436e 9713 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
bbf6f052 9714
b93a436e
JL
9715 start_cleanup_deferral ();
9716 /* Now the THEN-expression. */
9717 do_jump (TREE_OPERAND (exp, 1),
9718 if_false_label ? if_false_label : drop_through_label,
9719 if_true_label ? if_true_label : drop_through_label);
9720 /* In case the do_jump just above never jumps. */
9721 do_pending_stack_adjust ();
9722 emit_label (label1);
bbf6f052 9723
b93a436e
JL
9724 /* Now the ELSE-expression. */
9725 do_jump (TREE_OPERAND (exp, 2),
9726 if_false_label ? if_false_label : drop_through_label,
9727 if_true_label ? if_true_label : drop_through_label);
9728 end_cleanup_deferral ();
9729 }
9730 break;
bbf6f052 9731
b93a436e
JL
9732 case EQ_EXPR:
9733 {
9734 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 9735
9ec36da5
JL
9736 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9737 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8d62b411
AS
9738 {
9739 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9740 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9741 do_jump
9742 (fold
9743 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9744 fold (build (EQ_EXPR, TREE_TYPE (exp),
9745 fold (build1 (REALPART_EXPR,
9746 TREE_TYPE (inner_type),
9747 exp0)),
9748 fold (build1 (REALPART_EXPR,
9749 TREE_TYPE (inner_type),
9750 exp1)))),
9751 fold (build (EQ_EXPR, TREE_TYPE (exp),
9752 fold (build1 (IMAGPART_EXPR,
9753 TREE_TYPE (inner_type),
9754 exp0)),
9755 fold (build1 (IMAGPART_EXPR,
9756 TREE_TYPE (inner_type),
9757 exp1)))))),
9758 if_false_label, if_true_label);
9759 }
9ec36da5
JL
9760
9761 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9762 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9763
b93a436e 9764 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
1eb8759b 9765 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
b93a436e
JL
9766 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9767 else
b30f05db 9768 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
b93a436e
JL
9769 break;
9770 }
bbf6f052 9771
b93a436e
JL
9772 case NE_EXPR:
9773 {
9774 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 9775
9ec36da5
JL
9776 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9777 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8d62b411
AS
9778 {
9779 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9780 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9781 do_jump
9782 (fold
9783 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9784 fold (build (NE_EXPR, TREE_TYPE (exp),
9785 fold (build1 (REALPART_EXPR,
9786 TREE_TYPE (inner_type),
9787 exp0)),
9788 fold (build1 (REALPART_EXPR,
9789 TREE_TYPE (inner_type),
9790 exp1)))),
9791 fold (build (NE_EXPR, TREE_TYPE (exp),
9792 fold (build1 (IMAGPART_EXPR,
9793 TREE_TYPE (inner_type),
9794 exp0)),
9795 fold (build1 (IMAGPART_EXPR,
9796 TREE_TYPE (inner_type),
9797 exp1)))))),
9798 if_false_label, if_true_label);
9799 }
9ec36da5
JL
9800
9801 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9802 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9803
b93a436e 9804 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
1eb8759b 9805 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
b93a436e
JL
9806 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9807 else
b30f05db 9808 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
b93a436e
JL
9809 break;
9810 }
bbf6f052 9811
b93a436e 9812 case LT_EXPR:
1c0290ea
BS
9813 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9814 if (GET_MODE_CLASS (mode) == MODE_INT
1eb8759b 9815 && ! can_compare_p (LT, mode, ccp_jump))
b93a436e
JL
9816 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9817 else
b30f05db 9818 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
b93a436e 9819 break;
bbf6f052 9820
b93a436e 9821 case LE_EXPR:
1c0290ea
BS
9822 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9823 if (GET_MODE_CLASS (mode) == MODE_INT
1eb8759b 9824 && ! can_compare_p (LE, mode, ccp_jump))
b93a436e
JL
9825 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9826 else
b30f05db 9827 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
b93a436e 9828 break;
bbf6f052 9829
b93a436e 9830 case GT_EXPR:
1c0290ea
BS
9831 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9832 if (GET_MODE_CLASS (mode) == MODE_INT
1eb8759b 9833 && ! can_compare_p (GT, mode, ccp_jump))
b93a436e
JL
9834 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9835 else
b30f05db 9836 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
b93a436e 9837 break;
bbf6f052 9838
b93a436e 9839 case GE_EXPR:
1c0290ea
BS
9840 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9841 if (GET_MODE_CLASS (mode) == MODE_INT
1eb8759b 9842 && ! can_compare_p (GE, mode, ccp_jump))
b93a436e
JL
9843 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9844 else
b30f05db 9845 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
b93a436e 9846 break;
bbf6f052 9847
1eb8759b
RH
9848 case UNORDERED_EXPR:
9849 case ORDERED_EXPR:
9850 {
9851 enum rtx_code cmp, rcmp;
9852 int do_rev;
9853
9854 if (code == UNORDERED_EXPR)
9855 cmp = UNORDERED, rcmp = ORDERED;
9856 else
9857 cmp = ORDERED, rcmp = UNORDERED;
3a94c984 9858 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
1eb8759b
RH
9859
9860 do_rev = 0;
9861 if (! can_compare_p (cmp, mode, ccp_jump)
9862 && (can_compare_p (rcmp, mode, ccp_jump)
9863 /* If the target doesn't provide either UNORDERED or ORDERED
9864 comparisons, canonicalize on UNORDERED for the library. */
9865 || rcmp == UNORDERED))
9866 do_rev = 1;
9867
9868 if (! do_rev)
9869 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9870 else
9871 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9872 }
9873 break;
9874
9875 {
9876 enum rtx_code rcode1;
9877 enum tree_code tcode2;
9878
9879 case UNLT_EXPR:
9880 rcode1 = UNLT;
9881 tcode2 = LT_EXPR;
9882 goto unordered_bcc;
9883 case UNLE_EXPR:
9884 rcode1 = UNLE;
9885 tcode2 = LE_EXPR;
9886 goto unordered_bcc;
9887 case UNGT_EXPR:
9888 rcode1 = UNGT;
9889 tcode2 = GT_EXPR;
9890 goto unordered_bcc;
9891 case UNGE_EXPR:
9892 rcode1 = UNGE;
9893 tcode2 = GE_EXPR;
9894 goto unordered_bcc;
9895 case UNEQ_EXPR:
9896 rcode1 = UNEQ;
9897 tcode2 = EQ_EXPR;
9898 goto unordered_bcc;
7913f3d0 9899
1eb8759b
RH
9900 unordered_bcc:
9901 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9902 if (can_compare_p (rcode1, mode, ccp_jump))
9903 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9904 if_true_label);
9905 else
9906 {
9907 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9908 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9909 tree cmp0, cmp1;
9910
3a94c984 9911 /* If the target doesn't support combined unordered
1eb8759b
RH
9912 compares, decompose into UNORDERED + comparison. */
9913 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9914 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9915 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9916 do_jump (exp, if_false_label, if_true_label);
9917 }
9918 }
9919 break;
9920
5f2d6cfa
MM
9921 /* Special case:
9922 __builtin_expect (<test>, 0) and
9923 __builtin_expect (<test>, 1)
9924
9925 We need to do this here, so that <test> is not converted to a SCC
9926 operation on machines that use condition code registers and COMPARE
9927 like the PowerPC, and then the jump is done based on whether the SCC
9928 operation produced a 1 or 0. */
9929 case CALL_EXPR:
9930 /* Check for a built-in function. */
9931 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9932 {
9933 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9934 tree arglist = TREE_OPERAND (exp, 1);
9935
9936 if (TREE_CODE (fndecl) == FUNCTION_DECL
9937 && DECL_BUILT_IN (fndecl)
9938 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9939 && arglist != NULL_TREE
9940 && TREE_CHAIN (arglist) != NULL_TREE)
9941 {
9942 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9943 if_true_label);
9944
9945 if (seq != NULL_RTX)
9946 {
9947 emit_insn (seq);
9948 return;
9949 }
9950 }
9951 }
9952 /* fall through and generate the normal code. */
9953
b93a436e
JL
9954 default:
9955 normal:
9956 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9957#if 0
9958 /* This is not needed any more and causes poor code since it causes
9959 comparisons and tests from non-SI objects to have different code
9960 sequences. */
9961 /* Copy to register to avoid generating bad insns by cse
9962 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9963 if (!cse_not_expected && GET_CODE (temp) == MEM)
9964 temp = copy_to_reg (temp);
ca695ac9 9965#endif
b93a436e 9966 do_pending_stack_adjust ();
b30f05db
BS
9967 /* Do any postincrements in the expression that was tested. */
9968 emit_queue ();
9969
998a298e
GK
9970 if (GET_CODE (temp) == CONST_INT
9971 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9972 || GET_CODE (temp) == LABEL_REF)
b30f05db
BS
9973 {
9974 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9975 if (target)
9976 emit_jump (target);
9977 }
b93a436e 9978 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
1eb8759b 9979 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
b93a436e
JL
9980 /* Note swapping the labels gives us not-equal. */
9981 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9982 else if (GET_MODE (temp) != VOIDmode)
b30f05db
BS
9983 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9984 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9985 GET_MODE (temp), NULL_RTX, 0,
9986 if_false_label, if_true_label);
b93a436e
JL
9987 else
9988 abort ();
9989 }
bbf6f052 9990
b93a436e
JL
9991 if (drop_through_label)
9992 {
9993 /* If do_jump produces code that might be jumped around,
9994 do any stack adjusts from that code, before the place
9995 where control merges in. */
9996 do_pending_stack_adjust ();
9997 emit_label (drop_through_label);
9998 }
bbf6f052 9999}
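
/* Editorial sketch, not part of GCC: the label discipline do_jump
   uses for TRUTH_ANDIF_EXPR, over a toy expression type.  For
   "a && b", A is tested first with the shared false label and a null
   true label, so a true A simply falls through into the test of B; a
   drop-through label is made only when the caller passed none.  Every
   "toy_" name is hypothetical; toy_branch stands in for the real
   comparison emitters and treats a null label as fall-through.  */

struct toy_expr
{
  int is_andif;			/* 1 for a && b, 0 for a leaf test */
  struct toy_expr *op0, *op1;
};

extern void toy_branch PARAMS ((struct toy_expr *, void *, void *));
extern void *toy_gen_label PARAMS ((void));
extern void toy_emit_label PARAMS ((void *));

static void
toy_do_jump (exp, if_false, if_true)
     struct toy_expr *exp;
     void *if_false, *if_true;
{
  void *drop_through = 0;

  if (! exp->is_andif)
    {
      /* A leaf comparison: branch directly.  */
      toy_branch (exp, if_false, if_true);
      return;
    }

  if (if_false == 0)
    if_false = drop_through = toy_gen_label ();

  /* If the first operand is false, the whole && is false; if it is
     true, fall through into the test of the second operand.  */
  toy_do_jump (exp->op0, if_false, 0);

  /* The second operand alone now decides the result.  */
  toy_do_jump (exp->op1, if_false, if_true);

  if (drop_through)
    toy_emit_label (drop_through);
}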
b93a436e
JL
10000\f
10001/* Given a comparison expression EXP for values too wide to be compared
10002 with one insn, test the comparison and jump to the appropriate label.
10003 The code of EXP is ignored; we always test GT if SWAP is 0,
10004 and LT if SWAP is 1. */
bbf6f052 10005
b93a436e
JL
10006static void
10007do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10008 tree exp;
10009 int swap;
10010 rtx if_false_label, if_true_label;
10011{
10012 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10013 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10014 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b93a436e 10015 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
bbf6f052 10016
b30f05db 10017 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
f81497d9
RS
10018}
10019
b93a436e
JL
10020/* Compare OP0 with OP1, word at a time, in mode MODE.
10021 UNSIGNEDP says to do unsigned comparison.
10022 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
f81497d9 10023
b93a436e
JL
10024void
10025do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10026 enum machine_mode mode;
10027 int unsignedp;
10028 rtx op0, op1;
10029 rtx if_false_label, if_true_label;
f81497d9 10030{
b93a436e
JL
10031 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10032 rtx drop_through_label = 0;
10033 int i;
f81497d9 10034
b93a436e
JL
10035 if (! if_true_label || ! if_false_label)
10036 drop_through_label = gen_label_rtx ();
10037 if (! if_true_label)
10038 if_true_label = drop_through_label;
10039 if (! if_false_label)
10040 if_false_label = drop_through_label;
f81497d9 10041
b93a436e
JL
10042 /* Compare a word at a time, high order first. */
10043 for (i = 0; i < nwords; i++)
10044 {
b93a436e 10045 rtx op0_word, op1_word;
bbf6f052 10046
b93a436e
JL
10047 if (WORDS_BIG_ENDIAN)
10048 {
10049 op0_word = operand_subword_force (op0, i, mode);
10050 op1_word = operand_subword_force (op1, i, mode);
10051 }
10052 else
10053 {
10054 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10055 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10056 }
bbf6f052 10057
b93a436e 10058 /* All but high-order word must be compared as unsigned. */
b30f05db
BS
10059 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10060 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
10061 NULL_RTX, if_true_label);
bbf6f052 10062
b93a436e 10063 /* Consider lower words only if these are equal. */
b30f05db
BS
10064 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10065 NULL_RTX, 0, NULL_RTX, if_false_label);
b93a436e 10066 }
bbf6f052 10067
b93a436e
JL
10068 if (if_false_label)
10069 emit_jump (if_false_label);
10070 if (drop_through_label)
10071 emit_label (drop_through_label);
bbf6f052
RK
10072}
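
/* Editorial sketch, not part of GCC: the word-at-a-time ordering test
   the loop above emits, shown for a two-word unsigned value.  The
   high words decide the answer whenever they differ; the low words
   are consulted only on equality, and are always compared unsigned.
   The "sketch_" names are hypothetical.  */

static int
sketch_two_word_gtu (hi0, lo0, hi1, lo1)
     unsigned long hi0, lo0, hi1, lo1;
{
  if (hi0 > hi1)
    return 1;			/* would jump to if_true_label */
  if (hi0 != hi1)
    return 0;			/* would jump to if_false_label */

  return lo0 > lo1;		/* lower words only if equal so far */
}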
10073
b93a436e
JL
10074/* Given an EQ_EXPR expression EXP for values too wide to be compared
10075 with one insn, test the comparison and jump to the appropriate label. */
bbf6f052 10076
b93a436e
JL
10077static void
10078do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10079 tree exp;
10080 rtx if_false_label, if_true_label;
bbf6f052 10081{
b93a436e
JL
10082 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10083 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10084 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10085 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10086 int i;
10087 rtx drop_through_label = 0;
bbf6f052 10088
b93a436e
JL
10089 if (! if_false_label)
10090 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 10091
b93a436e 10092 for (i = 0; i < nwords; i++)
b30f05db
BS
10093 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10094 operand_subword_force (op1, i, mode),
10095 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10096 word_mode, NULL_RTX, 0, if_false_label,
10097 NULL_RTX);
bbf6f052 10098
b93a436e
JL
10099 if (if_true_label)
10100 emit_jump (if_true_label);
10101 if (drop_through_label)
10102 emit_label (drop_through_label);
bbf6f052 10103}
b93a436e
JL
10104\f
10105/* Jump according to whether OP0 is 0.
10106 We assume that OP0 has an integer mode that is too wide
10107 for the available compare insns. */
bbf6f052 10108
f5963e61 10109void
b93a436e
JL
10110do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10111 rtx op0;
10112 rtx if_false_label, if_true_label;
ca695ac9 10113{
b93a436e
JL
10114 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10115 rtx part;
10116 int i;
10117 rtx drop_through_label = 0;
bbf6f052 10118
b93a436e
JL
10119 /* The fastest way of doing this comparison on almost any machine is to
10120 "or" all the words and compare the result. If all have to be loaded
10121 from memory and this is a very wide item, it's possible this may
10122 be slower, but that's highly unlikely. */
bbf6f052 10123
b93a436e
JL
10124 part = gen_reg_rtx (word_mode);
10125 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10126 for (i = 1; i < nwords && part != 0; i++)
10127 part = expand_binop (word_mode, ior_optab, part,
10128 operand_subword_force (op0, i, GET_MODE (op0)),
10129 part, 1, OPTAB_WIDEN);
bbf6f052 10130
b93a436e
JL
10131 if (part != 0)
10132 {
b30f05db
BS
10133 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10134 NULL_RTX, 0, if_false_label, if_true_label);
bbf6f052 10135
b93a436e
JL
10136 return;
10137 }
bbf6f052 10138
b93a436e
JL
10139 /* If we couldn't do the "or" simply, do this with a series of compares. */
10140 if (! if_false_label)
10141 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 10142
b93a436e 10143 for (i = 0; i < nwords; i++)
b30f05db
BS
10144 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10145 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
10146 if_false_label, NULL_RTX);
bbf6f052 10147
b93a436e
JL
10148 if (if_true_label)
10149 emit_jump (if_true_label);
0f41302f 10150
b93a436e
JL
10151 if (drop_through_label)
10152 emit_label (drop_through_label);
bbf6f052 10153}
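
/* Editorial sketch, not part of GCC: the "or all the words" zero test
   used above.  A wide value is nonzero exactly when the inclusive OR
   of its words is nonzero, so a single compare against zero suffices.
   The "sketch_" names are hypothetical.  */

static int
sketch_wide_is_zero (words, nwords)
     unsigned long *words;
     int nwords;
{
  unsigned long acc = words[0];
  int i;

  for (i = 1; i < nwords; i++)
    acc |= words[i];		/* "or" all the words together */

  return acc == 0;		/* one compare against zero */
}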
b93a436e 10154\f
b30f05db 10155/* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
b93a436e
JL
10156 (including code to compute the values to be compared)
10157 and set (CC0) according to the result.
b30f05db 10158 The decision as to signed or unsigned comparison must be made by the caller.
bbf6f052 10159
b93a436e 10160 We force a stack adjustment unless there are currently
b30f05db 10161 things pushed on the stack that aren't yet used.
ca695ac9 10162
b30f05db
BS
10163 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10164 compared.
10165
10166 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10167 size of MODE should be used. */
10168
10169rtx
10170compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10171 register rtx op0, op1;
10172 enum rtx_code code;
10173 int unsignedp;
10174 enum machine_mode mode;
10175 rtx size;
729a2125 10176 unsigned int align;
b93a436e 10177{
b30f05db 10178 rtx tem;
76bbe028 10179
b30f05db
BS
10180 /* If one operand is constant, make it the second one. Only do this
10181 if the other operand is not constant as well. */
ca695ac9 10182
8c9864f3 10183 if (swap_commutative_operands_p (op0, op1))
bbf6f052 10184 {
b30f05db
BS
10185 tem = op0;
10186 op0 = op1;
10187 op1 = tem;
10188 code = swap_condition (code);
ca695ac9 10189 }
bbf6f052 10190
b30f05db 10191 if (flag_force_mem)
b93a436e 10192 {
b30f05db
BS
10193 op0 = force_not_mem (op0);
10194 op1 = force_not_mem (op1);
10195 }
bbf6f052 10196
b30f05db
BS
10197 do_pending_stack_adjust ();
10198
10199 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10200 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10201 return tem;
10202
10203#if 0
10204 /* There's no need to do this now that combine.c can eliminate lots of
10205 sign extensions. This can be less efficient in certain cases on other
10206 machines. */
10207
10208 /* If this is a signed equality comparison, we can do it as an
10209 unsigned comparison since zero-extension is cheaper than sign
10210 extension and comparisons with zero are done as unsigned. This is
10211 the case even on machines that can do fast sign extension, since
10212 zero-extension is easier to combine with other operations than
10213 sign-extension is. If we are comparing against a constant, we must
10214 convert it to what it would look like unsigned. */
10215 if ((code == EQ || code == NE) && ! unsignedp
10216 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10217 {
10218 if (GET_CODE (op1) == CONST_INT
10219 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10220 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10221 unsignedp = 1;
b93a436e
JL
10222 }
10223#endif
3a94c984 10224
b30f05db 10225 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
0f41302f 10226
b30f05db 10227 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
ca695ac9 10228}
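
/* Editorial sketch, not part of GCC: when the constant operand is
   moved to the second position, the condition must be mirrored, e.g.
   "2 < x" becomes "x > 2".  This is the job swap_condition performs
   above; the "toy_" names below are hypothetical.  */

enum toy_cond { TOY_EQ, TOY_NE, TOY_LT, TOY_GT, TOY_LE, TOY_GE };

static enum toy_cond
toy_swap_condition (code)
     enum toy_cond code;
{
  switch (code)
    {
    case TOY_LT: return TOY_GT;
    case TOY_GT: return TOY_LT;
    case TOY_LE: return TOY_GE;
    case TOY_GE: return TOY_LE;
    default:     return code;	/* EQ and NE are symmetric */
    }
}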
bbf6f052 10229
b30f05db 10230/* Like do_compare_and_jump but expects the values to compare as two rtx's.
b93a436e 10231 The decision as to signed or unsigned comparison must be made by the caller.
bbf6f052 10232
b93a436e
JL
10233 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10234 compared.
bbf6f052 10235
b93a436e
JL
10236 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10237 size of MODE should be used. */
ca695ac9 10238
b30f05db
BS
10239void
10240do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10241 if_false_label, if_true_label)
b93a436e
JL
10242 register rtx op0, op1;
10243 enum rtx_code code;
10244 int unsignedp;
10245 enum machine_mode mode;
10246 rtx size;
729a2125 10247 unsigned int align;
b30f05db 10248 rtx if_false_label, if_true_label;
bbf6f052 10249{
b93a436e 10250 rtx tem;
b30f05db
BS
10251 int dummy_true_label = 0;
10252
10253 /* Reverse the comparison if that is safe and we want to jump if it is
10254 false. */
10255 if (! if_true_label && ! FLOAT_MODE_P (mode))
10256 {
10257 if_true_label = if_false_label;
10258 if_false_label = 0;
10259 code = reverse_condition (code);
10260 }
bbf6f052 10261
b93a436e
JL
10262 /* If one operand is constant, make it the second one. Only do this
10263 if the other operand is not constant as well. */
e7c33f54 10264
8c9864f3 10265 if (swap_commutative_operands_p (op0, op1))
ca695ac9 10266 {
b93a436e
JL
10267 tem = op0;
10268 op0 = op1;
10269 op1 = tem;
10270 code = swap_condition (code);
10271 }
bbf6f052 10272
b93a436e
JL
10273 if (flag_force_mem)
10274 {
10275 op0 = force_not_mem (op0);
10276 op1 = force_not_mem (op1);
10277 }
bbf6f052 10278
b93a436e 10279 do_pending_stack_adjust ();
ca695ac9 10280
b93a436e
JL
10281 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10282 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
b30f05db
BS
10283 {
10284 if (tem == const_true_rtx)
10285 {
10286 if (if_true_label)
10287 emit_jump (if_true_label);
10288 }
10289 else
10290 {
10291 if (if_false_label)
10292 emit_jump (if_false_label);
10293 }
10294 return;
10295 }
ca695ac9 10296
b93a436e
JL
10297#if 0
10298 /* There's no need to do this now that combine.c can eliminate lots of
10299 sign extensions. This can be less efficient in certain cases on other
10300 machines. */
ca695ac9 10301
b93a436e
JL
10302 /* If this is a signed equality comparison, we can do it as an
10303 unsigned comparison since zero-extension is cheaper than sign
10304 extension and comparisons with zero are done as unsigned. This is
10305 the case even on machines that can do fast sign extension, since
10306 zero-extension is easier to combine with other operations than
10307 sign-extension is. If we are comparing against a constant, we must
10308 convert it to what it would look like unsigned. */
10309 if ((code == EQ || code == NE) && ! unsignedp
10310 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10311 {
10312 if (GET_CODE (op1) == CONST_INT
10313 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10314 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10315 unsignedp = 1;
10316 }
10317#endif
ca695ac9 10318
b30f05db
BS
10319 if (! if_true_label)
10320 {
10321 dummy_true_label = 1;
10322 if_true_label = gen_label_rtx ();
10323 }
10324
10325 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10326 if_true_label);
10327
10328 if (if_false_label)
10329 emit_jump (if_false_label);
10330 if (dummy_true_label)
10331 emit_label (if_true_label);
10332}
10333
10334/* Generate code for a comparison expression EXP (including code to compute
10335 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10336 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10337 generated code will drop through.
10338 SIGNED_CODE should be the rtx operation for this comparison for
10339 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10340
10341 We force a stack adjustment unless there are currently
10342 things pushed on the stack that aren't yet used. */
10343
10344static void
10345do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10346 if_true_label)
10347 register tree exp;
10348 enum rtx_code signed_code, unsigned_code;
10349 rtx if_false_label, if_true_label;
10350{
729a2125 10351 unsigned int align0, align1;
b30f05db
BS
10352 register rtx op0, op1;
10353 register tree type;
10354 register enum machine_mode mode;
10355 int unsignedp;
10356 enum rtx_code code;
10357
10358 /* Don't crash if the comparison was erroneous. */
14a774a9 10359 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
b30f05db
BS
10360 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10361 return;
10362
14a774a9 10363 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
6b16805e
JJ
10364 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10365 return;
10366
b30f05db
BS
10367 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10368 mode = TYPE_MODE (type);
6b16805e
JJ
10369 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10370 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10371 || (GET_MODE_BITSIZE (mode)
31a7659b
JDA
10372 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10373 1)))))))
6b16805e
JJ
10374 {
10375 /* op0 might have been replaced by promoted constant, in which
10376 case the type of second argument should be used. */
10377 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10378 mode = TYPE_MODE (type);
10379 }
b30f05db
BS
10380 unsignedp = TREE_UNSIGNED (type);
10381 code = unsignedp ? unsigned_code : signed_code;
10382
10383#ifdef HAVE_canonicalize_funcptr_for_compare
10384 /* If function pointers need to be "canonicalized" before they can
10385 be reliably compared, then canonicalize them. */
10386 if (HAVE_canonicalize_funcptr_for_compare
10387 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10388 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10389 == FUNCTION_TYPE))
10390 {
10391 rtx new_op0 = gen_reg_rtx (mode);
10392
10393 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10394 op0 = new_op0;
10395 }
10396
10397 if (HAVE_canonicalize_funcptr_for_compare
10398 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10399 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10400 == FUNCTION_TYPE))
10401 {
10402 rtx new_op1 = gen_reg_rtx (mode);
10403
10404 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10405 op1 = new_op1;
10406 }
10407#endif
10408
10409 /* Do any postincrements in the expression that was tested. */
10410 emit_queue ();
10411
10412 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10413 ((mode == BLKmode)
10414 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
19caa751 10415 MIN (align0, align1),
b30f05db 10416 if_false_label, if_true_label);
b93a436e
JL
10417}
10418\f
10419/* Generate code to calculate EXP using a store-flag instruction
10420 and return an rtx for the result. EXP is either a comparison
10421 or a TRUTH_NOT_EXPR whose operand is a comparison.
ca695ac9 10422
b93a436e 10423 If TARGET is nonzero, store the result there if convenient.
ca695ac9 10424
b93a436e
JL
10425 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10426 cheap.
ca695ac9 10427
b93a436e
JL
10428 Return zero if there is no suitable set-flag instruction
10429 available on this machine.
ca695ac9 10430
b93a436e
JL
10431 Once expand_expr has been called on the arguments of the comparison,
10432 we are committed to doing the store flag, since it is not safe to
10433 re-evaluate the expression. We emit the store-flag insn by calling
10434 emit_store_flag, but only expand the arguments if we have a reason
10435 to believe that emit_store_flag will be successful. If we think that
10436 it will, but it isn't, we have to simulate the store-flag with a
10437 set/jump/set sequence. */
ca695ac9 10438
b93a436e
JL
10439static rtx
10440do_store_flag (exp, target, mode, only_cheap)
10441 tree exp;
10442 rtx target;
10443 enum machine_mode mode;
10444 int only_cheap;
10445{
10446 enum rtx_code code;
10447 tree arg0, arg1, type;
10448 tree tem;
10449 enum machine_mode operand_mode;
10450 int invert = 0;
10451 int unsignedp;
10452 rtx op0, op1;
10453 enum insn_code icode;
10454 rtx subtarget = target;
381127e8 10455 rtx result, label;
ca695ac9 10456
b93a436e
JL
10457 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10458 result at the end. We can't simply invert the test since it would
10459 have already been inverted if it were valid. This case occurs for
10460 some floating-point comparisons. */
ca695ac9 10461
b93a436e
JL
10462 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10463 invert = 1, exp = TREE_OPERAND (exp, 0);
ca695ac9 10464
b93a436e
JL
10465 arg0 = TREE_OPERAND (exp, 0);
10466 arg1 = TREE_OPERAND (exp, 1);
5129d2ce
AH
10467
10468 /* Don't crash if the comparison was erroneous. */
10469 if (arg0 == error_mark_node || arg1 == error_mark_node)
10470 return const0_rtx;
10471
b93a436e
JL
10472 type = TREE_TYPE (arg0);
10473 operand_mode = TYPE_MODE (type);
10474 unsignedp = TREE_UNSIGNED (type);
ca695ac9 10475
b93a436e
JL
10476 /* We won't bother with BLKmode store-flag operations because it would mean
10477 passing a lot of information to emit_store_flag. */
10478 if (operand_mode == BLKmode)
10479 return 0;
ca695ac9 10480
b93a436e
JL
10481 /* We won't bother with store-flag operations involving function pointers
10482 when function pointers must be canonicalized before comparisons. */
10483#ifdef HAVE_canonicalize_funcptr_for_compare
10484 if (HAVE_canonicalize_funcptr_for_compare
10485 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10486 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10487 == FUNCTION_TYPE))
10488 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10489 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10490 == FUNCTION_TYPE))))
10491 return 0;
ca695ac9
JB
10492#endif
10493
b93a436e
JL
10494 STRIP_NOPS (arg0);
10495 STRIP_NOPS (arg1);
ca695ac9 10496
b93a436e
JL
10497 /* Get the rtx comparison code to use. We know that EXP is a comparison
10498 operation of some type. Some comparisons against 1 and -1 can be
10499 converted to comparisons with zero. Do so here so that the tests
10500 below will be aware that we have a comparison with zero. These
10501 tests will not catch constants in the first operand, but constants
10502 are rarely passed as the first operand. */
ca695ac9 10503
b93a436e
JL
10504 switch (TREE_CODE (exp))
10505 {
10506 case EQ_EXPR:
10507 code = EQ;
bbf6f052 10508 break;
b93a436e
JL
10509 case NE_EXPR:
10510 code = NE;
bbf6f052 10511 break;
b93a436e
JL
10512 case LT_EXPR:
10513 if (integer_onep (arg1))
10514 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10515 else
10516 code = unsignedp ? LTU : LT;
ca695ac9 10517 break;
b93a436e
JL
10518 case LE_EXPR:
10519 if (! unsignedp && integer_all_onesp (arg1))
10520 arg1 = integer_zero_node, code = LT;
10521 else
10522 code = unsignedp ? LEU : LE;
ca695ac9 10523 break;
b93a436e
JL
10524 case GT_EXPR:
10525 if (! unsignedp && integer_all_onesp (arg1))
10526 arg1 = integer_zero_node, code = GE;
10527 else
10528 code = unsignedp ? GTU : GT;
10529 break;
10530 case GE_EXPR:
10531 if (integer_onep (arg1))
10532 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10533 else
10534 code = unsignedp ? GEU : GE;
ca695ac9 10535 break;
1eb8759b
RH
10536
10537 case UNORDERED_EXPR:
10538 code = UNORDERED;
10539 break;
10540 case ORDERED_EXPR:
10541 code = ORDERED;
10542 break;
10543 case UNLT_EXPR:
10544 code = UNLT;
10545 break;
10546 case UNLE_EXPR:
10547 code = UNLE;
10548 break;
10549 case UNGT_EXPR:
10550 code = UNGT;
10551 break;
10552 case UNGE_EXPR:
10553 code = UNGE;
10554 break;
10555 case UNEQ_EXPR:
10556 code = UNEQ;
10557 break;
1eb8759b 10558
ca695ac9 10559 default:
b93a436e 10560 abort ();
bbf6f052 10561 }
bbf6f052 10562
b93a436e
JL
10563 /* Put a constant second. */
10564 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10565 {
10566 tem = arg0; arg0 = arg1; arg1 = tem;
10567 code = swap_condition (code);
ca695ac9 10568 }
bbf6f052 10569
b93a436e
JL
10570 /* If this is an equality or inequality test of a single bit, we can
10571 do this by shifting the bit being tested to the low-order bit and
10572 masking the result with the constant 1. If the condition was EQ,
10573 we xor it with 1. This does not require an scc insn and is faster
10574 than an scc insn even if we have it. */
d39985fa 10575
b93a436e
JL
10576 if ((code == NE || code == EQ)
10577 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10578 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10579 {
10580 tree inner = TREE_OPERAND (arg0, 0);
10581 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10582 int ops_unsignedp;
bbf6f052 10583
b93a436e
JL
10584 /* If INNER is a right shift of a constant and it plus BITNUM does
10585 not overflow, adjust BITNUM and INNER. */
ca695ac9 10586
b93a436e
JL
10587 if (TREE_CODE (inner) == RSHIFT_EXPR
10588 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10589 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
05bccae2
RK
10590 && bitnum < TYPE_PRECISION (type)
10591 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10592 bitnum - TYPE_PRECISION (type)))
ca695ac9 10593 {
b93a436e
JL
10594 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10595 inner = TREE_OPERAND (inner, 0);
ca695ac9 10596 }
ca695ac9 10597
b93a436e
JL
10598 /* If we are going to be able to omit the AND below, we must do our
10599 operations as unsigned. If we must use the AND, we have a choice.
10600 Normally unsigned is faster, but for some machines signed is. */
10601 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10602#ifdef LOAD_EXTEND_OP
10603 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10604#else
10605 : 1
10606#endif
10607 );
bbf6f052 10608
296b4ed9 10609 if (! get_subtarget (subtarget)
a47fed55 10610 || GET_MODE (subtarget) != operand_mode
e5e809f4 10611 || ! safe_from_p (subtarget, inner, 1))
b93a436e 10612 subtarget = 0;
bbf6f052 10613
b93a436e 10614 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
bbf6f052 10615
b93a436e 10616 if (bitnum != 0)
681cb233 10617 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
b93a436e 10618 size_int (bitnum), subtarget, ops_unsignedp);
bbf6f052 10619
b93a436e
JL
10620 if (GET_MODE (op0) != mode)
10621 op0 = convert_to_mode (mode, op0, ops_unsignedp);
bbf6f052 10622
b93a436e
JL
10623 if ((code == EQ && ! invert) || (code == NE && invert))
10624 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10625 ops_unsignedp, OPTAB_LIB_WIDEN);
bbf6f052 10626
b93a436e
JL
10627 /* Put the AND last so it can combine with more things. */
10628 if (bitnum != TYPE_PRECISION (type) - 1)
10629 op0 = expand_and (op0, const1_rtx, subtarget);
bbf6f052 10630
b93a436e
JL
10631 return op0;
10632 }
bbf6f052 10633
b93a436e 10634 /* Now see if we are likely to be able to do this. Return if not. */
1eb8759b 10635 if (! can_compare_p (code, operand_mode, ccp_store_flag))
b93a436e 10636 return 0;
1eb8759b 10637
b93a436e
JL
10638 icode = setcc_gen_code[(int) code];
10639 if (icode == CODE_FOR_nothing
a995e389 10640 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
ca695ac9 10641 {
b93a436e
JL
10642 /* We can only do this if it is one of the special cases that
10643 can be handled without an scc insn. */
10644 if ((code == LT && integer_zerop (arg1))
10645 || (! only_cheap && code == GE && integer_zerop (arg1)))
10646 ;
10647 else if (BRANCH_COST >= 0
10648 && ! only_cheap && (code == NE || code == EQ)
10649 && TREE_CODE (type) != REAL_TYPE
10650 && ((abs_optab->handlers[(int) operand_mode].insn_code
10651 != CODE_FOR_nothing)
10652 || (ffs_optab->handlers[(int) operand_mode].insn_code
10653 != CODE_FOR_nothing)))
10654 ;
10655 else
10656 return 0;
ca695ac9 10657 }
3a94c984 10658
296b4ed9 10659 if (! get_subtarget (target)
a47fed55 10660 || GET_MODE (subtarget) != operand_mode
e5e809f4 10661 || ! safe_from_p (subtarget, arg1, 1))
b93a436e
JL
10662 subtarget = 0;
10663
10664 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10665 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10666
10667 if (target == 0)
10668 target = gen_reg_rtx (mode);
10669
10670 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
 10671 because, if emit_store_flag does anything, it will succeed and
10672 OP0 and OP1 will not be used subsequently. */
ca695ac9 10673
b93a436e
JL
10674 result = emit_store_flag (target, code,
10675 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10676 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10677 operand_mode, unsignedp, 1);
ca695ac9 10678
b93a436e
JL
10679 if (result)
10680 {
10681 if (invert)
10682 result = expand_binop (mode, xor_optab, result, const1_rtx,
10683 result, 0, OPTAB_LIB_WIDEN);
10684 return result;
ca695ac9 10685 }
bbf6f052 10686
b93a436e
JL
10687 /* If this failed, we have to do this with set/compare/jump/set code. */
10688 if (GET_CODE (target) != REG
10689 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10690 target = gen_reg_rtx (GET_MODE (target));
10691
10692 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10693 result = compare_from_rtx (op0, op1, code, unsignedp,
10694 operand_mode, NULL_RTX, 0);
10695 if (GET_CODE (result) == CONST_INT)
10696 return (((result == const0_rtx && ! invert)
10697 || (result != const0_rtx && invert))
10698 ? const0_rtx : const1_rtx);
ca695ac9 10699
b93a436e
JL
10700 label = gen_label_rtx ();
10701 if (bcc_gen_fctn[(int) code] == 0)
10702 abort ();
0f41302f 10703
b93a436e
JL
10704 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10705 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10706 emit_label (label);
bbf6f052 10707
b93a436e 10708 return target;
ca695ac9 10709}
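
/* Editorial sketch, not part of GCC: the branch-free form produced by
   the single-bit case of do_store_flag above.  The tested bit is
   shifted down to bit 0 and masked with 1; an EQ test additionally
   XORs the result with 1.  The "sketch_" names are hypothetical.  */

static unsigned int
sketch_store_flag_bit_ne (x, bitnum)
     unsigned int x;
     int bitnum;
{
  return (x >> bitnum) & 1;	/* (x & (1 << bitnum)) != 0 */
}

static unsigned int
sketch_store_flag_bit_eq (x, bitnum)
     unsigned int x;
     int bitnum;
{
  return ((x >> bitnum) & 1) ^ 1;	/* xor with 1 for EQ */
}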
b93a436e
JL
10710\f
10711/* Generate a tablejump instruction (used for switch statements). */
10712
10713#ifdef HAVE_tablejump
e87b4f3f 10714
b93a436e
JL
10715/* INDEX is the value being switched on, with the lowest value
10716 in the table already subtracted.
10717 MODE is its expected mode (needed if INDEX is constant).
10718 RANGE is the length of the jump table.
10719 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
88d3b7f0 10720
b93a436e
JL
10721 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10722 index value is out of range. */
0f41302f 10723
ca695ac9 10724void
b93a436e
JL
10725do_tablejump (index, mode, range, table_label, default_label)
10726 rtx index, range, table_label, default_label;
10727 enum machine_mode mode;
ca695ac9 10728{
b93a436e 10729 register rtx temp, vector;
88d3b7f0 10730
b93a436e
JL
10731 /* Do an unsigned comparison (in the proper mode) between the index
10732 expression and the value which represents the length of the range.
10733 Since we just finished subtracting the lower bound of the range
10734 from the index expression, this comparison allows us to simultaneously
10735 check that the original index expression value is both greater than
10736 or equal to the minimum value of the range and less than or equal to
10737 the maximum value of the range. */
709f5be1 10738
c5d5d461
JL
10739 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10740 0, default_label);
bbf6f052 10741
b93a436e
JL
10742 /* If index is in range, it must fit in Pmode.
10743 Convert to Pmode so we can index with it. */
10744 if (mode != Pmode)
10745 index = convert_to_mode (Pmode, index, 1);
bbf6f052 10746
b93a436e
JL
10747 /* Don't let a MEM slip thru, because then INDEX that comes
10748 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10749 and break_out_memory_refs will go to work on it and mess it up. */
10750#ifdef PIC_CASE_VECTOR_ADDRESS
10751 if (flag_pic && GET_CODE (index) != REG)
10752 index = copy_to_mode_reg (Pmode, index);
10753#endif
ca695ac9 10754
b93a436e
JL
10755 /* If flag_force_addr were to affect this address
10756 it could interfere with the tricky assumptions made
10757 about addresses that contain label-refs,
10758 which may be valid only very near the tablejump itself. */
10759 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10760 GET_MODE_SIZE, because this indicates how large insns are. The other
10761 uses should all be Pmode, because they are addresses. This code
10762 could fail if addresses and insns are not the same size. */
10763 index = gen_rtx_PLUS (Pmode,
10764 gen_rtx_MULT (Pmode, index,
10765 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10766 gen_rtx_LABEL_REF (Pmode, table_label));
10767#ifdef PIC_CASE_VECTOR_ADDRESS
10768 if (flag_pic)
10769 index = PIC_CASE_VECTOR_ADDRESS (index);
10770 else
bbf6f052 10771#endif
b93a436e
JL
10772 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10773 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10774 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10775 RTX_UNCHANGING_P (vector) = 1;
10776 convert_move (temp, vector, 0);
10777
10778 emit_jump_insn (gen_tablejump (temp, table_label));
10779
10780 /* If we are generating PIC code or if the table is PC-relative, the
10781 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10782 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10783 emit_barrier ();
bbf6f052 10784}
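
/* Editorial sketch, not part of GCC: the single unsigned comparison
   above checks both ends of the case range at once.  With the low
   bound already subtracted, an index below the range wraps around to
   a huge unsigned value, so "index > range" catches both directions.
   The "sketch_" names are hypothetical.  */

static void *
sketch_pick_case_label (index, low, range, table, default_label)
     long index, low;
     unsigned long range;
     void **table, *default_label;
{
  unsigned long i = (unsigned long) (index - low);

  if (i > range)		/* catches both below LOW and above HIGH */
    return default_label;

  return table[i];
}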
b93a436e 10785
3a94c984 10786#endif /* HAVE_tablejump */