/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "defaults.h"
#include "toplev.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
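/* For example, CEIL (7, 4) is 2, where plain division would truncate
   to 1: seven bytes occupy two four-byte words.  */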

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
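/* For example, with STACK_BOUNDARY == 64 and BITS_PER_UNIT == 8,
   STACK_BYTES is 8, so stack adjustments can be kept in multiples
   of eight bytes.  */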

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* Don't check memory usage, since code is being emitted to check memory
   usage.  Used when flag_check_memory_usage is true, to avoid infinite
   recursion.  */
static int in_check_memory_usage;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;			/* Destination block.  */
  rtx to_addr;			/* Address of the destination.  */
  int autinc_to;		/* Nonzero if TO_ADDR auto-increments.  */
  int explicit_inc_to;		/* Sign of any explicit increment of TO_ADDR.  */
  int to_struct;		/* Nonzero if TO is a structure reference.  */
  rtx from;			/* Source block.  */
  rtx from_addr;		/* Address of the source.  */
  int autinc_from;		/* Nonzero if FROM_ADDR auto-increments.  */
  int explicit_inc_from;	/* Sign of any explicit increment of FROM_ADDR.  */
  int from_struct;		/* Nonzero if FROM is a structure reference.  */
  int len;			/* Bytes remaining to move.  */
  int offset;			/* Current offset into the blocks.  */
  int reverse;			/* Nonzero to move from high addresses down.  */
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;

static rtx get_push_address PROTO ((int));

static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
				    struct move_by_pieces *));
static void clear_by_pieces PROTO((rtx, int, int));
static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
				     struct clear_by_pieces *));
static int is_zeros_p PROTO((tree));
static int mostly_zeros_p PROTO((tree));
static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
					   tree, tree, int));
static void store_constructor PROTO((tree, rtx, int));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PROTO((enum expand_modifier));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree, int));
static int fixed_type_p PROTO((tree));
static rtx var_rtx PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
				 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int, int));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
extern tree truthvalue_conversion PROTO((tree));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif
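/* So with the default ratio of 15, copying a 16-byte structure on a
   32-bit target (four SImode moves) is done inline by move_by_pieces,
   while a 256-byte copy (64 moves) falls through to a movstr pattern
   or a library call; see emit_block_move below.  */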

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var),
				  var, NULL_RTX, NULL_RTX, body,
				  pending_chain);
  return pending_chain;
}
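
/* For example, when the value of `i++' is needed as an operand, the
   expander can, roughly, do

	queued = enqueue_insn (i_rtx, gen_add2_insn (i_rtx, const1_rtx));

   and use QUEUED in place of I; the addition is only emitted when
   emit_queue flushes the chain.  (A rough sketch; the real caller,
   expand_increment, handles many more cases.)  */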

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
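
/* Typical use, sketched: a caller expanding a binary operation would do

	op0 = protect_from_queue (op0, 0);
	op1 = protect_from_queue (op1, 0);

   and then emit an insn using OP0 and OP1 immediately, with no
   emit_queue call in between, per the warning above.  */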

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
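/* For example,

	convert_move (si_reg, qi_reg, 1);

   zero-extends the QImode value into SI_REG because UNSIGNEDP is
   nonzero; passing 0 would sign-extend instead.  (Illustrative only;
   most callers go through convert_to_mode or convert_modes below.)  */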

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
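/* For example, narrowing a constant, roughly:

	rtx x = convert_modes (QImode, SImode, GEN_INT (300), 1);

   yields (const_int 44), the low eight bits of 300, with no insns
   emitted; narrowing a REG instead just returns its lowpart.  */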

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */
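/* For example, an 8-byte copy between word-aligned blocks on a 32-bit
   target becomes two SImode moves; with ALIGN == 1 on a target where
   SLOW_UNALIGNED_ACCESS holds, it becomes eight QImode moves instead.  */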

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
	= (data->autinc_from
	   ? gen_rtx_MEM (mode, data->from_addr)
	   : copy_rtx (change_address (data->from, mode,
				       plus_constant (data->from_addr,
						      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
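/* A typical call, sketched:

	emit_block_move (target, source, GEN_INT (len), align);

   where TARGET and SOURCE are BLKmode MEMs (this function protects them
   from the queue itself) and ALIGN is their known alignment in bytes.  */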

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      retval
	= emit_library_call_value (memcpy_libfunc, NULL_RTX, 0,
				   ptr_mode, 3, XEXP (x, 0), Pmode,
				   XEXP (y, 0), Pmode,
				   convert_to_mode (TYPE_MODE (sizetype), size,
						    TREE_UNSIGNED (sizetype)),
				   TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */


void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
		      gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Emit code to move a block Y to a block X, where X is non-consecutive
   registers represented by a PARALLEL.  */
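/* Each element of the PARALLEL is expected to be an EXPR_LIST whose
   first operand is a register and whose second is a CONST_INT byte
   offset into the memory block, schematically

	(parallel [(expr_list (reg:SI 3) (const_int 0))
		   (expr_list (reg:SI 4) (const_int 4))])

   as used for values passed or returned in non-contiguous registers.  */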
1804
1805void
1806emit_group_load (x, y)
1807 rtx x, y;
1808{
1809 rtx target_reg, source;
1810 int i;
1811
1812 if (GET_CODE (x) != PARALLEL)
1813 abort ();
1814
1815 /* Check for a NULL entry, used to indicate that the parameter goes
1816 both on the stack and in registers. */
1817 if (XEXP (XVECEXP (x, 0, 0), 0))
1818 i = 0;
1819 else
1820 i = 1;
1821
1822 for (; i < XVECLEN (x, 0); i++)
1823 {
1824 rtx element = XVECEXP (x, 0, i);
1825
1826 target_reg = XEXP (element, 0);
1827
1828 if (GET_CODE (y) == MEM)
1829 source = change_address (y, GET_MODE (target_reg),
1830 plus_constant (XEXP (y, 0),
1831 INTVAL (XEXP (element, 1))));
1832 else if (XEXP (element, 1) == const0_rtx)
1833 {
1834 if (GET_MODE (target_reg) == GET_MODE (y))
1835 source = y;
eaa9b4d9
MM
1836 /* Allow for the target_reg to be smaller than the input register
1837 to allow for AIX with 4 DF arguments after a single SI arg. The
1838 last DF argument will only load 1 word into the integer registers,
1839 but load a DF value into the float registers. */
aff4d29b
JW
1840 else if ((GET_MODE_SIZE (GET_MODE (target_reg))
1841 <= GET_MODE_SIZE (GET_MODE (y)))
1842 && GET_MODE (target_reg) == word_mode)
1843 /* This might be a const_double, so we can't just use SUBREG. */
1844 source = operand_subword (y, 0, 0, VOIDmode);
d7d775a0
JW
1845 else if (GET_MODE_SIZE (GET_MODE (target_reg))
1846 == GET_MODE_SIZE (GET_MODE (y)))
1847 source = gen_lowpart (GET_MODE (target_reg), y);
fffa9c1d
JW
1848 else
1849 abort ();
1850 }
1851 else
1852 abort ();
1853
1854 emit_move_insn (target_reg, source);
1855 }
1856}
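
/* Illustration (not part of the original file): a PARALLEL destination of
   the kind the Irix 6 ABI produces might have the shape

     (parallel [(expr_list (reg:DF 32) (const_int 0))
                (expr_list (reg:DI 5) (const_int 8))])

   i.e. each element pairs a hard register with the byte offset of the
   piece of Y it receives; a leading NULL entry would mean part of the
   value also lives on the stack.  */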

/* Emit code to move a block Y to a block X, where Y is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_store (x, y)
     rtx x, y;
{
  rtx source_reg, target;
  int i;

  if (GET_CODE (y) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (y, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (y, 0); i++)
    {
      rtx element = XVECEXP (y, 0, i);

      source_reg = XEXP (element, 0);

      if (GET_CODE (x) == MEM)
        target = change_address (x, GET_MODE (source_reg),
                                 plus_constant (XEXP (x, 0),
                                                INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
        {
          target = x;
          if (GET_MODE (target) != GET_MODE (source_reg))
            target = gen_lowpart (GET_MODE (source_reg), target);
        }
      else
        abort ();

      emit_move_insn (target, source_reg);
    }
}

/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
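
/* Illustration (not part of the original file): after two calls such as
   use_reg (&fusage, gen_rtx_REG (SImode, 4)), the list has the shape

     (expr_list (use (reg:SI 4))
                (expr_list (use (reg:SI 3)) ...))

   which the call emitter later attaches to the call insn's
   CALL_INSN_FUNCTION_USAGE.  */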

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
        use_reg (call_fusage, reg);
    }
}
\f
/* Generate several move instructions to clear LEN bytes of block TO.
   (A MEM rtx with BLKmode).  The caller must pass TO through
   protect_from_queue before calling.  ALIGN (in bytes) is the maximum
   alignment we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     int len, align;
{
  struct clear_by_pieces data;
  rtx to_addr = XEXP (to, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.to = to;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data.autinc_to
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        clear_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
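
/* Illustration (not part of the original file): with LEN == 7 and
   ALIGN == 4 on a typical 32-bit target (MOVE_MAX == 4), the loop above
   emits one SImode store of const0_rtx (len 7 -> 3), then one HImode
   store (3 -> 1), then one QImode store (1 -> 0), walking max_size down
   from MOVE_MAX + 1.  */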

/* Subroutine of clear_by_pieces.  Clear as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
clear_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct clear_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx_MEM (mode, data->to_addr)
             : copy_rtx (change_address (data->to, mode,
                                         plus_constant (data->to_addr,
                                                        data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, const0_rtx));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
   the maximum alignment we can assume it has, measured in bytes.

   If we call a function that returns the length of the block, return it.  */

rtx
clear_storage (object, size, align)
     rtx object;
     rtx size;
     int align;
{
  rtx retval = 0;

  if (GET_MODE (object) == BLKmode)
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
          && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
        clear_by_pieces (object, INTVAL (size), align);

      else
        {
          /* Try the most limited insn first, because there's no point
             including more than one in the machine description unless
             the more limited one has some advantage.  */

          rtx opalign = GEN_INT (align);
          enum machine_mode mode;

          for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
               mode = GET_MODE_WIDER_MODE (mode))
            {
              enum insn_code code = clrstr_optab[(int) mode];

              if (code != CODE_FOR_nothing
                  /* We don't need MODE to be narrower than
                     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
                     the mode mask, as it is returned by the macro, it will
                     definitely be less than the actual mode mask.  */
                  && ((GET_CODE (size) == CONST_INT
                       && ((unsigned HOST_WIDE_INT) INTVAL (size)
                           <= (GET_MODE_MASK (mode) >> 1)))
                      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
                  && (insn_operand_predicate[(int) code][0] == 0
                      || (*insn_operand_predicate[(int) code][0]) (object,
                                                                   BLKmode))
                  && (insn_operand_predicate[(int) code][2] == 0
                      || (*insn_operand_predicate[(int) code][2]) (opalign,
                                                                   VOIDmode)))
                {
                  rtx op1;
                  rtx last = get_last_insn ();
                  rtx pat;

                  op1 = convert_to_mode (mode, size, 1);
                  if (insn_operand_predicate[(int) code][1] != 0
                      && ! (*insn_operand_predicate[(int) code][1]) (op1,
                                                                     mode))
                    op1 = copy_to_mode_reg (mode, op1);

                  pat = GEN_FCN ((int) code) (object, op1, opalign);
                  if (pat)
                    {
                      emit_insn (pat);
                      return 0;
                    }
                  else
                    delete_insns_since (last);
                }
            }


#ifdef TARGET_MEM_FUNCTIONS
          retval
            = emit_library_call_value (memset_libfunc, NULL_RTX, 0,
                                       ptr_mode, 3,
                                       XEXP (object, 0), Pmode,
                                       const0_rtx,
                                       TYPE_MODE (integer_type_node),
                                       convert_to_mode
                                       (TYPE_MODE (sizetype), size,
                                        TREE_UNSIGNED (sizetype)),
                                       TYPE_MODE (sizetype));
#else
          emit_library_call (bzero_libfunc, 0,
                             VOIDmode, 2,
                             XEXP (object, 0), Pmode,
                             convert_to_mode
                             (TYPE_MODE (integer_type_node), size,
                              TREE_UNSIGNED (integer_type_node)),
                             TYPE_MODE (integer_type_node));
#endif
        }
    }
  else
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));

  return retval;
}
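
/* Illustration (not part of the original file): zeroing a 64-byte
   aggregate whose address is already in OBJ_MEM (an assumed word-aligned
   BLKmode MEM) reduces to one call; small constant sizes go through
   clear_by_pieces, larger ones try the clrstr patterns and finally fall
   back to memset/bzero.  */
#if 0
  clear_storage (obj_mem, GEN_INT (64), UNITS_PER_WORD);
#endif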

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}

/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
           && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
                                                    * BITS_PER_UNIT),
                                                   (class == MODE_COMPLEX_INT
                                                    ? MODE_INT : MODE_FLOAT),
                                                   0))
           && (mov_optab->handlers[(int) submode].insn_code
               != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

      /* If this is a stack, push the highpart first, so it
         will be in the argument order.

         In that case, change_address is used only to convert
         the mode, not to change the address.  */
      if (stack)
        {
          /* Note that the real part always precedes the imag part in memory
             regardless of the machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, (XEXP (x, 0))),
                      gen_imagpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, (XEXP (x, 0))),
                      gen_realpart (submode, y)));
#else
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, (XEXP (x, 0))),
                      gen_realpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, (XEXP (x, 0))),
                      gen_imagpart (submode, y)));
#endif
        }
      else
        {
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_realpart (submode, x), gen_realpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_imagpart (submode, x), gen_imagpart (submode, y)));
        }

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
         X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
        {
          anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
          x = change_address (x, VOIDmode, stack_pointer_rtx);
        }
#endif

      /* Show the output dies here.  */
      if (x != y)
        emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          last_insn = emit_move_insn (xpart, ypart);
        }

      return last_insn;
    }
  else
    abort ();
}
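
/* Illustration (not part of the original file): on a 32-bit target with
   no DImode move pattern, the multi-word branch above decomposes a
   DImode emit_move_insn into two SImode moves of the subwords, preceded
   by a CLOBBER of the destination so flow analysis sees the old value
   die.  */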
\f
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
                          - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                         negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                         negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}

/* Return an rtx for the address of the beginning of an as-if-it-was-pushed
   block of SIZE bytes.  */

static rtx
get_push_address (size)
     int size;
{
  register rtx temp;

  if (STACK_PUSH_CODE == POST_DEC)
    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
  else if (STACK_PUSH_CODE == POST_INC)
    temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
  else
    temp = stack_pointer_rtx;

  return copy_to_reg (temp);
}
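
/* Illustration (not part of the original file): with STACK_PUSH_CODE ==
   PRE_DEC the function above returns a copy of the stack pointer itself,
   since a pre-update push leaves the stack pointer at the new data; only
   the post-update codes need the +/- SIZE correction.  */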

/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is the maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
                args_addr, args_so_far, reg_parm_stack_space)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
     int reg_parm_stack_space;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
        abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = change_address (xinner, BLKmode,
                                 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
              < MOVE_RATIO)
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! SLOW_UNALIGNED_ACCESS)
              || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
              || PUSH_ROUNDING (align) == align)
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
                          INTVAL (size) - used, align);

          if (flag_check_memory_usage && ! in_check_memory_usage)
            {
              rtx temp;

              in_check_memory_usage = 1;
              temp = get_push_address (INTVAL (size) - used);
              if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
                emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
                                   temp, ptr_mode,
                                   XEXP (xinner, 0), ptr_mode,
                                   GEN_INT (INTVAL (size) - used),
                                   TYPE_MODE (sizetype));
              else
                emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                                   temp, ptr_mode,
                                   GEN_INT (INTVAL (size) - used),
                                   TYPE_MODE (sizetype),
                                   GEN_INT (MEMORY_USE_RW),
                                   TYPE_MODE (integer_type_node));
              in_check_memory_usage = 0;
            }
        }
      else
#endif /* PUSH_ROUNDING */
        {
          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx_PLUS (Pmode,
                                                                args_addr,
                                                                args_so_far),
                                                  skip));
          if (flag_check_memory_usage && ! in_check_memory_usage)
            {
              rtx target;

              in_check_memory_usage = 1;
              target = copy_to_reg (temp);
              if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
                emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
                                   target, ptr_mode,
                                   XEXP (xinner, 0), ptr_mode,
                                   size, TYPE_MODE (sizetype));
              else
                emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                                   target, ptr_mode,
                                   size, TYPE_MODE (sizetype),
                                   GEN_INT (MEMORY_USE_RW),
                                   TYPE_MODE (integer_type_node));
              in_check_memory_usage = 0;
            }

          /* TEMP is the address of the block.  Copy the data there.  */
          if (GET_CODE (size) == CONST_INT
              && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
                  < MOVE_RATIO))
            {
              move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
                              INTVAL (size), align);
              goto ret;
            }
          else
            {
              rtx opalign = GEN_INT (align);
              enum machine_mode mode;
              rtx target = gen_rtx (MEM, BLKmode, temp);

              for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
                   mode != VOIDmode;
                   mode = GET_MODE_WIDER_MODE (mode))
                {
                  enum insn_code code = movstr_optab[(int) mode];

                  if (code != CODE_FOR_nothing
                      && ((GET_CODE (size) == CONST_INT
                           && ((unsigned HOST_WIDE_INT) INTVAL (size)
                               <= (GET_MODE_MASK (mode) >> 1)))
                          || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
                      && (insn_operand_predicate[(int) code][0] == 0
                          || ((*insn_operand_predicate[(int) code][0])
                              (target, BLKmode)))
                      && (insn_operand_predicate[(int) code][1] == 0
                          || ((*insn_operand_predicate[(int) code][1])
                              (xinner, BLKmode)))
                      && (insn_operand_predicate[(int) code][3] == 0
                          || ((*insn_operand_predicate[(int) code][3])
                              (opalign, VOIDmode))))
                    {
                      rtx op2 = convert_to_mode (mode, size, 1);
                      rtx last = get_last_insn ();
                      rtx pat;

                      if (insn_operand_predicate[(int) code][2] != 0
                          && ! ((*insn_operand_predicate[(int) code][2])
                                (op2, mode)))
                        op2 = copy_to_mode_reg (mode, op2);

                      pat = GEN_FCN ((int) code) (target, xinner,
                                                  op2, opalign);
                      if (pat)
                        {
                          emit_insn (pat);
                          goto ret;
                        }
                      else
                        delete_insns_since (last);
                    }
                }
            }

#ifndef ACCUMULATE_OUTGOING_ARGS
          /* If the source is referenced relative to the stack pointer,
             copy it to another register to stabilize it.  We do not need
             to do this if we know that we won't be changing sp.  */

          if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
              || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
            temp = copy_to_reg (temp);
#endif

          /* Make inhibit_defer_pop nonzero around the library call
             to force it to pop the bcopy-arguments right away.  */
          NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
          emit_library_call (memcpy_libfunc, 0,
                             VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
                             convert_to_mode (TYPE_MODE (sizetype),
                                              size, TREE_UNSIGNED (sizetype)),
                             TYPE_MODE (sizetype));
#else
          emit_library_call (bcopy_libfunc, 0,
                             VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
                             convert_to_mode (TYPE_MODE (integer_type_node),
                                              size,
                                              TREE_UNSIGNED (integer_type_node)),
                             TYPE_MODE (integer_type_node));
#endif
          OK_DEFER_POP;
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)),
                          reg_parm_stack_space);
    }
  else
    {
      rtx addr;
      rtx target = NULL_RTX;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
        addr = gen_push_operand ();
      else
#endif
        {
          if (GET_CODE (args_so_far) == CONST_INT)
            addr
              = memory_address (mode,
                                plus_constant (args_addr,
                                               INTVAL (args_so_far)));
          else
            addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
                                                       args_so_far));
          target = addr;
        }

      emit_move_insn (gen_rtx_MEM (mode, addr), x);

      if (flag_check_memory_usage && ! in_check_memory_usage)
        {
          in_check_memory_usage = 1;
          if (target == 0)
            target = get_push_address (GET_MODE_SIZE (mode));

          if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
            emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
                               target, ptr_mode,
                               XEXP (x, 0), ptr_mode,
                               GEN_INT (GET_MODE_SIZE (mode)),
                               TYPE_MODE (sizetype));
          else
            emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                               target, ptr_mode,
                               GEN_INT (GET_MODE_SIZE (mode)),
                               TYPE_MODE (sizetype),
                               GEN_INT (MEMORY_USE_RW),
                               TYPE_MODE (integer_type_node));
          in_check_memory_usage = 0;
        }
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, x);
      else
        move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}
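
/* Illustration (not part of the original file): a hypothetical caller
   pushing a word-mode scalar VAL with no preallocated argument block and
   nothing passed in registers might use

     emit_push_insn (val, SImode, integer_type_node, NULL_RTX,
                     GET_MODE_ALIGNMENT (SImode) / BITS_PER_UNIT,
                     0, NULL_RTX, 0, NULL_RTX, const0_rtx, 0);

   the real argument lists are built by the call expander in calls.c.  */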
\f
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg;
{
  register rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, have the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
      || TREE_CODE (to) == ARRAY_REF)
    {
      enum machine_mode mode1;
      int bitsize;
      int bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;
      int alignment;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
                                 &unsignedp, &volatilep, &alignment);

      /* If we are going to use store_bit_field and extract_bit_field,
         make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
        tem = stabilize_reference (tem);

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
      if (offset != 0)
        {
          rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

          if (GET_CODE (to_rtx) != MEM)
            abort ();
          to_rtx = change_address (to_rtx, VOIDmode,
                                   gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
                                                 force_reg (ptr_mode, offset_rtx)));
        }
      if (volatilep)
        {
          if (GET_CODE (to_rtx) == MEM)
            {
              /* When the offset is zero, to_rtx is the address of the
                 structure we are storing into, and hence may be shared.
                 We must make a new MEM before setting the volatile bit.  */
              if (offset == 0)
                to_rtx = copy_rtx (to_rtx);

              MEM_VOLATILE_P (to_rtx) = 1;
            }
#if 0  /* This was turned off because, when a field is volatile
          in an object which is not volatile, the object may be in a register,
          and then we would abort over here.  */
          else
            abort ();
#endif
        }

      if (TREE_CODE (to) == COMPONENT_REF
          && TREE_READONLY (TREE_OPERAND (to, 1)))
        {
          if (offset == 0)
            to_rtx = copy_rtx (to_rtx);

          RTX_UNCHANGING_P (to_rtx) = 1;
        }

      /* Check the access.  */
      if (flag_check_memory_usage && GET_CODE (to_rtx) == MEM)
        {
          rtx to_addr;
          int size;
          int best_mode_size;
          enum machine_mode best_mode;

          best_mode = get_best_mode (bitsize, bitpos,
                                     TYPE_ALIGN (TREE_TYPE (tem)),
                                     mode1, volatilep);
          if (best_mode == VOIDmode)
            best_mode = QImode;

          best_mode_size = GET_MODE_BITSIZE (best_mode);
          to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
          size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
          size *= GET_MODE_SIZE (best_mode);

          /* Check the access right of the pointer.  */
          if (size)
            emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
                               to_addr, ptr_mode,
                               GEN_INT (size), TYPE_MODE (sizetype),
                               GEN_INT (MEMORY_USE_WO),
                               TYPE_MODE (integer_type_node));
        }

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
                            (want_value
                             /* Spurious cast makes HPUX compiler happy.  */
                             ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
                             : VOIDmode),
                            unsignedp,
                            /* Required alignment of containing datum.  */
                            alignment,
                            int_size_in_bytes (TREE_TYPE (tem)));
      preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();

      /* If the value is meaningful, convert RESULT to the proper mode.
         Otherwise, return nothing.  */
      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
                                          TYPE_MODE (TREE_TYPE (from)),
                                          result,
                                          TREE_UNSIGNED (TREE_TYPE (to)))
              : NULL_RTX);
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
     a promoted variable where the zero- or sign- extension needs to be done.
     Handling this in the normal way is safe because no computation is done
     before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
        to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);

      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, value);
      else if (GET_MODE (to_rtx) == BLKmode)
        emit_block_move (to_rtx, value, expr_size (from),
                         TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
      else
        emit_move_insn (to_rtx, value);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
      emit_move_insn (to_rtx, temp);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
                              EXPAND_MEMORY_USE_DONT);

      /* Copy the rights of the bitmap.  */
      if (flag_check_memory_usage)
        emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
                           XEXP (to_rtx, 0), ptr_mode,
                           XEXP (from_rtx, 0), ptr_mode,
                           convert_to_mode (TYPE_MODE (sizetype),
                                            size, TREE_UNSIGNED (sizetype)),
                           TYPE_MODE (sizetype));

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
                         VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
                         XEXP (from_rtx, 0), Pmode,
                         convert_to_mode (TYPE_MODE (sizetype),
                                          size, TREE_UNSIGNED (sizetype)),
                         TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
                         VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
                         XEXP (to_rtx, 0), Pmode,
                         convert_to_mode (TYPE_MODE (integer_type_node),
                                          size, TREE_UNSIGNED (integer_type_node)),
                         TYPE_MODE (integer_type_node));
#endif

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}
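
/* Illustration (not part of the original file): a front end expanding the
   statement "x = y + 1;" would reach this point roughly as
   expand_assignment (x_tree, plus_tree, 0, 0); the zero WANT_VALUE says
   the caller discards the value of the assignment expression.  */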

/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If WANT_VALUE is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.  */

rtx
store_expr (exp, target, want_value)
     register tree exp;
     register rtx target;
     int want_value;
{
  register rtx temp;
  int dont_return_target = 0;

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
         part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
         test the condition, doing the appropriate assignment on either
         side.  This avoids the creation of unnecessary temporaries.
         For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 1), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 2), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;

      return want_value ? target : NULL_RTX;
    }
  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
           && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
                          GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
        temp = copy_to_reg (temp);
      dont_return_target = 1;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
        {
          /* Expand EXP into a new pseudo.  */
          temp = gen_reg_rtx (GET_MODE (target));
          temp = expand_expr (exp, temp, GET_MODE (target), 0);
        }
      else
        temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);

      /* If target is volatile, ANSI requires accessing the value
         *from* the target, if it is accessed.  So make that happen.
         In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && want_value)
        dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      /* If we don't want a value, we can do the conversion inside EXP,
         which will often result in some optimizations.  Do the conversion
         in two steps: first change the signedness, if needed, then
         the extend.  But don't do this if the type of EXP is a subtype
         of something else since then the conversion might involve
         more than just converting modes.  */
      if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
          && TREE_TYPE (TREE_TYPE (exp)) == 0)
        {
          if (TREE_UNSIGNED (TREE_TYPE (exp))
              != SUBREG_PROMOTED_UNSIGNED_P (target))
            exp
              = convert
                (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
                                          TREE_TYPE (exp)),
                 exp);

          exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
                                        SUBREG_PROMOTED_UNSIGNED_P (target)),
                         exp);
        }

      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If TEMP is a volatile MEM and we want a result value, make
         the access now so it gets done only once.  Likewise if
         it contains TARGET.  */
      if (GET_CODE (temp) == MEM && want_value
          && (MEM_VOLATILE_P (temp)
              || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
        temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
         sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
        temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                              TYPE_MODE (TREE_TYPE (exp)), temp,
                              SUBREG_PROMOTED_UNSIGNED_P (target));

      convert_move (SUBREG_REG (target), temp,
                    SUBREG_PROMOTED_UNSIGNED_P (target));
      return want_value ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* Return TARGET if it's a specified hardware register.
         If TARGET is a volatile mem ref, either return TARGET
         or return a reg copied *from* TARGET; ANSI requires this.

         Otherwise, if TEMP is not TARGET, return TEMP
         if it is constant (for efficiency),
         or if we really want the correct value.  */
      if (!(target && GET_CODE (target) == REG
            && REGNO (target) < FIRST_PSEUDO_REGISTER)
          && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
          && ! rtx_equal_p (temp, target)
          && (CONSTANT_P (temp) || want_value))
        dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                          temp, TREE_UNSIGNED (TREE_TYPE (exp)));

  if (flag_check_memory_usage
      && GET_CODE (target) == MEM
      && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
    {
      if (GET_CODE (temp) == MEM)
        emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
                           XEXP (target, 0), ptr_mode,
                           XEXP (temp, 0), ptr_mode,
                           expr_size (exp), TYPE_MODE (sizetype));
      else
        emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
                           XEXP (target, 0), ptr_mode,
                           expr_size (exp), TYPE_MODE (sizetype),
                           GEN_INT (MEMORY_USE_WO),
                           TYPE_MODE (integer_type_node));
    }

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.  */

  if (! rtx_equal_p (temp, target) && TREE_CODE (exp) != ERROR_MARK)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
          && GET_MODE (temp) != VOIDmode)
        {
          int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
          if (dont_return_target)
            {
              /* In this case, we will return TEMP,
                 so make sure it has the proper mode.
                 But don't forget to store the value into TARGET.  */
              temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
              emit_move_insn (target, temp);
            }
          else
            convert_move (target, temp, unsignedp);
        }

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
        {
          /* Handle copying a string constant into an array.
             The string constant may be shorter than the array.
             So copy just the string's actual length, and clear the rest.  */
          rtx size;
          rtx addr;

          /* Get the size of the data type of the string,
             which is actually the size of the target.  */
          size = expr_size (exp);
          if (GET_CODE (size) == CONST_INT
              && INTVAL (size) < TREE_STRING_LENGTH (exp))
            emit_block_move (target, temp, size,
                             TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
          else
            {
              /* Compute the size of the data to copy from the string.  */
              tree copy_size
                = size_binop (MIN_EXPR,
                              make_tree (sizetype, size),
                              convert (sizetype,
                                       build_int_2 (TREE_STRING_LENGTH (exp), 0)));
              rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
                                               VOIDmode, 0);
              rtx label = 0;

              /* Copy that much.  */
              emit_block_move (target, temp, copy_size_rtx,
                               TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);

              /* Figure out how much is left in TARGET that we have to clear.
                 Do all calculations in ptr_mode.  */

              addr = XEXP (target, 0);
              addr = convert_modes (ptr_mode, Pmode, addr, 1);

              if (GET_CODE (copy_size_rtx) == CONST_INT)
                {
                  addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
                  size = plus_constant (size, - TREE_STRING_LENGTH (exp));
                }
              else
                {
                  addr = force_reg (ptr_mode, addr);
                  addr = expand_binop (ptr_mode, add_optab, addr,
                                       copy_size_rtx, NULL_RTX, 0,
                                       OPTAB_LIB_WIDEN);

                  size = expand_binop (ptr_mode, sub_optab, size,
                                       copy_size_rtx, NULL_RTX, 0,
                                       OPTAB_LIB_WIDEN);

                  emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
                                 GET_MODE (size), 0, 0);
                  label = gen_label_rtx ();
                  emit_jump_insn (gen_blt (label));
                }

              if (size != const0_rtx)
                {
                  /* Be sure we can write on ADDR.  */
                  if (flag_check_memory_usage)
                    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
                                       addr, ptr_mode,
                                       size, TYPE_MODE (sizetype),
                                       GEN_INT (MEMORY_USE_WO),
                                       TYPE_MODE (integer_type_node));
#ifdef TARGET_MEM_FUNCTIONS
                  emit_library_call (memset_libfunc, 0, VOIDmode, 3,
                                     addr, ptr_mode,
                                     const0_rtx, TYPE_MODE (integer_type_node),
                                     convert_to_mode (TYPE_MODE (sizetype),
                                                      size,
                                                      TREE_UNSIGNED (sizetype)),
                                     TYPE_MODE (sizetype));
#else
                  emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
                                     addr, ptr_mode,
                                     convert_to_mode (TYPE_MODE (integer_type_node),
                                                      size,
                                                      TREE_UNSIGNED (integer_type_node)),
                                     TYPE_MODE (integer_type_node));
#endif
                }

              if (label)
                emit_label (label);
            }
        }
      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
        emit_group_load (target, temp);
      else if (GET_MODE (temp) == BLKmode)
        emit_block_move (target, temp, expr_size (exp),
                         TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
      else
        emit_move_insn (target, temp);
    }

  /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if (want_value && GET_MODE (target) != BLKmode
           && ! (GET_CODE (target) == REG
                 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
3444\f
9de08200
RK
3445/* Return 1 if EXP just contains zeros. */
3446
3447static int
3448is_zeros_p (exp)
3449 tree exp;
3450{
3451 tree elt;
3452
3453 switch (TREE_CODE (exp))
3454 {
3455 case CONVERT_EXPR:
3456 case NOP_EXPR:
3457 case NON_LVALUE_EXPR:
3458 return is_zeros_p (TREE_OPERAND (exp, 0));
3459
3460 case INTEGER_CST:
3461 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3462
3463 case COMPLEX_CST:
3464 return
3465 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3466
3467 case REAL_CST:
41c9120b 3468 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
9de08200
RK
3469
3470 case CONSTRUCTOR:
e1a43f73
PB
3471 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3472 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
9de08200
RK
3473 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3474 if (! is_zeros_p (TREE_VALUE (elt)))
3475 return 0;
3476
3477 return 1;
e9a25f70
JL
3478
3479 default:
3480 return 0;
9de08200 3481 }
9de08200
RK
3482}
3483
3484/* Return 1 if EXP contains mostly (3/4) zeros. */
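/* Illustrative example (added for exposition, not in the original
   source).  For the initializer { 0, 0, 0, 5 }, ZEROS is 3 and ELTS
   is 4, so the test below computes 4 * 3 >= 3 * 4, which holds; the
   constructor counts as mostly zeros and callers will prefer to
   clear the whole object first.  */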
3485
3486static int
3487mostly_zeros_p (exp)
3488 tree exp;
3489{
9de08200
RK
3490 if (TREE_CODE (exp) == CONSTRUCTOR)
3491 {
e1a43f73
PB
3492 int elts = 0, zeros = 0;
3493 tree elt = CONSTRUCTOR_ELTS (exp);
3494 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3495 {
3496 /* If there are no ranges of true bits, it is all zero. */
3497 return elt == NULL_TREE;
3498 }
3499 for (; elt; elt = TREE_CHAIN (elt))
3500 {
3501 /* We do not handle the case where the index is a RANGE_EXPR,
3502 so the statistic will be somewhat inaccurate.
3503 We do make a more accurate count in store_constructor itself,
3504 but since this function is only used for nested array elements,
0f41302f 3505 this should be close enough. */
e1a43f73
PB
3506 if (mostly_zeros_p (TREE_VALUE (elt)))
3507 zeros++;
3508 elts++;
3509 }
9de08200
RK
3510
3511 return 4 * zeros >= 3 * elts;
3512 }
3513
3514 return is_zeros_p (exp);
3515}
3516\f
e1a43f73
PB
3517/* Helper function for store_constructor.
3518 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3519 TYPE is the type of the CONSTRUCTOR, not the element type.
23ccec44
JW
3520 CLEARED is as for store_constructor.
3521
3522 This provides a recursive shortcut back to store_constructor when it isn't
3523 necessary to go through store_field. This is so that we can pass through
3524 the cleared field to let store_constructor know that we may not have to
3525 clear a substructure if the outer structure has already been cleared. */
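/* Sketch of the shortcut (added for exposition, not in the original
   source).  Given

       struct { int a; struct { int x, y; } in; } v = { 0, { 5, 0 } };

   once the outer constructor has cleared all of V, the recursive call
   for V.IN is made with CLEARED set, so only the nonzero field V.IN.X
   needs an explicit store.  */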
e1a43f73
PB
3526
3527static void
3528store_constructor_field (target, bitsize, bitpos,
3529 mode, exp, type, cleared)
3530 rtx target;
3531 int bitsize, bitpos;
3532 enum machine_mode mode;
3533 tree exp, type;
3534 int cleared;
3535{
3536 if (TREE_CODE (exp) == CONSTRUCTOR
23ccec44
JW
3537 && bitpos % BITS_PER_UNIT == 0
3538 /* If we have a non-zero bitpos for a register target, then we just
3539 let store_field do the bitfield handling. This is unlikely to
3540 generate unnecessary clear instructions anyway. */
3541 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 3542 {
126e5b0d
JW
3543 if (bitpos != 0)
3544 target = change_address (target, VOIDmode,
3545 plus_constant (XEXP (target, 0),
3546 bitpos / BITS_PER_UNIT));
3547 store_constructor (exp, target, cleared);
e1a43f73
PB
3548 }
3549 else
3550 store_field (target, bitsize, bitpos, mode, exp,
3551 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3552 int_size_in_bytes (type));
3553}
3554
bbf6f052 3555/* Store the value of constructor EXP into the rtx TARGET.
e1a43f73 3556 TARGET is either a REG or a MEM.
0f41302f 3557 CLEARED is true if TARGET is known to have been zeroed. */
bbf6f052
RK
3558
3559static void
e1a43f73 3560store_constructor (exp, target, cleared)
bbf6f052
RK
3561 tree exp;
3562 rtx target;
e1a43f73 3563 int cleared;
bbf6f052 3564{
4af3895e
JVA
3565 tree type = TREE_TYPE (exp);
3566
bbf6f052
RK
3567 /* We know our target cannot conflict, since safe_from_p has been called. */
3568#if 0
3569 /* Don't try copying piece by piece into a hard register
3570 since that is vulnerable to being clobbered by EXP.
3571 Instead, construct in a pseudo register and then copy it all. */
3572 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3573 {
3574 rtx temp = gen_reg_rtx (GET_MODE (target));
e1a43f73 3575 store_constructor (exp, temp, 0);
bbf6f052
RK
3576 emit_move_insn (target, temp);
3577 return;
3578 }
3579#endif
3580
e44842fe
RK
3581 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3582 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052
RK
3583 {
3584 register tree elt;
3585
4af3895e 3586 /* Inform later passes that the whole union value is dead. */
e44842fe
RK
3587 if (TREE_CODE (type) == UNION_TYPE
3588 || TREE_CODE (type) == QUAL_UNION_TYPE)
38a448ca 3589 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4af3895e
JVA
3590
3591 /* If we are building a static constructor into a register,
3592 set the initial value as zero so we can fold the value into
67225c15
RK
3593 a constant. But if more than one register is involved,
3594 this probably loses. */
3595 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3596 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200
RK
3597 {
3598 if (! cleared)
e9a25f70 3599 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4af3895e 3600
9de08200
RK
3601 cleared = 1;
3602 }
3603
3604 /* If the constructor has fewer fields than the structure
3605 or if we are initializing the structure to mostly zeros,
bbf6f052 3606 clear the whole structure first. */
9de08200
RK
3607 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3608 != list_length (TYPE_FIELDS (type)))
3609 || mostly_zeros_p (exp))
3610 {
3611 if (! cleared)
3612 clear_storage (target, expr_size (exp),
3613 TYPE_ALIGN (type) / BITS_PER_UNIT);
3614
3615 cleared = 1;
3616 }
bbf6f052
RK
3617 else
3618 /* Inform later passes that the old value is dead. */
38a448ca 3619 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
3620
3621 /* Store each element of the constructor into
3622 the corresponding field of TARGET. */
3623
3624 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3625 {
3626 register tree field = TREE_PURPOSE (elt);
3627 register enum machine_mode mode;
3628 int bitsize;
b50d17a1 3629 int bitpos = 0;
bbf6f052 3630 int unsignedp;
b50d17a1
RK
3631 tree pos, constant = 0, offset = 0;
3632 rtx to_rtx = target;
bbf6f052 3633
f32fd778
RS
3634 /* Just ignore missing fields.
3635 We cleared the whole structure, above,
3636 if any fields are missing. */
3637 if (field == 0)
3638 continue;
3639
e1a43f73
PB
3640 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3641 continue;
9de08200 3642
bbf6f052
RK
3643 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3644 unsignedp = TREE_UNSIGNED (field);
3645 mode = DECL_MODE (field);
3646 if (DECL_BIT_FIELD (field))
3647 mode = VOIDmode;
3648
b50d17a1
RK
3649 pos = DECL_FIELD_BITPOS (field);
3650 if (TREE_CODE (pos) == INTEGER_CST)
3651 constant = pos;
3652 else if (TREE_CODE (pos) == PLUS_EXPR
3653 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3654 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3655 else
3656 offset = pos;
3657
3658 if (constant)
cd11b87e 3659 bitpos = TREE_INT_CST_LOW (constant);
b50d17a1
RK
3660
3661 if (offset)
3662 {
3663 rtx offset_rtx;
3664
3665 if (contains_placeholder_p (offset))
3666 offset = build (WITH_RECORD_EXPR, sizetype,
956d6950 3667 offset, make_tree (TREE_TYPE (exp), target));
bbf6f052 3668
b50d17a1
RK
3669 offset = size_binop (FLOOR_DIV_EXPR, offset,
3670 size_int (BITS_PER_UNIT));
bbf6f052 3671
b50d17a1
RK
3672 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3673 if (GET_CODE (to_rtx) != MEM)
3674 abort ();
3675
3676 to_rtx
3677 = change_address (to_rtx, VOIDmode,
38a448ca 3678 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
88f63c77 3679 force_reg (ptr_mode, offset_rtx)));
b50d17a1 3680 }
cf04eb80
RK
3681 if (TREE_READONLY (field))
3682 {
9151b3bf 3683 if (GET_CODE (to_rtx) == MEM)
effbcc6a
RK
3684 to_rtx = copy_rtx (to_rtx);
3685
cf04eb80
RK
3686 RTX_UNCHANGING_P (to_rtx) = 1;
3687 }
3688
e1a43f73
PB
3689 store_constructor_field (to_rtx, bitsize, bitpos,
3690 mode, TREE_VALUE (elt), type, cleared);
bbf6f052
RK
3691 }
3692 }
4af3895e 3693 else if (TREE_CODE (type) == ARRAY_TYPE)
bbf6f052
RK
3694 {
3695 register tree elt;
3696 register int i;
e1a43f73 3697 int need_to_clear;
4af3895e 3698 tree domain = TYPE_DOMAIN (type);
906c4e36
RK
3699 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3700 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4af3895e 3701 tree elttype = TREE_TYPE (type);
bbf6f052 3702
e1a43f73 3703 /* If the constructor has fewer elements than the array,
38e01259 3704 clear the whole array first. Similarly if this is
e1a43f73
PB
3705 a static constructor of a non-BLKmode object. */
3706 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3707 need_to_clear = 1;
3708 else
3709 {
3710 HOST_WIDE_INT count = 0, zero_count = 0;
3711 need_to_clear = 0;
3712 /* This loop is a more accurate version of the loop in
3713 mostly_zeros_p (it handles RANGE_EXPR in an index).
3714 It is also needed to check for missing elements. */
3715 for (elt = CONSTRUCTOR_ELTS (exp);
3716 elt != NULL_TREE;
df0faff1 3717 elt = TREE_CHAIN (elt))
e1a43f73
PB
3718 {
3719 tree index = TREE_PURPOSE (elt);
3720 HOST_WIDE_INT this_node_count;
3721 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3722 {
3723 tree lo_index = TREE_OPERAND (index, 0);
3724 tree hi_index = TREE_OPERAND (index, 1);
3725 if (TREE_CODE (lo_index) != INTEGER_CST
3726 || TREE_CODE (hi_index) != INTEGER_CST)
3727 {
3728 need_to_clear = 1;
3729 break;
3730 }
3731 this_node_count = TREE_INT_CST_LOW (hi_index)
3732 - TREE_INT_CST_LOW (lo_index) + 1;
3733 }
3734 else
3735 this_node_count = 1;
3736 count += this_node_count;
3737 if (mostly_zeros_p (TREE_VALUE (elt)))
3738 zero_count += this_node_count;
3739 }
8e958f70 3740 /* Clear the entire array first if there are any missing elements,
0f41302f 3741 or if the incidence of zero elements is >= 75%. */
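	     /* Worked example (added for exposition, not in the original
		source): with 16 elements of which 12 are zero,
		4 * 12 >= 3 * 16 holds (48 >= 48), so we clear the whole
		array and store only the 4 nonzero elements.  */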
8e958f70
PB
3742 if (count < maxelt - minelt + 1
3743 || 4 * zero_count >= 3 * count)
e1a43f73
PB
3744 need_to_clear = 1;
3745 }
3746 if (need_to_clear)
9de08200
RK
3747 {
3748 if (! cleared)
3749 clear_storage (target, expr_size (exp),
3750 TYPE_ALIGN (type) / BITS_PER_UNIT);
9de08200
RK
3751 cleared = 1;
3752 }
bbf6f052
RK
3753 else
3754 /* Inform later passes that the old value is dead. */
38a448ca 3755 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
3756
3757 /* Store each element of the constructor into
3758 the corresponding element of TARGET, determined
3759 by counting the elements. */
3760 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3761 elt;
3762 elt = TREE_CHAIN (elt), i++)
3763 {
3764 register enum machine_mode mode;
3765 int bitsize;
3766 int bitpos;
3767 int unsignedp;
e1a43f73 3768 tree value = TREE_VALUE (elt);
03dc44a6
RS
3769 tree index = TREE_PURPOSE (elt);
3770 rtx xtarget = target;
bbf6f052 3771
e1a43f73
PB
3772 if (cleared && is_zeros_p (value))
3773 continue;
9de08200 3774
bbf6f052
RK
3775 mode = TYPE_MODE (elttype);
3776 bitsize = GET_MODE_BITSIZE (mode);
3777 unsignedp = TREE_UNSIGNED (elttype);
3778
e1a43f73
PB
3779 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3780 {
3781 tree lo_index = TREE_OPERAND (index, 0);
3782 tree hi_index = TREE_OPERAND (index, 1);
3783 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3784 struct nesting *loop;
05c0b405
PB
3785 HOST_WIDE_INT lo, hi, count;
3786 tree position;
e1a43f73 3787
0f41302f 3788 /* If the range is constant and "small", unroll the loop. */
e1a43f73 3789 if (TREE_CODE (lo_index) == INTEGER_CST
05c0b405
PB
3790 && TREE_CODE (hi_index) == INTEGER_CST
3791 && (lo = TREE_INT_CST_LOW (lo_index),
3792 hi = TREE_INT_CST_LOW (hi_index),
3793 count = hi - lo + 1,
3794 (GET_CODE (target) != MEM
3795 || count <= 2
3796 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3797 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3798 <= 40 * 8))))
e1a43f73 3799 {
05c0b405
PB
3800 lo -= minelt; hi -= minelt;
3801 for (; lo <= hi; lo++)
e1a43f73 3802 {
05c0b405
PB
3803 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3804 store_constructor_field (target, bitsize, bitpos,
3805 mode, value, type, cleared);
e1a43f73
PB
3806 }
3807 }
3808 else
3809 {
3810 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3811 loop_top = gen_label_rtx ();
3812 loop_end = gen_label_rtx ();
3813
3814 unsignedp = TREE_UNSIGNED (domain);
3815
3816 index = build_decl (VAR_DECL, NULL_TREE, domain);
3817
3818 DECL_RTL (index) = index_r
3819 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3820 &unsignedp, 0));
3821
3822 if (TREE_CODE (value) == SAVE_EXPR
3823 && SAVE_EXPR_RTL (value) == 0)
3824 {
0f41302f
MS
3825 /* Make sure value gets expanded once before the
3826 loop. */
e1a43f73
PB
3827 expand_expr (value, const0_rtx, VOIDmode, 0);
3828 emit_queue ();
3829 }
3830 store_expr (lo_index, index_r, 0);
3831 loop = expand_start_loop (0);
3832
0f41302f 3833 /* Assign value to element index. */
e1a43f73
PB
3834 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3835 size_int (BITS_PER_UNIT));
3836 position = size_binop (MULT_EXPR,
3837 size_binop (MINUS_EXPR, index,
3838 TYPE_MIN_VALUE (domain)),
3839 position);
3840 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 3841 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
e1a43f73
PB
3842 xtarget = change_address (target, mode, addr);
3843 if (TREE_CODE (value) == CONSTRUCTOR)
05c0b405 3844 store_constructor (value, xtarget, cleared);
e1a43f73
PB
3845 else
3846 store_expr (value, xtarget, 0);
3847
3848 expand_exit_loop_if_false (loop,
3849 build (LT_EXPR, integer_type_node,
3850 index, hi_index));
3851
3852 expand_increment (build (PREINCREMENT_EXPR,
3853 TREE_TYPE (index),
7b8b9722 3854 index, integer_one_node), 0, 0);
e1a43f73
PB
3855 expand_end_loop ();
3856 emit_label (loop_end);
3857
3858 /* Needed by stupid register allocation, to extend the
3859 lifetime of pseudo-regs used by target past the end
3860 of the loop. */
38a448ca 3861 emit_insn (gen_rtx_USE (GET_MODE (target), target));
e1a43f73
PB
3862 }
3863 }
3864 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
5b6c44ff 3865 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
03dc44a6 3866 {
e1a43f73 3867 rtx pos_rtx, addr;
03dc44a6
RS
3868 tree position;
3869
5b6c44ff
RK
3870 if (index == 0)
3871 index = size_int (i);
3872
e1a43f73
PB
3873 if (minelt)
3874 index = size_binop (MINUS_EXPR, index,
3875 TYPE_MIN_VALUE (domain));
5b6c44ff
RK
3876 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3877 size_int (BITS_PER_UNIT));
3878 position = size_binop (MULT_EXPR, index, position);
03dc44a6 3879 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 3880 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
03dc44a6 3881 xtarget = change_address (target, mode, addr);
e1a43f73 3882 store_expr (value, xtarget, 0);
03dc44a6
RS
3883 }
3884 else
3885 {
3886 if (index != 0)
7c314719 3887 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
03dc44a6
RS
3888 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3889 else
3890 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
e1a43f73
PB
3891 store_constructor_field (target, bitsize, bitpos,
3892 mode, value, type, cleared);
03dc44a6 3893 }
bbf6f052
RK
3894 }
3895 }
071a6595
PB
3896 /* Set constructor assignments. */
3897 else if (TREE_CODE (type) == SET_TYPE)
3898 {
e1a43f73 3899 tree elt = CONSTRUCTOR_ELTS (exp);
e1a43f73 3900 int nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
3901 tree domain = TYPE_DOMAIN (type);
3902 tree domain_min, domain_max, bitlength;
3903
9faa82d8 3904 /* The default implementation strategy is to extract the constant
071a6595
PB
3905 parts of the constructor, use that to initialize the target,
3906 and then "or" in whatever non-constant ranges we need in addition.
3907
3908 If a large set is all zero or all ones, it is
3909 probably better to set it using memset (if available) or bzero.
3910 Also, if a large set has just a single range, it may also be
3911 better to first clear all the first clear the set (using
0f41302f 3912 bzero/memset), and set the bits we want. */
071a6595 3913
0f41302f 3914 /* Check for all zeros. */
e1a43f73 3915 if (elt == NULL_TREE)
071a6595 3916 {
e1a43f73
PB
3917 if (!cleared)
3918 clear_storage (target, expr_size (exp),
3919 TYPE_ALIGN (type) / BITS_PER_UNIT);
071a6595
PB
3920 return;
3921 }
3922
071a6595
PB
3923 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3924 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3925 bitlength = size_binop (PLUS_EXPR,
3926 size_binop (MINUS_EXPR, domain_max, domain_min),
3927 size_one_node);
3928
e1a43f73
PB
3929 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3930 abort ();
3931 nbits = TREE_INT_CST_LOW (bitlength);
3932
3933 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3934 are "complicated" (more than one range), initialize (the
3935 constant parts) by copying from a constant. */
3936 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3937 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 3938 {
b4ee5a72
PB
3939 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3940 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
0f41302f 3941 char *bit_buffer = (char *) alloca (nbits);
b4ee5a72
PB
3942 HOST_WIDE_INT word = 0;
3943 int bit_pos = 0;
3944 int ibit = 0;
0f41302f 3945 int offset = 0; /* In bytes from beginning of set. */
e1a43f73 3946 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 3947 for (;;)
071a6595 3948 {
b4ee5a72
PB
3949 if (bit_buffer[ibit])
3950 {
b09f3348 3951 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
3952 word |= (1 << (set_word_size - 1 - bit_pos));
3953 else
3954 word |= 1 << bit_pos;
3955 }
3956 bit_pos++; ibit++;
3957 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 3958 {
e1a43f73
PB
3959 if (word != 0 || ! cleared)
3960 {
3961 rtx datum = GEN_INT (word);
3962 rtx to_rtx;
0f41302f
MS
3963 /* The assumption here is that it is safe to use
3964 XEXP if the set is multi-word, but not if
3965 it's single-word. */
e1a43f73
PB
3966 if (GET_CODE (target) == MEM)
3967 {
3968 to_rtx = plus_constant (XEXP (target, 0), offset);
3969 to_rtx = change_address (target, mode, to_rtx);
3970 }
3971 else if (offset == 0)
3972 to_rtx = target;
3973 else
3974 abort ();
3975 emit_move_insn (to_rtx, datum);
3976 }
b4ee5a72
PB
3977 if (ibit == nbits)
3978 break;
3979 word = 0;
3980 bit_pos = 0;
3981 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
3982 }
3983 }
071a6595 3984 }
e1a43f73
PB
3985 else if (!cleared)
3986 {
0f41302f 3987 /* Don't bother clearing storage if the set is all ones. */
e1a43f73
PB
3988 if (TREE_CHAIN (elt) != NULL_TREE
3989 || (TREE_PURPOSE (elt) == NULL_TREE
3990 ? nbits != 1
3991 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3992 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3993 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3994 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3995 != nbits))))
3996 clear_storage (target, expr_size (exp),
3997 TYPE_ALIGN (type) / BITS_PER_UNIT);
3998 }
3999
4000 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595
PB
4001 {
4002 /* Start of range of element, or NULL. */
4003 tree startbit = TREE_PURPOSE (elt);
4004 /* End of range of element, or element value. */
4005 tree endbit = TREE_VALUE (elt);
381127e8 4006#ifdef TARGET_MEM_FUNCTIONS
071a6595 4007 HOST_WIDE_INT startb, endb;
381127e8 4008#endif
071a6595
PB
4009 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4010
4011 bitlength_rtx = expand_expr (bitlength,
4012 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4013
4014 /* Handle a non-range tuple element like [ expr ]. */
4015 if (startbit == NULL_TREE)
4016 {
4017 startbit = save_expr (endbit);
4018 endbit = startbit;
4019 }
4020 startbit = convert (sizetype, startbit);
4021 endbit = convert (sizetype, endbit);
4022 if (! integer_zerop (domain_min))
4023 {
4024 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4025 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4026 }
4027 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4028 EXPAND_CONST_ADDRESS);
4029 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4030 EXPAND_CONST_ADDRESS);
4031
4032 if (REG_P (target))
4033 {
4034 targetx = assign_stack_temp (GET_MODE (target),
4035 GET_MODE_SIZE (GET_MODE (target)),
4036 0);
4037 emit_move_insn (targetx, target);
4038 }
4039 else if (GET_CODE (target) == MEM)
4040 targetx = target;
4041 else
4042 abort ();
4043
4044#ifdef TARGET_MEM_FUNCTIONS
4045 /* Optimization: If startbit and endbit are
9faa82d8 4046 constants divisible by BITS_PER_UNIT,
0f41302f 4047 call memset instead. */
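	      /* Worked example (added for exposition, not in the original
		 source): for startbit 8 and endbit 31, STARTB is 8 and
		 ENDB is 32, both multiples of BITS_PER_UNIT on an
		 8-bit-byte target, so we can memset
		 (ENDB - STARTB) / BITS_PER_UNIT == 3 bytes, starting one
		 byte into the set, with all-ones.  */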
071a6595
PB
4048 if (TREE_CODE (startbit) == INTEGER_CST
4049 && TREE_CODE (endbit) == INTEGER_CST
4050 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 4051 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 4052 {
071a6595
PB
4053 emit_library_call (memset_libfunc, 0,
4054 VOIDmode, 3,
e1a43f73
PB
4055 plus_constant (XEXP (targetx, 0),
4056 startb / BITS_PER_UNIT),
071a6595 4057 Pmode,
3b6f75e2 4058 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 4059 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 4060 TYPE_MODE (sizetype));
071a6595
PB
4061 }
4062 else
4063#endif
4064 {
38a448ca 4065 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
071a6595
PB
4066 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4067 bitlength_rtx, TYPE_MODE (sizetype),
4068 startbit_rtx, TYPE_MODE (sizetype),
4069 endbit_rtx, TYPE_MODE (sizetype));
4070 }
4071 if (REG_P (target))
4072 emit_move_insn (target, targetx);
4073 }
4074 }
bbf6f052
RK
4075
4076 else
4077 abort ();
4078}
4079
4080/* Store the value of EXP (an expression tree)
4081 into a subfield of TARGET which has mode MODE and occupies
4082 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4083 If MODE is VOIDmode, it means that we are storing into a bit-field.
4084
4085 If VALUE_MODE is VOIDmode, return nothing in particular.
4086 UNSIGNEDP is not used in this case.
4087
4088 Otherwise, return an rtx for the value stored. This rtx
4089 has mode VALUE_MODE if that is convenient to do.
4090 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4091
4092 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4093 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
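/* Illustrative example (added for exposition, not in the original
   source).  For the assignment

       struct { unsigned f : 5; } s;
       s.f = v;

   the caller passes BITSIZE == 5, BITPOS == 0 and MODE == VOIDmode,
   so the store is done with bit-field techniques rather than an
   ordinary memory reference.  */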
4094
4095static rtx
4096store_field (target, bitsize, bitpos, mode, exp, value_mode,
4097 unsignedp, align, total_size)
4098 rtx target;
4099 int bitsize, bitpos;
4100 enum machine_mode mode;
4101 tree exp;
4102 enum machine_mode value_mode;
4103 int unsignedp;
4104 int align;
4105 int total_size;
4106{
906c4e36 4107 HOST_WIDE_INT width_mask = 0;
bbf6f052 4108
e9a25f70
JL
4109 if (TREE_CODE (exp) == ERROR_MARK)
4110 return const0_rtx;
4111
906c4e36
RK
4112 if (bitsize < HOST_BITS_PER_WIDE_INT)
4113 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
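  /* For exposition (not in the original source): with BITSIZE == 5,
     WIDTH_MASK becomes (1 << 5) - 1 == 0x1f, the mask of the bits
     actually stored; it is used below to recover the stored value
     without refetching from the bit-field itself.  */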
bbf6f052
RK
4114
4115 /* If we are storing into an unaligned field of an aligned union that is
4116 in a register, we may have the mode of TARGET being an integer mode but
4117 MODE == BLKmode. In that case, get an aligned object whose size and
4118 alignment are the same as TARGET and store TARGET into it (we can avoid
4119 the store if the field being stored is the entire width of TARGET). Then
4120 call ourselves recursively to store the field into a BLKmode version of
4121 that object. Finally, load from the object into TARGET. This is not
4122 very efficient in general, but should only be slightly more expensive
4123 than the otherwise-required unaligned accesses. Perhaps this can be
4124 cleaned up later. */
4125
4126 if (mode == BLKmode
4127 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4128 {
4129 rtx object = assign_stack_temp (GET_MODE (target),
4130 GET_MODE_SIZE (GET_MODE (target)), 0);
4131 rtx blk_object = copy_rtx (object);
4132
24a13950
JW
4133 MEM_IN_STRUCT_P (object) = 1;
4134 MEM_IN_STRUCT_P (blk_object) = 1;
bbf6f052
RK
4135 PUT_MODE (blk_object, BLKmode);
4136
4137 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4138 emit_move_insn (object, target);
4139
4140 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4141 align, total_size);
4142
46093b97
RS
4143 /* Even though we aren't returning target, we need to
4144 give it the updated value. */
bbf6f052
RK
4145 emit_move_insn (target, object);
4146
46093b97 4147 return blk_object;
bbf6f052
RK
4148 }
4149
4150 /* If the structure is in a register or if the component
4151 is a bit field, we cannot use addressing to access it.
4152 Use bit-field techniques or SUBREG to store in it. */
4153
4fa52007
RK
4154 if (mode == VOIDmode
4155 || (mode != BLKmode && ! direct_store[(int) mode])
4156 || GET_CODE (target) == REG
c980ac49 4157 || GET_CODE (target) == SUBREG
ccc98036
RS
4158 /* If the field isn't aligned enough to store as an ordinary memref,
4159 store it as a bit field. */
c7a7ac46 4160 || (SLOW_UNALIGNED_ACCESS
ccc98036 4161 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
c7a7ac46 4162 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
bbf6f052 4163 {
906c4e36 4164 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 4165
ef19912d
RK
4166 /* If BITSIZE is narrower than the size of the type of EXP,
4167 we will be narrowing TEMP. Normally, what's wanted are the
4168 low-order bits. However, if EXP's type is a record and this is
4169 a big-endian machine, we want the upper BITSIZE bits. */
4170 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4171 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4172 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4173 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4174 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4175 - bitsize),
4176 temp, 1);
4177
bbd6cf73
RK
4178 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4179 MODE. */
4180 if (mode != VOIDmode && mode != BLKmode
4181 && mode != TYPE_MODE (TREE_TYPE (exp)))
4182 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4183
a281e72d
RK
4184 /* If the modes of TARGET and TEMP are both BLKmode, both
4185 must be in memory and BITPOS must be aligned on a byte
4186 boundary. If so, we simply do a block copy. */
4187 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4188 {
4189 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4190 || bitpos % BITS_PER_UNIT != 0)
4191 abort ();
4192
0086427c
RK
4193 target = change_address (target, VOIDmode,
4194 plus_constant (XEXP (target, 0),
a281e72d
RK
4195 bitpos / BITS_PER_UNIT));
4196
4197 emit_block_move (target, temp,
4198 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4199 / BITS_PER_UNIT),
4200 1);
4201
4202 return value_mode == VOIDmode ? const0_rtx : target;
4203 }
4204
bbf6f052
RK
4205 /* Store the value in the bitfield. */
4206 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4207 if (value_mode != VOIDmode)
4208 {
4209 /* The caller wants an rtx for the value. */
4210 /* If possible, avoid refetching from the bitfield itself. */
4211 if (width_mask != 0
4212 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 4213 {
9074de27 4214 tree count;
5c4d7cfb 4215 enum machine_mode tmode;
86a2c12a 4216
5c4d7cfb
RS
4217 if (unsignedp)
4218 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4219 tmode = GET_MODE (temp);
86a2c12a
RS
4220 if (tmode == VOIDmode)
4221 tmode = value_mode;
5c4d7cfb
RS
4222 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4223 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4224 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4225 }
bbf6f052 4226 return extract_bit_field (target, bitsize, bitpos, unsignedp,
906c4e36
RK
4227 NULL_RTX, value_mode, 0, align,
4228 total_size);
bbf6f052
RK
4229 }
4230 return const0_rtx;
4231 }
4232 else
4233 {
4234 rtx addr = XEXP (target, 0);
4235 rtx to_rtx;
4236
4237 /* If a value is wanted, it must be the lhs;
4238 so make the address stable for multiple use. */
4239
4240 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4241 && ! CONSTANT_ADDRESS_P (addr)
4242 /* A frame-pointer reference is already stable. */
4243 && ! (GET_CODE (addr) == PLUS
4244 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4245 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4246 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4247 addr = copy_to_reg (addr);
4248
4249 /* Now build a reference to just the desired component. */
4250
effbcc6a
RK
4251 to_rtx = copy_rtx (change_address (target, mode,
4252 plus_constant (addr,
4253 (bitpos
4254 / BITS_PER_UNIT))));
bbf6f052
RK
4255 MEM_IN_STRUCT_P (to_rtx) = 1;
4256
4257 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4258 }
4259}
4260\f
4261/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4262 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
742920c7 4263 ARRAY_REFs and find the ultimate containing object, which we return.
bbf6f052
RK
4264
4265 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4266 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
4267 If the position of the field is variable, we store a tree
4268 giving the variable offset (in units) in *POFFSET.
4269 This offset is in addition to the bit position.
4270 If the position is not variable, we store 0 in *POFFSET.
839c4796
RK
4271 We set *PALIGNMENT to the alignment in bytes of the address that will be
4272 computed. This is the alignment of the thing we return if *POFFSET
4273 is zero, but can be less strictly aligned if *POFFSET is nonzero.
bbf6f052
RK
4274
4275 If any of the extraction expressions is volatile,
4276 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4277
4278 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4279 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
4280 is redundant.
4281
4282 If the field describes a variable-sized object, *PMODE is set to
4283 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
839c4796 4284 this case, but the address of the object can be found. */
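/* Illustrative example (added for exposition, not in the original
   source).  For the reference S.A[I].B, this function returns the
   VAR_DECL for S; the constant parts of the offsets of fields A and B
   accumulate into *PBITPOS, while the variable term scaled from the
   index I ends up in *POFFSET (in units).  */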
bbf6f052
RK
4285
4286tree
4969d05d 4287get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
839c4796 4288 punsignedp, pvolatilep, palignment)
bbf6f052
RK
4289 tree exp;
4290 int *pbitsize;
4291 int *pbitpos;
7bb0943f 4292 tree *poffset;
bbf6f052
RK
4293 enum machine_mode *pmode;
4294 int *punsignedp;
4295 int *pvolatilep;
839c4796 4296 int *palignment;
bbf6f052 4297{
b50d17a1 4298 tree orig_exp = exp;
bbf6f052
RK
4299 tree size_tree = 0;
4300 enum machine_mode mode = VOIDmode;
742920c7 4301 tree offset = integer_zero_node;
839c4796 4302 int alignment = BIGGEST_ALIGNMENT;
bbf6f052
RK
4303
4304 if (TREE_CODE (exp) == COMPONENT_REF)
4305 {
4306 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4307 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4308 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4309 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4310 }
4311 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4312 {
4313 size_tree = TREE_OPERAND (exp, 1);
4314 *punsignedp = TREE_UNSIGNED (exp);
4315 }
4316 else
4317 {
4318 mode = TYPE_MODE (TREE_TYPE (exp));
4319 *pbitsize = GET_MODE_BITSIZE (mode);
4320 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4321 }
4322
4323 if (size_tree)
4324 {
4325 if (TREE_CODE (size_tree) != INTEGER_CST)
e7c33f54
RK
4326 mode = BLKmode, *pbitsize = -1;
4327 else
4328 *pbitsize = TREE_INT_CST_LOW (size_tree);
bbf6f052
RK
4329 }
4330
4331 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4332 and find the ultimate containing object. */
4333
4334 *pbitpos = 0;
4335
4336 while (1)
4337 {
7bb0943f 4338 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
bbf6f052 4339 {
7bb0943f
RS
4340 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4341 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4342 : TREE_OPERAND (exp, 2));
e6d8c385 4343 tree constant = integer_zero_node, var = pos;
bbf6f052 4344
e7f3c83f
RK
4345 /* If this field hasn't been filled in yet, don't go
4346 past it. This should only happen when folding expressions
4347 made during type construction. */
4348 if (pos == 0)
4349 break;
4350
e6d8c385
RK
4351 /* Assume here that the offset is a multiple of a unit.
4352 If not, there should be an explicitly added constant. */
4353 if (TREE_CODE (pos) == PLUS_EXPR
4354 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4355 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
7bb0943f 4356 else if (TREE_CODE (pos) == INTEGER_CST)
e6d8c385
RK
4357 constant = pos, var = integer_zero_node;
4358
4359 *pbitpos += TREE_INT_CST_LOW (constant);
8d8c9ba9
RK
4360 offset = size_binop (PLUS_EXPR, offset,
4361 size_binop (EXACT_DIV_EXPR, var,
4362 size_int (BITS_PER_UNIT)));
bbf6f052 4363 }
bbf6f052 4364
742920c7 4365 else if (TREE_CODE (exp) == ARRAY_REF)
bbf6f052 4366 {
742920c7
RK
4367 /* This code is based on the code in case ARRAY_REF in expand_expr
4368 below. We assume here that the size of an array element is
4369 always an integral multiple of BITS_PER_UNIT. */
4370
4371 tree index = TREE_OPERAND (exp, 1);
4372 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4373 tree low_bound
4374 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4375 tree index_type = TREE_TYPE (index);
4376
4c08eef0 4377 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
742920c7 4378 {
4c08eef0
RK
4379 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4380 index);
742920c7
RK
4381 index_type = TREE_TYPE (index);
4382 }
4383
ca0f2220
RH
4384 if (! integer_zerop (low_bound))
4385 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4386
f8dac6eb
R
4387 if (TREE_CODE (index) == INTEGER_CST)
4388 {
4389 index = convert (sbitsizetype, index);
4390 index_type = TREE_TYPE (index);
4391 }
4392
4393 index = fold (build (MULT_EXPR, sbitsizetype, index,
4394 convert (sbitsizetype,
0d15e60c 4395 TYPE_SIZE (TREE_TYPE (exp)))));
742920c7
RK
4396
4397 if (TREE_CODE (index) == INTEGER_CST
4398 && TREE_INT_CST_HIGH (index) == 0)
4399 *pbitpos += TREE_INT_CST_LOW (index);
4400 else
956d6950 4401 {
e5e809f4
JL
4402 if (contains_placeholder_p (index))
4403 index = build (WITH_RECORD_EXPR, sizetype, index, exp);
4404
956d6950 4405 offset = size_binop (PLUS_EXPR, offset,
e5e809f4
JL
4406 size_binop (FLOOR_DIV_EXPR, index,
4407 size_int (BITS_PER_UNIT)));
956d6950 4408 }
bbf6f052
RK
4409 }
4410 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4411 && ! ((TREE_CODE (exp) == NOP_EXPR
4412 || TREE_CODE (exp) == CONVERT_EXPR)
7f62854a
RK
4413 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4414 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4415 != UNION_TYPE))
bbf6f052
RK
4416 && (TYPE_MODE (TREE_TYPE (exp))
4417 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4418 break;
7bb0943f
RS
4419
4420 /* If any reference in the chain is volatile, the effect is volatile. */
4421 if (TREE_THIS_VOLATILE (exp))
4422 *pvolatilep = 1;
839c4796
RK
4423
4424 /* If the offset is non-constant already, then we can't assume any
4425 alignment more than the alignment here. */
4426 if (! integer_zerop (offset))
4427 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4428
bbf6f052
RK
4429 exp = TREE_OPERAND (exp, 0);
4430 }
4431
839c4796
RK
4432 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4433 alignment = MIN (alignment, DECL_ALIGN (exp));
9293498f 4434 else if (TREE_TYPE (exp) != 0)
839c4796
RK
4435 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4436
742920c7
RK
4437 if (integer_zerop (offset))
4438 offset = 0;
4439
b50d17a1
RK
4440 if (offset != 0 && contains_placeholder_p (offset))
4441 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4442
bbf6f052 4443 *pmode = mode;
7bb0943f 4444 *poffset = offset;
839c4796 4445 *palignment = alignment / BITS_PER_UNIT;
bbf6f052
RK
4446 return exp;
4447}
921b3427
RK
4448
4449 /* Subroutine of expand_expr: compute memory_usage from modifier. */
4450static enum memory_use_mode
4451get_memory_usage_from_modifier (modifier)
4452 enum expand_modifier modifier;
4453{
4454 switch (modifier)
4455 {
4456 case EXPAND_NORMAL:
e5e809f4 4457 case EXPAND_SUM:
921b3427
RK
4458 return MEMORY_USE_RO;
4459 break;
4460 case EXPAND_MEMORY_USE_WO:
4461 return MEMORY_USE_WO;
4462 break;
4463 case EXPAND_MEMORY_USE_RW:
4464 return MEMORY_USE_RW;
4465 break;
921b3427 4466 case EXPAND_MEMORY_USE_DONT:
e5e809f4
JL
4467 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4468 MEMORY_USE_DONT, because they are modifiers to a call of
4469 expand_expr in the ADDR_EXPR case of expand_expr. */
921b3427 4470 case EXPAND_CONST_ADDRESS:
e5e809f4 4471 case EXPAND_INITIALIZER:
921b3427
RK
4472 return MEMORY_USE_DONT;
4473 case EXPAND_MEMORY_USE_BAD:
4474 default:
4475 abort ();
4476 }
4477}
bbf6f052
RK
4478\f
4479/* Given an rtx VALUE that may contain additions and multiplications,
4480 return an equivalent value that just refers to a register or memory.
4481 This is done by generating instructions to perform the arithmetic
c45a13a6
RK
4482 and returning a pseudo-register containing the value.
4483
4484 The returned value may be a REG, SUBREG, MEM or constant. */
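/* Illustrative example (added for exposition, not in the original
   source): given VALUE == (plus:SI (reg:SI 100) (const_int 4)),
   force_operand emits an add insn and returns a pseudo register
   holding the sum, suitable for use as an instruction operand.  */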
bbf6f052
RK
4485
4486rtx
4487force_operand (value, target)
4488 rtx value, target;
4489{
4490 register optab binoptab = 0;
4491 /* Use a temporary to force order of execution of calls to
4492 `force_operand'. */
4493 rtx tmp;
4494 register rtx op2;
4495 /* Use subtarget as the target for operand 0 of a binary operation. */
4496 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4497
4498 if (GET_CODE (value) == PLUS)
4499 binoptab = add_optab;
4500 else if (GET_CODE (value) == MINUS)
4501 binoptab = sub_optab;
4502 else if (GET_CODE (value) == MULT)
4503 {
4504 op2 = XEXP (value, 1);
4505 if (!CONSTANT_P (op2)
4506 && !(GET_CODE (op2) == REG && op2 != subtarget))
4507 subtarget = 0;
4508 tmp = force_operand (XEXP (value, 0), subtarget);
4509 return expand_mult (GET_MODE (value), tmp,
906c4e36 4510 force_operand (op2, NULL_RTX),
bbf6f052
RK
4511 target, 0);
4512 }
4513
4514 if (binoptab)
4515 {
4516 op2 = XEXP (value, 1);
4517 if (!CONSTANT_P (op2)
4518 && !(GET_CODE (op2) == REG && op2 != subtarget))
4519 subtarget = 0;
4520 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4521 {
4522 binoptab = add_optab;
4523 op2 = negate_rtx (GET_MODE (value), op2);
4524 }
4525
4526 /* Check for an addition with OP2 a constant integer and our first
4527 operand a PLUS of a virtual register and something else. In that
4528 case, we want to emit the sum of the virtual register and the
4529 constant first and then add the other value. This allows virtual
4530 register instantiation to simply modify the constant rather than
4531 creating another one around this addition. */
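	  /* For exposition (not in the original source): with
	     VALUE == (plus (plus (reg virtual-stack-vars) (reg 101))
		       (const_int 8)),
	     we first form virtual-stack-vars + 8, which instantiation
	     can rewrite as a single frame-pointer offset, and only
	     then add (reg 101).  */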
4532 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4533 && GET_CODE (XEXP (value, 0)) == PLUS
4534 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4535 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4536 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4537 {
4538 rtx temp = expand_binop (GET_MODE (value), binoptab,
4539 XEXP (XEXP (value, 0), 0), op2,
4540 subtarget, 0, OPTAB_LIB_WIDEN);
4541 return expand_binop (GET_MODE (value), binoptab, temp,
4542 force_operand (XEXP (XEXP (value, 0), 1), 0),
4543 target, 0, OPTAB_LIB_WIDEN);
4544 }
4545
4546 tmp = force_operand (XEXP (value, 0), subtarget);
4547 return expand_binop (GET_MODE (value), binoptab, tmp,
906c4e36 4548 force_operand (op2, NULL_RTX),
bbf6f052 4549 target, 0, OPTAB_LIB_WIDEN);
8008b228 4550 /* We give UNSIGNEDP = 0 to expand_binop
bbf6f052
RK
4551 because the only operations we are expanding here are signed ones. */
4552 }
4553 return value;
4554}
4555\f
4556/* Subroutine of expand_expr:
4557 save the non-copied parts (LIST) of an expr (LHS), and return a list
4558 which can restore these values to their previous values,
4559 should something modify their storage. */
4560
4561static tree
4562save_noncopied_parts (lhs, list)
4563 tree lhs;
4564 tree list;
4565{
4566 tree tail;
4567 tree parts = 0;
4568
4569 for (tail = list; tail; tail = TREE_CHAIN (tail))
4570 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4571 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4572 else
4573 {
4574 tree part = TREE_VALUE (tail);
4575 tree part_type = TREE_TYPE (part);
906c4e36 4576 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
06089a8b 4577 rtx target = assign_temp (part_type, 0, 1, 1);
bbf6f052 4578 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
906c4e36 4579 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
bbf6f052 4580 parts = tree_cons (to_be_saved,
906c4e36
RK
4581 build (RTL_EXPR, part_type, NULL_TREE,
4582 (tree) target),
bbf6f052
RK
4583 parts);
4584 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4585 }
4586 return parts;
4587}
4588
4589/* Subroutine of expand_expr:
4590 record the non-copied parts (LIST) of an expr (LHS), and return a list
4591 which specifies the initial values of these parts. */
4592
4593static tree
4594init_noncopied_parts (lhs, list)
4595 tree lhs;
4596 tree list;
4597{
4598 tree tail;
4599 tree parts = 0;
4600
4601 for (tail = list; tail; tail = TREE_CHAIN (tail))
4602 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4603 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4604 else
4605 {
4606 tree part = TREE_VALUE (tail);
4607 tree part_type = TREE_TYPE (part);
906c4e36 4608 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
4609 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4610 }
4611 return parts;
4612}
4613
4614/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
4615 EXP can reference X, which is being modified. TOP_P is nonzero if this
4616 call is going to be used to determine whether we need a temporary
4617 for EXP, as opposed to a recursive call to this function. */
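/* Illustrative example (added for exposition, not in the original
   source): if X is (reg 100) and EXP contains a VAR_DECL whose
   DECL_RTL is that same (reg 100), this function returns 0 and the
   caller must evaluate EXP into a temporary before storing into X.  */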
bbf6f052
RK
4618
4619static int
e5e809f4 4620safe_from_p (x, exp, top_p)
bbf6f052
RK
4621 rtx x;
4622 tree exp;
e5e809f4 4623 int top_p;
bbf6f052
RK
4624{
4625 rtx exp_rtl = 0;
4626 int i, nops;
4627
6676e72f
RK
4628 if (x == 0
4629 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
4630 have no way of allocating temporaries of variable size
4631 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4632 So we assume here that something at a higher level has prevented a
f4510f37 4633 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4
JL
4634 do this when X is BLKmode and when we are at the top level. */
4635 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
f4510f37 4636 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
4637 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4638 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4639 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4640 != INTEGER_CST)
f4510f37 4641 && GET_MODE (x) == BLKmode))
bbf6f052
RK
4642 return 1;
4643
4644 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4645 find the underlying pseudo. */
4646 if (GET_CODE (x) == SUBREG)
4647 {
4648 x = SUBREG_REG (x);
4649 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4650 return 0;
4651 }
4652
4653 /* If X is a location in the outgoing argument area, it is always safe. */
4654 if (GET_CODE (x) == MEM
4655 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4656 || (GET_CODE (XEXP (x, 0)) == PLUS
4657 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4658 return 1;
4659
4660 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4661 {
4662 case 'd':
4663 exp_rtl = DECL_RTL (exp);
4664 break;
4665
4666 case 'c':
4667 return 1;
4668
4669 case 'x':
4670 if (TREE_CODE (exp) == TREE_LIST)
f32fd778 4671 return ((TREE_VALUE (exp) == 0
e5e809f4 4672 || safe_from_p (x, TREE_VALUE (exp), 0))
bbf6f052 4673 && (TREE_CHAIN (exp) == 0
e5e809f4 4674 || safe_from_p (x, TREE_CHAIN (exp), 0)));
bbf6f052
RK
4675 else
4676 return 0;
4677
4678 case '1':
e5e809f4 4679 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052
RK
4680
4681 case '2':
4682 case '<':
e5e809f4
JL
4683 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
4684 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
bbf6f052
RK
4685
4686 case 'e':
4687 case 'r':
4688 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4689 the expression. If it is set, we conflict iff we are that rtx or
4690 both are in memory. Otherwise, we check all operands of the
4691 expression recursively. */
4692
4693 switch (TREE_CODE (exp))
4694 {
4695 case ADDR_EXPR:
e44842fe 4696 return (staticp (TREE_OPERAND (exp, 0))
e5e809f4
JL
4697 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
4698 || TREE_STATIC (exp));
bbf6f052
RK
4699
4700 case INDIRECT_REF:
4701 if (GET_CODE (x) == MEM)
4702 return 0;
4703 break;
4704
4705 case CALL_EXPR:
4706 exp_rtl = CALL_EXPR_RTL (exp);
4707 if (exp_rtl == 0)
4708 {
4709 /* Assume that the call will clobber all hard registers and
4710 all of memory. */
4711 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4712 || GET_CODE (x) == MEM)
4713 return 0;
4714 }
4715
4716 break;
4717
4718 case RTL_EXPR:
3bb5826a
RK
4719 /* If a sequence exists, we would have to scan every instruction
4720 in the sequence to see if it was safe. This is probably not
4721 worthwhile. */
4722 if (RTL_EXPR_SEQUENCE (exp))
bbf6f052
RK
4723 return 0;
4724
3bb5826a 4725 exp_rtl = RTL_EXPR_RTL (exp);
bbf6f052
RK
4726 break;
4727
4728 case WITH_CLEANUP_EXPR:
4729 exp_rtl = RTL_EXPR_RTL (exp);
4730 break;
4731
5dab5552 4732 case CLEANUP_POINT_EXPR:
e5e809f4 4733 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5dab5552 4734
bbf6f052
RK
4735 case SAVE_EXPR:
4736 exp_rtl = SAVE_EXPR_RTL (exp);
4737 break;
4738
8129842c
RS
4739 case BIND_EXPR:
4740 /* The only operand we look at is operand 1. The rest aren't
4741 part of the expression. */
e5e809f4 4742 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
8129842c 4743
bbf6f052 4744 case METHOD_CALL_EXPR:
0f41302f 4745 /* This takes an rtx argument, but shouldn't appear here. */
bbf6f052 4746 abort ();
e9a25f70
JL
4747
4748 default:
4749 break;
bbf6f052
RK
4750 }
4751
4752 /* If we have an rtx, we do not need to scan our operands. */
4753 if (exp_rtl)
4754 break;
4755
4756 nops = tree_code_length[(int) TREE_CODE (exp)];
4757 for (i = 0; i < nops; i++)
4758 if (TREE_OPERAND (exp, i) != 0
e5e809f4 4759 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052
RK
4760 return 0;
4761 }
4762
4763 /* If we have an rtl, find any enclosed object. Then see if we conflict
4764 with it. */
4765 if (exp_rtl)
4766 {
4767 if (GET_CODE (exp_rtl) == SUBREG)
4768 {
4769 exp_rtl = SUBREG_REG (exp_rtl);
4770 if (GET_CODE (exp_rtl) == REG
4771 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4772 return 0;
4773 }
4774
4775 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4776 are memory and EXP is not readonly. */
4777 return ! (rtx_equal_p (x, exp_rtl)
4778 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4779 && ! TREE_READONLY (exp)));
4780 }
4781
4782 /* If we reach here, it is safe. */
4783 return 1;
4784}
4785
4786/* Subroutine of expand_expr: return nonzero iff EXP is an
4787 expression whose type is statically determinable. */
4788
4789static int
4790fixed_type_p (exp)
4791 tree exp;
4792{
4793 if (TREE_CODE (exp) == PARM_DECL
4794 || TREE_CODE (exp) == VAR_DECL
4795 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4796 || TREE_CODE (exp) == COMPONENT_REF
4797 || TREE_CODE (exp) == ARRAY_REF)
4798 return 1;
4799 return 0;
4800}
01c8a7c8
RK
4801
4802/* Subroutine of expand_expr: return rtx if EXP is a
4803 variable or parameter; else return 0. */
4804
4805static rtx
4806var_rtx (exp)
4807 tree exp;
4808{
4809 STRIP_NOPS (exp);
4810 switch (TREE_CODE (exp))
4811 {
4812 case PARM_DECL:
4813 case VAR_DECL:
4814 return DECL_RTL (exp);
4815 default:
4816 return 0;
4817 }
4818}
bbf6f052
RK
4819\f
4820/* expand_expr: generate code for computing expression EXP.
4821 An rtx for the computed value is returned. The value is never null.
4822 In the case of a void EXP, const0_rtx is returned.
4823
4824 The value may be stored in TARGET if TARGET is nonzero.
4825 TARGET is just a suggestion; callers must assume that
4826 the rtx returned may not be the same as TARGET.
4827
4828 If TARGET is CONST0_RTX, it means that the value will be ignored.
4829
4830 If TMODE is not VOIDmode, it suggests generating the
4831 result in mode TMODE. But this is done only when convenient.
4832 Otherwise, TMODE is ignored and the value is generated in its natural mode.
4833 TMODE is just a suggestion; callers must assume that
4834 the rtx returned may not have mode TMODE.
4835
d6a5ac33
RK
4836 Note that TARGET may have neither TMODE nor MODE. In that case, it
4837 probably will not be used.
bbf6f052
RK
4838
4839 If MODIFIER is EXPAND_SUM then when EXP is an addition
4840 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4841 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4842 products as above, or REG or MEM, or constant.
4843 Ordinarily in such cases we would output mul or add instructions
4844 and then return a pseudo reg containing the sum.
4845
4846 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4847 it also marks a label as absolutely required (it can't be dead).
26fcb35a 4848 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
4849 This is used for outputting expressions used in initializers.
4850
4851 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4852 with a constant address even if that address is not normally legitimate.
4853 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
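/* Illustrative example (added for exposition, not in the original
   source): expanding &a[2] with EXPAND_SUM, where A lives in the
   frame, may simply return (plus (reg fp) (const_int N)) without
   emitting an add insn; under EXPAND_NORMAL the sum would be forced
   into a pseudo register instead.  */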
bbf6f052
RK
4854
4855rtx
4856expand_expr (exp, target, tmode, modifier)
4857 register tree exp;
4858 rtx target;
4859 enum machine_mode tmode;
4860 enum expand_modifier modifier;
4861{
b50d17a1
RK
4862 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4863 This is static so it will be accessible to our recursive callees. */
4864 static tree placeholder_list = 0;
bbf6f052
RK
4865 register rtx op0, op1, temp;
4866 tree type = TREE_TYPE (exp);
4867 int unsignedp = TREE_UNSIGNED (type);
4868 register enum machine_mode mode = TYPE_MODE (type);
4869 register enum tree_code code = TREE_CODE (exp);
4870 optab this_optab;
4871 /* Use subtarget as the target for operand 0 of a binary operation. */
4872 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4873 rtx original_target = target;
dd27116b
RK
4874 int ignore = (target == const0_rtx
4875 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4d87de75
RS
4876 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4877 || code == COND_EXPR)
dd27116b 4878 && TREE_CODE (type) == VOID_TYPE));
bbf6f052 4879 tree context;
921b3427
RK
4880 /* Used by check-memory-usage to make modifier read only. */
4881 enum expand_modifier ro_modifier;
bbf6f052 4882
921b3427
RK
4883 /* Make a read-only version of the modifier. */
4884 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
4885 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
4886 ro_modifier = modifier;
4887 else
4888 ro_modifier = EXPAND_NORMAL;
ca695ac9 4889
bbf6f052
RK
4890 /* Don't use hard regs as subtargets, because the combiner
4891 can only handle pseudo regs. */
4892 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4893 subtarget = 0;
4894 /* Avoid subtargets inside loops,
4895 since they hide some invariant expressions. */
4896 if (preserve_subexpressions_p ())
4897 subtarget = 0;
4898
dd27116b
RK
4899 /* If we are going to ignore this result, we need only do something
4900 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
4901 is, short-circuit the most common cases here. Note that we must
4902 not call expand_expr with anything but const0_rtx in case this
4903 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 4904
dd27116b
RK
4905 if (ignore)
4906 {
4907 if (! TREE_SIDE_EFFECTS (exp))
4908 return const0_rtx;
4909
4910 /* Ensure we reference a volatile object even if value is ignored. */
4911 if (TREE_THIS_VOLATILE (exp)
4912 && TREE_CODE (exp) != FUNCTION_DECL
4913 && mode != VOIDmode && mode != BLKmode)
4914 {
921b3427 4915 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
dd27116b
RK
4916 if (GET_CODE (temp) == MEM)
4917 temp = copy_to_reg (temp);
4918 return const0_rtx;
4919 }
4920
4921 if (TREE_CODE_CLASS (code) == '1')
4922 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 4923 VOIDmode, ro_modifier);
dd27116b
RK
4924 else if (TREE_CODE_CLASS (code) == '2'
4925 || TREE_CODE_CLASS (code) == '<')
4926 {
921b3427
RK
4927 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
4928 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
dd27116b
RK
4929 return const0_rtx;
4930 }
4931 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4932 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4933 /* If the second operand has no side effects, just evaluate
0f41302f 4934 the first. */
dd27116b 4935 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 4936 VOIDmode, ro_modifier);
dd27116b 4937
90764a87 4938 target = 0;
dd27116b 4939 }
bbf6f052 4940
e44842fe
RK
4941 /* If we will do cse, generate all results into pseudo registers
4942 since 1) that allows cse to find more things
4943 and 2) otherwise cse could produce an insn the machine
4944 cannot support. */
4945
bbf6f052
RK
4946 if (! cse_not_expected && mode != BLKmode && target
4947 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4948 target = subtarget;
4949
bbf6f052
RK
4950 switch (code)
4951 {
4952 case LABEL_DECL:
b552441b
RS
4953 {
4954 tree function = decl_function_context (exp);
4955 /* Handle using a label in a containing function. */
d0977240
RK
4956 if (function != current_function_decl
4957 && function != inline_function_decl && function != 0)
b552441b
RS
4958 {
4959 struct function *p = find_function_data (function);
4960 /* Allocate in the memory associated with the function
4961 that the label is in. */
4962 push_obstacks (p->function_obstack,
4963 p->function_maybepermanent_obstack);
4964
38a448ca
RH
4965 p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
4966 label_rtx (exp),
4967 p->forced_labels);
b552441b
RS
4968 pop_obstacks ();
4969 }
4970 else if (modifier == EXPAND_INITIALIZER)
38a448ca
RH
4971 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
4972 label_rtx (exp), forced_labels);
4973 temp = gen_rtx_MEM (FUNCTION_MODE,
4974 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
d0977240
RK
4975 if (function != current_function_decl
4976 && function != inline_function_decl && function != 0)
26fcb35a
RS
4977 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4978 return temp;
b552441b 4979 }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
        {
          error_with_decl (exp, "prior parameter's size depends on `%s'");
          return CONST0_RTX (mode);
        }

      /* ... fall through ... */

    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
         but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
          && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
        {
          push_obstacks_nochange ();
          end_temporary_allocation ();
          layout_decl (exp, 0);
          PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
          pop_obstacks ();
        }

      /* Only check automatic variables.  Currently, function arguments are
         not checked (this can be done at compile-time with prototypes).
         Aggregates are not checked.  */
      if (flag_check_memory_usage && code == VAR_DECL
          && GET_CODE (DECL_RTL (exp)) == MEM
          && DECL_CONTEXT (exp) != NULL_TREE
          && ! TREE_STATIC (exp)
          && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
        {
          enum memory_use_mode memory_usage;
          memory_usage = get_memory_usage_from_modifier (modifier);

          if (memory_usage != MEMORY_USE_DONT)
            emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
                               XEXP (DECL_RTL (exp), 0), ptr_mode,
                               GEN_INT (int_size_in_bytes (type)),
                               TYPE_MODE (sizetype),
                               GEN_INT (memory_usage),
                               TYPE_MODE (integer_type_node));
        }

      /* ... fall through ... */

    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
        abort ();

      /* Ensure the variable is marked as used even if it doesn't go through
         a parser.  If it hasn't been used yet, write out an external
         definition.  */
      if (! TREE_USED (exp))
        {
          assemble_external (exp);
          TREE_USED (exp) = 1;
        }

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
          && context != inline_function_decl
          /* If var is static, we don't need a static chain to access it.  */
          && ! (GET_CODE (DECL_RTL (exp)) == MEM
                && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
        {
          rtx addr;

          /* Mark as non-local and addressable.  */
          DECL_NONLOCAL (exp) = 1;
          if (DECL_NO_STATIC_CHAIN (current_function_decl))
            abort ();
          mark_addressable (exp);
          if (GET_CODE (DECL_RTL (exp)) != MEM)
            abort ();
          addr = XEXP (DECL_RTL (exp), 0);
          if (GET_CODE (addr) == MEM)
            addr = gen_rtx_MEM (Pmode,
                                fix_lexical_addr (XEXP (addr, 0), exp));
          else
            addr = fix_lexical_addr (addr, exp);
          temp = change_address (DECL_RTL (exp), mode, addr);
        }

      /* This is the case of an array whose size is to be determined
         from its initializer, while the initializer is still being parsed.
         See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
               && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
        temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
                               XEXP (DECL_RTL (exp), 0));

      /* If DECL_RTL is memory, we are in the normal case and either
         the address is not valid or it is not a register and -fforce-addr
         is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
               && modifier != EXPAND_CONST_ADDRESS
               && modifier != EXPAND_SUM
               && modifier != EXPAND_INITIALIZER
               && (! memory_address_p (DECL_MODE (exp),
                                       XEXP (DECL_RTL (exp), 0))
                   || (flag_force_addr
                       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
        temp = change_address (DECL_RTL (exp), VOIDmode,
                               copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
         if the address is a register.  */
      if (temp != 0)
        {
          if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
            mark_reg_pointer (XEXP (temp, 0),
                              DECL_ALIGN (exp) / BITS_PER_UNIT);

          return temp;
        }

      /* If the mode of DECL_RTL does not match that of the decl, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
          && GET_MODE (DECL_RTL (exp)) != mode)
        {
          /* Get the signedness used for this variable.  Ensure we get the
             same mode we got when the variable was declared.  */
          if (GET_MODE (DECL_RTL (exp))
              != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
            abort ();

          temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
          return temp;
        }
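
      /* Such a mode mismatch arises on targets whose PROMOTE_MODE
         widens small scalars: a `short' variable may be kept in a
         full-word pseudo (say SImode) even though the decl's own mode
         is HImode, and the SUBREG_PROMOTED_* markings let later code
         skip redundant re-extensions.  */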

      return DECL_RTL (exp);

    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
                                 TREE_INT_CST_HIGH (exp),
                                 mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
                          EXPAND_MEMORY_USE_BAD);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
         which will be turned into memory by reload if necessary.

         We used to force a register so that loop.c could see it.  But
         this does not allow gen_* patterns to perform optimizations with
         the constants.  It also produces two insns in cases like "x = 1.0;".
         On most machines, floating-point constants are not permitted in
         many insns, so we'd end up copying it to a register in any case.

         Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);

    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
        output_constant_def (exp);

      /* TREE_CST_RTL probably contains a constant address.
         On RISC machines where a constant address isn't valid,
         make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
          && modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM
          && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
              || (flag_force_addr
                  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
        return change_address (TREE_CST_RTL (exp), VOIDmode,
                               copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);

    case EXPR_WITH_FILE_LOCATION:
      if (EXPR_WFL_EMIT_LINE_NOTE (exp))
        emit_line_note (EXPR_WFL_FILENAME (exp), EXPR_WFL_LINENO (exp));
      return expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);

    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* If this SAVE_EXPR was at global context, assume we are an
         initialization function and move it into our context.  */
      if (context == 0)
        SAVE_EXPR_CONTEXT (exp) = current_function_decl;

      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
        context = 0;

      /* If this is non-local, handle it.  */
      if (context)
        {
          /* The following call just exists to abort if the context is
             not of a containing function.  */
          find_function_data (context);

          temp = SAVE_EXPR_RTL (exp);
          if (temp && GET_CODE (temp) == REG)
            {
              put_var_into_stack (exp);
              temp = SAVE_EXPR_RTL (exp);
            }
          if (temp == 0 || GET_CODE (temp) != MEM)
            abort ();
          return change_address (temp, mode,
                                 fix_lexical_addr (XEXP (temp, 0), exp));
        }
      if (SAVE_EXPR_RTL (exp) == 0)
        {
          if (mode == VOIDmode)
            temp = const0_rtx;
          else
            temp = assign_temp (type, 3, 0, 0);

          SAVE_EXPR_RTL (exp) = temp;
          if (!optimize && GET_CODE (temp) == REG)
            save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
                                                save_expr_regs);

          /* If the mode of TEMP does not match that of the expression, it
             must be a promoted value.  We pass store_expr a SUBREG of the
             wanted mode but mark it so that we know that it was already
             extended.  Note that `unsignedp' was modified above in
             this case.  */

          if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
            {
              temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
              SUBREG_PROMOTED_VAR_P (temp) = 1;
              SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
            }

          if (temp == const0_rtx)
            expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                         EXPAND_MEMORY_USE_BAD);
          else
            store_expr (TREE_OPERAND (exp, 0), temp, 0);

          TREE_USED (exp) = 1;
        }

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
          && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
        {
          /* Compute the signedness and make the proper SUBREG.  */
          promote_mode (type, mode, &unsignedp, 0);
          temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
          return temp;
        }

      return SAVE_EXPR_RTL (exp);
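
      /* The SAVE_EXPR handling above implements evaluate-exactly-once
         semantics: the first expansion computes the operand and parks it
         in SAVE_EXPR_RTL, so a tree such as the one fold can build for
         `e * e' around a single SAVE_EXPR of `e' reuses the saved value
         instead of recomputing the subexpression.  */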

    case UNSAVE_EXPR:
      {
        rtx temp;
        temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
        TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
        return temp;
      }

    case PLACEHOLDER_EXPR:
      {
        tree placeholder_expr;

        /* If there is an object on the head of the placeholder list,
           see if some object in it is of type TYPE or a pointer to it.  For
           further information, see tree.def.  */
        for (placeholder_expr = placeholder_list;
             placeholder_expr != 0;
             placeholder_expr = TREE_CHAIN (placeholder_expr))
          {
            tree need_type = TYPE_MAIN_VARIANT (type);
            tree object = 0;
            tree old_list = placeholder_list;
            tree elt;

            /* Find the outermost reference that is of the type we want.
               If none, see if any object has a type that is a pointer to
               the type we want.  */
            for (elt = TREE_PURPOSE (placeholder_expr);
                 elt != 0 && object == 0;
                 elt
                 = ((TREE_CODE (elt) == COMPOUND_EXPR
                     || TREE_CODE (elt) == COND_EXPR)
                    ? TREE_OPERAND (elt, 1)
                    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
                    ? TREE_OPERAND (elt, 0) : 0))
              if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
                object = elt;

            for (elt = TREE_PURPOSE (placeholder_expr);
                 elt != 0 && object == 0;
                 elt
                 = ((TREE_CODE (elt) == COMPOUND_EXPR
                     || TREE_CODE (elt) == COND_EXPR)
                    ? TREE_OPERAND (elt, 1)
                    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
                    ? TREE_OPERAND (elt, 0) : 0))
              if (POINTER_TYPE_P (TREE_TYPE (elt))
                  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
                      == need_type))
                object = build1 (INDIRECT_REF, need_type, elt);

            if (object != 0)
              {
                /* Expand this object skipping the list entries before
                   it was found in case it is also a PLACEHOLDER_EXPR.
                   In that case, we want to translate it using subsequent
                   entries.  */
                placeholder_list = TREE_CHAIN (placeholder_expr);
                temp = expand_expr (object, original_target, tmode,
                                    ro_modifier);
                placeholder_list = old_list;
                return temp;
              }
          }
      }

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
         and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
                                    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
                            tmode, ro_modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;
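
      /* These two codes cooperate for self-referential types, notably
         Ada records whose field sizes or offsets depend on the record
         object itself: WITH_RECORD_EXPR supplies the record, and each
         PLACEHOLDER_EXPR buried in the size expression is resolved
         against the placeholder list above (see tree.def).  */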

    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL_PTR,
                                 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt (TREE_OPERAND (exp, 0));
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;

    case BIND_EXPR:
      {
        tree vars = TREE_OPERAND (exp, 0);
        int vars_need_expansion = 0;

        /* Need to open a binding contour here because
           if there are any cleanups they must be contained here.  */
        expand_start_bindings (0);

        /* Mark the corresponding BLOCK for output in its proper place.  */
        if (TREE_OPERAND (exp, 2) != 0
            && ! TREE_USED (TREE_OPERAND (exp, 2)))
          insert_block (TREE_OPERAND (exp, 2));

        /* If VARS have not yet been expanded, expand them now.  */
        while (vars)
          {
            if (DECL_RTL (vars) == 0)
              {
                vars_need_expansion = 1;
                expand_decl (vars);
              }
            expand_decl_init (vars);
            vars = TREE_CHAIN (vars);
          }

        temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);

        expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

        return temp;
      }

    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
        {
          if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
            abort ();
          emit_insns (RTL_EXPR_SEQUENCE (exp));
          RTL_EXPR_SEQUENCE (exp) = const0_rtx;
        }
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);

    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
         subexpressions.  */
      if (ignore)
        {
          tree elt;
          for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
            expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
                         EXPAND_MEMORY_USE_BAD);
          return const0_rtx;
        }

      /* All elts simple constants => refer to a constant in memory.  But
         if this is a non-BLKmode mode, let it store a field at a time
         since that should make a CONST_INT or CONST_DOUBLE when we
         fold.  Likewise, if we have a target we can use, it is best to
         store directly into the target unless the type is large enough
         that memcpy will be used.  If we are making an initializer and
         all operands are constant, put it in memory as well.  */
      else if ((TREE_STATIC (exp)
                && ((mode == BLKmode
                     && ! (target != 0 && safe_from_p (target, exp, 1)))
                    || TREE_ADDRESSABLE (exp)
                    || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
                        && (move_by_pieces_ninsns
                            (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
                             TYPE_ALIGN (type) / BITS_PER_UNIT)
                            > MOVE_RATIO)
                        && ! mostly_zeros_p (exp))))
               || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
        {
          rtx constructor = output_constant_def (exp);
          if (modifier != EXPAND_CONST_ADDRESS
              && modifier != EXPAND_INITIALIZER
              && modifier != EXPAND_SUM
              && (! memory_address_p (GET_MODE (constructor),
                                      XEXP (constructor, 0))
                  || (flag_force_addr
                      && GET_CODE (XEXP (constructor, 0)) != REG)))
            constructor = change_address (constructor, VOIDmode,
                                          XEXP (constructor, 0));
          return constructor;
        }

      else
        {
          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (target == 0 || ! safe_from_p (target, exp, 1)
              || GET_CODE (target) == PARALLEL)
            {
              if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
                target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
              else
                target = assign_temp (type, 0, 1, 1);
            }

          if (TREE_READONLY (exp))
            {
              if (GET_CODE (target) == MEM)
                target = copy_rtx (target);

              RTX_UNCHANGING_P (target) = 1;
            }

          store_constructor (exp, target, 0);
          return target;
        }
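
      /* So a static, fully constant aggregate such as

             static const int v[4] = {1, 2, 3, 4};

         is emitted once in memory via output_constant_def, while a
         small automatic initializer is normally built up in place,
         field by field, by store_constructor.  */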

    case INDIRECT_REF:
      {
        tree exp1 = TREE_OPERAND (exp, 0);
        tree exp2;
        tree index;
        tree string = string_constant (exp1, &index);
        int i;

        /* Try to optimize reads from const strings.  */
        if (string
            && TREE_CODE (string) == STRING_CST
            && TREE_CODE (index) == INTEGER_CST
            && !TREE_INT_CST_HIGH (index)
            && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
            && GET_MODE_CLASS (mode) == MODE_INT
            && GET_MODE_SIZE (mode) == 1
            && modifier != EXPAND_MEMORY_USE_WO)
          return GEN_INT (TREE_STRING_POINTER (string)[i]);

        op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
        op0 = memory_address (mode, op0);

        if (flag_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
          {
            enum memory_use_mode memory_usage;
            memory_usage = get_memory_usage_from_modifier (modifier);

            if (memory_usage != MEMORY_USE_DONT)
              {
                in_check_memory_usage = 1;
                emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
                                   op0, ptr_mode,
                                   GEN_INT (int_size_in_bytes (type)),
                                   TYPE_MODE (sizetype),
                                   GEN_INT (memory_usage),
                                   TYPE_MODE (integer_type_node));
                in_check_memory_usage = 0;
              }
          }

        temp = gen_rtx_MEM (mode, op0);
        /* If the address was computed by addition,
           mark this as an element of an aggregate.  */
        if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
            || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
                && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
            || AGGREGATE_TYPE_P (TREE_TYPE (exp))
            || (TREE_CODE (exp1) == ADDR_EXPR
                && (exp2 = TREE_OPERAND (exp1, 0))
                && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
          MEM_IN_STRUCT_P (temp) = 1;
        MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;

        /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
           here, because, in C and C++, the fact that a location is accessed
           through a pointer to const does not mean that the value there can
           never change.  Languages where it can never change should
           also set TREE_STATIC.  */
        RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
        return temp;
      }
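
      /* The const-string shortcut above means a dereference like
         `*("hello" + 1)' never touches memory; it is folded directly
         to the character constant 'e'.  */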

    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
        abort ();

      {
        tree array = TREE_OPERAND (exp, 0);
        tree domain = TYPE_DOMAIN (TREE_TYPE (array));
        tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
        tree index = TREE_OPERAND (exp, 1);
        tree index_type = TREE_TYPE (index);
        HOST_WIDE_INT i;

        /* Optimize the special case of a zero lower bound.

           We convert the low_bound to sizetype to avoid some problems
           with constant folding.  (E.g. suppose the lower bound is 1,
           and its mode is QI.  Without the conversion, (ARRAY
           +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
           +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)

           But sizetype isn't quite right either (especially if
           the low bound is negative).  FIXME */

        if (! integer_zerop (low_bound))
          index = fold (build (MINUS_EXPR, index_type, index,
                               convert (sizetype, low_bound)));

        /* Fold an expression like: "foo"[2].
           This is not done in fold so it won't happen inside &.
           Don't fold if this is for wide characters since it's too
           difficult to do correctly and this is a very rare case.  */

        if (TREE_CODE (array) == STRING_CST
            && TREE_CODE (index) == INTEGER_CST
            && !TREE_INT_CST_HIGH (index)
            && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
            && GET_MODE_CLASS (mode) == MODE_INT
            && GET_MODE_SIZE (mode) == 1)
          return GEN_INT (TREE_STRING_POINTER (array)[i]);

        /* If this is a constant index into a constant array,
           just get the value from the array.  Handle both the cases when
           we have an explicit constructor and when our operand is a variable
           that was declared const.  */

        if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
          {
            if (TREE_CODE (index) == INTEGER_CST
                && TREE_INT_CST_HIGH (index) == 0)
              {
                tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));

                i = TREE_INT_CST_LOW (index);
                while (elem && i--)
                  elem = TREE_CHAIN (elem);
                if (elem)
                  return expand_expr (fold (TREE_VALUE (elem)), target,
                                      tmode, ro_modifier);
              }
          }

        else if (optimize >= 1
                 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
                 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
                 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
          {
            if (TREE_CODE (index) == INTEGER_CST)
              {
                tree init = DECL_INITIAL (array);

                i = TREE_INT_CST_LOW (index);
                if (TREE_CODE (init) == CONSTRUCTOR)
                  {
                    tree elem = CONSTRUCTOR_ELTS (init);

                    while (elem
                           && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
                      elem = TREE_CHAIN (elem);
                    if (elem)
                      return expand_expr (fold (TREE_VALUE (elem)), target,
                                          tmode, ro_modifier);
                  }
                else if (TREE_CODE (init) == STRING_CST
                         && TREE_INT_CST_HIGH (index) == 0
                         && (TREE_INT_CST_LOW (index)
                             < TREE_STRING_LENGTH (init)))
                  return (GEN_INT
                          (TREE_STRING_POINTER
                           (init)[TREE_INT_CST_LOW (index)]));
              }
          }
      }
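
      /* With optimization, then, an access like

             static const int primes[] = {2, 3, 5, 7};
             ... primes[2] ...

         is folded right here to the constant 5 by walking the
         DECL_INITIAL constructor, with no load emitted.  */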

      /* ... fall through ... */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
         appropriate field if it is present.  Don't do this if we have
         already written the data since we want to refer to that copy
         and varasm.c assumes that's what we'll do.  */
      if (code != ARRAY_REF
          && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
          && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
        {
          tree elt;

          for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
               elt = TREE_CHAIN (elt))
            if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
                /* We can normally use the value of the field in the
                   CONSTRUCTOR.  However, if this is a bitfield in
                   an integral mode that we can fit in a HOST_WIDE_INT,
                   we must mask only the number of bits in the bitfield,
                   since this is done implicitly by the constructor.  If
                   the bitfield does not meet either of those conditions,
                   we can't do this optimization.  */
                && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
                    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
                         == MODE_INT)
                        && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
                            <= HOST_BITS_PER_WIDE_INT))))
              {
                op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
                if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
                  {
                    int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));

                    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
                      {
                        op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
                        op0 = expand_and (op0, op1, target);
                      }
                    else
                      {
                        enum machine_mode imode
                          = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
                        tree count
                          = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
                                         0);

                        op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                        op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                      }
                  }

                return op0;
              }
        }
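
      /* The masking mirrors what storing through the constructor would
         have produced: an unsigned bitfield `f : 3' initialized with 9
         reads back as 9 & 7 == 1, while a signed field of the same
         width is sign-extended by the shift pair instead.  */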

      {
        enum machine_mode mode1;
        int bitsize;
        int bitpos;
        tree offset;
        int volatilep = 0;
        int alignment;
        tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                        &mode1, &unsignedp, &volatilep,
                                        &alignment);

        /* If we got back the original object, something is wrong.  Perhaps
           we are evaluating an expression too early.  In any event, don't
           infinitely recurse.  */
        if (tem == exp)
          abort ();

        /* If TEM's type is a union of variable size, pass TARGET to the inner
           computation, since it will need a temporary and TARGET is known
           to suffice.  This occurs in unchecked conversion in Ada.  */

        op0 = expand_expr (tem,
                           (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
                            && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
                                != INTEGER_CST)
                            ? target : NULL_RTX),
                           VOIDmode,
                           modifier == EXPAND_INITIALIZER
                           ? modifier : EXPAND_NORMAL);

        /* If this is a constant, put it into a register if it is a
           legitimate constant and into memory if it isn't.  */
        if (CONSTANT_P (op0))
          {
            enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
            if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
              op0 = force_reg (mode, op0);
            else
              op0 = validize_mem (force_const_mem (mode, op0));
          }

        if (offset != 0)
          {
            rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

            if (GET_CODE (op0) != MEM)
              abort ();

            if (GET_MODE (offset_rtx) != ptr_mode)
#ifdef POINTERS_EXTEND_UNSIGNED
              offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 1);
#else
              offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

            op0 = change_address (op0, VOIDmode,
                                  gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
                                                force_reg (ptr_mode, offset_rtx)));
          }

        /* Don't forget about volatility even if this is a bitfield.  */
        if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
          {
            op0 = copy_rtx (op0);
            MEM_VOLATILE_P (op0) = 1;
          }

        /* Check the access.  */
        if (flag_check_memory_usage && GET_CODE (op0) == MEM)
          {
            enum memory_use_mode memory_usage;
            memory_usage = get_memory_usage_from_modifier (modifier);

            if (memory_usage != MEMORY_USE_DONT)
              {
                rtx to;
                int size;

                to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
                size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;

                /* Check the access right of the pointer.  */
                if (size > BITS_PER_UNIT)
                  emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
                                     to, ptr_mode,
                                     GEN_INT (size / BITS_PER_UNIT),
                                     TYPE_MODE (sizetype),
                                     GEN_INT (memory_usage),
                                     TYPE_MODE (integer_type_node));
              }
          }

        /* In cases where an aligned union has an unaligned object
           as a field, we might be extracting a BLKmode value from
           an integer-mode (e.g., SImode) object.  Handle this case
           by doing the extract into an object as wide as the field
           (which we know to be the width of a basic mode), then
           storing into memory, and changing the mode to BLKmode.
           If we ultimately want the address (EXPAND_CONST_ADDRESS or
           EXPAND_INITIALIZER), then we must not copy to a temporary.  */
        if (mode1 == VOIDmode
            || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
            || (modifier != EXPAND_CONST_ADDRESS
                && modifier != EXPAND_INITIALIZER
                && ((mode1 != BLKmode && ! direct_load[(int) mode1]
                     && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
                     && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
                    /* If the field isn't aligned enough to fetch as a memref,
                       fetch it as a bit field.  */
                    || (SLOW_UNALIGNED_ACCESS
                        && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
                            || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
          {
            enum machine_mode ext_mode = mode;

            if (ext_mode == BLKmode)
              ext_mode = mode_for_size (bitsize, MODE_INT, 1);

            if (ext_mode == BLKmode)
              {
                /* In this case, BITPOS must start at a byte boundary and
                   TARGET, if specified, must be a MEM.  */
                if (GET_CODE (op0) != MEM
                    || (target != 0 && GET_CODE (target) != MEM)
                    || bitpos % BITS_PER_UNIT != 0)
                  abort ();

                op0 = change_address (op0, VOIDmode,
                                      plus_constant (XEXP (op0, 0),
                                                     bitpos / BITS_PER_UNIT));
                if (target == 0)
                  target = assign_temp (type, 0, 1, 1);

                emit_block_move (target, op0,
                                 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
                                          / BITS_PER_UNIT),
                                 1);

                return target;
              }

            op0 = validize_mem (op0);

            if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
              mark_reg_pointer (XEXP (op0, 0), alignment);

            op0 = extract_bit_field (op0, bitsize, bitpos,
                                     unsignedp, target, ext_mode, ext_mode,
                                     alignment,
                                     int_size_in_bytes (TREE_TYPE (tem)));

            /* If the result is a record type and BITSIZE is narrower than
               the mode of OP0, an integral mode, and this is a big-endian
               machine, we must put the field into the high-order bits.  */
            if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
                && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
                && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
              op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
                                  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
                                            - bitsize),
                                  op0, 1);

            if (mode == BLKmode)
              {
                rtx new = assign_stack_temp (ext_mode,
                                             bitsize / BITS_PER_UNIT, 0);

                emit_move_insn (new, op0);
                op0 = copy_rtx (new);
                PUT_MODE (op0, BLKmode);
                MEM_IN_STRUCT_P (op0) = 1;
              }

            return op0;
          }

        /* If the result is BLKmode, use that to access the object
           now as well.  */
        if (mode == BLKmode)
          mode1 = BLKmode;

        /* Get a reference to just this component.  */
        if (modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
                                                   (bitpos / BITS_PER_UNIT)));
        else
          op0 = change_address (op0, mode1,
                                plus_constant (XEXP (op0, 0),
                                               (bitpos / BITS_PER_UNIT)));
        if (GET_CODE (XEXP (op0, 0)) == REG)
          mark_reg_pointer (XEXP (op0, 0), alignment);

        MEM_IN_STRUCT_P (op0) = 1;
        MEM_VOLATILE_P (op0) |= volatilep;
        if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
            || modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_INITIALIZER)
          return op0;
        else if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        convert_move (target, op0, unsignedp);
        return target;
      }
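
      /* For example, a reference to a field declared `int a : 7' cannot
         be fetched as an ordinary memref, so it takes the
         extract_bit_field path above; a normally aligned component
         falls through to the plain change_address reference instead.  */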

    /* Intended for a reference to a buffer of a file-object in Pascal.
       But it's not certain that a special tree code will really be
       necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
        /* Pascal set IN expression.

           Algorithm:
               rlo       = set_low - (set_low%bits_per_word);
               the_word  = set [ (index - rlo)/bits_per_word ];
               bit_index = index % bits_per_word;
               bitmask   = 1 << bit_index;
               return !!(the_word & bitmask);  */

        tree set = TREE_OPERAND (exp, 0);
        tree index = TREE_OPERAND (exp, 1);
        int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
        tree set_type = TREE_TYPE (set);
        tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
        tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
        rtx index_val = expand_expr (index, 0, VOIDmode, 0);
        rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
        rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
        rtx setval = expand_expr (set, 0, VOIDmode, 0);
        rtx setaddr = XEXP (setval, 0);
        enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
        rtx rlow;
        rtx diff, quo, rem, addr, bit, result;

        preexpand_calls (exp);

        /* If the domain is empty, the answer is no.  Likewise if the index
           is constant and out of bounds.  */
        if (((TREE_CODE (set_high_bound) == INTEGER_CST
              && TREE_CODE (set_low_bound) == INTEGER_CST
              && tree_int_cst_lt (set_high_bound, set_low_bound))
             || (TREE_CODE (index) == INTEGER_CST
                 && TREE_CODE (set_low_bound) == INTEGER_CST
                 && tree_int_cst_lt (index, set_low_bound))
             || (TREE_CODE (set_high_bound) == INTEGER_CST
                 && TREE_CODE (index) == INTEGER_CST
                 && tree_int_cst_lt (set_high_bound, index))))
          return const0_rtx;

        if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        /* If we get here, we have to generate the code for both cases
           (in range and out of range).  */

        op0 = gen_label_rtx ();
        op1 = gen_label_rtx ();

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (lo_r) == CONST_INT))
          {
            emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
                           GET_MODE (index_val), iunsignedp, 0);
            emit_jump_insn (gen_blt (op1));
          }

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (hi_r) == CONST_INT))
          {
            emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
                           GET_MODE (index_val), iunsignedp, 0);
            emit_jump_insn (gen_bgt (op1));
          }

        /* Calculate the element number of bit zero in the first word
           of the set.  */
        if (GET_CODE (lo_r) == CONST_INT)
          rlow = GEN_INT (INTVAL (lo_r)
                          & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
        else
          rlow = expand_binop (index_mode, and_optab, lo_r,
                               GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
                               NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

        diff = expand_binop (index_mode, sub_optab, index_val, rlow,
                             NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

        quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
        rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);

        addr = memory_address (byte_mode,
                               expand_binop (index_mode, add_optab, diff,
                                             setaddr, NULL_RTX, iunsignedp,
                                             OPTAB_LIB_WIDEN));

        /* Extract the bit we want to examine.  */
        bit = expand_shift (RSHIFT_EXPR, byte_mode,
                            gen_rtx_MEM (byte_mode, addr),
                            make_tree (TREE_TYPE (index), rem),
                            NULL_RTX, 1);
        result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
                               GET_MODE (target) == byte_mode ? target : 0,
                               1, OPTAB_LIB_WIDEN);

        if (result != target)
          convert_move (target, result, 1);

        /* Output the code to handle the out-of-range case.  */
        emit_jump (op0);
        emit_label (op1);
        emit_move_insn (target, const0_rtx);
        emit_label (op0);
        return target;
      }
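
      /* Following the algorithm comment above, a test like `5 in s'
         for a set based at 0 locates the byte holding bit 5, shifts it
         right by 5 % BITS_PER_UNIT and masks with 1, leaving the
         membership bit in TARGET; a constant index known to be out of
         range short-circuits to const0_rtx.  */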

    case WITH_CLEANUP_EXPR:
      if (RTL_EXPR_RTL (exp) == 0)
        {
          RTL_EXPR_RTL (exp)
            = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
          expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));

          /* That's it for this cleanup.  */
          TREE_OPERAND (exp, 2) = 0;
        }
      return RTL_EXPR_RTL (exp);

    case CLEANUP_POINT_EXPR:
      {
        extern int temp_slot_level;
        /* Start a new binding layer that will keep track of all cleanup
           actions to be performed.  */
        expand_start_bindings (0);

        target_temp_slot_level = temp_slot_level;

        op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
        /* If we're going to use this value, load it up now.  */
        if (! ignore)
          op0 = force_not_mem (op0);
        preserve_temp_slots (op0);
        expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              == FUNCTION_DECL)
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
        return expand_builtin (exp, target, subtarget, tmode, ignore);

      /* If this call was expanded already by preexpand_calls,
         just return the result we got.  */
      if (CALL_EXPR_RTL (exp) != 0)
        return CALL_EXPR_RTL (exp);

      return expand_call (exp, target, ignore);

    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_CODE (type) == UNION_TYPE)
        {
          tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
          if (target == 0)
            {
              if (mode != BLKmode)
                target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
              else
                target = assign_temp (type, 0, 1, 1);
            }

          if (GET_CODE (target) == MEM)
            /* Store data into beginning of memory target.  */
            store_expr (TREE_OPERAND (exp, 0),
                        change_address (target, TYPE_MODE (valtype), 0), 0);

          else if (GET_CODE (target) == REG)
            /* Store this field into a union of the proper type.  */
            store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
                         TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
                         VOIDmode, 0, 1,
                         int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
          else
            abort ();

          /* Return the entire union.  */
          return target;
        }

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
                             ro_modifier);

          /* If the signedness of the conversion differs and OP0 is
             a promoted SUBREG, clear that indication since we now
             have to do the proper extension.  */
          if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
              && GET_CODE (op0) == SUBREG)
            SUBREG_PROMOTED_VAR_P (op0) = 0;

          return op0;
        }

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
      if (GET_MODE (op0) == mode)
        return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
        return
          convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                         op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));

      if (modifier == EXPAND_INITIALIZER)
        return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (target == 0)
        return
          convert_to_mode (mode, op0,
                           TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
        convert_move (target, op0,
                      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case PLUS_EXPR:
      /* We come here from MINUS_EXPR when the second operand is a
         constant.  */
    plus_expr:
      this_optab = add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
         something else, make sure we add the register to the constant and
         then to the other thing.  This case can occur during strength
         reduction and doing it this way will produce better code if the
         frame pointer or argument pointer is eliminated.

         fold-const.c will ensure that the constant is always in the inner
         PLUS_EXPR, so the only case we need to do anything about is if
         sp, ap, or fp is our second argument, in which case we must swap
         the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
          && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
        {
          tree t = TREE_OPERAND (exp, 1);

          TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
          TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
        }

      /* If the result is to be ptr_mode and we are adding an integer to
         something, we might be forming a constant.  So try to use
         plus_constant.  If it produces a sum and we can't accept it,
         use force_operand.  This allows P = &ARR[const] to generate
         efficient code on machines where a SYMBOL_REF is not a valid
         address.

         If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
          || mode == ptr_mode)
        {
          if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
            {
              op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
                                 EXPAND_SUM);
              op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op1 = force_operand (op1, target);
              return op1;
            }

          else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
                   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
                   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
            {
              op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                                 EXPAND_SUM);
              if (! CONSTANT_P (op0))
                {
                  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                     VOIDmode, modifier);
                  /* Don't go to both_summands if modifier
                     says it's not right to return a PLUS.  */
                  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                    goto binop2;
                  goto both_summands;
                }
              op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op0 = force_operand (op0, target);
              return op0;
            }
        }

      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != ptr_mode)
        goto binop;

      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);

    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
          && CONSTANT_P (XEXP (op0, 1)))
        {
          temp = op0;
          op0 = op1;
          op1 = temp;
        }
      /* If adding to a sum including a constant,
         associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
          && CONSTANT_P (XEXP (op1, 1)))
        {
          rtx constant_term = const0_rtx;

          temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
          if (temp != 0)
            op0 = temp;
          /* Ensure that MULT comes first if there is one.  */
          else if (GET_CODE (op0) == MULT)
            op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
          else
            op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);

          /* Let's also eliminate constants from op0 if possible.  */
          op0 = eliminate_constant_term (op0, &constant_term);

          /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
             their sum should be a constant.  Form it into OP1, since the
             result we want will then be OP0 + OP1.  */

          temp = simplify_binary_operation (PLUS, mode, constant_term,
                                            XEXP (op1, 1));
          if (temp != 0)
            op1 = temp;
          else
            op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
        }

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
        temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
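
      /* The canonical form built here puts any MULT first and the
         constant last, so an address expression such as `&arr[i + 3]'
         emerges roughly as (plus (plus (mult i size) arr) const), the
         shape the addressing-mode machinery expects.  */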

    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
         symbolic constants; here we handle the difference of two such
         constants, and all other cases when both operands are
         constant.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          && really_constant_p (TREE_OPERAND (exp, 0))
          && really_constant_p (TREE_OPERAND (exp, 1)))
        {
          rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
                                 VOIDmode, ro_modifier);
          rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                 VOIDmode, ro_modifier);

          /* If the last operand is a CONST_INT, use plus_constant of
             the negated constant.  Else make the MINUS.  */
          if (GET_CODE (op1) == CONST_INT)
            return plus_constant (op0, - INTVAL (op1));
          else
            return gen_rtx_MINUS (mode, op0, op1);
        }
      /* Convert A - const to A + (-const).  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        {
          tree negated = fold (build1 (NEGATE_EXPR, type,
                                       TREE_OPERAND (exp, 1)));

          /* Deal with the case where we can't negate the constant
             in TYPE.  */
          if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
            {
              tree newtype = signed_type (type);
              tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
              tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
              tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));

              if (! TREE_OVERFLOW (newneg))
                return expand_expr (convert (type,
                                             build (PLUS_EXPR, newtype,
                                                    newop0, newneg)),
                                    target, tmode, ro_modifier);
            }
          else
            {
              exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
              goto plus_expr;
            }
        }
      this_optab = sub_optab;
      goto binop;
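
      /* Thus `a - 5' becomes `a + (-5)' and re-enters the PLUS_EXPR
         code, so the constant-combining and address-forming logic there
         serves subtraction as well; only when the constant cannot be
         negated in TYPE does the signed_type detour above apply.  */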

    case MULT_EXPR:
      preexpand_calls (exp);
      /* If the first operand is constant, swap them.
         Thus the following special case checks need only
         check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
        {
          register tree t1 = TREE_OPERAND (exp, 0);
          TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
          TREE_OPERAND (exp, 1) = t1;
        }

      /* Attempt to return something suitable for generating an
         indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                             EXPAND_SUM);

          /* Apply distributive law if OP0 is x+c.  */
          if (GET_CODE (op0) == PLUS
              && GET_CODE (XEXP (op0, 1)) == CONST_INT)
            return gen_rtx_PLUS (mode,
                                 gen_rtx_MULT (mode, XEXP (op0, 0),
                                               GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
                                 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
                                          * INTVAL (XEXP (op0, 1))));

          if (GET_CODE (op0) != REG)
            op0 = force_operand (op0, NULL_RTX);
          if (GET_CODE (op0) != REG)
            op0 = copy_to_mode_reg (mode, op0);

          return gen_rtx_MULT (mode, op0,
                               GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
        }

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;

      /* Check for multiplying things that have been extended
         from a narrower type.  If this machine supports multiplying
         in that narrower type with a result in the desired type,
         do it that way, and avoid the explicit type-conversion.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
          && TREE_CODE (type) == INTEGER_TYPE
          && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
          && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
               && int_fits_type_p (TREE_OPERAND (exp, 1),
                                   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
               /* Don't use a widening multiply if a shift will do.  */
               && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                    > HOST_BITS_PER_WIDE_INT)
                   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
              ||
              (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
               && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   ==
                   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
               /* If both operands are extended, they must either both
                  be zero-extended or both be sign-extended.  */
               && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   ==
                   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
        {
          enum machine_mode innermode
            = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
          optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
                               ? smul_widen_optab : umul_widen_optab);
          this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
                        ? umul_widen_optab : smul_widen_optab);
          if (mode == GET_MODE_WIDER_MODE (innermode))
            {
              if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
                {
                  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     NULL_RTX, VOIDmode, 0);
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                       VOIDmode, 0);
                  else
                    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                       NULL_RTX, VOIDmode, 0);
                  goto binop2;
                }
              else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
                       && innermode == word_mode)
                {
                  rtx htem;
                  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     NULL_RTX, VOIDmode, 0);
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                       VOIDmode, 0);
                  else
                    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                       NULL_RTX, VOIDmode, 0);
                  temp = expand_binop (mode, other_optab, op0, op1, target,
                                       unsignedp, OPTAB_LIB_WIDEN);
                  htem = expand_mult_highpart_adjust (innermode,
                                                      gen_highpart (innermode, temp),
                                                      op0, op1,
                                                      gen_highpart (innermode, temp),
                                                      unsignedp);
                  emit_move_insn (gen_highpart (innermode, temp), htem);
                  return temp;
                }
            }
        }
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);
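
      /* The widening check above lets a product written at the tree
         level as `(int) sa * (int) sb', with `short' operands sa and
         sb, use a mulhisi-style widening-multiply pattern directly
         rather than extending both operands to SImode first.  */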
6430
6431 case TRUNC_DIV_EXPR:
6432 case FLOOR_DIV_EXPR:
6433 case CEIL_DIV_EXPR:
6434 case ROUND_DIV_EXPR:
6435 case EXACT_DIV_EXPR:
6436 preexpand_calls (exp);
e5e809f4 6437 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
6438 subtarget = 0;
6439 /* Possible optimization: compute the dividend with EXPAND_SUM
6440 then if the divisor is constant can optimize the case
6441 where some terms of the dividend have coeffs divisible by it. */
6442 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 6443 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6444 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6445
6446 case RDIV_EXPR:
6447 this_optab = flodiv_optab;
6448 goto binop;
6449
6450 case TRUNC_MOD_EXPR:
6451 case FLOOR_MOD_EXPR:
6452 case CEIL_MOD_EXPR:
6453 case ROUND_MOD_EXPR:
6454 preexpand_calls (exp);
e5e809f4 6455 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
6456 subtarget = 0;
6457 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 6458 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6459 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6460
6461 case FIX_ROUND_EXPR:
6462 case FIX_FLOOR_EXPR:
6463 case FIX_CEIL_EXPR:
6464 abort (); /* Not used for C. */
6465
6466 case FIX_TRUNC_EXPR:
906c4e36 6467 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6468 if (target == 0)
6469 target = gen_reg_rtx (mode);
6470 expand_fix (target, op0, unsignedp);
6471 return target;
6472
6473 case FLOAT_EXPR:
906c4e36 6474 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6475 if (target == 0)
6476 target = gen_reg_rtx (mode);
6477 /* expand_float can't figure out what to do if FROM has VOIDmode.
6478 So give it the correct mode. With -O, cse will optimize this. */
6479 if (GET_MODE (op0) == VOIDmode)
6480 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6481 op0);
6482 expand_float (target, op0,
6483 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6484 return target;
6485
6486 case NEGATE_EXPR:
5b22bee8 6487 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
bbf6f052
RK
6488 temp = expand_unop (mode, neg_optab, op0, target, 0);
6489 if (temp == 0)
6490 abort ();
6491 return temp;
6492
6493 case ABS_EXPR:
6494 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6495
2d7050fd 6496 /* Handle complex values specially. */
d6a5ac33
RK
6497 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6498 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6499 return expand_complex_abs (mode, op0, target, unsignedp);
2d7050fd 6500
bbf6f052
RK
6501 /* Unsigned abs is simply the operand. Testing here means we don't
6502 risk generating incorrect code below. */
6503 if (TREE_UNSIGNED (type))
6504 return op0;
6505
2e5ec6cf 6506 return expand_abs (mode, op0, target, unsignedp,
e5e809f4 6507 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
bbf6f052
RK
6508
6509 case MAX_EXPR:
6510 case MIN_EXPR:
6511 target = original_target;
e5e809f4 6512 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
fc155707 6513 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
d6a5ac33 6514 || GET_MODE (target) != mode
bbf6f052
RK
6515 || (GET_CODE (target) == REG
6516 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6517 target = gen_reg_rtx (mode);
906c4e36 6518 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6519 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6520
6521 /* First try to do it with a special MIN or MAX instruction.
6522 If that does not win, use a conditional jump to select the proper
6523 value. */
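 /* A sketch of the fallback path (variable names illustrative):
 for MIN (a, b) with no smin instruction we emit roughly

 target = a;
 if (target <= b) goto lab;
 target = b;
 lab:

 using the unsigned comparison when TYPE is unsigned. */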
6524 this_optab = (TREE_UNSIGNED (type)
6525 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6526 : (code == MIN_EXPR ? smin_optab : smax_optab));
6527
6528 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6529 OPTAB_WIDEN);
6530 if (temp != 0)
6531 return temp;
6532
fa2981d8
JW
6533 /* At this point, a MEM target is no longer useful; we will get better
6534 code without it. */
6535
6536 if (GET_CODE (target) == MEM)
6537 target = gen_reg_rtx (mode);
6538
ee456b1c
RK
6539 if (target != op0)
6540 emit_move_insn (target, op0);
d6a5ac33 6541
bbf6f052 6542 op0 = gen_label_rtx ();
d6a5ac33 6543
f81497d9
RS
6544 /* If this mode is an integer too wide to compare properly,
6545 compare word by word. Rely on cse to optimize constant cases. */
d6a5ac33 6546 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
bbf6f052 6547 {
f81497d9 6548 if (code == MAX_EXPR)
d6a5ac33
RK
6549 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6550 target, op1, NULL_RTX, op0);
bbf6f052 6551 else
d6a5ac33
RK
6552 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6553 op1, target, NULL_RTX, op0);
ee456b1c 6554 emit_move_insn (target, op1);
bbf6f052 6555 }
f81497d9
RS
6556 else
6557 {
6558 if (code == MAX_EXPR)
6559 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
ee456b1c
RK
6560 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6561 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
f81497d9
RS
6562 else
6563 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
ee456b1c
RK
6564 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6565 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
f81497d9 6566 if (temp == const0_rtx)
ee456b1c 6567 emit_move_insn (target, op1);
f81497d9
RS
6568 else if (temp != const_true_rtx)
6569 {
6570 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6571 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6572 else
6573 abort ();
ee456b1c 6574 emit_move_insn (target, op1);
f81497d9
RS
6575 }
6576 }
bbf6f052
RK
6577 emit_label (op0);
6578 return target;
6579
bbf6f052
RK
6580 case BIT_NOT_EXPR:
6581 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6582 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6583 if (temp == 0)
6584 abort ();
6585 return temp;
6586
6587 case FFS_EXPR:
6588 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6589 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6590 if (temp == 0)
6591 abort ();
6592 return temp;
6593
d6a5ac33
RK
6594 /* ??? Can optimize bitwise operations with one arg constant.
6595 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6596 and (a bitwise1 b) bitwise2 b (etc)
 6597 but that is probably not worthwhile. */
6598
6599 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6600 boolean values when we want in all cases to compute both of them. In
6601 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6602 as actual zero-or-1 values and then bitwise anding. In cases where
6603 there cannot be any side effects, better code would be made by
6604 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6605 how to recognize those cases. */
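 /* As an illustrative example: for `a && b' with side-effect-free
 operands, a front end may emit TRUTH_AND_EXPR, which evaluates
 both A and B to 0 or 1 and ands the bits; TRUTH_ANDIF_EXPR for
 the same source would instead branch around the evaluation of B
 whenever A is zero. */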
6606
bbf6f052
RK
6607 case TRUTH_AND_EXPR:
6608 case BIT_AND_EXPR:
6609 this_optab = and_optab;
6610 goto binop;
6611
bbf6f052
RK
6612 case TRUTH_OR_EXPR:
6613 case BIT_IOR_EXPR:
6614 this_optab = ior_optab;
6615 goto binop;
6616
874726a8 6617 case TRUTH_XOR_EXPR:
bbf6f052
RK
6618 case BIT_XOR_EXPR:
6619 this_optab = xor_optab;
6620 goto binop;
6621
6622 case LSHIFT_EXPR:
6623 case RSHIFT_EXPR:
6624 case LROTATE_EXPR:
6625 case RROTATE_EXPR:
6626 preexpand_calls (exp);
e5e809f4 6627 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
6628 subtarget = 0;
6629 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6630 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6631 unsignedp);
6632
d6a5ac33
RK
6633 /* Could determine the answer when only additive constants differ. Also,
6634 the addition of one can be handled by changing the condition. */
bbf6f052
RK
6635 case LT_EXPR:
6636 case LE_EXPR:
6637 case GT_EXPR:
6638 case GE_EXPR:
6639 case EQ_EXPR:
6640 case NE_EXPR:
6641 preexpand_calls (exp);
6642 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6643 if (temp != 0)
6644 return temp;
d6a5ac33 6645
0f41302f 6646 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
bbf6f052
RK
6647 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6648 && original_target
6649 && GET_CODE (original_target) == REG
6650 && (GET_MODE (original_target)
6651 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6652 {
d6a5ac33
RK
6653 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6654 VOIDmode, 0);
6655
bbf6f052
RK
6656 if (temp != original_target)
6657 temp = copy_to_reg (temp);
d6a5ac33 6658
bbf6f052 6659 op1 = gen_label_rtx ();
906c4e36 6660 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
bbf6f052
RK
6661 GET_MODE (temp), unsignedp, 0);
6662 emit_jump_insn (gen_beq (op1));
6663 emit_move_insn (temp, const1_rtx);
6664 emit_label (op1);
6665 return temp;
6666 }
d6a5ac33 6667
bbf6f052
RK
6668 /* If no set-flag instruction, must generate a conditional
6669 store into a temporary variable. Drop through
6670 and handle this like && and ||. */
6671
6672 case TRUTH_ANDIF_EXPR:
6673 case TRUTH_ORIF_EXPR:
e44842fe 6674 if (! ignore
e5e809f4 6675 && (target == 0 || ! safe_from_p (target, exp, 1)
e44842fe
RK
6676 /* Make sure we don't have a hard reg (such as function's return
6677 value) live across basic blocks, if not optimizing. */
6678 || (!optimize && GET_CODE (target) == REG
6679 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 6680 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
e44842fe
RK
6681
6682 if (target)
6683 emit_clr_insn (target);
6684
bbf6f052
RK
6685 op1 = gen_label_rtx ();
6686 jumpifnot (exp, op1);
e44842fe
RK
6687
6688 if (target)
6689 emit_0_to_1_insn (target);
6690
bbf6f052 6691 emit_label (op1);
e44842fe 6692 return ignore ? const0_rtx : target;
bbf6f052
RK
6693
6694 case TRUTH_NOT_EXPR:
6695 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6696 /* The parser is careful to generate TRUTH_NOT_EXPR
6697 only with operands that are always zero or one. */
906c4e36 6698 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052
RK
6699 target, 1, OPTAB_LIB_WIDEN);
6700 if (temp == 0)
6701 abort ();
6702 return temp;
6703
6704 case COMPOUND_EXPR:
6705 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6706 emit_queue ();
6707 return expand_expr (TREE_OPERAND (exp, 1),
6708 (ignore ? const0_rtx : target),
6709 VOIDmode, 0);
6710
6711 case COND_EXPR:
ac01eace
RK
6712 /* If we would have a "singleton" (see below) were it not for a
6713 conversion in each arm, bring that conversion back out. */
6714 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6715 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
6716 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
6717 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
6718 {
6719 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
6720 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
6721
6722 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
6723 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6724 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
6725 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
6726 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
6727 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6728 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
6729 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
6730 return expand_expr (build1 (NOP_EXPR, type,
6731 build (COND_EXPR, TREE_TYPE (true),
6732 TREE_OPERAND (exp, 0),
6733 true, false)),
6734 target, tmode, modifier);
6735 }
6736
bbf6f052
RK
6737 {
6738 /* Note that COND_EXPRs whose type is a structure or union
6739 are required to be constructed to contain assignments of
6740 a temporary variable, so that we can evaluate them here
6741 for side effect only. If type is void, we must do likewise. */
6742
6743 /* If an arm of the branch requires a cleanup,
6744 only that cleanup is performed. */
6745
6746 tree singleton = 0;
6747 tree binary_op = 0, unary_op = 0;
bbf6f052
RK
6748
6749 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6750 convert it to our mode, if necessary. */
6751 if (integer_onep (TREE_OPERAND (exp, 1))
6752 && integer_zerop (TREE_OPERAND (exp, 2))
6753 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6754 {
dd27116b
RK
6755 if (ignore)
6756 {
6757 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
921b3427 6758 ro_modifier);
dd27116b
RK
6759 return const0_rtx;
6760 }
6761
921b3427 6762 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
bbf6f052
RK
6763 if (GET_MODE (op0) == mode)
6764 return op0;
d6a5ac33 6765
bbf6f052
RK
6766 if (target == 0)
6767 target = gen_reg_rtx (mode);
6768 convert_move (target, op0, unsignedp);
6769 return target;
6770 }
6771
ac01eace
RK
6772 /* Check for X ? A + B : A. If we have this, we can copy A to the
6773 output and conditionally add B. Similarly for unary operations.
6774 Don't do this if X has side-effects because those side effects
6775 might affect A or B and the "?" operation is a sequence point in
6776 ANSI. (operand_equal_p tests for side effects.) */
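 /* Illustration (names hypothetical): for `x ? a + b : a' the code
 below emits roughly

 target = a;
 if (!x) goto lab;
 target = target + b;
 lab:

 instead of evaluating A down both arms. */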
bbf6f052
RK
6777
6778 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6779 && operand_equal_p (TREE_OPERAND (exp, 2),
6780 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6781 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6782 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6783 && operand_equal_p (TREE_OPERAND (exp, 1),
6784 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6785 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6786 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6787 && operand_equal_p (TREE_OPERAND (exp, 2),
6788 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6789 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6790 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6791 && operand_equal_p (TREE_OPERAND (exp, 1),
6792 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6793 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6794
01c8a7c8
RK
6795 /* If we are not to produce a result, we have no target. Otherwise,
6796 if a target was specified use it; it will not be used as an
6797 intermediate target unless it is safe. If no target, use a
6798 temporary. */
6799
6800 if (ignore)
6801 temp = 0;
6802 else if (original_target
e5e809f4 6803 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
01c8a7c8
RK
6804 || (singleton && GET_CODE (original_target) == REG
6805 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6806 && original_target == var_rtx (singleton)))
6807 && GET_MODE (original_target) == mode
7c00d1fe
RK
6808#ifdef HAVE_conditional_move
6809 && (! can_conditionally_move_p (mode)
6810 || GET_CODE (original_target) == REG
6811 || TREE_ADDRESSABLE (type))
6812#endif
01c8a7c8
RK
6813 && ! (GET_CODE (original_target) == MEM
6814 && MEM_VOLATILE_P (original_target)))
6815 temp = original_target;
6816 else if (TREE_ADDRESSABLE (type))
6817 abort ();
6818 else
6819 temp = assign_temp (type, 0, 0, 1);
6820
ac01eace
RK
6821 /* If we had X ? A + C : A, with C a constant power of 2, and we can
6822 do the test of X as a store-flag operation, do this as
6823 A + ((X != 0) << log C). Similarly for other simple binary
 6824 operators. If BRANCH_COST is low, do this only for C == 1. */
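 /* For instance, with BRANCH_COST >= 3 the expression
 `x ? a + 4 : a' (names hypothetical) is computed branch-free as

 a + ((x != 0) << 2)

 since 4 is 1 << 2. */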
dd27116b 6825 if (temp && singleton && binary_op
bbf6f052
RK
6826 && (TREE_CODE (binary_op) == PLUS_EXPR
6827 || TREE_CODE (binary_op) == MINUS_EXPR
6828 || TREE_CODE (binary_op) == BIT_IOR_EXPR
9fbd9f58 6829 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
ac01eace
RK
6830 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
6831 : integer_onep (TREE_OPERAND (binary_op, 1)))
bbf6f052
RK
6832 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6833 {
6834 rtx result;
6835 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6836 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6837 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
2d444001 6838 : xor_optab);
bbf6f052
RK
6839
6840 /* If we had X ? A : A + 1, do this as A + (X == 0).
6841
6842 We have to invert the truth value here and then put it
6843 back later if do_store_flag fails. We cannot simply copy
6844 TREE_OPERAND (exp, 0) to another variable and modify that
6845 because invert_truthvalue can modify the tree pointed to
6846 by its argument. */
6847 if (singleton == TREE_OPERAND (exp, 1))
6848 TREE_OPERAND (exp, 0)
6849 = invert_truthvalue (TREE_OPERAND (exp, 0));
6850
6851 result = do_store_flag (TREE_OPERAND (exp, 0),
e5e809f4 6852 (safe_from_p (temp, singleton, 1)
906c4e36 6853 ? temp : NULL_RTX),
bbf6f052
RK
6854 mode, BRANCH_COST <= 1);
6855
ac01eace
RK
6856 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
6857 result = expand_shift (LSHIFT_EXPR, mode, result,
6858 build_int_2 (tree_log2
6859 (TREE_OPERAND
6860 (binary_op, 1)),
6861 0),
e5e809f4 6862 (safe_from_p (temp, singleton, 1)
ac01eace
RK
6863 ? temp : NULL_RTX), 0);
6864
bbf6f052
RK
6865 if (result)
6866 {
906c4e36 6867 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6868 return expand_binop (mode, boptab, op1, result, temp,
6869 unsignedp, OPTAB_LIB_WIDEN);
6870 }
6871 else if (singleton == TREE_OPERAND (exp, 1))
6872 TREE_OPERAND (exp, 0)
6873 = invert_truthvalue (TREE_OPERAND (exp, 0));
6874 }
6875
dabf8373 6876 do_pending_stack_adjust ();
bbf6f052
RK
6877 NO_DEFER_POP;
6878 op0 = gen_label_rtx ();
6879
6880 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6881 {
6882 if (temp != 0)
6883 {
6884 /* If the target conflicts with the other operand of the
6885 binary op, we can't use it. Also, we can't use the target
6886 if it is a hard register, because evaluating the condition
6887 might clobber it. */
6888 if ((binary_op
e5e809f4 6889 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
bbf6f052
RK
6890 || (GET_CODE (temp) == REG
6891 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6892 temp = gen_reg_rtx (mode);
6893 store_expr (singleton, temp, 0);
6894 }
6895 else
906c4e36 6896 expand_expr (singleton,
2937cf87 6897 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6898 if (singleton == TREE_OPERAND (exp, 1))
6899 jumpif (TREE_OPERAND (exp, 0), op0);
6900 else
6901 jumpifnot (TREE_OPERAND (exp, 0), op0);
6902
956d6950 6903 start_cleanup_deferral ();
bbf6f052
RK
6904 if (binary_op && temp == 0)
6905 /* Just touch the other operand. */
6906 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 6907 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6908 else if (binary_op)
6909 store_expr (build (TREE_CODE (binary_op), type,
6910 make_tree (type, temp),
6911 TREE_OPERAND (binary_op, 1)),
6912 temp, 0);
6913 else
6914 store_expr (build1 (TREE_CODE (unary_op), type,
6915 make_tree (type, temp)),
6916 temp, 0);
6917 op1 = op0;
bbf6f052 6918 }
bbf6f052
RK
6919 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6920 comparison operator. If we have one of these cases, set the
6921 output to A, branch on A (cse will merge these two references),
6922 then set the output to FOO. */
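 /* E.g. for `x > 0 ? x : y' (names hypothetical) this emits

 temp = x;
 if (x > 0) goto lab;
 temp = y;
 lab:

 so the two references to X can be merged by cse. */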
6923 else if (temp
6924 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6925 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6926 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6927 TREE_OPERAND (exp, 1), 0)
e9a25f70
JL
6928 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6929 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
e5e809f4 6930 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
bbf6f052
RK
6931 {
6932 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6933 temp = gen_reg_rtx (mode);
6934 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6935 jumpif (TREE_OPERAND (exp, 0), op0);
5dab5552 6936
956d6950 6937 start_cleanup_deferral ();
bbf6f052
RK
6938 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6939 op1 = op0;
6940 }
6941 else if (temp
6942 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6943 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6944 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6945 TREE_OPERAND (exp, 2), 0)
e9a25f70
JL
6946 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6947 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
e5e809f4 6948 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
6949 {
6950 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6951 temp = gen_reg_rtx (mode);
6952 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6953 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 6954
956d6950 6955 start_cleanup_deferral ();
bbf6f052
RK
6956 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6957 op1 = op0;
6958 }
6959 else
6960 {
6961 op1 = gen_label_rtx ();
6962 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 6963
956d6950 6964 start_cleanup_deferral ();
bbf6f052
RK
6965 if (temp != 0)
6966 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6967 else
906c4e36
RK
6968 expand_expr (TREE_OPERAND (exp, 1),
6969 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
956d6950 6970 end_cleanup_deferral ();
bbf6f052
RK
6971 emit_queue ();
6972 emit_jump_insn (gen_jump (op1));
6973 emit_barrier ();
6974 emit_label (op0);
956d6950 6975 start_cleanup_deferral ();
bbf6f052
RK
6976 if (temp != 0)
6977 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6978 else
906c4e36
RK
6979 expand_expr (TREE_OPERAND (exp, 2),
6980 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6981 }
6982
956d6950 6983 end_cleanup_deferral ();
bbf6f052
RK
6984
6985 emit_queue ();
6986 emit_label (op1);
6987 OK_DEFER_POP;
5dab5552 6988
bbf6f052
RK
6989 return temp;
6990 }
6991
6992 case TARGET_EXPR:
6993 {
6994 /* Something needs to be initialized, but we didn't know
6995 where that thing was when building the tree. For example,
6996 it could be the return value of a function, or a parameter
 6997 to a function which is laid out on the stack, or a temporary
6998 variable which must be passed by reference.
6999
7000 We guarantee that the expression will either be constructed
7001 or copied into our original target. */
7002
7003 tree slot = TREE_OPERAND (exp, 0);
2a888d4c 7004 tree cleanups = NULL_TREE;
5c062816 7005 tree exp1;
bbf6f052
RK
7006
7007 if (TREE_CODE (slot) != VAR_DECL)
7008 abort ();
7009
9c51f375
RK
7010 if (! ignore)
7011 target = original_target;
7012
bbf6f052
RK
7013 if (target == 0)
7014 {
7015 if (DECL_RTL (slot) != 0)
ac993f4f
MS
7016 {
7017 target = DECL_RTL (slot);
5c062816 7018 /* If we have already expanded the slot, don't do
ac993f4f 7019 it again. (mrs) */
5c062816
MS
7020 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7021 return target;
ac993f4f 7022 }
bbf6f052
RK
7023 else
7024 {
e9a25f70 7025 target = assign_temp (type, 2, 0, 1);
bbf6f052
RK
7026 /* All temp slots at this level must not conflict. */
7027 preserve_temp_slots (target);
7028 DECL_RTL (slot) = target;
e9a25f70
JL
7029 if (TREE_ADDRESSABLE (slot))
7030 {
7031 TREE_ADDRESSABLE (slot) = 0;
7032 mark_addressable (slot);
7033 }
bbf6f052 7034
e287fd6e
RK
7035 /* Since SLOT is not known to the called function
7036 to belong to its stack frame, we must build an explicit
7037 cleanup. This case occurs when we must build up a reference
7038 to pass the reference as an argument. In this case,
7039 it is very likely that such a reference need not be
7040 built here. */
7041
7042 if (TREE_OPERAND (exp, 2) == 0)
7043 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
2a888d4c 7044 cleanups = TREE_OPERAND (exp, 2);
e287fd6e 7045 }
bbf6f052
RK
7046 }
7047 else
7048 {
 7049 /* This case does occur when expanding a parameter which
7050 needs to be constructed on the stack. The target
7051 is the actual stack address that we want to initialize.
7052 The function we call will perform the cleanup in this case. */
7053
8c042b47
RS
7054 /* If we have already assigned it space, use that space,
 7055 not the target that we were passed in, as our target
7056 parameter is only a hint. */
7057 if (DECL_RTL (slot) != 0)
7058 {
7059 target = DECL_RTL (slot);
 7060 /* If we have already expanded the slot, don't do
 7061 it again. (mrs) */
7062 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7063 return target;
7064 }
21002281
JW
7065 else
7066 {
7067 DECL_RTL (slot) = target;
7068 /* If we must have an addressable slot, then make sure that
7069 the RTL that we just stored in slot is OK. */
7070 if (TREE_ADDRESSABLE (slot))
7071 {
7072 TREE_ADDRESSABLE (slot) = 0;
7073 mark_addressable (slot);
7074 }
7075 }
bbf6f052
RK
7076 }
7077
4847c938 7078 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
5c062816
MS
7079 /* Mark it as expanded. */
7080 TREE_OPERAND (exp, 1) = NULL_TREE;
7081
e5e809f4 7082 TREE_USED (slot) = 1;
41531e5b 7083 store_expr (exp1, target, 0);
61d6b1cc 7084
e976b8b2 7085 expand_decl_cleanup (NULL_TREE, cleanups);
61d6b1cc 7086
41531e5b 7087 return target;
bbf6f052
RK
7088 }
7089
7090 case INIT_EXPR:
7091 {
7092 tree lhs = TREE_OPERAND (exp, 0);
7093 tree rhs = TREE_OPERAND (exp, 1);
7094 tree noncopied_parts = 0;
7095 tree lhs_type = TREE_TYPE (lhs);
7096
7097 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7098 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7099 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7100 TYPE_NONCOPIED_PARTS (lhs_type));
7101 while (noncopied_parts != 0)
7102 {
7103 expand_assignment (TREE_VALUE (noncopied_parts),
7104 TREE_PURPOSE (noncopied_parts), 0, 0);
7105 noncopied_parts = TREE_CHAIN (noncopied_parts);
7106 }
7107 return temp;
7108 }
7109
7110 case MODIFY_EXPR:
7111 {
7112 /* If lhs is complex, expand calls in rhs before computing it.
7113 That's so we don't compute a pointer and save it over a call.
7114 If lhs is simple, compute it first so we can give it as a
7115 target if the rhs is just a call. This avoids an extra temp and copy
7116 and that prevents a partial-subsumption which makes bad code.
7117 Actually we could treat component_ref's of vars like vars. */
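 /* Concretely (hypothetical examples): for `x = f ()' with X a
 simple variable, X itself is handed to the call expansion as the
 target, so F's value is stored with no intermediate temporary;
 for `a[i] = f ()' the call is pre-expanded so the address of
 A[I] need not be kept live across it. */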
7118
7119 tree lhs = TREE_OPERAND (exp, 0);
7120 tree rhs = TREE_OPERAND (exp, 1);
7121 tree noncopied_parts = 0;
7122 tree lhs_type = TREE_TYPE (lhs);
7123
7124 temp = 0;
7125
7126 if (TREE_CODE (lhs) != VAR_DECL
7127 && TREE_CODE (lhs) != RESULT_DECL
b60334e8
RK
7128 && TREE_CODE (lhs) != PARM_DECL
7129 && ! (TREE_CODE (lhs) == INDIRECT_REF
7130 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
bbf6f052
RK
7131 preexpand_calls (exp);
7132
7133 /* Check for |= or &= of a bitfield of size one into another bitfield
7134 of size 1. In this case, (unless we need the result of the
7135 assignment) we can do this more efficiently with a
7136 test followed by an assignment, if necessary.
7137
7138 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7139 things change so we do, this code should be enhanced to
7140 support it. */
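 /* For example, `x.a |= y.b' with two one-bit fields (names
 hypothetical) becomes the equivalent of

 if (y.b) x.a = 1;

 a jump and a store, rather than a read-modify-write of X.A. */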
7141 if (ignore
7142 && TREE_CODE (lhs) == COMPONENT_REF
7143 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7144 || TREE_CODE (rhs) == BIT_AND_EXPR)
7145 && TREE_OPERAND (rhs, 0) == lhs
7146 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7147 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7148 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7149 {
7150 rtx label = gen_label_rtx ();
7151
7152 do_jump (TREE_OPERAND (rhs, 1),
7153 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7154 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7155 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7156 (TREE_CODE (rhs) == BIT_IOR_EXPR
7157 ? integer_one_node
7158 : integer_zero_node)),
7159 0, 0);
e7c33f54 7160 do_pending_stack_adjust ();
bbf6f052
RK
7161 emit_label (label);
7162 return const0_rtx;
7163 }
7164
7165 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7166 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7167 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7168 TYPE_NONCOPIED_PARTS (lhs_type));
7169
7170 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7171 while (noncopied_parts != 0)
7172 {
7173 expand_assignment (TREE_PURPOSE (noncopied_parts),
7174 TREE_VALUE (noncopied_parts), 0, 0);
7175 noncopied_parts = TREE_CHAIN (noncopied_parts);
7176 }
7177 return temp;
7178 }
7179
7180 case PREINCREMENT_EXPR:
7181 case PREDECREMENT_EXPR:
7b8b9722 7182 return expand_increment (exp, 0, ignore);
bbf6f052
RK
7183
7184 case POSTINCREMENT_EXPR:
7185 case POSTDECREMENT_EXPR:
7186 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 7187 return expand_increment (exp, ! ignore, ignore);
bbf6f052
RK
7188
7189 case ADDR_EXPR:
987c71d9 7190 /* If nonzero, TEMP will be set to the address of something that might
0f41302f 7191 be a MEM corresponding to a stack slot. */
987c71d9
RK
7192 temp = 0;
7193
bbf6f052
RK
7194 /* Are we taking the address of a nested function? */
7195 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9 7196 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
e5e809f4
JL
7197 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7198 && ! TREE_STATIC (exp))
bbf6f052
RK
7199 {
7200 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7201 op0 = force_operand (op0, target);
7202 }
682ba3a6
RK
7203 /* If we are taking the address of something erroneous, just
7204 return a zero. */
7205 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7206 return const0_rtx;
bbf6f052
RK
7207 else
7208 {
e287fd6e
RK
7209 /* We make sure to pass const0_rtx down if we came in with
7210 ignore set, to avoid doing the cleanups twice for something. */
7211 op0 = expand_expr (TREE_OPERAND (exp, 0),
7212 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
7213 (modifier == EXPAND_INITIALIZER
7214 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 7215
119af78a
RK
7216 /* If we are going to ignore the result, OP0 will have been set
7217 to const0_rtx, so just return it. Don't get confused and
7218 think we are taking the address of the constant. */
7219 if (ignore)
7220 return op0;
7221
3539e816
MS
7222 op0 = protect_from_queue (op0, 0);
7223
896102d0
RK
7224 /* We would like the object in memory. If it is a constant,
7225 we can have it be statically allocated into memory. For
682ba3a6 7226 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
896102d0
RK
7227 memory and store the value into it. */
7228
7229 if (CONSTANT_P (op0))
7230 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7231 op0);
987c71d9 7232 else if (GET_CODE (op0) == MEM)
af5b53ed
RK
7233 {
7234 mark_temp_addr_taken (op0);
7235 temp = XEXP (op0, 0);
7236 }
896102d0 7237
682ba3a6
RK
7238 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7239 || GET_CODE (op0) == CONCAT)
896102d0
RK
7240 {
 7241 /* If this object is in a register, it must not
0f41302f 7242 be BLKmode. */
896102d0 7243 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
06089a8b 7244 rtx memloc = assign_temp (inner_type, 1, 1, 1);
896102d0 7245
7a0b7b9a 7246 mark_temp_addr_taken (memloc);
896102d0
RK
7247 emit_move_insn (memloc, op0);
7248 op0 = memloc;
7249 }
7250
bbf6f052
RK
7251 if (GET_CODE (op0) != MEM)
7252 abort ();
7253
7254 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88f63c77
RK
7255 {
7256 temp = XEXP (op0, 0);
7257#ifdef POINTERS_EXTEND_UNSIGNED
7258 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7259 && mode == ptr_mode)
9fcfcce7 7260 temp = convert_memory_address (ptr_mode, temp);
88f63c77
RK
7261#endif
7262 return temp;
7263 }
987c71d9 7264
bbf6f052
RK
7265 op0 = force_operand (XEXP (op0, 0), target);
7266 }
987c71d9 7267
bbf6f052 7268 if (flag_force_addr && GET_CODE (op0) != REG)
987c71d9
RK
7269 op0 = force_reg (Pmode, op0);
7270
dc6d66b3
RK
7271 if (GET_CODE (op0) == REG
7272 && ! REG_USERVAR_P (op0))
7273 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
987c71d9
RK
7274
7275 /* If we might have had a temp slot, add an equivalent address
7276 for it. */
7277 if (temp != 0)
7278 update_temp_slot_address (temp, op0);
7279
88f63c77
RK
7280#ifdef POINTERS_EXTEND_UNSIGNED
7281 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7282 && mode == ptr_mode)
9fcfcce7 7283 op0 = convert_memory_address (ptr_mode, op0);
88f63c77
RK
7284#endif
7285
bbf6f052
RK
7286 return op0;
7287
7288 case ENTRY_VALUE_EXPR:
7289 abort ();
7290
7308a047
RS
7291 /* COMPLEX type for Extended Pascal & Fortran */
7292 case COMPLEX_EXPR:
7293 {
7294 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 7295 rtx insns;
7308a047
RS
7296
7297 /* Get the rtx code of the operands. */
7298 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7299 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7300
7301 if (! target)
7302 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7303
6551fa4d 7304 start_sequence ();
7308a047
RS
7305
7306 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
7307 emit_move_insn (gen_realpart (mode, target), op0);
7308 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 7309
6551fa4d
JW
7310 insns = get_insns ();
7311 end_sequence ();
7312
7308a047 7313 /* Complex construction should appear as a single unit. */
6551fa4d
JW
7314 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7315 each with a separate pseudo as destination.
7316 It's not correct for flow to treat them as a unit. */
6d6e61ce 7317 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
7318 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7319 else
7320 emit_insns (insns);
7308a047
RS
7321
7322 return target;
7323 }
7324
7325 case REALPART_EXPR:
2d7050fd
RS
7326 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7327 return gen_realpart (mode, op0);
7308a047
RS
7328
7329 case IMAGPART_EXPR:
2d7050fd
RS
7330 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7331 return gen_imagpart (mode, op0);
7308a047
RS
7332
7333 case CONJ_EXPR:
7334 {
62acb978 7335 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 7336 rtx imag_t;
6551fa4d 7337 rtx insns;
7308a047
RS
7338
7339 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7340
7341 if (! target)
d6a5ac33 7342 target = gen_reg_rtx (mode);
7308a047 7343
6551fa4d 7344 start_sequence ();
7308a047
RS
7345
7346 /* Store the realpart and the negated imagpart to target. */
62acb978
RK
7347 emit_move_insn (gen_realpart (partmode, target),
7348 gen_realpart (partmode, op0));
7308a047 7349
62acb978
RK
7350 imag_t = gen_imagpart (partmode, target);
7351 temp = expand_unop (partmode, neg_optab,
7352 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
7353 if (temp != imag_t)
7354 emit_move_insn (imag_t, temp);
7355
6551fa4d
JW
7356 insns = get_insns ();
7357 end_sequence ();
7358
d6a5ac33
RK
 7359 /* Conjugate should appear as a single unit.
7360 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
7361 each with a separate pseudo as destination.
7362 It's not correct for flow to treat them as a unit. */
6d6e61ce 7363 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
7364 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7365 else
7366 emit_insns (insns);
7308a047
RS
7367
7368 return target;
7369 }
7370
e976b8b2
MS
7371 case TRY_CATCH_EXPR:
7372 {
7373 tree handler = TREE_OPERAND (exp, 1);
7374
7375 expand_eh_region_start ();
7376
7377 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7378
7379 expand_eh_region_end (handler);
7380
7381 return op0;
7382 }
7383
7384 case POPDCC_EXPR:
7385 {
7386 rtx dcc = get_dynamic_cleanup_chain ();
38a448ca 7387 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
e976b8b2
MS
7388 return const0_rtx;
7389 }
7390
7391 case POPDHC_EXPR:
7392 {
7393 rtx dhc = get_dynamic_handler_chain ();
38a448ca 7394 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
e976b8b2
MS
7395 return const0_rtx;
7396 }
7397
bbf6f052 7398 case ERROR_MARK:
66538193
RS
7399 op0 = CONST0_RTX (tmode);
7400 if (op0 != 0)
7401 return op0;
bbf6f052
RK
7402 return const0_rtx;
7403
7404 default:
90764a87 7405 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
bbf6f052
RK
7406 }
7407
7408 /* Here to do an ordinary binary operator, generating an instruction
7409 from the optab already placed in `this_optab'. */
7410 binop:
7411 preexpand_calls (exp);
e5e809f4 7412 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7413 subtarget = 0;
7414 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7415 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7416 binop2:
7417 temp = expand_binop (mode, this_optab, op0, op1, target,
7418 unsignedp, OPTAB_LIB_WIDEN);
7419 if (temp == 0)
7420 abort ();
7421 return temp;
7422}
bbf6f052 7423
bbf6f052 7424
b93a436e
JL
7425\f
7426/* Return the alignment in bits of EXP, a pointer valued expression.
7427 But don't return more than MAX_ALIGN no matter what.
7428 The alignment returned is, by default, the alignment of the thing that
7429 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7430
7431 Otherwise, look at the expression to see if we can do better, i.e., if the
7432 expression is actually pointing at an object whose alignment is tighter. */
0f41302f 7433
b93a436e
JL
7434static int
7435get_pointer_alignment (exp, max_align)
7436 tree exp;
7437 unsigned max_align;
bbf6f052 7438{
b93a436e
JL
7439 unsigned align, inner;
7440
7441 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7442 return 0;
7443
7444 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7445 align = MIN (align, max_align);
7446
7447 while (1)
bbf6f052 7448 {
b93a436e 7449 switch (TREE_CODE (exp))
bbf6f052 7450 {
b93a436e
JL
7451 case NOP_EXPR:
7452 case CONVERT_EXPR:
7453 case NON_LVALUE_EXPR:
7454 exp = TREE_OPERAND (exp, 0);
7455 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7456 return align;
7457 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7458 align = MIN (inner, max_align);
7459 break;
7460
7461 case PLUS_EXPR:
7462 /* If sum of pointer + int, restrict our maximum alignment to that
7463 imposed by the integer. If not, we can't do any better than
7464 ALIGN. */
7465 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7466 return align;
7467
7468 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7469 & (max_align - 1))
7470 != 0)
7471 max_align >>= 1;
7472
7473 exp = TREE_OPERAND (exp, 0);
7474 break;
7475
7476 case ADDR_EXPR:
7477 /* See what we are pointing at and look at its alignment. */
7478 exp = TREE_OPERAND (exp, 0);
7479 if (TREE_CODE (exp) == FUNCTION_DECL)
7480 align = FUNCTION_BOUNDARY;
7481 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7482 align = DECL_ALIGN (exp);
7483#ifdef CONSTANT_ALIGNMENT
7484 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7485 align = CONSTANT_ALIGNMENT (exp, align);
c02bd5d9 7486#endif
b93a436e 7487 return MIN (align, max_align);
c02bd5d9 7488
b93a436e
JL
7489 default:
7490 return align;
7491 }
7492 }
7493}
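
/* Usage sketch (hypothetical names): for an argument written
   `(char *) &i' with I an int, the NOP_EXPR case looks through the
   cast and the ADDR_EXPR case then returns DECL_ALIGN (i) --
   typically 32 -- capped at MAX_ALIGN, rather than the 8 suggested
   by the `char *' type. */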
7494\f
7495/* Return the tree node and offset if a given argument corresponds to
7496 a string constant. */
7497
7498static tree
7499string_constant (arg, ptr_offset)
7500 tree arg;
7501 tree *ptr_offset;
7502{
7503 STRIP_NOPS (arg);
7504
7505 if (TREE_CODE (arg) == ADDR_EXPR
7506 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7507 {
7508 *ptr_offset = integer_zero_node;
7509 return TREE_OPERAND (arg, 0);
7510 }
7511 else if (TREE_CODE (arg) == PLUS_EXPR)
7512 {
7513 tree arg0 = TREE_OPERAND (arg, 0);
7514 tree arg1 = TREE_OPERAND (arg, 1);
7515
7516 STRIP_NOPS (arg0);
7517 STRIP_NOPS (arg1);
7518
7519 if (TREE_CODE (arg0) == ADDR_EXPR
7520 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
bbf6f052 7521 {
b93a436e
JL
7522 *ptr_offset = arg1;
7523 return TREE_OPERAND (arg0, 0);
bbf6f052 7524 }
b93a436e
JL
7525 else if (TREE_CODE (arg1) == ADDR_EXPR
7526 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
bbf6f052 7527 {
b93a436e
JL
7528 *ptr_offset = arg0;
7529 return TREE_OPERAND (arg1, 0);
bbf6f052 7530 }
b93a436e 7531 }
ca695ac9 7532
b93a436e
JL
7533 return 0;
7534}
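
/* Thus for an argument of the form `"hello" + 2' (as built for
   `&"hello"[2]') this returns the STRING_CST for "hello" with
   *PTR_OFFSET set to 2; for a bare `&"hello"' the offset is
   integer_zero_node. */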
ca695ac9 7535
b93a436e
JL
7536/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
 7537 answer, because the string could contain a zero byte in the middle;
 7538 TREE_STRING_LENGTH is the size of the character array, not the string.
ca695ac9 7539
b93a436e
JL
7540 Unfortunately, string_constant can't access the values of const char
7541 arrays with initializers, so neither can we do so here. */
e87b4f3f 7542
b93a436e
JL
7543static tree
7544c_strlen (src)
7545 tree src;
7546{
7547 tree offset_node;
7548 int offset, max;
7549 char *ptr;
e7c33f54 7550
b93a436e
JL
7551 src = string_constant (src, &offset_node);
7552 if (src == 0)
7553 return 0;
7554 max = TREE_STRING_LENGTH (src);
7555 ptr = TREE_STRING_POINTER (src);
7556 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7557 {
7558 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7559 compute the offset to the following null if we don't know where to
7560 start searching for it. */
7561 int i;
7562 for (i = 0; i < max; i++)
7563 if (ptr[i] == 0)
7564 return 0;
7565 /* We don't know the starting offset, but we do know that the string
7566 has no internal zero bytes. We can assume that the offset falls
7567 within the bounds of the string; otherwise, the programmer deserves
7568 what he gets. Subtract the offset from the length of the string,
7569 and return that. */
7570 /* This would perhaps not be valid if we were dealing with named
7571 arrays in addition to literal string constants. */
7572 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7573 }
e7c33f54 7574
b93a436e
JL
7575 /* We have a known offset into the string. Start searching there for
7576 a null character. */
7577 if (offset_node == 0)
7578 offset = 0;
7579 else
7580 {
7581 /* Did we get a long long offset? If so, punt. */
7582 if (TREE_INT_CST_HIGH (offset_node) != 0)
7583 return 0;
7584 offset = TREE_INT_CST_LOW (offset_node);
7585 }
7586 /* If the offset is known to be out of bounds, warn, and call strlen at
7587 runtime. */
7588 if (offset < 0 || offset > max)
7589 {
7590 warning ("offset outside bounds of constant string");
7591 return 0;
7592 }
7593 /* Use strlen to search for the first zero byte. Since any strings
7594 constructed with build_string will have nulls appended, we win even
7595 if we get handed something like (char[4])"abcd".
e7c33f54 7596
b93a436e
JL
7597 Since OFFSET is our starting index into the string, no further
7598 calculation is needed. */
7599 return size_int (strlen (ptr + offset));
7600}
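
/* Sketch of the behavior: c_strlen of `"foo"' is 3; of
   `"foo\0bar" + 4' it is 3, found by running strlen from the known
   offset; and of `"foo\0bar" + i' with I non-constant it is 0,
   since the internal zero byte makes the answer depend on the
   run-time offset. */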
1bbddf11 7601
b93a436e
JL
7602rtx
7603expand_builtin_return_addr (fndecl_code, count, tem)
7604 enum built_in_function fndecl_code;
7605 int count;
7606 rtx tem;
7607{
7608 int i;
e7c33f54 7609
b93a436e
JL
7610 /* Some machines need special handling before we can access
7611 arbitrary frames. For example, on the sparc, we must first flush
7612 all register windows to the stack. */
7613#ifdef SETUP_FRAME_ADDRESSES
7614 if (count > 0)
7615 SETUP_FRAME_ADDRESSES ();
7616#endif
e87b4f3f 7617
b93a436e
JL
7618 /* On the sparc, the return address is not in the frame, it is in a
7619 register. There is no way to access it off of the current frame
7620 pointer, but it can be accessed off the previous frame pointer by
7621 reading the value from the register window save area. */
7622#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7623 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
7624 count--;
7625#endif
60bac6ea 7626
b93a436e
JL
7627 /* Scan back COUNT frames to the specified frame. */
7628 for (i = 0; i < count; i++)
7629 {
7630 /* Assume the dynamic chain pointer is in the word that the
7631 frame address points to, unless otherwise specified. */
7632#ifdef DYNAMIC_CHAIN_ADDRESS
7633 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7634#endif
7635 tem = memory_address (Pmode, tem);
7636 tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
7637 }
ca695ac9 7638
b93a436e
JL
7639 /* For __builtin_frame_address, return what we've got. */
7640 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
7641 return tem;
e9a25f70 7642
b93a436e
JL
7643 /* For __builtin_return_address, Get the return address from that
7644 frame. */
7645#ifdef RETURN_ADDR_RTX
7646 tem = RETURN_ADDR_RTX (count, tem);
7647#else
7648 tem = memory_address (Pmode,
7649 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7650 tem = gen_rtx_MEM (Pmode, tem);
7651#endif
7652 return tem;
7653}
e9a25f70 7654
b93a436e
JL
7655/* __builtin_setjmp is passed a pointer to an array of five words (not
7656 all will be used on all machines). It operates similarly to the C
7657 library function of the same name, but is more efficient. Much of
7658 the code below (and for longjmp) is copied from the handling of
7659 non-local gotos.
ca695ac9 7660
b93a436e
JL
7661 NOTE: This is intended for use by GNAT and the exception handling
7662 scheme in the compiler and will only work in the method used by
7663 them. */
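
/* A sketch of the buffer layout built below, in Pmode words (the
   stack save area is machine-dependent and may need several):

	word 0:  frame pointer (virtual_stack_vars_rtx)
	word 1:  address of the receiver label LAB1
	word 2+: stack save area filled by emit_stack_save  */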
e9a25f70 7664
b93a436e 7665rtx
6fd1c67b 7666expand_builtin_setjmp (buf_addr, target, first_label, next_label)
b93a436e
JL
7667 rtx buf_addr;
7668 rtx target;
6fd1c67b 7669 rtx first_label, next_label;
b93a436e 7670{
6fd1c67b 7671 rtx lab1 = gen_label_rtx ();
b93a436e
JL
7672 enum machine_mode sa_mode = Pmode, value_mode;
7673 rtx stack_save;
e9a25f70 7674
b93a436e 7675 value_mode = TYPE_MODE (integer_type_node);
ca695ac9 7676
b93a436e
JL
7677#ifdef POINTERS_EXTEND_UNSIGNED
7678 buf_addr = convert_memory_address (Pmode, buf_addr);
7679#endif
d7f21d63 7680
b93a436e 7681 buf_addr = force_reg (Pmode, buf_addr);
d7f21d63 7682
b93a436e
JL
7683 if (target == 0 || GET_CODE (target) != REG
7684 || REGNO (target) < FIRST_PSEUDO_REGISTER)
7685 target = gen_reg_rtx (value_mode);
d7f21d63 7686
b93a436e 7687 emit_queue ();
d7f21d63 7688
b93a436e
JL
7689 /* We store the frame pointer and the address of lab1 in the buffer
7690 and use the rest of it for the stack save area, which is
7691 machine-dependent. */
7692 emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
7693 virtual_stack_vars_rtx);
6fd1c67b
RH
7694 emit_move_insn (validize_mem
7695 (gen_rtx_MEM (Pmode,
b93a436e
JL
7696 plus_constant (buf_addr,
7697 GET_MODE_SIZE (Pmode)))),
6fd1c67b 7698 gen_rtx_LABEL_REF (Pmode, lab1));
d7f21d63 7699
b93a436e
JL
7700#ifdef HAVE_save_stack_nonlocal
7701 if (HAVE_save_stack_nonlocal)
7702 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
7703#endif
6c174fc0 7704
b93a436e
JL
7705 stack_save = gen_rtx_MEM (sa_mode,
7706 plus_constant (buf_addr,
7707 2 * GET_MODE_SIZE (Pmode)));
7708 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
e9a25f70 7709
6fd1c67b
RH
7710 /* If there is further processing to do, do it. */
7711#ifdef HAVE_builtin_setjmp_setup
7712 if (HAVE_builtin_setjmp_setup)
7713 emit_insn (gen_builtin_setjmp_setup (buf_addr));
b93a436e 7714#endif
d7f21d63 7715
6fd1c67b 7716 /* Set TARGET to zero and branch to the first-time-through label. */
b93a436e 7717 emit_move_insn (target, const0_rtx);
6fd1c67b 7718 emit_jump_insn (gen_jump (first_label));
b93a436e
JL
7719 emit_barrier ();
7720 emit_label (lab1);
d7f21d63 7721
6fd1c67b
RH
7722 /* Tell flow about the strange goings on. */
7723 current_function_has_nonlocal_label = 1;
7724
7725 /* Clobber the FP when we get here, so we have to make sure it's
7726 marked as used by this function. */
b93a436e 7727 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
e9a25f70 7728
b93a436e
JL
7729 /* Mark the static chain as clobbered here so life information
7730 doesn't get messed up for it. */
7731 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
d7f21d63 7732
b93a436e
JL
7733 /* Now put in the code to restore the frame pointer, and argument
7734 pointer, if needed. The code below is from expand_end_bindings
7735 in stmt.c; see detailed documentation there. */
7736#ifdef HAVE_nonlocal_goto
7737 if (! HAVE_nonlocal_goto)
7738#endif
7739 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
ca695ac9 7740
b93a436e
JL
7741#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
7742 if (fixed_regs[ARG_POINTER_REGNUM])
7743 {
7744#ifdef ELIMINABLE_REGS
081f5e7e 7745 int i;
b93a436e 7746 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
ca695ac9 7747
b93a436e
JL
7748 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
7749 if (elim_regs[i].from == ARG_POINTER_REGNUM
7750 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
7751 break;
ca695ac9 7752
b93a436e
JL
7753 if (i == sizeof elim_regs / sizeof elim_regs [0])
7754#endif
7755 {
7756 /* Now restore our arg pointer from the address at which it
7757 was saved in our stack frame.
 7758 If there hasn't been space allocated for it yet, make
7759 some now. */
7760 if (arg_pointer_save_area == 0)
7761 arg_pointer_save_area
7762 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
7763 emit_move_insn (virtual_incoming_args_rtx,
7764 copy_to_reg (arg_pointer_save_area));
7765 }
7766 }
7767#endif
ca695ac9 7768
6fd1c67b
RH
7769#ifdef HAVE_builtin_setjmp_receiver
7770 if (HAVE_builtin_setjmp_receiver)
7771 emit_insn (gen_builtin_setjmp_receiver (lab1));
7772 else
7773#endif
b93a436e 7774#ifdef HAVE_nonlocal_goto_receiver
6fd1c67b
RH
7775 if (HAVE_nonlocal_goto_receiver)
7776 emit_insn (gen_nonlocal_goto_receiver ());
7777 else
b93a436e 7778#endif
081f5e7e
KG
7779 {
7780 ; /* Nothing */
7781 }
6fd1c67b
RH
7782
7783 /* Set TARGET, and branch to the next-time-through label. */
7784 emit_move_insn (target, gen_lowpart (GET_MODE (target), static_chain_rtx));
7785 emit_jump_insn (gen_jump (next_label));
7786 emit_barrier ();
ca695ac9 7787
6fd1c67b
RH
7788 return target;
7789}
ca695ac9 7790
6fd1c67b
RH
7791void
7792expand_builtin_longjmp (buf_addr, value)
7793 rtx buf_addr, value;
7794{
7795 rtx fp, lab, stack;
7796 enum machine_mode sa_mode;
ca695ac9 7797
6fd1c67b
RH
7798#ifdef POINTERS_EXTEND_UNSIGNED
7799 buf_addr = convert_memory_address (Pmode, buf_addr);
b93a436e 7800#endif
6fd1c67b
RH
7801 buf_addr = force_reg (Pmode, buf_addr);
7802
7803 /* The value sent by longjmp is not allowed to be zero. Force it
7804 to one if so. */
7805 if (GET_CODE (value) == CONST_INT)
7806 {
7807 if (INTVAL (value) == 0)
7808 value = const1_rtx;
7809 }
7810 else
7811 {
7812 lab = gen_label_rtx ();
7813
7814 emit_cmp_insn (value, const0_rtx, NE, NULL_RTX, GET_MODE (value), 0, 0);
7815 emit_jump_insn (gen_bne (lab));
7816 emit_move_insn (value, const1_rtx);
7817 emit_label (lab);
7818 }
7819
7820 /* Make sure the value is in the right mode to be copied to the chain. */
7821 if (GET_MODE (value) != VOIDmode)
7822 value = gen_lowpart (GET_MODE (static_chain_rtx), value);
7823
7824#ifdef HAVE_builtin_longjmp
7825 if (HAVE_builtin_longjmp)
7826 {
7827 /* Copy the "return value" to the static chain reg. */
7828 emit_move_insn (static_chain_rtx, value);
7829 emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
7830 emit_insn (gen_builtin_longjmp (buf_addr));
7831 }
7832 else
b93a436e 7833#endif
6fd1c67b
RH
7834 {
7835 fp = gen_rtx_MEM (Pmode, buf_addr);
7836 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
7837 GET_MODE_SIZE (Pmode)));
e9a25f70 7838
6fd1c67b
RH
7839#ifdef HAVE_save_stack_nonlocal
7840 sa_mode = (HAVE_save_stack_nonlocal
7841 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
7842 : Pmode);
7843#else
7844 sa_mode = Pmode;
b93a436e 7845#endif
ca695ac9 7846
6fd1c67b
RH
7847 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
7848 2 * GET_MODE_SIZE (Pmode)));
7849
7850 /* Pick up FP, label, and SP from the block and jump. This code is
7851 from expand_goto in stmt.c; see there for detailed comments. */
7852#if HAVE_nonlocal_goto
7853 if (HAVE_nonlocal_goto)
7854 emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
7855 else
b93a436e 7856#endif
6fd1c67b
RH
7857 {
7858 lab = copy_to_reg (lab);
60bac6ea 7859
6fd1c67b
RH
7860 /* Copy the "return value" to the static chain reg. */
7861 emit_move_insn (static_chain_rtx, value);
7862
7863 emit_move_insn (hard_frame_pointer_rtx, fp);
7864 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
7865
7866 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
7867 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
7868 emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
7869 emit_indirect_jump (lab);
7870 }
7871 }
b93a436e 7872}
60bac6ea 7873
b93a436e
JL
7874\f
7875/* Expand an expression EXP that calls a built-in function,
7876 with result going to TARGET if that's convenient
7877 (and in mode MODE if that's convenient).
7878 SUBTARGET may be used as the target for computing one of EXP's operands.
7879 IGNORE is nonzero if the value is to be ignored. */
60bac6ea 7880
b93a436e
JL
7881#define CALLED_AS_BUILT_IN(NODE) \
7882 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
60bac6ea 7883
b93a436e
JL
7884static rtx
7885expand_builtin (exp, target, subtarget, mode, ignore)
7886 tree exp;
7887 rtx target;
7888 rtx subtarget;
7889 enum machine_mode mode;
7890 int ignore;
7891{
7892 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7893 tree arglist = TREE_OPERAND (exp, 1);
7894 rtx op0;
7895 rtx lab1, insns;
7896 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7897 optab builtin_optab;
60bac6ea 7898
b93a436e
JL
7899 switch (DECL_FUNCTION_CODE (fndecl))
7900 {
7901 case BUILT_IN_ABS:
7902 case BUILT_IN_LABS:
7903 case BUILT_IN_FABS:
7904 /* build_function_call changes these into ABS_EXPR. */
7905 abort ();
4ed67205 7906
b93a436e
JL
7907 case BUILT_IN_SIN:
7908 case BUILT_IN_COS:
7909 /* Treat these like sqrt, but only if the user asks for them. */
7910 if (! flag_fast_math)
7911 break;
7912 case BUILT_IN_FSQRT:
7913 /* If not optimizing, call the library function. */
7914 if (! optimize)
7915 break;
4ed67205 7916
b93a436e
JL
7917 if (arglist == 0
7918 /* Arg could be wrong type if user redeclared this fcn wrong. */
7919 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
4ed67205
RK
7920 break;
7921
b93a436e
JL
7922 /* Stabilize and compute the argument. */
7923 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7924 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7925 {
7926 exp = copy_node (exp);
7927 arglist = copy_node (arglist);
7928 TREE_OPERAND (exp, 1) = arglist;
7929 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7930 }
7931 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
b089937a 7932
b93a436e
JL
7933 /* Make a suitable register to place result in. */
7934 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7565a035 7935
b93a436e
JL
7936 emit_queue ();
7937 start_sequence ();
7565a035 7938
b93a436e
JL
7939 switch (DECL_FUNCTION_CODE (fndecl))
7940 {
7941 case BUILT_IN_SIN:
7942 builtin_optab = sin_optab; break;
7943 case BUILT_IN_COS:
7944 builtin_optab = cos_optab; break;
7945 case BUILT_IN_FSQRT:
7946 builtin_optab = sqrt_optab; break;
7947 default:
7948 abort ();
7949 }
4ed67205 7950
b93a436e
JL
7951 /* Compute into TARGET.
7952 Set TARGET to wherever the result comes back. */
7953 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7954 builtin_optab, op0, target, 0);
7955
7956 /* If we were unable to expand via the builtin, stop the
7957 sequence (without outputting the insns) and break, causing
38e01259 7958 a call to the library function. */
b93a436e 7959 if (target == 0)
4ed67205 7960 {
b93a436e
JL
7961 end_sequence ();
7962 break;
7963 }
4ed67205 7964
b93a436e
JL
7965 /* Check the results by default. But if flag_fast_math is turned on,
7966 then assume sqrt will always be called with valid arguments. */
4ed67205 7967
b93a436e
JL
7968 if (! flag_fast_math)
7969 {
7970 /* Don't define the builtin FP instructions
7971 if your machine is not IEEE. */
7972 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
7973 abort ();
4ed67205 7974
b93a436e 7975 lab1 = gen_label_rtx ();
ca55abae 7976
b93a436e
JL
7977 /* Test the result; if it is NaN, set errno=EDOM because
7978 the argument was not in the domain. */
7979 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
7980 emit_jump_insn (gen_beq (lab1));
7981
7982#ifdef TARGET_EDOM
7983 {
7984#ifdef GEN_ERRNO_RTX
7985 rtx errno_rtx = GEN_ERRNO_RTX;
7986#else
7987 rtx errno_rtx
7988 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
7989#endif
e87b4f3f 7990
b93a436e
JL
7991 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
7992 }
7993#else
7994 /* We can't set errno=EDOM directly; let the library call do it.
7995 Pop the arguments right away in case the call gets deleted. */
7996 NO_DEFER_POP;
7997 expand_call (exp, target, 0);
7998 OK_DEFER_POP;
7999#endif
e7c33f54 8000
b93a436e
JL
8001 emit_label (lab1);
8002 }
0006469d 8003
b93a436e
JL
8004 /* Output the entire sequence. */
8005 insns = get_insns ();
8006 end_sequence ();
8007 emit_insns (insns);
8008
8009 return target;
0006469d 8010
b93a436e
JL
8011 case BUILT_IN_FMOD:
8012 break;
0006469d 8013
b93a436e
JL
8014 /* __builtin_apply_args returns block of memory allocated on
8015 the stack into which is stored the arg pointer, structure
8016 value address, static chain, and all the registers that might
8017 possibly be used in performing a function call. The code is
8018 moved to the start of the function so the incoming values are
8019 saved. */
8020 case BUILT_IN_APPLY_ARGS:
8021 /* Don't do __builtin_apply_args more than once in a function.
8022 Save the result of the first call and reuse it. */
8023 if (apply_args_value != 0)
8024 return apply_args_value;
8025 {
8026 /* When this function is called, it means that registers must be
8027 saved on entry to this function. So we migrate the
8028 call to the first insn of this function. */
8029 rtx temp;
8030 rtx seq;
0006469d 8031
b93a436e
JL
8032 start_sequence ();
8033 temp = expand_builtin_apply_args ();
8034 seq = get_insns ();
8035 end_sequence ();
0006469d 8036
b93a436e 8037 apply_args_value = temp;
0006469d 8038
b93a436e
JL
8039 /* Put the sequence after the NOTE that starts the function.
8040 If this is inside a SEQUENCE, make the outer-level insn
8041 chain current, so the code is placed at the start of the
8042 function. */
8043 push_topmost_sequence ();
8044 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8045 pop_topmost_sequence ();
8046 return temp;
8047 }
0006469d 8048
b93a436e
JL
8049 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8050 FUNCTION with a copy of the parameters described by
8051 ARGUMENTS, and ARGSIZE. It returns a block of memory
8052 allocated on the stack into which is stored all the registers
8053 that might possibly be used for returning the result of a
8054 function. ARGUMENTS is the value returned by
8055 __builtin_apply_args. ARGSIZE is the number of bytes of
8056 arguments that must be copied. ??? How should this value be
8057 computed? We'll also need a safe worst case value for varargs
8058 functions. */
8059 case BUILT_IN_APPLY:
8060 if (arglist == 0
8061 /* Arg could be non-pointer if user redeclared this fcn wrong. */
e5e809f4 8062 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
b93a436e
JL
8063 || TREE_CHAIN (arglist) == 0
8064 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8065 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8066 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8067 return const0_rtx;
8068 else
8069 {
8070 int i;
8071 tree t;
8072 rtx ops[3];
0006469d 8073
b93a436e
JL
8074 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8075 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
bbf6f052 8076
b93a436e
JL
8077 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8078 }
bbf6f052 8079
b93a436e
JL
8080 /* __builtin_return (RESULT) causes the function to return the
8081	 value described by RESULT. RESULT is the address of the block of
8082 memory returned by __builtin_apply. */
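      /* Completing the sketch above __builtin_apply: a forwarder hands the
         result block back so its own caller sees the applied function's
         return value:

             __builtin_return (result);
         */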
8083 case BUILT_IN_RETURN:
8084 if (arglist
8085 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8086 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8087 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8088 NULL_RTX, VOIDmode, 0));
8089 return const0_rtx;
ca695ac9 8090
b93a436e
JL
8091 case BUILT_IN_SAVEREGS:
8092 /* Don't do __builtin_saveregs more than once in a function.
8093 Save the result of the first call and reuse it. */
8094 if (saveregs_value != 0)
8095 return saveregs_value;
8096 {
8097 /* When this function is called, it means that registers must be
8098 saved on entry to this function. So we migrate the
8099 call to the first insn of this function. */
8100 rtx temp;
8101 rtx seq;
ca695ac9 8102
b93a436e
JL
8103 /* Now really call the function. `expand_call' does not call
8104 expand_builtin, so there is no danger of infinite recursion here. */
8105 start_sequence ();
ca695ac9 8106
b93a436e
JL
8107#ifdef EXPAND_BUILTIN_SAVEREGS
8108 /* Do whatever the machine needs done in this case. */
8109 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8110#else
8111 /* The register where the function returns its value
8112 is likely to have something else in it, such as an argument.
8113 So preserve that register around the call. */
ca695ac9 8114
b93a436e
JL
8115 if (value_mode != VOIDmode)
8116 {
8117 rtx valreg = hard_libcall_value (value_mode);
8118 rtx saved_valreg = gen_reg_rtx (value_mode);
ca695ac9 8119
b93a436e
JL
8120 emit_move_insn (saved_valreg, valreg);
8121 temp = expand_call (exp, target, ignore);
8122 emit_move_insn (valreg, saved_valreg);
ca695ac9
JB
8123 }
8124 else
b93a436e
JL
8125 /* Generate the call, putting the value in a pseudo. */
8126 temp = expand_call (exp, target, ignore);
8127#endif
bbf6f052 8128
b93a436e
JL
8129 seq = get_insns ();
8130 end_sequence ();
bbf6f052 8131
b93a436e 8132 saveregs_value = temp;
bbf6f052 8133
b93a436e
JL
8134 /* Put the sequence after the NOTE that starts the function.
8135 If this is inside a SEQUENCE, make the outer-level insn
8136 chain current, so the code is placed at the start of the
8137 function. */
8138 push_topmost_sequence ();
8139 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8140 pop_topmost_sequence ();
8141 return temp;
8142 }
bbf6f052 8143
b93a436e
JL
8144 /* __builtin_args_info (N) returns word N of the arg space info
8145 for the current function. The number and meanings of words
8146	 are controlled by the definition of CUMULATIVE_ARGS. */
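      /* Conceptually the builtin indexes CUMULATIVE_ARGS viewed as an
         array of ints; a hypothetical use in target support code:

             int first_word = __builtin_args_info (0);
         */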
8147 case BUILT_IN_ARGS_INFO:
8148 {
8149 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
b93a436e 8150 int *word_ptr = (int *) &current_function_args_info;
381127e8
RL
8151#if 0
8152	 /* These are used by the code below that is #if 0'ed away. */
8153 int i;
b93a436e 8154 tree type, elts, result;
381127e8 8155#endif
bbf6f052 8156
b93a436e
JL
8157 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8158 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8159 __FILE__, __LINE__);
bbf6f052 8160
b93a436e
JL
8161 if (arglist != 0)
8162 {
8163 tree arg = TREE_VALUE (arglist);
8164 if (TREE_CODE (arg) != INTEGER_CST)
8165 error ("argument of `__builtin_args_info' must be constant");
8166 else
8167 {
8168 int wordnum = TREE_INT_CST_LOW (arg);
bbf6f052 8169
b93a436e
JL
8170 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8171 error ("argument of `__builtin_args_info' out of range");
8172 else
8173 return GEN_INT (word_ptr[wordnum]);
8174 }
bbf6f052
RK
8175 }
8176 else
b93a436e 8177 error ("missing argument in `__builtin_args_info'");
bbf6f052 8178
b93a436e 8179 return const0_rtx;
bbf6f052 8180
b93a436e
JL
8181#if 0
8182 for (i = 0; i < nwords; i++)
8183 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
bbf6f052 8184
b93a436e
JL
8185 type = build_array_type (integer_type_node,
8186 build_index_type (build_int_2 (nwords, 0)));
8187 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8188 TREE_CONSTANT (result) = 1;
8189 TREE_STATIC (result) = 1;
8190 result = build (INDIRECT_REF, build_pointer_type (type), result);
8191 TREE_CONSTANT (result) = 1;
8192 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8193#endif
8194 }
8195
8196 /* Return the address of the first anonymous stack arg. */
8197 case BUILT_IN_NEXT_ARG:
ca695ac9 8198 {
b93a436e
JL
8199 tree fntype = TREE_TYPE (current_function_decl);
8200
8201 if ((TYPE_ARG_TYPES (fntype) == 0
8202 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8203 == void_type_node))
8204 && ! current_function_varargs)
8205 {
8206 error ("`va_start' used in function with fixed args");
8207 return const0_rtx;
8208 }
8209
8210 if (arglist)
8211 {
8212 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8213 tree arg = TREE_VALUE (arglist);
8214
8215 /* Strip off all nops for the sake of the comparison. This
8216 is not quite the same as STRIP_NOPS. It does more.
8217	 We must also strip off INDIRECT_REF for C++ reference
8218 parameters. */
8219 while (TREE_CODE (arg) == NOP_EXPR
8220 || TREE_CODE (arg) == CONVERT_EXPR
8221 || TREE_CODE (arg) == NON_LVALUE_EXPR
8222 || TREE_CODE (arg) == INDIRECT_REF)
8223 arg = TREE_OPERAND (arg, 0);
8224 if (arg != last_parm)
8225 warning ("second parameter of `va_start' not last named argument");
8226 }
8227 else if (! current_function_varargs)
8228 /* Evidently an out of date version of <stdarg.h>; can't validate
8229 va_start's second argument, but can still work as intended. */
8230 warning ("`__builtin_next_arg' called without an argument");
bbf6f052
RK
8231 }
8232
b93a436e
JL
8233 return expand_binop (Pmode, add_optab,
8234 current_function_internal_arg_pointer,
8235 current_function_arg_offset_rtx,
8236 NULL_RTX, 0, OPTAB_LIB_WIDEN);
ca695ac9 8237
b93a436e
JL
8238 case BUILT_IN_CLASSIFY_TYPE:
8239 if (arglist != 0)
8240 {
8241 tree type = TREE_TYPE (TREE_VALUE (arglist));
8242 enum tree_code code = TREE_CODE (type);
8243 if (code == VOID_TYPE)
8244 return GEN_INT (void_type_class);
8245 if (code == INTEGER_TYPE)
8246 return GEN_INT (integer_type_class);
8247 if (code == CHAR_TYPE)
8248 return GEN_INT (char_type_class);
8249 if (code == ENUMERAL_TYPE)
8250 return GEN_INT (enumeral_type_class);
8251 if (code == BOOLEAN_TYPE)
8252 return GEN_INT (boolean_type_class);
8253 if (code == POINTER_TYPE)
8254 return GEN_INT (pointer_type_class);
8255 if (code == REFERENCE_TYPE)
8256 return GEN_INT (reference_type_class);
8257 if (code == OFFSET_TYPE)
8258 return GEN_INT (offset_type_class);
8259 if (code == REAL_TYPE)
8260 return GEN_INT (real_type_class);
8261 if (code == COMPLEX_TYPE)
8262 return GEN_INT (complex_type_class);
8263 if (code == FUNCTION_TYPE)
8264 return GEN_INT (function_type_class);
8265 if (code == METHOD_TYPE)
8266 return GEN_INT (method_type_class);
8267 if (code == RECORD_TYPE)
8268 return GEN_INT (record_type_class);
8269 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8270 return GEN_INT (union_type_class);
8271 if (code == ARRAY_TYPE)
8272 {
8273 if (TYPE_STRING_FLAG (type))
8274 return GEN_INT (string_type_class);
8275 else
8276 return GEN_INT (array_type_class);
8277 }
8278 if (code == SET_TYPE)
8279 return GEN_INT (set_type_class);
8280 if (code == FILE_TYPE)
8281 return GEN_INT (file_type_class);
8282 if (code == LANG_TYPE)
8283 return GEN_INT (lang_type_class);
8284 }
8285 return GEN_INT (no_type_class);
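      /* The net effect is a compile-time map from an expression's type to
         a small class code, e.g. (illustrative, using the enumerators
         tested above):

             __builtin_classify_type (0)          => integer_type_class
             __builtin_classify_type (0.0)        => real_type_class
             __builtin_classify_type ((char *) 0) => pointer_type_class
         */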
ca695ac9 8286
b93a436e
JL
8287 case BUILT_IN_CONSTANT_P:
8288 if (arglist == 0)
8289 return const0_rtx;
8290 else
8291 {
8292 tree arg = TREE_VALUE (arglist);
ca695ac9 8293
b93a436e
JL
8294 STRIP_NOPS (arg);
8295 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8296 || (TREE_CODE (arg) == ADDR_EXPR
8297 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8298 ? const1_rtx : const0_rtx);
8299 }
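      /* Note how narrow this test is: only literal constants and addresses
         of string constants fold to 1; anything an optimizer might later
         prove constant still folds to 0 here.  E.g.:

             __builtin_constant_p (42)   => 1
             __builtin_constant_p ("x")  => 1
             __builtin_constant_p (var)  => 0
         */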
ca695ac9 8300
b93a436e
JL
8301 case BUILT_IN_FRAME_ADDRESS:
8302 /* The argument must be a nonnegative integer constant.
8303 It counts the number of frames to scan up the stack.
8304 The value is the address of that frame. */
8305 case BUILT_IN_RETURN_ADDRESS:
8306 /* The argument must be a nonnegative integer constant.
8307 It counts the number of frames to scan up the stack.
8308 The value is the return address saved in that frame. */
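      /* Since the argument must be a literal, typical (hypothetical) user
         code looks like:

             void *caller = __builtin_return_address (0);

         which yields the return address of the current frame.  */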
8309 if (arglist == 0)
8310 /* Warning about missing arg was already issued. */
8311 return const0_rtx;
8312 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
8313 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8314 {
8315 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8316 error ("invalid arg to `__builtin_frame_address'");
8317 else
8318 error ("invalid arg to `__builtin_return_address'");
8319 return const0_rtx;
8320 }
8321 else
8322 {
8323 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8324 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8325 hard_frame_pointer_rtx);
ee33823f 8326
b93a436e
JL
8327 /* Some ports cannot access arbitrary stack frames. */
8328 if (tem == NULL)
8329 {
8330 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8331 warning ("unsupported arg to `__builtin_frame_address'");
8332 else
8333 warning ("unsupported arg to `__builtin_return_address'");
8334 return const0_rtx;
8335 }
ee33823f 8336
b93a436e
JL
8337 /* For __builtin_frame_address, return what we've got. */
8338 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8339 return tem;
ee33823f 8340
b93a436e
JL
8341 if (GET_CODE (tem) != REG)
8342 tem = copy_to_reg (tem);
8343 return tem;
8344 }
ee33823f 8345
b93a436e
JL
8346	 /* Returns the address of the area where the structure value is
8347	 returned, or 0 if there is none. */
8348 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8349 if (arglist != 0
8350 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8351 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
8352 return const0_rtx;
8353 else
8354 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
ee33823f 8355
b93a436e
JL
8356 case BUILT_IN_ALLOCA:
8357 if (arglist == 0
8358 /* Arg could be non-integer if user redeclared this fcn wrong. */
8359 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8360 break;
bbf6f052 8361
b93a436e
JL
8362 /* Compute the argument. */
8363 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
bbf6f052 8364
b93a436e
JL
8365 /* Allocate the desired space. */
8366 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
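      /* The result is a stack adjustment in the caller's own frame, so in
         hypothetical user code such as

             char *buf = __builtin_alloca (n);

         the storage is reclaimed automatically on function exit.  */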
ca695ac9 8367
b93a436e
JL
8368 case BUILT_IN_FFS:
8369 /* If not optimizing, call the library function. */
8370 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8371 break;
ca695ac9 8372
b93a436e
JL
8373 if (arglist == 0
8374 /* Arg could be non-integer if user redeclared this fcn wrong. */
8375 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8376 break;
ca695ac9 8377
b93a436e
JL
8378 /* Compute the argument. */
8379 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8380 /* Compute ffs, into TARGET if possible.
8381 Set TARGET to wherever the result comes back. */
8382 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8383 ffs_optab, op0, target, 1);
8384 if (target == 0)
8385 abort ();
8386 return target;
bbf6f052 8387
b93a436e
JL
8388 case BUILT_IN_STRLEN:
8389 /* If not optimizing, call the library function. */
8390 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8391 break;
bbf6f052 8392
b93a436e
JL
8393 if (arglist == 0
8394 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8395 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8396 break;
8397 else
8398 {
8399 tree src = TREE_VALUE (arglist);
8400 tree len = c_strlen (src);
bbf6f052 8401
b93a436e
JL
8402 int align
8403 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
46b68a37 8404
b93a436e
JL
8405 rtx result, src_rtx, char_rtx;
8406 enum machine_mode insn_mode = value_mode, char_mode;
8407 enum insn_code icode;
46b68a37 8408
b93a436e
JL
8409 /* If the length is known, just return it. */
8410 if (len != 0)
8411 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
956d6950 8412
b93a436e
JL
8413 /* If SRC is not a pointer type, don't do this operation inline. */
8414 if (align == 0)
8415 break;
bbf6f052 8416
b93a436e 8417 /* Call a function if we can't compute strlen in the right mode. */
bbf6f052 8418
b93a436e
JL
8419 while (insn_mode != VOIDmode)
8420 {
8421 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8422 if (icode != CODE_FOR_nothing)
8423 break;
ca695ac9 8424
b93a436e
JL
8425 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8426 }
8427 if (insn_mode == VOIDmode)
8428 break;
ca695ac9 8429
b93a436e
JL
8430 /* Make a place to write the result of the instruction. */
8431 result = target;
8432 if (! (result != 0
8433 && GET_CODE (result) == REG
8434 && GET_MODE (result) == insn_mode
8435 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8436 result = gen_reg_rtx (insn_mode);
ca695ac9 8437
b93a436e 8438 /* Make sure the operands are acceptable to the predicates. */
ca695ac9 8439
b93a436e
JL
8440 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8441 result = gen_reg_rtx (insn_mode);
8442 src_rtx = memory_address (BLKmode,
8443 expand_expr (src, NULL_RTX, ptr_mode,
8444 EXPAND_NORMAL));
bbf6f052 8445
b93a436e
JL
8446 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8447 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
bbf6f052 8448
b93a436e
JL
8449 /* Check the string is readable and has an end. */
8450 if (flag_check_memory_usage)
8451 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
8452 src_rtx, ptr_mode,
8453 GEN_INT (MEMORY_USE_RO),
8454 TYPE_MODE (integer_type_node));
bbf6f052 8455
b93a436e
JL
8456 char_rtx = const0_rtx;
8457 char_mode = insn_operand_mode[(int)icode][2];
8458 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8459 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
bbf6f052 8460
b93a436e
JL
8461 emit_insn (GEN_FCN (icode) (result,
8462 gen_rtx_MEM (BLKmode, src_rtx),
8463 char_rtx, GEN_INT (align)));
bbf6f052 8464
b93a436e
JL
8465 /* Return the value in the proper mode for this function. */
8466 if (GET_MODE (result) == value_mode)
8467 return result;
8468 else if (target != 0)
8469 {
8470 convert_move (target, result, 0);
8471 return target;
8472 }
8473 else
8474 return convert_to_mode (value_mode, result, 0);
8475 }
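      /* The c_strlen check above gives constant folding: a call such as
         strlen ("abc") never reaches the strlen insn or the library; it is
         expanded directly as the constant 3.  */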
bbf6f052 8476
b93a436e
JL
8477 case BUILT_IN_STRCPY:
8478 /* If not optimizing, call the library function. */
8479 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8480 break;
bbf6f052 8481
b93a436e
JL
8482 if (arglist == 0
8483 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8484 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8485 || TREE_CHAIN (arglist) == 0
8486 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8487 break;
8488 else
8489 {
8490 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
bbf6f052 8491
b93a436e
JL
8492 if (len == 0)
8493 break;
bbf6f052 8494
b93a436e 8495 len = size_binop (PLUS_EXPR, len, integer_one_node);
6d100794 8496
b93a436e
JL
8497 chainon (arglist, build_tree_list (NULL_TREE, len));
8498 }
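      /* So a strcpy whose source length is known becomes a memcpy with one
         extra byte for the terminator: strcpy (d, "abc") drops into the
         BUILT_IN_MEMCPY case below as if it were memcpy (d, "abc", 4).  */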
6d100794 8499
b93a436e
JL
8500 /* Drops in. */
8501 case BUILT_IN_MEMCPY:
8502 /* If not optimizing, call the library function. */
8503 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8504 break;
e7c33f54 8505
b93a436e
JL
8506 if (arglist == 0
8507 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8508 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8509 || TREE_CHAIN (arglist) == 0
8510 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8511 != POINTER_TYPE)
8512 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8513 || (TREE_CODE (TREE_TYPE (TREE_VALUE
8514 (TREE_CHAIN (TREE_CHAIN (arglist)))))
8515 != INTEGER_TYPE))
8516 break;
8517 else
8518 {
8519 tree dest = TREE_VALUE (arglist);
8520 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8521 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8522 tree type;
e7c33f54 8523
b93a436e
JL
8524 int src_align
8525 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8526 int dest_align
8527 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8528 rtx dest_rtx, dest_mem, src_mem, src_rtx, dest_addr, len_rtx;
e7c33f54 8529
b93a436e
JL
8530 /* If either SRC or DEST is not a pointer type, don't do
8531 this operation in-line. */
8532 if (src_align == 0 || dest_align == 0)
8533 {
8534 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8535 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8536 break;
8537 }
e7c33f54 8538
b93a436e
JL
8539 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8540 dest_mem = gen_rtx_MEM (BLKmode,
8541 memory_address (BLKmode, dest_rtx));
8542 /* There could be a void* cast on top of the object. */
8543 while (TREE_CODE (dest) == NOP_EXPR)
8544 dest = TREE_OPERAND (dest, 0);
8545 type = TREE_TYPE (TREE_TYPE (dest));
8546 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8547 src_rtx = expand_expr (src, NULL_RTX, ptr_mode, EXPAND_SUM);
8548 src_mem = gen_rtx_MEM (BLKmode,
8549 memory_address (BLKmode, src_rtx));
8550 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
e7c33f54 8551
b93a436e
JL
8552	 /* Just copy the access rights of SRC to DEST. */
8553 if (flag_check_memory_usage)
8554 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
8555 dest_rtx, ptr_mode,
8556 src_rtx, ptr_mode,
8557 len_rtx, TYPE_MODE (sizetype));
e7c33f54 8558
b93a436e
JL
8559 /* There could be a void* cast on top of the object. */
8560 while (TREE_CODE (src) == NOP_EXPR)
8561 src = TREE_OPERAND (src, 0);
8562 type = TREE_TYPE (TREE_TYPE (src));
8563 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
e7c33f54 8564
b93a436e
JL
8565 /* Copy word part most expediently. */
8566 dest_addr
8567 = emit_block_move (dest_mem, src_mem, len_rtx,
8568 MIN (src_align, dest_align));
e7c33f54 8569
b93a436e
JL
8570 if (dest_addr == 0)
8571 dest_addr = force_operand (dest_rtx, NULL_RTX);
e7c33f54 8572
b93a436e
JL
8573 return dest_addr;
8574 }
e7c33f54 8575
b93a436e
JL
8576 case BUILT_IN_MEMSET:
8577 /* If not optimizing, call the library function. */
8578 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8579 break;
e7c33f54 8580
b93a436e
JL
8581 if (arglist == 0
8582 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8583 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8584 || TREE_CHAIN (arglist) == 0
8585 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8586 != INTEGER_TYPE)
8587 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8588 || (INTEGER_TYPE
8589 != (TREE_CODE (TREE_TYPE
8590 (TREE_VALUE
8591 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
8592 break;
8593 else
8594 {
8595 tree dest = TREE_VALUE (arglist);
8596 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8597 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8598 tree type;
e7c33f54 8599
b93a436e
JL
8600 int dest_align
8601 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8602 rtx dest_rtx, dest_mem, dest_addr, len_rtx;
e7c33f54 8603
b93a436e
JL
8604 /* If DEST is not a pointer type, don't do this
8605 operation in-line. */
8606 if (dest_align == 0)
8607 break;
bbf6f052 8608
bf931ec8
JW
8609 /* If the arguments have side-effects, then we can only evaluate
8610 them at most once. The following code evaluates them twice if
8611 they are not constants because we break out to expand_call
8612 in that case. They can't be constants if they have side-effects
8613 so we can check for that first. Alternatively, we could call
8614 save_expr to make multiple evaluation safe. */
8615 if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
8616 break;
8617
b93a436e
JL
8618 /* If VAL is not 0, don't do this operation in-line. */
8619 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8620 break;
bbf6f052 8621
b93a436e
JL
8622 /* If LEN does not expand to a constant, don't do this
8623 operation in-line. */
8624 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
8625 if (GET_CODE (len_rtx) != CONST_INT)
8626 break;
bbf6f052 8627
b93a436e
JL
8628 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8629 dest_mem = gen_rtx_MEM (BLKmode,
8630 memory_address (BLKmode, dest_rtx));
8631
8632	 /* Just check that DEST is writable and mark it as readable. */
8633 if (flag_check_memory_usage)
8634 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8635 dest_rtx, ptr_mode,
8636 len_rtx, TYPE_MODE (sizetype),
8637 GEN_INT (MEMORY_USE_WO),
8638 TYPE_MODE (integer_type_node));
bbf6f052 8639
b93a436e
JL
8640 /* There could be a void* cast on top of the object. */
8641 while (TREE_CODE (dest) == NOP_EXPR)
8642 dest = TREE_OPERAND (dest, 0);
87d1ea79
JC
8643
8644 if (TREE_CODE (dest) == ADDR_EXPR)
8645 /* If this is the address of an object, check whether the
8646 object is an array. */
8647 type = TREE_TYPE (TREE_OPERAND (dest, 0));
8648 else
8649 type = TREE_TYPE (TREE_TYPE (dest));
b93a436e 8650 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
bbf6f052 8651
b93a436e 8652 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
bbf6f052 8653
b93a436e
JL
8654 if (dest_addr == 0)
8655 dest_addr = force_operand (dest_rtx, NULL_RTX);
bbf6f052 8656
b93a436e
JL
8657 return dest_addr;
8658 }
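      /* Only the zeroing idiom is inlined here: a call such as
         memset (buf, 0, 16) with known alignment becomes a clear_storage
         block clear; a nonzero fill value falls back to the library.  */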
bbf6f052 8659
b93a436e
JL
8660/* These comparison functions need an instruction that returns an actual
8661 index. An ordinary compare that just sets the condition codes
8662 is not enough. */
8663#ifdef HAVE_cmpstrsi
8664 case BUILT_IN_STRCMP:
8665 /* If not optimizing, call the library function. */
8666 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8667 break;
bbf6f052 8668
b93a436e
JL
8669 /* If we need to check memory accesses, call the library function. */
8670 if (flag_check_memory_usage)
8671 break;
bbf6f052 8672
b93a436e
JL
8673 if (arglist == 0
8674 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8675 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8676 || TREE_CHAIN (arglist) == 0
8677 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8678 break;
8679 else if (!HAVE_cmpstrsi)
8680 break;
8681 {
8682 tree arg1 = TREE_VALUE (arglist);
8683 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
b93a436e 8684 tree len, len2;
a97f5a86 8685
b93a436e
JL
8686 len = c_strlen (arg1);
8687 if (len)
8688 len = size_binop (PLUS_EXPR, integer_one_node, len);
8689 len2 = c_strlen (arg2);
8690 if (len2)
8691 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
e9cdf6e4 8692
b93a436e
JL
8693 /* If we don't have a constant length for the first, use the length
8694 of the second, if we know it. We don't require a constant for
8695 this case; some cost analysis could be done if both are available
8696 but neither is constant. For now, assume they're equally cheap.
e9cdf6e4 8697
b93a436e
JL
8698 If both strings have constant lengths, use the smaller. This
8699 could arise if optimization results in strcpy being called with
8700 two fixed strings, or if the code was machine-generated. We should
8701 add some code to the `memcmp' handler below to deal with such
8702 situations, someday. */
8703 if (!len || TREE_CODE (len) != INTEGER_CST)
8704 {
8705 if (len2)
8706 len = len2;
8707 else if (len == 0)
8708 break;
8709 }
8710 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8711 {
8712 if (tree_int_cst_lt (len2, len))
8713 len = len2;
8714 }
bbf6f052 8715
b93a436e
JL
8716 chainon (arglist, build_tree_list (NULL_TREE, len));
8717 }
bbf6f052 8718
b93a436e
JL
8719 /* Drops in. */
8720 case BUILT_IN_MEMCMP:
8721 /* If not optimizing, call the library function. */
8722 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8723 break;
bbf6f052 8724
b93a436e
JL
8725 /* If we need to check memory accesses, call the library function. */
8726 if (flag_check_memory_usage)
8727 break;
bbf6f052 8728
b93a436e
JL
8729 if (arglist == 0
8730 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8731 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8732 || TREE_CHAIN (arglist) == 0
8733 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8734 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8735 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8736 break;
8737 else if (!HAVE_cmpstrsi)
8738 break;
8739 {
8740 tree arg1 = TREE_VALUE (arglist);
8741 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8742 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8743 rtx result;
0842a179 8744
b93a436e
JL
8745 int arg1_align
8746 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8747 int arg2_align
8748 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8749 enum machine_mode insn_mode
8750 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
0842a179 8751
b93a436e
JL
8752 /* If we don't have POINTER_TYPE, call the function. */
8753 if (arg1_align == 0 || arg2_align == 0)
8754 {
8755 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8756 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8757 break;
8758 }
bbf6f052 8759
b93a436e
JL
8760 /* Make a place to write the result of the instruction. */
8761 result = target;
8762 if (! (result != 0
8763 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8764 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8765 result = gen_reg_rtx (insn_mode);
bbf6f052 8766
b93a436e
JL
8767 emit_insn (gen_cmpstrsi (result,
8768 gen_rtx_MEM (BLKmode,
8769 expand_expr (arg1, NULL_RTX,
8770 ptr_mode,
8771 EXPAND_NORMAL)),
8772 gen_rtx_MEM (BLKmode,
8773 expand_expr (arg2, NULL_RTX,
8774 ptr_mode,
8775 EXPAND_NORMAL)),
8776 expand_expr (len, NULL_RTX, VOIDmode, 0),
8777 GEN_INT (MIN (arg1_align, arg2_align))));
bbf6f052 8778
b93a436e
JL
8779 /* Return the value in the proper mode for this function. */
8780 mode = TYPE_MODE (TREE_TYPE (exp));
8781 if (GET_MODE (result) == mode)
8782 return result;
8783 else if (target != 0)
8784 {
8785 convert_move (target, result, 0);
8786 return target;
8787 }
8788 else
8789 return convert_to_mode (mode, result, 0);
8790 }
8791#else
8792 case BUILT_IN_STRCMP:
8793 case BUILT_IN_MEMCMP:
8794 break;
8795#endif
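      /* As with strcpy above, a strcmp with a known-length operand is
         reduced to a bounded compare when the target has a cmpstrsi
         pattern: strcmp (s, "abc") is compared over 4 bytes, terminator
         included.  */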
bbf6f052 8796
b93a436e
JL
8797 case BUILT_IN_SETJMP:
8798 if (arglist == 0
8799 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8800 break;
6fd1c67b
RH
8801 else
8802 {
8803 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8804 VOIDmode, 0);
8805 rtx lab = gen_label_rtx ();
8806 rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
8807 emit_label (lab);
8808 return ret;
8809 }
bbf6f052 8810
6fd1c67b
RH
8811 /* __builtin_longjmp is passed a pointer to an array of five words.
8812 It's similar to the C library longjmp function but works with
8813 __builtin_setjmp above. */
b93a436e
JL
8814 case BUILT_IN_LONGJMP:
8815 if (arglist == 0 || TREE_CHAIN (arglist) == 0
8816 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8817 break;
b93a436e 8818 else
b93a436e 8819 {
6fd1c67b
RH
8820 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8821 VOIDmode, 0);
8822 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
8823 const0_rtx, VOIDmode, 0);
8824 expand_builtin_longjmp (buf_addr, value);
8825 return const0_rtx;
b93a436e 8826 }
bbf6f052 8827
b93a436e
JL
8828 /* Various hooks for the DWARF 2 __throw routine. */
8829 case BUILT_IN_UNWIND_INIT:
8830 expand_builtin_unwind_init ();
8831 return const0_rtx;
8832 case BUILT_IN_FP:
8833 return frame_pointer_rtx;
8834 case BUILT_IN_SP:
8835 return stack_pointer_rtx;
8836#ifdef DWARF2_UNWIND_INFO
8837 case BUILT_IN_DWARF_FP_REGNUM:
8838 return expand_builtin_dwarf_fp_regnum ();
8839 case BUILT_IN_DWARF_REG_SIZE:
8840 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
fb2ca25a 8841#endif
b93a436e
JL
8842 case BUILT_IN_FROB_RETURN_ADDR:
8843 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
8844 case BUILT_IN_EXTRACT_RETURN_ADDR:
8845 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
8846 case BUILT_IN_SET_RETURN_ADDR_REG:
8847 expand_builtin_set_return_addr_reg (TREE_VALUE (arglist));
8848 return const0_rtx;
8849 case BUILT_IN_EH_STUB:
8850 return expand_builtin_eh_stub ();
8851 case BUILT_IN_SET_EH_REGS:
8852 expand_builtin_set_eh_regs (TREE_VALUE (arglist),
8853 TREE_VALUE (TREE_CHAIN (arglist)));
8854 return const0_rtx;
ca695ac9 8855
b93a436e
JL
8856 default: /* just do library call, if unknown builtin */
8857 error ("built-in function `%s' not currently supported",
8858 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
ca695ac9 8859 }
0006469d 8860
b93a436e
JL
8861 /* The switch statement above can drop through to cause the function
8862 to be called normally. */
0006469d 8863
b93a436e 8864 return expand_call (exp, target, ignore);
ca695ac9 8865}
b93a436e
JL
8866\f
8867/* Built-in functions to perform an untyped call and return. */
0006469d 8868
b93a436e
JL
8869/* For each register that may be used for calling a function, this
8870 gives a mode used to copy the register's value. VOIDmode indicates
8871 the register is not used for calling a function. If the machine
8872 has register windows, this gives only the outbound registers.
8873 INCOMING_REGNO gives the corresponding inbound register. */
8874static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
0006469d 8875
b93a436e
JL
8876/* For each register that may be used for returning values, this gives
8877 a mode used to copy the register's value. VOIDmode indicates the
8878 register is not used for returning values. If the machine has
8879 register windows, this gives only the outbound registers.
8880 INCOMING_REGNO gives the corresponding inbound register. */
8881static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
0006469d 8882
b93a436e
JL
8883/* For each register that may be used for calling a function, this
8884 gives the offset of that register into the block returned by
8885 __builtin_apply_args. 0 indicates that the register is not
8886 used for calling a function. */
8887static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
8888
8889/* Return the offset of register REGNO into the block returned by
8890 __builtin_apply_args. This is not declared static, since it is
8891 needed in objc-act.c. */
0006469d 8892
b93a436e
JL
8893int
8894apply_args_register_offset (regno)
8895 int regno;
8896{
8897 apply_args_size ();
0006469d 8898
b93a436e
JL
8899 /* Arguments are always put in outgoing registers (in the argument
8900	 block) when that makes sense. */
8901#ifdef OUTGOING_REGNO
8902 regno = OUTGOING_REGNO(regno);
8903#endif
8904 return apply_args_reg_offset[regno];
8905}
904762c8 8906
b93a436e
JL
8907/* Return the size required for the block returned by __builtin_apply_args,
8908 and initialize apply_args_mode. */
8909
8910static int
8911apply_args_size ()
0006469d 8912{
b93a436e
JL
8913 static int size = -1;
8914 int align, regno;
2f6e6d22 8915 enum machine_mode mode;
0006469d 8916
b93a436e
JL
8917 /* The values computed by this function never change. */
8918 if (size < 0)
ca695ac9 8919 {
b93a436e
JL
8920 /* The first value is the incoming arg-pointer. */
8921 size = GET_MODE_SIZE (Pmode);
0006469d 8922
b93a436e
JL
8923 /* The second value is the structure value address unless this is
8924 passed as an "invisible" first argument. */
8925 if (struct_value_rtx)
8926 size += GET_MODE_SIZE (Pmode);
0006469d 8927
b93a436e
JL
8928 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8929 if (FUNCTION_ARG_REGNO_P (regno))
8930 {
8931 /* Search for the proper mode for copying this register's
8932 value. I'm not sure this is right, but it works so far. */
8933 enum machine_mode best_mode = VOIDmode;
0006469d 8934
b93a436e
JL
8935 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8936 mode != VOIDmode;
8937 mode = GET_MODE_WIDER_MODE (mode))
8938 if (HARD_REGNO_MODE_OK (regno, mode)
8939 && HARD_REGNO_NREGS (regno, mode) == 1)
8940 best_mode = mode;
0006469d 8941
b93a436e
JL
8942 if (best_mode == VOIDmode)
8943 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8944 mode != VOIDmode;
8945 mode = GET_MODE_WIDER_MODE (mode))
8946 if (HARD_REGNO_MODE_OK (regno, mode)
8947 && (mov_optab->handlers[(int) mode].insn_code
8948 != CODE_FOR_nothing))
8949 best_mode = mode;
0006469d 8950
b93a436e
JL
8951 mode = best_mode;
8952 if (mode == VOIDmode)
8953 abort ();
904762c8 8954
b93a436e
JL
8955 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8956 if (size % align != 0)
8957 size = CEIL (size, align) * align;
8958 apply_args_reg_offset[regno] = size;
8959 size += GET_MODE_SIZE (mode);
8960 apply_args_mode[regno] = mode;
8961 }
8962 else
8963 {
8964 apply_args_mode[regno] = VOIDmode;
8965 apply_args_reg_offset[regno] = 0;
8966 }
8967 }
8968 return size;
8969}
0006469d 8970
b93a436e
JL
8971/* Return the size required for the block returned by __builtin_apply,
8972 and initialize apply_result_mode. */
904762c8 8973
b93a436e
JL
8974static int
8975apply_result_size ()
8976{
8977 static int size = -1;
8978 int align, regno;
8979 enum machine_mode mode;
0006469d 8980
b93a436e
JL
8981 /* The values computed by this function never change. */
8982 if (size < 0)
8983 {
8984 size = 0;
0006469d 8985
b93a436e
JL
8986 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8987 if (FUNCTION_VALUE_REGNO_P (regno))
8988 {
8989 /* Search for the proper mode for copying this register's
8990 value. I'm not sure this is right, but it works so far. */
8991 enum machine_mode best_mode = VOIDmode;
0006469d 8992
b93a436e
JL
8993 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8994 mode != TImode;
8995 mode = GET_MODE_WIDER_MODE (mode))
8996 if (HARD_REGNO_MODE_OK (regno, mode))
8997 best_mode = mode;
0006469d 8998
b93a436e
JL
8999 if (best_mode == VOIDmode)
9000 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9001 mode != VOIDmode;
9002 mode = GET_MODE_WIDER_MODE (mode))
9003 if (HARD_REGNO_MODE_OK (regno, mode)
9004 && (mov_optab->handlers[(int) mode].insn_code
9005 != CODE_FOR_nothing))
9006 best_mode = mode;
0006469d 9007
b93a436e
JL
9008 mode = best_mode;
9009 if (mode == VOIDmode)
9010 abort ();
9011
9012 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9013 if (size % align != 0)
9014 size = CEIL (size, align) * align;
9015 size += GET_MODE_SIZE (mode);
9016 apply_result_mode[regno] = mode;
9017 }
9018 else
9019 apply_result_mode[regno] = VOIDmode;
9020
9021 /* Allow targets that use untyped_call and untyped_return to override
9022 the size so that machine-specific information can be stored here. */
9023#ifdef APPLY_RESULT_SIZE
9024 size = APPLY_RESULT_SIZE;
9025#endif
9026 }
9027 return size;
9028}
0006469d 9029
b93a436e
JL
9030#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9031/* Create a vector describing the result block RESULT. If SAVEP is true,
9032 the result block is used to save the values; otherwise it is used to
9033 restore the values. */
9034
9035static rtx
9036result_vector (savep, result)
9037 int savep;
9038 rtx result;
9039{
9040 int regno, size, align, nelts;
9041 enum machine_mode mode;
9042 rtx reg, mem;
9043 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9044
9045 size = nelts = 0;
9046 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9047 if ((mode = apply_result_mode[regno]) != VOIDmode)
9048 {
9049 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9050 if (size % align != 0)
9051 size = CEIL (size, align) * align;
9052 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
9053 mem = change_address (result, mode,
9054 plus_constant (XEXP (result, 0), size));
9055 savevec[nelts++] = (savep
9056 ? gen_rtx_SET (VOIDmode, mem, reg)
9057 : gen_rtx_SET (VOIDmode, reg, mem));
9058 size += GET_MODE_SIZE (mode);
ca695ac9 9059 }
b93a436e
JL
9060 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
9061}
9062#endif /* HAVE_untyped_call or HAVE_untyped_return */
0006469d 9063
b93a436e
JL
9064/* Save the state required to perform an untyped call with the same
9065 arguments as were passed to the current function. */
904762c8 9066
b93a436e
JL
9067static rtx
9068expand_builtin_apply_args ()
9069{
9070 rtx registers;
9071 int size, align, regno;
9072 enum machine_mode mode;
0006469d 9073
b93a436e
JL
9074 /* Create a block where the arg-pointer, structure value address,
9075 and argument registers can be saved. */
9076 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
0cb1d109 9077
b93a436e
JL
9078 /* Walk past the arg-pointer and structure value address. */
9079 size = GET_MODE_SIZE (Pmode);
9080 if (struct_value_rtx)
9081 size += GET_MODE_SIZE (Pmode);
0cb1d109 9082
b93a436e
JL
9083 /* Save each register used in calling a function to the block. */
9084 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9085 if ((mode = apply_args_mode[regno]) != VOIDmode)
9086 {
9087 rtx tem;
0cb1d109 9088
b93a436e
JL
9089 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9090 if (size % align != 0)
9091 size = CEIL (size, align) * align;
0006469d 9092
b93a436e 9093 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
0e8c9172 9094
b93a436e
JL
9095#ifdef STACK_REGS
9096 /* For reg-stack.c's stack register household.
9097 Compare with a similar piece of code in function.c. */
0006469d 9098
b93a436e
JL
9099 emit_insn (gen_rtx_USE (mode, tem));
9100#endif
0e8c9172 9101
b93a436e
JL
9102 emit_move_insn (change_address (registers, mode,
9103 plus_constant (XEXP (registers, 0),
9104 size)),
9105 tem);
9106 size += GET_MODE_SIZE (mode);
0e8c9172 9107 }
0006469d 9108
b93a436e
JL
9109 /* Save the arg pointer to the block. */
9110 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9111 copy_to_reg (virtual_incoming_args_rtx));
9112 size = GET_MODE_SIZE (Pmode);
0006469d 9113
b93a436e
JL
9114 /* Save the structure value address unless this is passed as an
9115 "invisible" first argument. */
9116 if (struct_value_incoming_rtx)
9117 {
9118 emit_move_insn (change_address (registers, Pmode,
9119 plus_constant (XEXP (registers, 0),
9120 size)),
9121 copy_to_reg (struct_value_incoming_rtx));
9122 size += GET_MODE_SIZE (Pmode);
9123 }
0006469d 9124
b93a436e
JL
9125 /* Return the address of the block. */
9126 return copy_addr_to_reg (XEXP (registers, 0));
9127}
0006469d 9128
b93a436e
JL
9129/* Perform an untyped call and save the state required to perform an
9130 untyped return of whatever value was returned by the given function. */
0006469d 9131
b93a436e
JL
9132static rtx
9133expand_builtin_apply (function, arguments, argsize)
9134 rtx function, arguments, argsize;
9135{
9136 int size, align, regno;
9137 enum machine_mode mode;
9138 rtx incoming_args, result, reg, dest, call_insn;
9139 rtx old_stack_level = 0;
9140 rtx call_fusage = 0;
0006469d 9141
b93a436e
JL
9142 /* Create a block where the return registers can be saved. */
9143 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9144
9145 /* ??? The argsize value should be adjusted here. */
9146
9147 /* Fetch the arg pointer from the ARGUMENTS block. */
9148 incoming_args = gen_reg_rtx (Pmode);
9149 emit_move_insn (incoming_args,
9150 gen_rtx_MEM (Pmode, arguments));
9151#ifndef STACK_GROWS_DOWNWARD
9152 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9153 incoming_args, 0, OPTAB_LIB_WIDEN);
9154#endif
9155
9156 /* Perform postincrements before actually calling the function. */
ca695ac9 9157 emit_queue ();
0006469d 9158
b93a436e
JL
9159 /* Push a new argument block and copy the arguments. */
9160 do_pending_stack_adjust ();
0006469d 9161
b93a436e
JL
9162	 /* Save the stack, using the nonlocal variant if available. */
9163#ifdef HAVE_save_stack_nonlocal
9164 if (HAVE_save_stack_nonlocal)
9165 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
9166 else
9167#endif
9168 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
0006469d 9169
b93a436e
JL
9170 /* Push a block of memory onto the stack to store the memory arguments.
9171 Save the address in a register, and copy the memory arguments. ??? I
9172	 haven't figured out how the calling convention macros affect this,
9173 but it's likely that the source and/or destination addresses in
9174 the block copy will need updating in machine specific ways. */
9175 dest = allocate_dynamic_stack_space (argsize, 0, 0);
9176 emit_block_move (gen_rtx_MEM (BLKmode, dest),
9177 gen_rtx_MEM (BLKmode, incoming_args),
9178 argsize,
9179 PARM_BOUNDARY / BITS_PER_UNIT);
9180
9181 /* Refer to the argument block. */
9182 apply_args_size ();
9183 arguments = gen_rtx_MEM (BLKmode, arguments);
9184
9185 /* Walk past the arg-pointer and structure value address. */
9186 size = GET_MODE_SIZE (Pmode);
9187 if (struct_value_rtx)
9188 size += GET_MODE_SIZE (Pmode);
9189
9190 /* Restore each of the registers previously saved. Make USE insns
9191 for each of these registers for use in making the call. */
9192 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9193 if ((mode = apply_args_mode[regno]) != VOIDmode)
9194 {
9195 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9196 if (size % align != 0)
9197 size = CEIL (size, align) * align;
9198 reg = gen_rtx_REG (mode, regno);
9199 emit_move_insn (reg,
9200 change_address (arguments, mode,
9201 plus_constant (XEXP (arguments, 0),
9202 size)));
9203
9204 use_reg (&call_fusage, reg);
9205 size += GET_MODE_SIZE (mode);
9206 }
9207
9208 /* Restore the structure value address unless this is passed as an
9209 "invisible" first argument. */
9210 size = GET_MODE_SIZE (Pmode);
9211 if (struct_value_rtx)
0006469d 9212 {
b93a436e
JL
9213 rtx value = gen_reg_rtx (Pmode);
9214 emit_move_insn (value,
9215 change_address (arguments, Pmode,
9216 plus_constant (XEXP (arguments, 0),
9217 size)));
9218 emit_move_insn (struct_value_rtx, value);
9219 if (GET_CODE (struct_value_rtx) == REG)
9220 use_reg (&call_fusage, struct_value_rtx);
9221 size += GET_MODE_SIZE (Pmode);
ca695ac9 9222 }
0006469d 9223
b93a436e
JL
9224 /* All arguments and registers used for the call are set up by now! */
9225 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
0006469d 9226
b93a436e
JL
9227 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9228 and we don't want to load it into a register as an optimization,
9229 because prepare_call_address already did it if it should be done. */
9230 if (GET_CODE (function) != SYMBOL_REF)
9231 function = memory_address (FUNCTION_MODE, function);
0006469d 9232
b93a436e
JL
9233 /* Generate the actual call instruction and save the return value. */
9234#ifdef HAVE_untyped_call
9235 if (HAVE_untyped_call)
9236 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
9237 result, result_vector (1, result)));
9238 else
9239#endif
9240#ifdef HAVE_call_value
9241 if (HAVE_call_value)
ca695ac9 9242 {
b93a436e 9243 rtx valreg = 0;
0006469d 9244
b93a436e
JL
9245 /* Locate the unique return register. It is not possible to
9246 express a call that sets more than one return register using
9247 call_value; use untyped_call for that. In fact, untyped_call
9248 only needs to save the return registers in the given block. */
9249 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9250 if ((mode = apply_result_mode[regno]) != VOIDmode)
9251 {
9252 if (valreg)
9253 abort (); /* HAVE_untyped_call required. */
9254 valreg = gen_rtx_REG (mode, regno);
9255 }
0006469d 9256
b93a436e
JL
9257 emit_call_insn (gen_call_value (valreg,
9258 gen_rtx_MEM (FUNCTION_MODE, function),
9259 const0_rtx, NULL_RTX, const0_rtx));
0006469d 9260
b93a436e
JL
9261 emit_move_insn (change_address (result, GET_MODE (valreg),
9262 XEXP (result, 0)),
9263 valreg);
ca695ac9 9264 }
b93a436e
JL
9265 else
9266#endif
9267 abort ();
0006469d 9268
b93a436e
JL
9269 /* Find the CALL insn we just emitted. */
9270 for (call_insn = get_last_insn ();
9271 call_insn && GET_CODE (call_insn) != CALL_INSN;
9272 call_insn = PREV_INSN (call_insn))
9273 ;
0006469d 9274
b93a436e
JL
9275 if (! call_insn)
9276 abort ();
0006469d 9277
b93a436e
JL
9278 /* Put the register usage information on the CALL. If there is already
9279 some usage information, put ours at the end. */
9280 if (CALL_INSN_FUNCTION_USAGE (call_insn))
0006469d 9281 {
b93a436e 9282 rtx link;
0006469d 9283
b93a436e
JL
9284 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9285 link = XEXP (link, 1))
9286 ;
9287
9288 XEXP (link, 1) = call_fusage;
ca695ac9 9289 }
b93a436e
JL
9290 else
9291 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
0006469d 9292
b93a436e
JL
9293 /* Restore the stack. */
9294#ifdef HAVE_save_stack_nonlocal
9295 if (HAVE_save_stack_nonlocal)
9296 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
9297 else
9298#endif
9299 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9300
9301 /* Return the address of the result block. */
9302 return copy_addr_to_reg (XEXP (result, 0));
0006469d 9303}
bbf6f052 9304
b93a436e 9305/* Perform an untyped return. */
ca695ac9
JB
9306
9307static void
b93a436e
JL
9308expand_builtin_return (result)
9309 rtx result;
bbf6f052 9310{
b93a436e
JL
9311 int size, align, regno;
9312 enum machine_mode mode;
9313 rtx reg;
9314 rtx call_fusage = 0;
bbf6f052 9315
b93a436e
JL
9316 apply_result_size ();
9317 result = gen_rtx_MEM (BLKmode, result);
bbf6f052 9318
b93a436e
JL
9319#ifdef HAVE_untyped_return
9320 if (HAVE_untyped_return)
ca695ac9 9321 {
b93a436e
JL
9322 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9323 emit_barrier ();
9324 return;
ca695ac9 9325 }
b93a436e 9326#endif
1499e0a8 9327
b93a436e
JL
9328 /* Restore the return value and note that each value is used. */
9329 size = 0;
9330 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9331 if ((mode = apply_result_mode[regno]) != VOIDmode)
9332 {
9333 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9334 if (size % align != 0)
9335 size = CEIL (size, align) * align;
9336 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9337 emit_move_insn (reg,
9338 change_address (result, mode,
9339 plus_constant (XEXP (result, 0),
9340 size)));
9341
9342 push_to_sequence (call_fusage);
9343 emit_insn (gen_rtx_USE (VOIDmode, reg));
9344 call_fusage = get_insns ();
9345 end_sequence ();
9346 size += GET_MODE_SIZE (mode);
9347 }
9348
9349 /* Put the USE insns before the return. */
9350 emit_insns (call_fusage);
9351
9352	 /* Return whatever value was restored by jumping directly to the end
9353 of the function. */
9354 expand_null_return ();
ca695ac9
JB
9355}
9356\f
b93a436e
JL
9357/* Expand code for a post- or pre- increment or decrement
9358 and return the RTX for the result.
9359 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
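/* For example, for user code `y = x++;' the old value of X must be
   produced as the result while X itself is incremented, which is why the
   postincrement paths below either copy OP0 or queue the increment.  */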
1499e0a8 9360
b93a436e
JL
9361static rtx
9362expand_increment (exp, post, ignore)
9363 register tree exp;
9364 int post, ignore;
ca695ac9 9365{
b93a436e
JL
9366 register rtx op0, op1;
9367 register rtx temp, value;
9368 register tree incremented = TREE_OPERAND (exp, 0);
9369 optab this_optab = add_optab;
9370 int icode;
9371 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9372 int op0_is_copy = 0;
9373 int single_insn = 0;
9374 /* 1 means we can't store into OP0 directly,
9375 because it is a subreg narrower than a word,
9376 and we don't dare clobber the rest of the word. */
9377 int bad_subreg = 0;
1499e0a8 9378
b93a436e
JL
9379 /* Stabilize any component ref that might need to be
9380 evaluated more than once below. */
9381 if (!post
9382 || TREE_CODE (incremented) == BIT_FIELD_REF
9383 || (TREE_CODE (incremented) == COMPONENT_REF
9384 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9385 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9386 incremented = stabilize_reference (incremented);
9387 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9388 ones into save exprs so that they don't accidentally get evaluated
9389 more than once by the code below. */
9390 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9391 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9392 incremented = save_expr (incremented);
e9a25f70 9393
b93a436e
JL
9394 /* Compute the operands as RTX.
9395 Note whether OP0 is the actual lvalue or a copy of it:
9396 I believe it is a copy iff it is a register or subreg
9397 and insns were generated in computing it. */
e9a25f70 9398
b93a436e
JL
9399 temp = get_last_insn ();
9400 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
e9a25f70 9401
b93a436e
JL
9402 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9403 in place but instead must do sign- or zero-extension during assignment,
9404 so we copy it into a new register and let the code below use it as
9405 a copy.
e9a25f70 9406
b93a436e
JL
9407	 Note that we can safely modify this SUBREG since it is known not to be
9408 shared (it was made by the expand_expr call above). */
9409
9410 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9411 {
9412 if (post)
9413 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9414 else
9415 bad_subreg = 1;
9416 }
9417 else if (GET_CODE (op0) == SUBREG
9418 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9419 {
9420 /* We cannot increment this SUBREG in place. If we are
9421 post-incrementing, get a copy of the old value. Otherwise,
9422 just mark that we cannot increment in place. */
9423 if (post)
9424 op0 = copy_to_reg (op0);
9425 else
9426 bad_subreg = 1;
e9a25f70
JL
9427 }
9428
b93a436e
JL
9429 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9430 && temp != get_last_insn ());
9431 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9432 EXPAND_MEMORY_USE_BAD);
1499e0a8 9433
b93a436e
JL
9434 /* Decide whether incrementing or decrementing. */
9435 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9436 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9437 this_optab = sub_optab;
9438
9439 /* Convert decrement by a constant into a negative increment. */
9440 if (this_optab == sub_optab
9441 && GET_CODE (op1) == CONST_INT)
ca695ac9 9442 {
b93a436e
JL
9443 op1 = GEN_INT (- INTVAL (op1));
9444 this_optab = add_optab;
ca695ac9 9445 }
1499e0a8 9446
b93a436e
JL
9447 /* For a preincrement, see if we can do this with a single instruction. */
9448 if (!post)
9449 {
9450 icode = (int) this_optab->handlers[(int) mode].insn_code;
9451 if (icode != (int) CODE_FOR_nothing
9452 /* Make sure that OP0 is valid for operands 0 and 1
9453 of the insn we want to queue. */
9454 && (*insn_operand_predicate[icode][0]) (op0, mode)
9455 && (*insn_operand_predicate[icode][1]) (op0, mode)
9456 && (*insn_operand_predicate[icode][2]) (op1, mode))
9457 single_insn = 1;
9458 }
bbf6f052 9459
b93a436e
JL
9460 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9461 then we cannot just increment OP0. We must therefore contrive to
9462 increment the original value. Then, for postincrement, we can return
9463 OP0 since it is a copy of the old value. For preincrement, expand here
9464 unless we can do it with a single insn.
bbf6f052 9465
b93a436e
JL
9466 Likewise if storing directly into OP0 would clobber high bits
9467 we need to preserve (bad_subreg). */
9468 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 9469 {
b93a436e
JL
9470 /* This is the easiest way to increment the value wherever it is.
9471 Problems with multiple evaluation of INCREMENTED are prevented
9472 because either (1) it is a component_ref or preincrement,
9473 in which case it was stabilized above, or (2) it is an array_ref
9474 with constant index in an array in a register, which is
9475 safe to reevaluate. */
9476 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9477 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9478 ? MINUS_EXPR : PLUS_EXPR),
9479 TREE_TYPE (exp),
9480 incremented,
9481 TREE_OPERAND (exp, 1));
a358cee0 9482
b93a436e
JL
9483 while (TREE_CODE (incremented) == NOP_EXPR
9484 || TREE_CODE (incremented) == CONVERT_EXPR)
9485 {
9486 newexp = convert (TREE_TYPE (incremented), newexp);
9487 incremented = TREE_OPERAND (incremented, 0);
9488 }
bbf6f052 9489
b93a436e
JL
9490	 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9491 return post ? op0 : temp;
9492 }
bbf6f052 9493
b93a436e
JL
9494 if (post)
9495 {
9496 /* We have a true reference to the value in OP0.
9497 If there is an insn to add or subtract in this mode, queue it.
9498 Queueing the increment insn avoids the register shuffling
9499 that often results if we must increment now and first save
9500 the old value for subsequent use. */
bbf6f052 9501
b93a436e
JL
9502#if 0 /* Turned off to avoid making extra insn for indexed memref. */
9503 op0 = stabilize (op0);
9504#endif
41dfd40c 9505
b93a436e
JL
9506 icode = (int) this_optab->handlers[(int) mode].insn_code;
9507 if (icode != (int) CODE_FOR_nothing
9508 /* Make sure that OP0 is valid for operands 0 and 1
9509 of the insn we want to queue. */
9510 && (*insn_operand_predicate[icode][0]) (op0, mode)
9511 && (*insn_operand_predicate[icode][1]) (op0, mode))
9512 {
9513 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9514 op1 = force_reg (mode, op1);
bbf6f052 9515
b93a436e
JL
9516 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9517 }
9518 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9519 {
9520 rtx addr = (general_operand (XEXP (op0, 0), mode)
9521 ? force_reg (Pmode, XEXP (op0, 0))
9522 : copy_to_reg (XEXP (op0, 0)));
9523 rtx temp, result;
ca695ac9 9524
b93a436e
JL
9525 op0 = change_address (op0, VOIDmode, addr);
9526 temp = force_reg (GET_MODE (op0), op0);
9527 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9528 op1 = force_reg (mode, op1);
ca695ac9 9529
b93a436e
JL
9530 /* The increment queue is LIFO, thus we have to `queue'
9531 the instructions in reverse order. */
9532 enqueue_insn (op0, gen_move_insn (op0, temp));
9533 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9534 return result;
bbf6f052
RK
9535 }
9536 }
ca695ac9 9537
b93a436e
JL
9538 /* Preincrement, or we can't increment with one simple insn. */
9539 if (post)
9540 /* Save a copy of the value before inc or dec, to return it later. */
9541 temp = value = copy_to_reg (op0);
9542 else
9543 /* Arrange to return the incremented value. */
9544 /* Copy the rtx because expand_binop will protect from the queue,
9545 and the results of that would be invalid for us to return
9546 if our caller does emit_queue before using our result. */
9547 temp = copy_rtx (value = op0);
bbf6f052 9548
b93a436e
JL
9549 /* Increment however we can. */
9550 op1 = expand_binop (mode, this_optab, value, op1,
9551 flag_check_memory_usage ? NULL_RTX : op0,
9552 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9553 /* Make sure the value is stored into OP0. */
9554 if (op1 != op0)
9555 emit_move_insn (op0, op1);
5718612f 9556
b93a436e
JL
9557 return temp;
9558}
9559\f
9560/* Expand all function calls contained within EXP, innermost ones first.
9561 But don't look within expressions that have sequence points.
9562 For each CALL_EXPR, record the rtx for its value
9563 in the CALL_EXPR_RTL field. */
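/* For example, in `f (g (x), h (y))' the calls to g and h are expanded
   before the call to f, so the stack pushes for f's arguments are not
   interleaved with the inner calls.  */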
5718612f 9564
b93a436e
JL
9565static void
9566preexpand_calls (exp)
9567 tree exp;
9568{
9569 register int nops, i;
9570 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5718612f 9571
b93a436e
JL
9572 if (! do_preexpand_calls)
9573 return;
5718612f 9574
b93a436e 9575 /* Only expressions and references can contain calls. */
bbf6f052 9576
b93a436e
JL
9577 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9578 return;
bbf6f052 9579
b93a436e
JL
9580 switch (TREE_CODE (exp))
9581 {
9582 case CALL_EXPR:
9583 /* Do nothing if already expanded. */
9584 if (CALL_EXPR_RTL (exp) != 0
9585 /* Do nothing if the call returns a variable-sized object. */
9586 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
9587 /* Do nothing to built-in functions. */
9588 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9589 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9590 == FUNCTION_DECL)
9591 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9592 return;
bbf6f052 9593
b93a436e
JL
9594 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9595 return;
bbf6f052 9596
b93a436e
JL
9597 case COMPOUND_EXPR:
9598 case COND_EXPR:
9599 case TRUTH_ANDIF_EXPR:
9600 case TRUTH_ORIF_EXPR:
9601 /* If we find one of these, then we can be sure
9602 the adjust will be done for it (since it makes jumps).
9603 Do it now, so that if this is inside an argument
9604 of a function, we don't get the stack adjustment
9605 after some other args have already been pushed. */
9606 do_pending_stack_adjust ();
9607 return;
bbf6f052 9608
b93a436e
JL
9609 case BLOCK:
9610 case RTL_EXPR:
9611 case WITH_CLEANUP_EXPR:
9612 case CLEANUP_POINT_EXPR:
9613 case TRY_CATCH_EXPR:
9614 return;
bbf6f052 9615
9616 case SAVE_EXPR:
9617 if (SAVE_EXPR_RTL (exp) != 0)
9618 return;
9619
9620 default:
9621 break;
ca695ac9 9622 }
bbf6f052 9623
9624 nops = tree_code_length[(int) TREE_CODE (exp)];
9625 for (i = 0; i < nops; i++)
9626 if (TREE_OPERAND (exp, i) != 0)
9627 {
9628 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9629 if (type == 'e' || type == '<' || type == '1' || type == '2'
9630 || type == 'r')
9631 preexpand_calls (TREE_OPERAND (exp, i));
9632 }
9633}
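#if 0
/* Illustrative sketch only (g and h are hypothetical): for a tree like
   the one below, preexpand_calls expands both inner CALL_EXPRs before
   anything is pushed for the enclosing expression, recording each
   result rtx in CALL_EXPR_RTL.  */
extern int g (), h ();

static int
example_nested_calls ()
{
  return g () + h ();		/* both calls pre-expanded, innermost first */
}
#endif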
9634\f
9635/* At the start of a function, record that we have no previously-pushed
9636 arguments waiting to be popped. */
bbf6f052 9637
9638void
9639init_pending_stack_adjust ()
9640{
9641 pending_stack_adjust = 0;
9642}
bbf6f052 9643
b93a436e 9644/* When exiting from function, if safe, clear out any pending stack adjust
9645 so the adjustment won't get done.
9646
9647 Note, if the current function calls alloca, then it must have a
9648 frame pointer regardless of the value of flag_omit_frame_pointer. */
bbf6f052 9649
9650void
9651clear_pending_stack_adjust ()
9652{
9653#ifdef EXIT_IGNORE_STACK
9654 if (optimize > 0
9655 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9656 && EXIT_IGNORE_STACK
9657 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9658 && ! flag_inline_functions)
9659 pending_stack_adjust = 0;
9660#endif
9661}
bbf6f052 9662
9663/* Pop any previously-pushed arguments that have not been popped yet. */
9664
9665void
9666do_pending_stack_adjust ()
9667{
9668 if (inhibit_defer_pop == 0)
ca695ac9 9669 {
9670 if (pending_stack_adjust != 0)
9671 adjust_stack (GEN_INT (pending_stack_adjust));
9672 pending_stack_adjust = 0;
bbf6f052 9673 }
9674}
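#if 0
/* Illustrative sketch only (f and g are hypothetical), assuming a
   calling convention where the caller pops the arguments: the pops for
   consecutive calls accumulate in pending_stack_adjust and are then
   done by a single adjust_stack.  */
extern void f (), g ();

static void
example_deferred_pops ()
{
  f (1);	/* pop deferred; pending_stack_adjust grows */
  g (2);	/* pop deferred again */
  /* do_pending_stack_adjust emits one stack adjustment for both.  */
}
#endif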
9675\f
b93a436e 9676/* Expand conditional expressions. */
bbf6f052 9677
9678/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9679 LABEL is an rtx of code CODE_LABEL, in this function and all the
9680 functions here. */
bbf6f052 9681
9682void
9683jumpifnot (exp, label)
ca695ac9 9684 tree exp;
b93a436e 9685 rtx label;
bbf6f052 9686{
9687 do_jump (exp, label, NULL_RTX);
9688}
bbf6f052 9689
b93a436e 9690/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
ca695ac9 9691
9692void
9693jumpif (exp, label)
9694 tree exp;
9695 rtx label;
9696{
9697 do_jump (exp, NULL_RTX, label);
9698}
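#if 0
/* Illustrative sketch only (schematic, not the actual statement
   expander): how jumpifnot is typically used to lower
   `if (cond) ...'.  */
static void
example_expand_if (cond)
     tree cond;
{
  rtx after_label = gen_label_rtx ();

  jumpifnot (cond, after_label);	/* skip the THEN part if cond is 0 */
  /* ... expand the THEN part here ... */
  emit_label (after_label);
}
#endif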
ca695ac9 9699
9700/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9701 the result is zero, or IF_TRUE_LABEL if the result is one.
9702 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9703 meaning fall through in that case.
ca695ac9 9704
9705 do_jump always does any pending stack adjust except when it does not
9706 actually perform a jump. An example where there is no jump
9707 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
ca695ac9 9708
9709 This function is responsible for optimizing cases such as
9710 &&, || and comparison operators in EXP. */
5718612f 9711
9712void
9713do_jump (exp, if_false_label, if_true_label)
9714 tree exp;
9715 rtx if_false_label, if_true_label;
9716{
9717 register enum tree_code code = TREE_CODE (exp);
9718 /* Some cases need to create a label to jump to
9719 in order to properly fall through.
9720 These cases set DROP_THROUGH_LABEL nonzero. */
9721 rtx drop_through_label = 0;
9722 rtx temp;
9723 rtx comparison = 0;
9724 int i;
9725 tree type;
9726 enum machine_mode mode;
ca695ac9 9727
b93a436e 9728 emit_queue ();
ca695ac9 9729
b93a436e 9730 switch (code)
ca695ac9 9731 {
b93a436e 9732 case ERROR_MARK:
ca695ac9 9733 break;
bbf6f052 9734
9735 case INTEGER_CST:
9736 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9737 if (temp)
9738 emit_jump (temp);
9739 break;
bbf6f052 9740
9741#if 0
9742 /* This is not true with #pragma weak */
9743 case ADDR_EXPR:
9744 /* The address of something can never be zero. */
9745 if (if_true_label)
9746 emit_jump (if_true_label);
9747 break;
9748#endif
bbf6f052 9749
9750 case NOP_EXPR:
9751 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9752 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9753 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9754 goto normal;
9755 case CONVERT_EXPR:
9756 /* If we are narrowing the operand, we have to do the compare in the
9757 narrower mode. */
9758 if ((TYPE_PRECISION (TREE_TYPE (exp))
9759 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9760 goto normal;
9761 case NON_LVALUE_EXPR:
9762 case REFERENCE_EXPR:
9763 case ABS_EXPR:
9764 case NEGATE_EXPR:
9765 case LROTATE_EXPR:
9766 case RROTATE_EXPR:
9767 /* These cannot change zero->non-zero or vice versa. */
9768 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9769 break;
bbf6f052 9770
9771#if 0
9772 /* This is never less insns than evaluating the PLUS_EXPR followed by
9773 a test and can be longer if the test is eliminated. */
9774 case PLUS_EXPR:
9775 /* Reduce to minus. */
9776 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9777 TREE_OPERAND (exp, 0),
9778 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9779 TREE_OPERAND (exp, 1))));
9780 /* Process as MINUS. */
ca695ac9 9781#endif
bbf6f052 9782
9783 case MINUS_EXPR:
9784 /* Non-zero iff operands of minus differ. */
9785 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
9786 TREE_OPERAND (exp, 0),
9787 TREE_OPERAND (exp, 1)),
9788 NE, NE);
9789 break;
bbf6f052 9790
9791 case BIT_AND_EXPR:
9792 /* If we are AND'ing with a small constant, do this comparison in the
9793 smallest type that fits. If the machine doesn't have comparisons
9794 that small, it will be converted back to the wider comparison.
9795 This helps if we are testing the sign bit of a narrower object.
9796 combine can't do this for us because it can't know whether a
9797 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
bbf6f052 9798
9799 if (! SLOW_BYTE_ACCESS
9800 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9801 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9802 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
9803 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9804 && (type = type_for_mode (mode, 1)) != 0
9805 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9806 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9807 != CODE_FOR_nothing))
9808 {
9809 do_jump (convert (type, exp), if_false_label, if_true_label);
9810 break;
9811 }
9812 goto normal;
bbf6f052 9813
9814 case TRUTH_NOT_EXPR:
9815 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9816 break;
bbf6f052 9817
9818 case TRUTH_ANDIF_EXPR:
9819 if (if_false_label == 0)
9820 if_false_label = drop_through_label = gen_label_rtx ();
9821 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9822 start_cleanup_deferral ();
9823 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9824 end_cleanup_deferral ();
9825 break;
bbf6f052 9826
9827 case TRUTH_ORIF_EXPR:
9828 if (if_true_label == 0)
9829 if_true_label = drop_through_label = gen_label_rtx ();
9830 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9831 start_cleanup_deferral ();
9832 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9833 end_cleanup_deferral ();
9834 break;
bbf6f052 9835
9836 case COMPOUND_EXPR:
9837 push_temp_slots ();
9838 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9839 preserve_temp_slots (NULL_RTX);
9840 free_temp_slots ();
9841 pop_temp_slots ();
9842 emit_queue ();
9843 do_pending_stack_adjust ();
9844 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9845 break;
bbf6f052 9846
9847 case COMPONENT_REF:
9848 case BIT_FIELD_REF:
9849 case ARRAY_REF:
9850 {
9851 int bitsize, bitpos, unsignedp;
9852 enum machine_mode mode;
9853 tree type;
9854 tree offset;
9855 int volatilep = 0;
9856 int alignment;
bbf6f052 9857
9858 /* Get description of this reference. We don't actually care
9859 about the underlying object here. */
9860 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9861 &mode, &unsignedp, &volatilep,
9862 &alignment);
bbf6f052 9863
9864 type = type_for_size (bitsize, unsignedp);
9865 if (! SLOW_BYTE_ACCESS
9866 && type != 0 && bitsize >= 0
9867 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9868 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9869 != CODE_FOR_nothing))
9870 {
9871 do_jump (convert (type, exp), if_false_label, if_true_label);
9872 break;
9873 }
9874 goto normal;
9875 }
bbf6f052 9876
9877 case COND_EXPR:
9878 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9879 if (integer_onep (TREE_OPERAND (exp, 1))
9880 && integer_zerop (TREE_OPERAND (exp, 2)))
9881 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
bbf6f052 9882
9883 else if (integer_zerop (TREE_OPERAND (exp, 1))
9884 && integer_onep (TREE_OPERAND (exp, 2)))
9885 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
bbf6f052 9886
9887 else
9888 {
9889 register rtx label1 = gen_label_rtx ();
9890 drop_through_label = gen_label_rtx ();
bbf6f052 9891
b93a436e 9892 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
bbf6f052 9893
9894 start_cleanup_deferral ();
9895 /* Now the THEN-expression. */
9896 do_jump (TREE_OPERAND (exp, 1),
9897 if_false_label ? if_false_label : drop_through_label,
9898 if_true_label ? if_true_label : drop_through_label);
9899 /* In case the do_jump just above never jumps. */
9900 do_pending_stack_adjust ();
9901 emit_label (label1);
bbf6f052 9902
9903 /* Now the ELSE-expression. */
9904 do_jump (TREE_OPERAND (exp, 2),
9905 if_false_label ? if_false_label : drop_through_label,
9906 if_true_label ? if_true_label : drop_through_label);
9907 end_cleanup_deferral ();
9908 }
9909 break;
bbf6f052 9910
9911 case EQ_EXPR:
9912 {
9913 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 9914
9915 if (integer_zerop (TREE_OPERAND (exp, 1)))
9916 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9917 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9918 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9919 do_jump
9920 (fold
9921 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9922 fold (build (EQ_EXPR, TREE_TYPE (exp),
9923 fold (build1 (REALPART_EXPR,
9924 TREE_TYPE (inner_type),
9925 TREE_OPERAND (exp, 0))),
9926 fold (build1 (REALPART_EXPR,
9927 TREE_TYPE (inner_type),
9928 TREE_OPERAND (exp, 1))))),
9929 fold (build (EQ_EXPR, TREE_TYPE (exp),
9930 fold (build1 (IMAGPART_EXPR,
9931 TREE_TYPE (inner_type),
9932 TREE_OPERAND (exp, 0))),
9933 fold (build1 (IMAGPART_EXPR,
9934 TREE_TYPE (inner_type),
9935 TREE_OPERAND (exp, 1))))))),
9936 if_false_label, if_true_label);
9937 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9938 && !can_compare_p (TYPE_MODE (inner_type)))
9939 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9940 else
9941 comparison = compare (exp, EQ, EQ);
9942 break;
9943 }
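#if 0
      /* Illustrative sketch only (z0 and z1 are hypothetical two-field
	 complex values): the EQ_EXPR decomposition above amounts to

	      z0 == z1  =>  z0.re == z1.re && z0.im == z1.im

	 and the NE_EXPR case below is the dual, using ||.  */
      comparison_value = (z0.re == z1.re && z0.im == z1.im);
#endif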
bbf6f052 9944
9945 case NE_EXPR:
9946 {
9947 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 9948
9949 if (integer_zerop (TREE_OPERAND (exp, 1)))
9950 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9951 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9952 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9953 do_jump
9954 (fold
9955 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9956 fold (build (NE_EXPR, TREE_TYPE (exp),
9957 fold (build1 (REALPART_EXPR,
9958 TREE_TYPE (inner_type),
9959 TREE_OPERAND (exp, 0))),
9960 fold (build1 (REALPART_EXPR,
9961 TREE_TYPE (inner_type),
9962 TREE_OPERAND (exp, 1))))),
9963 fold (build (NE_EXPR, TREE_TYPE (exp),
9964 fold (build1 (IMAGPART_EXPR,
9965 TREE_TYPE (inner_type),
9966 TREE_OPERAND (exp, 0))),
9967 fold (build1 (IMAGPART_EXPR,
9968 TREE_TYPE (inner_type),
9969 TREE_OPERAND (exp, 1))))))),
9970 if_false_label, if_true_label);
9971 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9972 && !can_compare_p (TYPE_MODE (inner_type)))
9973 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9974 else
9975 comparison = compare (exp, NE, NE);
9976 break;
9977 }
bbf6f052 9978
9979 case LT_EXPR:
9980 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9981 == MODE_INT)
9982 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9983 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9984 else
9985 comparison = compare (exp, LT, LTU);
9986 break;
bbf6f052 9987
9988 case LE_EXPR:
9989 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9990 == MODE_INT)
9991 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9992 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9993 else
9994 comparison = compare (exp, LE, LEU);
9995 break;
bbf6f052 9996
9997 case GT_EXPR:
9998 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9999 == MODE_INT)
10000 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10001 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10002 else
10003 comparison = compare (exp, GT, GTU);
10004 break;
bbf6f052 10005
10006 case GE_EXPR:
10007 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10008 == MODE_INT)
10009 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10010 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10011 else
10012 comparison = compare (exp, GE, GEU);
10013 break;
bbf6f052 10014
10015 default:
10016 normal:
10017 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10018#if 0
10019 /* This is not needed any more and causes poor code since it causes
10020 comparisons and tests from non-SI objects to have different code
10021 sequences. */
10022 /* Copy to register to avoid generating bad insns by cse
10023 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10024 if (!cse_not_expected && GET_CODE (temp) == MEM)
10025 temp = copy_to_reg (temp);
ca695ac9 10026#endif
10027 do_pending_stack_adjust ();
10028 if (GET_CODE (temp) == CONST_INT)
10029 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10030 else if (GET_CODE (temp) == LABEL_REF)
10031 comparison = const_true_rtx;
10032 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10033 && !can_compare_p (GET_MODE (temp)))
10034 /* Note swapping the labels gives us not-equal. */
10035 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10036 else if (GET_MODE (temp) != VOIDmode)
10037 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10038 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10039 GET_MODE (temp), NULL_RTX, 0);
10040 else
10041 abort ();
10042 }
bbf6f052 10043
10044 /* Do any postincrements in the expression that was tested. */
10045 emit_queue ();
bbf6f052 10046
10047 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10048 straight into a conditional jump instruction as the jump condition.
10049 Otherwise, all the work has been done already. */
bbf6f052 10050
10051 if (comparison == const_true_rtx)
10052 {
10053 if (if_true_label)
10054 emit_jump (if_true_label);
10055 }
10056 else if (comparison == const0_rtx)
10057 {
10058 if (if_false_label)
10059 emit_jump (if_false_label);
10060 }
10061 else if (comparison)
10062 do_jump_for_compare (comparison, if_false_label, if_true_label);
bbf6f052 10063
10064 if (drop_through_label)
10065 {
10066 /* If do_jump produces code that might be jumped around,
10067 do any stack adjusts from that code, before the place
10068 where control merges in. */
10069 do_pending_stack_adjust ();
10070 emit_label (drop_through_label);
10071 }
bbf6f052 10072}
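#if 0
/* Illustrative sketch only (hypothetical function): the control flow
   do_jump builds for `a && b' through the TRUTH_ANDIF_EXPR case --
   branch to the false label as soon as either operand is zero.  */
static int
example_andif (a, b)
     int a, b;
{
  if (! a)
    goto if_false;
  if (! b)
    goto if_false;
  return 1;			/* both nonzero: the if_true path */
if_false:
  return 0;
}
#endif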
10073\f
10074/* Given a comparison expression EXP for values too wide to be compared
10075 with one insn, test the comparison and jump to the appropriate label.
10076 The code of EXP is ignored; we always test GT if SWAP is 0,
10077 and LT if SWAP is 1. */
bbf6f052 10078
10079static void
10080do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10081 tree exp;
10082 int swap;
10083 rtx if_false_label, if_true_label;
10084{
10085 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10086 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10087 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10088 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10089 rtx drop_through_label = 0;
10090 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10091 int i;
bbf6f052 10092
10093 if (! if_true_label || ! if_false_label)
10094 drop_through_label = gen_label_rtx ();
10095 if (! if_true_label)
10096 if_true_label = drop_through_label;
10097 if (! if_false_label)
10098 if_false_label = drop_through_label;
bbf6f052 10099
10100 /* Compare a word at a time, high order first. */
10101 for (i = 0; i < nwords; i++)
f81497d9 10102 {
10103 rtx comp;
10104 rtx op0_word, op1_word;
10105
10106 if (WORDS_BIG_ENDIAN)
10107 {
10108 op0_word = operand_subword_force (op0, i, mode);
10109 op1_word = operand_subword_force (op1, i, mode);
10110 }
f81497d9 10111 else
10112 {
10113 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10114 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10115 }
10116
10117 /* All but high-order word must be compared as unsigned. */
10118 comp = compare_from_rtx (op0_word, op1_word,
10119 (unsignedp || i > 0) ? GTU : GT,
10120 unsignedp, word_mode, NULL_RTX, 0);
10121 if (comp == const_true_rtx)
10122 emit_jump (if_true_label);
10123 else if (comp != const0_rtx)
10124 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10125
10126 /* Consider lower words only if these are equal. */
10127 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10128 NULL_RTX, 0);
10129 if (comp == const_true_rtx)
10130 emit_jump (if_false_label);
10131 else if (comp != const0_rtx)
10132 do_jump_for_compare (comp, NULL_RTX, if_false_label);
f81497d9 10133 }
ca695ac9 10134
10135 if (if_false_label)
10136 emit_jump (if_false_label);
10137 if (drop_through_label)
10138 emit_label (drop_through_label);
10139}
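#if 0
/* Illustrative sketch only (hypothetical types): the loop above written
   in C for a two-word value.  The high-order words are compared first;
   the low-order words matter only when the high-order words are equal,
   and every word but the high-order one is compared unsigned.  */
static int
example_greater_by_parts (hi0, lo0, hi1, lo1)
     long hi0, hi1;
     unsigned long lo0, lo1;
{
  if (hi0 > hi1)		/* signed compare of high-order words */
    return 1;			/* if_true_label */
  if (hi0 != hi1)
    return 0;			/* if_false_label */
  return lo0 > lo1;		/* unsigned compare of the rest */
}
#endif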
10140
10141/* Compare OP0 with OP1, word at a time, in mode MODE.
10142 UNSIGNEDP says to do unsigned comparison.
10143 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
f81497d9 10144
10145void
10146do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10147 enum machine_mode mode;
10148 int unsignedp;
10149 rtx op0, op1;
10150 rtx if_false_label, if_true_label;
f81497d9 10151{
10152 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10153 rtx drop_through_label = 0;
10154 int i;
f81497d9 10155
10156 if (! if_true_label || ! if_false_label)
10157 drop_through_label = gen_label_rtx ();
10158 if (! if_true_label)
10159 if_true_label = drop_through_label;
10160 if (! if_false_label)
10161 if_false_label = drop_through_label;
f81497d9 10162
10163 /* Compare a word at a time, high order first. */
10164 for (i = 0; i < nwords; i++)
10165 {
10166 rtx comp;
10167 rtx op0_word, op1_word;
bbf6f052 10168
10169 if (WORDS_BIG_ENDIAN)
10170 {
10171 op0_word = operand_subword_force (op0, i, mode);
10172 op1_word = operand_subword_force (op1, i, mode);
10173 }
10174 else
10175 {
10176 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10177 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10178 }
bbf6f052 10179
10180 /* All but high-order word must be compared as unsigned. */
10181 comp = compare_from_rtx (op0_word, op1_word,
10182 (unsignedp || i > 0) ? GTU : GT,
10183 unsignedp, word_mode, NULL_RTX, 0);
10184 if (comp == const_true_rtx)
10185 emit_jump (if_true_label);
10186 else if (comp != const0_rtx)
10187 do_jump_for_compare (comp, NULL_RTX, if_true_label);
bbf6f052 10188
10189 /* Consider lower words only if these are equal. */
10190 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10191 NULL_RTX, 0);
10192 if (comp == const_true_rtx)
10193 emit_jump (if_false_label);
10194 else if (comp != const0_rtx)
10195 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10196 }
bbf6f052 10197
10198 if (if_false_label)
10199 emit_jump (if_false_label);
10200 if (drop_through_label)
10201 emit_label (drop_through_label);
10202}
10203
10204/* Given an EQ_EXPR expression EXP for values too wide to be compared
10205 with one insn, test the comparison and jump to the appropriate label. */
bbf6f052 10206
10207static void
10208do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10209 tree exp;
10210 rtx if_false_label, if_true_label;
bbf6f052 10211{
10212 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10213 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10214 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10215 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10216 int i;
10217 rtx drop_through_label = 0;
bbf6f052 10218
10219 if (! if_false_label)
10220 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 10221
10222 for (i = 0; i < nwords; i++)
10223 {
10224 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10225 operand_subword_force (op1, i, mode),
10226 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10227 word_mode, NULL_RTX, 0);
10228 if (comp == const_true_rtx)
10229 emit_jump (if_false_label);
10230 else if (comp != const0_rtx)
10231 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10232 }
bbf6f052 10233
10234 if (if_true_label)
10235 emit_jump (if_true_label);
10236 if (drop_through_label)
10237 emit_label (drop_through_label);
bbf6f052 10238}
10239\f
10240/* Jump according to whether OP0 is 0.
10241 We assume that OP0 has an integer mode that is too wide
10242 for the available compare insns. */
bbf6f052 10243
f5963e61 10244void
10245do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10246 rtx op0;
10247 rtx if_false_label, if_true_label;
ca695ac9 10248{
10249 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10250 rtx part;
10251 int i;
10252 rtx drop_through_label = 0;
bbf6f052 10253
10254 /* The fastest way of doing this comparison on almost any machine is to
10255 "or" all the words and compare the result. If all have to be loaded
 10256 from memory and this is a very wide item, this may be
 10257 slower, but that's highly unlikely. */
bbf6f052 10258
10259 part = gen_reg_rtx (word_mode);
10260 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10261 for (i = 1; i < nwords && part != 0; i++)
10262 part = expand_binop (word_mode, ior_optab, part,
10263 operand_subword_force (op0, i, GET_MODE (op0)),
10264 part, 1, OPTAB_WIDEN);
bbf6f052 10265
10266 if (part != 0)
10267 {
10268 rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
10269 NULL_RTX, 0);
0f41302f 10270
10271 if (comp == const_true_rtx)
10272 emit_jump (if_false_label);
10273 else if (comp == const0_rtx)
10274 emit_jump (if_true_label);
10275 else
10276 do_jump_for_compare (comp, if_false_label, if_true_label);
bbf6f052 10277
10278 return;
10279 }
bbf6f052 10280
10281 /* If we couldn't do the "or" simply, do this with a series of compares. */
10282 if (! if_false_label)
10283 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 10284
10285 for (i = 0; i < nwords; i++)
10286 {
10287 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10288 GET_MODE (op0)),
10289 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10290 if (comp == const_true_rtx)
10291 emit_jump (if_false_label);
10292 else if (comp != const0_rtx)
10293 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10294 }
bbf6f052 10295
10296 if (if_true_label)
10297 emit_jump (if_true_label);
0f41302f 10298
10299 if (drop_through_label)
10300 emit_label (drop_through_label);
bbf6f052 10301}
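#if 0
/* Illustrative sketch only (hypothetical types): the "or all the words"
   zero test above, for a two-word value -- one compare instead of a
   compare and branch per word.  */
static int
example_zero_by_parts (w0, w1)
     unsigned long w0, w1;
{
  return (w0 | w1) == 0;
}
#endif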
bbf6f052 10302
10303/* Given a comparison expression in rtl form, output conditional branches to
10304 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
bbf6f052 10305
10306static void
10307do_jump_for_compare (comparison, if_false_label, if_true_label)
10308 rtx comparison, if_false_label, if_true_label;
bbf6f052 10309{
10310 if (if_true_label)
10311 {
10312 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10313 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10314 else
10315 abort ();
ca695ac9 10316
10317 if (if_false_label)
10318 emit_jump (if_false_label);
10319 }
10320 else if (if_false_label)
10321 {
10322 rtx insn;
10323 rtx prev = get_last_insn ();
10324 rtx branch = 0;
0f41302f 10325
10326 /* Output the branch with the opposite condition. Then try to invert
10327 what is generated. If more than one insn is a branch, or if the
10328 branch is not the last insn written, abort. If we can't invert
 10329 the branch, make a true label, redirect this jump to that,
10330 emit a jump to the false label and define the true label. */
bbf6f052 10331
10332 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10333 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10334 else
10335 abort ();
bbf6f052 10336
10337 /* Here we get the first insn that was just emitted. It used to be the
10338 case that, on some machines, emitting the branch would discard
10339 the previous compare insn and emit a replacement. This isn't
10340 done anymore, but abort if we see that PREV is deleted. */
bbf6f052 10341
10342 if (prev == 0)
10343 insn = get_insns ();
10344 else if (INSN_DELETED_P (prev))
10345 abort ();
10346 else
10347 insn = NEXT_INSN (prev);
bbf6f052 10348
10349 for (; insn; insn = NEXT_INSN (insn))
10350 if (GET_CODE (insn) == JUMP_INSN)
10351 {
10352 if (branch)
10353 abort ();
10354 branch = insn;
10355 }
a7c5971a 10356
10357 if (branch != get_last_insn ())
10358 abort ();
bbf6f052 10359
10360 JUMP_LABEL (branch) = if_false_label;
10361 if (! invert_jump (branch, if_false_label))
10362 {
10363 if_true_label = gen_label_rtx ();
10364 redirect_jump (branch, if_true_label);
10365 emit_jump (if_false_label);
10366 emit_label (if_true_label);
10367 }
10368 }
10369}
10370\f
10371/* Generate code for a comparison expression EXP
10372 (including code to compute the values to be compared)
10373 and set (CC0) according to the result.
10374 SIGNED_CODE should be the rtx operation for this comparison for
10375 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
bbf6f052 10376
10377 We force a stack adjustment unless there are currently
10378 things pushed on the stack that aren't yet used. */
ca695ac9 10379
10380static rtx
10381compare (exp, signed_code, unsigned_code)
10382 register tree exp;
10383 enum rtx_code signed_code, unsigned_code;
10384{
10385 register rtx op0
10386 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10387 register rtx op1
10388 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10389 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10390 register enum machine_mode mode = TYPE_MODE (type);
10391 int unsignedp = TREE_UNSIGNED (type);
10392 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
ca695ac9 10393
10394#ifdef HAVE_canonicalize_funcptr_for_compare
10395 /* If function pointers need to be "canonicalized" before they can
10396 be reliably compared, then canonicalize them. */
10397 if (HAVE_canonicalize_funcptr_for_compare
10398 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10399 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10400 == FUNCTION_TYPE))
bbf6f052 10401 {
b93a436e 10402 rtx new_op0 = gen_reg_rtx (mode);
bbf6f052 10403
10404 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10405 op0 = new_op0;
ca695ac9 10406 }
bbf6f052 10407
10408 if (HAVE_canonicalize_funcptr_for_compare
10409 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10410 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10411 == FUNCTION_TYPE))
10412 {
10413 rtx new_op1 = gen_reg_rtx (mode);
bbf6f052 10414
10415 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10416 op1 = new_op1;
10417 }
10418#endif
0f41302f 10419
10420 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10421 ((mode == BLKmode)
10422 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10423 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
ca695ac9 10424}
bbf6f052 10425
10426/* Like compare but expects the values to compare as two rtx's.
10427 The decision as to signed or unsigned comparison must be made by the caller.
bbf6f052 10428
10429 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10430 compared.
bbf6f052 10431
10432 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10433 size of MODE should be used. */
ca695ac9 10434
10435rtx
10436compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10437 register rtx op0, op1;
10438 enum rtx_code code;
10439 int unsignedp;
10440 enum machine_mode mode;
10441 rtx size;
10442 int align;
bbf6f052 10443{
b93a436e 10444 rtx tem;
bbf6f052 10445
10446 /* If one operand is constant, make it the second one. Only do this
10447 if the other operand is not constant as well. */
e7c33f54 10448
10449 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10450 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
ca695ac9 10451 {
10452 tem = op0;
10453 op0 = op1;
10454 op1 = tem;
10455 code = swap_condition (code);
10456 }
bbf6f052 10457
10458 if (flag_force_mem)
10459 {
10460 op0 = force_not_mem (op0);
10461 op1 = force_not_mem (op1);
10462 }
bbf6f052 10463
b93a436e 10464 do_pending_stack_adjust ();
ca695ac9 10465
10466 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10467 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10468 return tem;
ca695ac9 10469
10470#if 0
10471 /* There's no need to do this now that combine.c can eliminate lots of
10472 sign extensions. This can be less efficient in certain cases on other
10473 machines. */
ca695ac9 10474
10475 /* If this is a signed equality comparison, we can do it as an
10476 unsigned comparison since zero-extension is cheaper than sign
10477 extension and comparisons with zero are done as unsigned. This is
10478 the case even on machines that can do fast sign extension, since
10479 zero-extension is easier to combine with other operations than
10480 sign-extension is. If we are comparing against a constant, we must
10481 convert it to what it would look like unsigned. */
10482 if ((code == EQ || code == NE) && ! unsignedp
10483 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10484 {
10485 if (GET_CODE (op1) == CONST_INT
10486 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10487 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10488 unsignedp = 1;
10489 }
10490#endif
ca695ac9 10491
b93a436e 10492 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
ca695ac9 10493
10494 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10495}
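#if 0
/* Illustrative sketch only: putting a constant second corresponds to
   rewriting `5 < x' as `x > 5'; the operand swap is only correct when
   paired with swap_condition, since LT must become GT, not GE.  */
static int
example_constant_second (x)
     int x;
{
  return x > 5;			/* canonical form of `5 < x' */
}
#endif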
10496\f
10497/* Generate code to calculate EXP using a store-flag instruction
10498 and return an rtx for the result. EXP is either a comparison
10499 or a TRUTH_NOT_EXPR whose operand is a comparison.
ca695ac9 10500
b93a436e 10501 If TARGET is nonzero, store the result there if convenient.
ca695ac9 10502
10503 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10504 cheap.
ca695ac9 10505
10506 Return zero if there is no suitable set-flag instruction
10507 available on this machine.
ca695ac9 10508
10509 Once expand_expr has been called on the arguments of the comparison,
10510 we are committed to doing the store flag, since it is not safe to
10511 re-evaluate the expression. We emit the store-flag insn by calling
10512 emit_store_flag, but only expand the arguments if we have a reason
10513 to believe that emit_store_flag will be successful. If we think that
10514 it will, but it isn't, we have to simulate the store-flag with a
10515 set/jump/set sequence. */
ca695ac9 10516
10517static rtx
10518do_store_flag (exp, target, mode, only_cheap)
10519 tree exp;
10520 rtx target;
10521 enum machine_mode mode;
10522 int only_cheap;
10523{
10524 enum rtx_code code;
10525 tree arg0, arg1, type;
10526 tree tem;
10527 enum machine_mode operand_mode;
10528 int invert = 0;
10529 int unsignedp;
10530 rtx op0, op1;
10531 enum insn_code icode;
10532 rtx subtarget = target;
381127e8 10533 rtx result, label;
ca695ac9 10534
10535 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10536 result at the end. We can't simply invert the test since it would
10537 have already been inverted if it were valid. This case occurs for
10538 some floating-point comparisons. */
ca695ac9 10539
10540 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10541 invert = 1, exp = TREE_OPERAND (exp, 0);
ca695ac9 10542
10543 arg0 = TREE_OPERAND (exp, 0);
10544 arg1 = TREE_OPERAND (exp, 1);
10545 type = TREE_TYPE (arg0);
10546 operand_mode = TYPE_MODE (type);
10547 unsignedp = TREE_UNSIGNED (type);
ca695ac9 10548
10549 /* We won't bother with BLKmode store-flag operations because it would mean
10550 passing a lot of information to emit_store_flag. */
10551 if (operand_mode == BLKmode)
10552 return 0;
ca695ac9 10553
10554 /* We won't bother with store-flag operations involving function pointers
10555 when function pointers must be canonicalized before comparisons. */
10556#ifdef HAVE_canonicalize_funcptr_for_compare
10557 if (HAVE_canonicalize_funcptr_for_compare
10558 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10559 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10560 == FUNCTION_TYPE))
10561 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10562 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10563 == FUNCTION_TYPE))))
10564 return 0;
10565#endif
10566
10567 STRIP_NOPS (arg0);
10568 STRIP_NOPS (arg1);
ca695ac9 10569
10570 /* Get the rtx comparison code to use. We know that EXP is a comparison
10571 operation of some type. Some comparisons against 1 and -1 can be
10572 converted to comparisons with zero. Do so here so that the tests
10573 below will be aware that we have a comparison with zero. These
10574 tests will not catch constants in the first operand, but constants
10575 are rarely passed as the first operand. */
ca695ac9 10576
10577 switch (TREE_CODE (exp))
10578 {
10579 case EQ_EXPR:
10580 code = EQ;
bbf6f052 10581 break;
10582 case NE_EXPR:
10583 code = NE;
bbf6f052 10584 break;
10585 case LT_EXPR:
10586 if (integer_onep (arg1))
10587 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10588 else
10589 code = unsignedp ? LTU : LT;
ca695ac9 10590 break;
10591 case LE_EXPR:
10592 if (! unsignedp && integer_all_onesp (arg1))
10593 arg1 = integer_zero_node, code = LT;
10594 else
10595 code = unsignedp ? LEU : LE;
ca695ac9 10596 break;
10597 case GT_EXPR:
10598 if (! unsignedp && integer_all_onesp (arg1))
10599 arg1 = integer_zero_node, code = GE;
10600 else
10601 code = unsignedp ? GTU : GT;
10602 break;
10603 case GE_EXPR:
10604 if (integer_onep (arg1))
10605 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10606 else
10607 code = unsignedp ? GEU : GE;
ca695ac9 10608 break;
ca695ac9 10609 default:
b93a436e 10610 abort ();
bbf6f052 10611 }
bbf6f052 10612
10613 /* Put a constant second. */
10614 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10615 {
10616 tem = arg0; arg0 = arg1; arg1 = tem;
10617 code = swap_condition (code);
ca695ac9 10618 }
bbf6f052 10619
10620 /* If this is an equality or inequality test of a single bit, we can
10621 do this by shifting the bit being tested to the low-order bit and
10622 masking the result with the constant 1. If the condition was EQ,
10623 we xor it with 1. This does not require an scc insn and is faster
10624 than an scc insn even if we have it. */
d39985fa 10625
10626 if ((code == NE || code == EQ)
10627 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10628 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10629 {
10630 tree inner = TREE_OPERAND (arg0, 0);
10631 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10632 int ops_unsignedp;
bbf6f052 10633
10634 /* If INNER is a right shift of a constant and it plus BITNUM does
10635 not overflow, adjust BITNUM and INNER. */
ca695ac9 10636
10637 if (TREE_CODE (inner) == RSHIFT_EXPR
10638 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10639 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10640 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
10641 < TYPE_PRECISION (type)))
ca695ac9 10642 {
10643 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10644 inner = TREE_OPERAND (inner, 0);
ca695ac9 10645 }
ca695ac9 10646
10647 /* If we are going to be able to omit the AND below, we must do our
10648 operations as unsigned. If we must use the AND, we have a choice.
10649 Normally unsigned is faster, but for some machines signed is. */
10650 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10651#ifdef LOAD_EXTEND_OP
10652 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10653#else
10654 : 1
10655#endif
10656 );
bbf6f052 10657
10658 if (subtarget == 0 || GET_CODE (subtarget) != REG
10659 || GET_MODE (subtarget) != operand_mode
e5e809f4 10660 || ! safe_from_p (subtarget, inner, 1))
b93a436e 10661 subtarget = 0;
bbf6f052 10662
b93a436e 10663 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
bbf6f052 10664
10665 if (bitnum != 0)
10666 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10667 size_int (bitnum), subtarget, ops_unsignedp);
bbf6f052 10668
10669 if (GET_MODE (op0) != mode)
10670 op0 = convert_to_mode (mode, op0, ops_unsignedp);
bbf6f052 10671
10672 if ((code == EQ && ! invert) || (code == NE && invert))
10673 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10674 ops_unsignedp, OPTAB_LIB_WIDEN);
bbf6f052 10675
10676 /* Put the AND last so it can combine with more things. */
10677 if (bitnum != TYPE_PRECISION (type) - 1)
10678 op0 = expand_and (op0, const1_rtx, subtarget);
bbf6f052 10679
10680 return op0;
10681 }
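#if 0
  /* Illustrative sketch only (x is hypothetical, with bitnum == 4): at
     the source level the block above rewrites

	  (x & 0x10) != 0   as   (x >> 4) & 1

     and the EQ form xors a final 1 in, so no scc insn is needed.  */
  return (x >> 4) & 1;
#endif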
bbf6f052 10682
10683 /* Now see if we are likely to be able to do this. Return if not. */
10684 if (! can_compare_p (operand_mode))
10685 return 0;
10686 icode = setcc_gen_code[(int) code];
10687 if (icode == CODE_FOR_nothing
10688 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
ca695ac9 10689 {
10690 /* We can only do this if it is one of the special cases that
10691 can be handled without an scc insn. */
10692 if ((code == LT && integer_zerop (arg1))
10693 || (! only_cheap && code == GE && integer_zerop (arg1)))
10694 ;
10695 else if (BRANCH_COST >= 0
10696 && ! only_cheap && (code == NE || code == EQ)
10697 && TREE_CODE (type) != REAL_TYPE
10698 && ((abs_optab->handlers[(int) operand_mode].insn_code
10699 != CODE_FOR_nothing)
10700 || (ffs_optab->handlers[(int) operand_mode].insn_code
10701 != CODE_FOR_nothing)))
10702 ;
10703 else
10704 return 0;
ca695ac9 10705 }
10706
10707 preexpand_calls (exp);
10708 if (subtarget == 0 || GET_CODE (subtarget) != REG
10709 || GET_MODE (subtarget) != operand_mode
e5e809f4 10710 || ! safe_from_p (subtarget, arg1, 1))
10711 subtarget = 0;
10712
10713 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10714 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10715
10716 if (target == 0)
10717 target = gen_reg_rtx (mode);
10718
10719 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10720 because, if the emit_store_flag does anything it will succeed and
10721 OP0 and OP1 will not be used subsequently. */
ca695ac9 10722
10723 result = emit_store_flag (target, code,
10724 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10725 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10726 operand_mode, unsignedp, 1);
ca695ac9 10727
10728 if (result)
10729 {
10730 if (invert)
10731 result = expand_binop (mode, xor_optab, result, const1_rtx,
10732 result, 0, OPTAB_LIB_WIDEN);
10733 return result;
ca695ac9 10734 }
bbf6f052 10735
10736 /* If this failed, we have to do this with set/compare/jump/set code. */
10737 if (GET_CODE (target) != REG
10738 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10739 target = gen_reg_rtx (GET_MODE (target));
10740
10741 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10742 result = compare_from_rtx (op0, op1, code, unsignedp,
10743 operand_mode, NULL_RTX, 0);
10744 if (GET_CODE (result) == CONST_INT)
10745 return (((result == const0_rtx && ! invert)
10746 || (result != const0_rtx && invert))
10747 ? const0_rtx : const1_rtx);
ca695ac9 10748
10749 label = gen_label_rtx ();
10750 if (bcc_gen_fctn[(int) code] == 0)
10751 abort ();
0f41302f 10752
10753 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10754 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10755 emit_label (label);
bbf6f052 10756
b93a436e 10757 return target;
ca695ac9 10758}
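#if 0
/* Illustrative sketch only (hypothetical function): the set/compare/
   jump/set fallback above, for `a < b' with INVERT clear.  */
static int
example_set_jump_set (a, b)
     int a, b;
{
  int target = 1;		/* assume the condition holds */
  if (a < b)
    goto done;			/* the conditional branch */
  target = 0;			/* branch not taken: condition is false */
done:
  return target;
}
#endif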
10759\f
10760/* Generate a tablejump instruction (used for switch statements). */
10761
10762#ifdef HAVE_tablejump
e87b4f3f 10763
10764/* INDEX is the value being switched on, with the lowest value
10765 in the table already subtracted.
10766 MODE is its expected mode (needed if INDEX is constant).
10767 RANGE is the length of the jump table.
10768 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
88d3b7f0 10769
10770 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10771 index value is out of range. */
0f41302f 10772
ca695ac9 10773void
10774do_tablejump (index, mode, range, table_label, default_label)
10775 rtx index, range, table_label, default_label;
10776 enum machine_mode mode;
ca695ac9 10777{
b93a436e 10778 register rtx temp, vector;
88d3b7f0 10779
10780 /* Do an unsigned comparison (in the proper mode) between the index
10781 expression and the value which represents the length of the range.
10782 Since we just finished subtracting the lower bound of the range
10783 from the index expression, this comparison allows us to simultaneously
10784 check that the original index expression value is both greater than
10785 or equal to the minimum value of the range and less than or equal to
10786 the maximum value of the range. */
709f5be1 10787
10788 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
10789 emit_jump_insn (gen_bgtu (default_label));
bbf6f052 10790
10791 /* If index is in range, it must fit in Pmode.
10792 Convert to Pmode so we can index with it. */
10793 if (mode != Pmode)
10794 index = convert_to_mode (Pmode, index, 1);
bbf6f052 10795
10796 /* Don't let a MEM slip thru, because then INDEX that comes
10797 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10798 and break_out_memory_refs will go to work on it and mess it up. */
10799#ifdef PIC_CASE_VECTOR_ADDRESS
10800 if (flag_pic && GET_CODE (index) != REG)
10801 index = copy_to_mode_reg (Pmode, index);
10802#endif
ca695ac9 10803
10804 /* If flag_force_addr were to affect this address
10805 it could interfere with the tricky assumptions made
10806 about addresses that contain label-refs,
10807 which may be valid only very near the tablejump itself. */
10808 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10809 GET_MODE_SIZE, because this indicates how large insns are. The other
10810 uses should all be Pmode, because they are addresses. This code
10811 could fail if addresses and insns are not the same size. */
10812 index = gen_rtx_PLUS (Pmode,
10813 gen_rtx_MULT (Pmode, index,
10814 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10815 gen_rtx_LABEL_REF (Pmode, table_label));
10816#ifdef PIC_CASE_VECTOR_ADDRESS
10817 if (flag_pic)
10818 index = PIC_CASE_VECTOR_ADDRESS (index);
10819 else
bbf6f052 10820#endif
10821 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10822 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10823 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10824 RTX_UNCHANGING_P (vector) = 1;
10825 convert_move (temp, vector, 0);
10826
10827 emit_jump_insn (gen_tablejump (temp, table_label));
10828
10829 /* If we are generating PIC code or if the table is PC-relative, the
10830 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10831 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10832 emit_barrier ();
bbf6f052 10833}
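#if 0
/* Illustrative sketch only (hypothetical function): the dispatch above
   at the C level.  INDEX already has the range's lower bound
   subtracted, so a single unsigned comparison rejects values below the
   range as well as above it.  */
static void *
example_tablejump (table, index, range)
     void **table;
     unsigned long index, range;
{
  if (index > range)
    return 0;			/* out of range: default_label */
  return table[index];		/* *(table_label + index * entry size) */
}
#endif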
10834
10835#endif /* HAVE_tablejump */