/* gcc/expr.c — captured from a git-blame view (git.ipfire.org mirror of
   thirdparty/gcc.git).  The inline commit hashes, author initials, and
   per-line numbers interleaved below are blame metadata, not program text.  */
bbf6f052 1/* Convert tree expression to rtl instructions, for GNU compiler.
c85f7c16 2 Copyright (C) 1988, 92-97, 1998 Free Software Foundation, Inc.
bbf6f052
RK
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
940d9d63
RK
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA. */
bbf6f052
RK
20
21
22#include "config.h"
670ee920 23#include "system.h"
ca695ac9 24#include "machmode.h"
bbf6f052
RK
25#include "rtl.h"
26#include "tree.h"
ca695ac9 27#include "obstack.h"
bbf6f052 28#include "flags.h"
bf76bb5a 29#include "regs.h"
4ed67205 30#include "hard-reg-set.h"
3d195391 31#include "except.h"
bbf6f052
RK
32#include "function.h"
33#include "insn-flags.h"
34#include "insn-codes.h"
bbf6f052 35#include "insn-config.h"
d6f4ec51
KG
36/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37#include "expr.h"
bbf6f052
RK
38#include "recog.h"
39#include "output.h"
bbf6f052 40#include "typeclass.h"
ca55abae 41#include "defaults.h"
10f0ad3d 42#include "toplev.h"
bbf6f052
RK
43
44#define CEIL(x,y) (((x) + (y) - 1) / (y))
45
46/* Decide whether a function's arguments should be processed
bbc8a071
RK
47 from first to last or from last to first.
48
49 They should if the stack and args grow in opposite directions, but
50 only if we have push insns. */
bbf6f052 51
bbf6f052 52#ifdef PUSH_ROUNDING
bbc8a071 53
3319a347 54#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
bbf6f052
RK
55#define PUSH_ARGS_REVERSED /* If it's last to first */
56#endif
bbc8a071 57
bbf6f052
RK
58#endif
59
60#ifndef STACK_PUSH_CODE
61#ifdef STACK_GROWS_DOWNWARD
62#define STACK_PUSH_CODE PRE_DEC
63#else
64#define STACK_PUSH_CODE PRE_INC
65#endif
66#endif
67
68/* Like STACK_BOUNDARY but in units of bytes, not bits. */
69#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
70
18543a22
ILT
71/* Assume that case vectors are not pc-relative. */
72#ifndef CASE_VECTOR_PC_RELATIVE
73#define CASE_VECTOR_PC_RELATIVE 0
74#endif
75
bbf6f052
RK
76/* If this is nonzero, we do not bother generating VOLATILE
77 around volatile memory references, and we are willing to
78 output indirect addresses. If cse is to follow, we reject
79 indirect addresses so a useful potential cse is generated;
80 if it is used only once, instruction combination will produce
81 the same indirect address eventually. */
82int cse_not_expected;
83
84/* Nonzero to generate code for all the subroutines within an
85 expression before generating the upper levels of the expression.
86 Nowadays this is never zero. */
87int do_preexpand_calls = 1;
88
89/* Number of units that we should eventually pop off the stack.
90 These are the arguments to function calls that have already returned. */
91int pending_stack_adjust;
92
93/* Nonzero means stack pops must not be deferred, and deferred stack
94 pops must not be output. It is nonzero inside a function call,
95 inside a conditional expression, inside a statement expression,
96 and in other cases as well. */
97int inhibit_defer_pop;
98
bbf6f052
RK
99/* Nonzero means __builtin_saveregs has already been done in this function.
100 The value is the pseudoreg containing the value __builtin_saveregs
101 returned. */
102static rtx saveregs_value;
103
dcf76fff
TW
104/* Similarly for __builtin_apply_args. */
105static rtx apply_args_value;
106
956d6950
JL
107/* Don't check memory usage, since code is being emitted to check a memory
108 usage. Used when flag_check_memory_usage is true, to avoid infinite
109 recursion. */
110static int in_check_memory_usage;
111
4969d05d
RK
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  /* Destination of the move.  */
  rtx to;
  rtx to_addr;
  int autinc_to;		/* NOTE(review): presumably nonzero when
				   TO_ADDR autoincrements — confirm against
				   move_by_pieces (not in view).  */
  int explicit_inc_to;
  int to_struct;
  /* Source of the move.  */
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  /* Progress/orientation of the copy loop.  */
  int len;
  int offset;
  int reverse;
};
130
9de08200
RK
/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  Mirrors the destination half of struct move_by_pieces
   (there is no source for a clear).  */

struct clear_by_pieces
{
  /* Destination being cleared.  */
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  /* Progress/orientation of the clear loop.  */
  int len;
  int offset;
  int reverse;
};
145
292b1216 146extern struct obstack permanent_obstack;
4ed67205 147extern rtx arg_pointer_save_area;
c02bd5d9 148
03566575
JW
149static rtx get_push_address PROTO ((int));
150
4969d05d
RK
151static rtx enqueue_insn PROTO((rtx, rtx));
152static int queued_subexp_p PROTO((rtx));
153static void init_queue PROTO((void));
154static void move_by_pieces PROTO((rtx, rtx, int, int));
155static int move_by_pieces_ninsns PROTO((unsigned int, int));
eae4b970 156static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
4969d05d 157 struct move_by_pieces *));
9de08200 158static void clear_by_pieces PROTO((rtx, int, int));
eae4b970 159static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
9de08200
RK
160 struct clear_by_pieces *));
161static int is_zeros_p PROTO((tree));
162static int mostly_zeros_p PROTO((tree));
d77fac3b
JL
163static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
164 tree, tree, int));
e1a43f73 165static void store_constructor PROTO((tree, rtx, int));
4969d05d
RK
166static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
167 enum machine_mode, int, int, int));
e009aaf3
JL
168static enum memory_use_mode
169 get_memory_usage_from_modifier PROTO((enum expand_modifier));
4969d05d
RK
170static tree save_noncopied_parts PROTO((tree, tree));
171static tree init_noncopied_parts PROTO((tree, tree));
e5e809f4 172static int safe_from_p PROTO((rtx, tree, int));
4969d05d 173static int fixed_type_p PROTO((tree));
01c8a7c8 174static rtx var_rtx PROTO((tree));
4969d05d
RK
175static int get_pointer_alignment PROTO((tree, unsigned));
176static tree string_constant PROTO((tree, tree *));
177static tree c_strlen PROTO((tree));
307b821c
RK
178static rtx expand_builtin PROTO((tree, rtx, rtx,
179 enum machine_mode, int));
0006469d
TW
180static int apply_args_size PROTO((void));
181static int apply_result_size PROTO((void));
182static rtx result_vector PROTO((int, rtx));
183static rtx expand_builtin_apply_args PROTO((void));
184static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
185static void expand_builtin_return PROTO((rtx));
7b8b9722 186static rtx expand_increment PROTO((tree, int, int));
4969d05d
RK
187static void preexpand_calls PROTO((tree));
188static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
2e5ec6cf 189void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
4969d05d 190static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
f5963e61 191void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
4969d05d
RK
192static void do_jump_for_compare PROTO((rtx, rtx, rtx));
193static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
194static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
16545b0a 195extern tree truthvalue_conversion PROTO((tree));
bbf6f052 196
4fa52007
RK
197/* Record for each mode whether we can move a register directly to or
198 from an object of that mode in memory. If we can't, we won't try
199 to use that mode directly when accessing a field of that mode. */
200
201static char direct_load[NUM_MACHINE_MODES];
202static char direct_store[NUM_MACHINE_MODES];
203
bbf6f052
RK
204/* MOVE_RATIO is the number of move instructions that is better than
205 a block move. */
206
207#ifndef MOVE_RATIO
266007a7 208#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
bbf6f052
RK
209#define MOVE_RATIO 2
210#else
996d9dac
MM
211/* If we are optimizing for space (-Os), cut down the default move ratio */
212#define MOVE_RATIO (optimize_size ? 3 : 15)
bbf6f052
RK
213#endif
214#endif
e87b4f3f 215
266007a7 216/* This array records the insn_code of insns to perform block moves. */
e6677db3 217enum insn_code movstr_optab[NUM_MACHINE_MODES];
266007a7 218
9de08200
RK
219/* This array records the insn_code of insns to perform block clears. */
220enum insn_code clrstr_optab[NUM_MACHINE_MODES];
221
0f41302f 222/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
e87b4f3f
RS
223
224#ifndef SLOW_UNALIGNED_ACCESS
c7a7ac46 225#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
e87b4f3f 226#endif
0006469d
TW
227
228/* Register mappings for target machines without register windows. */
229#ifndef INCOMING_REGNO
230#define INCOMING_REGNO(OUT) (OUT)
231#endif
232#ifndef OUTGOING_REGNO
233#define OUTGOING_REGNO(IN) (IN)
234#endif
bbf6f052 235\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* Build one scratch SET insn inside a throwaway sequence; its operands
     are rewritten in place for every (mode, regno) probe below.  */
  start_sequence ();
  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	/* Stop early once both a load and a store have been proven
	   possible for this mode.  */
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    /* Probe mem -> reg (load) with both base registers.  */
	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    /* Probe reg -> mem (store) with both base registers.  */
	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
302
bbf6f052
RK
303/* This is run at the start of compiling a function. */
304
305void
306init_expr ()
307{
308 init_queue ();
309
310 pending_stack_adjust = 0;
311 inhibit_defer_pop = 0;
bbf6f052 312 saveregs_value = 0;
0006469d 313 apply_args_value = 0;
e87b4f3f 314 forced_labels = 0;
bbf6f052
RK
315}
316
317/* Save all variables describing the current status into the structure *P.
318 This is used before starting a nested function. */
319
320void
321save_expr_status (p)
322 struct function *p;
323{
324 /* Instead of saving the postincrement queue, empty it. */
325 emit_queue ();
326
327 p->pending_stack_adjust = pending_stack_adjust;
328 p->inhibit_defer_pop = inhibit_defer_pop;
bbf6f052 329 p->saveregs_value = saveregs_value;
0006469d 330 p->apply_args_value = apply_args_value;
e87b4f3f 331 p->forced_labels = forced_labels;
bbf6f052
RK
332
333 pending_stack_adjust = 0;
334 inhibit_defer_pop = 0;
bbf6f052 335 saveregs_value = 0;
0006469d 336 apply_args_value = 0;
e87b4f3f 337 forced_labels = 0;
bbf6f052
RK
338}
339
340/* Restore all variables describing the current status from the structure *P.
341 This is used after a nested function. */
342
343void
344restore_expr_status (p)
345 struct function *p;
346{
347 pending_stack_adjust = p->pending_stack_adjust;
348 inhibit_defer_pop = p->inhibit_defer_pop;
bbf6f052 349 saveregs_value = p->saveregs_value;
0006469d 350 apply_args_value = p->apply_args_value;
e87b4f3f 351 forced_labels = p->forced_labels;
bbf6f052
RK
352}
353\f
354/* Manage the queue of increment instructions to be output
355 for POSTINCREMENT_EXPR expressions, etc. */
356
357static rtx pending_chain;
358
359/* Queue up to increment (or change) VAR later. BODY says how:
360 BODY should be the same thing you would pass to emit_insn
361 to increment right away. It will go to emit_insn later on.
362
363 The value is a QUEUED expression to be used in place of VAR
364 where you want to guarantee the pre-incrementation value of VAR. */
365
366static rtx
367enqueue_insn (var, body)
368 rtx var, body;
369{
38a448ca
RH
370 pending_chain = gen_rtx_QUEUED (GET_MODE (var),
371 var, NULL_RTX, NULL_RTX, body,
372 pending_chain);
bbf6f052
RK
373 return pending_chain;
374}
375
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  /* Rebuild the MEM around the un-incremented variable, carrying
	     over the original MEM's flags.  */
	  register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      /* The increment already happened: capture the memory's old
		 value into a temp, emitted just before the increment.  */
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  /* Copy X before mutating it, since it may be shared.  */
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* X is itself a QUEUED from here on.  */
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
468
469/* Return nonzero if X contains a QUEUED expression:
470 if it contains anything that will be altered by a queued increment.
471 We handle only combinations of MEM, PLUS, MINUS and MULT operators
472 since memory addresses generally contain only those. */
473
474static int
475queued_subexp_p (x)
476 rtx x;
477{
478 register enum rtx_code code = GET_CODE (x);
479 switch (code)
480 {
481 case QUEUED:
482 return 1;
483 case MEM:
484 return queued_subexp_p (XEXP (x, 0));
485 case MULT:
486 case PLUS:
487 case MINUS:
e9a25f70
JL
488 return (queued_subexp_p (XEXP (x, 0))
489 || queued_subexp_p (XEXP (x, 1)));
490 default:
491 return 0;
bbf6f052 492 }
bbf6f052
RK
493}
494
495/* Perform all the pending incrementations. */
496
497void
498emit_queue ()
499{
500 register rtx p;
381127e8 501 while ((p = pending_chain))
bbf6f052
RK
502 {
503 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
504 pending_chain = QUEUED_NEXT (p);
505 }
506}
507
508static void
509init_queue ()
510{
511 if (pending_chain)
512 abort ();
513}
514\f
515/* Copy data from FROM to TO, where the machine modes are not the same.
516 Both modes may be integer, or both may be floating.
517 UNSIGNEDP should be nonzero if FROM is an unsigned type.
518 This causes zero-extension instead of sign-extension. */
519
520void
521convert_move (to, from, unsignedp)
522 register rtx to, from;
523 int unsignedp;
524{
525 enum machine_mode to_mode = GET_MODE (to);
526 enum machine_mode from_mode = GET_MODE (from);
527 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
528 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
529 enum insn_code code;
530 rtx libcall;
531
532 /* rtx code for making an equivalent value. */
533 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
534
535 to = protect_from_queue (to, 1);
536 from = protect_from_queue (from, 0);
537
538 if (to_real != from_real)
539 abort ();
540
1499e0a8
RK
541 /* If FROM is a SUBREG that indicates that we have already done at least
542 the required extension, strip it. We don't handle such SUBREGs as
543 TO here. */
544
545 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
546 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
547 >= GET_MODE_SIZE (to_mode))
548 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
549 from = gen_lowpart (to_mode, from), from_mode = to_mode;
550
551 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
552 abort ();
553
bbf6f052
RK
554 if (to_mode == from_mode
555 || (from_mode == VOIDmode && CONSTANT_P (from)))
556 {
557 emit_move_insn (to, from);
558 return;
559 }
560
561 if (to_real)
562 {
81d79e2c
RS
563 rtx value;
564
2b01c326 565 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
b424402e 566 {
2b01c326
RK
567 /* Try converting directly if the insn is supported. */
568 if ((code = can_extend_p (to_mode, from_mode, 0))
569 != CODE_FOR_nothing)
570 {
571 emit_unop_insn (code, to, from, UNKNOWN);
572 return;
573 }
bbf6f052 574 }
2b01c326 575
b424402e
RS
576#ifdef HAVE_trunchfqf2
577 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
578 {
579 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
580 return;
581 }
582#endif
704af6a1
JL
583#ifdef HAVE_trunctqfqf2
584 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
585 {
586 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
587 return;
588 }
589#endif
b424402e
RS
590#ifdef HAVE_truncsfqf2
591 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
592 {
593 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
594 return;
595 }
596#endif
597#ifdef HAVE_truncdfqf2
598 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
599 {
600 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
601 return;
602 }
603#endif
604#ifdef HAVE_truncxfqf2
605 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
606 {
607 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
608 return;
609 }
610#endif
611#ifdef HAVE_trunctfqf2
612 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
613 {
614 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
615 return;
616 }
617#endif
03747aa3
RK
618
619#ifdef HAVE_trunctqfhf2
620 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
621 {
622 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
623 return;
624 }
625#endif
b424402e
RS
626#ifdef HAVE_truncsfhf2
627 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
628 {
629 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
630 return;
631 }
632#endif
633#ifdef HAVE_truncdfhf2
634 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
635 {
636 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
637 return;
638 }
639#endif
640#ifdef HAVE_truncxfhf2
641 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
642 {
643 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
644 return;
645 }
646#endif
647#ifdef HAVE_trunctfhf2
648 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
649 {
650 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
651 return;
652 }
653#endif
2b01c326
RK
654
655#ifdef HAVE_truncsftqf2
656 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
657 {
658 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
659 return;
660 }
661#endif
662#ifdef HAVE_truncdftqf2
663 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
664 {
665 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
666 return;
667 }
668#endif
669#ifdef HAVE_truncxftqf2
670 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
671 {
672 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
673 return;
674 }
675#endif
676#ifdef HAVE_trunctftqf2
677 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
678 {
679 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
680 return;
681 }
682#endif
683
bbf6f052
RK
684#ifdef HAVE_truncdfsf2
685 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
686 {
687 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
688 return;
689 }
690#endif
b092b471
JW
691#ifdef HAVE_truncxfsf2
692 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
693 {
694 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
695 return;
696 }
697#endif
bbf6f052
RK
698#ifdef HAVE_trunctfsf2
699 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
700 {
701 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
702 return;
703 }
704#endif
b092b471
JW
705#ifdef HAVE_truncxfdf2
706 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
707 {
708 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
709 return;
710 }
711#endif
bbf6f052
RK
712#ifdef HAVE_trunctfdf2
713 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
714 {
715 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
716 return;
717 }
718#endif
719
b092b471
JW
720 libcall = (rtx) 0;
721 switch (from_mode)
722 {
723 case SFmode:
724 switch (to_mode)
725 {
726 case DFmode:
727 libcall = extendsfdf2_libfunc;
728 break;
729
730 case XFmode:
731 libcall = extendsfxf2_libfunc;
732 break;
733
734 case TFmode:
735 libcall = extendsftf2_libfunc;
736 break;
e9a25f70
JL
737
738 default:
739 break;
b092b471
JW
740 }
741 break;
742
743 case DFmode:
744 switch (to_mode)
745 {
746 case SFmode:
747 libcall = truncdfsf2_libfunc;
748 break;
749
750 case XFmode:
751 libcall = extenddfxf2_libfunc;
752 break;
753
754 case TFmode:
755 libcall = extenddftf2_libfunc;
756 break;
e9a25f70
JL
757
758 default:
759 break;
b092b471
JW
760 }
761 break;
762
763 case XFmode:
764 switch (to_mode)
765 {
766 case SFmode:
767 libcall = truncxfsf2_libfunc;
768 break;
769
770 case DFmode:
771 libcall = truncxfdf2_libfunc;
772 break;
e9a25f70
JL
773
774 default:
775 break;
b092b471
JW
776 }
777 break;
778
779 case TFmode:
780 switch (to_mode)
781 {
782 case SFmode:
783 libcall = trunctfsf2_libfunc;
784 break;
785
786 case DFmode:
787 libcall = trunctfdf2_libfunc;
788 break;
e9a25f70
JL
789
790 default:
791 break;
b092b471
JW
792 }
793 break;
e9a25f70
JL
794
795 default:
796 break;
b092b471
JW
797 }
798
799 if (libcall == (rtx) 0)
800 /* This conversion is not implemented yet. */
bbf6f052
RK
801 abort ();
802
81d79e2c
RS
803 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
804 1, from, from_mode);
805 emit_move_insn (to, value);
bbf6f052
RK
806 return;
807 }
808
809 /* Now both modes are integers. */
810
811 /* Handle expanding beyond a word. */
812 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
813 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
814 {
815 rtx insns;
816 rtx lowpart;
817 rtx fill_value;
818 rtx lowfrom;
819 int i;
820 enum machine_mode lowpart_mode;
821 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
822
823 /* Try converting directly if the insn is supported. */
824 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
825 != CODE_FOR_nothing)
826 {
cd1b4b44
RK
827 /* If FROM is a SUBREG, put it into a register. Do this
828 so that we always generate the same set of insns for
829 better cse'ing; if an intermediate assignment occurred,
830 we won't be doing the operation directly on the SUBREG. */
831 if (optimize > 0 && GET_CODE (from) == SUBREG)
832 from = force_reg (from_mode, from);
bbf6f052
RK
833 emit_unop_insn (code, to, from, equiv_code);
834 return;
835 }
836 /* Next, try converting via full word. */
837 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
838 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
839 != CODE_FOR_nothing))
840 {
a81fee56 841 if (GET_CODE (to) == REG)
38a448ca 842 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
bbf6f052
RK
843 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
844 emit_unop_insn (code, to,
845 gen_lowpart (word_mode, to), equiv_code);
846 return;
847 }
848
849 /* No special multiword conversion insn; do it by hand. */
850 start_sequence ();
851
5c5033c3
RK
852 /* Since we will turn this into a no conflict block, we must ensure
853 that the source does not overlap the target. */
854
855 if (reg_overlap_mentioned_p (to, from))
856 from = force_reg (from_mode, from);
857
bbf6f052
RK
858 /* Get a copy of FROM widened to a word, if necessary. */
859 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
860 lowpart_mode = word_mode;
861 else
862 lowpart_mode = from_mode;
863
864 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
865
866 lowpart = gen_lowpart (lowpart_mode, to);
867 emit_move_insn (lowpart, lowfrom);
868
869 /* Compute the value to put in each remaining word. */
870 if (unsignedp)
871 fill_value = const0_rtx;
872 else
873 {
874#ifdef HAVE_slt
875 if (HAVE_slt
876 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
877 && STORE_FLAG_VALUE == -1)
878 {
906c4e36
RK
879 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
880 lowpart_mode, 0, 0);
bbf6f052
RK
881 fill_value = gen_reg_rtx (word_mode);
882 emit_insn (gen_slt (fill_value));
883 }
884 else
885#endif
886 {
887 fill_value
888 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
889 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
906c4e36 890 NULL_RTX, 0);
bbf6f052
RK
891 fill_value = convert_to_mode (word_mode, fill_value, 1);
892 }
893 }
894
895 /* Fill the remaining words. */
896 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
897 {
898 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
899 rtx subword = operand_subword (to, index, 1, to_mode);
900
901 if (subword == 0)
902 abort ();
903
904 if (fill_value != subword)
905 emit_move_insn (subword, fill_value);
906 }
907
908 insns = get_insns ();
909 end_sequence ();
910
906c4e36 911 emit_no_conflict_block (insns, to, from, NULL_RTX,
38a448ca 912 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
bbf6f052
RK
913 return;
914 }
915
d3c64ee3
RS
916 /* Truncating multi-word to a word or less. */
917 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
918 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
bbf6f052 919 {
431a6eca
JW
920 if (!((GET_CODE (from) == MEM
921 && ! MEM_VOLATILE_P (from)
922 && direct_load[(int) to_mode]
923 && ! mode_dependent_address_p (XEXP (from, 0)))
924 || GET_CODE (from) == REG
925 || GET_CODE (from) == SUBREG))
926 from = force_reg (from_mode, from);
bbf6f052
RK
927 convert_move (to, gen_lowpart (word_mode, from), 0);
928 return;
929 }
930
931 /* Handle pointer conversion */ /* SPEE 900220 */
e5e809f4
JL
932 if (to_mode == PQImode)
933 {
934 if (from_mode != QImode)
935 from = convert_to_mode (QImode, from, unsignedp);
936
937#ifdef HAVE_truncqipqi2
938 if (HAVE_truncqipqi2)
939 {
940 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
941 return;
942 }
943#endif /* HAVE_truncqipqi2 */
944 abort ();
945 }
946
947 if (from_mode == PQImode)
948 {
949 if (to_mode != QImode)
950 {
951 from = convert_to_mode (QImode, from, unsignedp);
952 from_mode = QImode;
953 }
954 else
955 {
956#ifdef HAVE_extendpqiqi2
957 if (HAVE_extendpqiqi2)
958 {
959 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
960 return;
961 }
962#endif /* HAVE_extendpqiqi2 */
963 abort ();
964 }
965 }
966
bbf6f052
RK
967 if (to_mode == PSImode)
968 {
969 if (from_mode != SImode)
970 from = convert_to_mode (SImode, from, unsignedp);
971
1f584163
DE
972#ifdef HAVE_truncsipsi2
973 if (HAVE_truncsipsi2)
bbf6f052 974 {
1f584163 975 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
bbf6f052
RK
976 return;
977 }
1f584163 978#endif /* HAVE_truncsipsi2 */
bbf6f052
RK
979 abort ();
980 }
981
982 if (from_mode == PSImode)
983 {
984 if (to_mode != SImode)
985 {
986 from = convert_to_mode (SImode, from, unsignedp);
987 from_mode = SImode;
988 }
989 else
990 {
1f584163
DE
991#ifdef HAVE_extendpsisi2
992 if (HAVE_extendpsisi2)
bbf6f052 993 {
1f584163 994 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
bbf6f052
RK
995 return;
996 }
1f584163 997#endif /* HAVE_extendpsisi2 */
bbf6f052
RK
998 abort ();
999 }
1000 }
1001
0407367d
RK
1002 if (to_mode == PDImode)
1003 {
1004 if (from_mode != DImode)
1005 from = convert_to_mode (DImode, from, unsignedp);
1006
1007#ifdef HAVE_truncdipdi2
1008 if (HAVE_truncdipdi2)
1009 {
1010 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1011 return;
1012 }
1013#endif /* HAVE_truncdipdi2 */
1014 abort ();
1015 }
1016
1017 if (from_mode == PDImode)
1018 {
1019 if (to_mode != DImode)
1020 {
1021 from = convert_to_mode (DImode, from, unsignedp);
1022 from_mode = DImode;
1023 }
1024 else
1025 {
1026#ifdef HAVE_extendpdidi2
1027 if (HAVE_extendpdidi2)
1028 {
1029 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1030 return;
1031 }
1032#endif /* HAVE_extendpdidi2 */
1033 abort ();
1034 }
1035 }
1036
bbf6f052
RK
1037 /* Now follow all the conversions between integers
1038 no more than a word long. */
1039
1040 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1041 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1042 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
d3c64ee3 1043 GET_MODE_BITSIZE (from_mode)))
bbf6f052 1044 {
d3c64ee3
RS
1045 if (!((GET_CODE (from) == MEM
1046 && ! MEM_VOLATILE_P (from)
1047 && direct_load[(int) to_mode]
1048 && ! mode_dependent_address_p (XEXP (from, 0)))
1049 || GET_CODE (from) == REG
1050 || GET_CODE (from) == SUBREG))
1051 from = force_reg (from_mode, from);
34aa3599
RK
1052 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1053 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1054 from = copy_to_reg (from);
bbf6f052
RK
1055 emit_move_insn (to, gen_lowpart (to_mode, from));
1056 return;
1057 }
1058
d3c64ee3 1059 /* Handle extension. */
bbf6f052
RK
1060 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1061 {
1062 /* Convert directly if that works. */
1063 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1064 != CODE_FOR_nothing)
1065 {
1066 emit_unop_insn (code, to, from, equiv_code);
1067 return;
1068 }
1069 else
1070 {
1071 enum machine_mode intermediate;
1072
1073 /* Search for a mode to convert via. */
1074 for (intermediate = from_mode; intermediate != VOIDmode;
1075 intermediate = GET_MODE_WIDER_MODE (intermediate))
930b4e39
RK
1076 if (((can_extend_p (to_mode, intermediate, unsignedp)
1077 != CODE_FOR_nothing)
1078 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1079 && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
bbf6f052
RK
1080 && (can_extend_p (intermediate, from_mode, unsignedp)
1081 != CODE_FOR_nothing))
1082 {
1083 convert_move (to, convert_to_mode (intermediate, from,
1084 unsignedp), unsignedp);
1085 return;
1086 }
1087
1088 /* No suitable intermediate mode. */
1089 abort ();
1090 }
1091 }
1092
1093 /* Support special truncate insns for certain modes. */
1094
1095 if (from_mode == DImode && to_mode == SImode)
1096 {
1097#ifdef HAVE_truncdisi2
1098 if (HAVE_truncdisi2)
1099 {
1100 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1101 return;
1102 }
1103#endif
1104 convert_move (to, force_reg (from_mode, from), unsignedp);
1105 return;
1106 }
1107
1108 if (from_mode == DImode && to_mode == HImode)
1109 {
1110#ifdef HAVE_truncdihi2
1111 if (HAVE_truncdihi2)
1112 {
1113 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1114 return;
1115 }
1116#endif
1117 convert_move (to, force_reg (from_mode, from), unsignedp);
1118 return;
1119 }
1120
1121 if (from_mode == DImode && to_mode == QImode)
1122 {
1123#ifdef HAVE_truncdiqi2
1124 if (HAVE_truncdiqi2)
1125 {
1126 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1127 return;
1128 }
1129#endif
1130 convert_move (to, force_reg (from_mode, from), unsignedp);
1131 return;
1132 }
1133
1134 if (from_mode == SImode && to_mode == HImode)
1135 {
1136#ifdef HAVE_truncsihi2
1137 if (HAVE_truncsihi2)
1138 {
1139 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1140 return;
1141 }
1142#endif
1143 convert_move (to, force_reg (from_mode, from), unsignedp);
1144 return;
1145 }
1146
1147 if (from_mode == SImode && to_mode == QImode)
1148 {
1149#ifdef HAVE_truncsiqi2
1150 if (HAVE_truncsiqi2)
1151 {
1152 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1153 return;
1154 }
1155#endif
1156 convert_move (to, force_reg (from_mode, from), unsignedp);
1157 return;
1158 }
1159
1160 if (from_mode == HImode && to_mode == QImode)
1161 {
1162#ifdef HAVE_trunchiqi2
1163 if (HAVE_trunchiqi2)
1164 {
1165 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1166 return;
1167 }
1168#endif
1169 convert_move (to, force_reg (from_mode, from), unsignedp);
1170 return;
1171 }
1172
b9bcad65
RK
1173 if (from_mode == TImode && to_mode == DImode)
1174 {
1175#ifdef HAVE_trunctidi2
1176 if (HAVE_trunctidi2)
1177 {
1178 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1179 return;
1180 }
1181#endif
1182 convert_move (to, force_reg (from_mode, from), unsignedp);
1183 return;
1184 }
1185
1186 if (from_mode == TImode && to_mode == SImode)
1187 {
1188#ifdef HAVE_trunctisi2
1189 if (HAVE_trunctisi2)
1190 {
1191 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1192 return;
1193 }
1194#endif
1195 convert_move (to, force_reg (from_mode, from), unsignedp);
1196 return;
1197 }
1198
1199 if (from_mode == TImode && to_mode == HImode)
1200 {
1201#ifdef HAVE_trunctihi2
1202 if (HAVE_trunctihi2)
1203 {
1204 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1205 return;
1206 }
1207#endif
1208 convert_move (to, force_reg (from_mode, from), unsignedp);
1209 return;
1210 }
1211
1212 if (from_mode == TImode && to_mode == QImode)
1213 {
1214#ifdef HAVE_trunctiqi2
1215 if (HAVE_trunctiqi2)
1216 {
1217 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1218 return;
1219 }
1220#endif
1221 convert_move (to, force_reg (from_mode, from), unsignedp);
1222 return;
1223 }
1224
bbf6f052
RK
1225 /* Handle truncation of volatile memrefs, and so on;
1226 the things that couldn't be truncated directly,
1227 and for which there was no special instruction. */
1228 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1229 {
1230 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1231 emit_move_insn (to, temp);
1232 return;
1233 }
1234
1235 /* Mode combination is not recognized. */
1236 abort ();
1237}
1238
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  /* Delegate to convert_modes, passing VOIDmode for OLDMODE;
     convert_modes recovers the real old mode from X itself when
     GET_MODE (X) is not VOIDmode.  */
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
1257
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  /* Trust X's own mode over the caller-supplied OLDMODE when X has one.  */
  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      /* High word explicitly zero, per the comment above.  */
      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.   This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  /* General case: copy through a fresh pseudo-register with conversion.  */
  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
1360\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  /* Record whether each address is already an auto-increment form.  */
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  /* A decrementing destination address means we must copy from the
     high end of the block downward.  */
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  /* Start one-past-the-end so pre-decrement lands on the last byte.  */
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  /* If unaligned access is cheap, or the alignment is at least as strict
     as any move we would emit, treat the block as maximally aligned.  */
  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      /* Find the widest integer mode strictly narrower than MAX_SIZE.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
1476
1477/* Return number of insns required to move L bytes by pieces.
1478 ALIGN (in bytes) is maximum alignment we can assume. */
1479
1480static int
1481move_by_pieces_ninsns (l, align)
1482 unsigned int l;
1483 int align;
1484{
1485 register int n_insns = 0;
e87b4f3f 1486 int max_size = MOVE_MAX + 1;
bbf6f052 1487
c7a7ac46 1488 if (! SLOW_UNALIGNED_ACCESS
e87b4f3f 1489 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
bbf6f052 1490 align = MOVE_MAX;
bbf6f052
RK
1491
1492 while (max_size > 1)
1493 {
1494 enum machine_mode mode = VOIDmode, tmode;
1495 enum insn_code icode;
1496
e7c33f54
RK
1497 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1498 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1499 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
1500 mode = tmode;
1501
1502 if (mode == VOIDmode)
1503 break;
1504
1505 icode = mov_optab->handlers[(int) mode].insn_code;
1506 if (icode != CODE_FOR_nothing
1507 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1508 GET_MODE_SIZE (mode)))
1509 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1510
1511 max_size = GET_MODE_SIZE (mode);
1512 }
1513
1514 return n_insns;
1515}
1516
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  /* Consume DATA->len in SIZE-byte chunks; a remainder is left for the
     caller to handle with a narrower mode.  */
  while (data->len >= size)
    {
      /* When copying downward, step the offset before the move.  */
      if (data->reverse) data->offset -= size;

      /* Build the destination reference: either through the auto-increment
	 address register, or at an explicit constant offset.  */
      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
	= (data->autinc_from
	   ? gen_rtx_MEM (mode, data->from_addr)
	   : copy_rtx (change_address (data->from, mode,
				       plus_constant (data->from_addr,
						      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

      /* Explicit pre-decrement of the address registers, when the target
	 has no auto-decrement addressing.  Must precede the move.  */
#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
      /* Explicit post-increment must follow the move.  */
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
1569\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  /* For a small constant length, inline scalar moves beat any other
     strategy (MOVE_RATIO is the target's cutoff).  */
  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      /* Probe the target's movstrMM patterns, narrowest mode first.  */
      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return 0;
		}
	      else
		/* Pattern expansion failed; discard any insns it emitted
		   and try a wider mode.  */
		delete_insns_since (last);
	    }
	}

      /* No suitable pattern: fall back to a library call.  */
#ifdef TARGET_MEM_FUNCTIONS
      retval
	= emit_library_call_value (memcpy_libfunc, NULL_RTX, 0,
				   ptr_mode, 3, XEXP (x, 0), Pmode,
				   XEXP (y, 0), Pmode,
				   convert_to_mode (TYPE_MODE (sizetype), size,
						    TREE_UNSIGNED (sizetype)),
				   TYPE_MODE (sizetype));
#else
      /* bcopy takes (src, dst) in that order and returns nothing.  */
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
1682\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  /* A constant the target cannot accept directly must be spilled to the
     constant pool and loaded from memory.  */
  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	/* Expansion failed; drop partial output and fall through to
	   the word-at-a-time loop.  */
	delete_insns_since (last);
    }
#endif

  /* Fallback: move one word into each register in turn.  */
  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
1726
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */


void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
		      gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      /* Shift the value left so its significant bytes occupy the
	 high-order (first-in-memory) end of the word.  */
      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	/* Expansion failed; discard any partial output and fall through
	   to the word-at-a-time loop.  */
	delete_insns_since (last);
    }
#endif

  /* Fallback: store each register into the corresponding word of X.  */
  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
1802
fffa9c1d
JW
/* Emit code to move a block Y to a block X, where X is non-consecutive
   registers represented by a PARALLEL.  Each element of the PARALLEL
   pairs a destination register with its byte offset within Y.  */

void
emit_group_load (x, y)
     rtx x, y;
{
  rtx target_reg, source;
  int i;

  if (GET_CODE (x) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (x, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (x, 0); i++)
    {
      rtx element = XVECEXP (x, 0, i);

      target_reg = XEXP (element, 0);

      if (GET_CODE (y) == MEM)
	/* Load this piece from its byte offset within the memory block.  */
	source = change_address (y, GET_MODE (target_reg),
				 plus_constant (XEXP (y, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  if (GET_MODE (target_reg) == GET_MODE (y))
	    source = y;
	  /* Allow for the target_reg to be smaller than the input register
	     to allow for AIX with 4 DF arguments after a single SI arg.  The
	     last DF argument will only load 1 word into the integer registers,
	     but load a DF value into the float registers.  */
	  else if ((GET_MODE_SIZE (GET_MODE (target_reg))
		    <= GET_MODE_SIZE (GET_MODE (y)))
		   && GET_MODE (target_reg) == word_mode)
	    /* This might be a const_double, so we can't just use SUBREG.  */
	    source = operand_subword (y, 0, 0, VOIDmode);
	  else if (GET_MODE_SIZE (GET_MODE (target_reg))
		   == GET_MODE_SIZE (GET_MODE (y)))
	    source = gen_lowpart (GET_MODE (target_reg), y);
	  else
	    abort ();
	}
      else
	abort ();

      emit_move_insn (target_reg, source);
    }
}
1858
1859/* Emit code to move a block Y to a block X, where Y is non-consecutive
1860 registers represented by a PARALLEL. */
1861
1862void
1863emit_group_store (x, y)
1864 rtx x, y;
1865{
1866 rtx source_reg, target;
1867 int i;
1868
1869 if (GET_CODE (y) != PARALLEL)
1870 abort ();
1871
1872 /* Check for a NULL entry, used to indicate that the parameter goes
1873 both on the stack and in registers. */
1874 if (XEXP (XVECEXP (y, 0, 0), 0))
1875 i = 0;
1876 else
1877 i = 1;
1878
1879 for (; i < XVECLEN (y, 0); i++)
1880 {
1881 rtx element = XVECEXP (y, 0, i);
1882
1883 source_reg = XEXP (element, 0);
1884
1885 if (GET_CODE (x) == MEM)
1886 target = change_address (x, GET_MODE (source_reg),
1887 plus_constant (XEXP (x, 0),
1888 INTVAL (XEXP (element, 1))));
1889 else if (XEXP (element, 1) == const0_rtx)
71bc0330
JW
1890 {
1891 target = x;
1892 if (GET_MODE (target) != GET_MODE (source_reg))
1893 target = gen_lowpart (GET_MODE (source_reg), target);
1894 }
fffa9c1d
JW
1895 else
1896 abort ();
1897
1898 emit_move_insn (target, source_reg);
1899 }
1900}
1901
94b25f81
RK
1902/* Add a USE expression for REG to the (possibly empty) list pointed
1903 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
1904
1905void
b3f8cf4a
RK
1906use_reg (call_fusage, reg)
1907 rtx *call_fusage, reg;
1908{
0304dfbb
DE
1909 if (GET_CODE (reg) != REG
1910 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
b3f8cf4a
RK
1911 abort();
1912
1913 *call_fusage
38a448ca
RH
1914 = gen_rtx_EXPR_LIST (VOIDmode,
1915 gen_rtx_USE (VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
1916}
1917
94b25f81
RK
1918/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1919 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
1920
1921void
0304dfbb
DE
1922use_regs (call_fusage, regno, nregs)
1923 rtx *call_fusage;
bbf6f052
RK
1924 int regno;
1925 int nregs;
1926{
0304dfbb 1927 int i;
bbf6f052 1928
0304dfbb
DE
1929 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1930 abort ();
1931
1932 for (i = 0; i < nregs; i++)
38a448ca 1933 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
bbf6f052 1934}
fffa9c1d
JW
1935
1936/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1937 PARALLEL REGS. This is for calls that pass values in multiple
1938 non-contiguous locations. The Irix 6 ABI has examples of this. */
1939
1940void
1941use_group_regs (call_fusage, regs)
1942 rtx *call_fusage;
1943 rtx regs;
1944{
1945 int i;
1946
6bd35f86
DE
1947 for (i = 0; i < XVECLEN (regs, 0); i++)
1948 {
1949 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 1950
6bd35f86
DE
1951 /* A NULL entry means the parameter goes both on the stack and in
1952 registers. This can also be a MEM for targets that pass values
1953 partially on the stack and partially in registers. */
e9a25f70 1954 if (reg != 0 && GET_CODE (reg) == REG)
6bd35f86
DE
1955 use_reg (call_fusage, reg);
1956 }
fffa9c1d 1957}
bbf6f052 1958\f
9de08200
RK
/* Generate several move instructions to clear LEN bytes of block TO.
   (A MEM rtx with BLKmode).   The caller must pass TO through
   protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
   we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     int len, align;
{
  struct clear_by_pieces data;
  rtx to_addr = XEXP (to, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.to = to;
  /* Record whether the destination address is already auto-increment.  */
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data.explicit_inc_to = 0;
  /* A decrementing address means we must clear from the high end down.  */
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data.autinc_to
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  /* Start one-past-the-end so pre-decrement lands on the last byte.  */
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  /* If unaligned access is cheap, or ALIGN is at least as strict as any
     move we would emit, treat the block as maximally aligned.  */
  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      /* Find the widest integer mode strictly narrower than MAX_SIZE.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	clear_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
2047
/* Subroutine of clear_by_pieces.  Clear as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
clear_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct clear_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1;

  /* Consume DATA->len in SIZE-byte chunks; any remainder is handled by
     the caller with a narrower mode.  */
  while (data->len >= size)
    {
      /* When clearing downward, step the offset before the store.  */
      if (data->reverse) data->offset -= size;

      /* Build the destination reference: either through the auto-increment
	 address register, or at an explicit constant offset.  */
      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      /* Explicit pre-decrement of the address register must precede
	 the store.  */
#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, const0_rtx));
      /* Explicit post-increment must follow the store.  */
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
2088\f
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
   the maximum alignment we can assume it has, measured in bytes.

   If we call a function that returns the length of the block, return it.  */

rtx
clear_storage (object, size, align)
     rtx object;
     rtx size;
     int align;
{
  rtx retval = 0;

  if (GET_MODE (object) == BLKmode)
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      /* For a small constant size, emit an inline sequence of stores.  */
      if (GET_CODE (size) == CONST_INT
	  && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
	clear_by_pieces (object, INTVAL (size), align);

      else
	{
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */

	  rtx opalign = GEN_INT (align);
	  enum machine_mode mode;

	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	       mode = GET_MODE_WIDER_MODE (mode))
	    {
	      enum insn_code code = clrstr_optab[(int) mode];

	      if (code != CODE_FOR_nothing
		  /* We don't need MODE to be narrower than
		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
		     the mode mask, as it is returned by the macro, it will
		     definitely be less than the actual mode mask.  */
		  && ((GET_CODE (size) == CONST_INT
		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
			   <= (GET_MODE_MASK (mode) >> 1)))
		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
		  /* Operand 0 (the destination) must satisfy the insn's
		     predicate, if any ...  */
		  && (insn_operand_predicate[(int) code][0] == 0
		      || (*insn_operand_predicate[(int) code][0]) (object,
								   BLKmode))
		  /* ... and so must operand 2 (the alignment).  */
		  && (insn_operand_predicate[(int) code][2] == 0
		      || (*insn_operand_predicate[(int) code][2]) (opalign,
								   VOIDmode)))
		{
		  rtx op1;
		  rtx last = get_last_insn ();
		  rtx pat;

		  op1 = convert_to_mode (mode, size, 1);
		  if (insn_operand_predicate[(int) code][1] != 0
		      && ! (*insn_operand_predicate[(int) code][1]) (op1,
								     mode))
		    op1 = copy_to_mode_reg (mode, op1);

		  pat = GEN_FCN ((int) code) (object, op1, opalign);
		  if (pat)
		    {
		      /* The pattern handled the clear inline; there is no
			 library-call return value to propagate.  */
		      emit_insn (pat);
		      return 0;
		    }
		  else
		    /* Pattern generation failed; discard any insns it
		       emitted and try a wider mode.  */
		    delete_insns_since (last);
		}
	    }


#ifdef TARGET_MEM_FUNCTIONS
	  /* Fall back to memset; its return value (the block address)
	     is what we return.  */
	  retval
	    = emit_library_call_value (memset_libfunc, NULL_RTX, 0,
				       ptr_mode, 3,
				       XEXP (object, 0), Pmode,
				       const0_rtx,
				       TYPE_MODE (integer_type_node),
				       convert_to_mode
				       (TYPE_MODE (sizetype), size,
					TREE_UNSIGNED (sizetype)),
				       TYPE_MODE (sizetype));
#else
	  /* Fall back to bzero, which returns nothing.  */
	  emit_library_call (bzero_libfunc, 0,
			     VOIDmode, 2,
			     XEXP (object, 0), Pmode,
			     convert_to_mode
			     (TYPE_MODE (integer_type_node), size,
			      TREE_UNSIGNED (integer_type_node)),
			     TYPE_MODE (integer_type_node));
#endif
	}
    }
  else
    /* Non-BLKmode: a single move of the mode's zero constant suffices.  */
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));

  return retval;
}
2191
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  /* Reject BLKmode and mode mismatches up front.  */
  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  /* A constant the target cannot accept directly must be spilled to the
     constant pool and loaded from memory.  */
  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  (A push destination is exempt from the address
     check; -fforce-addr additionally forces constant addresses into
     registers.)  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  /* NOTE(review): MODE has not changed since the identical check above,
     so this second test is unreachable; kept as a defensive guard.  */
  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}
2234
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.

   Three strategies, in order: a direct move pattern for MODE; splitting
   a complex mode into real/imaginary sub-moves; and a word-by-word
   fallback for wide modes with no move pattern.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  /* Preferred case: the target defines a move pattern for this mode.  */
  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
						    * BITS_PER_UNIT),
						   (class == MODE_COMPLEX_INT
						    ? MODE_INT : MODE_FLOAT),
						   0))
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      if (stack)
	{
	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
#else
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
#endif
	}
      else
	{
	  /* Not a push: move the two halves independently.  */
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_realpart (submode, x), gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_imagpart (submode, x), gen_imagpart (submode, y)));
	}

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
	 X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
	{
	  anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
	  x = change_address (x, VOIDmode, stack_pointer_rtx);
	}
#endif

      /* Show the output dies here.  */
      if (x != y)
	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

      /* Move one word at a time; the count rounds the mode size up to
	 whole words.  */
      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  last_insn = emit_move_insn (xpart, ypart);
	}

      return last_insn;
    }
  else
    /* A single-word mode with no move pattern: nothing we can do.  */
    abort ();
}
2352\f
2353/* Pushing data onto the stack. */
2354
/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);

  /* Adjust the stack pointer by SIZE + EXTRA, taking the cheapest route
     available for the form SIZE takes.  */
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      /* NOTE(review): this inner TEMP intentionally shadows the outer
	 declaration; it holds only the adjusted size for this branch.  */
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

  /* Compute the address of the start of the new block.  */
#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  /* Stack grows upward: the block starts SIZE (+ EXTRA when padding is
     above) bytes before the outgoing-args pointer.  */
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			 negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			 negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
2403
87e38d84 2404rtx
bbf6f052
RK
2405gen_push_operand ()
2406{
38a448ca 2407 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
bbf6f052
RK
2408}
2409
921b3427
RK
2410/* Return an rtx for the address of the beginning of a as-if-it-was-pushed
2411 block of SIZE bytes. */
2412
2413static rtx
2414get_push_address (size)
2415 int size;
2416{
2417 register rtx temp;
2418
2419 if (STACK_PUSH_CODE == POST_DEC)
38a448ca 2420 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
921b3427 2421 else if (STACK_PUSH_CODE == POST_INC)
38a448ca 2422 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
921b3427
RK
2423 else
2424 temp = stack_pointer_rtx;
2425
c85f7c16 2426 return copy_to_reg (temp);
921b3427
RK
2427}
2428
bbf6f052
RK
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far, reg_parm_stack_space)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
     int reg_parm_stack_space;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
	abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = change_address (xinner, BLKmode,
				 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
	      < MOVE_RATIO)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS)
	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
	      || PUSH_ROUNDING (align) == align)
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
			  INTVAL (size) - used, align);

	  /* -fcheck-memory-usage instrumentation; the guard keeps us from
	     recursing while emitting the checking calls themselves.  */
	  if (flag_check_memory_usage && ! in_check_memory_usage)
	    {
	      rtx temp;

	      in_check_memory_usage = 1;
	      temp = get_push_address (INTVAL(size) - used);
	      if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
		emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
				   temp, ptr_mode,
				   XEXP (xinner, 0), ptr_mode,
				   GEN_INT (INTVAL(size) - used),
				   TYPE_MODE (sizetype));
	      else
		emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
				   temp, ptr_mode,
				   GEN_INT (INTVAL(size) - used),
				   TYPE_MODE (sizetype),
				   GEN_INT (MEMORY_USE_RW),
				   TYPE_MODE (integer_type_node));
	      in_check_memory_usage = 0;
	    }
	}
      else
#endif /* PUSH_ROUNDING */
	{
	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx_PLUS (Pmode,
								args_addr,
								args_so_far),
						  skip));
	  /* -fcheck-memory-usage instrumentation (see guard above).  */
	  if (flag_check_memory_usage && ! in_check_memory_usage)
	    {
	      rtx target;

	      in_check_memory_usage = 1;
	      target = copy_to_reg (temp);
	      if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
		emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
				   target, ptr_mode,
				   XEXP (xinner, 0), ptr_mode,
				   size, TYPE_MODE (sizetype));
	      else
		emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
				   target, ptr_mode,
				   size, TYPE_MODE (sizetype),
				   GEN_INT (MEMORY_USE_RW),
				   TYPE_MODE (integer_type_node));
	      in_check_memory_usage = 0;
	    }

	  /* TEMP is the address of the block.  Copy the data there.  */
	  if (GET_CODE (size) == CONST_INT
	      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
		  < MOVE_RATIO))
	    {
	      move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
			      INTVAL (size), align);
	      goto ret;
	    }
	  else
	    {
	      /* Try the target's block-move (movstr) patterns, narrowest
		 count mode first, same probing scheme as clear_storage.  */
	      rtx opalign = GEN_INT (align);
	      enum machine_mode mode;
	      rtx target = gen_rtx (MEM, BLKmode, temp);

	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		{
		  enum insn_code code = movstr_optab[(int) mode];

		  if (code != CODE_FOR_nothing
		      && ((GET_CODE (size) == CONST_INT
			   && ((unsigned HOST_WIDE_INT) INTVAL (size)
			       <= (GET_MODE_MASK (mode) >> 1)))
			  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
		      && (insn_operand_predicate[(int) code][0] == 0
			  || ((*insn_operand_predicate[(int) code][0])
			      (target, BLKmode)))
		      && (insn_operand_predicate[(int) code][1] == 0
			  || ((*insn_operand_predicate[(int) code][1])
			      (xinner, BLKmode)))
		      && (insn_operand_predicate[(int) code][3] == 0
			  || ((*insn_operand_predicate[(int) code][3])
			      (opalign, VOIDmode))))
		    {
		      rtx op2 = convert_to_mode (mode, size, 1);
		      rtx last = get_last_insn ();
		      rtx pat;

		      if (insn_operand_predicate[(int) code][2] != 0
			  && ! ((*insn_operand_predicate[(int) code][2])
				(op2, mode)))
			op2 = copy_to_mode_reg (mode, op2);

		      pat = GEN_FCN ((int) code) (target, xinner,
						  op2, opalign);
		      if (pat)
			{
			  emit_insn (pat);
			  goto ret;
			}
		      else
			delete_insns_since (last);
		    }
		}
	    }

#ifndef ACCUMULATE_OUTGOING_ARGS
	  /* If the source is referenced relative to the stack pointer,
	     copy it to another register to stabilize it.  We do not need
	     to do this if we know that we won't be changing sp.  */

	  if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
	      || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
	    temp = copy_to_reg (temp);
#endif

	  /* Make inhibit_defer_pop nonzero around the library call
	     to force it to pop the bcopy-arguments right away.  */
	  NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memcpy_libfunc, 0,
			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#else
	  emit_library_call (bcopy_libfunc, 0,
			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
			     convert_to_mode (TYPE_MODE (integer_type_node),
					      size,
					      TREE_UNSIGNED (integer_type_node)),
			     TYPE_MODE (integer_type_node));
#endif
	  OK_DEFER_POP;
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
	 that we must make space for but need not store.  */
      /* NOTE(review): this divides PARM_BOUNDARY by BITS_PER_WORD while
	 the BLKmode branch above divides by BITS_PER_UNIT; presumably
	 deliberate since PARTIAL is counted in words — confirm.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  /* Recurse to push each word; PARTIAL is 0 here so recursion
	     terminates.  */
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)),
			  reg_parm_stack_space);
    }
  else
    {
      /* Ordinary scalar, entirely on the stack.  */
      rtx addr;
      rtx target = NULL_RTX;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
	addr = gen_push_operand ();
      else
#endif
	{
	  if (GET_CODE (args_so_far) == CONST_INT)
	    addr
	      = memory_address (mode,
				plus_constant (args_addr,
					       INTVAL (args_so_far)));
	  else
	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
						       args_so_far));
	  target = addr;
	}

      emit_move_insn (gen_rtx_MEM (mode, addr), x);

      /* -fcheck-memory-usage instrumentation (see guard above).  */
      if (flag_check_memory_usage && ! in_check_memory_usage)
	{
	  in_check_memory_usage = 1;
	  if (target == 0)
	    target = get_push_address (GET_MODE_SIZE (mode));

	  if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
	    emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
			       target, ptr_mode,
			       XEXP (x, 0), ptr_mode,
			       GEN_INT (GET_MODE_SIZE (mode)),
			       TYPE_MODE (sizetype));
	  else
	    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
			       target, ptr_mode,
			       GEN_INT (GET_MODE_SIZE (mode)),
			       TYPE_MODE (sizetype),
			       GEN_INT (MEMORY_USE_RW),
			       TYPE_MODE (integer_type_node));
	  in_check_memory_usage = 0;
	}
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x);
      else
	move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}
2847\f
bbf6f052
RK
2848/* Expand an assignment that stores the value of FROM into TO.
2849 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
2850 (This may contain a QUEUED rtx;
2851 if the value is constant, this rtx is a constant.)
2852 Otherwise, the returned value is NULL_RTX.
bbf6f052
RK
2853
2854 SUGGEST_REG is no longer actually used.
2855 It used to mean, copy the value through a register
2856 and return that register, if that is possible.
709f5be1 2857 We now use WANT_VALUE to decide whether to do this. */
bbf6f052
RK
2858
2859rtx
2860expand_assignment (to, from, want_value, suggest_reg)
2861 tree to, from;
2862 int want_value;
2863 int suggest_reg;
2864{
2865 register rtx to_rtx = 0;
2866 rtx result;
2867
2868 /* Don't crash if the lhs of the assignment was erroneous. */
2869
2870 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
2871 {
2872 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2873 return want_value ? result : NULL_RTX;
2874 }
bbf6f052
RK
2875
2876 /* Assignment of a structure component needs special treatment
2877 if the structure component's rtx is not simply a MEM.
6be58303
JW
2878 Assignment of an array element at a constant index, and assignment of
2879 an array element in an unaligned packed structure field, has the same
2880 problem. */
bbf6f052 2881
08293add
RK
2882 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
2883 || TREE_CODE (to) == ARRAY_REF)
bbf6f052
RK
2884 {
2885 enum machine_mode mode1;
2886 int bitsize;
2887 int bitpos;
7bb0943f 2888 tree offset;
bbf6f052
RK
2889 int unsignedp;
2890 int volatilep = 0;
0088fcb1 2891 tree tem;
d78d243c 2892 int alignment;
0088fcb1
RK
2893
2894 push_temp_slots ();
839c4796
RK
2895 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2896 &unsignedp, &volatilep, &alignment);
bbf6f052
RK
2897
2898 /* If we are going to use store_bit_field and extract_bit_field,
2899 make sure to_rtx will be safe for multiple use. */
2900
2901 if (mode1 == VOIDmode && want_value)
2902 tem = stabilize_reference (tem);
2903
921b3427 2904 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
7bb0943f
RS
2905 if (offset != 0)
2906 {
906c4e36 2907 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
2908
2909 if (GET_CODE (to_rtx) != MEM)
2910 abort ();
2911 to_rtx = change_address (to_rtx, VOIDmode,
38a448ca
RH
2912 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
2913 force_reg (ptr_mode, offset_rtx)));
7bb0943f 2914 }
bbf6f052
RK
2915 if (volatilep)
2916 {
2917 if (GET_CODE (to_rtx) == MEM)
01188446
JW
2918 {
2919 /* When the offset is zero, to_rtx is the address of the
2920 structure we are storing into, and hence may be shared.
2921 We must make a new MEM before setting the volatile bit. */
2922 if (offset == 0)
effbcc6a
RK
2923 to_rtx = copy_rtx (to_rtx);
2924
01188446
JW
2925 MEM_VOLATILE_P (to_rtx) = 1;
2926 }
bbf6f052
RK
2927#if 0 /* This was turned off because, when a field is volatile
2928 in an object which is not volatile, the object may be in a register,
2929 and then we would abort over here. */
2930 else
2931 abort ();
2932#endif
2933 }
2934
956d6950
JL
2935 if (TREE_CODE (to) == COMPONENT_REF
2936 && TREE_READONLY (TREE_OPERAND (to, 1)))
2937 {
8bd6ecc2 2938 if (offset == 0)
956d6950
JL
2939 to_rtx = copy_rtx (to_rtx);
2940
2941 RTX_UNCHANGING_P (to_rtx) = 1;
2942 }
2943
921b3427
RK
2944 /* Check the access. */
2945 if (flag_check_memory_usage && GET_CODE (to_rtx) == MEM)
2946 {
2947 rtx to_addr;
2948 int size;
2949 int best_mode_size;
2950 enum machine_mode best_mode;
2951
2952 best_mode = get_best_mode (bitsize, bitpos,
2953 TYPE_ALIGN (TREE_TYPE (tem)),
2954 mode1, volatilep);
2955 if (best_mode == VOIDmode)
2956 best_mode = QImode;
2957
2958 best_mode_size = GET_MODE_BITSIZE (best_mode);
2959 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
2960 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
2961 size *= GET_MODE_SIZE (best_mode);
2962
2963 /* Check the access right of the pointer. */
e9a25f70
JL
2964 if (size)
2965 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
2966 to_addr, ptr_mode,
2967 GEN_INT (size), TYPE_MODE (sizetype),
956d6950
JL
2968 GEN_INT (MEMORY_USE_WO),
2969 TYPE_MODE (integer_type_node));
921b3427
RK
2970 }
2971
bbf6f052
RK
2972 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2973 (want_value
2974 /* Spurious cast makes HPUX compiler happy. */
2975 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2976 : VOIDmode),
2977 unsignedp,
2978 /* Required alignment of containing datum. */
d78d243c 2979 alignment,
bbf6f052
RK
2980 int_size_in_bytes (TREE_TYPE (tem)));
2981 preserve_temp_slots (result);
2982 free_temp_slots ();
0088fcb1 2983 pop_temp_slots ();
bbf6f052 2984
709f5be1
RS
2985 /* If the value is meaningful, convert RESULT to the proper mode.
2986 Otherwise, return nothing. */
5ffe63ed
RS
2987 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2988 TYPE_MODE (TREE_TYPE (from)),
2989 result,
2990 TREE_UNSIGNED (TREE_TYPE (to)))
709f5be1 2991 : NULL_RTX);
bbf6f052
RK
2992 }
2993
cd1db108
RS
2994 /* If the rhs is a function call and its value is not an aggregate,
2995 call the function before we start to compute the lhs.
2996 This is needed for correct code for cases such as
2997 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
2998 requires loading up part of an address in a separate insn.
2999
3000 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3001 a promoted variable where the zero- or sign- extension needs to be done.
3002 Handling this in the normal way is safe because no computation is done
3003 before the call. */
3004 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
b35cd3c1 3005 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1ad87b63 3006 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 3007 {
0088fcb1
RK
3008 rtx value;
3009
3010 push_temp_slots ();
3011 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 3012 if (to_rtx == 0)
921b3427 3013 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
aaf87c45 3014
fffa9c1d
JW
3015 /* Handle calls that return values in multiple non-contiguous locations.
3016 The Irix 6 ABI has examples of this. */
3017 if (GET_CODE (to_rtx) == PARALLEL)
3018 emit_group_load (to_rtx, value);
3019 else if (GET_MODE (to_rtx) == BLKmode)
db3ec607 3020 emit_block_move (to_rtx, value, expr_size (from),
ff9b5bd8 3021 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
aaf87c45
JL
3022 else
3023 emit_move_insn (to_rtx, value);
cd1db108
RS
3024 preserve_temp_slots (to_rtx);
3025 free_temp_slots ();
0088fcb1 3026 pop_temp_slots ();
709f5be1 3027 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
3028 }
3029
bbf6f052
RK
3030 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3031 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3032
3033 if (to_rtx == 0)
921b3427 3034 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
bbf6f052 3035
86d38d25
RS
3036 /* Don't move directly into a return register. */
3037 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3038 {
0088fcb1
RK
3039 rtx temp;
3040
3041 push_temp_slots ();
3042 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
86d38d25
RS
3043 emit_move_insn (to_rtx, temp);
3044 preserve_temp_slots (to_rtx);
3045 free_temp_slots ();
0088fcb1 3046 pop_temp_slots ();
709f5be1 3047 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
3048 }
3049
bbf6f052
RK
3050 /* In case we are returning the contents of an object which overlaps
3051 the place the value is being stored, use a safe function when copying
3052 a value through a pointer into a structure value return block. */
3053 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3054 && current_function_returns_struct
3055 && !current_function_returns_pcc_struct)
3056 {
0088fcb1
RK
3057 rtx from_rtx, size;
3058
3059 push_temp_slots ();
33a20d10 3060 size = expr_size (from);
921b3427
RK
3061 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3062 EXPAND_MEMORY_USE_DONT);
3063
3064 /* Copy the rights of the bitmap. */
3065 if (flag_check_memory_usage)
3066 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3067 XEXP (to_rtx, 0), ptr_mode,
3068 XEXP (from_rtx, 0), ptr_mode,
3069 convert_to_mode (TYPE_MODE (sizetype),
3070 size, TREE_UNSIGNED (sizetype)),
3071 TYPE_MODE (sizetype));
bbf6f052
RK
3072
3073#ifdef TARGET_MEM_FUNCTIONS
d562e42e 3074 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
3075 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3076 XEXP (from_rtx, 0), Pmode,
0fa83258
RK
3077 convert_to_mode (TYPE_MODE (sizetype),
3078 size, TREE_UNSIGNED (sizetype)),
26ba80fc 3079 TYPE_MODE (sizetype));
bbf6f052 3080#else
d562e42e 3081 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
3082 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3083 XEXP (to_rtx, 0), Pmode,
3b6f75e2
JW
3084 convert_to_mode (TYPE_MODE (integer_type_node),
3085 size, TREE_UNSIGNED (integer_type_node)),
3086 TYPE_MODE (integer_type_node));
bbf6f052
RK
3087#endif
3088
3089 preserve_temp_slots (to_rtx);
3090 free_temp_slots ();
0088fcb1 3091 pop_temp_slots ();
709f5be1 3092 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
3093 }
3094
3095 /* Compute FROM and store the value in the rtx we got. */
3096
0088fcb1 3097 push_temp_slots ();
bbf6f052
RK
3098 result = store_expr (from, to_rtx, want_value);
3099 preserve_temp_slots (result);
3100 free_temp_slots ();
0088fcb1 3101 pop_temp_slots ();
709f5be1 3102 return want_value ? result : NULL_RTX;
bbf6f052
RK
3103}
3104
/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem.
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If WANT_VALUE is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.  */

rtx
store_expr (exp, target, want_value)
     register tree exp;
     register rtx target;
     int want_value;
{
  register rtx temp;
  /* Nonzero means the value to hand back is TARGET (or a copy of it),
     not TEMP, even though WANT_VALUE was set.  Set by the branches
     below when TEMP is known to be unsuitable as the return value.  */
  int dont_return_target = 0;

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 1), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 2), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;

      return want_value ? target : NULL_RTX;
    }
  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
	   && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
			  GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
	temp = copy_to_reg (temp);
      dont_return_target = 1;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
	{
	  /* Expand EXP into a new pseudo.  */
	  temp = gen_reg_rtx (GET_MODE (target));
	  temp = expand_expr (exp, temp, GET_MODE (target), 0);
	}
      else
	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);

      /* If target is volatile, ANSI requires accessing the value
	 *from* the target, if it is accessed.  So make that happen.
	 In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && want_value)
	dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is an scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      /* If we don't want a value, we can do the conversion inside EXP,
	 which will often result in some optimizations.  Do the conversion
	 in two steps: first change the signedness, if needed, then
	 the extend.  But don't do this if the type of EXP is a subtype
	 of something else since then the conversion might involve
	 more than just converting modes.  */
      if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
	  && TREE_TYPE (TREE_TYPE (exp)) == 0)
	{
	  if (TREE_UNSIGNED (TREE_TYPE (exp))
	      != SUBREG_PROMOTED_UNSIGNED_P (target))
	    exp
	      = convert
		(signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
					  TREE_TYPE (exp)),
		 exp);

	  exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
					SUBREG_PROMOTED_UNSIGNED_P (target)),
			 exp);
	}

      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If TEMP is a volatile MEM and we want a result value, make
	 the access now so it gets done only once.  Likewise if
	 it contains TARGET.  */
      if (GET_CODE (temp) == MEM && want_value
	  && (MEM_VOLATILE_P (temp)
	      || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
	temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	temp = convert_modes (GET_MODE (SUBREG_REG (target)),
			      TYPE_MODE (TREE_TYPE (exp)), temp,
			      SUBREG_PROMOTED_UNSIGNED_P (target));

      /* Store through the SUBREG's register in the promoted (wider)
	 mode; this branch returns immediately and skips the common
	 store logic below.  */
      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));
      return want_value ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* Return TARGET if it's a specified hardware register.
	 If TARGET is a volatile mem ref, either return TARGET
	 or return a reg copied *from* TARGET; ANSI requires this.

	 Otherwise, if TEMP is not TARGET, return TEMP
	 if it is constant (for efficiency),
	 or if we really want the correct value.  */
      if (!(target && GET_CODE (target) == REG
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  && ! rtx_equal_p (temp, target)
	  && (CONSTANT_P (temp) || want_value))
	dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));

  /* Under -fcheck-memory-usage, record or validate the write to an
     aggregate in memory via the checker runtime library.  */
  if (flag_check_memory_usage
      && GET_CODE (target) == MEM
      && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
    {
      if (GET_CODE (temp) == MEM)
	emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
			   XEXP (target, 0), ptr_mode,
			   XEXP (temp, 0), ptr_mode,
			   expr_size (exp), TYPE_MODE (sizetype));
      else
	emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
			   XEXP (target, 0), ptr_mode,
			   expr_size (exp), TYPE_MODE (sizetype),
			   GEN_INT (MEMORY_USE_WO),
			   TYPE_MODE (integer_type_node));
    }

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if nec.  */

  if (! rtx_equal_p (temp, target) && TREE_CODE (exp) != ERROR_MARK)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.
	     The string constant may be shorter than the array.
	     So copy just the string's actual length, and clear the rest.  */
	  rtx size;
	  rtx addr;

	  /* Get the size of the data type of the string,
	     which is actually the size of the target.  */
	  size = expr_size (exp);
	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
	  else
	    {
	      /* Compute the size of the data to copy from the string:
		 the smaller of the target size and the string length.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      make_tree (sizetype, size),
			      convert (sizetype,
				       build_int_2 (TREE_STRING_LENGTH (exp), 0)));
	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
					       VOIDmode, 0);
	      rtx label = 0;

	      /* Copy that much.  */
	      emit_block_move (target, temp, copy_size_rtx,
			       TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in ptr_mode.  */

	      addr = XEXP (target, 0);
	      addr = convert_modes (ptr_mode, Pmode, addr, 1);

	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  /* Constant copy size: advance ADDR past the copied
		     bytes and shrink SIZE correspondingly at compile
		     time.  */
		  addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
		  size = plus_constant (size, - TREE_STRING_LENGTH (exp));
		}
	      else
		{
		  /* Variable copy size: compute ADDR and the remaining
		     SIZE at run time, and branch around the clearing
		     code when SIZE turns out negative.  */
		  addr = force_reg (ptr_mode, addr);
		  addr = expand_binop (ptr_mode, add_optab, addr,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  size = expand_binop (ptr_mode, sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
				 GET_MODE (size), 0, 0);
		  label = gen_label_rtx ();
		  emit_jump_insn (gen_blt (label));
		}

	      if (size != const0_rtx)
		{
		  /* Be sure we can write on ADDR.  */
		  if (flag_check_memory_usage)
		    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
				       addr, ptr_mode,
				       size, TYPE_MODE (sizetype),
				       GEN_INT (MEMORY_USE_WO),
				       TYPE_MODE (integer_type_node));
#ifdef TARGET_MEM_FUNCTIONS
		  emit_library_call (memset_libfunc, 0, VOIDmode, 3,
				     addr, ptr_mode,
				     const0_rtx, TYPE_MODE (integer_type_node),
				     convert_to_mode (TYPE_MODE (sizetype),
						      size,
						      TREE_UNSIGNED (sizetype)),
				     TYPE_MODE (sizetype));
#else
		  emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
				     addr, ptr_mode,
				     convert_to_mode (TYPE_MODE (integer_type_node),
						      size,
						      TREE_UNSIGNED (integer_type_node)),
				     TYPE_MODE (integer_type_node));
#endif
		}

	      if (label)
		emit_label (label);
	    }
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	emit_group_load (target, temp);
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
      else
	emit_move_insn (target, temp);
    }

  /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if (want_value && GET_MODE (target) != BLKmode
	   && ! (GET_CODE (target) == REG
		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
3445\f
9de08200
RK
3446/* Return 1 if EXP just contains zeros. */
3447
3448static int
3449is_zeros_p (exp)
3450 tree exp;
3451{
3452 tree elt;
3453
3454 switch (TREE_CODE (exp))
3455 {
3456 case CONVERT_EXPR:
3457 case NOP_EXPR:
3458 case NON_LVALUE_EXPR:
3459 return is_zeros_p (TREE_OPERAND (exp, 0));
3460
3461 case INTEGER_CST:
3462 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3463
3464 case COMPLEX_CST:
3465 return
3466 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3467
3468 case REAL_CST:
41c9120b 3469 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
9de08200
RK
3470
3471 case CONSTRUCTOR:
e1a43f73
PB
3472 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3473 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
9de08200
RK
3474 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3475 if (! is_zeros_p (TREE_VALUE (elt)))
3476 return 0;
3477
3478 return 1;
e9a25f70
JL
3479
3480 default:
3481 return 0;
9de08200 3482 }
9de08200
RK
3483}
3484
3485/* Return 1 if EXP contains mostly (3/4) zeros. */
3486
3487static int
3488mostly_zeros_p (exp)
3489 tree exp;
3490{
9de08200
RK
3491 if (TREE_CODE (exp) == CONSTRUCTOR)
3492 {
e1a43f73
PB
3493 int elts = 0, zeros = 0;
3494 tree elt = CONSTRUCTOR_ELTS (exp);
3495 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3496 {
3497 /* If there are no ranges of true bits, it is all zero. */
3498 return elt == NULL_TREE;
3499 }
3500 for (; elt; elt = TREE_CHAIN (elt))
3501 {
3502 /* We do not handle the case where the index is a RANGE_EXPR,
3503 so the statistic will be somewhat inaccurate.
3504 We do make a more accurate count in store_constructor itself,
3505 so since this function is only used for nested array elements,
0f41302f 3506 this should be close enough. */
e1a43f73
PB
3507 if (mostly_zeros_p (TREE_VALUE (elt)))
3508 zeros++;
3509 elts++;
3510 }
9de08200
RK
3511
3512 return 4 * zeros >= 3 * elts;
3513 }
3514
3515 return is_zeros_p (exp);
3516}
3517\f
e1a43f73
PB
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (target, bitsize, bitpos,
			 mode, exp, type, cleared)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp, type;
     int cleared;
{
  /* Take the shortcut only when EXP is itself a CONSTRUCTOR landing on
     a byte boundary; store_constructor works on whole addressable
     storage, not on bit positions.  */
  if (TREE_CODE (exp) == CONSTRUCTOR
      && bitpos % BITS_PER_UNIT == 0
      /* If we have a non-zero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || GET_CODE (target) == MEM))
    {
      /* For a byte offset into a MEM, rewrite the address so the
	 recursive call sees the sub-object at offset zero.  */
      if (bitpos != 0)
	target = change_address (target, VOIDmode,
				 plus_constant (XEXP (target, 0),
						bitpos / BITS_PER_UNIT));
      store_constructor (exp, target, cleared);
    }
  else
    /* General case: let store_field handle the (possibly bit-level)
       store of EXP into the field.  */
    store_field (target, bitsize, bitpos, mode, exp,
		 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
		 int_size_in_bytes (type));
}
3555
bbf6f052 3556/* Store the value of constructor EXP into the rtx TARGET.
e1a43f73 3557 TARGET is either a REG or a MEM.
0f41302f 3558 CLEARED is true if TARGET is known to have been zero'd. */
bbf6f052
RK
3559
3560static void
e1a43f73 3561store_constructor (exp, target, cleared)
bbf6f052
RK
3562 tree exp;
3563 rtx target;
e1a43f73 3564 int cleared;
bbf6f052 3565{
4af3895e
JVA
3566 tree type = TREE_TYPE (exp);
3567
bbf6f052
RK
3568 /* We know our target cannot conflict, since safe_from_p has been called. */
3569#if 0
3570 /* Don't try copying piece by piece into a hard register
3571 since that is vulnerable to being clobbered by EXP.
3572 Instead, construct in a pseudo register and then copy it all. */
3573 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3574 {
3575 rtx temp = gen_reg_rtx (GET_MODE (target));
e1a43f73 3576 store_constructor (exp, temp, 0);
bbf6f052
RK
3577 emit_move_insn (target, temp);
3578 return;
3579 }
3580#endif
3581
e44842fe
RK
3582 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3583 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052
RK
3584 {
3585 register tree elt;
3586
4af3895e 3587 /* Inform later passes that the whole union value is dead. */
e44842fe
RK
3588 if (TREE_CODE (type) == UNION_TYPE
3589 || TREE_CODE (type) == QUAL_UNION_TYPE)
38a448ca 3590 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4af3895e
JVA
3591
3592 /* If we are building a static constructor into a register,
3593 set the initial value as zero so we can fold the value into
67225c15
RK
3594 a constant. But if more than one register is involved,
3595 this probably loses. */
3596 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3597 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200
RK
3598 {
3599 if (! cleared)
e9a25f70 3600 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4af3895e 3601
9de08200
RK
3602 cleared = 1;
3603 }
3604
3605 /* If the constructor has fewer fields than the structure
3606 or if we are initializing the structure to mostly zeros,
bbf6f052 3607 clear the whole structure first. */
9de08200
RK
3608 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3609 != list_length (TYPE_FIELDS (type)))
3610 || mostly_zeros_p (exp))
3611 {
3612 if (! cleared)
3613 clear_storage (target, expr_size (exp),
3614 TYPE_ALIGN (type) / BITS_PER_UNIT);
3615
3616 cleared = 1;
3617 }
bbf6f052
RK
3618 else
3619 /* Inform later passes that the old value is dead. */
38a448ca 3620 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
3621
3622 /* Store each element of the constructor into
3623 the corresponding field of TARGET. */
3624
3625 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3626 {
3627 register tree field = TREE_PURPOSE (elt);
3628 register enum machine_mode mode;
3629 int bitsize;
b50d17a1 3630 int bitpos = 0;
bbf6f052 3631 int unsignedp;
b50d17a1
RK
3632 tree pos, constant = 0, offset = 0;
3633 rtx to_rtx = target;
bbf6f052 3634
f32fd778
RS
3635 /* Just ignore missing fields.
3636 We cleared the whole structure, above,
3637 if any fields are missing. */
3638 if (field == 0)
3639 continue;
3640
e1a43f73
PB
3641 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3642 continue;
9de08200 3643
bbf6f052
RK
3644 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3645 unsignedp = TREE_UNSIGNED (field);
3646 mode = DECL_MODE (field);
3647 if (DECL_BIT_FIELD (field))
3648 mode = VOIDmode;
3649
b50d17a1
RK
3650 pos = DECL_FIELD_BITPOS (field);
3651 if (TREE_CODE (pos) == INTEGER_CST)
3652 constant = pos;
3653 else if (TREE_CODE (pos) == PLUS_EXPR
3654 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3655 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3656 else
3657 offset = pos;
3658
3659 if (constant)
cd11b87e 3660 bitpos = TREE_INT_CST_LOW (constant);
b50d17a1
RK
3661
3662 if (offset)
3663 {
3664 rtx offset_rtx;
3665
3666 if (contains_placeholder_p (offset))
3667 offset = build (WITH_RECORD_EXPR, sizetype,
956d6950 3668 offset, make_tree (TREE_TYPE (exp), target));
bbf6f052 3669
b50d17a1
RK
3670 offset = size_binop (FLOOR_DIV_EXPR, offset,
3671 size_int (BITS_PER_UNIT));
bbf6f052 3672
b50d17a1
RK
3673 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3674 if (GET_CODE (to_rtx) != MEM)
3675 abort ();
3676
3677 to_rtx
3678 = change_address (to_rtx, VOIDmode,
38a448ca 3679 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
88f63c77 3680 force_reg (ptr_mode, offset_rtx)));
b50d17a1 3681 }
cf04eb80
RK
3682 if (TREE_READONLY (field))
3683 {
9151b3bf 3684 if (GET_CODE (to_rtx) == MEM)
effbcc6a
RK
3685 to_rtx = copy_rtx (to_rtx);
3686
cf04eb80
RK
3687 RTX_UNCHANGING_P (to_rtx) = 1;
3688 }
3689
e1a43f73
PB
3690 store_constructor_field (to_rtx, bitsize, bitpos,
3691 mode, TREE_VALUE (elt), type, cleared);
bbf6f052
RK
3692 }
3693 }
4af3895e 3694 else if (TREE_CODE (type) == ARRAY_TYPE)
bbf6f052
RK
3695 {
3696 register tree elt;
3697 register int i;
e1a43f73 3698 int need_to_clear;
4af3895e 3699 tree domain = TYPE_DOMAIN (type);
906c4e36
RK
3700 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3701 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4af3895e 3702 tree elttype = TREE_TYPE (type);
bbf6f052 3703
e1a43f73 3704 /* If the constructor has fewer elements than the array,
38e01259 3705 clear the whole array first. Similarly if this is
e1a43f73
PB
3706 static constructor of a non-BLKmode object. */
3707 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3708 need_to_clear = 1;
3709 else
3710 {
3711 HOST_WIDE_INT count = 0, zero_count = 0;
3712 need_to_clear = 0;
3713 /* This loop is a more accurate version of the loop in
3714 mostly_zeros_p (it handles RANGE_EXPR in an index).
3715 It is also needed to check for missing elements. */
3716 for (elt = CONSTRUCTOR_ELTS (exp);
3717 elt != NULL_TREE;
df0faff1 3718 elt = TREE_CHAIN (elt))
e1a43f73
PB
3719 {
3720 tree index = TREE_PURPOSE (elt);
3721 HOST_WIDE_INT this_node_count;
3722 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3723 {
3724 tree lo_index = TREE_OPERAND (index, 0);
3725 tree hi_index = TREE_OPERAND (index, 1);
3726 if (TREE_CODE (lo_index) != INTEGER_CST
3727 || TREE_CODE (hi_index) != INTEGER_CST)
3728 {
3729 need_to_clear = 1;
3730 break;
3731 }
3732 this_node_count = TREE_INT_CST_LOW (hi_index)
3733 - TREE_INT_CST_LOW (lo_index) + 1;
3734 }
3735 else
3736 this_node_count = 1;
3737 count += this_node_count;
3738 if (mostly_zeros_p (TREE_VALUE (elt)))
3739 zero_count += this_node_count;
3740 }
8e958f70 3741 /* Clear the entire array first if there are any missing elements,
0f41302f 3742 or if the incidence of zero elements is >= 75%. */
8e958f70
PB
3743 if (count < maxelt - minelt + 1
3744 || 4 * zero_count >= 3 * count)
e1a43f73
PB
3745 need_to_clear = 1;
3746 }
3747 if (need_to_clear)
9de08200
RK
3748 {
3749 if (! cleared)
3750 clear_storage (target, expr_size (exp),
3751 TYPE_ALIGN (type) / BITS_PER_UNIT);
9de08200
RK
3752 cleared = 1;
3753 }
bbf6f052
RK
3754 else
3755 /* Inform later passes that the old value is dead. */
38a448ca 3756 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
3757
3758 /* Store each element of the constructor into
3759 the corresponding element of TARGET, determined
3760 by counting the elements. */
3761 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3762 elt;
3763 elt = TREE_CHAIN (elt), i++)
3764 {
3765 register enum machine_mode mode;
3766 int bitsize;
3767 int bitpos;
3768 int unsignedp;
e1a43f73 3769 tree value = TREE_VALUE (elt);
03dc44a6
RS
3770 tree index = TREE_PURPOSE (elt);
3771 rtx xtarget = target;
bbf6f052 3772
e1a43f73
PB
3773 if (cleared && is_zeros_p (value))
3774 continue;
9de08200 3775
bbf6f052
RK
3776 mode = TYPE_MODE (elttype);
3777 bitsize = GET_MODE_BITSIZE (mode);
3778 unsignedp = TREE_UNSIGNED (elttype);
3779
e1a43f73
PB
3780 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3781 {
3782 tree lo_index = TREE_OPERAND (index, 0);
3783 tree hi_index = TREE_OPERAND (index, 1);
3784 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3785 struct nesting *loop;
05c0b405
PB
3786 HOST_WIDE_INT lo, hi, count;
3787 tree position;
e1a43f73 3788
0f41302f 3789 /* If the range is constant and "small", unroll the loop. */
e1a43f73 3790 if (TREE_CODE (lo_index) == INTEGER_CST
05c0b405
PB
3791 && TREE_CODE (hi_index) == INTEGER_CST
3792 && (lo = TREE_INT_CST_LOW (lo_index),
3793 hi = TREE_INT_CST_LOW (hi_index),
3794 count = hi - lo + 1,
3795 (GET_CODE (target) != MEM
3796 || count <= 2
3797 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3798 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3799 <= 40 * 8))))
e1a43f73 3800 {
05c0b405
PB
3801 lo -= minelt; hi -= minelt;
3802 for (; lo <= hi; lo++)
e1a43f73 3803 {
05c0b405
PB
3804 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3805 store_constructor_field (target, bitsize, bitpos,
3806 mode, value, type, cleared);
e1a43f73
PB
3807 }
3808 }
3809 else
3810 {
3811 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3812 loop_top = gen_label_rtx ();
3813 loop_end = gen_label_rtx ();
3814
3815 unsignedp = TREE_UNSIGNED (domain);
3816
3817 index = build_decl (VAR_DECL, NULL_TREE, domain);
3818
3819 DECL_RTL (index) = index_r
3820 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3821 &unsignedp, 0));
3822
3823 if (TREE_CODE (value) == SAVE_EXPR
3824 && SAVE_EXPR_RTL (value) == 0)
3825 {
0f41302f
MS
3826 /* Make sure value gets expanded once before the
3827 loop. */
e1a43f73
PB
3828 expand_expr (value, const0_rtx, VOIDmode, 0);
3829 emit_queue ();
3830 }
3831 store_expr (lo_index, index_r, 0);
3832 loop = expand_start_loop (0);
3833
0f41302f 3834 /* Assign value to element index. */
e1a43f73
PB
3835 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3836 size_int (BITS_PER_UNIT));
3837 position = size_binop (MULT_EXPR,
3838 size_binop (MINUS_EXPR, index,
3839 TYPE_MIN_VALUE (domain)),
3840 position);
3841 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 3842 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
e1a43f73
PB
3843 xtarget = change_address (target, mode, addr);
3844 if (TREE_CODE (value) == CONSTRUCTOR)
05c0b405 3845 store_constructor (value, xtarget, cleared);
e1a43f73
PB
3846 else
3847 store_expr (value, xtarget, 0);
3848
3849 expand_exit_loop_if_false (loop,
3850 build (LT_EXPR, integer_type_node,
3851 index, hi_index));
3852
3853 expand_increment (build (PREINCREMENT_EXPR,
3854 TREE_TYPE (index),
7b8b9722 3855 index, integer_one_node), 0, 0);
e1a43f73
PB
3856 expand_end_loop ();
3857 emit_label (loop_end);
3858
3859 /* Needed by stupid register allocation. to extend the
3860 lifetime of pseudo-regs used by target past the end
3861 of the loop. */
38a448ca 3862 emit_insn (gen_rtx_USE (GET_MODE (target), target));
e1a43f73
PB
3863 }
3864 }
3865 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
5b6c44ff 3866 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
03dc44a6 3867 {
e1a43f73 3868 rtx pos_rtx, addr;
03dc44a6
RS
3869 tree position;
3870
5b6c44ff
RK
3871 if (index == 0)
3872 index = size_int (i);
3873
e1a43f73
PB
3874 if (minelt)
3875 index = size_binop (MINUS_EXPR, index,
3876 TYPE_MIN_VALUE (domain));
5b6c44ff
RK
3877 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3878 size_int (BITS_PER_UNIT));
3879 position = size_binop (MULT_EXPR, index, position);
03dc44a6 3880 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 3881 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
03dc44a6 3882 xtarget = change_address (target, mode, addr);
e1a43f73 3883 store_expr (value, xtarget, 0);
03dc44a6
RS
3884 }
3885 else
3886 {
3887 if (index != 0)
7c314719 3888 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
03dc44a6
RS
3889 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3890 else
3891 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
e1a43f73
PB
3892 store_constructor_field (target, bitsize, bitpos,
3893 mode, value, type, cleared);
03dc44a6 3894 }
bbf6f052
RK
3895 }
3896 }
071a6595
PB
3897 /* set constructor assignments */
3898 else if (TREE_CODE (type) == SET_TYPE)
3899 {
e1a43f73 3900 tree elt = CONSTRUCTOR_ELTS (exp);
e1a43f73 3901 int nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
3902 tree domain = TYPE_DOMAIN (type);
3903 tree domain_min, domain_max, bitlength;
3904
9faa82d8 3905 /* The default implementation strategy is to extract the constant
071a6595
PB
3906 parts of the constructor, use that to initialize the target,
3907 and then "or" in whatever non-constant ranges we need in addition.
3908
3909 If a large set is all zero or all ones, it is
3910 probably better to set it using memset (if available) or bzero.
3911 Also, if a large set has just a single range, it may also be
3912 better to first clear all the first clear the set (using
0f41302f 3913 bzero/memset), and set the bits we want. */
071a6595 3914
0f41302f 3915 /* Check for all zeros. */
e1a43f73 3916 if (elt == NULL_TREE)
071a6595 3917 {
e1a43f73
PB
3918 if (!cleared)
3919 clear_storage (target, expr_size (exp),
3920 TYPE_ALIGN (type) / BITS_PER_UNIT);
071a6595
PB
3921 return;
3922 }
3923
071a6595
PB
3924 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3925 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3926 bitlength = size_binop (PLUS_EXPR,
3927 size_binop (MINUS_EXPR, domain_max, domain_min),
3928 size_one_node);
3929
e1a43f73
PB
3930 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3931 abort ();
3932 nbits = TREE_INT_CST_LOW (bitlength);
3933
3934 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3935 are "complicated" (more than one range), initialize (the
3936 constant parts) by copying from a constant. */
3937 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3938 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 3939 {
b4ee5a72
PB
3940 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3941 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
0f41302f 3942 char *bit_buffer = (char *) alloca (nbits);
b4ee5a72
PB
3943 HOST_WIDE_INT word = 0;
3944 int bit_pos = 0;
3945 int ibit = 0;
0f41302f 3946 int offset = 0; /* In bytes from beginning of set. */
e1a43f73 3947 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 3948 for (;;)
071a6595 3949 {
b4ee5a72
PB
3950 if (bit_buffer[ibit])
3951 {
b09f3348 3952 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
3953 word |= (1 << (set_word_size - 1 - bit_pos));
3954 else
3955 word |= 1 << bit_pos;
3956 }
3957 bit_pos++; ibit++;
3958 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 3959 {
e1a43f73
PB
3960 if (word != 0 || ! cleared)
3961 {
3962 rtx datum = GEN_INT (word);
3963 rtx to_rtx;
0f41302f
MS
3964 /* The assumption here is that it is safe to use
3965 XEXP if the set is multi-word, but not if
3966 it's single-word. */
e1a43f73
PB
3967 if (GET_CODE (target) == MEM)
3968 {
3969 to_rtx = plus_constant (XEXP (target, 0), offset);
3970 to_rtx = change_address (target, mode, to_rtx);
3971 }
3972 else if (offset == 0)
3973 to_rtx = target;
3974 else
3975 abort ();
3976 emit_move_insn (to_rtx, datum);
3977 }
b4ee5a72
PB
3978 if (ibit == nbits)
3979 break;
3980 word = 0;
3981 bit_pos = 0;
3982 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
3983 }
3984 }
071a6595 3985 }
e1a43f73
PB
3986 else if (!cleared)
3987 {
0f41302f 3988 /* Don't bother clearing storage if the set is all ones. */
e1a43f73
PB
3989 if (TREE_CHAIN (elt) != NULL_TREE
3990 || (TREE_PURPOSE (elt) == NULL_TREE
3991 ? nbits != 1
3992 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3993 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3994 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3995 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3996 != nbits))))
3997 clear_storage (target, expr_size (exp),
3998 TYPE_ALIGN (type) / BITS_PER_UNIT);
3999 }
4000
4001 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595
PB
4002 {
4003 /* start of range of element or NULL */
4004 tree startbit = TREE_PURPOSE (elt);
4005 /* end of range of element, or element value */
4006 tree endbit = TREE_VALUE (elt);
381127e8 4007#ifdef TARGET_MEM_FUNCTIONS
071a6595 4008 HOST_WIDE_INT startb, endb;
381127e8 4009#endif
071a6595
PB
4010 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4011
4012 bitlength_rtx = expand_expr (bitlength,
4013 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4014
4015 /* handle non-range tuple element like [ expr ] */
4016 if (startbit == NULL_TREE)
4017 {
4018 startbit = save_expr (endbit);
4019 endbit = startbit;
4020 }
4021 startbit = convert (sizetype, startbit);
4022 endbit = convert (sizetype, endbit);
4023 if (! integer_zerop (domain_min))
4024 {
4025 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4026 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4027 }
4028 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4029 EXPAND_CONST_ADDRESS);
4030 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4031 EXPAND_CONST_ADDRESS);
4032
4033 if (REG_P (target))
4034 {
4035 targetx = assign_stack_temp (GET_MODE (target),
4036 GET_MODE_SIZE (GET_MODE (target)),
4037 0);
4038 emit_move_insn (targetx, target);
4039 }
4040 else if (GET_CODE (target) == MEM)
4041 targetx = target;
4042 else
4043 abort ();
4044
4045#ifdef TARGET_MEM_FUNCTIONS
4046 /* Optimization: If startbit and endbit are
9faa82d8 4047 constants divisible by BITS_PER_UNIT,
0f41302f 4048 call memset instead. */
071a6595
PB
4049 if (TREE_CODE (startbit) == INTEGER_CST
4050 && TREE_CODE (endbit) == INTEGER_CST
4051 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 4052 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 4053 {
071a6595
PB
4054 emit_library_call (memset_libfunc, 0,
4055 VOIDmode, 3,
e1a43f73
PB
4056 plus_constant (XEXP (targetx, 0),
4057 startb / BITS_PER_UNIT),
071a6595 4058 Pmode,
3b6f75e2 4059 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 4060 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 4061 TYPE_MODE (sizetype));
071a6595
PB
4062 }
4063 else
4064#endif
4065 {
38a448ca 4066 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
071a6595
PB
4067 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4068 bitlength_rtx, TYPE_MODE (sizetype),
4069 startbit_rtx, TYPE_MODE (sizetype),
4070 endbit_rtx, TYPE_MODE (sizetype));
4071 }
4072 if (REG_P (target))
4073 emit_move_insn (target, targetx);
4074 }
4075 }
bbf6f052
RK
4076
4077 else
4078 abort ();
4079}
4080
4081/* Store the value of EXP (an expression tree)
4082 into a subfield of TARGET which has mode MODE and occupies
4083 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4084 If MODE is VOIDmode, it means that we are storing into a bit-field.
4085
4086 If VALUE_MODE is VOIDmode, return nothing in particular.
4087 UNSIGNEDP is not used in this case.
4088
4089 Otherwise, return an rtx for the value stored. This rtx
4090 has mode VALUE_MODE if that is convenient to do.
4091 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4092
4093 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4094 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
4095
4096static rtx
4097store_field (target, bitsize, bitpos, mode, exp, value_mode,
4098 unsignedp, align, total_size)
4099 rtx target;
4100 int bitsize, bitpos;
4101 enum machine_mode mode;
4102 tree exp;
4103 enum machine_mode value_mode;
4104 int unsignedp;
4105 int align;
4106 int total_size;
4107{
906c4e36 4108 HOST_WIDE_INT width_mask = 0;
bbf6f052 4109
e9a25f70
JL
4110 if (TREE_CODE (exp) == ERROR_MARK)
4111 return const0_rtx;
4112
906c4e36
RK
4113 if (bitsize < HOST_BITS_PER_WIDE_INT)
4114 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
4115
4116 /* If we are storing into an unaligned field of an aligned union that is
4117 in a register, we may have the mode of TARGET being an integer mode but
4118 MODE == BLKmode. In that case, get an aligned object whose size and
4119 alignment are the same as TARGET and store TARGET into it (we can avoid
4120 the store if the field being stored is the entire width of TARGET). Then
4121 call ourselves recursively to store the field into a BLKmode version of
4122 that object. Finally, load from the object into TARGET. This is not
4123 very efficient in general, but should only be slightly more expensive
4124 than the otherwise-required unaligned accesses. Perhaps this can be
4125 cleaned up later. */
4126
4127 if (mode == BLKmode
4128 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4129 {
4130 rtx object = assign_stack_temp (GET_MODE (target),
4131 GET_MODE_SIZE (GET_MODE (target)), 0);
4132 rtx blk_object = copy_rtx (object);
4133
24a13950
JW
4134 MEM_IN_STRUCT_P (object) = 1;
4135 MEM_IN_STRUCT_P (blk_object) = 1;
bbf6f052
RK
4136 PUT_MODE (blk_object, BLKmode);
4137
4138 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4139 emit_move_insn (object, target);
4140
4141 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4142 align, total_size);
4143
46093b97
RS
4144 /* Even though we aren't returning target, we need to
4145 give it the updated value. */
bbf6f052
RK
4146 emit_move_insn (target, object);
4147
46093b97 4148 return blk_object;
bbf6f052
RK
4149 }
4150
4151 /* If the structure is in a register or if the component
4152 is a bit field, we cannot use addressing to access it.
4153 Use bit-field techniques or SUBREG to store in it. */
4154
4fa52007
RK
4155 if (mode == VOIDmode
4156 || (mode != BLKmode && ! direct_store[(int) mode])
4157 || GET_CODE (target) == REG
c980ac49 4158 || GET_CODE (target) == SUBREG
ccc98036
RS
4159 /* If the field isn't aligned enough to store as an ordinary memref,
4160 store it as a bit field. */
c7a7ac46 4161 || (SLOW_UNALIGNED_ACCESS
ccc98036 4162 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
c7a7ac46 4163 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
bbf6f052 4164 {
906c4e36 4165 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 4166
ef19912d
RK
4167 /* If BITSIZE is narrower than the size of the type of EXP
4168 we will be narrowing TEMP. Normally, what's wanted are the
4169 low-order bits. However, if EXP's type is a record and this is
4170 big-endian machine, we want the upper BITSIZE bits. */
4171 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4172 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4173 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4174 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4175 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4176 - bitsize),
4177 temp, 1);
4178
bbd6cf73
RK
4179 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4180 MODE. */
4181 if (mode != VOIDmode && mode != BLKmode
4182 && mode != TYPE_MODE (TREE_TYPE (exp)))
4183 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4184
a281e72d
RK
4185 /* If the modes of TARGET and TEMP are both BLKmode, both
4186 must be in memory and BITPOS must be aligned on a byte
4187 boundary. If so, we simply do a block copy. */
4188 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4189 {
4190 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4191 || bitpos % BITS_PER_UNIT != 0)
4192 abort ();
4193
0086427c
RK
4194 target = change_address (target, VOIDmode,
4195 plus_constant (XEXP (target, 0),
a281e72d
RK
4196 bitpos / BITS_PER_UNIT));
4197
4198 emit_block_move (target, temp,
4199 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4200 / BITS_PER_UNIT),
4201 1);
4202
4203 return value_mode == VOIDmode ? const0_rtx : target;
4204 }
4205
bbf6f052
RK
4206 /* Store the value in the bitfield. */
4207 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4208 if (value_mode != VOIDmode)
4209 {
4210 /* The caller wants an rtx for the value. */
4211 /* If possible, avoid refetching from the bitfield itself. */
4212 if (width_mask != 0
4213 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 4214 {
9074de27 4215 tree count;
5c4d7cfb 4216 enum machine_mode tmode;
86a2c12a 4217
5c4d7cfb
RS
4218 if (unsignedp)
4219 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4220 tmode = GET_MODE (temp);
86a2c12a
RS
4221 if (tmode == VOIDmode)
4222 tmode = value_mode;
5c4d7cfb
RS
4223 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4224 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4225 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4226 }
bbf6f052 4227 return extract_bit_field (target, bitsize, bitpos, unsignedp,
906c4e36
RK
4228 NULL_RTX, value_mode, 0, align,
4229 total_size);
bbf6f052
RK
4230 }
4231 return const0_rtx;
4232 }
4233 else
4234 {
4235 rtx addr = XEXP (target, 0);
4236 rtx to_rtx;
4237
4238 /* If a value is wanted, it must be the lhs;
4239 so make the address stable for multiple use. */
4240
4241 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4242 && ! CONSTANT_ADDRESS_P (addr)
4243 /* A frame-pointer reference is already stable. */
4244 && ! (GET_CODE (addr) == PLUS
4245 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4246 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4247 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4248 addr = copy_to_reg (addr);
4249
4250 /* Now build a reference to just the desired component. */
4251
effbcc6a
RK
4252 to_rtx = copy_rtx (change_address (target, mode,
4253 plus_constant (addr,
4254 (bitpos
4255 / BITS_PER_UNIT))));
bbf6f052
RK
4256 MEM_IN_STRUCT_P (to_rtx) = 1;
4257
4258 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4259 }
4260}
4261\f
4262/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4263 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
742920c7 4264 ARRAY_REFs and find the ultimate containing object, which we return.
bbf6f052
RK
4265
4266 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4267 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
4268 If the position of the field is variable, we store a tree
4269 giving the variable offset (in units) in *POFFSET.
4270 This offset is in addition to the bit position.
4271 If the position is not variable, we store 0 in *POFFSET.
839c4796
RK
4272 We set *PALIGNMENT to the alignment in bytes of the address that will be
4273 computed. This is the alignment of the thing we return if *POFFSET
4274 is zero, but can be more less strictly aligned if *POFFSET is nonzero.
bbf6f052
RK
4275
4276 If any of the extraction expressions is volatile,
4277 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4278
4279 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4280 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
4281 is redundant.
4282
4283 If the field describes a variable-sized object, *PMODE is set to
4284 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
839c4796 4285 this case, but the address of the object can be found. */
bbf6f052
RK
4286
4287tree
4969d05d 4288get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
839c4796 4289 punsignedp, pvolatilep, palignment)
bbf6f052
RK
4290 tree exp;
4291 int *pbitsize;
4292 int *pbitpos;
7bb0943f 4293 tree *poffset;
bbf6f052
RK
4294 enum machine_mode *pmode;
4295 int *punsignedp;
4296 int *pvolatilep;
839c4796 4297 int *palignment;
bbf6f052 4298{
b50d17a1 4299 tree orig_exp = exp;
bbf6f052
RK
4300 tree size_tree = 0;
4301 enum machine_mode mode = VOIDmode;
742920c7 4302 tree offset = integer_zero_node;
839c4796 4303 int alignment = BIGGEST_ALIGNMENT;
bbf6f052
RK
4304
4305 if (TREE_CODE (exp) == COMPONENT_REF)
4306 {
4307 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4308 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4309 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4310 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4311 }
4312 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4313 {
4314 size_tree = TREE_OPERAND (exp, 1);
4315 *punsignedp = TREE_UNSIGNED (exp);
4316 }
4317 else
4318 {
4319 mode = TYPE_MODE (TREE_TYPE (exp));
4320 *pbitsize = GET_MODE_BITSIZE (mode);
4321 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4322 }
4323
4324 if (size_tree)
4325 {
4326 if (TREE_CODE (size_tree) != INTEGER_CST)
e7c33f54
RK
4327 mode = BLKmode, *pbitsize = -1;
4328 else
4329 *pbitsize = TREE_INT_CST_LOW (size_tree);
bbf6f052
RK
4330 }
4331
4332 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4333 and find the ultimate containing object. */
4334
4335 *pbitpos = 0;
4336
4337 while (1)
4338 {
7bb0943f 4339 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
bbf6f052 4340 {
7bb0943f
RS
4341 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4342 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4343 : TREE_OPERAND (exp, 2));
e6d8c385 4344 tree constant = integer_zero_node, var = pos;
bbf6f052 4345
e7f3c83f
RK
4346 /* If this field hasn't been filled in yet, don't go
4347 past it. This should only happen when folding expressions
4348 made during type construction. */
4349 if (pos == 0)
4350 break;
4351
e6d8c385
RK
4352 /* Assume here that the offset is a multiple of a unit.
4353 If not, there should be an explicitly added constant. */
4354 if (TREE_CODE (pos) == PLUS_EXPR
4355 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4356 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
7bb0943f 4357 else if (TREE_CODE (pos) == INTEGER_CST)
e6d8c385
RK
4358 constant = pos, var = integer_zero_node;
4359
4360 *pbitpos += TREE_INT_CST_LOW (constant);
8d8c9ba9
RK
4361 offset = size_binop (PLUS_EXPR, offset,
4362 size_binop (EXACT_DIV_EXPR, var,
4363 size_int (BITS_PER_UNIT)));
bbf6f052 4364 }
bbf6f052 4365
742920c7 4366 else if (TREE_CODE (exp) == ARRAY_REF)
bbf6f052 4367 {
742920c7
RK
4368 /* This code is based on the code in case ARRAY_REF in expand_expr
4369 below. We assume here that the size of an array element is
4370 always an integral multiple of BITS_PER_UNIT. */
4371
4372 tree index = TREE_OPERAND (exp, 1);
4373 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4374 tree low_bound
4375 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4376 tree index_type = TREE_TYPE (index);
4377
4c08eef0 4378 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
742920c7 4379 {
4c08eef0
RK
4380 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4381 index);
742920c7
RK
4382 index_type = TREE_TYPE (index);
4383 }
4384
ca0f2220
RH
4385 if (! integer_zerop (low_bound))
4386 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4387
f8dac6eb
R
4388 if (TREE_CODE (index) == INTEGER_CST)
4389 {
4390 index = convert (sbitsizetype, index);
4391 index_type = TREE_TYPE (index);
4392 }
4393
4394 index = fold (build (MULT_EXPR, sbitsizetype, index,
4395 convert (sbitsizetype,
0d15e60c 4396 TYPE_SIZE (TREE_TYPE (exp)))));
742920c7
RK
4397
4398 if (TREE_CODE (index) == INTEGER_CST
4399 && TREE_INT_CST_HIGH (index) == 0)
4400 *pbitpos += TREE_INT_CST_LOW (index);
4401 else
956d6950 4402 {
e5e809f4
JL
4403 if (contains_placeholder_p (index))
4404 index = build (WITH_RECORD_EXPR, sizetype, index, exp);
4405
956d6950 4406 offset = size_binop (PLUS_EXPR, offset,
e5e809f4
JL
4407 size_binop (FLOOR_DIV_EXPR, index,
4408 size_int (BITS_PER_UNIT)));
956d6950 4409 }
bbf6f052
RK
4410 }
4411 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4412 && ! ((TREE_CODE (exp) == NOP_EXPR
4413 || TREE_CODE (exp) == CONVERT_EXPR)
7f62854a
RK
4414 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4415 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4416 != UNION_TYPE))
bbf6f052
RK
4417 && (TYPE_MODE (TREE_TYPE (exp))
4418 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4419 break;
7bb0943f
RS
4420
4421 /* If any reference in the chain is volatile, the effect is volatile. */
4422 if (TREE_THIS_VOLATILE (exp))
4423 *pvolatilep = 1;
839c4796
RK
4424
4425 /* If the offset is non-constant already, then we can't assume any
4426 alignment more than the alignment here. */
4427 if (! integer_zerop (offset))
4428 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4429
bbf6f052
RK
4430 exp = TREE_OPERAND (exp, 0);
4431 }
4432
839c4796
RK
4433 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4434 alignment = MIN (alignment, DECL_ALIGN (exp));
9293498f 4435 else if (TREE_TYPE (exp) != 0)
839c4796
RK
4436 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4437
742920c7
RK
4438 if (integer_zerop (offset))
4439 offset = 0;
4440
b50d17a1
RK
4441 if (offset != 0 && contains_placeholder_p (offset))
4442 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4443
bbf6f052 4444 *pmode = mode;
7bb0943f 4445 *poffset = offset;
839c4796 4446 *palignment = alignment / BITS_PER_UNIT;
bbf6f052
RK
4447 return exp;
4448}
921b3427
RK
4449
4450/* Subroutine of expand_exp: compute memory_usage from modifier. */
4451static enum memory_use_mode
4452get_memory_usage_from_modifier (modifier)
4453 enum expand_modifier modifier;
4454{
4455 switch (modifier)
4456 {
4457 case EXPAND_NORMAL:
e5e809f4 4458 case EXPAND_SUM:
921b3427
RK
4459 return MEMORY_USE_RO;
4460 break;
4461 case EXPAND_MEMORY_USE_WO:
4462 return MEMORY_USE_WO;
4463 break;
4464 case EXPAND_MEMORY_USE_RW:
4465 return MEMORY_USE_RW;
4466 break;
921b3427 4467 case EXPAND_MEMORY_USE_DONT:
e5e809f4
JL
4468 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4469 MEMORY_USE_DONT, because they are modifiers to a call of
4470 expand_expr in the ADDR_EXPR case of expand_expr. */
921b3427 4471 case EXPAND_CONST_ADDRESS:
e5e809f4 4472 case EXPAND_INITIALIZER:
921b3427
RK
4473 return MEMORY_USE_DONT;
4474 case EXPAND_MEMORY_USE_BAD:
4475 default:
4476 abort ();
4477 }
4478}
bbf6f052
RK
4479\f
4480/* Given an rtx VALUE that may contain additions and multiplications,
4481 return an equivalent value that just refers to a register or memory.
4482 This is done by generating instructions to perform the arithmetic
c45a13a6
RK
4483 and returning a pseudo-register containing the value.
4484
4485 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
4486
4487rtx
4488force_operand (value, target)
4489 rtx value, target;
4490{
4491 register optab binoptab = 0;
4492 /* Use a temporary to force order of execution of calls to
4493 `force_operand'. */
4494 rtx tmp;
4495 register rtx op2;
4496 /* Use subtarget as the target for operand 0 of a binary operation. */
4497 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4498
4499 if (GET_CODE (value) == PLUS)
4500 binoptab = add_optab;
4501 else if (GET_CODE (value) == MINUS)
4502 binoptab = sub_optab;
4503 else if (GET_CODE (value) == MULT)
4504 {
4505 op2 = XEXP (value, 1);
4506 if (!CONSTANT_P (op2)
4507 && !(GET_CODE (op2) == REG && op2 != subtarget))
4508 subtarget = 0;
4509 tmp = force_operand (XEXP (value, 0), subtarget);
4510 return expand_mult (GET_MODE (value), tmp,
906c4e36 4511 force_operand (op2, NULL_RTX),
bbf6f052
RK
4512 target, 0);
4513 }
4514
4515 if (binoptab)
4516 {
4517 op2 = XEXP (value, 1);
4518 if (!CONSTANT_P (op2)
4519 && !(GET_CODE (op2) == REG && op2 != subtarget))
4520 subtarget = 0;
4521 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4522 {
4523 binoptab = add_optab;
4524 op2 = negate_rtx (GET_MODE (value), op2);
4525 }
4526
4527 /* Check for an addition with OP2 a constant integer and our first
4528 operand a PLUS of a virtual register and something else. In that
4529 case, we want to emit the sum of the virtual register and the
4530 constant first and then add the other value. This allows virtual
4531 register instantiation to simply modify the constant rather than
4532 creating another one around this addition. */
4533 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4534 && GET_CODE (XEXP (value, 0)) == PLUS
4535 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4536 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4537 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4538 {
4539 rtx temp = expand_binop (GET_MODE (value), binoptab,
4540 XEXP (XEXP (value, 0), 0), op2,
4541 subtarget, 0, OPTAB_LIB_WIDEN);
4542 return expand_binop (GET_MODE (value), binoptab, temp,
4543 force_operand (XEXP (XEXP (value, 0), 1), 0),
4544 target, 0, OPTAB_LIB_WIDEN);
4545 }
4546
4547 tmp = force_operand (XEXP (value, 0), subtarget);
4548 return expand_binop (GET_MODE (value), binoptab, tmp,
906c4e36 4549 force_operand (op2, NULL_RTX),
bbf6f052 4550 target, 0, OPTAB_LIB_WIDEN);
8008b228 4551 /* We give UNSIGNEDP = 0 to expand_binop
bbf6f052
RK
4552 because the only operations we are expanding here are signed ones. */
4553 }
4554 return value;
4555}
4556\f
4557/* Subroutine of expand_expr:
4558 save the non-copied parts (LIST) of an expr (LHS), and return a list
4559 which can restore these values to their previous values,
4560 should something modify their storage. */
4561
4562static tree
4563save_noncopied_parts (lhs, list)
4564 tree lhs;
4565 tree list;
4566{
4567 tree tail;
4568 tree parts = 0;
4569
4570 for (tail = list; tail; tail = TREE_CHAIN (tail))
4571 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4572 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4573 else
4574 {
4575 tree part = TREE_VALUE (tail);
4576 tree part_type = TREE_TYPE (part);
906c4e36 4577 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
06089a8b 4578 rtx target = assign_temp (part_type, 0, 1, 1);
bbf6f052 4579 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
906c4e36 4580 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
bbf6f052 4581 parts = tree_cons (to_be_saved,
906c4e36
RK
4582 build (RTL_EXPR, part_type, NULL_TREE,
4583 (tree) target),
bbf6f052
RK
4584 parts);
4585 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4586 }
4587 return parts;
4588}
4589
4590/* Subroutine of expand_expr:
4591 record the non-copied parts (LIST) of an expr (LHS), and return a list
4592 which specifies the initial values of these parts. */
4593
4594static tree
4595init_noncopied_parts (lhs, list)
4596 tree lhs;
4597 tree list;
4598{
4599 tree tail;
4600 tree parts = 0;
4601
4602 for (tail = list; tail; tail = TREE_CHAIN (tail))
4603 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4604 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4605 else
4606 {
4607 tree part = TREE_VALUE (tail);
4608 tree part_type = TREE_TYPE (part);
906c4e36 4609 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
4610 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4611 }
4612 return parts;
4613}
4614
4615/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
4616 EXP can reference X, which is being modified. TOP_P is nonzero if this
4617 call is going to be used to determine whether we need a temporary
4618 for EXP, as opposed to a recursive call to this function. */
bbf6f052
RK
4619
4620static int
e5e809f4 4621safe_from_p (x, exp, top_p)
bbf6f052
RK
4622 rtx x;
4623 tree exp;
e5e809f4 4624 int top_p;
bbf6f052
RK
4625{
4626 rtx exp_rtl = 0;
4627 int i, nops;
4628
6676e72f
RK
4629 if (x == 0
4630 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
4631 have no way of allocating temporaries of variable size
4632 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4633 So we assume here that something at a higher level has prevented a
f4510f37 4634 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4
JL
4635 do this when X is BLKmode and when we are at the top level. */
4636 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
f4510f37 4637 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
4638 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4639 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4640 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4641 != INTEGER_CST)
f4510f37 4642 && GET_MODE (x) == BLKmode))
bbf6f052
RK
4643 return 1;
4644
4645 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4646 find the underlying pseudo. */
4647 if (GET_CODE (x) == SUBREG)
4648 {
4649 x = SUBREG_REG (x);
4650 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4651 return 0;
4652 }
4653
4654 /* If X is a location in the outgoing argument area, it is always safe. */
4655 if (GET_CODE (x) == MEM
4656 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4657 || (GET_CODE (XEXP (x, 0)) == PLUS
4658 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4659 return 1;
4660
4661 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4662 {
4663 case 'd':
4664 exp_rtl = DECL_RTL (exp);
4665 break;
4666
4667 case 'c':
4668 return 1;
4669
4670 case 'x':
4671 if (TREE_CODE (exp) == TREE_LIST)
f32fd778 4672 return ((TREE_VALUE (exp) == 0
e5e809f4 4673 || safe_from_p (x, TREE_VALUE (exp), 0))
bbf6f052 4674 && (TREE_CHAIN (exp) == 0
e5e809f4 4675 || safe_from_p (x, TREE_CHAIN (exp), 0)));
bbf6f052
RK
4676 else
4677 return 0;
4678
4679 case '1':
e5e809f4 4680 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052
RK
4681
4682 case '2':
4683 case '<':
e5e809f4
JL
4684 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
4685 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
bbf6f052
RK
4686
4687 case 'e':
4688 case 'r':
4689 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4690 the expression. If it is set, we conflict iff we are that rtx or
4691 both are in memory. Otherwise, we check all operands of the
4692 expression recursively. */
4693
4694 switch (TREE_CODE (exp))
4695 {
4696 case ADDR_EXPR:
e44842fe 4697 return (staticp (TREE_OPERAND (exp, 0))
e5e809f4
JL
4698 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
4699 || TREE_STATIC (exp));
bbf6f052
RK
4700
4701 case INDIRECT_REF:
4702 if (GET_CODE (x) == MEM)
4703 return 0;
4704 break;
4705
4706 case CALL_EXPR:
4707 exp_rtl = CALL_EXPR_RTL (exp);
4708 if (exp_rtl == 0)
4709 {
4710 /* Assume that the call will clobber all hard registers and
4711 all of memory. */
4712 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4713 || GET_CODE (x) == MEM)
4714 return 0;
4715 }
4716
4717 break;
4718
4719 case RTL_EXPR:
3bb5826a
RK
4720 /* If a sequence exists, we would have to scan every instruction
4721 in the sequence to see if it was safe. This is probably not
4722 worthwhile. */
4723 if (RTL_EXPR_SEQUENCE (exp))
bbf6f052
RK
4724 return 0;
4725
3bb5826a 4726 exp_rtl = RTL_EXPR_RTL (exp);
bbf6f052
RK
4727 break;
4728
4729 case WITH_CLEANUP_EXPR:
4730 exp_rtl = RTL_EXPR_RTL (exp);
4731 break;
4732
5dab5552 4733 case CLEANUP_POINT_EXPR:
e5e809f4 4734 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5dab5552 4735
bbf6f052
RK
4736 case SAVE_EXPR:
4737 exp_rtl = SAVE_EXPR_RTL (exp);
4738 break;
4739
8129842c
RS
4740 case BIND_EXPR:
4741 /* The only operand we look at is operand 1. The rest aren't
4742 part of the expression. */
e5e809f4 4743 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
8129842c 4744
bbf6f052 4745 case METHOD_CALL_EXPR:
0f41302f 4746 /* This takes a rtx argument, but shouldn't appear here. */
bbf6f052 4747 abort ();
e9a25f70
JL
4748
4749 default:
4750 break;
bbf6f052
RK
4751 }
4752
4753 /* If we have an rtx, we do not need to scan our operands. */
4754 if (exp_rtl)
4755 break;
4756
4757 nops = tree_code_length[(int) TREE_CODE (exp)];
4758 for (i = 0; i < nops; i++)
4759 if (TREE_OPERAND (exp, i) != 0
e5e809f4 4760 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052
RK
4761 return 0;
4762 }
4763
4764 /* If we have an rtl, find any enclosed object. Then see if we conflict
4765 with it. */
4766 if (exp_rtl)
4767 {
4768 if (GET_CODE (exp_rtl) == SUBREG)
4769 {
4770 exp_rtl = SUBREG_REG (exp_rtl);
4771 if (GET_CODE (exp_rtl) == REG
4772 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4773 return 0;
4774 }
4775
4776 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4777 are memory and EXP is not readonly. */
4778 return ! (rtx_equal_p (x, exp_rtl)
4779 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4780 && ! TREE_READONLY (exp)));
4781 }
4782
4783 /* If we reach here, it is safe. */
4784 return 1;
4785}
4786
4787/* Subroutine of expand_expr: return nonzero iff EXP is an
4788 expression whose type is statically determinable. */
4789
4790static int
4791fixed_type_p (exp)
4792 tree exp;
4793{
4794 if (TREE_CODE (exp) == PARM_DECL
4795 || TREE_CODE (exp) == VAR_DECL
4796 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4797 || TREE_CODE (exp) == COMPONENT_REF
4798 || TREE_CODE (exp) == ARRAY_REF)
4799 return 1;
4800 return 0;
4801}
01c8a7c8
RK
4802
4803/* Subroutine of expand_expr: return rtx if EXP is a
4804 variable or parameter; else return 0. */
4805
4806static rtx
4807var_rtx (exp)
4808 tree exp;
4809{
4810 STRIP_NOPS (exp);
4811 switch (TREE_CODE (exp))
4812 {
4813 case PARM_DECL:
4814 case VAR_DECL:
4815 return DECL_RTL (exp);
4816 default:
4817 return 0;
4818 }
4819}
bbf6f052
RK
4820\f
4821/* expand_expr: generate code for computing expression EXP.
4822 An rtx for the computed value is returned. The value is never null.
4823 In the case of a void EXP, const0_rtx is returned.
4824
4825 The value may be stored in TARGET if TARGET is nonzero.
4826 TARGET is just a suggestion; callers must assume that
4827 the rtx returned may not be the same as TARGET.
4828
4829 If TARGET is CONST0_RTX, it means that the value will be ignored.
4830
4831 If TMODE is not VOIDmode, it suggests generating the
4832 result in mode TMODE. But this is done only when convenient.
4833 Otherwise, TMODE is ignored and the value generated in its natural mode.
4834 TMODE is just a suggestion; callers must assume that
4835 the rtx returned may not have mode TMODE.
4836
d6a5ac33
RK
4837 Note that TARGET may have neither TMODE nor MODE. In that case, it
4838 probably will not be used.
bbf6f052
RK
4839
4840 If MODIFIER is EXPAND_SUM then when EXP is an addition
4841 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4842 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4843 products as above, or REG or MEM, or constant.
4844 Ordinarily in such cases we would output mul or add instructions
4845 and then return a pseudo reg containing the sum.
4846
4847 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4848 it also marks a label as absolutely required (it can't be dead).
26fcb35a 4849 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
4850 This is used for outputting expressions used in initializers.
4851
4852 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4853 with a constant address even if that address is not normally legitimate.
4854 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
bbf6f052
RK
4855
4856rtx
4857expand_expr (exp, target, tmode, modifier)
4858 register tree exp;
4859 rtx target;
4860 enum machine_mode tmode;
4861 enum expand_modifier modifier;
4862{
b50d17a1
RK
4863 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4864 This is static so it will be accessible to our recursive callees. */
4865 static tree placeholder_list = 0;
bbf6f052
RK
4866 register rtx op0, op1, temp;
4867 tree type = TREE_TYPE (exp);
4868 int unsignedp = TREE_UNSIGNED (type);
4869 register enum machine_mode mode = TYPE_MODE (type);
4870 register enum tree_code code = TREE_CODE (exp);
4871 optab this_optab;
4872 /* Use subtarget as the target for operand 0 of a binary operation. */
4873 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4874 rtx original_target = target;
dd27116b
RK
4875 int ignore = (target == const0_rtx
4876 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4d87de75
RS
4877 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4878 || code == COND_EXPR)
dd27116b 4879 && TREE_CODE (type) == VOID_TYPE));
bbf6f052 4880 tree context;
921b3427
RK
4881 /* Used by check-memory-usage to make modifier read only. */
4882 enum expand_modifier ro_modifier;
bbf6f052 4883
921b3427
RK
4884 /* Make a read-only version of the modifier. */
4885 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
4886 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
4887 ro_modifier = modifier;
4888 else
4889 ro_modifier = EXPAND_NORMAL;
ca695ac9 4890
bbf6f052
RK
4891 /* Don't use hard regs as subtargets, because the combiner
4892 can only handle pseudo regs. */
4893 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4894 subtarget = 0;
4895 /* Avoid subtargets inside loops,
4896 since they hide some invariant expressions. */
4897 if (preserve_subexpressions_p ())
4898 subtarget = 0;
4899
dd27116b
RK
4900 /* If we are going to ignore this result, we need only do something
4901 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
4902 is, short-circuit the most common cases here. Note that we must
4903 not call expand_expr with anything but const0_rtx in case this
4904 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 4905
dd27116b
RK
4906 if (ignore)
4907 {
4908 if (! TREE_SIDE_EFFECTS (exp))
4909 return const0_rtx;
4910
4911 /* Ensure we reference a volatile object even if value is ignored. */
4912 if (TREE_THIS_VOLATILE (exp)
4913 && TREE_CODE (exp) != FUNCTION_DECL
4914 && mode != VOIDmode && mode != BLKmode)
4915 {
921b3427 4916 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
dd27116b
RK
4917 if (GET_CODE (temp) == MEM)
4918 temp = copy_to_reg (temp);
4919 return const0_rtx;
4920 }
4921
4922 if (TREE_CODE_CLASS (code) == '1')
4923 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 4924 VOIDmode, ro_modifier);
dd27116b
RK
4925 else if (TREE_CODE_CLASS (code) == '2'
4926 || TREE_CODE_CLASS (code) == '<')
4927 {
921b3427
RK
4928 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
4929 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
dd27116b
RK
4930 return const0_rtx;
4931 }
4932 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4933 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4934 /* If the second operand has no side effects, just evaluate
0f41302f 4935 the first. */
dd27116b 4936 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 4937 VOIDmode, ro_modifier);
dd27116b 4938
90764a87 4939 target = 0;
dd27116b 4940 }
bbf6f052 4941
e44842fe
RK
4942 /* If will do cse, generate all results into pseudo registers
4943 since 1) that allows cse to find more things
4944 and 2) otherwise cse could produce an insn the machine
4945 cannot support. */
4946
bbf6f052
RK
4947 if (! cse_not_expected && mode != BLKmode && target
4948 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4949 target = subtarget;
4950
bbf6f052
RK
4951 switch (code)
4952 {
4953 case LABEL_DECL:
b552441b
RS
4954 {
4955 tree function = decl_function_context (exp);
4956 /* Handle using a label in a containing function. */
d0977240
RK
4957 if (function != current_function_decl
4958 && function != inline_function_decl && function != 0)
b552441b
RS
4959 {
4960 struct function *p = find_function_data (function);
4961 /* Allocate in the memory associated with the function
4962 that the label is in. */
4963 push_obstacks (p->function_obstack,
4964 p->function_maybepermanent_obstack);
4965
38a448ca
RH
4966 p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
4967 label_rtx (exp),
4968 p->forced_labels);
b552441b
RS
4969 pop_obstacks ();
4970 }
4971 else if (modifier == EXPAND_INITIALIZER)
38a448ca
RH
4972 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
4973 label_rtx (exp), forced_labels);
4974 temp = gen_rtx_MEM (FUNCTION_MODE,
4975 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
d0977240
RK
4976 if (function != current_function_decl
4977 && function != inline_function_decl && function != 0)
26fcb35a
RS
4978 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4979 return temp;
b552441b 4980 }
bbf6f052
RK
4981
4982 case PARM_DECL:
4983 if (DECL_RTL (exp) == 0)
4984 {
4985 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 4986 return CONST0_RTX (mode);
bbf6f052
RK
4987 }
4988
0f41302f 4989 /* ... fall through ... */
d6a5ac33 4990
bbf6f052 4991 case VAR_DECL:
2dca20cd
RS
4992 /* If a static var's type was incomplete when the decl was written,
4993 but the type is complete now, lay out the decl now. */
4994 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4995 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4996 {
4997 push_obstacks_nochange ();
4998 end_temporary_allocation ();
4999 layout_decl (exp, 0);
5000 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5001 pop_obstacks ();
5002 }
d6a5ac33 5003
921b3427
RK
5004 /* Only check automatic variables. Currently, function arguments are
5005 not checked (this can be done at compile-time with prototypes).
5006 Aggregates are not checked. */
5007 if (flag_check_memory_usage && code == VAR_DECL
5008 && GET_CODE (DECL_RTL (exp)) == MEM
5009 && DECL_CONTEXT (exp) != NULL_TREE
5010 && ! TREE_STATIC (exp)
5011 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5012 {
5013 enum memory_use_mode memory_usage;
5014 memory_usage = get_memory_usage_from_modifier (modifier);
5015
5016 if (memory_usage != MEMORY_USE_DONT)
5017 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5018 XEXP (DECL_RTL (exp), 0), ptr_mode,
5019 GEN_INT (int_size_in_bytes (type)),
5020 TYPE_MODE (sizetype),
956d6950
JL
5021 GEN_INT (memory_usage),
5022 TYPE_MODE (integer_type_node));
921b3427
RK
5023 }
5024
0f41302f 5025 /* ... fall through ... */
d6a5ac33 5026
2dca20cd 5027 case FUNCTION_DECL:
bbf6f052
RK
5028 case RESULT_DECL:
5029 if (DECL_RTL (exp) == 0)
5030 abort ();
d6a5ac33 5031
e44842fe
RK
5032 /* Ensure variable marked as used even if it doesn't go through
5033 a parser. If it hasn't be used yet, write out an external
5034 definition. */
5035 if (! TREE_USED (exp))
5036 {
5037 assemble_external (exp);
5038 TREE_USED (exp) = 1;
5039 }
5040
dc6d66b3
RK
5041 /* Show we haven't gotten RTL for this yet. */
5042 temp = 0;
5043
bbf6f052
RK
5044 /* Handle variables inherited from containing functions. */
5045 context = decl_function_context (exp);
5046
5047 /* We treat inline_function_decl as an alias for the current function
5048 because that is the inline function whose vars, types, etc.
5049 are being merged into the current function.
5050 See expand_inline_function. */
d6a5ac33 5051
bbf6f052
RK
5052 if (context != 0 && context != current_function_decl
5053 && context != inline_function_decl
5054 /* If var is static, we don't need a static chain to access it. */
5055 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5056 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5057 {
5058 rtx addr;
5059
5060 /* Mark as non-local and addressable. */
81feeecb 5061 DECL_NONLOCAL (exp) = 1;
38ee6ed9
JM
5062 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5063 abort ();
bbf6f052
RK
5064 mark_addressable (exp);
5065 if (GET_CODE (DECL_RTL (exp)) != MEM)
5066 abort ();
5067 addr = XEXP (DECL_RTL (exp), 0);
5068 if (GET_CODE (addr) == MEM)
38a448ca
RH
5069 addr = gen_rtx_MEM (Pmode,
5070 fix_lexical_addr (XEXP (addr, 0), exp));
bbf6f052
RK
5071 else
5072 addr = fix_lexical_addr (addr, exp);
dc6d66b3 5073 temp = change_address (DECL_RTL (exp), mode, addr);
bbf6f052 5074 }
4af3895e 5075
bbf6f052
RK
5076 /* This is the case of an array whose size is to be determined
5077 from its initializer, while the initializer is still being parsed.
5078 See expand_decl. */
d6a5ac33 5079
dc6d66b3
RK
5080 else if (GET_CODE (DECL_RTL (exp)) == MEM
5081 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5082 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
bbf6f052 5083 XEXP (DECL_RTL (exp), 0));
d6a5ac33
RK
5084
5085 /* If DECL_RTL is memory, we are in the normal case and either
5086 the address is not valid or it is not a register and -fforce-addr
5087 is specified, get the address into a register. */
5088
dc6d66b3
RK
5089 else if (GET_CODE (DECL_RTL (exp)) == MEM
5090 && modifier != EXPAND_CONST_ADDRESS
5091 && modifier != EXPAND_SUM
5092 && modifier != EXPAND_INITIALIZER
5093 && (! memory_address_p (DECL_MODE (exp),
5094 XEXP (DECL_RTL (exp), 0))
5095 || (flag_force_addr
5096 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5097 temp = change_address (DECL_RTL (exp), VOIDmode,
d6a5ac33 5098 copy_rtx (XEXP (DECL_RTL (exp), 0)));
1499e0a8 5099
dc6d66b3
RK
5100 /* If we got something, return it. But first, set the alignment
5101 the address is a register. */
5102 if (temp != 0)
5103 {
5104 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5105 mark_reg_pointer (XEXP (temp, 0),
5106 DECL_ALIGN (exp) / BITS_PER_UNIT);
5107
5108 return temp;
5109 }
5110
1499e0a8
RK
5111 /* If the mode of DECL_RTL does not match that of the decl, it
5112 must be a promoted value. We return a SUBREG of the wanted mode,
5113 but mark it so that we know that it was already extended. */
5114
5115 if (GET_CODE (DECL_RTL (exp)) == REG
5116 && GET_MODE (DECL_RTL (exp)) != mode)
5117 {
1499e0a8
RK
5118 /* Get the signedness used for this variable. Ensure we get the
5119 same mode we got when the variable was declared. */
78911e8b
RK
5120 if (GET_MODE (DECL_RTL (exp))
5121 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
1499e0a8
RK
5122 abort ();
5123
38a448ca 5124 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
1499e0a8
RK
5125 SUBREG_PROMOTED_VAR_P (temp) = 1;
5126 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5127 return temp;
5128 }
5129
bbf6f052
RK
5130 return DECL_RTL (exp);
5131
5132 case INTEGER_CST:
5133 return immed_double_const (TREE_INT_CST_LOW (exp),
5134 TREE_INT_CST_HIGH (exp),
5135 mode);
5136
5137 case CONST_DECL:
921b3427
RK
5138 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5139 EXPAND_MEMORY_USE_BAD);
bbf6f052
RK
5140
5141 case REAL_CST:
5142 /* If optimized, generate immediate CONST_DOUBLE
5143 which will be turned into memory by reload if necessary.
5144
5145 We used to force a register so that loop.c could see it. But
5146 this does not allow gen_* patterns to perform optimizations with
5147 the constants. It also produces two insns in cases like "x = 1.0;".
5148 On most machines, floating-point constants are not permitted in
5149 many insns, so we'd end up copying it to a register in any case.
5150
5151 Now, we do the copying in expand_binop, if appropriate. */
5152 return immed_real_const (exp);
5153
5154 case COMPLEX_CST:
5155 case STRING_CST:
5156 if (! TREE_CST_RTL (exp))
5157 output_constant_def (exp);
5158
5159 /* TREE_CST_RTL probably contains a constant address.
5160 On RISC machines where a constant address isn't valid,
5161 make some insns to get that address into a register. */
5162 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5163 && modifier != EXPAND_CONST_ADDRESS
5164 && modifier != EXPAND_INITIALIZER
5165 && modifier != EXPAND_SUM
d6a5ac33
RK
5166 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5167 || (flag_force_addr
5168 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
bbf6f052
RK
5169 return change_address (TREE_CST_RTL (exp), VOIDmode,
5170 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5171 return TREE_CST_RTL (exp);
5172
bf1e5319
APB
5173 case EXPR_WITH_FILE_LOCATION:
5174 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5175 emit_line_note (EXPR_WFL_FILENAME (exp), EXPR_WFL_LINENO (exp));
5176 return expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5177
bbf6f052
RK
5178 case SAVE_EXPR:
5179 context = decl_function_context (exp);
d6a5ac33 5180
d0977240
RK
5181 /* If this SAVE_EXPR was at global context, assume we are an
5182 initialization function and move it into our context. */
5183 if (context == 0)
5184 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5185
bbf6f052
RK
5186 /* We treat inline_function_decl as an alias for the current function
5187 because that is the inline function whose vars, types, etc.
5188 are being merged into the current function.
5189 See expand_inline_function. */
5190 if (context == current_function_decl || context == inline_function_decl)
5191 context = 0;
5192
5193 /* If this is non-local, handle it. */
5194 if (context)
5195 {
d0977240
RK
5196 /* The following call just exists to abort if the context is
5197 not of a containing function. */
5198 find_function_data (context);
5199
bbf6f052
RK
5200 temp = SAVE_EXPR_RTL (exp);
5201 if (temp && GET_CODE (temp) == REG)
5202 {
5203 put_var_into_stack (exp);
5204 temp = SAVE_EXPR_RTL (exp);
5205 }
5206 if (temp == 0 || GET_CODE (temp) != MEM)
5207 abort ();
5208 return change_address (temp, mode,
5209 fix_lexical_addr (XEXP (temp, 0), exp));
5210 }
5211 if (SAVE_EXPR_RTL (exp) == 0)
5212 {
06089a8b
RK
5213 if (mode == VOIDmode)
5214 temp = const0_rtx;
5215 else
e5e809f4 5216 temp = assign_temp (type, 3, 0, 0);
1499e0a8 5217
bbf6f052 5218 SAVE_EXPR_RTL (exp) = temp;
bbf6f052 5219 if (!optimize && GET_CODE (temp) == REG)
38a448ca
RH
5220 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5221 save_expr_regs);
ff78f773
RK
5222
5223 /* If the mode of TEMP does not match that of the expression, it
5224 must be a promoted value. We pass store_expr a SUBREG of the
5225 wanted mode but mark it so that we know that it was already
5226 extended. Note that `unsignedp' was modified above in
5227 this case. */
5228
5229 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5230 {
38a448ca 5231 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
ff78f773
RK
5232 SUBREG_PROMOTED_VAR_P (temp) = 1;
5233 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5234 }
5235
4c7a0be9 5236 if (temp == const0_rtx)
921b3427
RK
5237 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5238 EXPAND_MEMORY_USE_BAD);
4c7a0be9
JW
5239 else
5240 store_expr (TREE_OPERAND (exp, 0), temp, 0);
e5e809f4
JL
5241
5242 TREE_USED (exp) = 1;
bbf6f052 5243 }
1499e0a8
RK
5244
5245 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5246 must be a promoted value. We return a SUBREG of the wanted mode,
0f41302f 5247 but mark it so that we know that it was already extended. */
1499e0a8
RK
5248
5249 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5250 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5251 {
e70d22c8
RK
5252 /* Compute the signedness and make the proper SUBREG. */
5253 promote_mode (type, mode, &unsignedp, 0);
38a448ca 5254 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
1499e0a8
RK
5255 SUBREG_PROMOTED_VAR_P (temp) = 1;
5256 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5257 return temp;
5258 }
5259
bbf6f052
RK
5260 return SAVE_EXPR_RTL (exp);
5261
679163cf
MS
5262 case UNSAVE_EXPR:
5263 {
5264 rtx temp;
5265 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5266 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5267 return temp;
5268 }
5269
b50d17a1 5270 case PLACEHOLDER_EXPR:
e9a25f70
JL
5271 {
5272 tree placeholder_expr;
5273
5274 /* If there is an object on the head of the placeholder list,
e5e809f4 5275 see if some object in it of type TYPE or a pointer to it. For
e9a25f70
JL
5276 further information, see tree.def. */
5277 for (placeholder_expr = placeholder_list;
5278 placeholder_expr != 0;
5279 placeholder_expr = TREE_CHAIN (placeholder_expr))
5280 {
5281 tree need_type = TYPE_MAIN_VARIANT (type);
5282 tree object = 0;
5283 tree old_list = placeholder_list;
5284 tree elt;
5285
e5e809f4
JL
5286 /* Find the outermost reference that is of the type we want.
5287 If none, see if any object has a type that is a pointer to
5288 the type we want. */
5289 for (elt = TREE_PURPOSE (placeholder_expr);
5290 elt != 0 && object == 0;
5291 elt
5292 = ((TREE_CODE (elt) == COMPOUND_EXPR
5293 || TREE_CODE (elt) == COND_EXPR)
5294 ? TREE_OPERAND (elt, 1)
5295 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5296 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5297 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5298 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5299 ? TREE_OPERAND (elt, 0) : 0))
5300 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5301 object = elt;
e9a25f70 5302
e9a25f70 5303 for (elt = TREE_PURPOSE (placeholder_expr);
e5e809f4
JL
5304 elt != 0 && object == 0;
5305 elt
5306 = ((TREE_CODE (elt) == COMPOUND_EXPR
5307 || TREE_CODE (elt) == COND_EXPR)
5308 ? TREE_OPERAND (elt, 1)
5309 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5310 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5311 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5312 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5313 ? TREE_OPERAND (elt, 0) : 0))
5314 if (POINTER_TYPE_P (TREE_TYPE (elt))
5315 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
e9a25f70 5316 == need_type))
e5e809f4 5317 object = build1 (INDIRECT_REF, need_type, elt);
dc500fa1 5318
e9a25f70 5319 if (object != 0)
2cde2255 5320 {
e9a25f70
JL
5321 /* Expand this object skipping the list entries before
5322 it was found in case it is also a PLACEHOLDER_EXPR.
5323 In that case, we want to translate it using subsequent
5324 entries. */
5325 placeholder_list = TREE_CHAIN (placeholder_expr);
5326 temp = expand_expr (object, original_target, tmode,
5327 ro_modifier);
5328 placeholder_list = old_list;
5329 return temp;
2cde2255 5330 }
e9a25f70
JL
5331 }
5332 }
b50d17a1
RK
5333
5334 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5335 abort ();
5336
5337 case WITH_RECORD_EXPR:
5338 /* Put the object on the placeholder list, expand our first operand,
5339 and pop the list. */
5340 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5341 placeholder_list);
5342 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
921b3427 5343 tmode, ro_modifier);
b50d17a1
RK
5344 placeholder_list = TREE_CHAIN (placeholder_list);
5345 return target;
5346
bbf6f052 5347 case EXIT_EXPR:
e44842fe
RK
5348 expand_exit_loop_if_false (NULL_PTR,
5349 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
5350 return const0_rtx;
5351
5352 case LOOP_EXPR:
0088fcb1 5353 push_temp_slots ();
bbf6f052
RK
5354 expand_start_loop (1);
5355 expand_expr_stmt (TREE_OPERAND (exp, 0));
5356 expand_end_loop ();
0088fcb1 5357 pop_temp_slots ();
bbf6f052
RK
5358
5359 return const0_rtx;
5360
5361 case BIND_EXPR:
5362 {
5363 tree vars = TREE_OPERAND (exp, 0);
5364 int vars_need_expansion = 0;
5365
5366 /* Need to open a binding contour here because
e976b8b2 5367 if there are any cleanups they must be contained here. */
bbf6f052
RK
5368 expand_start_bindings (0);
5369
2df53c0b
RS
5370 /* Mark the corresponding BLOCK for output in its proper place. */
5371 if (TREE_OPERAND (exp, 2) != 0
5372 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5373 insert_block (TREE_OPERAND (exp, 2));
bbf6f052
RK
5374
5375 /* If VARS have not yet been expanded, expand them now. */
5376 while (vars)
5377 {
5378 if (DECL_RTL (vars) == 0)
5379 {
5380 vars_need_expansion = 1;
5381 expand_decl (vars);
5382 }
5383 expand_decl_init (vars);
5384 vars = TREE_CHAIN (vars);
5385 }
5386
921b3427 5387 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
bbf6f052
RK
5388
5389 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5390
5391 return temp;
5392 }
5393
5394 case RTL_EXPR:
83b853c9
JM
5395 if (RTL_EXPR_SEQUENCE (exp))
5396 {
5397 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5398 abort ();
5399 emit_insns (RTL_EXPR_SEQUENCE (exp));
5400 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5401 }
99310285 5402 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
ca814259 5403 free_temps_for_rtl_expr (exp);
bbf6f052
RK
5404 return RTL_EXPR_RTL (exp);
5405
5406 case CONSTRUCTOR:
dd27116b
RK
5407 /* If we don't need the result, just ensure we evaluate any
5408 subexpressions. */
5409 if (ignore)
5410 {
5411 tree elt;
5412 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
921b3427
RK
5413 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
5414 EXPAND_MEMORY_USE_BAD);
dd27116b
RK
5415 return const0_rtx;
5416 }
3207b172 5417
4af3895e
JVA
5418 /* All elts simple constants => refer to a constant in memory. But
5419 if this is a non-BLKmode mode, let it store a field at a time
5420 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 5421 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
5422 store directly into the target unless the type is large enough
5423 that memcpy will be used. If we are making an initializer and
3207b172 5424 all operands are constant, put it in memory as well. */
dd27116b 5425 else if ((TREE_STATIC (exp)
3207b172 5426 && ((mode == BLKmode
e5e809f4 5427 && ! (target != 0 && safe_from_p (target, exp, 1)))
d720b9d1
RK
5428 || TREE_ADDRESSABLE (exp)
5429 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5430 && (move_by_pieces_ninsns
67225c15
RK
5431 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5432 TYPE_ALIGN (type) / BITS_PER_UNIT)
9de08200
RK
5433 > MOVE_RATIO)
5434 && ! mostly_zeros_p (exp))))
dd27116b 5435 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
bbf6f052
RK
5436 {
5437 rtx constructor = output_constant_def (exp);
b552441b
RS
5438 if (modifier != EXPAND_CONST_ADDRESS
5439 && modifier != EXPAND_INITIALIZER
5440 && modifier != EXPAND_SUM
d6a5ac33
RK
5441 && (! memory_address_p (GET_MODE (constructor),
5442 XEXP (constructor, 0))
5443 || (flag_force_addr
5444 && GET_CODE (XEXP (constructor, 0)) != REG)))
bbf6f052
RK
5445 constructor = change_address (constructor, VOIDmode,
5446 XEXP (constructor, 0));
5447 return constructor;
5448 }
5449
bbf6f052
RK
5450 else
5451 {
e9ac02a6
JW
5452 /* Handle calls that pass values in multiple non-contiguous
5453 locations. The Irix 6 ABI has examples of this. */
e5e809f4 5454 if (target == 0 || ! safe_from_p (target, exp, 1)
e9ac02a6 5455 || GET_CODE (target) == PARALLEL)
06089a8b
RK
5456 {
5457 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5458 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5459 else
5460 target = assign_temp (type, 0, 1, 1);
5461 }
07604beb
RK
5462
5463 if (TREE_READONLY (exp))
5464 {
9151b3bf 5465 if (GET_CODE (target) == MEM)
effbcc6a
RK
5466 target = copy_rtx (target);
5467
07604beb
RK
5468 RTX_UNCHANGING_P (target) = 1;
5469 }
5470
e1a43f73 5471 store_constructor (exp, target, 0);
bbf6f052
RK
5472 return target;
5473 }
5474
5475 case INDIRECT_REF:
5476 {
5477 tree exp1 = TREE_OPERAND (exp, 0);
5478 tree exp2;
7581a30f
JW
5479 tree index;
5480 tree string = string_constant (exp1, &index);
5481 int i;
5482
06eaa86f 5483 /* Try to optimize reads from const strings. */
7581a30f
JW
5484 if (string
5485 && TREE_CODE (string) == STRING_CST
5486 && TREE_CODE (index) == INTEGER_CST
5487 && !TREE_INT_CST_HIGH (index)
5488 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
5489 && GET_MODE_CLASS (mode) == MODE_INT
06eaa86f
JW
5490 && GET_MODE_SIZE (mode) == 1
5491 && modifier != EXPAND_MEMORY_USE_WO)
7581a30f 5492 return GEN_INT (TREE_STRING_POINTER (string)[i]);
bbf6f052 5493
405f0da6
JW
5494 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5495 op0 = memory_address (mode, op0);
8c8a8e34 5496
921b3427
RK
5497 if (flag_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5498 {
5499 enum memory_use_mode memory_usage;
5500 memory_usage = get_memory_usage_from_modifier (modifier);
5501
5502 if (memory_usage != MEMORY_USE_DONT)
c85f7c16
JL
5503 {
5504 in_check_memory_usage = 1;
5505 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5506 op0, ptr_mode,
5507 GEN_INT (int_size_in_bytes (type)),
5508 TYPE_MODE (sizetype),
5509 GEN_INT (memory_usage),
5510 TYPE_MODE (integer_type_node));
5511 in_check_memory_usage = 0;
5512 }
921b3427
RK
5513 }
5514
38a448ca 5515 temp = gen_rtx_MEM (mode, op0);
8c8a8e34
JW
5516 /* If address was computed by addition,
5517 mark this as an element of an aggregate. */
5518 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5519 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5520 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
05e3bdb9 5521 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
8c8a8e34
JW
5522 || (TREE_CODE (exp1) == ADDR_EXPR
5523 && (exp2 = TREE_OPERAND (exp1, 0))
05e3bdb9 5524 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
8c8a8e34 5525 MEM_IN_STRUCT_P (temp) = 1;
2c4c436a 5526 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
1125706f
RK
5527
5528 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5529 here, because, in C and C++, the fact that a location is accessed
5530 through a pointer to const does not mean that the value there can
5531 never change. Languages where it can never change should
5532 also set TREE_STATIC. */
5cb7a25a 5533 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
8c8a8e34
JW
5534 return temp;
5535 }
bbf6f052
RK
5536
5537 case ARRAY_REF:
742920c7
RK
5538 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5539 abort ();
bbf6f052 5540
bbf6f052 5541 {
742920c7
RK
5542 tree array = TREE_OPERAND (exp, 0);
5543 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5544 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5545 tree index = TREE_OPERAND (exp, 1);
5546 tree index_type = TREE_TYPE (index);
08293add 5547 HOST_WIDE_INT i;
b50d17a1 5548
d4c89139
PB
5549 /* Optimize the special-case of a zero lower bound.
5550
5551 We convert the low_bound to sizetype to avoid some problems
5552 with constant folding. (E.g. suppose the lower bound is 1,
5553 and its mode is QI. Without the conversion, (ARRAY
5554 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5555 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5556
5557 But sizetype isn't quite right either (especially if
5558 the lowbound is negative). FIXME */
5559
742920c7 5560 if (! integer_zerop (low_bound))
d4c89139
PB
5561 index = fold (build (MINUS_EXPR, index_type, index,
5562 convert (sizetype, low_bound)));
742920c7 5563
742920c7 5564 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
5565 This is not done in fold so it won't happen inside &.
5566 Don't fold if this is for wide characters since it's too
5567 difficult to do correctly and this is a very rare case. */
742920c7
RK
5568
5569 if (TREE_CODE (array) == STRING_CST
5570 && TREE_CODE (index) == INTEGER_CST
5571 && !TREE_INT_CST_HIGH (index)
307b821c 5572 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
ad2e7dd0
RK
5573 && GET_MODE_CLASS (mode) == MODE_INT
5574 && GET_MODE_SIZE (mode) == 1)
307b821c 5575 return GEN_INT (TREE_STRING_POINTER (array)[i]);
bbf6f052 5576
742920c7
RK
5577 /* If this is a constant index into a constant array,
5578 just get the value from the array. Handle both the cases when
5579 we have an explicit constructor and when our operand is a variable
5580 that was declared const. */
4af3895e 5581
742920c7
RK
5582 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5583 {
5584 if (TREE_CODE (index) == INTEGER_CST
5585 && TREE_INT_CST_HIGH (index) == 0)
5586 {
5587 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5588
5589 i = TREE_INT_CST_LOW (index);
5590 while (elem && i--)
5591 elem = TREE_CHAIN (elem);
5592 if (elem)
5593 return expand_expr (fold (TREE_VALUE (elem)), target,
921b3427 5594 tmode, ro_modifier);
742920c7
RK
5595 }
5596 }
4af3895e 5597
742920c7
RK
5598 else if (optimize >= 1
5599 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5600 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5601 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5602 {
08293add 5603 if (TREE_CODE (index) == INTEGER_CST)
742920c7
RK
5604 {
5605 tree init = DECL_INITIAL (array);
5606
5607 i = TREE_INT_CST_LOW (index);
5608 if (TREE_CODE (init) == CONSTRUCTOR)
5609 {
5610 tree elem = CONSTRUCTOR_ELTS (init);
5611
03dc44a6
RS
5612 while (elem
5613 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
742920c7
RK
5614 elem = TREE_CHAIN (elem);
5615 if (elem)
5616 return expand_expr (fold (TREE_VALUE (elem)), target,
921b3427 5617 tmode, ro_modifier);
742920c7
RK
5618 }
5619 else if (TREE_CODE (init) == STRING_CST
08293add
RK
5620 && TREE_INT_CST_HIGH (index) == 0
5621 && (TREE_INT_CST_LOW (index)
5622 < TREE_STRING_LENGTH (init)))
5623 return (GEN_INT
5624 (TREE_STRING_POINTER
5625 (init)[TREE_INT_CST_LOW (index)]));
742920c7
RK
5626 }
5627 }
5628 }
8c8a8e34 5629
08293add 5630 /* ... fall through ... */
bbf6f052
RK
5631
5632 case COMPONENT_REF:
5633 case BIT_FIELD_REF:
4af3895e 5634 /* If the operand is a CONSTRUCTOR, we can just extract the
7a0b7b9a
RK
5635 appropriate field if it is present. Don't do this if we have
5636 already written the data since we want to refer to that copy
5637 and varasm.c assumes that's what we'll do. */
4af3895e 5638 if (code != ARRAY_REF
7a0b7b9a
RK
5639 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5640 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
4af3895e
JVA
5641 {
5642 tree elt;
5643
5644 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5645 elt = TREE_CHAIN (elt))
86b5812c
RK
5646 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
5647 /* We can normally use the value of the field in the
5648 CONSTRUCTOR. However, if this is a bitfield in
5649 an integral mode that we can fit in a HOST_WIDE_INT,
5650 we must mask only the number of bits in the bitfield,
5651 since this is done implicitly by the constructor. If
5652 the bitfield does not meet either of those conditions,
5653 we can't do this optimization. */
5654 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
5655 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
5656 == MODE_INT)
5657 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
5658 <= HOST_BITS_PER_WIDE_INT))))
5659 {
5660 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5661 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
5662 {
5663 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
86b5812c
RK
5664
5665 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
5666 {
5667 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
5668 op0 = expand_and (op0, op1, target);
5669 }
5670 else
5671 {
e5e809f4
JL
5672 enum machine_mode imode
5673 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
86b5812c 5674 tree count
e5e809f4
JL
5675 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
5676 0);
86b5812c
RK
5677
5678 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
5679 target, 0);
5680 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
5681 target, 0);
5682 }
5683 }
5684
5685 return op0;
5686 }
4af3895e
JVA
5687 }
5688
bbf6f052
RK
5689 {
5690 enum machine_mode mode1;
5691 int bitsize;
5692 int bitpos;
7bb0943f 5693 tree offset;
bbf6f052 5694 int volatilep = 0;
034f9101 5695 int alignment;
839c4796
RK
5696 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5697 &mode1, &unsignedp, &volatilep,
5698 &alignment);
bbf6f052 5699
e7f3c83f
RK
5700 /* If we got back the original object, something is wrong. Perhaps
5701 we are evaluating an expression too early. In any event, don't
5702 infinitely recurse. */
5703 if (tem == exp)
5704 abort ();
5705
3d27140a 5706 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
5707 computation, since it will need a temporary and TARGET is known
5708 to have to do. This occurs in unchecked conversion in Ada. */
5709
5710 op0 = expand_expr (tem,
5711 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5712 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5713 != INTEGER_CST)
5714 ? target : NULL_RTX),
4ed67205 5715 VOIDmode,
e5e809f4
JL
5716 modifier == EXPAND_INITIALIZER
5717 ? modifier : EXPAND_NORMAL);
bbf6f052 5718
8c8a8e34 5719 /* If this is a constant, put it into a register if it is a
8008b228 5720 legitimate constant and memory if it isn't. */
8c8a8e34
JW
5721 if (CONSTANT_P (op0))
5722 {
5723 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
f2878c6b 5724 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
8c8a8e34
JW
5725 op0 = force_reg (mode, op0);
5726 else
5727 op0 = validize_mem (force_const_mem (mode, op0));
5728 }
5729
7bb0943f
RS
5730 if (offset != 0)
5731 {
906c4e36 5732 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
5733
5734 if (GET_CODE (op0) != MEM)
5735 abort ();
2d48c13d
JL
5736
5737 if (GET_MODE (offset_rtx) != ptr_mode)
5738#ifdef POINTERS_EXTEND_UNSIGNED
5739 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 1);
5740#else
5741 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5742#endif
5743
7bb0943f 5744 op0 = change_address (op0, VOIDmode,
38a448ca
RH
5745 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
5746 force_reg (ptr_mode, offset_rtx)));
7bb0943f
RS
5747 }
5748
bbf6f052
RK
5749 /* Don't forget about volatility even if this is a bitfield. */
5750 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5751 {
5752 op0 = copy_rtx (op0);
5753 MEM_VOLATILE_P (op0) = 1;
5754 }
5755
921b3427
RK
5756 /* Check the access. */
5757 if (flag_check_memory_usage && GET_CODE (op0) == MEM)
5758 {
5759 enum memory_use_mode memory_usage;
5760 memory_usage = get_memory_usage_from_modifier (modifier);
5761
5762 if (memory_usage != MEMORY_USE_DONT)
5763 {
5764 rtx to;
5765 int size;
5766
5767 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
5768 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
5769
5770 /* Check the access right of the pointer. */
e9a25f70
JL
5771 if (size > BITS_PER_UNIT)
5772 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5773 to, ptr_mode,
5774 GEN_INT (size / BITS_PER_UNIT),
5775 TYPE_MODE (sizetype),
956d6950
JL
5776 GEN_INT (memory_usage),
5777 TYPE_MODE (integer_type_node));
921b3427
RK
5778 }
5779 }
5780
ccc98036
RS
5781 /* In cases where an aligned union has an unaligned object
5782 as a field, we might be extracting a BLKmode value from
5783 an integer-mode (e.g., SImode) object. Handle this case
5784 by doing the extract into an object as wide as the field
5785 (which we know to be the width of a basic mode), then
f2420d0b
JW
5786 storing into memory, and changing the mode to BLKmode.
5787 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5788 EXPAND_INITIALIZER), then we must not copy to a temporary. */
bbf6f052 5789 if (mode1 == VOIDmode
ccc98036 5790 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
f9409c3a 5791 || (modifier != EXPAND_CONST_ADDRESS
f9409c3a 5792 && modifier != EXPAND_INITIALIZER
c2722ef6
RK
5793 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5794 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5795 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
f9409c3a
JW
5796 /* If the field isn't aligned enough to fetch as a memref,
5797 fetch it as a bit field. */
5798 || (SLOW_UNALIGNED_ACCESS
5799 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5800 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
bbf6f052 5801 {
bbf6f052
RK
5802 enum machine_mode ext_mode = mode;
5803
5804 if (ext_mode == BLKmode)
5805 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5806
5807 if (ext_mode == BLKmode)
a281e72d
RK
5808 {
5809 /* In this case, BITPOS must start at a byte boundary and
5810 TARGET, if specified, must be a MEM. */
5811 if (GET_CODE (op0) != MEM
5812 || (target != 0 && GET_CODE (target) != MEM)
5813 || bitpos % BITS_PER_UNIT != 0)
5814 abort ();
5815
5816 op0 = change_address (op0, VOIDmode,
5817 plus_constant (XEXP (op0, 0),
5818 bitpos / BITS_PER_UNIT));
5819 if (target == 0)
5820 target = assign_temp (type, 0, 1, 1);
5821
5822 emit_block_move (target, op0,
5823 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5824 / BITS_PER_UNIT),
5825 1);
5826
5827 return target;
5828 }
bbf6f052 5829
dc6d66b3
RK
5830 op0 = validize_mem (op0);
5831
5832 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5833 mark_reg_pointer (XEXP (op0, 0), alignment);
5834
5835 op0 = extract_bit_field (op0, bitsize, bitpos,
bbf6f052 5836 unsignedp, target, ext_mode, ext_mode,
034f9101 5837 alignment,
bbf6f052 5838 int_size_in_bytes (TREE_TYPE (tem)));
ef19912d
RK
5839
5840 /* If the result is a record type and BITSIZE is narrower than
5841 the mode of OP0, an integral mode, and this is a big endian
5842 machine, we must put the field into the high-order bits. */
5843 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5844 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5845 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5846 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5847 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5848 - bitsize),
5849 op0, 1);
5850
bbf6f052
RK
5851 if (mode == BLKmode)
5852 {
5853 rtx new = assign_stack_temp (ext_mode,
5854 bitsize / BITS_PER_UNIT, 0);
5855
5856 emit_move_insn (new, op0);
5857 op0 = copy_rtx (new);
5858 PUT_MODE (op0, BLKmode);
092dded9 5859 MEM_IN_STRUCT_P (op0) = 1;
bbf6f052
RK
5860 }
5861
5862 return op0;
5863 }
5864
05019f83
RK
5865 /* If the result is BLKmode, use that to access the object
5866 now as well. */
5867 if (mode == BLKmode)
5868 mode1 = BLKmode;
5869
bbf6f052
RK
5870 /* Get a reference to just this component. */
5871 if (modifier == EXPAND_CONST_ADDRESS
5872 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
38a448ca
RH
5873 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
5874 (bitpos / BITS_PER_UNIT)));
bbf6f052
RK
5875 else
5876 op0 = change_address (op0, mode1,
5877 plus_constant (XEXP (op0, 0),
5878 (bitpos / BITS_PER_UNIT)));
dc6d66b3
RK
5879 if (GET_CODE (XEXP (op0, 0)) == REG)
5880 mark_reg_pointer (XEXP (op0, 0), alignment);
5881
bbf6f052
RK
5882 MEM_IN_STRUCT_P (op0) = 1;
5883 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 5884 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 5885 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 5886 || modifier == EXPAND_INITIALIZER)
bbf6f052 5887 return op0;
0d15e60c 5888 else if (target == 0)
bbf6f052 5889 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 5890
bbf6f052
RK
5891 convert_move (target, op0, unsignedp);
5892 return target;
5893 }
5894
bbf6f052
RK
5895 /* Intended for a reference to a buffer of a file-object in Pascal.
5896 But it's not certain that a special tree code will really be
5897 necessary for these. INDIRECT_REF might work for them. */
5898 case BUFFER_REF:
5899 abort ();
5900
7308a047 5901 case IN_EXPR:
7308a047 5902 {
d6a5ac33
RK
5903 /* Pascal set IN expression.
5904
5905 Algorithm:
5906 rlo = set_low - (set_low%bits_per_word);
5907 the_word = set [ (index - rlo)/bits_per_word ];
5908 bit_index = index % bits_per_word;
5909 bitmask = 1 << bit_index;
5910 return !!(the_word & bitmask); */
5911
7308a047
RS
5912 tree set = TREE_OPERAND (exp, 0);
5913 tree index = TREE_OPERAND (exp, 1);
d6a5ac33 5914 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308a047 5915 tree set_type = TREE_TYPE (set);
7308a047
RS
5916 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5917 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
d6a5ac33
RK
5918 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5919 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5920 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5921 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5922 rtx setaddr = XEXP (setval, 0);
5923 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7308a047
RS
5924 rtx rlow;
5925 rtx diff, quo, rem, addr, bit, result;
7308a047 5926
d6a5ac33
RK
5927 preexpand_calls (exp);
5928
5929 /* If domain is empty, answer is no. Likewise if index is constant
5930 and out of bounds. */
51723711 5931 if (((TREE_CODE (set_high_bound) == INTEGER_CST
d6a5ac33 5932 && TREE_CODE (set_low_bound) == INTEGER_CST
51723711 5933 && tree_int_cst_lt (set_high_bound, set_low_bound))
d6a5ac33
RK
5934 || (TREE_CODE (index) == INTEGER_CST
5935 && TREE_CODE (set_low_bound) == INTEGER_CST
5936 && tree_int_cst_lt (index, set_low_bound))
5937 || (TREE_CODE (set_high_bound) == INTEGER_CST
5938 && TREE_CODE (index) == INTEGER_CST
5939 && tree_int_cst_lt (set_high_bound, index))))
7308a047
RS
5940 return const0_rtx;
5941
d6a5ac33
RK
5942 if (target == 0)
5943 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7308a047
RS
5944
5945 /* If we get here, we have to generate the code for both cases
5946 (in range and out of range). */
5947
5948 op0 = gen_label_rtx ();
5949 op1 = gen_label_rtx ();
5950
5951 if (! (GET_CODE (index_val) == CONST_INT
5952 && GET_CODE (lo_r) == CONST_INT))
5953 {
17938e57 5954 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
d6a5ac33 5955 GET_MODE (index_val), iunsignedp, 0);
7308a047
RS
5956 emit_jump_insn (gen_blt (op1));
5957 }
5958
5959 if (! (GET_CODE (index_val) == CONST_INT
5960 && GET_CODE (hi_r) == CONST_INT))
5961 {
17938e57 5962 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
d6a5ac33 5963 GET_MODE (index_val), iunsignedp, 0);
7308a047
RS
5964 emit_jump_insn (gen_bgt (op1));
5965 }
5966
5967 /* Calculate the element number of bit zero in the first word
5968 of the set. */
5969 if (GET_CODE (lo_r) == CONST_INT)
17938e57
RK
5970 rlow = GEN_INT (INTVAL (lo_r)
5971 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 5972 else
17938e57
RK
5973 rlow = expand_binop (index_mode, and_optab, lo_r,
5974 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d6a5ac33 5975 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 5976
d6a5ac33
RK
5977 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5978 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047
RS
5979
5980 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 5981 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 5982 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
d6a5ac33
RK
5983 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5984
7308a047 5985 addr = memory_address (byte_mode,
d6a5ac33
RK
5986 expand_binop (index_mode, add_optab, diff,
5987 setaddr, NULL_RTX, iunsignedp,
17938e57 5988 OPTAB_LIB_WIDEN));
d6a5ac33 5989
7308a047
RS
5990 /* Extract the bit we want to examine */
5991 bit = expand_shift (RSHIFT_EXPR, byte_mode,
38a448ca 5992 gen_rtx_MEM (byte_mode, addr),
17938e57
RK
5993 make_tree (TREE_TYPE (index), rem),
5994 NULL_RTX, 1);
5995 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5996 GET_MODE (target) == byte_mode ? target : 0,
7308a047 5997 1, OPTAB_LIB_WIDEN);
17938e57
RK
5998
5999 if (result != target)
6000 convert_move (target, result, 1);
7308a047
RS
6001
6002 /* Output the code to handle the out-of-range case. */
6003 emit_jump (op0);
6004 emit_label (op1);
6005 emit_move_insn (target, const0_rtx);
6006 emit_label (op0);
6007 return target;
6008 }
6009
bbf6f052
RK
6010 case WITH_CLEANUP_EXPR:
6011 if (RTL_EXPR_RTL (exp) == 0)
6012 {
6013 RTL_EXPR_RTL (exp)
921b3427 6014 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
e976b8b2
MS
6015 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6016
bbf6f052
RK
6017 /* That's it for this cleanup. */
6018 TREE_OPERAND (exp, 2) = 0;
6019 }
6020 return RTL_EXPR_RTL (exp);
6021
5dab5552
MS
6022 case CLEANUP_POINT_EXPR:
6023 {
d93d4205 6024 extern int temp_slot_level;
e976b8b2
MS
6025 /* Start a new binding layer that will keep track of all cleanup
6026 actions to be performed. */
6027 expand_start_bindings (0);
6028
d93d4205 6029 target_temp_slot_level = temp_slot_level;
e976b8b2 6030
921b3427 6031 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
f283f66b
JM
6032 /* If we're going to use this value, load it up now. */
6033 if (! ignore)
6034 op0 = force_not_mem (op0);
d93d4205 6035 preserve_temp_slots (op0);
e976b8b2 6036 expand_end_bindings (NULL_TREE, 0, 0);
5dab5552
MS
6037 }
6038 return op0;
6039
bbf6f052
RK
6040 case CALL_EXPR:
6041 /* Check for a built-in function. */
6042 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
6043 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6044 == FUNCTION_DECL)
bbf6f052
RK
6045 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6046 return expand_builtin (exp, target, subtarget, tmode, ignore);
d6a5ac33 6047
bbf6f052
RK
6048 /* If this call was expanded already by preexpand_calls,
6049 just return the result we got. */
6050 if (CALL_EXPR_RTL (exp) != 0)
6051 return CALL_EXPR_RTL (exp);
d6a5ac33 6052
8129842c 6053 return expand_call (exp, target, ignore);
bbf6f052
RK
6054
6055 case NON_LVALUE_EXPR:
6056 case NOP_EXPR:
6057 case CONVERT_EXPR:
6058 case REFERENCE_EXPR:
bbf6f052
RK
6059 if (TREE_CODE (type) == UNION_TYPE)
6060 {
6061 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6062 if (target == 0)
06089a8b
RK
6063 {
6064 if (mode != BLKmode)
6065 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6066 else
6067 target = assign_temp (type, 0, 1, 1);
6068 }
d6a5ac33 6069
bbf6f052
RK
6070 if (GET_CODE (target) == MEM)
6071 /* Store data into beginning of memory target. */
6072 store_expr (TREE_OPERAND (exp, 0),
1499e0a8
RK
6073 change_address (target, TYPE_MODE (valtype), 0), 0);
6074
bbf6f052
RK
6075 else if (GET_CODE (target) == REG)
6076 /* Store this field into a union of the proper type. */
6077 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6078 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6079 VOIDmode, 0, 1,
6080 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
6081 else
6082 abort ();
6083
6084 /* Return the entire union. */
6085 return target;
6086 }
d6a5ac33 6087
7f62854a
RK
6088 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6089 {
6090 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
921b3427 6091 ro_modifier);
7f62854a
RK
6092
6093 /* If the signedness of the conversion differs and OP0 is
6094 a promoted SUBREG, clear that indication since we now
6095 have to do the proper extension. */
6096 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6097 && GET_CODE (op0) == SUBREG)
6098 SUBREG_PROMOTED_VAR_P (op0) = 0;
6099
6100 return op0;
6101 }
6102
1499e0a8 6103 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
12342f90
RS
6104 if (GET_MODE (op0) == mode)
6105 return op0;
12342f90 6106
d6a5ac33
RK
6107 /* If OP0 is a constant, just convert it into the proper mode. */
6108 if (CONSTANT_P (op0))
6109 return
6110 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6111 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
12342f90 6112
26fcb35a 6113 if (modifier == EXPAND_INITIALIZER)
38a448ca 6114 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 6115
bbf6f052 6116 if (target == 0)
d6a5ac33
RK
6117 return
6118 convert_to_mode (mode, op0,
6119 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 6120 else
d6a5ac33
RK
6121 convert_move (target, op0,
6122 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
6123 return target;
6124
6125 case PLUS_EXPR:
0f41302f
MS
6126 /* We come here from MINUS_EXPR when the second operand is a
6127 constant. */
bbf6f052
RK
6128 plus_expr:
6129 this_optab = add_optab;
6130
6131 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6132 something else, make sure we add the register to the constant and
6133 then to the other thing. This case can occur during strength
6134 reduction and doing it this way will produce better code if the
6135 frame pointer or argument pointer is eliminated.
6136
6137 fold-const.c will ensure that the constant is always in the inner
6138 PLUS_EXPR, so the only case we need to do anything about is if
6139 sp, ap, or fp is our second argument, in which case we must swap
6140 the innermost first argument and our second argument. */
6141
6142 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6143 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6144 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6145 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6146 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6147 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6148 {
6149 tree t = TREE_OPERAND (exp, 1);
6150
6151 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6152 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6153 }
6154
88f63c77 6155 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
6156 something, we might be forming a constant. So try to use
6157 plus_constant. If it produces a sum and we can't accept it,
6158 use force_operand. This allows P = &ARR[const] to generate
6159 efficient code on machines where a SYMBOL_REF is not a valid
6160 address.
6161
6162 If this is an EXPAND_SUM call, always return the sum. */
c980ac49 6163 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
88f63c77 6164 || mode == ptr_mode)
bbf6f052 6165 {
c980ac49
RS
6166 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6167 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6168 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6169 {
6170 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6171 EXPAND_SUM);
6172 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6173 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6174 op1 = force_operand (op1, target);
6175 return op1;
6176 }
bbf6f052 6177
c980ac49
RS
6178 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6179 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6180 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6181 {
6182 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6183 EXPAND_SUM);
6184 if (! CONSTANT_P (op0))
6185 {
6186 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6187 VOIDmode, modifier);
709f5be1
RS
6188 /* Don't go to both_summands if modifier
6189 says it's not right to return a PLUS. */
6190 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6191 goto binop2;
c980ac49
RS
6192 goto both_summands;
6193 }
6194 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6195 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6196 op0 = force_operand (op0, target);
6197 return op0;
6198 }
bbf6f052
RK
6199 }
6200
6201 /* No sense saving up arithmetic to be done
6202 if it's all in the wrong mode to form part of an address.
6203 And force_operand won't know whether to sign-extend or
6204 zero-extend. */
6205 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 6206 || mode != ptr_mode)
c980ac49 6207 goto binop;
bbf6f052
RK
6208
6209 preexpand_calls (exp);
e5e809f4 6210 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
6211 subtarget = 0;
6212
921b3427
RK
6213 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6214 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
bbf6f052 6215
c980ac49 6216 both_summands:
bbf6f052
RK
6217 /* Make sure any term that's a sum with a constant comes last. */
6218 if (GET_CODE (op0) == PLUS
6219 && CONSTANT_P (XEXP (op0, 1)))
6220 {
6221 temp = op0;
6222 op0 = op1;
6223 op1 = temp;
6224 }
6225 /* If adding to a sum including a constant,
6226 associate it to put the constant outside. */
6227 if (GET_CODE (op1) == PLUS
6228 && CONSTANT_P (XEXP (op1, 1)))
6229 {
6230 rtx constant_term = const0_rtx;
6231
6232 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6233 if (temp != 0)
6234 op0 = temp;
6f90e075
JW
6235 /* Ensure that MULT comes first if there is one. */
6236 else if (GET_CODE (op0) == MULT)
38a448ca 6237 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
bbf6f052 6238 else
38a448ca 6239 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
bbf6f052
RK
6240
6241 /* Let's also eliminate constants from op0 if possible. */
6242 op0 = eliminate_constant_term (op0, &constant_term);
6243
6244 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6245 their sum should be a constant. Form it into OP1, since the
6246 result we want will then be OP0 + OP1. */
6247
6248 temp = simplify_binary_operation (PLUS, mode, constant_term,
6249 XEXP (op1, 1));
6250 if (temp != 0)
6251 op1 = temp;
6252 else
38a448ca 6253 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
bbf6f052
RK
6254 }
6255
6256 /* Put a constant term last and put a multiplication first. */
6257 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6258 temp = op1, op1 = op0, op0 = temp;
6259
6260 temp = simplify_binary_operation (PLUS, mode, op0, op1);
38a448ca 6261 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
bbf6f052
RK
6262
6263 case MINUS_EXPR:
ea87523e
RK
6264 /* For initializers, we are allowed to return a MINUS of two
6265 symbolic constants. Here we handle all cases when both operands
6266 are constant. */
bbf6f052
RK
6267 /* Handle difference of two symbolic constants,
6268 for the sake of an initializer. */
6269 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6270 && really_constant_p (TREE_OPERAND (exp, 0))
6271 && really_constant_p (TREE_OPERAND (exp, 1)))
6272 {
906c4e36 6273 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
921b3427 6274 VOIDmode, ro_modifier);
906c4e36 6275 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
921b3427 6276 VOIDmode, ro_modifier);
ea87523e 6277
ea87523e
RK
6278 /* If the last operand is a CONST_INT, use plus_constant of
6279 the negated constant. Else make the MINUS. */
6280 if (GET_CODE (op1) == CONST_INT)
6281 return plus_constant (op0, - INTVAL (op1));
6282 else
38a448ca 6283 return gen_rtx_MINUS (mode, op0, op1);
bbf6f052
RK
6284 }
6285 /* Convert A - const to A + (-const). */
6286 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6287 {
ae431183
RK
6288 tree negated = fold (build1 (NEGATE_EXPR, type,
6289 TREE_OPERAND (exp, 1)));
6290
6291 /* Deal with the case where we can't negate the constant
6292 in TYPE. */
6293 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6294 {
6295 tree newtype = signed_type (type);
6296 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6297 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6298 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6299
6300 if (! TREE_OVERFLOW (newneg))
6301 return expand_expr (convert (type,
6302 build (PLUS_EXPR, newtype,
6303 newop0, newneg)),
921b3427 6304 target, tmode, ro_modifier);
ae431183
RK
6305 }
6306 else
6307 {
6308 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6309 goto plus_expr;
6310 }
bbf6f052
RK
6311 }
6312 this_optab = sub_optab;
6313 goto binop;
6314
6315 case MULT_EXPR:
6316 preexpand_calls (exp);
6317 /* If first operand is constant, swap them.
6318 Thus the following special case checks need only
6319 check the second operand. */
6320 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6321 {
6322 register tree t1 = TREE_OPERAND (exp, 0);
6323 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6324 TREE_OPERAND (exp, 1) = t1;
6325 }
6326
6327 /* Attempt to return something suitable for generating an
6328 indexed address, for machines that support that. */
6329
88f63c77 6330 if (modifier == EXPAND_SUM && mode == ptr_mode
bbf6f052 6331 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 6332 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 6333 {
921b3427
RK
6334 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6335 EXPAND_SUM);
bbf6f052
RK
6336
6337 /* Apply distributive law if OP0 is x+c. */
6338 if (GET_CODE (op0) == PLUS
6339 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
38a448ca
RH
6340 return gen_rtx_PLUS (mode,
6341 gen_rtx_MULT (mode, XEXP (op0, 0),
6342 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
906c4e36
RK
6343 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6344 * INTVAL (XEXP (op0, 1))));
bbf6f052
RK
6345
6346 if (GET_CODE (op0) != REG)
906c4e36 6347 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
6348 if (GET_CODE (op0) != REG)
6349 op0 = copy_to_mode_reg (mode, op0);
6350
38a448ca
RH
6351 return gen_rtx_MULT (mode, op0,
6352 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
bbf6f052
RK
6353 }
6354
e5e809f4 6355 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
6356 subtarget = 0;
6357
6358 /* Check for multiplying things that have been extended
6359 from a narrower type. If this machine supports multiplying
6360 in that narrower type with a result in the desired type,
6361 do it that way, and avoid the explicit type-conversion. */
6362 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6363 && TREE_CODE (type) == INTEGER_TYPE
6364 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6365 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6366 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6367 && int_fits_type_p (TREE_OPERAND (exp, 1),
6368 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6369 /* Don't use a widening multiply if a shift will do. */
6370 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 6371 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
6372 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6373 ||
6374 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6375 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6376 ==
6377 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6378 /* If both operands are extended, they must either both
6379 be zero-extended or both be sign-extended. */
6380 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6381 ==
6382 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6383 {
6384 enum machine_mode innermode
6385 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
b10af0c8
TG
6386 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6387 ? smul_widen_optab : umul_widen_optab);
bbf6f052
RK
6388 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6389 ? umul_widen_optab : smul_widen_optab);
b10af0c8 6390 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 6391 {
b10af0c8
TG
6392 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6393 {
6394 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6395 NULL_RTX, VOIDmode, 0);
6396 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6397 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6398 VOIDmode, 0);
6399 else
6400 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6401 NULL_RTX, VOIDmode, 0);
6402 goto binop2;
6403 }
6404 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6405 && innermode == word_mode)
6406 {
6407 rtx htem;
6408 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6409 NULL_RTX, VOIDmode, 0);
6410 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6411 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6412 VOIDmode, 0);
6413 else
6414 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6415 NULL_RTX, VOIDmode, 0);
6416 temp = expand_binop (mode, other_optab, op0, op1, target,
6417 unsignedp, OPTAB_LIB_WIDEN);
6418 htem = expand_mult_highpart_adjust (innermode,
6419 gen_highpart (innermode, temp),
6420 op0, op1,
6421 gen_highpart (innermode, temp),
6422 unsignedp);
6423 emit_move_insn (gen_highpart (innermode, temp), htem);
6424 return temp;
6425 }
bbf6f052
RK
6426 }
6427 }
6428 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 6429 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6430 return expand_mult (mode, op0, op1, target, unsignedp);
6431
6432 case TRUNC_DIV_EXPR:
6433 case FLOOR_DIV_EXPR:
6434 case CEIL_DIV_EXPR:
6435 case ROUND_DIV_EXPR:
6436 case EXACT_DIV_EXPR:
6437 preexpand_calls (exp);
e5e809f4 6438 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
6439 subtarget = 0;
6440 /* Possible optimization: compute the dividend with EXPAND_SUM
6441 then if the divisor is constant can optimize the case
6442 where some terms of the dividend have coeffs divisible by it. */
6443 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 6444 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6445 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6446
6447 case RDIV_EXPR:
6448 this_optab = flodiv_optab;
6449 goto binop;
6450
6451 case TRUNC_MOD_EXPR:
6452 case FLOOR_MOD_EXPR:
6453 case CEIL_MOD_EXPR:
6454 case ROUND_MOD_EXPR:
6455 preexpand_calls (exp);
e5e809f4 6456 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
6457 subtarget = 0;
6458 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 6459 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6460 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6461
6462 case FIX_ROUND_EXPR:
6463 case FIX_FLOOR_EXPR:
6464 case FIX_CEIL_EXPR:
6465 abort (); /* Not used for C. */
6466
6467 case FIX_TRUNC_EXPR:
906c4e36 6468 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6469 if (target == 0)
6470 target = gen_reg_rtx (mode);
6471 expand_fix (target, op0, unsignedp);
6472 return target;
6473
6474 case FLOAT_EXPR:
906c4e36 6475 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6476 if (target == 0)
6477 target = gen_reg_rtx (mode);
6478 /* expand_float can't figure out what to do if FROM has VOIDmode.
6479 So give it the correct mode. With -O, cse will optimize this. */
6480 if (GET_MODE (op0) == VOIDmode)
6481 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6482 op0);
6483 expand_float (target, op0,
6484 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6485 return target;
6486
6487 case NEGATE_EXPR:
5b22bee8 6488 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
bbf6f052
RK
6489 temp = expand_unop (mode, neg_optab, op0, target, 0);
6490 if (temp == 0)
6491 abort ();
6492 return temp;
6493
6494 case ABS_EXPR:
6495 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6496
2d7050fd 6497 /* Handle complex values specially. */
d6a5ac33
RK
6498 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6499 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6500 return expand_complex_abs (mode, op0, target, unsignedp);
2d7050fd 6501
bbf6f052
RK
6502 /* Unsigned abs is simply the operand. Testing here means we don't
6503 risk generating incorrect code below. */
6504 if (TREE_UNSIGNED (type))
6505 return op0;
6506
2e5ec6cf 6507 return expand_abs (mode, op0, target, unsignedp,
e5e809f4 6508 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
bbf6f052
RK
6509
6510 case MAX_EXPR:
6511 case MIN_EXPR:
6512 target = original_target;
e5e809f4 6513 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
fc155707 6514 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
d6a5ac33 6515 || GET_MODE (target) != mode
bbf6f052
RK
6516 || (GET_CODE (target) == REG
6517 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6518 target = gen_reg_rtx (mode);
906c4e36 6519 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6520 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6521
6522 /* First try to do it with a special MIN or MAX instruction.
6523 If that does not win, use a conditional jump to select the proper
6524 value. */
6525 this_optab = (TREE_UNSIGNED (type)
6526 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6527 : (code == MIN_EXPR ? smin_optab : smax_optab));
6528
6529 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6530 OPTAB_WIDEN);
6531 if (temp != 0)
6532 return temp;
6533
fa2981d8
JW
6534 /* At this point, a MEM target is no longer useful; we will get better
6535 code without it. */
6536
6537 if (GET_CODE (target) == MEM)
6538 target = gen_reg_rtx (mode);
6539
ee456b1c
RK
6540 if (target != op0)
6541 emit_move_insn (target, op0);
d6a5ac33 6542
bbf6f052 6543 op0 = gen_label_rtx ();
d6a5ac33 6544
f81497d9
RS
6545 /* If this mode is an integer too wide to compare properly,
6546 compare word by word. Rely on cse to optimize constant cases. */
d6a5ac33 6547 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
bbf6f052 6548 {
f81497d9 6549 if (code == MAX_EXPR)
d6a5ac33
RK
6550 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6551 target, op1, NULL_RTX, op0);
bbf6f052 6552 else
d6a5ac33
RK
6553 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6554 op1, target, NULL_RTX, op0);
ee456b1c 6555 emit_move_insn (target, op1);
bbf6f052 6556 }
f81497d9
RS
6557 else
6558 {
6559 if (code == MAX_EXPR)
6560 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
ee456b1c
RK
6561 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6562 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
f81497d9
RS
6563 else
6564 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
ee456b1c
RK
6565 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6566 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
f81497d9 6567 if (temp == const0_rtx)
ee456b1c 6568 emit_move_insn (target, op1);
f81497d9
RS
6569 else if (temp != const_true_rtx)
6570 {
6571 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6572 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6573 else
6574 abort ();
ee456b1c 6575 emit_move_insn (target, op1);
f81497d9
RS
6576 }
6577 }
bbf6f052
RK
6578 emit_label (op0);
6579 return target;
6580
bbf6f052
RK
6581 case BIT_NOT_EXPR:
6582 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6583 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6584 if (temp == 0)
6585 abort ();
6586 return temp;
6587
6588 case FFS_EXPR:
6589 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6590 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6591 if (temp == 0)
6592 abort ();
6593 return temp;
6594
d6a5ac33
RK
6595 /* ??? Can optimize bitwise operations with one arg constant.
6596 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6597 and (a bitwise1 b) bitwise2 b (etc)
6598 but that is probably not worth while. */
6599
6600 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6601 boolean values when we want in all cases to compute both of them. In
6602 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6603 as actual zero-or-1 values and then bitwise anding. In cases where
6604 there cannot be any side effects, better code would be made by
6605 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6606 how to recognize those cases. */
6607
bbf6f052
RK
6608 case TRUTH_AND_EXPR:
6609 case BIT_AND_EXPR:
6610 this_optab = and_optab;
6611 goto binop;
6612
bbf6f052
RK
6613 case TRUTH_OR_EXPR:
6614 case BIT_IOR_EXPR:
6615 this_optab = ior_optab;
6616 goto binop;
6617
874726a8 6618 case TRUTH_XOR_EXPR:
bbf6f052
RK
6619 case BIT_XOR_EXPR:
6620 this_optab = xor_optab;
6621 goto binop;
6622
6623 case LSHIFT_EXPR:
6624 case RSHIFT_EXPR:
6625 case LROTATE_EXPR:
6626 case RROTATE_EXPR:
6627 preexpand_calls (exp);
e5e809f4 6628 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
6629 subtarget = 0;
6630 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6631 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6632 unsignedp);
6633
d6a5ac33
RK
6634 /* Could determine the answer when only additive constants differ. Also,
6635 the addition of one can be handled by changing the condition. */
bbf6f052
RK
6636 case LT_EXPR:
6637 case LE_EXPR:
6638 case GT_EXPR:
6639 case GE_EXPR:
6640 case EQ_EXPR:
6641 case NE_EXPR:
6642 preexpand_calls (exp);
6643 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6644 if (temp != 0)
6645 return temp;
d6a5ac33 6646
0f41302f 6647 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
bbf6f052
RK
6648 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6649 && original_target
6650 && GET_CODE (original_target) == REG
6651 && (GET_MODE (original_target)
6652 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6653 {
d6a5ac33
RK
6654 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6655 VOIDmode, 0);
6656
bbf6f052
RK
6657 if (temp != original_target)
6658 temp = copy_to_reg (temp);
d6a5ac33 6659
bbf6f052 6660 op1 = gen_label_rtx ();
906c4e36 6661 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
bbf6f052
RK
6662 GET_MODE (temp), unsignedp, 0);
6663 emit_jump_insn (gen_beq (op1));
6664 emit_move_insn (temp, const1_rtx);
6665 emit_label (op1);
6666 return temp;
6667 }
d6a5ac33 6668
bbf6f052
RK
6669 /* If no set-flag instruction, must generate a conditional
6670 store into a temporary variable. Drop through
6671 and handle this like && and ||. */
6672
6673 case TRUTH_ANDIF_EXPR:
6674 case TRUTH_ORIF_EXPR:
e44842fe 6675 if (! ignore
e5e809f4 6676 && (target == 0 || ! safe_from_p (target, exp, 1)
e44842fe
RK
6677 /* Make sure we don't have a hard reg (such as function's return
6678 value) live across basic blocks, if not optimizing. */
6679 || (!optimize && GET_CODE (target) == REG
6680 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 6681 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
e44842fe
RK
6682
6683 if (target)
6684 emit_clr_insn (target);
6685
bbf6f052
RK
6686 op1 = gen_label_rtx ();
6687 jumpifnot (exp, op1);
e44842fe
RK
6688
6689 if (target)
6690 emit_0_to_1_insn (target);
6691
bbf6f052 6692 emit_label (op1);
e44842fe 6693 return ignore ? const0_rtx : target;
bbf6f052
RK
6694
6695 case TRUTH_NOT_EXPR:
6696 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6697 /* The parser is careful to generate TRUTH_NOT_EXPR
6698 only with operands that are always zero or one. */
906c4e36 6699 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052
RK
6700 target, 1, OPTAB_LIB_WIDEN);
6701 if (temp == 0)
6702 abort ();
6703 return temp;
6704
6705 case COMPOUND_EXPR:
6706 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6707 emit_queue ();
6708 return expand_expr (TREE_OPERAND (exp, 1),
6709 (ignore ? const0_rtx : target),
6710 VOIDmode, 0);
6711
6712 case COND_EXPR:
ac01eace
RK
6713 /* If we would have a "singleton" (see below) were it not for a
6714 conversion in each arm, bring that conversion back out. */
6715 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6716 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
6717 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
6718 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
6719 {
6720 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
6721 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
6722
6723 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
6724 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6725 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
6726 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
6727 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
6728 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6729 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
6730 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
6731 return expand_expr (build1 (NOP_EXPR, type,
6732 build (COND_EXPR, TREE_TYPE (true),
6733 TREE_OPERAND (exp, 0),
6734 true, false)),
6735 target, tmode, modifier);
6736 }
6737
bbf6f052
RK
6738 {
6739 /* Note that COND_EXPRs whose type is a structure or union
6740 are required to be constructed to contain assignments of
6741 a temporary variable, so that we can evaluate them here
6742 for side effect only. If type is void, we must do likewise. */
6743
6744 /* If an arm of the branch requires a cleanup,
6745 only that cleanup is performed. */
6746
6747 tree singleton = 0;
6748 tree binary_op = 0, unary_op = 0;
bbf6f052
RK
6749
6750 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6751 convert it to our mode, if necessary. */
6752 if (integer_onep (TREE_OPERAND (exp, 1))
6753 && integer_zerop (TREE_OPERAND (exp, 2))
6754 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6755 {
dd27116b
RK
6756 if (ignore)
6757 {
6758 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
921b3427 6759 ro_modifier);
dd27116b
RK
6760 return const0_rtx;
6761 }
6762
921b3427 6763 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
bbf6f052
RK
6764 if (GET_MODE (op0) == mode)
6765 return op0;
d6a5ac33 6766
bbf6f052
RK
6767 if (target == 0)
6768 target = gen_reg_rtx (mode);
6769 convert_move (target, op0, unsignedp);
6770 return target;
6771 }
6772
ac01eace
RK
6773 /* Check for X ? A + B : A. If we have this, we can copy A to the
6774 output and conditionally add B. Similarly for unary operations.
6775 Don't do this if X has side-effects because those side effects
6776 might affect A or B and the "?" operation is a sequence point in
6777 ANSI. (operand_equal_p tests for side effects.) */
bbf6f052
RK
6778
6779 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6780 && operand_equal_p (TREE_OPERAND (exp, 2),
6781 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6782 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6783 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6784 && operand_equal_p (TREE_OPERAND (exp, 1),
6785 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6786 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6787 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6788 && operand_equal_p (TREE_OPERAND (exp, 2),
6789 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6790 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6791 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6792 && operand_equal_p (TREE_OPERAND (exp, 1),
6793 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6794 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6795
01c8a7c8
RK
6796 /* If we are not to produce a result, we have no target. Otherwise,
6797 if a target was specified use it; it will not be used as an
6798 intermediate target unless it is safe. If no target, use a
6799 temporary. */
6800
6801 if (ignore)
6802 temp = 0;
6803 else if (original_target
e5e809f4 6804 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
01c8a7c8
RK
6805 || (singleton && GET_CODE (original_target) == REG
6806 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6807 && original_target == var_rtx (singleton)))
6808 && GET_MODE (original_target) == mode
7c00d1fe
RK
6809#ifdef HAVE_conditional_move
6810 && (! can_conditionally_move_p (mode)
6811 || GET_CODE (original_target) == REG
6812 || TREE_ADDRESSABLE (type))
6813#endif
01c8a7c8
RK
6814 && ! (GET_CODE (original_target) == MEM
6815 && MEM_VOLATILE_P (original_target)))
6816 temp = original_target;
6817 else if (TREE_ADDRESSABLE (type))
6818 abort ();
6819 else
6820 temp = assign_temp (type, 0, 0, 1);
6821
ac01eace
RK
6822 /* If we had X ? A + C : A, with C a constant power of 2, and we can
6823 do the test of X as a store-flag operation, do this as
6824 A + ((X != 0) << log C). Similarly for other simple binary
6825 operators. Only do for C == 1 if BRANCH_COST is low. */
dd27116b 6826 if (temp && singleton && binary_op
bbf6f052
RK
6827 && (TREE_CODE (binary_op) == PLUS_EXPR
6828 || TREE_CODE (binary_op) == MINUS_EXPR
6829 || TREE_CODE (binary_op) == BIT_IOR_EXPR
9fbd9f58 6830 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
ac01eace
RK
6831 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
6832 : integer_onep (TREE_OPERAND (binary_op, 1)))
bbf6f052
RK
6833 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6834 {
6835 rtx result;
6836 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6837 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6838 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
2d444001 6839 : xor_optab);
bbf6f052
RK
6840
6841 /* If we had X ? A : A + 1, do this as A + (X == 0).
6842
6843 We have to invert the truth value here and then put it
6844 back later if do_store_flag fails. We cannot simply copy
6845 TREE_OPERAND (exp, 0) to another variable and modify that
6846 because invert_truthvalue can modify the tree pointed to
6847 by its argument. */
6848 if (singleton == TREE_OPERAND (exp, 1))
6849 TREE_OPERAND (exp, 0)
6850 = invert_truthvalue (TREE_OPERAND (exp, 0));
6851
6852 result = do_store_flag (TREE_OPERAND (exp, 0),
e5e809f4 6853 (safe_from_p (temp, singleton, 1)
906c4e36 6854 ? temp : NULL_RTX),
bbf6f052
RK
6855 mode, BRANCH_COST <= 1);
6856
ac01eace
RK
6857 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
6858 result = expand_shift (LSHIFT_EXPR, mode, result,
6859 build_int_2 (tree_log2
6860 (TREE_OPERAND
6861 (binary_op, 1)),
6862 0),
e5e809f4 6863 (safe_from_p (temp, singleton, 1)
ac01eace
RK
6864 ? temp : NULL_RTX), 0);
6865
bbf6f052
RK
6866 if (result)
6867 {
906c4e36 6868 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6869 return expand_binop (mode, boptab, op1, result, temp,
6870 unsignedp, OPTAB_LIB_WIDEN);
6871 }
6872 else if (singleton == TREE_OPERAND (exp, 1))
6873 TREE_OPERAND (exp, 0)
6874 = invert_truthvalue (TREE_OPERAND (exp, 0));
6875 }
6876
dabf8373 6877 do_pending_stack_adjust ();
bbf6f052
RK
6878 NO_DEFER_POP;
6879 op0 = gen_label_rtx ();
6880
6881 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6882 {
6883 if (temp != 0)
6884 {
6885 /* If the target conflicts with the other operand of the
6886 binary op, we can't use it. Also, we can't use the target
6887 if it is a hard register, because evaluating the condition
6888 might clobber it. */
6889 if ((binary_op
e5e809f4 6890 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
bbf6f052
RK
6891 || (GET_CODE (temp) == REG
6892 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6893 temp = gen_reg_rtx (mode);
6894 store_expr (singleton, temp, 0);
6895 }
6896 else
906c4e36 6897 expand_expr (singleton,
2937cf87 6898 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6899 if (singleton == TREE_OPERAND (exp, 1))
6900 jumpif (TREE_OPERAND (exp, 0), op0);
6901 else
6902 jumpifnot (TREE_OPERAND (exp, 0), op0);
6903
956d6950 6904 start_cleanup_deferral ();
bbf6f052
RK
6905 if (binary_op && temp == 0)
6906 /* Just touch the other operand. */
6907 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 6908 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6909 else if (binary_op)
6910 store_expr (build (TREE_CODE (binary_op), type,
6911 make_tree (type, temp),
6912 TREE_OPERAND (binary_op, 1)),
6913 temp, 0);
6914 else
6915 store_expr (build1 (TREE_CODE (unary_op), type,
6916 make_tree (type, temp)),
6917 temp, 0);
6918 op1 = op0;
bbf6f052 6919 }
bbf6f052
RK
6920 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6921 comparison operator. If we have one of these cases, set the
6922 output to A, branch on A (cse will merge these two references),
6923 then set the output to FOO. */
6924 else if (temp
6925 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6926 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6927 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6928 TREE_OPERAND (exp, 1), 0)
e9a25f70
JL
6929 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6930 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
e5e809f4 6931 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
bbf6f052
RK
6932 {
6933 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6934 temp = gen_reg_rtx (mode);
6935 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6936 jumpif (TREE_OPERAND (exp, 0), op0);
5dab5552 6937
956d6950 6938 start_cleanup_deferral ();
bbf6f052
RK
6939 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6940 op1 = op0;
6941 }
6942 else if (temp
6943 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6944 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6945 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6946 TREE_OPERAND (exp, 2), 0)
e9a25f70
JL
6947 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6948 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
e5e809f4 6949 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
6950 {
6951 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6952 temp = gen_reg_rtx (mode);
6953 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6954 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 6955
956d6950 6956 start_cleanup_deferral ();
bbf6f052
RK
6957 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6958 op1 = op0;
6959 }
6960 else
6961 {
6962 op1 = gen_label_rtx ();
6963 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 6964
956d6950 6965 start_cleanup_deferral ();
bbf6f052
RK
6966 if (temp != 0)
6967 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6968 else
906c4e36
RK
6969 expand_expr (TREE_OPERAND (exp, 1),
6970 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
956d6950 6971 end_cleanup_deferral ();
bbf6f052
RK
6972 emit_queue ();
6973 emit_jump_insn (gen_jump (op1));
6974 emit_barrier ();
6975 emit_label (op0);
956d6950 6976 start_cleanup_deferral ();
bbf6f052
RK
6977 if (temp != 0)
6978 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6979 else
906c4e36
RK
6980 expand_expr (TREE_OPERAND (exp, 2),
6981 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6982 }
6983
956d6950 6984 end_cleanup_deferral ();
bbf6f052
RK
6985
6986 emit_queue ();
6987 emit_label (op1);
6988 OK_DEFER_POP;
5dab5552 6989
bbf6f052
RK
6990 return temp;
6991 }
6992
6993 case TARGET_EXPR:
6994 {
6995 /* Something needs to be initialized, but we didn't know
6996 where that thing was when building the tree. For example,
6997 it could be the return value of a function, or a parameter
6998 to a function which lays down in the stack, or a temporary
6999 variable which must be passed by reference.
7000
7001 We guarantee that the expression will either be constructed
7002 or copied into our original target. */
7003
7004 tree slot = TREE_OPERAND (exp, 0);
2a888d4c 7005 tree cleanups = NULL_TREE;
5c062816 7006 tree exp1;
bbf6f052
RK
7007
7008 if (TREE_CODE (slot) != VAR_DECL)
7009 abort ();
7010
9c51f375
RK
7011 if (! ignore)
7012 target = original_target;
7013
bbf6f052
RK
7014 if (target == 0)
7015 {
7016 if (DECL_RTL (slot) != 0)
ac993f4f
MS
7017 {
7018 target = DECL_RTL (slot);
5c062816 7019 /* If we have already expanded the slot, so don't do
ac993f4f 7020 it again. (mrs) */
5c062816
MS
7021 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7022 return target;
ac993f4f 7023 }
bbf6f052
RK
7024 else
7025 {
e9a25f70 7026 target = assign_temp (type, 2, 0, 1);
bbf6f052
RK
7027 /* All temp slots at this level must not conflict. */
7028 preserve_temp_slots (target);
7029 DECL_RTL (slot) = target;
e9a25f70
JL
7030 if (TREE_ADDRESSABLE (slot))
7031 {
7032 TREE_ADDRESSABLE (slot) = 0;
7033 mark_addressable (slot);
7034 }
bbf6f052 7035
e287fd6e
RK
7036 /* Since SLOT is not known to the called function
7037 to belong to its stack frame, we must build an explicit
7038 cleanup. This case occurs when we must build up a reference
7039 to pass the reference as an argument. In this case,
7040 it is very likely that such a reference need not be
7041 built here. */
7042
7043 if (TREE_OPERAND (exp, 2) == 0)
7044 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
2a888d4c 7045 cleanups = TREE_OPERAND (exp, 2);
e287fd6e 7046 }
bbf6f052
RK
7047 }
7048 else
7049 {
7050 /* This case does occur, when expanding a parameter which
7051 needs to be constructed on the stack. The target
7052 is the actual stack address that we want to initialize.
7053 The function we call will perform the cleanup in this case. */
7054
8c042b47
RS
7055 /* If we have already assigned it space, use that space,
7056 not target that we were passed in, as our target
7057 parameter is only a hint. */
7058 if (DECL_RTL (slot) != 0)
7059 {
7060 target = DECL_RTL (slot);
7061 /* If we have already expanded the slot, so don't do
7062 it again. (mrs) */
7063 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7064 return target;
7065 }
21002281
JW
7066 else
7067 {
7068 DECL_RTL (slot) = target;
7069 /* If we must have an addressable slot, then make sure that
7070 the RTL that we just stored in slot is OK. */
7071 if (TREE_ADDRESSABLE (slot))
7072 {
7073 TREE_ADDRESSABLE (slot) = 0;
7074 mark_addressable (slot);
7075 }
7076 }
bbf6f052
RK
7077 }
7078
4847c938 7079 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
5c062816
MS
7080 /* Mark it as expanded. */
7081 TREE_OPERAND (exp, 1) = NULL_TREE;
7082
e5e809f4 7083 TREE_USED (slot) = 1;
41531e5b 7084 store_expr (exp1, target, 0);
61d6b1cc 7085
e976b8b2 7086 expand_decl_cleanup (NULL_TREE, cleanups);
61d6b1cc 7087
41531e5b 7088 return target;
bbf6f052
RK
7089 }
7090
7091 case INIT_EXPR:
7092 {
7093 tree lhs = TREE_OPERAND (exp, 0);
7094 tree rhs = TREE_OPERAND (exp, 1);
7095 tree noncopied_parts = 0;
7096 tree lhs_type = TREE_TYPE (lhs);
7097
7098 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7099 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7100 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7101 TYPE_NONCOPIED_PARTS (lhs_type));
7102 while (noncopied_parts != 0)
7103 {
7104 expand_assignment (TREE_VALUE (noncopied_parts),
7105 TREE_PURPOSE (noncopied_parts), 0, 0);
7106 noncopied_parts = TREE_CHAIN (noncopied_parts);
7107 }
7108 return temp;
7109 }
7110
7111 case MODIFY_EXPR:
7112 {
7113 /* If lhs is complex, expand calls in rhs before computing it.
7114 That's so we don't compute a pointer and save it over a call.
7115 If lhs is simple, compute it first so we can give it as a
7116 target if the rhs is just a call. This avoids an extra temp and copy
7117 and that prevents a partial-subsumption which makes bad code.
7118 Actually we could treat component_ref's of vars like vars. */
7119
7120 tree lhs = TREE_OPERAND (exp, 0);
7121 tree rhs = TREE_OPERAND (exp, 1);
7122 tree noncopied_parts = 0;
7123 tree lhs_type = TREE_TYPE (lhs);
7124
7125 temp = 0;
7126
7127 if (TREE_CODE (lhs) != VAR_DECL
7128 && TREE_CODE (lhs) != RESULT_DECL
b60334e8
RK
7129 && TREE_CODE (lhs) != PARM_DECL
7130 && ! (TREE_CODE (lhs) == INDIRECT_REF
7131 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
bbf6f052
RK
7132 preexpand_calls (exp);
7133
7134 /* Check for |= or &= of a bitfield of size one into another bitfield
7135 of size 1. In this case, (unless we need the result of the
7136 assignment) we can do this more efficiently with a
7137 test followed by an assignment, if necessary.
7138
7139 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7140 things change so we do, this code should be enhanced to
7141 support it. */
7142 if (ignore
7143 && TREE_CODE (lhs) == COMPONENT_REF
7144 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7145 || TREE_CODE (rhs) == BIT_AND_EXPR)
7146 && TREE_OPERAND (rhs, 0) == lhs
7147 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7148 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7149 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7150 {
7151 rtx label = gen_label_rtx ();
7152
7153 do_jump (TREE_OPERAND (rhs, 1),
7154 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7155 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7156 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7157 (TREE_CODE (rhs) == BIT_IOR_EXPR
7158 ? integer_one_node
7159 : integer_zero_node)),
7160 0, 0);
e7c33f54 7161 do_pending_stack_adjust ();
bbf6f052
RK
7162 emit_label (label);
7163 return const0_rtx;
7164 }
7165
7166 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7167 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7168 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7169 TYPE_NONCOPIED_PARTS (lhs_type));
7170
7171 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7172 while (noncopied_parts != 0)
7173 {
7174 expand_assignment (TREE_PURPOSE (noncopied_parts),
7175 TREE_VALUE (noncopied_parts), 0, 0);
7176 noncopied_parts = TREE_CHAIN (noncopied_parts);
7177 }
7178 return temp;
7179 }
7180
7181 case PREINCREMENT_EXPR:
7182 case PREDECREMENT_EXPR:
7b8b9722 7183 return expand_increment (exp, 0, ignore);
bbf6f052
RK
7184
7185 case POSTINCREMENT_EXPR:
7186 case POSTDECREMENT_EXPR:
7187 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 7188 return expand_increment (exp, ! ignore, ignore);
bbf6f052
RK
7189
7190 case ADDR_EXPR:
987c71d9 7191 /* If nonzero, TEMP will be set to the address of something that might
0f41302f 7192 be a MEM corresponding to a stack slot. */
987c71d9
RK
7193 temp = 0;
7194
bbf6f052
RK
7195 /* Are we taking the address of a nested function? */
7196 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9 7197 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
e5e809f4
JL
7198 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7199 && ! TREE_STATIC (exp))
bbf6f052
RK
7200 {
7201 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7202 op0 = force_operand (op0, target);
7203 }
682ba3a6
RK
7204 /* If we are taking the address of something erroneous, just
7205 return a zero. */
7206 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7207 return const0_rtx;
bbf6f052
RK
7208 else
7209 {
e287fd6e
RK
7210 /* We make sure to pass const0_rtx down if we came in with
7211 ignore set, to avoid doing the cleanups twice for something. */
7212 op0 = expand_expr (TREE_OPERAND (exp, 0),
7213 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
7214 (modifier == EXPAND_INITIALIZER
7215 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 7216
119af78a
RK
7217 /* If we are going to ignore the result, OP0 will have been set
7218 to const0_rtx, so just return it. Don't get confused and
7219 think we are taking the address of the constant. */
7220 if (ignore)
7221 return op0;
7222
3539e816
MS
7223 op0 = protect_from_queue (op0, 0);
7224
896102d0
RK
7225 /* We would like the object in memory. If it is a constant,
7226 we can have it be statically allocated into memory. For
682ba3a6 7227 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
896102d0
RK
7228 memory and store the value into it. */
7229
7230 if (CONSTANT_P (op0))
7231 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7232 op0);
987c71d9 7233 else if (GET_CODE (op0) == MEM)
af5b53ed
RK
7234 {
7235 mark_temp_addr_taken (op0);
7236 temp = XEXP (op0, 0);
7237 }
896102d0 7238
682ba3a6
RK
7239 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7240 || GET_CODE (op0) == CONCAT)
896102d0
RK
7241 {
7242 /* If this object is in a register, it must be not
0f41302f 7243 be BLKmode. */
896102d0 7244 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
06089a8b 7245 rtx memloc = assign_temp (inner_type, 1, 1, 1);
896102d0 7246
7a0b7b9a 7247 mark_temp_addr_taken (memloc);
896102d0
RK
7248 emit_move_insn (memloc, op0);
7249 op0 = memloc;
7250 }
7251
bbf6f052
RK
7252 if (GET_CODE (op0) != MEM)
7253 abort ();
7254
7255 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88f63c77
RK
7256 {
7257 temp = XEXP (op0, 0);
7258#ifdef POINTERS_EXTEND_UNSIGNED
7259 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7260 && mode == ptr_mode)
9fcfcce7 7261 temp = convert_memory_address (ptr_mode, temp);
88f63c77
RK
7262#endif
7263 return temp;
7264 }
987c71d9 7265
bbf6f052
RK
7266 op0 = force_operand (XEXP (op0, 0), target);
7267 }
987c71d9 7268
bbf6f052 7269 if (flag_force_addr && GET_CODE (op0) != REG)
987c71d9
RK
7270 op0 = force_reg (Pmode, op0);
7271
dc6d66b3
RK
7272 if (GET_CODE (op0) == REG
7273 && ! REG_USERVAR_P (op0))
7274 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
987c71d9
RK
7275
7276 /* If we might have had a temp slot, add an equivalent address
7277 for it. */
7278 if (temp != 0)
7279 update_temp_slot_address (temp, op0);
7280
88f63c77
RK
7281#ifdef POINTERS_EXTEND_UNSIGNED
7282 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7283 && mode == ptr_mode)
9fcfcce7 7284 op0 = convert_memory_address (ptr_mode, op0);
88f63c77
RK
7285#endif
7286
bbf6f052
RK
7287 return op0;
7288
7289 case ENTRY_VALUE_EXPR:
7290 abort ();
7291
7308a047
RS
7292 /* COMPLEX type for Extended Pascal & Fortran */
7293 case COMPLEX_EXPR:
7294 {
7295 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 7296 rtx insns;
7308a047
RS
7297
7298 /* Get the rtx code of the operands. */
7299 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7300 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7301
7302 if (! target)
7303 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7304
6551fa4d 7305 start_sequence ();
7308a047
RS
7306
7307 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
7308 emit_move_insn (gen_realpart (mode, target), op0);
7309 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 7310
6551fa4d
JW
7311 insns = get_insns ();
7312 end_sequence ();
7313
7308a047 7314 /* Complex construction should appear as a single unit. */
6551fa4d
JW
7315 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7316 each with a separate pseudo as destination.
7317 It's not correct for flow to treat them as a unit. */
6d6e61ce 7318 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
7319 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7320 else
7321 emit_insns (insns);
7308a047
RS
7322
7323 return target;
7324 }
7325
7326 case REALPART_EXPR:
2d7050fd
RS
7327 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7328 return gen_realpart (mode, op0);
7308a047
RS
7329
7330 case IMAGPART_EXPR:
2d7050fd
RS
7331 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7332 return gen_imagpart (mode, op0);
7308a047
RS
7333
7334 case CONJ_EXPR:
7335 {
62acb978 7336 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 7337 rtx imag_t;
6551fa4d 7338 rtx insns;
7308a047
RS
7339
7340 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7341
7342 if (! target)
d6a5ac33 7343 target = gen_reg_rtx (mode);
7308a047 7344
6551fa4d 7345 start_sequence ();
7308a047
RS
7346
7347 /* Store the realpart and the negated imagpart to target. */
62acb978
RK
7348 emit_move_insn (gen_realpart (partmode, target),
7349 gen_realpart (partmode, op0));
7308a047 7350
62acb978
RK
7351 imag_t = gen_imagpart (partmode, target);
7352 temp = expand_unop (partmode, neg_optab,
7353 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
7354 if (temp != imag_t)
7355 emit_move_insn (imag_t, temp);
7356
6551fa4d
JW
7357 insns = get_insns ();
7358 end_sequence ();
7359
d6a5ac33
RK
7360 /* Conjugate should appear as a single unit
7361 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
7362 each with a separate pseudo as destination.
7363 It's not correct for flow to treat them as a unit. */
6d6e61ce 7364 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
7365 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7366 else
7367 emit_insns (insns);
7308a047
RS
7368
7369 return target;
7370 }
7371
e976b8b2
MS
7372 case TRY_CATCH_EXPR:
7373 {
7374 tree handler = TREE_OPERAND (exp, 1);
7375
7376 expand_eh_region_start ();
7377
7378 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7379
7380 expand_eh_region_end (handler);
7381
7382 return op0;
7383 }
7384
7385 case POPDCC_EXPR:
7386 {
7387 rtx dcc = get_dynamic_cleanup_chain ();
38a448ca 7388 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
e976b8b2
MS
7389 return const0_rtx;
7390 }
7391
7392 case POPDHC_EXPR:
7393 {
7394 rtx dhc = get_dynamic_handler_chain ();
38a448ca 7395 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
e976b8b2
MS
7396 return const0_rtx;
7397 }
7398
bbf6f052 7399 case ERROR_MARK:
66538193
RS
7400 op0 = CONST0_RTX (tmode);
7401 if (op0 != 0)
7402 return op0;
bbf6f052
RK
7403 return const0_rtx;
7404
7405 default:
90764a87 7406 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
bbf6f052
RK
7407 }
7408
7409 /* Here to do an ordinary binary operator, generating an instruction
7410 from the optab already placed in `this_optab'. */
7411 binop:
7412 preexpand_calls (exp);
e5e809f4 7413 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7414 subtarget = 0;
7415 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7416 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7417 binop2:
7418 temp = expand_binop (mode, this_optab, op0, op1, target,
7419 unsignedp, OPTAB_LIB_WIDEN);
7420 if (temp == 0)
7421 abort ();
7422 return temp;
7423}
bbf6f052 7424
bbf6f052 7425
b93a436e
JL
7426\f
7427/* Return the alignment in bits of EXP, a pointer valued expression.
7428 But don't return more than MAX_ALIGN no matter what.
7429 The alignment returned is, by default, the alignment of the thing that
7430 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7431
7432 Otherwise, look at the expression to see if we can do better, i.e., if the
7433 expression is actually pointing at an object whose alignment is tighter. */
0f41302f 7434
b93a436e
JL
7435static int
7436get_pointer_alignment (exp, max_align)
7437 tree exp;
7438 unsigned max_align;
bbf6f052 7439{
b93a436e
JL
7440 unsigned align, inner;
7441
7442 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7443 return 0;
7444
7445 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7446 align = MIN (align, max_align);
7447
7448 while (1)
bbf6f052 7449 {
b93a436e 7450 switch (TREE_CODE (exp))
bbf6f052 7451 {
b93a436e
JL
7452 case NOP_EXPR:
7453 case CONVERT_EXPR:
7454 case NON_LVALUE_EXPR:
7455 exp = TREE_OPERAND (exp, 0);
7456 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7457 return align;
7458 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7459 align = MIN (inner, max_align);
7460 break;
7461
7462 case PLUS_EXPR:
7463 /* If sum of pointer + int, restrict our maximum alignment to that
7464 imposed by the integer. If not, we can't do any better than
7465 ALIGN. */
7466 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7467 return align;
7468
7469 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7470 & (max_align - 1))
7471 != 0)
7472 max_align >>= 1;
7473
7474 exp = TREE_OPERAND (exp, 0);
7475 break;
7476
7477 case ADDR_EXPR:
7478 /* See what we are pointing at and look at its alignment. */
7479 exp = TREE_OPERAND (exp, 0);
7480 if (TREE_CODE (exp) == FUNCTION_DECL)
7481 align = FUNCTION_BOUNDARY;
7482 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7483 align = DECL_ALIGN (exp);
7484#ifdef CONSTANT_ALIGNMENT
7485 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7486 align = CONSTANT_ALIGNMENT (exp, align);
c02bd5d9 7487#endif
b93a436e 7488 return MIN (align, max_align);
c02bd5d9 7489
b93a436e
JL
7490 default:
7491 return align;
7492 }
7493 }
7494}
7495\f
7496/* Return the tree node and offset if a given argument corresponds to
7497 a string constant. */
7498
7499static tree
7500string_constant (arg, ptr_offset)
7501 tree arg;
7502 tree *ptr_offset;
7503{
7504 STRIP_NOPS (arg);
7505
7506 if (TREE_CODE (arg) == ADDR_EXPR
7507 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7508 {
7509 *ptr_offset = integer_zero_node;
7510 return TREE_OPERAND (arg, 0);
7511 }
7512 else if (TREE_CODE (arg) == PLUS_EXPR)
7513 {
7514 tree arg0 = TREE_OPERAND (arg, 0);
7515 tree arg1 = TREE_OPERAND (arg, 1);
7516
7517 STRIP_NOPS (arg0);
7518 STRIP_NOPS (arg1);
7519
7520 if (TREE_CODE (arg0) == ADDR_EXPR
7521 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
bbf6f052 7522 {
b93a436e
JL
7523 *ptr_offset = arg1;
7524 return TREE_OPERAND (arg0, 0);
bbf6f052 7525 }
b93a436e
JL
7526 else if (TREE_CODE (arg1) == ADDR_EXPR
7527 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
bbf6f052 7528 {
b93a436e
JL
7529 *ptr_offset = arg0;
7530 return TREE_OPERAND (arg1, 0);
bbf6f052 7531 }
b93a436e 7532 }
ca695ac9 7533
b93a436e
JL
7534 return 0;
7535}
ca695ac9 7536
b93a436e
JL
7537/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7538 way, because it could contain a zero byte in the middle.
7539 TREE_STRING_LENGTH is the size of the character array, not the string.
ca695ac9 7540
b93a436e
JL
7541 Unfortunately, string_constant can't access the values of const char
7542 arrays with initializers, so neither can we do so here. */
e87b4f3f 7543
b93a436e
JL
7544static tree
7545c_strlen (src)
7546 tree src;
7547{
7548 tree offset_node;
7549 int offset, max;
7550 char *ptr;
e7c33f54 7551
b93a436e
JL
7552 src = string_constant (src, &offset_node);
7553 if (src == 0)
7554 return 0;
7555 max = TREE_STRING_LENGTH (src);
7556 ptr = TREE_STRING_POINTER (src);
7557 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7558 {
7559 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7560 compute the offset to the following null if we don't know where to
7561 start searching for it. */
7562 int i;
7563 for (i = 0; i < max; i++)
7564 if (ptr[i] == 0)
7565 return 0;
7566 /* We don't know the starting offset, but we do know that the string
7567 has no internal zero bytes. We can assume that the offset falls
7568 within the bounds of the string; otherwise, the programmer deserves
7569 what he gets. Subtract the offset from the length of the string,
7570 and return that. */
7571 /* This would perhaps not be valid if we were dealing with named
7572 arrays in addition to literal string constants. */
7573 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7574 }
e7c33f54 7575
b93a436e
JL
7576 /* We have a known offset into the string. Start searching there for
7577 a null character. */
7578 if (offset_node == 0)
7579 offset = 0;
7580 else
7581 {
7582 /* Did we get a long long offset? If so, punt. */
7583 if (TREE_INT_CST_HIGH (offset_node) != 0)
7584 return 0;
7585 offset = TREE_INT_CST_LOW (offset_node);
7586 }
7587 /* If the offset is known to be out of bounds, warn, and call strlen at
7588 runtime. */
7589 if (offset < 0 || offset > max)
7590 {
7591 warning ("offset outside bounds of constant string");
7592 return 0;
7593 }
7594 /* Use strlen to search for the first zero byte. Since any strings
7595 constructed with build_string will have nulls appended, we win even
7596 if we get handed something like (char[4])"abcd".
e7c33f54 7597
b93a436e
JL
7598 Since OFFSET is our starting index into the string, no further
7599 calculation is needed. */
7600 return size_int (strlen (ptr + offset));
7601}
1bbddf11 7602
b93a436e
JL
/* Expand __builtin_return_address (FNDECL_CODE == BUILT_IN_RETURN_ADDRESS)
   or __builtin_frame_address (FNDECL_CODE == BUILT_IN_FRAME_ADDRESS).
   COUNT is the number of frames to scan back; TEM is the frame address
   to start the walk from (presumably the current frame -- supplied by
   the caller; TODO confirm).  Returns an rtx for the requested frame or
   return address.  */

rtx
expand_builtin_return_addr (fndecl_code, count, tem)
     enum built_in_function fndecl_code;
     int count;
     rtx tem;
{
  int i;

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the sparc, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the sparc, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
    }

  /* For __builtin_frame_address, return what we've got.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return tem;

  /* For __builtin_return_address, get the return address from that
     frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* Default: the return address lives one word past the frame address.  */
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_rtx_MEM (Pmode, tem);
#endif
  return tem;
}
e9a25f70 7655
b93a436e
JL
/* __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below (and for longjmp) is copied from the handling of
   non-local gotos.

   BUF_ADDR is the address of that buffer; word 0 receives the frame
   pointer, word 1 the receiver label, and the remaining words hold the
   machine-dependent stack save area.  TARGET, if a suitable pseudo
   register, receives the setjmp result (zero on the direct path; the
   static-chain value when re-entered via longjmp).  FIRST_LABEL is
   branched to after the buffer is set up; NEXT_LABEL is branched to
   after a longjmp return.

   NOTE: This is intended for use by GNAT and the exception handling
   scheme in the compiler and will only work in the method used by
   them.  */

rtx
expand_builtin_setjmp (buf_addr, target, first_label, next_label)
     rtx buf_addr;
     rtx target;
     rtx first_label, next_label;
{
  /* LAB1 is the landing point a longjmp jumps back to.  */
  rtx lab1 = gen_label_rtx ();
  enum machine_mode sa_mode = Pmode, value_mode;
  rtx stack_save;

  value_mode = TYPE_MODE (integer_type_node);

#ifdef POINTERS_EXTEND_UNSIGNED
  buf_addr = convert_memory_address (Pmode, buf_addr);
#endif

  buf_addr = force_reg (Pmode, buf_addr);

  /* TARGET must be a pseudo register; otherwise make a fresh one.  */
  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (value_mode);

  emit_queue ();

  /* We store the frame pointer and the address of lab1 in the buffer
     and use the rest of it for the stack save area, which is
     machine-dependent.  */
  emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
		  virtual_stack_vars_rtx);
  emit_move_insn (validize_mem
		  (gen_rtx_MEM (Pmode,
				plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)))),
		  gen_rtx_LABEL_REF (Pmode, lab1));

#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
#endif

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* Set TARGET to zero and branch to the first-time-through label.  */
  emit_move_insn (target, const0_rtx);
  emit_jump_insn (gen_jump (first_label));
  emit_barrier ();
  emit_label (lab1);

  /* Tell flow about the strange goings on.  */
  current_function_has_nonlocal_label = 1;

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  The code below is from expand_end_bindings
     in stmt.c; see detailed documentation there.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  */
      int i;
      static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == sizeof elim_regs / sizeof elim_regs [0])
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.
	     If there hasn't been space allocated for it yet, make
	     some now.  */
	  if (arg_pointer_save_area == 0)
	    arg_pointer_save_area
	      = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (arg_pointer_save_area));
	}
    }
#endif

  /* Prefer the machine's setjmp receiver, then its nonlocal-goto
     receiver, else emit nothing.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (lab1));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      {
	; /* Nothing */
      }

  /* Set TARGET, and branch to the next-time-through label.  */
  emit_move_insn (target, gen_lowpart (GET_MODE (target), static_chain_rtx));
  emit_jump_insn (gen_jump (next_label));
  emit_barrier ();

  return target;
}
ca695ac9 7791
6fd1c67b
RH
/* Expand a call to __builtin_longjmp.  BUF_ADDR is the address of the
   buffer set up by expand_builtin_setjmp; VALUE is the value the
   corresponding setjmp should return.  Zero is not allowed as the
   longjmp value (it marks the direct return path), so it is forced to
   one -- at compile time for constants, at run time otherwise.  */

void
expand_builtin_longjmp (buf_addr, value)
     rtx buf_addr, value;
{
  rtx fp, lab, stack;
  enum machine_mode sa_mode;

#ifdef POINTERS_EXTEND_UNSIGNED
  buf_addr = convert_memory_address (Pmode, buf_addr);
#endif
  buf_addr = force_reg (Pmode, buf_addr);

  /* The value sent by longjmp is not allowed to be zero.  Force it
     to one if so.  */
  if (GET_CODE (value) == CONST_INT)
    {
      if (INTVAL (value) == 0)
	value = const1_rtx;
    }
  else
    {
      lab = gen_label_rtx ();

      /* Run-time check: if VALUE == 0, replace it with 1.  */
      emit_cmp_insn (value, const0_rtx, NE, NULL_RTX, GET_MODE (value), 0, 0);
      emit_jump_insn (gen_bne (lab));
      emit_move_insn (value, const1_rtx);
      emit_label (lab);
    }

  /* Make sure the value is in the right mode to be copied to the chain.  */
  if (GET_MODE (value) != VOIDmode)
    value = gen_lowpart (GET_MODE (static_chain_rtx), value);

#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    {
      /* Copy the "return value" to the static chain reg.  */
      emit_move_insn (static_chain_rtx, value);
      emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
      emit_insn (gen_builtin_longjmp (buf_addr));
    }
  else
#endif
    {
      /* No machine-specific longjmp: unpack the buffer laid out by
	 expand_builtin_setjmp (FP, label, stack save area).  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

#ifdef HAVE_save_stack_nonlocal
      sa_mode = (HAVE_save_stack_nonlocal
		 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
		 : Pmode);
#else
      sa_mode = Pmode;
#endif

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  /* Copy the "return value" to the static chain reg.  */
	  emit_move_insn (static_chain_rtx, value);

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  /* Keep FP, SP, and the chain live across the jump.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
	  emit_indirect_jump (lab);
	}
    }
}
60bac6ea 7874
b93a436e
JL
7875\f
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

/* Nonzero if NODE's DECL_NAME begins with the "__builtin_" prefix.
   NODE is presumably a FUNCTION_DECL -- the declaration of the called
   function.  */
#define CALLED_AS_BUILT_IN(NODE) \
 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
60bac6ea 7884
b93a436e
JL
7885static rtx
7886expand_builtin (exp, target, subtarget, mode, ignore)
7887 tree exp;
7888 rtx target;
7889 rtx subtarget;
7890 enum machine_mode mode;
7891 int ignore;
7892{
7893 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7894 tree arglist = TREE_OPERAND (exp, 1);
7895 rtx op0;
7896 rtx lab1, insns;
7897 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7898 optab builtin_optab;
60bac6ea 7899
b93a436e
JL
7900 switch (DECL_FUNCTION_CODE (fndecl))
7901 {
7902 case BUILT_IN_ABS:
7903 case BUILT_IN_LABS:
7904 case BUILT_IN_FABS:
7905 /* build_function_call changes these into ABS_EXPR. */
7906 abort ();
4ed67205 7907
b93a436e
JL
7908 case BUILT_IN_SIN:
7909 case BUILT_IN_COS:
7910 /* Treat these like sqrt, but only if the user asks for them. */
7911 if (! flag_fast_math)
7912 break;
7913 case BUILT_IN_FSQRT:
7914 /* If not optimizing, call the library function. */
7915 if (! optimize)
7916 break;
4ed67205 7917
b93a436e
JL
7918 if (arglist == 0
7919 /* Arg could be wrong type if user redeclared this fcn wrong. */
7920 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
4ed67205
RK
7921 break;
7922
b93a436e
JL
7923 /* Stabilize and compute the argument. */
7924 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7925 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7926 {
7927 exp = copy_node (exp);
7928 arglist = copy_node (arglist);
7929 TREE_OPERAND (exp, 1) = arglist;
7930 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7931 }
7932 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
b089937a 7933
b93a436e
JL
7934 /* Make a suitable register to place result in. */
7935 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7565a035 7936
b93a436e
JL
7937 emit_queue ();
7938 start_sequence ();
7565a035 7939
b93a436e
JL
7940 switch (DECL_FUNCTION_CODE (fndecl))
7941 {
7942 case BUILT_IN_SIN:
7943 builtin_optab = sin_optab; break;
7944 case BUILT_IN_COS:
7945 builtin_optab = cos_optab; break;
7946 case BUILT_IN_FSQRT:
7947 builtin_optab = sqrt_optab; break;
7948 default:
7949 abort ();
7950 }
4ed67205 7951
b93a436e
JL
7952 /* Compute into TARGET.
7953 Set TARGET to wherever the result comes back. */
7954 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7955 builtin_optab, op0, target, 0);
7956
7957 /* If we were unable to expand via the builtin, stop the
7958 sequence (without outputting the insns) and break, causing
38e01259 7959 a call to the library function. */
b93a436e 7960 if (target == 0)
4ed67205 7961 {
b93a436e
JL
7962 end_sequence ();
7963 break;
7964 }
4ed67205 7965
b93a436e
JL
7966 /* Check the results by default. But if flag_fast_math is turned on,
7967 then assume sqrt will always be called with valid arguments. */
4ed67205 7968
b93a436e
JL
7969 if (! flag_fast_math)
7970 {
7971 /* Don't define the builtin FP instructions
7972 if your machine is not IEEE. */
7973 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
7974 abort ();
4ed67205 7975
b93a436e 7976 lab1 = gen_label_rtx ();
ca55abae 7977
b93a436e
JL
7978 /* Test the result; if it is NaN, set errno=EDOM because
7979 the argument was not in the domain. */
7980 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
7981 emit_jump_insn (gen_beq (lab1));
7982
7983#ifdef TARGET_EDOM
7984 {
7985#ifdef GEN_ERRNO_RTX
7986 rtx errno_rtx = GEN_ERRNO_RTX;
7987#else
7988 rtx errno_rtx
7989 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
7990#endif
e87b4f3f 7991
b93a436e
JL
7992 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
7993 }
7994#else
7995 /* We can't set errno=EDOM directly; let the library call do it.
7996 Pop the arguments right away in case the call gets deleted. */
7997 NO_DEFER_POP;
7998 expand_call (exp, target, 0);
7999 OK_DEFER_POP;
8000#endif
e7c33f54 8001
b93a436e
JL
8002 emit_label (lab1);
8003 }
0006469d 8004
b93a436e
JL
8005 /* Output the entire sequence. */
8006 insns = get_insns ();
8007 end_sequence ();
8008 emit_insns (insns);
8009
8010 return target;
0006469d 8011
b93a436e
JL
8012 case BUILT_IN_FMOD:
8013 break;
0006469d 8014
b93a436e
JL
8015 /* __builtin_apply_args returns block of memory allocated on
8016 the stack into which is stored the arg pointer, structure
8017 value address, static chain, and all the registers that might
8018 possibly be used in performing a function call. The code is
8019 moved to the start of the function so the incoming values are
8020 saved. */
8021 case BUILT_IN_APPLY_ARGS:
8022 /* Don't do __builtin_apply_args more than once in a function.
8023 Save the result of the first call and reuse it. */
8024 if (apply_args_value != 0)
8025 return apply_args_value;
8026 {
8027 /* When this function is called, it means that registers must be
8028 saved on entry to this function. So we migrate the
8029 call to the first insn of this function. */
8030 rtx temp;
8031 rtx seq;
0006469d 8032
b93a436e
JL
8033 start_sequence ();
8034 temp = expand_builtin_apply_args ();
8035 seq = get_insns ();
8036 end_sequence ();
0006469d 8037
b93a436e 8038 apply_args_value = temp;
0006469d 8039
b93a436e
JL
8040 /* Put the sequence after the NOTE that starts the function.
8041 If this is inside a SEQUENCE, make the outer-level insn
8042 chain current, so the code is placed at the start of the
8043 function. */
8044 push_topmost_sequence ();
8045 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8046 pop_topmost_sequence ();
8047 return temp;
8048 }
0006469d 8049
b93a436e
JL
8050 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8051 FUNCTION with a copy of the parameters described by
8052 ARGUMENTS, and ARGSIZE. It returns a block of memory
8053 allocated on the stack into which is stored all the registers
8054 that might possibly be used for returning the result of a
8055 function. ARGUMENTS is the value returned by
8056 __builtin_apply_args. ARGSIZE is the number of bytes of
8057 arguments that must be copied. ??? How should this value be
8058 computed? We'll also need a safe worst case value for varargs
8059 functions. */
8060 case BUILT_IN_APPLY:
8061 if (arglist == 0
8062 /* Arg could be non-pointer if user redeclared this fcn wrong. */
e5e809f4 8063 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
b93a436e
JL
8064 || TREE_CHAIN (arglist) == 0
8065 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8066 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8067 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8068 return const0_rtx;
8069 else
8070 {
8071 int i;
8072 tree t;
8073 rtx ops[3];
0006469d 8074
b93a436e
JL
8075 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8076 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
bbf6f052 8077
b93a436e
JL
8078 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8079 }
bbf6f052 8080
b93a436e
JL
8081 /* __builtin_return (RESULT) causes the function to return the
8082 value described by RESULT. RESULT is address of the block of
8083 memory returned by __builtin_apply. */
8084 case BUILT_IN_RETURN:
8085 if (arglist
8086 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8087 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8088 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8089 NULL_RTX, VOIDmode, 0));
8090 return const0_rtx;
ca695ac9 8091
b93a436e
JL
8092 case BUILT_IN_SAVEREGS:
8093 /* Don't do __builtin_saveregs more than once in a function.
8094 Save the result of the first call and reuse it. */
8095 if (saveregs_value != 0)
8096 return saveregs_value;
8097 {
8098 /* When this function is called, it means that registers must be
8099 saved on entry to this function. So we migrate the
8100 call to the first insn of this function. */
8101 rtx temp;
8102 rtx seq;
ca695ac9 8103
b93a436e
JL
8104 /* Now really call the function. `expand_call' does not call
8105 expand_builtin, so there is no danger of infinite recursion here. */
8106 start_sequence ();
ca695ac9 8107
b93a436e
JL
8108#ifdef EXPAND_BUILTIN_SAVEREGS
8109 /* Do whatever the machine needs done in this case. */
8110 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8111#else
8112 /* The register where the function returns its value
8113 is likely to have something else in it, such as an argument.
8114 So preserve that register around the call. */
ca695ac9 8115
b93a436e
JL
8116 if (value_mode != VOIDmode)
8117 {
8118 rtx valreg = hard_libcall_value (value_mode);
8119 rtx saved_valreg = gen_reg_rtx (value_mode);
ca695ac9 8120
b93a436e
JL
8121 emit_move_insn (saved_valreg, valreg);
8122 temp = expand_call (exp, target, ignore);
8123 emit_move_insn (valreg, saved_valreg);
ca695ac9
JB
8124 }
8125 else
b93a436e
JL
8126 /* Generate the call, putting the value in a pseudo. */
8127 temp = expand_call (exp, target, ignore);
8128#endif
bbf6f052 8129
b93a436e
JL
8130 seq = get_insns ();
8131 end_sequence ();
bbf6f052 8132
b93a436e 8133 saveregs_value = temp;
bbf6f052 8134
b93a436e
JL
8135 /* Put the sequence after the NOTE that starts the function.
8136 If this is inside a SEQUENCE, make the outer-level insn
8137 chain current, so the code is placed at the start of the
8138 function. */
8139 push_topmost_sequence ();
8140 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8141 pop_topmost_sequence ();
8142 return temp;
8143 }
bbf6f052 8144
b93a436e
JL
8145 /* __builtin_args_info (N) returns word N of the arg space info
8146 for the current function. The number and meanings of words
8147 is controlled by the definition of CUMULATIVE_ARGS. */
8148 case BUILT_IN_ARGS_INFO:
8149 {
8150 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
b93a436e 8151 int *word_ptr = (int *) &current_function_args_info;
381127e8
RL
8152#if 0
8153 /* These are used by the code below that is if 0'ed away */
8154 int i;
b93a436e 8155 tree type, elts, result;
381127e8 8156#endif
bbf6f052 8157
b93a436e
JL
8158 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8159 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8160 __FILE__, __LINE__);
bbf6f052 8161
b93a436e
JL
8162 if (arglist != 0)
8163 {
8164 tree arg = TREE_VALUE (arglist);
8165 if (TREE_CODE (arg) != INTEGER_CST)
8166 error ("argument of `__builtin_args_info' must be constant");
8167 else
8168 {
8169 int wordnum = TREE_INT_CST_LOW (arg);
bbf6f052 8170
b93a436e
JL
8171 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8172 error ("argument of `__builtin_args_info' out of range");
8173 else
8174 return GEN_INT (word_ptr[wordnum]);
8175 }
bbf6f052
RK
8176 }
8177 else
b93a436e 8178 error ("missing argument in `__builtin_args_info'");
bbf6f052 8179
b93a436e 8180 return const0_rtx;
bbf6f052 8181
b93a436e
JL
8182#if 0
8183 for (i = 0; i < nwords; i++)
8184 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
bbf6f052 8185
b93a436e
JL
8186 type = build_array_type (integer_type_node,
8187 build_index_type (build_int_2 (nwords, 0)));
8188 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8189 TREE_CONSTANT (result) = 1;
8190 TREE_STATIC (result) = 1;
8191 result = build (INDIRECT_REF, build_pointer_type (type), result);
8192 TREE_CONSTANT (result) = 1;
8193 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8194#endif
8195 }
8196
8197 /* Return the address of the first anonymous stack arg. */
8198 case BUILT_IN_NEXT_ARG:
ca695ac9 8199 {
b93a436e
JL
8200 tree fntype = TREE_TYPE (current_function_decl);
8201
8202 if ((TYPE_ARG_TYPES (fntype) == 0
8203 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8204 == void_type_node))
8205 && ! current_function_varargs)
8206 {
8207 error ("`va_start' used in function with fixed args");
8208 return const0_rtx;
8209 }
8210
8211 if (arglist)
8212 {
8213 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8214 tree arg = TREE_VALUE (arglist);
8215
8216 /* Strip off all nops for the sake of the comparison. This
8217 is not quite the same as STRIP_NOPS. It does more.
8218 We must also strip off INDIRECT_EXPR for C++ reference
8219 parameters. */
8220 while (TREE_CODE (arg) == NOP_EXPR
8221 || TREE_CODE (arg) == CONVERT_EXPR
8222 || TREE_CODE (arg) == NON_LVALUE_EXPR
8223 || TREE_CODE (arg) == INDIRECT_REF)
8224 arg = TREE_OPERAND (arg, 0);
8225 if (arg != last_parm)
8226 warning ("second parameter of `va_start' not last named argument");
8227 }
8228 else if (! current_function_varargs)
8229 /* Evidently an out of date version of <stdarg.h>; can't validate
8230 va_start's second argument, but can still work as intended. */
8231 warning ("`__builtin_next_arg' called without an argument");
bbf6f052
RK
8232 }
8233
b93a436e
JL
8234 return expand_binop (Pmode, add_optab,
8235 current_function_internal_arg_pointer,
8236 current_function_arg_offset_rtx,
8237 NULL_RTX, 0, OPTAB_LIB_WIDEN);
ca695ac9 8238
b93a436e
JL
8239 case BUILT_IN_CLASSIFY_TYPE:
8240 if (arglist != 0)
8241 {
8242 tree type = TREE_TYPE (TREE_VALUE (arglist));
8243 enum tree_code code = TREE_CODE (type);
8244 if (code == VOID_TYPE)
8245 return GEN_INT (void_type_class);
8246 if (code == INTEGER_TYPE)
8247 return GEN_INT (integer_type_class);
8248 if (code == CHAR_TYPE)
8249 return GEN_INT (char_type_class);
8250 if (code == ENUMERAL_TYPE)
8251 return GEN_INT (enumeral_type_class);
8252 if (code == BOOLEAN_TYPE)
8253 return GEN_INT (boolean_type_class);
8254 if (code == POINTER_TYPE)
8255 return GEN_INT (pointer_type_class);
8256 if (code == REFERENCE_TYPE)
8257 return GEN_INT (reference_type_class);
8258 if (code == OFFSET_TYPE)
8259 return GEN_INT (offset_type_class);
8260 if (code == REAL_TYPE)
8261 return GEN_INT (real_type_class);
8262 if (code == COMPLEX_TYPE)
8263 return GEN_INT (complex_type_class);
8264 if (code == FUNCTION_TYPE)
8265 return GEN_INT (function_type_class);
8266 if (code == METHOD_TYPE)
8267 return GEN_INT (method_type_class);
8268 if (code == RECORD_TYPE)
8269 return GEN_INT (record_type_class);
8270 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8271 return GEN_INT (union_type_class);
8272 if (code == ARRAY_TYPE)
8273 {
8274 if (TYPE_STRING_FLAG (type))
8275 return GEN_INT (string_type_class);
8276 else
8277 return GEN_INT (array_type_class);
8278 }
8279 if (code == SET_TYPE)
8280 return GEN_INT (set_type_class);
8281 if (code == FILE_TYPE)
8282 return GEN_INT (file_type_class);
8283 if (code == LANG_TYPE)
8284 return GEN_INT (lang_type_class);
8285 }
8286 return GEN_INT (no_type_class);
ca695ac9 8287
b93a436e
JL
8288 case BUILT_IN_CONSTANT_P:
8289 if (arglist == 0)
8290 return const0_rtx;
8291 else
8292 {
8293 tree arg = TREE_VALUE (arglist);
ca695ac9 8294
b93a436e
JL
8295 STRIP_NOPS (arg);
8296 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8297 || (TREE_CODE (arg) == ADDR_EXPR
8298 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8299 ? const1_rtx : const0_rtx);
8300 }
ca695ac9 8301
b93a436e
JL
8302 case BUILT_IN_FRAME_ADDRESS:
8303 /* The argument must be a nonnegative integer constant.
8304 It counts the number of frames to scan up the stack.
8305 The value is the address of that frame. */
8306 case BUILT_IN_RETURN_ADDRESS:
8307 /* The argument must be a nonnegative integer constant.
8308 It counts the number of frames to scan up the stack.
8309 The value is the return address saved in that frame. */
8310 if (arglist == 0)
8311 /* Warning about missing arg was already issued. */
8312 return const0_rtx;
8313 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
8314 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8315 {
8316 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8317 error ("invalid arg to `__builtin_frame_address'");
8318 else
8319 error ("invalid arg to `__builtin_return_address'");
8320 return const0_rtx;
8321 }
8322 else
8323 {
8324 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8325 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8326 hard_frame_pointer_rtx);
ee33823f 8327
b93a436e
JL
8328 /* Some ports cannot access arbitrary stack frames. */
8329 if (tem == NULL)
8330 {
8331 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8332 warning ("unsupported arg to `__builtin_frame_address'");
8333 else
8334 warning ("unsupported arg to `__builtin_return_address'");
8335 return const0_rtx;
8336 }
ee33823f 8337
b93a436e
JL
8338 /* For __builtin_frame_address, return what we've got. */
8339 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8340 return tem;
ee33823f 8341
b93a436e
JL
8342 if (GET_CODE (tem) != REG)
8343 tem = copy_to_reg (tem);
8344 return tem;
8345 }
ee33823f 8346
b93a436e
JL
8347 /* Returns the address of the area where the structure is returned.
8348 0 otherwise. */
8349 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8350 if (arglist != 0
8351 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8352 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
8353 return const0_rtx;
8354 else
8355 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
ee33823f 8356
b93a436e
JL
8357 case BUILT_IN_ALLOCA:
8358 if (arglist == 0
8359 /* Arg could be non-integer if user redeclared this fcn wrong. */
8360 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8361 break;
bbf6f052 8362
b93a436e
JL
8363 /* Compute the argument. */
8364 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
bbf6f052 8365
b93a436e
JL
8366 /* Allocate the desired space. */
8367 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
ca695ac9 8368
b93a436e
JL
8369 case BUILT_IN_FFS:
8370 /* If not optimizing, call the library function. */
8371 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8372 break;
ca695ac9 8373
b93a436e
JL
8374 if (arglist == 0
8375 /* Arg could be non-integer if user redeclared this fcn wrong. */
8376 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8377 break;
ca695ac9 8378
b93a436e
JL
8379 /* Compute the argument. */
8380 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8381 /* Compute ffs, into TARGET if possible.
8382 Set TARGET to wherever the result comes back. */
8383 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8384 ffs_optab, op0, target, 1);
8385 if (target == 0)
8386 abort ();
8387 return target;
bbf6f052 8388
b93a436e
JL
8389 case BUILT_IN_STRLEN:
8390 /* If not optimizing, call the library function. */
8391 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8392 break;
bbf6f052 8393
b93a436e
JL
8394 if (arglist == 0
8395 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8396 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8397 break;
8398 else
8399 {
8400 tree src = TREE_VALUE (arglist);
8401 tree len = c_strlen (src);
bbf6f052 8402
b93a436e
JL
8403 int align
8404 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
46b68a37 8405
b93a436e
JL
8406 rtx result, src_rtx, char_rtx;
8407 enum machine_mode insn_mode = value_mode, char_mode;
8408 enum insn_code icode;
46b68a37 8409
b93a436e
JL
8410 /* If the length is known, just return it. */
8411 if (len != 0)
8412 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
956d6950 8413
b93a436e
JL
8414 /* If SRC is not a pointer type, don't do this operation inline. */
8415 if (align == 0)
8416 break;
bbf6f052 8417
b93a436e 8418 /* Call a function if we can't compute strlen in the right mode. */
bbf6f052 8419
b93a436e
JL
8420 while (insn_mode != VOIDmode)
8421 {
8422 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8423 if (icode != CODE_FOR_nothing)
8424 break;
ca695ac9 8425
b93a436e
JL
8426 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8427 }
8428 if (insn_mode == VOIDmode)
8429 break;
ca695ac9 8430
b93a436e
JL
8431 /* Make a place to write the result of the instruction. */
8432 result = target;
8433 if (! (result != 0
8434 && GET_CODE (result) == REG
8435 && GET_MODE (result) == insn_mode
8436 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8437 result = gen_reg_rtx (insn_mode);
ca695ac9 8438
b93a436e 8439 /* Make sure the operands are acceptable to the predicates. */
ca695ac9 8440
b93a436e
JL
8441 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8442 result = gen_reg_rtx (insn_mode);
8443 src_rtx = memory_address (BLKmode,
8444 expand_expr (src, NULL_RTX, ptr_mode,
8445 EXPAND_NORMAL));
bbf6f052 8446
b93a436e
JL
8447 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8448 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
bbf6f052 8449
b93a436e
JL
8450 /* Check the string is readable and has an end. */
8451 if (flag_check_memory_usage)
8452 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
8453 src_rtx, ptr_mode,
8454 GEN_INT (MEMORY_USE_RO),
8455 TYPE_MODE (integer_type_node));
bbf6f052 8456
b93a436e
JL
8457 char_rtx = const0_rtx;
8458 char_mode = insn_operand_mode[(int)icode][2];
8459 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8460 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
bbf6f052 8461
b93a436e
JL
8462 emit_insn (GEN_FCN (icode) (result,
8463 gen_rtx_MEM (BLKmode, src_rtx),
8464 char_rtx, GEN_INT (align)));
bbf6f052 8465
b93a436e
JL
8466 /* Return the value in the proper mode for this function. */
8467 if (GET_MODE (result) == value_mode)
8468 return result;
8469 else if (target != 0)
8470 {
8471 convert_move (target, result, 0);
8472 return target;
8473 }
8474 else
8475 return convert_to_mode (value_mode, result, 0);
8476 }
bbf6f052 8477
b93a436e
JL
8478 case BUILT_IN_STRCPY:
8479 /* If not optimizing, call the library function. */
8480 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8481 break;
bbf6f052 8482
b93a436e
JL
8483 if (arglist == 0
8484 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8485 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8486 || TREE_CHAIN (arglist) == 0
8487 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8488 break;
8489 else
8490 {
8491 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
bbf6f052 8492
b93a436e
JL
8493 if (len == 0)
8494 break;
bbf6f052 8495
b93a436e 8496 len = size_binop (PLUS_EXPR, len, integer_one_node);
6d100794 8497
b93a436e
JL
8498 chainon (arglist, build_tree_list (NULL_TREE, len));
8499 }
6d100794 8500
b93a436e
JL
8501 /* Drops in. */
8502 case BUILT_IN_MEMCPY:
8503 /* If not optimizing, call the library function. */
8504 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8505 break;
e7c33f54 8506
b93a436e
JL
8507 if (arglist == 0
8508 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8509 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8510 || TREE_CHAIN (arglist) == 0
8511 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8512 != POINTER_TYPE)
8513 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8514 || (TREE_CODE (TREE_TYPE (TREE_VALUE
8515 (TREE_CHAIN (TREE_CHAIN (arglist)))))
8516 != INTEGER_TYPE))
8517 break;
8518 else
8519 {
8520 tree dest = TREE_VALUE (arglist);
8521 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8522 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8523 tree type;
e7c33f54 8524
b93a436e
JL
8525 int src_align
8526 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8527 int dest_align
8528 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8529 rtx dest_rtx, dest_mem, src_mem, src_rtx, dest_addr, len_rtx;
e7c33f54 8530
b93a436e
JL
8531 /* If either SRC or DEST is not a pointer type, don't do
8532 this operation in-line. */
8533 if (src_align == 0 || dest_align == 0)
8534 {
8535 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8536 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8537 break;
8538 }
e7c33f54 8539
b93a436e
JL
8540 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8541 dest_mem = gen_rtx_MEM (BLKmode,
8542 memory_address (BLKmode, dest_rtx));
8543 /* There could be a void* cast on top of the object. */
8544 while (TREE_CODE (dest) == NOP_EXPR)
8545 dest = TREE_OPERAND (dest, 0);
8546 type = TREE_TYPE (TREE_TYPE (dest));
8547 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8548 src_rtx = expand_expr (src, NULL_RTX, ptr_mode, EXPAND_SUM);
8549 src_mem = gen_rtx_MEM (BLKmode,
8550 memory_address (BLKmode, src_rtx));
8551 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
e7c33f54 8552
b93a436e
JL
8553 /* Just copy the rights of SRC to the rights of DEST. */
8554 if (flag_check_memory_usage)
8555 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
8556 dest_rtx, ptr_mode,
8557 src_rtx, ptr_mode,
8558 len_rtx, TYPE_MODE (sizetype));
e7c33f54 8559
b93a436e
JL
8560 /* There could be a void* cast on top of the object. */
8561 while (TREE_CODE (src) == NOP_EXPR)
8562 src = TREE_OPERAND (src, 0);
8563 type = TREE_TYPE (TREE_TYPE (src));
8564 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
e7c33f54 8565
b93a436e
JL
8566 /* Copy word part most expediently. */
8567 dest_addr
8568 = emit_block_move (dest_mem, src_mem, len_rtx,
8569 MIN (src_align, dest_align));
e7c33f54 8570
b93a436e
JL
8571 if (dest_addr == 0)
8572 dest_addr = force_operand (dest_rtx, NULL_RTX);
e7c33f54 8573
b93a436e
JL
8574 return dest_addr;
8575 }
e7c33f54 8576
b93a436e
JL
8577 case BUILT_IN_MEMSET:
8578 /* If not optimizing, call the library function. */
8579 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8580 break;
e7c33f54 8581
b93a436e
JL
8582 if (arglist == 0
8583 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8584 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8585 || TREE_CHAIN (arglist) == 0
8586 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8587 != INTEGER_TYPE)
8588 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8589 || (INTEGER_TYPE
8590 != (TREE_CODE (TREE_TYPE
8591 (TREE_VALUE
8592 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
8593 break;
8594 else
8595 {
8596 tree dest = TREE_VALUE (arglist);
8597 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8598 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8599 tree type;
e7c33f54 8600
b93a436e
JL
8601 int dest_align
8602 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8603 rtx dest_rtx, dest_mem, dest_addr, len_rtx;
e7c33f54 8604
b93a436e
JL
8605 /* If DEST is not a pointer type, don't do this
8606 operation in-line. */
8607 if (dest_align == 0)
8608 break;
bbf6f052 8609
bf931ec8
JW
8610 /* If the arguments have side-effects, then we can only evaluate
8611 them at most once. The following code evaluates them twice if
8612 they are not constants because we break out to expand_call
8613 in that case. They can't be constants if they have side-effects
8614 so we can check for that first. Alternatively, we could call
8615 save_expr to make multiple evaluation safe. */
8616 if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
8617 break;
8618
b93a436e
JL
8619 /* If VAL is not 0, don't do this operation in-line. */
8620 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8621 break;
bbf6f052 8622
b93a436e
JL
8623 /* If LEN does not expand to a constant, don't do this
8624 operation in-line. */
8625 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
8626 if (GET_CODE (len_rtx) != CONST_INT)
8627 break;
bbf6f052 8628
b93a436e
JL
8629 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8630 dest_mem = gen_rtx_MEM (BLKmode,
8631 memory_address (BLKmode, dest_rtx));
8632
8633 /* Just check DST is writable and mark it as readable. */
8634 if (flag_check_memory_usage)
8635 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8636 dest_rtx, ptr_mode,
8637 len_rtx, TYPE_MODE (sizetype),
8638 GEN_INT (MEMORY_USE_WO),
8639 TYPE_MODE (integer_type_node));
bbf6f052 8640
b93a436e
JL
8641 /* There could be a void* cast on top of the object. */
8642 while (TREE_CODE (dest) == NOP_EXPR)
8643 dest = TREE_OPERAND (dest, 0);
87d1ea79
JC
8644
8645 if (TREE_CODE (dest) == ADDR_EXPR)
8646 /* If this is the address of an object, check whether the
8647 object is an array. */
8648 type = TREE_TYPE (TREE_OPERAND (dest, 0));
8649 else
8650 type = TREE_TYPE (TREE_TYPE (dest));
b93a436e 8651 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
bbf6f052 8652
b93a436e 8653 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
bbf6f052 8654
b93a436e
JL
8655 if (dest_addr == 0)
8656 dest_addr = force_operand (dest_rtx, NULL_RTX);
bbf6f052 8657
b93a436e
JL
8658 return dest_addr;
8659 }
bbf6f052 8660
b93a436e
JL
8661/* These comparison functions need an instruction that returns an actual
8662 index. An ordinary compare that just sets the condition codes
8663 is not enough. */
8664#ifdef HAVE_cmpstrsi
8665 case BUILT_IN_STRCMP:
8666 /* If not optimizing, call the library function. */
8667 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8668 break;
bbf6f052 8669
b93a436e
JL
8670 /* If we need to check memory accesses, call the library function. */
8671 if (flag_check_memory_usage)
8672 break;
bbf6f052 8673
b93a436e
JL
8674 if (arglist == 0
8675 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8676 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8677 || TREE_CHAIN (arglist) == 0
8678 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8679 break;
8680 else if (!HAVE_cmpstrsi)
8681 break;
8682 {
8683 tree arg1 = TREE_VALUE (arglist);
8684 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
b93a436e 8685 tree len, len2;
a97f5a86 8686
b93a436e
JL
8687 len = c_strlen (arg1);
8688 if (len)
8689 len = size_binop (PLUS_EXPR, integer_one_node, len);
8690 len2 = c_strlen (arg2);
8691 if (len2)
8692 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
e9cdf6e4 8693
b93a436e
JL
8694 /* If we don't have a constant length for the first, use the length
8695 of the second, if we know it. We don't require a constant for
8696 this case; some cost analysis could be done if both are available
8697 but neither is constant. For now, assume they're equally cheap.
e9cdf6e4 8698
b93a436e
JL
8699 If both strings have constant lengths, use the smaller. This
8700 could arise if optimization results in strcpy being called with
8701 two fixed strings, or if the code was machine-generated. We should
8702 add some code to the `memcmp' handler below to deal with such
8703 situations, someday. */
8704 if (!len || TREE_CODE (len) != INTEGER_CST)
8705 {
8706 if (len2)
8707 len = len2;
8708 else if (len == 0)
8709 break;
8710 }
8711 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8712 {
8713 if (tree_int_cst_lt (len2, len))
8714 len = len2;
8715 }
bbf6f052 8716
b93a436e
JL
8717 chainon (arglist, build_tree_list (NULL_TREE, len));
8718 }
bbf6f052 8719
b93a436e
JL
8720 /* Drops in. */
8721 case BUILT_IN_MEMCMP:
8722 /* If not optimizing, call the library function. */
8723 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8724 break;
bbf6f052 8725
b93a436e
JL
8726 /* If we need to check memory accesses, call the library function. */
8727 if (flag_check_memory_usage)
8728 break;
bbf6f052 8729
b93a436e
JL
8730 if (arglist == 0
8731 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8732 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8733 || TREE_CHAIN (arglist) == 0
8734 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8735 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8736 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8737 break;
8738 else if (!HAVE_cmpstrsi)
8739 break;
8740 {
8741 tree arg1 = TREE_VALUE (arglist);
8742 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8743 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8744 rtx result;
0842a179 8745
b93a436e
JL
8746 int arg1_align
8747 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8748 int arg2_align
8749 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8750 enum machine_mode insn_mode
8751 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
0842a179 8752
b93a436e
JL
8753 /* If we don't have POINTER_TYPE, call the function. */
8754 if (arg1_align == 0 || arg2_align == 0)
8755 {
8756 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8757 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8758 break;
8759 }
bbf6f052 8760
b93a436e
JL
8761 /* Make a place to write the result of the instruction. */
8762 result = target;
8763 if (! (result != 0
8764 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8765 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8766 result = gen_reg_rtx (insn_mode);
bbf6f052 8767
b93a436e
JL
8768 emit_insn (gen_cmpstrsi (result,
8769 gen_rtx_MEM (BLKmode,
8770 expand_expr (arg1, NULL_RTX,
8771 ptr_mode,
8772 EXPAND_NORMAL)),
8773 gen_rtx_MEM (BLKmode,
8774 expand_expr (arg2, NULL_RTX,
8775 ptr_mode,
8776 EXPAND_NORMAL)),
8777 expand_expr (len, NULL_RTX, VOIDmode, 0),
8778 GEN_INT (MIN (arg1_align, arg2_align))));
bbf6f052 8779
b93a436e
JL
8780 /* Return the value in the proper mode for this function. */
8781 mode = TYPE_MODE (TREE_TYPE (exp));
8782 if (GET_MODE (result) == mode)
8783 return result;
8784 else if (target != 0)
8785 {
8786 convert_move (target, result, 0);
8787 return target;
8788 }
8789 else
8790 return convert_to_mode (mode, result, 0);
8791 }
8792#else
8793 case BUILT_IN_STRCMP:
8794 case BUILT_IN_MEMCMP:
8795 break;
8796#endif
bbf6f052 8797
b93a436e
JL
8798 case BUILT_IN_SETJMP:
8799 if (arglist == 0
8800 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8801 break;
6fd1c67b
RH
8802 else
8803 {
8804 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8805 VOIDmode, 0);
8806 rtx lab = gen_label_rtx ();
8807 rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
8808 emit_label (lab);
8809 return ret;
8810 }
bbf6f052 8811
6fd1c67b
RH
8812 /* __builtin_longjmp is passed a pointer to an array of five words.
8813 It's similar to the C library longjmp function but works with
8814 __builtin_setjmp above. */
b93a436e
JL
8815 case BUILT_IN_LONGJMP:
8816 if (arglist == 0 || TREE_CHAIN (arglist) == 0
8817 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8818 break;
b93a436e 8819 else
b93a436e 8820 {
6fd1c67b
RH
8821 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8822 VOIDmode, 0);
8823 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
8824 const0_rtx, VOIDmode, 0);
8825 expand_builtin_longjmp (buf_addr, value);
8826 return const0_rtx;
b93a436e 8827 }
bbf6f052 8828
b93a436e
JL
8829 /* Various hooks for the DWARF 2 __throw routine. */
8830 case BUILT_IN_UNWIND_INIT:
8831 expand_builtin_unwind_init ();
8832 return const0_rtx;
8833 case BUILT_IN_FP:
8834 return frame_pointer_rtx;
8835 case BUILT_IN_SP:
8836 return stack_pointer_rtx;
8837#ifdef DWARF2_UNWIND_INFO
8838 case BUILT_IN_DWARF_FP_REGNUM:
8839 return expand_builtin_dwarf_fp_regnum ();
8840 case BUILT_IN_DWARF_REG_SIZE:
8841 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
fb2ca25a 8842#endif
b93a436e
JL
8843 case BUILT_IN_FROB_RETURN_ADDR:
8844 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
8845 case BUILT_IN_EXTRACT_RETURN_ADDR:
8846 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
8847 case BUILT_IN_SET_RETURN_ADDR_REG:
8848 expand_builtin_set_return_addr_reg (TREE_VALUE (arglist));
8849 return const0_rtx;
8850 case BUILT_IN_EH_STUB:
8851 return expand_builtin_eh_stub ();
8852 case BUILT_IN_SET_EH_REGS:
8853 expand_builtin_set_eh_regs (TREE_VALUE (arglist),
8854 TREE_VALUE (TREE_CHAIN (arglist)));
8855 return const0_rtx;
ca695ac9 8856
b93a436e
JL
8857 default: /* just do library call, if unknown builtin */
8858 error ("built-in function `%s' not currently supported",
8859 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
ca695ac9 8860 }
0006469d 8861
b93a436e
JL
8862 /* The switch statement above can drop through to cause the function
8863 to be called normally. */
0006469d 8864
b93a436e 8865 return expand_call (exp, target, ignore);
ca695ac9 8866}
b93a436e
JL
8867\f
8868/* Built-in functions to perform an untyped call and return. */
0006469d 8869
b93a436e
JL
8870/* For each register that may be used for calling a function, this
8871 gives a mode used to copy the register's value. VOIDmode indicates
8872 the register is not used for calling a function. If the machine
8873 has register windows, this gives only the outbound registers.
8874 INCOMING_REGNO gives the corresponding inbound register. */
8875static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
0006469d 8876
b93a436e
JL
8877/* For each register that may be used for returning values, this gives
8878 a mode used to copy the register's value. VOIDmode indicates the
8879 register is not used for returning values. If the machine has
8880 register windows, this gives only the outbound registers.
8881 INCOMING_REGNO gives the corresponding inbound register. */
8882static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
0006469d 8883
b93a436e
JL
8884/* For each register that may be used for calling a function, this
8885 gives the offset of that register into the block returned by
8886 __builtin_apply_args. 0 indicates that the register is not
8887 used for calling a function. */
8888static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
8889
8890/* Return the offset of register REGNO into the block returned by
8891 __builtin_apply_args. This is not declared static, since it is
8892 needed in objc-act.c. */
0006469d 8893
b93a436e
JL
8894int
8895apply_args_register_offset (regno)
8896 int regno;
8897{
8898 apply_args_size ();
0006469d 8899
b93a436e
JL
8900 /* Arguments are always put in outgoing registers (in the argument
8901 block) if such make sense. */
8902#ifdef OUTGOING_REGNO
8903 regno = OUTGOING_REGNO(regno);
8904#endif
8905 return apply_args_reg_offset[regno];
8906}
904762c8 8907
b93a436e
JL
8908/* Return the size required for the block returned by __builtin_apply_args,
8909 and initialize apply_args_mode. */
8910
8911static int
8912apply_args_size ()
0006469d 8913{
b93a436e
JL
8914 static int size = -1;
8915 int align, regno;
2f6e6d22 8916 enum machine_mode mode;
0006469d 8917
b93a436e
JL
8918 /* The values computed by this function never change. */
8919 if (size < 0)
ca695ac9 8920 {
b93a436e
JL
8921 /* The first value is the incoming arg-pointer. */
8922 size = GET_MODE_SIZE (Pmode);
0006469d 8923
b93a436e
JL
8924 /* The second value is the structure value address unless this is
8925 passed as an "invisible" first argument. */
8926 if (struct_value_rtx)
8927 size += GET_MODE_SIZE (Pmode);
0006469d 8928
b93a436e
JL
8929 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8930 if (FUNCTION_ARG_REGNO_P (regno))
8931 {
8932 /* Search for the proper mode for copying this register's
8933 value. I'm not sure this is right, but it works so far. */
8934 enum machine_mode best_mode = VOIDmode;
0006469d 8935
b93a436e
JL
8936 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8937 mode != VOIDmode;
8938 mode = GET_MODE_WIDER_MODE (mode))
8939 if (HARD_REGNO_MODE_OK (regno, mode)
8940 && HARD_REGNO_NREGS (regno, mode) == 1)
8941 best_mode = mode;
0006469d 8942
b93a436e
JL
8943 if (best_mode == VOIDmode)
8944 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8945 mode != VOIDmode;
8946 mode = GET_MODE_WIDER_MODE (mode))
8947 if (HARD_REGNO_MODE_OK (regno, mode)
8948 && (mov_optab->handlers[(int) mode].insn_code
8949 != CODE_FOR_nothing))
8950 best_mode = mode;
0006469d 8951
b93a436e
JL
8952 mode = best_mode;
8953 if (mode == VOIDmode)
8954 abort ();
904762c8 8955
b93a436e
JL
8956 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8957 if (size % align != 0)
8958 size = CEIL (size, align) * align;
8959 apply_args_reg_offset[regno] = size;
8960 size += GET_MODE_SIZE (mode);
8961 apply_args_mode[regno] = mode;
8962 }
8963 else
8964 {
8965 apply_args_mode[regno] = VOIDmode;
8966 apply_args_reg_offset[regno] = 0;
8967 }
8968 }
8969 return size;
8970}
0006469d 8971
b93a436e
JL
8972/* Return the size required for the block returned by __builtin_apply,
8973 and initialize apply_result_mode. */
904762c8 8974
b93a436e
JL
8975static int
8976apply_result_size ()
8977{
8978 static int size = -1;
8979 int align, regno;
8980 enum machine_mode mode;
0006469d 8981
b93a436e
JL
8982 /* The values computed by this function never change. */
8983 if (size < 0)
8984 {
8985 size = 0;
0006469d 8986
b93a436e
JL
8987 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8988 if (FUNCTION_VALUE_REGNO_P (regno))
8989 {
8990 /* Search for the proper mode for copying this register's
8991 value. I'm not sure this is right, but it works so far. */
8992 enum machine_mode best_mode = VOIDmode;
0006469d 8993
b93a436e
JL
8994 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8995 mode != TImode;
8996 mode = GET_MODE_WIDER_MODE (mode))
8997 if (HARD_REGNO_MODE_OK (regno, mode))
8998 best_mode = mode;
0006469d 8999
b93a436e
JL
9000 if (best_mode == VOIDmode)
9001 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9002 mode != VOIDmode;
9003 mode = GET_MODE_WIDER_MODE (mode))
9004 if (HARD_REGNO_MODE_OK (regno, mode)
9005 && (mov_optab->handlers[(int) mode].insn_code
9006 != CODE_FOR_nothing))
9007 best_mode = mode;
0006469d 9008
b93a436e
JL
9009 mode = best_mode;
9010 if (mode == VOIDmode)
9011 abort ();
9012
9013 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9014 if (size % align != 0)
9015 size = CEIL (size, align) * align;
9016 size += GET_MODE_SIZE (mode);
9017 apply_result_mode[regno] = mode;
9018 }
9019 else
9020 apply_result_mode[regno] = VOIDmode;
9021
9022 /* Allow targets that use untyped_call and untyped_return to override
9023 the size so that machine-specific information can be stored here. */
9024#ifdef APPLY_RESULT_SIZE
9025 size = APPLY_RESULT_SIZE;
9026#endif
9027 }
9028 return size;
9029}
0006469d 9030
b93a436e
JL
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (savep, result)
     int savep;
     rtx result;
{
  rtx *sets = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
  int count = 0;
  int offset = 0;
  int regno;

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    {
      enum machine_mode mode = apply_result_mode[regno];
      int align;
      rtx reg, mem;

      if (mode == VOIDmode)
	continue;

      /* Keep each slot in the block aligned for its mode.  */
      align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (offset % align != 0)
	offset = CEIL (offset, align) * align;

      /* When restoring, the values come back in the incoming registers.  */
      reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
      mem = change_address (result, mode,
			    plus_constant (XEXP (result, 0), offset));
      sets[count++] = (savep
		       ? gen_rtx_SET (VOIDmode, mem, reg)
		       : gen_rtx_SET (VOIDmode, reg, mem));
      offset += GET_MODE_SIZE (mode);
    }

  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (count, sets));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
0006469d 9064
b93a436e
JL
9065/* Save the state required to perform an untyped call with the same
9066 arguments as were passed to the current function. */
904762c8 9067
b93a436e
JL
9068static rtx
9069expand_builtin_apply_args ()
9070{
9071 rtx registers;
9072 int size, align, regno;
9073 enum machine_mode mode;
0006469d 9074
b93a436e
JL
9075 /* Create a block where the arg-pointer, structure value address,
9076 and argument registers can be saved. */
9077 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
0cb1d109 9078
b93a436e
JL
9079 /* Walk past the arg-pointer and structure value address. */
9080 size = GET_MODE_SIZE (Pmode);
9081 if (struct_value_rtx)
9082 size += GET_MODE_SIZE (Pmode);
0cb1d109 9083
b93a436e
JL
9084 /* Save each register used in calling a function to the block. */
9085 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9086 if ((mode = apply_args_mode[regno]) != VOIDmode)
9087 {
9088 rtx tem;
0cb1d109 9089
b93a436e
JL
9090 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9091 if (size % align != 0)
9092 size = CEIL (size, align) * align;
0006469d 9093
b93a436e 9094 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
0e8c9172 9095
b93a436e
JL
9096#ifdef STACK_REGS
9097 /* For reg-stack.c's stack register household.
9098 Compare with a similar piece of code in function.c. */
0006469d 9099
b93a436e
JL
9100 emit_insn (gen_rtx_USE (mode, tem));
9101#endif
0e8c9172 9102
b93a436e
JL
9103 emit_move_insn (change_address (registers, mode,
9104 plus_constant (XEXP (registers, 0),
9105 size)),
9106 tem);
9107 size += GET_MODE_SIZE (mode);
0e8c9172 9108 }
0006469d 9109
b93a436e
JL
9110 /* Save the arg pointer to the block. */
9111 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9112 copy_to_reg (virtual_incoming_args_rtx));
9113 size = GET_MODE_SIZE (Pmode);
0006469d 9114
b93a436e
JL
9115 /* Save the structure value address unless this is passed as an
9116 "invisible" first argument. */
9117 if (struct_value_incoming_rtx)
9118 {
9119 emit_move_insn (change_address (registers, Pmode,
9120 plus_constant (XEXP (registers, 0),
9121 size)),
9122 copy_to_reg (struct_value_incoming_rtx));
9123 size += GET_MODE_SIZE (Pmode);
9124 }
0006469d 9125
b93a436e
JL
9126 /* Return the address of the block. */
9127 return copy_addr_to_reg (XEXP (registers, 0));
9128}
0006469d 9129
b93a436e
JL
9130/* Perform an untyped call and save the state required to perform an
9131 untyped return of whatever value was returned by the given function. */
0006469d 9132
b93a436e
JL
9133static rtx
9134expand_builtin_apply (function, arguments, argsize)
9135 rtx function, arguments, argsize;
9136{
9137 int size, align, regno;
9138 enum machine_mode mode;
9139 rtx incoming_args, result, reg, dest, call_insn;
9140 rtx old_stack_level = 0;
9141 rtx call_fusage = 0;
0006469d 9142
b93a436e
JL
9143 /* Create a block where the return registers can be saved. */
9144 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9145
9146 /* ??? The argsize value should be adjusted here. */
9147
9148 /* Fetch the arg pointer from the ARGUMENTS block. */
9149 incoming_args = gen_reg_rtx (Pmode);
9150 emit_move_insn (incoming_args,
9151 gen_rtx_MEM (Pmode, arguments));
9152#ifndef STACK_GROWS_DOWNWARD
9153 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9154 incoming_args, 0, OPTAB_LIB_WIDEN);
9155#endif
9156
9157 /* Perform postincrements before actually calling the function. */
ca695ac9 9158 emit_queue ();
0006469d 9159
b93a436e
JL
9160 /* Push a new argument block and copy the arguments. */
9161 do_pending_stack_adjust ();
0006469d 9162
b93a436e
JL
9163 /* Save the stack with nonlocal if available */
9164#ifdef HAVE_save_stack_nonlocal
9165 if (HAVE_save_stack_nonlocal)
9166 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
9167 else
9168#endif
9169 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
0006469d 9170
b93a436e
JL
9171 /* Push a block of memory onto the stack to store the memory arguments.
9172 Save the address in a register, and copy the memory arguments. ??? I
9173 haven't figured out how the calling convention macros effect this,
9174 but it's likely that the source and/or destination addresses in
9175 the block copy will need updating in machine specific ways. */
9176 dest = allocate_dynamic_stack_space (argsize, 0, 0);
9177 emit_block_move (gen_rtx_MEM (BLKmode, dest),
9178 gen_rtx_MEM (BLKmode, incoming_args),
9179 argsize,
9180 PARM_BOUNDARY / BITS_PER_UNIT);
9181
9182 /* Refer to the argument block. */
9183 apply_args_size ();
9184 arguments = gen_rtx_MEM (BLKmode, arguments);
9185
9186 /* Walk past the arg-pointer and structure value address. */
9187 size = GET_MODE_SIZE (Pmode);
9188 if (struct_value_rtx)
9189 size += GET_MODE_SIZE (Pmode);
9190
9191 /* Restore each of the registers previously saved. Make USE insns
9192 for each of these registers for use in making the call. */
9193 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9194 if ((mode = apply_args_mode[regno]) != VOIDmode)
9195 {
9196 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9197 if (size % align != 0)
9198 size = CEIL (size, align) * align;
9199 reg = gen_rtx_REG (mode, regno);
9200 emit_move_insn (reg,
9201 change_address (arguments, mode,
9202 plus_constant (XEXP (arguments, 0),
9203 size)));
9204
9205 use_reg (&call_fusage, reg);
9206 size += GET_MODE_SIZE (mode);
9207 }
9208
9209 /* Restore the structure value address unless this is passed as an
9210 "invisible" first argument. */
9211 size = GET_MODE_SIZE (Pmode);
9212 if (struct_value_rtx)
0006469d 9213 {
b93a436e
JL
9214 rtx value = gen_reg_rtx (Pmode);
9215 emit_move_insn (value,
9216 change_address (arguments, Pmode,
9217 plus_constant (XEXP (arguments, 0),
9218 size)));
9219 emit_move_insn (struct_value_rtx, value);
9220 if (GET_CODE (struct_value_rtx) == REG)
9221 use_reg (&call_fusage, struct_value_rtx);
9222 size += GET_MODE_SIZE (Pmode);
ca695ac9 9223 }
0006469d 9224
b93a436e
JL
9225 /* All arguments and registers used for the call are set up by now! */
9226 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
0006469d 9227
b93a436e
JL
9228 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9229 and we don't want to load it into a register as an optimization,
9230 because prepare_call_address already did it if it should be done. */
9231 if (GET_CODE (function) != SYMBOL_REF)
9232 function = memory_address (FUNCTION_MODE, function);
0006469d 9233
b93a436e
JL
9234 /* Generate the actual call instruction and save the return value. */
9235#ifdef HAVE_untyped_call
9236 if (HAVE_untyped_call)
9237 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
9238 result, result_vector (1, result)));
9239 else
9240#endif
9241#ifdef HAVE_call_value
9242 if (HAVE_call_value)
ca695ac9 9243 {
b93a436e 9244 rtx valreg = 0;
0006469d 9245
b93a436e
JL
9246 /* Locate the unique return register. It is not possible to
9247 express a call that sets more than one return register using
9248 call_value; use untyped_call for that. In fact, untyped_call
9249 only needs to save the return registers in the given block. */
9250 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9251 if ((mode = apply_result_mode[regno]) != VOIDmode)
9252 {
9253 if (valreg)
9254 abort (); /* HAVE_untyped_call required. */
9255 valreg = gen_rtx_REG (mode, regno);
9256 }
0006469d 9257
b93a436e
JL
9258 emit_call_insn (gen_call_value (valreg,
9259 gen_rtx_MEM (FUNCTION_MODE, function),
9260 const0_rtx, NULL_RTX, const0_rtx));
0006469d 9261
b93a436e
JL
9262 emit_move_insn (change_address (result, GET_MODE (valreg),
9263 XEXP (result, 0)),
9264 valreg);
ca695ac9 9265 }
b93a436e
JL
9266 else
9267#endif
9268 abort ();
0006469d 9269
b93a436e
JL
9270 /* Find the CALL insn we just emitted. */
9271 for (call_insn = get_last_insn ();
9272 call_insn && GET_CODE (call_insn) != CALL_INSN;
9273 call_insn = PREV_INSN (call_insn))
9274 ;
0006469d 9275
b93a436e
JL
9276 if (! call_insn)
9277 abort ();
0006469d 9278
b93a436e
JL
9279 /* Put the register usage information on the CALL. If there is already
9280 some usage information, put ours at the end. */
9281 if (CALL_INSN_FUNCTION_USAGE (call_insn))
0006469d 9282 {
b93a436e 9283 rtx link;
0006469d 9284
b93a436e
JL
9285 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9286 link = XEXP (link, 1))
9287 ;
9288
9289 XEXP (link, 1) = call_fusage;
ca695ac9 9290 }
b93a436e
JL
9291 else
9292 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
0006469d 9293
b93a436e
JL
9294 /* Restore the stack. */
9295#ifdef HAVE_save_stack_nonlocal
9296 if (HAVE_save_stack_nonlocal)
9297 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
9298 else
9299#endif
9300 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9301
9302 /* Return the address of the result block. */
9303 return copy_addr_to_reg (XEXP (result, 0));
0006469d 9304}
bbf6f052 9305
b93a436e 9306/* Perform an untyped return. */
ca695ac9
JB
9307
9308static void
b93a436e
JL
9309expand_builtin_return (result)
9310 rtx result;
bbf6f052 9311{
b93a436e
JL
9312 int size, align, regno;
9313 enum machine_mode mode;
9314 rtx reg;
9315 rtx call_fusage = 0;
bbf6f052 9316
b93a436e
JL
9317 apply_result_size ();
9318 result = gen_rtx_MEM (BLKmode, result);
bbf6f052 9319
b93a436e
JL
9320#ifdef HAVE_untyped_return
9321 if (HAVE_untyped_return)
ca695ac9 9322 {
b93a436e
JL
9323 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9324 emit_barrier ();
9325 return;
ca695ac9 9326 }
b93a436e 9327#endif
1499e0a8 9328
b93a436e
JL
9329 /* Restore the return value and note that each value is used. */
9330 size = 0;
9331 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9332 if ((mode = apply_result_mode[regno]) != VOIDmode)
9333 {
9334 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9335 if (size % align != 0)
9336 size = CEIL (size, align) * align;
9337 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9338 emit_move_insn (reg,
9339 change_address (result, mode,
9340 plus_constant (XEXP (result, 0),
9341 size)));
9342
9343 push_to_sequence (call_fusage);
9344 emit_insn (gen_rtx_USE (VOIDmode, reg));
9345 call_fusage = get_insns ();
9346 end_sequence ();
9347 size += GET_MODE_SIZE (mode);
9348 }
9349
9350 /* Put the USE insns before the return. */
9351 emit_insns (call_fusage);
9352
9353 /* Return whatever values was restored by jumping directly to the end
9354 of the function. */
9355 expand_null_return ();
ca695ac9
JB
9356}
9357\f
b93a436e
JL
9358/* Expand code for a post- or pre- increment or decrement
9359 and return the RTX for the result.
9360 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
1499e0a8 9361
b93a436e
JL
9362static rtx
9363expand_increment (exp, post, ignore)
9364 register tree exp;
9365 int post, ignore;
ca695ac9 9366{
b93a436e
JL
9367 register rtx op0, op1;
9368 register rtx temp, value;
9369 register tree incremented = TREE_OPERAND (exp, 0);
9370 optab this_optab = add_optab;
9371 int icode;
9372 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9373 int op0_is_copy = 0;
9374 int single_insn = 0;
9375 /* 1 means we can't store into OP0 directly,
9376 because it is a subreg narrower than a word,
9377 and we don't dare clobber the rest of the word. */
9378 int bad_subreg = 0;
1499e0a8 9379
b93a436e
JL
9380 /* Stabilize any component ref that might need to be
9381 evaluated more than once below. */
9382 if (!post
9383 || TREE_CODE (incremented) == BIT_FIELD_REF
9384 || (TREE_CODE (incremented) == COMPONENT_REF
9385 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9386 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9387 incremented = stabilize_reference (incremented);
9388 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9389 ones into save exprs so that they don't accidentally get evaluated
9390 more than once by the code below. */
9391 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9392 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9393 incremented = save_expr (incremented);
e9a25f70 9394
b93a436e
JL
9395 /* Compute the operands as RTX.
9396 Note whether OP0 is the actual lvalue or a copy of it:
9397 I believe it is a copy iff it is a register or subreg
9398 and insns were generated in computing it. */
e9a25f70 9399
b93a436e
JL
9400 temp = get_last_insn ();
9401 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
e9a25f70 9402
b93a436e
JL
9403 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9404 in place but instead must do sign- or zero-extension during assignment,
9405 so we copy it into a new register and let the code below use it as
9406 a copy.
e9a25f70 9407
b93a436e
JL
9408 Note that we can safely modify this SUBREG since it is know not to be
9409 shared (it was made by the expand_expr call above). */
9410
9411 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9412 {
9413 if (post)
9414 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9415 else
9416 bad_subreg = 1;
9417 }
9418 else if (GET_CODE (op0) == SUBREG
9419 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9420 {
9421 /* We cannot increment this SUBREG in place. If we are
9422 post-incrementing, get a copy of the old value. Otherwise,
9423 just mark that we cannot increment in place. */
9424 if (post)
9425 op0 = copy_to_reg (op0);
9426 else
9427 bad_subreg = 1;
e9a25f70
JL
9428 }
9429
b93a436e
JL
9430 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9431 && temp != get_last_insn ());
9432 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9433 EXPAND_MEMORY_USE_BAD);
1499e0a8 9434
b93a436e
JL
9435 /* Decide whether incrementing or decrementing. */
9436 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9437 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9438 this_optab = sub_optab;
9439
9440 /* Convert decrement by a constant into a negative increment. */
9441 if (this_optab == sub_optab
9442 && GET_CODE (op1) == CONST_INT)
ca695ac9 9443 {
b93a436e
JL
9444 op1 = GEN_INT (- INTVAL (op1));
9445 this_optab = add_optab;
ca695ac9 9446 }
1499e0a8 9447
b93a436e
JL
9448 /* For a preincrement, see if we can do this with a single instruction. */
9449 if (!post)
9450 {
9451 icode = (int) this_optab->handlers[(int) mode].insn_code;
9452 if (icode != (int) CODE_FOR_nothing
9453 /* Make sure that OP0 is valid for operands 0 and 1
9454 of the insn we want to queue. */
9455 && (*insn_operand_predicate[icode][0]) (op0, mode)
9456 && (*insn_operand_predicate[icode][1]) (op0, mode)
9457 && (*insn_operand_predicate[icode][2]) (op1, mode))
9458 single_insn = 1;
9459 }
bbf6f052 9460
b93a436e
JL
9461 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9462 then we cannot just increment OP0. We must therefore contrive to
9463 increment the original value. Then, for postincrement, we can return
9464 OP0 since it is a copy of the old value. For preincrement, expand here
9465 unless we can do it with a single insn.
bbf6f052 9466
b93a436e
JL
9467 Likewise if storing directly into OP0 would clobber high bits
9468 we need to preserve (bad_subreg). */
9469 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 9470 {
b93a436e
JL
9471 /* This is the easiest way to increment the value wherever it is.
9472 Problems with multiple evaluation of INCREMENTED are prevented
9473 because either (1) it is a component_ref or preincrement,
9474 in which case it was stabilized above, or (2) it is an array_ref
9475 with constant index in an array in a register, which is
9476 safe to reevaluate. */
9477 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9478 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9479 ? MINUS_EXPR : PLUS_EXPR),
9480 TREE_TYPE (exp),
9481 incremented,
9482 TREE_OPERAND (exp, 1));
a358cee0 9483
b93a436e
JL
9484 while (TREE_CODE (incremented) == NOP_EXPR
9485 || TREE_CODE (incremented) == CONVERT_EXPR)
9486 {
9487 newexp = convert (TREE_TYPE (incremented), newexp);
9488 incremented = TREE_OPERAND (incremented, 0);
9489 }
bbf6f052 9490
b93a436e
JL
9491 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9492 return post ? op0 : temp;
9493 }
bbf6f052 9494
b93a436e
JL
9495 if (post)
9496 {
9497 /* We have a true reference to the value in OP0.
9498 If there is an insn to add or subtract in this mode, queue it.
9499 Queueing the increment insn avoids the register shuffling
9500 that often results if we must increment now and first save
9501 the old value for subsequent use. */
bbf6f052 9502
b93a436e
JL
9503#if 0 /* Turned off to avoid making extra insn for indexed memref. */
9504 op0 = stabilize (op0);
9505#endif
41dfd40c 9506
b93a436e
JL
9507 icode = (int) this_optab->handlers[(int) mode].insn_code;
9508 if (icode != (int) CODE_FOR_nothing
9509 /* Make sure that OP0 is valid for operands 0 and 1
9510 of the insn we want to queue. */
9511 && (*insn_operand_predicate[icode][0]) (op0, mode)
9512 && (*insn_operand_predicate[icode][1]) (op0, mode))
9513 {
9514 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9515 op1 = force_reg (mode, op1);
bbf6f052 9516
b93a436e
JL
9517 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9518 }
9519 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9520 {
9521 rtx addr = (general_operand (XEXP (op0, 0), mode)
9522 ? force_reg (Pmode, XEXP (op0, 0))
9523 : copy_to_reg (XEXP (op0, 0)));
9524 rtx temp, result;
ca695ac9 9525
b93a436e
JL
9526 op0 = change_address (op0, VOIDmode, addr);
9527 temp = force_reg (GET_MODE (op0), op0);
9528 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9529 op1 = force_reg (mode, op1);
ca695ac9 9530
b93a436e
JL
9531 /* The increment queue is LIFO, thus we have to `queue'
9532 the instructions in reverse order. */
9533 enqueue_insn (op0, gen_move_insn (op0, temp));
9534 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9535 return result;
bbf6f052
RK
9536 }
9537 }
ca695ac9 9538
b93a436e
JL
9539 /* Preincrement, or we can't increment with one simple insn. */
9540 if (post)
9541 /* Save a copy of the value before inc or dec, to return it later. */
9542 temp = value = copy_to_reg (op0);
9543 else
9544 /* Arrange to return the incremented value. */
9545 /* Copy the rtx because expand_binop will protect from the queue,
9546 and the results of that would be invalid for us to return
9547 if our caller does emit_queue before using our result. */
9548 temp = copy_rtx (value = op0);
bbf6f052 9549
b93a436e
JL
9550 /* Increment however we can. */
9551 op1 = expand_binop (mode, this_optab, value, op1,
9552 flag_check_memory_usage ? NULL_RTX : op0,
9553 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9554 /* Make sure the value is stored into OP0. */
9555 if (op1 != op0)
9556 emit_move_insn (op0, op1);
5718612f 9557
b93a436e
JL
9558 return temp;
9559}
9560\f
9561/* Expand all function calls contained within EXP, innermost ones first.
9562 But don't look within expressions that have sequence points.
9563 For each CALL_EXPR, record the rtx for its value
9564 in the CALL_EXPR_RTL field. */
5718612f 9565
b93a436e
JL
9566static void
9567preexpand_calls (exp)
9568 tree exp;
9569{
9570 register int nops, i;
9571 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5718612f 9572
b93a436e
JL
9573 if (! do_preexpand_calls)
9574 return;
5718612f 9575
b93a436e 9576 /* Only expressions and references can contain calls. */
bbf6f052 9577
b93a436e
JL
9578 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9579 return;
bbf6f052 9580
b93a436e
JL
9581 switch (TREE_CODE (exp))
9582 {
9583 case CALL_EXPR:
9584 /* Do nothing if already expanded. */
9585 if (CALL_EXPR_RTL (exp) != 0
9586 /* Do nothing if the call returns a variable-sized object. */
9587 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
9588 /* Do nothing to built-in functions. */
9589 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9590 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9591 == FUNCTION_DECL)
9592 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9593 return;
bbf6f052 9594
b93a436e
JL
9595 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9596 return;
bbf6f052 9597
b93a436e
JL
9598 case COMPOUND_EXPR:
9599 case COND_EXPR:
9600 case TRUTH_ANDIF_EXPR:
9601 case TRUTH_ORIF_EXPR:
9602 /* If we find one of these, then we can be sure
9603 the adjust will be done for it (since it makes jumps).
9604 Do it now, so that if this is inside an argument
9605 of a function, we don't get the stack adjustment
9606 after some other args have already been pushed. */
9607 do_pending_stack_adjust ();
9608 return;
bbf6f052 9609
b93a436e
JL
9610 case BLOCK:
9611 case RTL_EXPR:
9612 case WITH_CLEANUP_EXPR:
9613 case CLEANUP_POINT_EXPR:
9614 case TRY_CATCH_EXPR:
9615 return;
bbf6f052 9616
b93a436e
JL
9617 case SAVE_EXPR:
9618 if (SAVE_EXPR_RTL (exp) != 0)
9619 return;
9620
9621 default:
9622 break;
ca695ac9 9623 }
bbf6f052 9624
b93a436e
JL
9625 nops = tree_code_length[(int) TREE_CODE (exp)];
9626 for (i = 0; i < nops; i++)
9627 if (TREE_OPERAND (exp, i) != 0)
9628 {
9629 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9630 if (type == 'e' || type == '<' || type == '1' || type == '2'
9631 || type == 'r')
9632 preexpand_calls (TREE_OPERAND (exp, i));
9633 }
9634}
9635\f
9636/* At the start of a function, record that we have no previously-pushed
9637 arguments waiting to be popped. */
bbf6f052 9638
b93a436e
JL
9639void
9640init_pending_stack_adjust ()
9641{
9642 pending_stack_adjust = 0;
9643}
bbf6f052 9644
/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  /* Only safe when optimizing, the exit code ignores the stack pointer,
     a frame pointer will exist, and the function cannot be inlined
     (inlining would splice this body into a caller that still needs
     the adjustment).  */
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    pending_stack_adjust = 0;
#endif
}
bbf6f052 9663
b93a436e
JL
9664/* Pop any previously-pushed arguments that have not been popped yet. */
9665
9666void
9667do_pending_stack_adjust ()
9668{
9669 if (inhibit_defer_pop == 0)
ca695ac9 9670 {
b93a436e
JL
9671 if (pending_stack_adjust != 0)
9672 adjust_stack (GEN_INT (pending_stack_adjust));
9673 pending_stack_adjust = 0;
bbf6f052 9674 }
bbf6f052
RK
9675}
9676\f
b93a436e 9677/* Expand conditional expressions. */
bbf6f052 9678
b93a436e
JL
9679/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9680 LABEL is an rtx of code CODE_LABEL, in this function and all the
9681 functions here. */
bbf6f052 9682
b93a436e
JL
9683void
9684jumpifnot (exp, label)
ca695ac9 9685 tree exp;
b93a436e 9686 rtx label;
bbf6f052 9687{
b93a436e
JL
9688 do_jump (exp, label, NULL_RTX);
9689}
bbf6f052 9690
b93a436e 9691/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
ca695ac9 9692
b93a436e
JL
9693void
9694jumpif (exp, label)
9695 tree exp;
9696 rtx label;
9697{
9698 do_jump (exp, NULL_RTX, label);
9699}
ca695ac9 9700
b93a436e
JL
9701/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9702 the result is zero, or IF_TRUE_LABEL if the result is one.
9703 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9704 meaning fall through in that case.
ca695ac9 9705
b93a436e
JL
9706 do_jump always does any pending stack adjust except when it does not
9707 actually perform a jump. An example where there is no jump
9708 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
ca695ac9 9709
b93a436e
JL
9710 This function is responsible for optimizing cases such as
9711 &&, || and comparison operators in EXP. */
5718612f 9712
b93a436e
JL
9713void
9714do_jump (exp, if_false_label, if_true_label)
9715 tree exp;
9716 rtx if_false_label, if_true_label;
9717{
9718 register enum tree_code code = TREE_CODE (exp);
9719 /* Some cases need to create a label to jump to
9720 in order to properly fall through.
9721 These cases set DROP_THROUGH_LABEL nonzero. */
9722 rtx drop_through_label = 0;
9723 rtx temp;
9724 rtx comparison = 0;
9725 int i;
9726 tree type;
9727 enum machine_mode mode;
ca695ac9 9728
b93a436e 9729 emit_queue ();
ca695ac9 9730
b93a436e 9731 switch (code)
ca695ac9 9732 {
b93a436e 9733 case ERROR_MARK:
ca695ac9 9734 break;
bbf6f052 9735
b93a436e
JL
9736 case INTEGER_CST:
9737 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9738 if (temp)
9739 emit_jump (temp);
9740 break;
bbf6f052 9741
b93a436e
JL
9742#if 0
9743 /* This is not true with #pragma weak */
9744 case ADDR_EXPR:
9745 /* The address of something can never be zero. */
9746 if (if_true_label)
9747 emit_jump (if_true_label);
9748 break;
9749#endif
bbf6f052 9750
b93a436e
JL
9751 case NOP_EXPR:
9752 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9753 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9754 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9755 goto normal;
9756 case CONVERT_EXPR:
9757 /* If we are narrowing the operand, we have to do the compare in the
9758 narrower mode. */
9759 if ((TYPE_PRECISION (TREE_TYPE (exp))
9760 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9761 goto normal;
9762 case NON_LVALUE_EXPR:
9763 case REFERENCE_EXPR:
9764 case ABS_EXPR:
9765 case NEGATE_EXPR:
9766 case LROTATE_EXPR:
9767 case RROTATE_EXPR:
9768 /* These cannot change zero->non-zero or vice versa. */
9769 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9770 break;
bbf6f052 9771
b93a436e
JL
9772#if 0
9773 /* This is never less insns than evaluating the PLUS_EXPR followed by
9774 a test and can be longer if the test is eliminated. */
9775 case PLUS_EXPR:
9776 /* Reduce to minus. */
9777 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9778 TREE_OPERAND (exp, 0),
9779 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9780 TREE_OPERAND (exp, 1))));
9781 /* Process as MINUS. */
ca695ac9 9782#endif
bbf6f052 9783
b93a436e
JL
9784 case MINUS_EXPR:
9785 /* Non-zero iff operands of minus differ. */
9786 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
9787 TREE_OPERAND (exp, 0),
9788 TREE_OPERAND (exp, 1)),
9789 NE, NE);
9790 break;
bbf6f052 9791
b93a436e
JL
9792 case BIT_AND_EXPR:
9793 /* If we are AND'ing with a small constant, do this comparison in the
9794 smallest type that fits. If the machine doesn't have comparisons
9795 that small, it will be converted back to the wider comparison.
9796 This helps if we are testing the sign bit of a narrower object.
9797 combine can't do this for us because it can't know whether a
9798 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
bbf6f052 9799
b93a436e
JL
9800 if (! SLOW_BYTE_ACCESS
9801 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9802 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9803 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
9804 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9805 && (type = type_for_mode (mode, 1)) != 0
9806 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9807 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9808 != CODE_FOR_nothing))
9809 {
9810 do_jump (convert (type, exp), if_false_label, if_true_label);
9811 break;
9812 }
9813 goto normal;
bbf6f052 9814
b93a436e
JL
9815 case TRUTH_NOT_EXPR:
9816 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9817 break;
bbf6f052 9818
b93a436e
JL
9819 case TRUTH_ANDIF_EXPR:
9820 if (if_false_label == 0)
9821 if_false_label = drop_through_label = gen_label_rtx ();
9822 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9823 start_cleanup_deferral ();
9824 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9825 end_cleanup_deferral ();
9826 break;
bbf6f052 9827
b93a436e
JL
9828 case TRUTH_ORIF_EXPR:
9829 if (if_true_label == 0)
9830 if_true_label = drop_through_label = gen_label_rtx ();
9831 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9832 start_cleanup_deferral ();
9833 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9834 end_cleanup_deferral ();
9835 break;
bbf6f052 9836
b93a436e
JL
9837 case COMPOUND_EXPR:
9838 push_temp_slots ();
9839 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9840 preserve_temp_slots (NULL_RTX);
9841 free_temp_slots ();
9842 pop_temp_slots ();
9843 emit_queue ();
9844 do_pending_stack_adjust ();
9845 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9846 break;
bbf6f052 9847
b93a436e
JL
9848 case COMPONENT_REF:
9849 case BIT_FIELD_REF:
9850 case ARRAY_REF:
9851 {
9852 int bitsize, bitpos, unsignedp;
9853 enum machine_mode mode;
9854 tree type;
9855 tree offset;
9856 int volatilep = 0;
9857 int alignment;
bbf6f052 9858
b93a436e
JL
9859 /* Get description of this reference. We don't actually care
9860 about the underlying object here. */
9861 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9862 &mode, &unsignedp, &volatilep,
9863 &alignment);
bbf6f052 9864
b93a436e
JL
9865 type = type_for_size (bitsize, unsignedp);
9866 if (! SLOW_BYTE_ACCESS
9867 && type != 0 && bitsize >= 0
9868 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9869 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9870 != CODE_FOR_nothing))
9871 {
9872 do_jump (convert (type, exp), if_false_label, if_true_label);
9873 break;
9874 }
9875 goto normal;
9876 }
bbf6f052 9877
b93a436e
JL
9878 case COND_EXPR:
9879 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9880 if (integer_onep (TREE_OPERAND (exp, 1))
9881 && integer_zerop (TREE_OPERAND (exp, 2)))
9882 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
bbf6f052 9883
b93a436e
JL
9884 else if (integer_zerop (TREE_OPERAND (exp, 1))
9885 && integer_onep (TREE_OPERAND (exp, 2)))
9886 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
bbf6f052 9887
b93a436e
JL
9888 else
9889 {
9890 register rtx label1 = gen_label_rtx ();
9891 drop_through_label = gen_label_rtx ();
bbf6f052 9892
b93a436e 9893 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
bbf6f052 9894
b93a436e
JL
9895 start_cleanup_deferral ();
9896 /* Now the THEN-expression. */
9897 do_jump (TREE_OPERAND (exp, 1),
9898 if_false_label ? if_false_label : drop_through_label,
9899 if_true_label ? if_true_label : drop_through_label);
9900 /* In case the do_jump just above never jumps. */
9901 do_pending_stack_adjust ();
9902 emit_label (label1);
bbf6f052 9903
b93a436e
JL
9904 /* Now the ELSE-expression. */
9905 do_jump (TREE_OPERAND (exp, 2),
9906 if_false_label ? if_false_label : drop_through_label,
9907 if_true_label ? if_true_label : drop_through_label);
9908 end_cleanup_deferral ();
9909 }
9910 break;
bbf6f052 9911
b93a436e
JL
9912 case EQ_EXPR:
9913 {
9914 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 9915
b93a436e
JL
9916 if (integer_zerop (TREE_OPERAND (exp, 1)))
9917 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9918 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9919 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9920 do_jump
9921 (fold
9922 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9923 fold (build (EQ_EXPR, TREE_TYPE (exp),
9924 fold (build1 (REALPART_EXPR,
9925 TREE_TYPE (inner_type),
9926 TREE_OPERAND (exp, 0))),
9927 fold (build1 (REALPART_EXPR,
9928 TREE_TYPE (inner_type),
9929 TREE_OPERAND (exp, 1))))),
9930 fold (build (EQ_EXPR, TREE_TYPE (exp),
9931 fold (build1 (IMAGPART_EXPR,
9932 TREE_TYPE (inner_type),
9933 TREE_OPERAND (exp, 0))),
9934 fold (build1 (IMAGPART_EXPR,
9935 TREE_TYPE (inner_type),
9936 TREE_OPERAND (exp, 1))))))),
9937 if_false_label, if_true_label);
9938 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9939 && !can_compare_p (TYPE_MODE (inner_type)))
9940 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9941 else
9942 comparison = compare (exp, EQ, EQ);
9943 break;
9944 }
bbf6f052 9945
b93a436e
JL
9946 case NE_EXPR:
9947 {
9948 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 9949
b93a436e
JL
9950 if (integer_zerop (TREE_OPERAND (exp, 1)))
9951 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9952 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9953 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9954 do_jump
9955 (fold
9956 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9957 fold (build (NE_EXPR, TREE_TYPE (exp),
9958 fold (build1 (REALPART_EXPR,
9959 TREE_TYPE (inner_type),
9960 TREE_OPERAND (exp, 0))),
9961 fold (build1 (REALPART_EXPR,
9962 TREE_TYPE (inner_type),
9963 TREE_OPERAND (exp, 1))))),
9964 fold (build (NE_EXPR, TREE_TYPE (exp),
9965 fold (build1 (IMAGPART_EXPR,
9966 TREE_TYPE (inner_type),
9967 TREE_OPERAND (exp, 0))),
9968 fold (build1 (IMAGPART_EXPR,
9969 TREE_TYPE (inner_type),
9970 TREE_OPERAND (exp, 1))))))),
9971 if_false_label, if_true_label);
9972 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9973 && !can_compare_p (TYPE_MODE (inner_type)))
9974 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9975 else
9976 comparison = compare (exp, NE, NE);
9977 break;
9978 }
bbf6f052 9979
b93a436e
JL
9980 case LT_EXPR:
9981 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9982 == MODE_INT)
9983 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9984 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9985 else
9986 comparison = compare (exp, LT, LTU);
9987 break;
bbf6f052 9988
b93a436e
JL
9989 case LE_EXPR:
9990 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9991 == MODE_INT)
9992 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9993 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9994 else
9995 comparison = compare (exp, LE, LEU);
9996 break;
bbf6f052 9997
b93a436e
JL
9998 case GT_EXPR:
9999 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10000 == MODE_INT)
10001 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10002 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10003 else
10004 comparison = compare (exp, GT, GTU);
10005 break;
bbf6f052 10006
b93a436e
JL
10007 case GE_EXPR:
10008 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10009 == MODE_INT)
10010 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10011 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10012 else
10013 comparison = compare (exp, GE, GEU);
10014 break;
bbf6f052 10015
b93a436e
JL
10016 default:
10017 normal:
10018 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10019#if 0
10020 /* This is not needed any more and causes poor code since it causes
10021 comparisons and tests from non-SI objects to have different code
10022 sequences. */
10023 /* Copy to register to avoid generating bad insns by cse
10024 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10025 if (!cse_not_expected && GET_CODE (temp) == MEM)
10026 temp = copy_to_reg (temp);
ca695ac9 10027#endif
b93a436e
JL
10028 do_pending_stack_adjust ();
10029 if (GET_CODE (temp) == CONST_INT)
10030 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10031 else if (GET_CODE (temp) == LABEL_REF)
10032 comparison = const_true_rtx;
10033 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10034 && !can_compare_p (GET_MODE (temp)))
10035 /* Note swapping the labels gives us not-equal. */
10036 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10037 else if (GET_MODE (temp) != VOIDmode)
10038 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10039 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10040 GET_MODE (temp), NULL_RTX, 0);
10041 else
10042 abort ();
10043 }
bbf6f052 10044
b93a436e
JL
10045 /* Do any postincrements in the expression that was tested. */
10046 emit_queue ();
bbf6f052 10047
b93a436e
JL
10048 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10049 straight into a conditional jump instruction as the jump condition.
10050 Otherwise, all the work has been done already. */
bbf6f052 10051
b93a436e
JL
10052 if (comparison == const_true_rtx)
10053 {
10054 if (if_true_label)
10055 emit_jump (if_true_label);
10056 }
10057 else if (comparison == const0_rtx)
10058 {
10059 if (if_false_label)
10060 emit_jump (if_false_label);
10061 }
10062 else if (comparison)
10063 do_jump_for_compare (comparison, if_false_label, if_true_label);
bbf6f052 10064
b93a436e
JL
10065 if (drop_through_label)
10066 {
10067 /* If do_jump produces code that might be jumped around,
10068 do any stack adjusts from that code, before the place
10069 where control merges in. */
10070 do_pending_stack_adjust ();
10071 emit_label (drop_through_label);
10072 }
bbf6f052 10073}
b93a436e
JL
10074\f
10075/* Given a comparison expression EXP for values too wide to be compared
10076 with one insn, test the comparison and jump to the appropriate label.
10077 The code of EXP is ignored; we always test GT if SWAP is 0,
10078 and LT if SWAP is 1. */
bbf6f052 10079
b93a436e
JL
10080static void
10081do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10082 tree exp;
10083 int swap;
10084 rtx if_false_label, if_true_label;
10085{
10086 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10087 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10088 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10089 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10090 rtx drop_through_label = 0;
10091 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10092 int i;
bbf6f052 10093
b93a436e
JL
10094 if (! if_true_label || ! if_false_label)
10095 drop_through_label = gen_label_rtx ();
10096 if (! if_true_label)
10097 if_true_label = drop_through_label;
10098 if (! if_false_label)
10099 if_false_label = drop_through_label;
bbf6f052 10100
b93a436e
JL
10101 /* Compare a word at a time, high order first. */
10102 for (i = 0; i < nwords; i++)
f81497d9 10103 {
b93a436e
JL
10104 rtx comp;
10105 rtx op0_word, op1_word;
10106
10107 if (WORDS_BIG_ENDIAN)
10108 {
10109 op0_word = operand_subword_force (op0, i, mode);
10110 op1_word = operand_subword_force (op1, i, mode);
10111 }
f81497d9 10112 else
b93a436e
JL
10113 {
10114 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10115 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10116 }
10117
10118 /* All but high-order word must be compared as unsigned. */
10119 comp = compare_from_rtx (op0_word, op1_word,
10120 (unsignedp || i > 0) ? GTU : GT,
10121 unsignedp, word_mode, NULL_RTX, 0);
10122 if (comp == const_true_rtx)
10123 emit_jump (if_true_label);
10124 else if (comp != const0_rtx)
10125 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10126
10127 /* Consider lower words only if these are equal. */
10128 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10129 NULL_RTX, 0);
10130 if (comp == const_true_rtx)
10131 emit_jump (if_false_label);
10132 else if (comp != const0_rtx)
10133 do_jump_for_compare (comp, NULL_RTX, if_false_label);
f81497d9 10134 }
ca695ac9 10135
b93a436e
JL
10136 if (if_false_label)
10137 emit_jump (if_false_label);
10138 if (drop_through_label)
10139 emit_label (drop_through_label);
f81497d9
RS
10140}
10141
b93a436e
JL
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.
   Either label may be 0, meaning that outcome falls through instead;
   a local drop-through label is emitted at the end in that case.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  /* Substitute a local fall-through label for any label the caller
     omitted, so every outcome below has somewhere to jump.  */
  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      /* Word 0 is most significant on big-endian targets; otherwise the
	 most significant word is the last one.  */
      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_true_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  /* Falling out of the loop means every word compared equal,
     so OP0 is not greater than OP1.  */
  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
10204
b93a436e
JL
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.

   Jumps to IF_FALSE_LABEL as soon as some word pair differs, and to
   IF_TRUE_LABEL after all pairs compared equal.  Either label may be 0,
   meaning fall through.  NE_EXPR is handled by the caller passing the
   labels in swapped order.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  /* With no false label, an unequal pair just drops through.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      /* Test one word pair for equality.  */
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
				   operand_subword_force (op1, i, mode),
				   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
				   word_mode, NULL_RTX, 0);
      /* NOTE(review): this constant-folded case looks inverted -- a
	 comparison known true means the words are EQUAL, yet it jumps to
	 IF_FALSE_LABEL, while the dynamic do_jump_for_compare call below
	 jumps there only when the pair is UNEQUAL.  compare_from_rtx only
	 folds when both words are CONST_INT, which earlier tree-level
	 folding should prevent, so this branch appears unreachable in
	 practice -- confirm before relying on it.  */
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	/* Jump to IF_FALSE_LABEL when this word pair is unequal.  */
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  /* All words compared equal.  */
  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
b93a436e
JL
10240\f
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.

   The dynamic paths below branch to IF_TRUE_LABEL when OP0 == 0 and to
   IF_FALSE_LABEL when OP0 != 0; callers wanting "jump if nonzero" pass
   the labels in swapped order.  Either label may be 0 (fall through).  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  /* PART becomes 0 if expand_binop cannot produce the IOR; in that case
     we abandon this strategy below.  */
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
			 operand_subword_force (op0, i, GET_MODE (op0)),
			 part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
				   NULL_RTX, 0);

      /* NOTE(review): the two constant-folded branches below look swapped
	 relative to the do_jump_for_compare call, which branches to
	 IF_TRUE_LABEL when PART == 0.  PART is a freshly generated pseudo,
	 so compare_from_rtx should never fold this comparison to a
	 constant and these branches appear unreachable -- confirm before
	 relying on them.  */
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp == const0_rtx)
	emit_jump (if_true_label);
      else
	do_jump_for_compare (comp, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
							  GET_MODE (op0)),
				   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	/* A nonzero word means OP0 != 0.  */
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  /* Every word was zero, so OP0 == 0.  */
  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
bbf6f052 10303
b93a436e
JL
/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.

   COMPARISON is an rtx whose code (EQ, LT, GTU, ...) selects the branch
   generator via bcc_gen_fctn; the condition codes are assumed to have
   been set already.  Either label may be 0, meaning that outcome should
   fall through.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      /* Branch to the true label when COMPARISON holds ...  */
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
	abort ();

      /* ... and fall into an unconditional jump otherwise.  */
      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = get_last_insn ();
      rtx branch = 0;

      /* Output the branch with the opposite condition.  Then try to invert
	 what is generated.  If more than one insn is a branch, or if the
	 branch is not the last insn written, abort.  If we can't invert
	 the branch, make a true label, redirect this jump to it, emit a
	 jump to the false label, and define the true label.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
      else
	abort ();

      /* Here we get the first insn that was just emitted.  It used to be the
	 case that, on some machines, emitting the branch would discard
	 the previous compare insn and emit a replacement.  This isn't
	 done anymore, but abort if we see that PREV is deleted.  */

      if (prev == 0)
	insn = get_insns ();
      else if (INSN_DELETED_P (prev))
	abort ();
      else
	insn = NEXT_INSN (prev);

      /* Locate the branch among the just-emitted insns; there must be
	 exactly one, and it must be the last insn emitted.  */
      for (; insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == JUMP_INSN)
	  {
	    if (branch)
	      abort ();
	    branch = insn;
	  }

      if (branch != get_last_insn ())
	abort ();

      /* Flip the branch so it is taken when COMPARISON is false; if the
	 condition cannot be inverted, route the true case around an
	 unconditional jump to IF_FALSE_LABEL instead.  */
      JUMP_LABEL (branch) = if_false_label;
      if (! invert_jump (branch, if_false_label))
	{
	  if_true_label = gen_label_rtx ();
	  redirect_jump (branch, if_true_label);
	  emit_jump (if_false_label);
	  emit_label (if_true_label);
	}
    }
}
10371\f
10372/* Generate code for a comparison expression EXP
10373 (including code to compute the values to be compared)
10374 and set (CC0) according to the result.
10375 SIGNED_CODE should be the rtx operation for this comparison for
10376 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
bbf6f052 10377
b93a436e
JL
10378 We force a stack adjustment unless there are currently
10379 things pushed on the stack that aren't yet used. */
ca695ac9 10380
b93a436e
JL
10381static rtx
10382compare (exp, signed_code, unsigned_code)
10383 register tree exp;
10384 enum rtx_code signed_code, unsigned_code;
10385{
10386 register rtx op0
10387 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10388 register rtx op1
10389 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10390 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10391 register enum machine_mode mode = TYPE_MODE (type);
10392 int unsignedp = TREE_UNSIGNED (type);
10393 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
ca695ac9 10394
b93a436e
JL
10395#ifdef HAVE_canonicalize_funcptr_for_compare
10396 /* If function pointers need to be "canonicalized" before they can
10397 be reliably compared, then canonicalize them. */
10398 if (HAVE_canonicalize_funcptr_for_compare
10399 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10400 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10401 == FUNCTION_TYPE))
bbf6f052 10402 {
b93a436e 10403 rtx new_op0 = gen_reg_rtx (mode);
bbf6f052 10404
b93a436e
JL
10405 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10406 op0 = new_op0;
ca695ac9 10407 }
bbf6f052 10408
b93a436e
JL
10409 if (HAVE_canonicalize_funcptr_for_compare
10410 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10411 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10412 == FUNCTION_TYPE))
10413 {
10414 rtx new_op1 = gen_reg_rtx (mode);
bbf6f052 10415
b93a436e
JL
10416 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10417 op1 = new_op1;
10418 }
10419#endif
0f41302f 10420
b93a436e
JL
10421 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10422 ((mode == BLKmode)
10423 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10424 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
ca695ac9 10425}
bbf6f052 10426
b93a436e
JL
10427/* Like compare but expects the values to compare as two rtx's.
10428 The decision as to signed or unsigned comparison must be made by the caller.
bbf6f052 10429
b93a436e
JL
10430 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10431 compared.
bbf6f052 10432
b93a436e
JL
10433 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10434 size of MODE should be used. */
ca695ac9 10435
b93a436e
JL
10436rtx
10437compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10438 register rtx op0, op1;
10439 enum rtx_code code;
10440 int unsignedp;
10441 enum machine_mode mode;
10442 rtx size;
10443 int align;
bbf6f052 10444{
b93a436e 10445 rtx tem;
bbf6f052 10446
b93a436e
JL
10447 /* If one operand is constant, make it the second one. Only do this
10448 if the other operand is not constant as well. */
e7c33f54 10449
b93a436e
JL
10450 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10451 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
ca695ac9 10452 {
b93a436e
JL
10453 tem = op0;
10454 op0 = op1;
10455 op1 = tem;
10456 code = swap_condition (code);
10457 }
bbf6f052 10458
b93a436e
JL
10459 if (flag_force_mem)
10460 {
10461 op0 = force_not_mem (op0);
10462 op1 = force_not_mem (op1);
10463 }
bbf6f052 10464
b93a436e 10465 do_pending_stack_adjust ();
ca695ac9 10466
b93a436e
JL
10467 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10468 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10469 return tem;
ca695ac9 10470
b93a436e
JL
10471#if 0
10472 /* There's no need to do this now that combine.c can eliminate lots of
10473 sign extensions. This can be less efficient in certain cases on other
10474 machines. */
ca695ac9 10475
b93a436e
JL
10476 /* If this is a signed equality comparison, we can do it as an
10477 unsigned comparison since zero-extension is cheaper than sign
10478 extension and comparisons with zero are done as unsigned. This is
10479 the case even on machines that can do fast sign extension, since
10480 zero-extension is easier to combine with other operations than
10481 sign-extension is. If we are comparing against a constant, we must
10482 convert it to what it would look like unsigned. */
10483 if ((code == EQ || code == NE) && ! unsignedp
10484 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10485 {
10486 if (GET_CODE (op1) == CONST_INT
10487 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10488 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10489 unsignedp = 1;
10490 }
10491#endif
ca695ac9 10492
b93a436e 10493 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
ca695ac9 10494
b93a436e
JL
10495 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10496}
10497\f
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      /* x < 1  is  x <= 0 (and unsigned x < 1 is x == 0).  */
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      /* Signed x <= -1  is  x < 0.  */
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      /* Signed x > -1  is  x >= 0.  */
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      /* x >= 1  is  x > 0.  */
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
	      < TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      /* Reuse TARGET as the expansion target only when it is a register
	 of the right mode that INNER does not depend on.  */
      if (subtarget == 0 || GET_CODE (subtarget) != REG
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      /* Bring the tested bit down to bit 0.  */
      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      /* EQ tests produce the complement of the bit, so flip it (taking a
	 requested overall inversion into account).  */
      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  /* From here on we are committed: expand the operands.  */
  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      /* Apply any pending TRUTH_NOT_EXPR inversion with an XOR.  */
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  /* The comparison may have folded to a constant, in which case the
     result (with any inversion applied) is known at compile time.  */
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* Branch over the store of the "false" value when the condition holds;
     the "true" value was stored above.  */
  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bgtu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* Form the address of the table entry:
     TABLE_LABEL + INDEX * (size of one table entry).  */
  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);

  /* Load the table entry into a fresh pseudo, then convert it to Pmode.
     The table contents never change at run time, so mark the MEM
     unchanging to let later passes treat the load as constant.  */
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */